From ec0716636252f324fbde3fb8f14bce12157c8957 Mon Sep 17 00:00:00 2001 From: Matija Stepanic Date: Fri, 21 Oct 2022 01:15:51 +0200 Subject: [PATCH 01/11] Installed flutterflow-socket --- run | 0 tools/github/listen | 0 tools/hotreloader.sh | 0 tools/sync | 0 watch | 0 5 files changed, 0 insertions(+), 0 deletions(-) mode change 100644 => 100755 run mode change 100644 => 100755 tools/github/listen mode change 100644 => 100755 tools/hotreloader.sh mode change 100644 => 100755 tools/sync mode change 100644 => 100755 watch diff --git a/run b/run old mode 100644 new mode 100755 diff --git a/tools/github/listen b/tools/github/listen old mode 100644 new mode 100755 diff --git a/tools/hotreloader.sh b/tools/hotreloader.sh old mode 100644 new mode 100755 diff --git a/tools/sync b/tools/sync old mode 100644 new mode 100755 diff --git a/watch b/watch old mode 100644 new mode 100755 From 738870752fa29c864d9467f487759474af9c2346 Mon Sep 17 00:00:00 2001 From: Matija Stepanic Date: Fri, 21 Oct 2022 01:18:18 +0200 Subject: [PATCH 02/11] Uninstalled flutterflow-socket --- run | 13 - tools/github/.gitignore | 1 - tools/github/listen | 76 --- tools/github/package-lock.json | 1028 -------------------------------- tools/github/package.json | 20 - tools/hotreloader.sh | 22 - tools/sync | 24 - watch | 30 - 8 files changed, 1214 deletions(-) delete mode 100755 run delete mode 100644 tools/github/.gitignore delete mode 100755 tools/github/listen delete mode 100644 tools/github/package-lock.json delete mode 100644 tools/github/package.json delete mode 100755 tools/hotreloader.sh delete mode 100755 tools/sync delete mode 100755 watch diff --git a/run b/run deleted file mode 100755 index ee3a3b5..0000000 --- a/run +++ /dev/null @@ -1,13 +0,0 @@ -#!/bin/bash - -# USAGE i.e. -# ./run 000 - -if [ "$#" -ne 1 ] -then - echo "Usage: ./run " - exit 1 -fi - -# $1 is the flutter device ID -flutter run -d "$1" --pid-file /tmp/flutter.pid \ No newline at end of file diff --git a/tools/github/.gitignore b/tools/github/.gitignore deleted file mode 100644 index b512c09..0000000 --- a/tools/github/.gitignore +++ /dev/null @@ -1 +0,0 @@ -node_modules \ No newline at end of file diff --git a/tools/github/listen b/tools/github/listen deleted file mode 100755 index a3aa989..0000000 --- a/tools/github/listen +++ /dev/null @@ -1,76 +0,0 @@ -#!/usr/bin/env node - -// START - config -// 1. create webhook proxy at https://smee.io/new -// 2. 
set webhook URL and SECRET at https://github.com///settings/hooks/new -const GITHUB_WEBHOOK_URL = process.env.GITHUB_WEBHOOK_URL; // replace with your own GitHub Webhook Proxy URL -const GITHUB_WEBHOOK_SECRET = process.env.GITHUB_WEBHOOK_SECRET; // replace with your own GitHub Webhook Secret -// END - config - -const EventSource = require("eventsource"); -const { Webhooks } = require("@octokit/webhooks"); -const { exec } = require("child_process"); - -const webhooks = new Webhooks({ - secret: GITHUB_WEBHOOK_SECRET -}); - -const WEBHOOK_NAME = "push"; -const WEBHOOK_GIT_REF = "refs/heads/flutterflow"; -const WEBHOOK_GIT_HEAD_COMMIT_AUTHOR_USERNAME = "flutterflow-github-app[bot]"; - -webhooks.onAny(({ id, name, payload }) => { - // console.log(name, "event received", id, payload); - - if (name === WEBHOOK_NAME) { - if (payload.ref === WEBHOOK_GIT_REF) { - if (payload.head_commit.author.username === WEBHOOK_GIT_HEAD_COMMIT_AUTHOR_USERNAME) { - console.log("Commit from FlutterFlow"); - const commitId = payload.head_commit.id; - const commitTimestamp = payload.head_commit.timestamp; - const commitMessage = payload.head_commit.message; - console.log("Commit ID: " + commitId); - console.log("Commit Timestamp: " + commitTimestamp); - console.log("Commit Message: " + commitMessage); - console.log(""); - - const isDeepRequired = commitMessage.includes("DEEP"); - let syncScriptPrefix = ''; - if (isDeepRequired) { - syncScriptPrefix = 'DEEP=true '; - } - - console.log("Running GIT sync script..."); - exec(`${syncScriptPrefix}${__dirname}/../sync`, (error, stdout, stderr) => { - if (error) { - console.log(`error: ${error.message}`); - return; - } - if (stderr) { - // console.log(`stderr: ${stderr}`); - // return; - } - // console.log(`stdout: ${stdout}`); - - console.log("GIT sync script completed."); - console.log(""); - }); - - } - } - } -}); - -const source = new EventSource(GITHUB_WEBHOOK_URL); -source.onmessage = (event) => { - const webhookEvent = JSON.parse(event.data); - webhooks - .verifyAndReceive({ - id: webhookEvent["x-request-id"], - name: webhookEvent["x-github-event"], - signature: webhookEvent["x-hub-signature"], - payload: webhookEvent.body, - }) - .catch(console.error); -}; -source.onopen = () => console.log("Listening for GIT commits from FlutterFlow...\n"); diff --git a/tools/github/package-lock.json b/tools/github/package-lock.json deleted file mode 100644 index ac1155c..0000000 --- a/tools/github/package-lock.json +++ /dev/null @@ -1,1028 +0,0 @@ -{ - "name": "github-auto-sync", - "version": "1.0.0", - "lockfileVersion": 2, - "requires": true, - "packages": { - "": { - "name": "github-auto-sync", - "version": "1.0.0", - "license": "MIT", - "dependencies": { - "@octokit/webhooks": "^10.3.0", - "eventsource": "^2.0.2", - "smee-client": "^1.2.3" - }, - "devDependencies": { - "@types/node": "^18.11.0" - } - }, - "node_modules/@octokit/openapi-types": { - "version": "14.0.0", - "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-14.0.0.tgz", - "integrity": "sha512-HNWisMYlR8VCnNurDU6os2ikx0s0VyEjDYHNS/h4cgb8DeOxQ0n72HyinUtdDVxJhFy3FWLGl0DJhfEWk3P5Iw==" - }, - "node_modules/@octokit/request-error": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-3.0.2.tgz", - "integrity": "sha512-WMNOFYrSaX8zXWoJg9u/pKgWPo94JXilMLb2VManNOby9EZxrQaBe/QSC4a1TzpAlpxofg2X/jMnCyZgL6y7eg==", - "dependencies": { - "@octokit/types": "^8.0.0", - "deprecation": "^2.0.0", - "once": "^1.4.0" - }, - "engines": { - "node": ">= 14" - } 
- }, - "node_modules/@octokit/types": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/@octokit/types/-/types-8.0.0.tgz", - "integrity": "sha512-65/TPpOJP1i3K4lBJMnWqPUJ6zuOtzhtagDvydAWbEXpbFYA0oMKKyLb95NFZZP0lSh/4b6K+DQlzvYQJQQePg==", - "dependencies": { - "@octokit/openapi-types": "^14.0.0" - } - }, - "node_modules/@octokit/webhooks": { - "version": "10.3.0", - "resolved": "https://registry.npmjs.org/@octokit/webhooks/-/webhooks-10.3.0.tgz", - "integrity": "sha512-6k4dNsTjNhoJNJ56RKkWCMXEj32Cgvy4hIsV3fhK60hKbEfI1/rkx7tF87Gqtf1Xg5UVQfFmYygaBPGQCr0TmQ==", - "dependencies": { - "@octokit/request-error": "^3.0.0", - "@octokit/webhooks-methods": "^3.0.0", - "@octokit/webhooks-types": "6.5.0", - "aggregate-error": "^3.1.0" - }, - "engines": { - "node": ">= 14" - } - }, - "node_modules/@octokit/webhooks-methods": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/@octokit/webhooks-methods/-/webhooks-methods-3.0.1.tgz", - "integrity": "sha512-XftYVcBxtzC2G05kdBNn9IYLtQ+Cz6ufKkjZd0DU/qGaZEFTPzM2OabXAWG5tvL0q/I+Exio1JnRiPfetiMSEw==", - "engines": { - "node": ">= 14" - } - }, - "node_modules/@octokit/webhooks-types": { - "version": "6.5.0", - "resolved": "https://registry.npmjs.org/@octokit/webhooks-types/-/webhooks-types-6.5.0.tgz", - "integrity": "sha512-SNx1YKBvi9IQvXo/SQ0p+9OKO2HMdzpCWcKsYxpGW1tlkE9TojYiGnFfxcXddyrsK4mC1UiyXY8+NYjWjtkVmA==" - }, - "node_modules/@types/node": { - "version": "18.11.0", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.11.0.tgz", - "integrity": "sha512-IOXCvVRToe7e0ny7HpT/X9Rb2RYtElG1a+VshjwT00HxrM2dWBApHQoqsI6WiY7Q03vdf2bCrIGzVrkF/5t10w==", - "dev": true - }, - "node_modules/aggregate-error": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz", - "integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==", - "dependencies": { - "clean-stack": "^2.0.0", - "indent-string": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/asap": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz", - "integrity": "sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA==" - }, - "node_modules/asynckit": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", - "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" - }, - "node_modules/basic-auth": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/basic-auth/-/basic-auth-2.0.1.tgz", - "integrity": "sha512-NF+epuEdnUYVlGuhaxbbq+dvJttwLnGY+YixlXlME5KpQ5W3CnXA5cVTneY3SPbPDRkcjMbifrwmFYcClgOZeg==", - "dependencies": { - "safe-buffer": "5.1.2" - }, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/call-bind": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz", - "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==", - "dependencies": { - "function-bind": "^1.1.1", - "get-intrinsic": "^1.0.2" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/clean-stack": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz", - "integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==", - "engines": { - "node": ">=6" - } - }, - 
"node_modules/combined-stream": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", - "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", - "dependencies": { - "delayed-stream": "~1.0.0" - }, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/commander": { - "version": "2.20.3", - "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", - "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==" - }, - "node_modules/component-emitter": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.3.0.tgz", - "integrity": "sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg==" - }, - "node_modules/cookiejar": { - "version": "2.1.3", - "resolved": "https://registry.npmjs.org/cookiejar/-/cookiejar-2.1.3.tgz", - "integrity": "sha512-JxbCBUdrfr6AQjOXrxoTvAMJO4HBTUIlBzslcJPAz+/KT8yk53fXun51u+RenNYvad/+Vc2DIz5o9UxlCDymFQ==" - }, - "node_modules/debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "dependencies": { - "ms": "2.0.0" - } - }, - "node_modules/delayed-stream": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", - "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", - "engines": { - "node": ">=0.4.0" - } - }, - "node_modules/depd": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", - "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/deprecation": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz", - "integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==" - }, - "node_modules/dezalgo": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/dezalgo/-/dezalgo-1.0.3.tgz", - "integrity": "sha512-K7i4zNfT2kgQz3GylDw40ot9GAE47sFZ9EXHFSPP6zONLgH6kWXE0KWJchkbQJLBkRazq4APwZ4OwiFFlT95OQ==", - "dependencies": { - "asap": "^2.0.0", - "wrappy": "1" - } - }, - "node_modules/ee-first": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", - "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==" - }, - "node_modules/eventsource": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/eventsource/-/eventsource-2.0.2.tgz", - "integrity": "sha512-IzUmBGPR3+oUG9dUeXynyNmf91/3zUSJg1lCktzKw47OXuhco54U3r9B7O4XX+Rb1Itm9OZ2b0RkTs10bICOxA==", - "engines": { - "node": ">=12.0.0" - } - }, - "node_modules/fast-safe-stringify": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz", - "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==" - }, - "node_modules/form-data": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", - "integrity": 
"sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", - "dependencies": { - "asynckit": "^0.4.0", - "combined-stream": "^1.0.8", - "mime-types": "^2.1.12" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/formidable": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/formidable/-/formidable-2.0.1.tgz", - "integrity": "sha512-rjTMNbp2BpfQShhFbR3Ruk3qk2y9jKpvMW78nJgx8QKtxjDVrwbZG+wvDOmVbifHyOUOQJXxqEy6r0faRrPzTQ==", - "dependencies": { - "dezalgo": "1.0.3", - "hexoid": "1.0.0", - "once": "1.4.0", - "qs": "6.9.3" - }, - "funding": { - "url": "https://ko-fi.com/tunnckoCore/commissions" - } - }, - "node_modules/formidable/node_modules/qs": { - "version": "6.9.3", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.9.3.tgz", - "integrity": "sha512-EbZYNarm6138UKKq46tdx08Yo/q9ZhFoAXAI1meAFd2GtbRDhbZY2WQSICskT0c5q99aFzLG1D4nvTk9tqfXIw==", - "engines": { - "node": ">=0.6" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/function-bind": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", - "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==" - }, - "node_modules/get-intrinsic": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.3.tgz", - "integrity": "sha512-QJVz1Tj7MS099PevUG5jvnt9tSkXN8K14dxQlikJuPt4uD9hHAHjLyLBiLR5zELelBdD9QNRAXZzsJx0WaDL9A==", - "dependencies": { - "function-bind": "^1.1.1", - "has": "^1.0.3", - "has-symbols": "^1.0.3" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/has": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", - "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", - "dependencies": { - "function-bind": "^1.1.1" - }, - "engines": { - "node": ">= 0.4.0" - } - }, - "node_modules/has-symbols": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", - "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/hexoid": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/hexoid/-/hexoid-1.0.0.tgz", - "integrity": "sha512-QFLV0taWQOZtvIRIAdBChesmogZrtuXvVWsFHZTk2SU+anspqZ2vMnoLg7IE1+Uk16N19APic1BuF8bC8c2m5g==", - "engines": { - "node": ">=8" - } - }, - "node_modules/indent-string": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", - "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==", - "engines": { - "node": ">=8" - } - }, - "node_modules/inherits": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" - }, - "node_modules/lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/methods": { - 
"version": "1.1.2", - "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", - "integrity": "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/mime": { - "version": "2.6.0", - "resolved": "https://registry.npmjs.org/mime/-/mime-2.6.0.tgz", - "integrity": "sha512-USPkMeET31rOMiarsBNIHZKLGgvKc/LrjofAnBlOttf5ajRvqiRA8QsenbcooctK6d6Ts6aqZXBA+XbkKthiQg==", - "bin": { - "mime": "cli.js" - }, - "engines": { - "node": ">=4.0.0" - } - }, - "node_modules/mime-db": { - "version": "1.52.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", - "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/mime-types": { - "version": "2.1.35", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", - "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", - "dependencies": { - "mime-db": "1.52.0" - }, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/morgan": { - "version": "1.10.0", - "resolved": "https://registry.npmjs.org/morgan/-/morgan-1.10.0.tgz", - "integrity": "sha512-AbegBVI4sh6El+1gNwvD5YIck7nSA36weD7xvIxG4in80j/UoK8AEGaWnnz8v1GxonMCltmlNs5ZKbGvl9b1XQ==", - "dependencies": { - "basic-auth": "~2.0.1", - "debug": "2.6.9", - "depd": "~2.0.0", - "on-finished": "~2.3.0", - "on-headers": "~1.0.2" - }, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" - }, - "node_modules/object-inspect": { - "version": "1.12.2", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.2.tgz", - "integrity": "sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ==", - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/on-finished": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz", - "integrity": "sha512-ikqdkGAAyf/X/gPhXGvfgAytDZtDbr+bkNUJ0N9h5MI/dmdgCs3l6hoHrcUv41sRKew3jIwrp4qQDXiK99Utww==", - "dependencies": { - "ee-first": "1.1.1" - }, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/on-headers": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/on-headers/-/on-headers-1.0.2.tgz", - "integrity": "sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA==", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/once": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", - "dependencies": { - "wrappy": "1" - } - }, - "node_modules/qs": { - "version": "6.11.0", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz", - "integrity": "sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==", - "dependencies": { - "side-channel": "^1.0.4" - }, - "engines": { - "node": ">=0.6" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/readable-stream": { - "version": "3.6.0", - "resolved": 
"https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", - "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", - "dependencies": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/safe-buffer": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", - "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" - }, - "node_modules/semver": { - "version": "7.3.8", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz", - "integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==", - "dependencies": { - "lru-cache": "^6.0.0" - }, - "bin": { - "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/side-channel": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", - "integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==", - "dependencies": { - "call-bind": "^1.0.0", - "get-intrinsic": "^1.0.2", - "object-inspect": "^1.9.0" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/smee-client": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/smee-client/-/smee-client-1.2.3.tgz", - "integrity": "sha512-uDrU8u9/Ln7aRXyzGHgVaNUS8onHZZeSwQjCdkMoSL7U85xI+l+Y2NgjibkMJAyXkW7IAbb8rw9RMHIjS6lAwA==", - "dependencies": { - "commander": "^2.19.0", - "eventsource": "^1.1.0", - "morgan": "^1.9.1", - "superagent": "^7.1.3", - "validator": "^13.7.0" - }, - "bin": { - "smee": "bin/smee.js" - } - }, - "node_modules/smee-client/node_modules/eventsource": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/eventsource/-/eventsource-1.1.2.tgz", - "integrity": "sha512-xAH3zWhgO2/3KIniEKYPr8plNSzlGINOUqYj0m0u7AB81iRw8b/3E73W6AuU+6klLbaSFmZnaETQ2lXPfAydrA==", - "engines": { - "node": ">=0.12.0" - } - }, - "node_modules/string_decoder": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", - "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", - "dependencies": { - "safe-buffer": "~5.2.0" - } - }, - "node_modules/string_decoder/node_modules/safe-buffer": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", - "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ] - }, - "node_modules/superagent": { - "version": "7.1.5", - "resolved": "https://registry.npmjs.org/superagent/-/superagent-7.1.5.tgz", - "integrity": "sha512-HQYyGuDRFGmZ6GNC4hq2f37KnsY9Lr0/R1marNZTgMweVDQLTLJJ6DGQ9Tj/xVVs5HEnop9EMmTbywb5P30aqw==", - "dependencies": { - "component-emitter": "^1.3.0", - "cookiejar": "^2.1.3", - "debug": "^4.3.4", - "fast-safe-stringify": "^2.1.1", - "form-data": "^4.0.0", - "formidable": "^2.0.1", - "methods": "^1.1.2", - "mime": "^2.5.0", - "qs": "^6.10.3", - "readable-stream": "^3.6.0", - "semver": "^7.3.7" - }, - "engines": { - "node": 
">=6.4.0 <13 || >=14" - } - }, - "node_modules/superagent/node_modules/debug": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", - "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", - "dependencies": { - "ms": "2.1.2" - }, - "engines": { - "node": ">=6.0" - }, - "peerDependenciesMeta": { - "supports-color": { - "optional": true - } - } - }, - "node_modules/superagent/node_modules/ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" - }, - "node_modules/util-deprecate": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", - "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" - }, - "node_modules/validator": { - "version": "13.7.0", - "resolved": "https://registry.npmjs.org/validator/-/validator-13.7.0.tgz", - "integrity": "sha512-nYXQLCBkpJ8X6ltALua9dRrZDHVYxjJ1wgskNt1lH9fzGjs3tgojGSCBjmEPwkWS1y29+DrizMTW19Pr9uB2nw==", - "engines": { - "node": ">= 0.10" - } - }, - "node_modules/wrappy": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" - }, - "node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" - } - }, - "dependencies": { - "@octokit/openapi-types": { - "version": "14.0.0", - "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-14.0.0.tgz", - "integrity": "sha512-HNWisMYlR8VCnNurDU6os2ikx0s0VyEjDYHNS/h4cgb8DeOxQ0n72HyinUtdDVxJhFy3FWLGl0DJhfEWk3P5Iw==" - }, - "@octokit/request-error": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-3.0.2.tgz", - "integrity": "sha512-WMNOFYrSaX8zXWoJg9u/pKgWPo94JXilMLb2VManNOby9EZxrQaBe/QSC4a1TzpAlpxofg2X/jMnCyZgL6y7eg==", - "requires": { - "@octokit/types": "^8.0.0", - "deprecation": "^2.0.0", - "once": "^1.4.0" - } - }, - "@octokit/types": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/@octokit/types/-/types-8.0.0.tgz", - "integrity": "sha512-65/TPpOJP1i3K4lBJMnWqPUJ6zuOtzhtagDvydAWbEXpbFYA0oMKKyLb95NFZZP0lSh/4b6K+DQlzvYQJQQePg==", - "requires": { - "@octokit/openapi-types": "^14.0.0" - } - }, - "@octokit/webhooks": { - "version": "10.3.0", - "resolved": "https://registry.npmjs.org/@octokit/webhooks/-/webhooks-10.3.0.tgz", - "integrity": "sha512-6k4dNsTjNhoJNJ56RKkWCMXEj32Cgvy4hIsV3fhK60hKbEfI1/rkx7tF87Gqtf1Xg5UVQfFmYygaBPGQCr0TmQ==", - "requires": { - "@octokit/request-error": "^3.0.0", - "@octokit/webhooks-methods": "^3.0.0", - "@octokit/webhooks-types": "6.5.0", - "aggregate-error": "^3.1.0" - } - }, - "@octokit/webhooks-methods": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/@octokit/webhooks-methods/-/webhooks-methods-3.0.1.tgz", - "integrity": "sha512-XftYVcBxtzC2G05kdBNn9IYLtQ+Cz6ufKkjZd0DU/qGaZEFTPzM2OabXAWG5tvL0q/I+Exio1JnRiPfetiMSEw==" - }, - "@octokit/webhooks-types": { - "version": "6.5.0", - "resolved": "https://registry.npmjs.org/@octokit/webhooks-types/-/webhooks-types-6.5.0.tgz", - "integrity": 
"sha512-SNx1YKBvi9IQvXo/SQ0p+9OKO2HMdzpCWcKsYxpGW1tlkE9TojYiGnFfxcXddyrsK4mC1UiyXY8+NYjWjtkVmA==" - }, - "@types/node": { - "version": "18.11.0", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.11.0.tgz", - "integrity": "sha512-IOXCvVRToe7e0ny7HpT/X9Rb2RYtElG1a+VshjwT00HxrM2dWBApHQoqsI6WiY7Q03vdf2bCrIGzVrkF/5t10w==", - "dev": true - }, - "aggregate-error": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz", - "integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==", - "requires": { - "clean-stack": "^2.0.0", - "indent-string": "^4.0.0" - } - }, - "asap": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz", - "integrity": "sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA==" - }, - "asynckit": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", - "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" - }, - "basic-auth": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/basic-auth/-/basic-auth-2.0.1.tgz", - "integrity": "sha512-NF+epuEdnUYVlGuhaxbbq+dvJttwLnGY+YixlXlME5KpQ5W3CnXA5cVTneY3SPbPDRkcjMbifrwmFYcClgOZeg==", - "requires": { - "safe-buffer": "5.1.2" - } - }, - "call-bind": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz", - "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==", - "requires": { - "function-bind": "^1.1.1", - "get-intrinsic": "^1.0.2" - } - }, - "clean-stack": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz", - "integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==" - }, - "combined-stream": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", - "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", - "requires": { - "delayed-stream": "~1.0.0" - } - }, - "commander": { - "version": "2.20.3", - "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", - "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==" - }, - "component-emitter": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.3.0.tgz", - "integrity": "sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg==" - }, - "cookiejar": { - "version": "2.1.3", - "resolved": "https://registry.npmjs.org/cookiejar/-/cookiejar-2.1.3.tgz", - "integrity": "sha512-JxbCBUdrfr6AQjOXrxoTvAMJO4HBTUIlBzslcJPAz+/KT8yk53fXun51u+RenNYvad/+Vc2DIz5o9UxlCDymFQ==" - }, - "debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "requires": { - "ms": "2.0.0" - } - }, - "delayed-stream": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", - "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==" - }, - "depd": { - "version": "2.0.0", - "resolved": 
"https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", - "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==" - }, - "deprecation": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz", - "integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==" - }, - "dezalgo": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/dezalgo/-/dezalgo-1.0.3.tgz", - "integrity": "sha512-K7i4zNfT2kgQz3GylDw40ot9GAE47sFZ9EXHFSPP6zONLgH6kWXE0KWJchkbQJLBkRazq4APwZ4OwiFFlT95OQ==", - "requires": { - "asap": "^2.0.0", - "wrappy": "1" - } - }, - "ee-first": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", - "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==" - }, - "eventsource": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/eventsource/-/eventsource-2.0.2.tgz", - "integrity": "sha512-IzUmBGPR3+oUG9dUeXynyNmf91/3zUSJg1lCktzKw47OXuhco54U3r9B7O4XX+Rb1Itm9OZ2b0RkTs10bICOxA==" - }, - "fast-safe-stringify": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz", - "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==" - }, - "form-data": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", - "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", - "requires": { - "asynckit": "^0.4.0", - "combined-stream": "^1.0.8", - "mime-types": "^2.1.12" - } - }, - "formidable": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/formidable/-/formidable-2.0.1.tgz", - "integrity": "sha512-rjTMNbp2BpfQShhFbR3Ruk3qk2y9jKpvMW78nJgx8QKtxjDVrwbZG+wvDOmVbifHyOUOQJXxqEy6r0faRrPzTQ==", - "requires": { - "dezalgo": "1.0.3", - "hexoid": "1.0.0", - "once": "1.4.0", - "qs": "6.9.3" - }, - "dependencies": { - "qs": { - "version": "6.9.3", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.9.3.tgz", - "integrity": "sha512-EbZYNarm6138UKKq46tdx08Yo/q9ZhFoAXAI1meAFd2GtbRDhbZY2WQSICskT0c5q99aFzLG1D4nvTk9tqfXIw==" - } - } - }, - "function-bind": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", - "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==" - }, - "get-intrinsic": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.3.tgz", - "integrity": "sha512-QJVz1Tj7MS099PevUG5jvnt9tSkXN8K14dxQlikJuPt4uD9hHAHjLyLBiLR5zELelBdD9QNRAXZzsJx0WaDL9A==", - "requires": { - "function-bind": "^1.1.1", - "has": "^1.0.3", - "has-symbols": "^1.0.3" - } - }, - "has": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", - "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", - "requires": { - "function-bind": "^1.1.1" - } - }, - "has-symbols": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", - "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==" - }, - "hexoid": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/hexoid/-/hexoid-1.0.0.tgz", - "integrity": 
"sha512-QFLV0taWQOZtvIRIAdBChesmogZrtuXvVWsFHZTk2SU+anspqZ2vMnoLg7IE1+Uk16N19APic1BuF8bC8c2m5g==" - }, - "indent-string": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", - "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==" - }, - "inherits": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" - }, - "lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "requires": { - "yallist": "^4.0.0" - } - }, - "methods": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", - "integrity": "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==" - }, - "mime": { - "version": "2.6.0", - "resolved": "https://registry.npmjs.org/mime/-/mime-2.6.0.tgz", - "integrity": "sha512-USPkMeET31rOMiarsBNIHZKLGgvKc/LrjofAnBlOttf5ajRvqiRA8QsenbcooctK6d6Ts6aqZXBA+XbkKthiQg==" - }, - "mime-db": { - "version": "1.52.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", - "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==" - }, - "mime-types": { - "version": "2.1.35", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", - "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", - "requires": { - "mime-db": "1.52.0" - } - }, - "morgan": { - "version": "1.10.0", - "resolved": "https://registry.npmjs.org/morgan/-/morgan-1.10.0.tgz", - "integrity": "sha512-AbegBVI4sh6El+1gNwvD5YIck7nSA36weD7xvIxG4in80j/UoK8AEGaWnnz8v1GxonMCltmlNs5ZKbGvl9b1XQ==", - "requires": { - "basic-auth": "~2.0.1", - "debug": "2.6.9", - "depd": "~2.0.0", - "on-finished": "~2.3.0", - "on-headers": "~1.0.2" - } - }, - "ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" - }, - "object-inspect": { - "version": "1.12.2", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.2.tgz", - "integrity": "sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ==" - }, - "on-finished": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz", - "integrity": "sha512-ikqdkGAAyf/X/gPhXGvfgAytDZtDbr+bkNUJ0N9h5MI/dmdgCs3l6hoHrcUv41sRKew3jIwrp4qQDXiK99Utww==", - "requires": { - "ee-first": "1.1.1" - } - }, - "on-headers": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/on-headers/-/on-headers-1.0.2.tgz", - "integrity": "sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA==" - }, - "once": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", - "requires": { - "wrappy": "1" - } - }, - "qs": { - "version": "6.11.0", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz", - "integrity": 
"sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==", - "requires": { - "side-channel": "^1.0.4" - } - }, - "readable-stream": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", - "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", - "requires": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - } - }, - "safe-buffer": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", - "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" - }, - "semver": { - "version": "7.3.8", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz", - "integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==", - "requires": { - "lru-cache": "^6.0.0" - } - }, - "side-channel": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", - "integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==", - "requires": { - "call-bind": "^1.0.0", - "get-intrinsic": "^1.0.2", - "object-inspect": "^1.9.0" - } - }, - "smee-client": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/smee-client/-/smee-client-1.2.3.tgz", - "integrity": "sha512-uDrU8u9/Ln7aRXyzGHgVaNUS8onHZZeSwQjCdkMoSL7U85xI+l+Y2NgjibkMJAyXkW7IAbb8rw9RMHIjS6lAwA==", - "requires": { - "commander": "^2.19.0", - "eventsource": "^1.1.0", - "morgan": "^1.9.1", - "superagent": "^7.1.3", - "validator": "^13.7.0" - }, - "dependencies": { - "eventsource": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/eventsource/-/eventsource-1.1.2.tgz", - "integrity": "sha512-xAH3zWhgO2/3KIniEKYPr8plNSzlGINOUqYj0m0u7AB81iRw8b/3E73W6AuU+6klLbaSFmZnaETQ2lXPfAydrA==" - } - } - }, - "string_decoder": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", - "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", - "requires": { - "safe-buffer": "~5.2.0" - }, - "dependencies": { - "safe-buffer": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", - "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==" - } - } - }, - "superagent": { - "version": "7.1.5", - "resolved": "https://registry.npmjs.org/superagent/-/superagent-7.1.5.tgz", - "integrity": "sha512-HQYyGuDRFGmZ6GNC4hq2f37KnsY9Lr0/R1marNZTgMweVDQLTLJJ6DGQ9Tj/xVVs5HEnop9EMmTbywb5P30aqw==", - "requires": { - "component-emitter": "^1.3.0", - "cookiejar": "^2.1.3", - "debug": "^4.3.4", - "fast-safe-stringify": "^2.1.1", - "form-data": "^4.0.0", - "formidable": "^2.0.1", - "methods": "^1.1.2", - "mime": "^2.5.0", - "qs": "^6.10.3", - "readable-stream": "^3.6.0", - "semver": "^7.3.7" - }, - "dependencies": { - "debug": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", - "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", - "requires": { - "ms": "2.1.2" - } - }, - "ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": 
"sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" - } - } - }, - "util-deprecate": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", - "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" - }, - "validator": { - "version": "13.7.0", - "resolved": "https://registry.npmjs.org/validator/-/validator-13.7.0.tgz", - "integrity": "sha512-nYXQLCBkpJ8X6ltALua9dRrZDHVYxjJ1wgskNt1lH9fzGjs3tgojGSCBjmEPwkWS1y29+DrizMTW19Pr9uB2nw==" - }, - "wrappy": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" - }, - "yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" - } - } -} diff --git a/tools/github/package.json b/tools/github/package.json deleted file mode 100644 index c6d329d..0000000 --- a/tools/github/package.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "name": "github-auto-sync", - "version": "1.0.0", - "description": "", - "main": "listen", - "scripts": { - "test": "echo \"Error: no test specified\" && exit 1" - }, - "author": "Matija Stepanic", - "license": "MIT", - "dependencies": { - "@octokit/webhooks": "^10.3.0", - "eventsource": "^2.0.2", - "smee-client": "^1.2.3" - }, - "devDependencies": { - "@types/node": "^18.11.0" - }, - "type": "commonjs" -} diff --git a/tools/hotreloader.sh b/tools/hotreloader.sh deleted file mode 100755 index beb4259..0000000 --- a/tools/hotreloader.sh +++ /dev/null @@ -1,22 +0,0 @@ -#!/bin/bash - -set -euo pipefail -PIDFILES=($(compgen -G "/tmp/flutter*.pid")) - -trap 'pkill -P $$; exit' SIGINT SIGTERM - -if [[ "${1-}" != "" && ${#PIDFILES[@]} > 0 ]]; then - echo $1 - PIDS="" - for pf in ${PIDFILES[@]}; do - PIDS+=$(cat $pf) - PIDS+=" " - done - if [[ "$1" =~ \/assets\/ || "$1" =~ \/state\/ || "$1" =~ backend ]]; then - echo "Flutter - Restarting ${PIDS}" - kill -USR2 $PIDS - else - echo "Flutter - Reloading ${PIDS}" - kill -USR1 $PIDS - fi -fi \ No newline at end of file diff --git a/tools/sync b/tools/sync deleted file mode 100755 index ab7908a..0000000 --- a/tools/sync +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/bash - -git config pull.rebase false -export GIT_MERGE_AUTOEDIT=no - -git add . -git commit -m "Local changes before sync" -git push - -if ! git pull origin flutterflow; then - exit 1 # break execution if GIT conflict occured -fi - -if [ "$DEEP" = true ] ; then - - flutter pub get - flutter packages pub run build_runner build --delete-conflicting-outputs - flutter pub run flutter_launcher_icons:main - -fi - -git add . -git commit -m "Local changes after sync" -git push \ No newline at end of file diff --git a/watch b/watch deleted file mode 100755 index 5062481..0000000 --- a/watch +++ /dev/null @@ -1,30 +0,0 @@ -#!/bin/bash - -# ------- CONFIGURATION ---------------- -export GITHUB_WEBHOOK_URL="https://smee.io/" -export GITHUB_WEBHOOK_SECRET="ff-my-github-webhook-secret" -# ------- END CONFIGURATION ------------ - -echo "Flutter hotreload watch for a DART files changes..." - -SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) - -trap ctrl_c INT -function ctrl_c() { - # echo "** Trapped CTRL-C" - kill -9 $child_1_pid - kill -9 "$$" -} - -# REQUIREMENTS -# 1. 
Run `npm install -- prefix ./tools/github` -# 2. Setup GITHUB_WEBHOOK_URL and GITHUB_WEBHOOK_SECRET in this script and in the GitHub Webhook settings -$SCRIPT_DIR/./tools/github/listen & -child_1_pid=$! - -while true -do - # 3. install `entr` with `brew install entr` - find "$SCRIPT_DIR/./lib/" -name '*.dart' | entr -dnp "$SCRIPT_DIR/./tools/hotreloader.sh" /_ - sleep 1 -done \ No newline at end of file From de8db30c8cf10c5668160862a09ccce55dc1374d Mon Sep 17 00:00:00 2001 From: Matija Stepanic Date: Fri, 21 Oct 2022 01:18:30 +0200 Subject: [PATCH 03/11] Installed flutterflow-socket --- run | 13 + tools/github/.gitignore | 1 + tools/github/listen | 76 +++ tools/github/package-lock.json | 1028 ++++++++++++++++++++++++++++++++ tools/github/package.json | 20 + tools/hotreloader.sh | 22 + tools/sync | 24 + watch | 30 + 8 files changed, 1214 insertions(+) create mode 100755 run create mode 100644 tools/github/.gitignore create mode 100755 tools/github/listen create mode 100644 tools/github/package-lock.json create mode 100644 tools/github/package.json create mode 100755 tools/hotreloader.sh create mode 100755 tools/sync create mode 100755 watch diff --git a/run b/run new file mode 100755 index 0000000..ee3a3b5 --- /dev/null +++ b/run @@ -0,0 +1,13 @@ +#!/bin/bash + +# USAGE i.e. +# ./run 000 + +if [ "$#" -ne 1 ] +then + echo "Usage: ./run " + exit 1 +fi + +# $1 is the flutter device ID +flutter run -d "$1" --pid-file /tmp/flutter.pid \ No newline at end of file diff --git a/tools/github/.gitignore b/tools/github/.gitignore new file mode 100644 index 0000000..b512c09 --- /dev/null +++ b/tools/github/.gitignore @@ -0,0 +1 @@ +node_modules \ No newline at end of file diff --git a/tools/github/listen b/tools/github/listen new file mode 100755 index 0000000..a3aa989 --- /dev/null +++ b/tools/github/listen @@ -0,0 +1,76 @@ +#!/usr/bin/env node + +// START - config +// 1. create webhook proxy at https://smee.io/new +// 2. 
set webhook URL and SECRET at https://github.com///settings/hooks/new +const GITHUB_WEBHOOK_URL = process.env.GITHUB_WEBHOOK_URL; // replace with your own GitHub Webhook Proxy URL +const GITHUB_WEBHOOK_SECRET = process.env.GITHUB_WEBHOOK_SECRET; // replace with your own GitHub Webhook Secret +// END - config + +const EventSource = require("eventsource"); +const { Webhooks } = require("@octokit/webhooks"); +const { exec } = require("child_process"); + +const webhooks = new Webhooks({ + secret: GITHUB_WEBHOOK_SECRET +}); + +const WEBHOOK_NAME = "push"; +const WEBHOOK_GIT_REF = "refs/heads/flutterflow"; +const WEBHOOK_GIT_HEAD_COMMIT_AUTHOR_USERNAME = "flutterflow-github-app[bot]"; + +webhooks.onAny(({ id, name, payload }) => { + // console.log(name, "event received", id, payload); + + if (name === WEBHOOK_NAME) { + if (payload.ref === WEBHOOK_GIT_REF) { + if (payload.head_commit.author.username === WEBHOOK_GIT_HEAD_COMMIT_AUTHOR_USERNAME) { + console.log("Commit from FlutterFlow"); + const commitId = payload.head_commit.id; + const commitTimestamp = payload.head_commit.timestamp; + const commitMessage = payload.head_commit.message; + console.log("Commit ID: " + commitId); + console.log("Commit Timestamp: " + commitTimestamp); + console.log("Commit Message: " + commitMessage); + console.log(""); + + const isDeepRequired = commitMessage.includes("DEEP"); + let syncScriptPrefix = ''; + if (isDeepRequired) { + syncScriptPrefix = 'DEEP=true '; + } + + console.log("Running GIT sync script..."); + exec(`${syncScriptPrefix}${__dirname}/../sync`, (error, stdout, stderr) => { + if (error) { + console.log(`error: ${error.message}`); + return; + } + if (stderr) { + // console.log(`stderr: ${stderr}`); + // return; + } + // console.log(`stdout: ${stdout}`); + + console.log("GIT sync script completed."); + console.log(""); + }); + + } + } + } +}); + +const source = new EventSource(GITHUB_WEBHOOK_URL); +source.onmessage = (event) => { + const webhookEvent = JSON.parse(event.data); + webhooks + .verifyAndReceive({ + id: webhookEvent["x-request-id"], + name: webhookEvent["x-github-event"], + signature: webhookEvent["x-hub-signature"], + payload: webhookEvent.body, + }) + .catch(console.error); +}; +source.onopen = () => console.log("Listening for GIT commits from FlutterFlow...\n"); diff --git a/tools/github/package-lock.json b/tools/github/package-lock.json new file mode 100644 index 0000000..ac1155c --- /dev/null +++ b/tools/github/package-lock.json @@ -0,0 +1,1028 @@ +{ + "name": "github-auto-sync", + "version": "1.0.0", + "lockfileVersion": 2, + "requires": true, + "packages": { + "": { + "name": "github-auto-sync", + "version": "1.0.0", + "license": "MIT", + "dependencies": { + "@octokit/webhooks": "^10.3.0", + "eventsource": "^2.0.2", + "smee-client": "^1.2.3" + }, + "devDependencies": { + "@types/node": "^18.11.0" + } + }, + "node_modules/@octokit/openapi-types": { + "version": "14.0.0", + "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-14.0.0.tgz", + "integrity": "sha512-HNWisMYlR8VCnNurDU6os2ikx0s0VyEjDYHNS/h4cgb8DeOxQ0n72HyinUtdDVxJhFy3FWLGl0DJhfEWk3P5Iw==" + }, + "node_modules/@octokit/request-error": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-3.0.2.tgz", + "integrity": "sha512-WMNOFYrSaX8zXWoJg9u/pKgWPo94JXilMLb2VManNOby9EZxrQaBe/QSC4a1TzpAlpxofg2X/jMnCyZgL6y7eg==", + "dependencies": { + "@octokit/types": "^8.0.0", + "deprecation": "^2.0.0", + "once": "^1.4.0" + }, + "engines": { + "node": ">= 14" + } + 
}, + "node_modules/@octokit/types": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-8.0.0.tgz", + "integrity": "sha512-65/TPpOJP1i3K4lBJMnWqPUJ6zuOtzhtagDvydAWbEXpbFYA0oMKKyLb95NFZZP0lSh/4b6K+DQlzvYQJQQePg==", + "dependencies": { + "@octokit/openapi-types": "^14.0.0" + } + }, + "node_modules/@octokit/webhooks": { + "version": "10.3.0", + "resolved": "https://registry.npmjs.org/@octokit/webhooks/-/webhooks-10.3.0.tgz", + "integrity": "sha512-6k4dNsTjNhoJNJ56RKkWCMXEj32Cgvy4hIsV3fhK60hKbEfI1/rkx7tF87Gqtf1Xg5UVQfFmYygaBPGQCr0TmQ==", + "dependencies": { + "@octokit/request-error": "^3.0.0", + "@octokit/webhooks-methods": "^3.0.0", + "@octokit/webhooks-types": "6.5.0", + "aggregate-error": "^3.1.0" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/@octokit/webhooks-methods": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@octokit/webhooks-methods/-/webhooks-methods-3.0.1.tgz", + "integrity": "sha512-XftYVcBxtzC2G05kdBNn9IYLtQ+Cz6ufKkjZd0DU/qGaZEFTPzM2OabXAWG5tvL0q/I+Exio1JnRiPfetiMSEw==", + "engines": { + "node": ">= 14" + } + }, + "node_modules/@octokit/webhooks-types": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/@octokit/webhooks-types/-/webhooks-types-6.5.0.tgz", + "integrity": "sha512-SNx1YKBvi9IQvXo/SQ0p+9OKO2HMdzpCWcKsYxpGW1tlkE9TojYiGnFfxcXddyrsK4mC1UiyXY8+NYjWjtkVmA==" + }, + "node_modules/@types/node": { + "version": "18.11.0", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.11.0.tgz", + "integrity": "sha512-IOXCvVRToe7e0ny7HpT/X9Rb2RYtElG1a+VshjwT00HxrM2dWBApHQoqsI6WiY7Q03vdf2bCrIGzVrkF/5t10w==", + "dev": true + }, + "node_modules/aggregate-error": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz", + "integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==", + "dependencies": { + "clean-stack": "^2.0.0", + "indent-string": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/asap": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz", + "integrity": "sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA==" + }, + "node_modules/asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" + }, + "node_modules/basic-auth": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/basic-auth/-/basic-auth-2.0.1.tgz", + "integrity": "sha512-NF+epuEdnUYVlGuhaxbbq+dvJttwLnGY+YixlXlME5KpQ5W3CnXA5cVTneY3SPbPDRkcjMbifrwmFYcClgOZeg==", + "dependencies": { + "safe-buffer": "5.1.2" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/call-bind": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz", + "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==", + "dependencies": { + "function-bind": "^1.1.1", + "get-intrinsic": "^1.0.2" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/clean-stack": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz", + "integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==", + "engines": { + "node": ">=6" + } + }, + 
"node_modules/combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "dependencies": { + "delayed-stream": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/commander": { + "version": "2.20.3", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", + "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==" + }, + "node_modules/component-emitter": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.3.0.tgz", + "integrity": "sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg==" + }, + "node_modules/cookiejar": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/cookiejar/-/cookiejar-2.1.3.tgz", + "integrity": "sha512-JxbCBUdrfr6AQjOXrxoTvAMJO4HBTUIlBzslcJPAz+/KT8yk53fXun51u+RenNYvad/+Vc2DIz5o9UxlCDymFQ==" + }, + "node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/depd": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", + "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/deprecation": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz", + "integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==" + }, + "node_modules/dezalgo": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/dezalgo/-/dezalgo-1.0.3.tgz", + "integrity": "sha512-K7i4zNfT2kgQz3GylDw40ot9GAE47sFZ9EXHFSPP6zONLgH6kWXE0KWJchkbQJLBkRazq4APwZ4OwiFFlT95OQ==", + "dependencies": { + "asap": "^2.0.0", + "wrappy": "1" + } + }, + "node_modules/ee-first": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", + "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==" + }, + "node_modules/eventsource": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/eventsource/-/eventsource-2.0.2.tgz", + "integrity": "sha512-IzUmBGPR3+oUG9dUeXynyNmf91/3zUSJg1lCktzKw47OXuhco54U3r9B7O4XX+Rb1Itm9OZ2b0RkTs10bICOxA==", + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/fast-safe-stringify": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz", + "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==" + }, + "node_modules/form-data": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", + "integrity": 
"sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/formidable": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/formidable/-/formidable-2.0.1.tgz", + "integrity": "sha512-rjTMNbp2BpfQShhFbR3Ruk3qk2y9jKpvMW78nJgx8QKtxjDVrwbZG+wvDOmVbifHyOUOQJXxqEy6r0faRrPzTQ==", + "dependencies": { + "dezalgo": "1.0.3", + "hexoid": "1.0.0", + "once": "1.4.0", + "qs": "6.9.3" + }, + "funding": { + "url": "https://ko-fi.com/tunnckoCore/commissions" + } + }, + "node_modules/formidable/node_modules/qs": { + "version": "6.9.3", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.9.3.tgz", + "integrity": "sha512-EbZYNarm6138UKKq46tdx08Yo/q9ZhFoAXAI1meAFd2GtbRDhbZY2WQSICskT0c5q99aFzLG1D4nvTk9tqfXIw==", + "engines": { + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/function-bind": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", + "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==" + }, + "node_modules/get-intrinsic": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.3.tgz", + "integrity": "sha512-QJVz1Tj7MS099PevUG5jvnt9tSkXN8K14dxQlikJuPt4uD9hHAHjLyLBiLR5zELelBdD9QNRAXZzsJx0WaDL9A==", + "dependencies": { + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.3" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", + "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "dependencies": { + "function-bind": "^1.1.1" + }, + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/has-symbols": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", + "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/hexoid": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/hexoid/-/hexoid-1.0.0.tgz", + "integrity": "sha512-QFLV0taWQOZtvIRIAdBChesmogZrtuXvVWsFHZTk2SU+anspqZ2vMnoLg7IE1+Uk16N19APic1BuF8bC8c2m5g==", + "engines": { + "node": ">=8" + } + }, + "node_modules/indent-string": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", + "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==", + "engines": { + "node": ">=8" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + }, + "node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/methods": { + 
"version": "1.1.2", + "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", + "integrity": "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime": { + "version": "2.6.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-2.6.0.tgz", + "integrity": "sha512-USPkMeET31rOMiarsBNIHZKLGgvKc/LrjofAnBlOttf5ajRvqiRA8QsenbcooctK6d6Ts6aqZXBA+XbkKthiQg==", + "bin": { + "mime": "cli.js" + }, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/morgan": { + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/morgan/-/morgan-1.10.0.tgz", + "integrity": "sha512-AbegBVI4sh6El+1gNwvD5YIck7nSA36weD7xvIxG4in80j/UoK8AEGaWnnz8v1GxonMCltmlNs5ZKbGvl9b1XQ==", + "dependencies": { + "basic-auth": "~2.0.1", + "debug": "2.6.9", + "depd": "~2.0.0", + "on-finished": "~2.3.0", + "on-headers": "~1.0.2" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" + }, + "node_modules/object-inspect": { + "version": "1.12.2", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.2.tgz", + "integrity": "sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ==", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/on-finished": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz", + "integrity": "sha512-ikqdkGAAyf/X/gPhXGvfgAytDZtDbr+bkNUJ0N9h5MI/dmdgCs3l6hoHrcUv41sRKew3jIwrp4qQDXiK99Utww==", + "dependencies": { + "ee-first": "1.1.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/on-headers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/on-headers/-/on-headers-1.0.2.tgz", + "integrity": "sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/qs": { + "version": "6.11.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz", + "integrity": "sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==", + "dependencies": { + "side-channel": "^1.0.4" + }, + "engines": { + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/readable-stream": { + "version": "3.6.0", + "resolved": 
"https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" + }, + "node_modules/semver": { + "version": "7.3.8", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz", + "integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==", + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/side-channel": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", + "integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==", + "dependencies": { + "call-bind": "^1.0.0", + "get-intrinsic": "^1.0.2", + "object-inspect": "^1.9.0" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/smee-client": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/smee-client/-/smee-client-1.2.3.tgz", + "integrity": "sha512-uDrU8u9/Ln7aRXyzGHgVaNUS8onHZZeSwQjCdkMoSL7U85xI+l+Y2NgjibkMJAyXkW7IAbb8rw9RMHIjS6lAwA==", + "dependencies": { + "commander": "^2.19.0", + "eventsource": "^1.1.0", + "morgan": "^1.9.1", + "superagent": "^7.1.3", + "validator": "^13.7.0" + }, + "bin": { + "smee": "bin/smee.js" + } + }, + "node_modules/smee-client/node_modules/eventsource": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/eventsource/-/eventsource-1.1.2.tgz", + "integrity": "sha512-xAH3zWhgO2/3KIniEKYPr8plNSzlGINOUqYj0m0u7AB81iRw8b/3E73W6AuU+6klLbaSFmZnaETQ2lXPfAydrA==", + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/string_decoder": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", + "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "dependencies": { + "safe-buffer": "~5.2.0" + } + }, + "node_modules/string_decoder/node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/superagent": { + "version": "7.1.5", + "resolved": "https://registry.npmjs.org/superagent/-/superagent-7.1.5.tgz", + "integrity": "sha512-HQYyGuDRFGmZ6GNC4hq2f37KnsY9Lr0/R1marNZTgMweVDQLTLJJ6DGQ9Tj/xVVs5HEnop9EMmTbywb5P30aqw==", + "dependencies": { + "component-emitter": "^1.3.0", + "cookiejar": "^2.1.3", + "debug": "^4.3.4", + "fast-safe-stringify": "^2.1.1", + "form-data": "^4.0.0", + "formidable": "^2.0.1", + "methods": "^1.1.2", + "mime": "^2.5.0", + "qs": "^6.10.3", + "readable-stream": "^3.6.0", + "semver": "^7.3.7" + }, + "engines": { + "node": 
">=6.4.0 <13 || >=14" + } + }, + "node_modules/superagent/node_modules/debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/superagent/node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" + }, + "node_modules/validator": { + "version": "13.7.0", + "resolved": "https://registry.npmjs.org/validator/-/validator-13.7.0.tgz", + "integrity": "sha512-nYXQLCBkpJ8X6ltALua9dRrZDHVYxjJ1wgskNt1lH9fzGjs3tgojGSCBjmEPwkWS1y29+DrizMTW19Pr9uB2nw==", + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" + }, + "node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + } + }, + "dependencies": { + "@octokit/openapi-types": { + "version": "14.0.0", + "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-14.0.0.tgz", + "integrity": "sha512-HNWisMYlR8VCnNurDU6os2ikx0s0VyEjDYHNS/h4cgb8DeOxQ0n72HyinUtdDVxJhFy3FWLGl0DJhfEWk3P5Iw==" + }, + "@octokit/request-error": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-3.0.2.tgz", + "integrity": "sha512-WMNOFYrSaX8zXWoJg9u/pKgWPo94JXilMLb2VManNOby9EZxrQaBe/QSC4a1TzpAlpxofg2X/jMnCyZgL6y7eg==", + "requires": { + "@octokit/types": "^8.0.0", + "deprecation": "^2.0.0", + "once": "^1.4.0" + } + }, + "@octokit/types": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-8.0.0.tgz", + "integrity": "sha512-65/TPpOJP1i3K4lBJMnWqPUJ6zuOtzhtagDvydAWbEXpbFYA0oMKKyLb95NFZZP0lSh/4b6K+DQlzvYQJQQePg==", + "requires": { + "@octokit/openapi-types": "^14.0.0" + } + }, + "@octokit/webhooks": { + "version": "10.3.0", + "resolved": "https://registry.npmjs.org/@octokit/webhooks/-/webhooks-10.3.0.tgz", + "integrity": "sha512-6k4dNsTjNhoJNJ56RKkWCMXEj32Cgvy4hIsV3fhK60hKbEfI1/rkx7tF87Gqtf1Xg5UVQfFmYygaBPGQCr0TmQ==", + "requires": { + "@octokit/request-error": "^3.0.0", + "@octokit/webhooks-methods": "^3.0.0", + "@octokit/webhooks-types": "6.5.0", + "aggregate-error": "^3.1.0" + } + }, + "@octokit/webhooks-methods": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@octokit/webhooks-methods/-/webhooks-methods-3.0.1.tgz", + "integrity": "sha512-XftYVcBxtzC2G05kdBNn9IYLtQ+Cz6ufKkjZd0DU/qGaZEFTPzM2OabXAWG5tvL0q/I+Exio1JnRiPfetiMSEw==" + }, + "@octokit/webhooks-types": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/@octokit/webhooks-types/-/webhooks-types-6.5.0.tgz", + "integrity": 
"sha512-SNx1YKBvi9IQvXo/SQ0p+9OKO2HMdzpCWcKsYxpGW1tlkE9TojYiGnFfxcXddyrsK4mC1UiyXY8+NYjWjtkVmA==" + }, + "@types/node": { + "version": "18.11.0", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.11.0.tgz", + "integrity": "sha512-IOXCvVRToe7e0ny7HpT/X9Rb2RYtElG1a+VshjwT00HxrM2dWBApHQoqsI6WiY7Q03vdf2bCrIGzVrkF/5t10w==", + "dev": true + }, + "aggregate-error": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz", + "integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==", + "requires": { + "clean-stack": "^2.0.0", + "indent-string": "^4.0.0" + } + }, + "asap": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz", + "integrity": "sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA==" + }, + "asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" + }, + "basic-auth": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/basic-auth/-/basic-auth-2.0.1.tgz", + "integrity": "sha512-NF+epuEdnUYVlGuhaxbbq+dvJttwLnGY+YixlXlME5KpQ5W3CnXA5cVTneY3SPbPDRkcjMbifrwmFYcClgOZeg==", + "requires": { + "safe-buffer": "5.1.2" + } + }, + "call-bind": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz", + "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==", + "requires": { + "function-bind": "^1.1.1", + "get-intrinsic": "^1.0.2" + } + }, + "clean-stack": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz", + "integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==" + }, + "combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "requires": { + "delayed-stream": "~1.0.0" + } + }, + "commander": { + "version": "2.20.3", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", + "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==" + }, + "component-emitter": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.3.0.tgz", + "integrity": "sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg==" + }, + "cookiejar": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/cookiejar/-/cookiejar-2.1.3.tgz", + "integrity": "sha512-JxbCBUdrfr6AQjOXrxoTvAMJO4HBTUIlBzslcJPAz+/KT8yk53fXun51u+RenNYvad/+Vc2DIz5o9UxlCDymFQ==" + }, + "debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "requires": { + "ms": "2.0.0" + } + }, + "delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==" + }, + "depd": { + "version": "2.0.0", + "resolved": 
"https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", + "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==" + }, + "deprecation": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz", + "integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==" + }, + "dezalgo": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/dezalgo/-/dezalgo-1.0.3.tgz", + "integrity": "sha512-K7i4zNfT2kgQz3GylDw40ot9GAE47sFZ9EXHFSPP6zONLgH6kWXE0KWJchkbQJLBkRazq4APwZ4OwiFFlT95OQ==", + "requires": { + "asap": "^2.0.0", + "wrappy": "1" + } + }, + "ee-first": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", + "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==" + }, + "eventsource": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/eventsource/-/eventsource-2.0.2.tgz", + "integrity": "sha512-IzUmBGPR3+oUG9dUeXynyNmf91/3zUSJg1lCktzKw47OXuhco54U3r9B7O4XX+Rb1Itm9OZ2b0RkTs10bICOxA==" + }, + "fast-safe-stringify": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz", + "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==" + }, + "form-data": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", + "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", + "requires": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + } + }, + "formidable": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/formidable/-/formidable-2.0.1.tgz", + "integrity": "sha512-rjTMNbp2BpfQShhFbR3Ruk3qk2y9jKpvMW78nJgx8QKtxjDVrwbZG+wvDOmVbifHyOUOQJXxqEy6r0faRrPzTQ==", + "requires": { + "dezalgo": "1.0.3", + "hexoid": "1.0.0", + "once": "1.4.0", + "qs": "6.9.3" + }, + "dependencies": { + "qs": { + "version": "6.9.3", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.9.3.tgz", + "integrity": "sha512-EbZYNarm6138UKKq46tdx08Yo/q9ZhFoAXAI1meAFd2GtbRDhbZY2WQSICskT0c5q99aFzLG1D4nvTk9tqfXIw==" + } + } + }, + "function-bind": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", + "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==" + }, + "get-intrinsic": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.3.tgz", + "integrity": "sha512-QJVz1Tj7MS099PevUG5jvnt9tSkXN8K14dxQlikJuPt4uD9hHAHjLyLBiLR5zELelBdD9QNRAXZzsJx0WaDL9A==", + "requires": { + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.3" + } + }, + "has": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", + "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "requires": { + "function-bind": "^1.1.1" + } + }, + "has-symbols": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", + "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==" + }, + "hexoid": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/hexoid/-/hexoid-1.0.0.tgz", + "integrity": 
"sha512-QFLV0taWQOZtvIRIAdBChesmogZrtuXvVWsFHZTk2SU+anspqZ2vMnoLg7IE1+Uk16N19APic1BuF8bC8c2m5g==" + }, + "indent-string": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", + "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==" + }, + "inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + }, + "lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "requires": { + "yallist": "^4.0.0" + } + }, + "methods": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", + "integrity": "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==" + }, + "mime": { + "version": "2.6.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-2.6.0.tgz", + "integrity": "sha512-USPkMeET31rOMiarsBNIHZKLGgvKc/LrjofAnBlOttf5ajRvqiRA8QsenbcooctK6d6Ts6aqZXBA+XbkKthiQg==" + }, + "mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==" + }, + "mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "requires": { + "mime-db": "1.52.0" + } + }, + "morgan": { + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/morgan/-/morgan-1.10.0.tgz", + "integrity": "sha512-AbegBVI4sh6El+1gNwvD5YIck7nSA36weD7xvIxG4in80j/UoK8AEGaWnnz8v1GxonMCltmlNs5ZKbGvl9b1XQ==", + "requires": { + "basic-auth": "~2.0.1", + "debug": "2.6.9", + "depd": "~2.0.0", + "on-finished": "~2.3.0", + "on-headers": "~1.0.2" + } + }, + "ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" + }, + "object-inspect": { + "version": "1.12.2", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.2.tgz", + "integrity": "sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ==" + }, + "on-finished": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz", + "integrity": "sha512-ikqdkGAAyf/X/gPhXGvfgAytDZtDbr+bkNUJ0N9h5MI/dmdgCs3l6hoHrcUv41sRKew3jIwrp4qQDXiK99Utww==", + "requires": { + "ee-first": "1.1.1" + } + }, + "on-headers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/on-headers/-/on-headers-1.0.2.tgz", + "integrity": "sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA==" + }, + "once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "requires": { + "wrappy": "1" + } + }, + "qs": { + "version": "6.11.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz", + "integrity": 
"sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==", + "requires": { + "side-channel": "^1.0.4" + } + }, + "readable-stream": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "requires": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + } + }, + "safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" + }, + "semver": { + "version": "7.3.8", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz", + "integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==", + "requires": { + "lru-cache": "^6.0.0" + } + }, + "side-channel": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", + "integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==", + "requires": { + "call-bind": "^1.0.0", + "get-intrinsic": "^1.0.2", + "object-inspect": "^1.9.0" + } + }, + "smee-client": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/smee-client/-/smee-client-1.2.3.tgz", + "integrity": "sha512-uDrU8u9/Ln7aRXyzGHgVaNUS8onHZZeSwQjCdkMoSL7U85xI+l+Y2NgjibkMJAyXkW7IAbb8rw9RMHIjS6lAwA==", + "requires": { + "commander": "^2.19.0", + "eventsource": "^1.1.0", + "morgan": "^1.9.1", + "superagent": "^7.1.3", + "validator": "^13.7.0" + }, + "dependencies": { + "eventsource": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/eventsource/-/eventsource-1.1.2.tgz", + "integrity": "sha512-xAH3zWhgO2/3KIniEKYPr8plNSzlGINOUqYj0m0u7AB81iRw8b/3E73W6AuU+6klLbaSFmZnaETQ2lXPfAydrA==" + } + } + }, + "string_decoder": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", + "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "requires": { + "safe-buffer": "~5.2.0" + }, + "dependencies": { + "safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==" + } + } + }, + "superagent": { + "version": "7.1.5", + "resolved": "https://registry.npmjs.org/superagent/-/superagent-7.1.5.tgz", + "integrity": "sha512-HQYyGuDRFGmZ6GNC4hq2f37KnsY9Lr0/R1marNZTgMweVDQLTLJJ6DGQ9Tj/xVVs5HEnop9EMmTbywb5P30aqw==", + "requires": { + "component-emitter": "^1.3.0", + "cookiejar": "^2.1.3", + "debug": "^4.3.4", + "fast-safe-stringify": "^2.1.1", + "form-data": "^4.0.0", + "formidable": "^2.0.1", + "methods": "^1.1.2", + "mime": "^2.5.0", + "qs": "^6.10.3", + "readable-stream": "^3.6.0", + "semver": "^7.3.7" + }, + "dependencies": { + "debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "requires": { + "ms": "2.1.2" + } + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": 
"sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + } + } + }, + "util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" + }, + "validator": { + "version": "13.7.0", + "resolved": "https://registry.npmjs.org/validator/-/validator-13.7.0.tgz", + "integrity": "sha512-nYXQLCBkpJ8X6ltALua9dRrZDHVYxjJ1wgskNt1lH9fzGjs3tgojGSCBjmEPwkWS1y29+DrizMTW19Pr9uB2nw==" + }, + "wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" + }, + "yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + } + } +} diff --git a/tools/github/package.json b/tools/github/package.json new file mode 100644 index 0000000..c6d329d --- /dev/null +++ b/tools/github/package.json @@ -0,0 +1,20 @@ +{ + "name": "github-auto-sync", + "version": "1.0.0", + "description": "", + "main": "listen", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "author": "Matija Stepanic", + "license": "MIT", + "dependencies": { + "@octokit/webhooks": "^10.3.0", + "eventsource": "^2.0.2", + "smee-client": "^1.2.3" + }, + "devDependencies": { + "@types/node": "^18.11.0" + }, + "type": "commonjs" +} diff --git a/tools/hotreloader.sh b/tools/hotreloader.sh new file mode 100755 index 0000000..beb4259 --- /dev/null +++ b/tools/hotreloader.sh @@ -0,0 +1,22 @@ +#!/bin/bash + +set -euo pipefail +PIDFILES=($(compgen -G "/tmp/flutter*.pid")) + +trap 'pkill -P $$; exit' SIGINT SIGTERM + +if [[ "${1-}" != "" && ${#PIDFILES[@]} > 0 ]]; then + echo $1 + PIDS="" + for pf in ${PIDFILES[@]}; do + PIDS+=$(cat $pf) + PIDS+=" " + done + if [[ "$1" =~ \/assets\/ || "$1" =~ \/state\/ || "$1" =~ backend ]]; then + echo "Flutter - Restarting ${PIDS}" + kill -USR2 $PIDS + else + echo "Flutter - Reloading ${PIDS}" + kill -USR1 $PIDS + fi +fi \ No newline at end of file diff --git a/tools/sync b/tools/sync new file mode 100755 index 0000000..ab7908a --- /dev/null +++ b/tools/sync @@ -0,0 +1,24 @@ +#!/bin/bash + +git config pull.rebase false +export GIT_MERGE_AUTOEDIT=no + +git add . +git commit -m "Local changes before sync" +git push + +if ! git pull origin flutterflow; then + exit 1 # break execution if GIT conflict occured +fi + +if [ "$DEEP" = true ] ; then + + flutter pub get + flutter packages pub run build_runner build --delete-conflicting-outputs + flutter pub run flutter_launcher_icons:main + +fi + +git add . +git commit -m "Local changes after sync" +git push \ No newline at end of file diff --git a/watch b/watch new file mode 100755 index 0000000..5062481 --- /dev/null +++ b/watch @@ -0,0 +1,30 @@ +#!/bin/bash + +# ------- CONFIGURATION ---------------- +export GITHUB_WEBHOOK_URL="https://smee.io/" +export GITHUB_WEBHOOK_SECRET="ff-my-github-webhook-secret" +# ------- END CONFIGURATION ------------ + +echo "Flutter hotreload watch for a DART files changes..." + +SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) + +trap ctrl_c INT +function ctrl_c() { + # echo "** Trapped CTRL-C" + kill -9 $child_1_pid + kill -9 "$$" +} + +# REQUIREMENTS +# 1. 
Run `npm install -- prefix ./tools/github` +# 2. Setup GITHUB_WEBHOOK_URL and GITHUB_WEBHOOK_SECRET in this script and in the GitHub Webhook settings +$SCRIPT_DIR/./tools/github/listen & +child_1_pid=$! + +while true +do + # 3. install `entr` with `brew install entr` + find "$SCRIPT_DIR/./lib/" -name '*.dart' | entr -dnp "$SCRIPT_DIR/./tools/hotreloader.sh" /_ + sleep 1 +done \ No newline at end of file From 615ae158d7ddeb1b350a8e6837c099d5b3bfe34d Mon Sep 17 00:00:00 2001 From: Matija Stepanic Date: Fri, 21 Oct 2022 01:19:15 +0200 Subject: [PATCH 04/11] Uninstalled flutterflow-socket --- run | 13 - tools/github/.gitignore | 1 - tools/github/listen | 76 --- tools/github/package-lock.json | 1028 -------------------------------- tools/github/package.json | 20 - tools/hotreloader.sh | 22 - tools/sync | 24 - watch | 30 - 8 files changed, 1214 deletions(-) delete mode 100755 run delete mode 100644 tools/github/.gitignore delete mode 100755 tools/github/listen delete mode 100644 tools/github/package-lock.json delete mode 100644 tools/github/package.json delete mode 100755 tools/hotreloader.sh delete mode 100755 tools/sync delete mode 100755 watch diff --git a/run b/run deleted file mode 100755 index ee3a3b5..0000000 --- a/run +++ /dev/null @@ -1,13 +0,0 @@ -#!/bin/bash - -# USAGE i.e. -# ./run 000 - -if [ "$#" -ne 1 ] -then - echo "Usage: ./run " - exit 1 -fi - -# $1 is the flutter device ID -flutter run -d "$1" --pid-file /tmp/flutter.pid \ No newline at end of file diff --git a/tools/github/.gitignore b/tools/github/.gitignore deleted file mode 100644 index b512c09..0000000 --- a/tools/github/.gitignore +++ /dev/null @@ -1 +0,0 @@ -node_modules \ No newline at end of file diff --git a/tools/github/listen b/tools/github/listen deleted file mode 100755 index a3aa989..0000000 --- a/tools/github/listen +++ /dev/null @@ -1,76 +0,0 @@ -#!/usr/bin/env node - -// START - config -// 1. create webhook proxy at https://smee.io/new -// 2. 
set webhook URL and SECRET at https://github.com///settings/hooks/new -const GITHUB_WEBHOOK_URL = process.env.GITHUB_WEBHOOK_URL; // replace with your own GitHub Webhook Proxy URL -const GITHUB_WEBHOOK_SECRET = process.env.GITHUB_WEBHOOK_SECRET; // replace with your own GitHub Webhook Secret -// END - config - -const EventSource = require("eventsource"); -const { Webhooks } = require("@octokit/webhooks"); -const { exec } = require("child_process"); - -const webhooks = new Webhooks({ - secret: GITHUB_WEBHOOK_SECRET -}); - -const WEBHOOK_NAME = "push"; -const WEBHOOK_GIT_REF = "refs/heads/flutterflow"; -const WEBHOOK_GIT_HEAD_COMMIT_AUTHOR_USERNAME = "flutterflow-github-app[bot]"; - -webhooks.onAny(({ id, name, payload }) => { - // console.log(name, "event received", id, payload); - - if (name === WEBHOOK_NAME) { - if (payload.ref === WEBHOOK_GIT_REF) { - if (payload.head_commit.author.username === WEBHOOK_GIT_HEAD_COMMIT_AUTHOR_USERNAME) { - console.log("Commit from FlutterFlow"); - const commitId = payload.head_commit.id; - const commitTimestamp = payload.head_commit.timestamp; - const commitMessage = payload.head_commit.message; - console.log("Commit ID: " + commitId); - console.log("Commit Timestamp: " + commitTimestamp); - console.log("Commit Message: " + commitMessage); - console.log(""); - - const isDeepRequired = commitMessage.includes("DEEP"); - let syncScriptPrefix = ''; - if (isDeepRequired) { - syncScriptPrefix = 'DEEP=true '; - } - - console.log("Running GIT sync script..."); - exec(`${syncScriptPrefix}${__dirname}/../sync`, (error, stdout, stderr) => { - if (error) { - console.log(`error: ${error.message}`); - return; - } - if (stderr) { - // console.log(`stderr: ${stderr}`); - // return; - } - // console.log(`stdout: ${stdout}`); - - console.log("GIT sync script completed."); - console.log(""); - }); - - } - } - } -}); - -const source = new EventSource(GITHUB_WEBHOOK_URL); -source.onmessage = (event) => { - const webhookEvent = JSON.parse(event.data); - webhooks - .verifyAndReceive({ - id: webhookEvent["x-request-id"], - name: webhookEvent["x-github-event"], - signature: webhookEvent["x-hub-signature"], - payload: webhookEvent.body, - }) - .catch(console.error); -}; -source.onopen = () => console.log("Listening for GIT commits from FlutterFlow...\n"); diff --git a/tools/github/package-lock.json b/tools/github/package-lock.json deleted file mode 100644 index ac1155c..0000000 --- a/tools/github/package-lock.json +++ /dev/null @@ -1,1028 +0,0 @@ -{ - "name": "github-auto-sync", - "version": "1.0.0", - "lockfileVersion": 2, - "requires": true, - "packages": { - "": { - "name": "github-auto-sync", - "version": "1.0.0", - "license": "MIT", - "dependencies": { - "@octokit/webhooks": "^10.3.0", - "eventsource": "^2.0.2", - "smee-client": "^1.2.3" - }, - "devDependencies": { - "@types/node": "^18.11.0" - } - }, - "node_modules/@octokit/openapi-types": { - "version": "14.0.0", - "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-14.0.0.tgz", - "integrity": "sha512-HNWisMYlR8VCnNurDU6os2ikx0s0VyEjDYHNS/h4cgb8DeOxQ0n72HyinUtdDVxJhFy3FWLGl0DJhfEWk3P5Iw==" - }, - "node_modules/@octokit/request-error": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-3.0.2.tgz", - "integrity": "sha512-WMNOFYrSaX8zXWoJg9u/pKgWPo94JXilMLb2VManNOby9EZxrQaBe/QSC4a1TzpAlpxofg2X/jMnCyZgL6y7eg==", - "dependencies": { - "@octokit/types": "^8.0.0", - "deprecation": "^2.0.0", - "once": "^1.4.0" - }, - "engines": { - "node": ">= 14" - } 
- }, - "node_modules/@octokit/types": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/@octokit/types/-/types-8.0.0.tgz", - "integrity": "sha512-65/TPpOJP1i3K4lBJMnWqPUJ6zuOtzhtagDvydAWbEXpbFYA0oMKKyLb95NFZZP0lSh/4b6K+DQlzvYQJQQePg==", - "dependencies": { - "@octokit/openapi-types": "^14.0.0" - } - }, - "node_modules/@octokit/webhooks": { - "version": "10.3.0", - "resolved": "https://registry.npmjs.org/@octokit/webhooks/-/webhooks-10.3.0.tgz", - "integrity": "sha512-6k4dNsTjNhoJNJ56RKkWCMXEj32Cgvy4hIsV3fhK60hKbEfI1/rkx7tF87Gqtf1Xg5UVQfFmYygaBPGQCr0TmQ==", - "dependencies": { - "@octokit/request-error": "^3.0.0", - "@octokit/webhooks-methods": "^3.0.0", - "@octokit/webhooks-types": "6.5.0", - "aggregate-error": "^3.1.0" - }, - "engines": { - "node": ">= 14" - } - }, - "node_modules/@octokit/webhooks-methods": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/@octokit/webhooks-methods/-/webhooks-methods-3.0.1.tgz", - "integrity": "sha512-XftYVcBxtzC2G05kdBNn9IYLtQ+Cz6ufKkjZd0DU/qGaZEFTPzM2OabXAWG5tvL0q/I+Exio1JnRiPfetiMSEw==", - "engines": { - "node": ">= 14" - } - }, - "node_modules/@octokit/webhooks-types": { - "version": "6.5.0", - "resolved": "https://registry.npmjs.org/@octokit/webhooks-types/-/webhooks-types-6.5.0.tgz", - "integrity": "sha512-SNx1YKBvi9IQvXo/SQ0p+9OKO2HMdzpCWcKsYxpGW1tlkE9TojYiGnFfxcXddyrsK4mC1UiyXY8+NYjWjtkVmA==" - }, - "node_modules/@types/node": { - "version": "18.11.0", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.11.0.tgz", - "integrity": "sha512-IOXCvVRToe7e0ny7HpT/X9Rb2RYtElG1a+VshjwT00HxrM2dWBApHQoqsI6WiY7Q03vdf2bCrIGzVrkF/5t10w==", - "dev": true - }, - "node_modules/aggregate-error": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz", - "integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==", - "dependencies": { - "clean-stack": "^2.0.0", - "indent-string": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/asap": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz", - "integrity": "sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA==" - }, - "node_modules/asynckit": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", - "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" - }, - "node_modules/basic-auth": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/basic-auth/-/basic-auth-2.0.1.tgz", - "integrity": "sha512-NF+epuEdnUYVlGuhaxbbq+dvJttwLnGY+YixlXlME5KpQ5W3CnXA5cVTneY3SPbPDRkcjMbifrwmFYcClgOZeg==", - "dependencies": { - "safe-buffer": "5.1.2" - }, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/call-bind": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz", - "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==", - "dependencies": { - "function-bind": "^1.1.1", - "get-intrinsic": "^1.0.2" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/clean-stack": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz", - "integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==", - "engines": { - "node": ">=6" - } - }, - 
"node_modules/combined-stream": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", - "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", - "dependencies": { - "delayed-stream": "~1.0.0" - }, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/commander": { - "version": "2.20.3", - "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", - "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==" - }, - "node_modules/component-emitter": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.3.0.tgz", - "integrity": "sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg==" - }, - "node_modules/cookiejar": { - "version": "2.1.3", - "resolved": "https://registry.npmjs.org/cookiejar/-/cookiejar-2.1.3.tgz", - "integrity": "sha512-JxbCBUdrfr6AQjOXrxoTvAMJO4HBTUIlBzslcJPAz+/KT8yk53fXun51u+RenNYvad/+Vc2DIz5o9UxlCDymFQ==" - }, - "node_modules/debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "dependencies": { - "ms": "2.0.0" - } - }, - "node_modules/delayed-stream": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", - "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", - "engines": { - "node": ">=0.4.0" - } - }, - "node_modules/depd": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", - "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/deprecation": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz", - "integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==" - }, - "node_modules/dezalgo": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/dezalgo/-/dezalgo-1.0.3.tgz", - "integrity": "sha512-K7i4zNfT2kgQz3GylDw40ot9GAE47sFZ9EXHFSPP6zONLgH6kWXE0KWJchkbQJLBkRazq4APwZ4OwiFFlT95OQ==", - "dependencies": { - "asap": "^2.0.0", - "wrappy": "1" - } - }, - "node_modules/ee-first": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", - "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==" - }, - "node_modules/eventsource": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/eventsource/-/eventsource-2.0.2.tgz", - "integrity": "sha512-IzUmBGPR3+oUG9dUeXynyNmf91/3zUSJg1lCktzKw47OXuhco54U3r9B7O4XX+Rb1Itm9OZ2b0RkTs10bICOxA==", - "engines": { - "node": ">=12.0.0" - } - }, - "node_modules/fast-safe-stringify": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz", - "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==" - }, - "node_modules/form-data": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", - "integrity": 
"sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", - "dependencies": { - "asynckit": "^0.4.0", - "combined-stream": "^1.0.8", - "mime-types": "^2.1.12" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/formidable": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/formidable/-/formidable-2.0.1.tgz", - "integrity": "sha512-rjTMNbp2BpfQShhFbR3Ruk3qk2y9jKpvMW78nJgx8QKtxjDVrwbZG+wvDOmVbifHyOUOQJXxqEy6r0faRrPzTQ==", - "dependencies": { - "dezalgo": "1.0.3", - "hexoid": "1.0.0", - "once": "1.4.0", - "qs": "6.9.3" - }, - "funding": { - "url": "https://ko-fi.com/tunnckoCore/commissions" - } - }, - "node_modules/formidable/node_modules/qs": { - "version": "6.9.3", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.9.3.tgz", - "integrity": "sha512-EbZYNarm6138UKKq46tdx08Yo/q9ZhFoAXAI1meAFd2GtbRDhbZY2WQSICskT0c5q99aFzLG1D4nvTk9tqfXIw==", - "engines": { - "node": ">=0.6" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/function-bind": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", - "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==" - }, - "node_modules/get-intrinsic": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.3.tgz", - "integrity": "sha512-QJVz1Tj7MS099PevUG5jvnt9tSkXN8K14dxQlikJuPt4uD9hHAHjLyLBiLR5zELelBdD9QNRAXZzsJx0WaDL9A==", - "dependencies": { - "function-bind": "^1.1.1", - "has": "^1.0.3", - "has-symbols": "^1.0.3" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/has": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", - "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", - "dependencies": { - "function-bind": "^1.1.1" - }, - "engines": { - "node": ">= 0.4.0" - } - }, - "node_modules/has-symbols": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", - "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/hexoid": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/hexoid/-/hexoid-1.0.0.tgz", - "integrity": "sha512-QFLV0taWQOZtvIRIAdBChesmogZrtuXvVWsFHZTk2SU+anspqZ2vMnoLg7IE1+Uk16N19APic1BuF8bC8c2m5g==", - "engines": { - "node": ">=8" - } - }, - "node_modules/indent-string": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", - "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==", - "engines": { - "node": ">=8" - } - }, - "node_modules/inherits": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" - }, - "node_modules/lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/methods": { - 
"version": "1.1.2", - "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", - "integrity": "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/mime": { - "version": "2.6.0", - "resolved": "https://registry.npmjs.org/mime/-/mime-2.6.0.tgz", - "integrity": "sha512-USPkMeET31rOMiarsBNIHZKLGgvKc/LrjofAnBlOttf5ajRvqiRA8QsenbcooctK6d6Ts6aqZXBA+XbkKthiQg==", - "bin": { - "mime": "cli.js" - }, - "engines": { - "node": ">=4.0.0" - } - }, - "node_modules/mime-db": { - "version": "1.52.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", - "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/mime-types": { - "version": "2.1.35", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", - "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", - "dependencies": { - "mime-db": "1.52.0" - }, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/morgan": { - "version": "1.10.0", - "resolved": "https://registry.npmjs.org/morgan/-/morgan-1.10.0.tgz", - "integrity": "sha512-AbegBVI4sh6El+1gNwvD5YIck7nSA36weD7xvIxG4in80j/UoK8AEGaWnnz8v1GxonMCltmlNs5ZKbGvl9b1XQ==", - "dependencies": { - "basic-auth": "~2.0.1", - "debug": "2.6.9", - "depd": "~2.0.0", - "on-finished": "~2.3.0", - "on-headers": "~1.0.2" - }, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" - }, - "node_modules/object-inspect": { - "version": "1.12.2", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.2.tgz", - "integrity": "sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ==", - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/on-finished": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz", - "integrity": "sha512-ikqdkGAAyf/X/gPhXGvfgAytDZtDbr+bkNUJ0N9h5MI/dmdgCs3l6hoHrcUv41sRKew3jIwrp4qQDXiK99Utww==", - "dependencies": { - "ee-first": "1.1.1" - }, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/on-headers": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/on-headers/-/on-headers-1.0.2.tgz", - "integrity": "sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA==", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/once": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", - "dependencies": { - "wrappy": "1" - } - }, - "node_modules/qs": { - "version": "6.11.0", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz", - "integrity": "sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==", - "dependencies": { - "side-channel": "^1.0.4" - }, - "engines": { - "node": ">=0.6" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/readable-stream": { - "version": "3.6.0", - "resolved": 
"https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", - "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", - "dependencies": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/safe-buffer": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", - "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" - }, - "node_modules/semver": { - "version": "7.3.8", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz", - "integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==", - "dependencies": { - "lru-cache": "^6.0.0" - }, - "bin": { - "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/side-channel": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", - "integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==", - "dependencies": { - "call-bind": "^1.0.0", - "get-intrinsic": "^1.0.2", - "object-inspect": "^1.9.0" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/smee-client": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/smee-client/-/smee-client-1.2.3.tgz", - "integrity": "sha512-uDrU8u9/Ln7aRXyzGHgVaNUS8onHZZeSwQjCdkMoSL7U85xI+l+Y2NgjibkMJAyXkW7IAbb8rw9RMHIjS6lAwA==", - "dependencies": { - "commander": "^2.19.0", - "eventsource": "^1.1.0", - "morgan": "^1.9.1", - "superagent": "^7.1.3", - "validator": "^13.7.0" - }, - "bin": { - "smee": "bin/smee.js" - } - }, - "node_modules/smee-client/node_modules/eventsource": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/eventsource/-/eventsource-1.1.2.tgz", - "integrity": "sha512-xAH3zWhgO2/3KIniEKYPr8plNSzlGINOUqYj0m0u7AB81iRw8b/3E73W6AuU+6klLbaSFmZnaETQ2lXPfAydrA==", - "engines": { - "node": ">=0.12.0" - } - }, - "node_modules/string_decoder": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", - "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", - "dependencies": { - "safe-buffer": "~5.2.0" - } - }, - "node_modules/string_decoder/node_modules/safe-buffer": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", - "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ] - }, - "node_modules/superagent": { - "version": "7.1.5", - "resolved": "https://registry.npmjs.org/superagent/-/superagent-7.1.5.tgz", - "integrity": "sha512-HQYyGuDRFGmZ6GNC4hq2f37KnsY9Lr0/R1marNZTgMweVDQLTLJJ6DGQ9Tj/xVVs5HEnop9EMmTbywb5P30aqw==", - "dependencies": { - "component-emitter": "^1.3.0", - "cookiejar": "^2.1.3", - "debug": "^4.3.4", - "fast-safe-stringify": "^2.1.1", - "form-data": "^4.0.0", - "formidable": "^2.0.1", - "methods": "^1.1.2", - "mime": "^2.5.0", - "qs": "^6.10.3", - "readable-stream": "^3.6.0", - "semver": "^7.3.7" - }, - "engines": { - "node": 
">=6.4.0 <13 || >=14" - } - }, - "node_modules/superagent/node_modules/debug": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", - "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", - "dependencies": { - "ms": "2.1.2" - }, - "engines": { - "node": ">=6.0" - }, - "peerDependenciesMeta": { - "supports-color": { - "optional": true - } - } - }, - "node_modules/superagent/node_modules/ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" - }, - "node_modules/util-deprecate": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", - "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" - }, - "node_modules/validator": { - "version": "13.7.0", - "resolved": "https://registry.npmjs.org/validator/-/validator-13.7.0.tgz", - "integrity": "sha512-nYXQLCBkpJ8X6ltALua9dRrZDHVYxjJ1wgskNt1lH9fzGjs3tgojGSCBjmEPwkWS1y29+DrizMTW19Pr9uB2nw==", - "engines": { - "node": ">= 0.10" - } - }, - "node_modules/wrappy": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" - }, - "node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" - } - }, - "dependencies": { - "@octokit/openapi-types": { - "version": "14.0.0", - "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-14.0.0.tgz", - "integrity": "sha512-HNWisMYlR8VCnNurDU6os2ikx0s0VyEjDYHNS/h4cgb8DeOxQ0n72HyinUtdDVxJhFy3FWLGl0DJhfEWk3P5Iw==" - }, - "@octokit/request-error": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-3.0.2.tgz", - "integrity": "sha512-WMNOFYrSaX8zXWoJg9u/pKgWPo94JXilMLb2VManNOby9EZxrQaBe/QSC4a1TzpAlpxofg2X/jMnCyZgL6y7eg==", - "requires": { - "@octokit/types": "^8.0.0", - "deprecation": "^2.0.0", - "once": "^1.4.0" - } - }, - "@octokit/types": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/@octokit/types/-/types-8.0.0.tgz", - "integrity": "sha512-65/TPpOJP1i3K4lBJMnWqPUJ6zuOtzhtagDvydAWbEXpbFYA0oMKKyLb95NFZZP0lSh/4b6K+DQlzvYQJQQePg==", - "requires": { - "@octokit/openapi-types": "^14.0.0" - } - }, - "@octokit/webhooks": { - "version": "10.3.0", - "resolved": "https://registry.npmjs.org/@octokit/webhooks/-/webhooks-10.3.0.tgz", - "integrity": "sha512-6k4dNsTjNhoJNJ56RKkWCMXEj32Cgvy4hIsV3fhK60hKbEfI1/rkx7tF87Gqtf1Xg5UVQfFmYygaBPGQCr0TmQ==", - "requires": { - "@octokit/request-error": "^3.0.0", - "@octokit/webhooks-methods": "^3.0.0", - "@octokit/webhooks-types": "6.5.0", - "aggregate-error": "^3.1.0" - } - }, - "@octokit/webhooks-methods": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/@octokit/webhooks-methods/-/webhooks-methods-3.0.1.tgz", - "integrity": "sha512-XftYVcBxtzC2G05kdBNn9IYLtQ+Cz6ufKkjZd0DU/qGaZEFTPzM2OabXAWG5tvL0q/I+Exio1JnRiPfetiMSEw==" - }, - "@octokit/webhooks-types": { - "version": "6.5.0", - "resolved": "https://registry.npmjs.org/@octokit/webhooks-types/-/webhooks-types-6.5.0.tgz", - "integrity": 
"sha512-SNx1YKBvi9IQvXo/SQ0p+9OKO2HMdzpCWcKsYxpGW1tlkE9TojYiGnFfxcXddyrsK4mC1UiyXY8+NYjWjtkVmA==" - }, - "@types/node": { - "version": "18.11.0", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.11.0.tgz", - "integrity": "sha512-IOXCvVRToe7e0ny7HpT/X9Rb2RYtElG1a+VshjwT00HxrM2dWBApHQoqsI6WiY7Q03vdf2bCrIGzVrkF/5t10w==", - "dev": true - }, - "aggregate-error": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz", - "integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==", - "requires": { - "clean-stack": "^2.0.0", - "indent-string": "^4.0.0" - } - }, - "asap": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz", - "integrity": "sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA==" - }, - "asynckit": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", - "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" - }, - "basic-auth": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/basic-auth/-/basic-auth-2.0.1.tgz", - "integrity": "sha512-NF+epuEdnUYVlGuhaxbbq+dvJttwLnGY+YixlXlME5KpQ5W3CnXA5cVTneY3SPbPDRkcjMbifrwmFYcClgOZeg==", - "requires": { - "safe-buffer": "5.1.2" - } - }, - "call-bind": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz", - "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==", - "requires": { - "function-bind": "^1.1.1", - "get-intrinsic": "^1.0.2" - } - }, - "clean-stack": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz", - "integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==" - }, - "combined-stream": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", - "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", - "requires": { - "delayed-stream": "~1.0.0" - } - }, - "commander": { - "version": "2.20.3", - "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", - "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==" - }, - "component-emitter": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.3.0.tgz", - "integrity": "sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg==" - }, - "cookiejar": { - "version": "2.1.3", - "resolved": "https://registry.npmjs.org/cookiejar/-/cookiejar-2.1.3.tgz", - "integrity": "sha512-JxbCBUdrfr6AQjOXrxoTvAMJO4HBTUIlBzslcJPAz+/KT8yk53fXun51u+RenNYvad/+Vc2DIz5o9UxlCDymFQ==" - }, - "debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "requires": { - "ms": "2.0.0" - } - }, - "delayed-stream": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", - "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==" - }, - "depd": { - "version": "2.0.0", - "resolved": 
"https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", - "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==" - }, - "deprecation": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz", - "integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==" - }, - "dezalgo": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/dezalgo/-/dezalgo-1.0.3.tgz", - "integrity": "sha512-K7i4zNfT2kgQz3GylDw40ot9GAE47sFZ9EXHFSPP6zONLgH6kWXE0KWJchkbQJLBkRazq4APwZ4OwiFFlT95OQ==", - "requires": { - "asap": "^2.0.0", - "wrappy": "1" - } - }, - "ee-first": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", - "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==" - }, - "eventsource": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/eventsource/-/eventsource-2.0.2.tgz", - "integrity": "sha512-IzUmBGPR3+oUG9dUeXynyNmf91/3zUSJg1lCktzKw47OXuhco54U3r9B7O4XX+Rb1Itm9OZ2b0RkTs10bICOxA==" - }, - "fast-safe-stringify": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz", - "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==" - }, - "form-data": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", - "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", - "requires": { - "asynckit": "^0.4.0", - "combined-stream": "^1.0.8", - "mime-types": "^2.1.12" - } - }, - "formidable": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/formidable/-/formidable-2.0.1.tgz", - "integrity": "sha512-rjTMNbp2BpfQShhFbR3Ruk3qk2y9jKpvMW78nJgx8QKtxjDVrwbZG+wvDOmVbifHyOUOQJXxqEy6r0faRrPzTQ==", - "requires": { - "dezalgo": "1.0.3", - "hexoid": "1.0.0", - "once": "1.4.0", - "qs": "6.9.3" - }, - "dependencies": { - "qs": { - "version": "6.9.3", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.9.3.tgz", - "integrity": "sha512-EbZYNarm6138UKKq46tdx08Yo/q9ZhFoAXAI1meAFd2GtbRDhbZY2WQSICskT0c5q99aFzLG1D4nvTk9tqfXIw==" - } - } - }, - "function-bind": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", - "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==" - }, - "get-intrinsic": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.3.tgz", - "integrity": "sha512-QJVz1Tj7MS099PevUG5jvnt9tSkXN8K14dxQlikJuPt4uD9hHAHjLyLBiLR5zELelBdD9QNRAXZzsJx0WaDL9A==", - "requires": { - "function-bind": "^1.1.1", - "has": "^1.0.3", - "has-symbols": "^1.0.3" - } - }, - "has": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", - "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", - "requires": { - "function-bind": "^1.1.1" - } - }, - "has-symbols": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", - "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==" - }, - "hexoid": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/hexoid/-/hexoid-1.0.0.tgz", - "integrity": 
"sha512-QFLV0taWQOZtvIRIAdBChesmogZrtuXvVWsFHZTk2SU+anspqZ2vMnoLg7IE1+Uk16N19APic1BuF8bC8c2m5g==" - }, - "indent-string": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", - "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==" - }, - "inherits": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" - }, - "lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "requires": { - "yallist": "^4.0.0" - } - }, - "methods": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", - "integrity": "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==" - }, - "mime": { - "version": "2.6.0", - "resolved": "https://registry.npmjs.org/mime/-/mime-2.6.0.tgz", - "integrity": "sha512-USPkMeET31rOMiarsBNIHZKLGgvKc/LrjofAnBlOttf5ajRvqiRA8QsenbcooctK6d6Ts6aqZXBA+XbkKthiQg==" - }, - "mime-db": { - "version": "1.52.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", - "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==" - }, - "mime-types": { - "version": "2.1.35", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", - "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", - "requires": { - "mime-db": "1.52.0" - } - }, - "morgan": { - "version": "1.10.0", - "resolved": "https://registry.npmjs.org/morgan/-/morgan-1.10.0.tgz", - "integrity": "sha512-AbegBVI4sh6El+1gNwvD5YIck7nSA36weD7xvIxG4in80j/UoK8AEGaWnnz8v1GxonMCltmlNs5ZKbGvl9b1XQ==", - "requires": { - "basic-auth": "~2.0.1", - "debug": "2.6.9", - "depd": "~2.0.0", - "on-finished": "~2.3.0", - "on-headers": "~1.0.2" - } - }, - "ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" - }, - "object-inspect": { - "version": "1.12.2", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.2.tgz", - "integrity": "sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ==" - }, - "on-finished": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz", - "integrity": "sha512-ikqdkGAAyf/X/gPhXGvfgAytDZtDbr+bkNUJ0N9h5MI/dmdgCs3l6hoHrcUv41sRKew3jIwrp4qQDXiK99Utww==", - "requires": { - "ee-first": "1.1.1" - } - }, - "on-headers": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/on-headers/-/on-headers-1.0.2.tgz", - "integrity": "sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA==" - }, - "once": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", - "requires": { - "wrappy": "1" - } - }, - "qs": { - "version": "6.11.0", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz", - "integrity": 
"sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==", - "requires": { - "side-channel": "^1.0.4" - } - }, - "readable-stream": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", - "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", - "requires": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - } - }, - "safe-buffer": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", - "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" - }, - "semver": { - "version": "7.3.8", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz", - "integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==", - "requires": { - "lru-cache": "^6.0.0" - } - }, - "side-channel": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", - "integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==", - "requires": { - "call-bind": "^1.0.0", - "get-intrinsic": "^1.0.2", - "object-inspect": "^1.9.0" - } - }, - "smee-client": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/smee-client/-/smee-client-1.2.3.tgz", - "integrity": "sha512-uDrU8u9/Ln7aRXyzGHgVaNUS8onHZZeSwQjCdkMoSL7U85xI+l+Y2NgjibkMJAyXkW7IAbb8rw9RMHIjS6lAwA==", - "requires": { - "commander": "^2.19.0", - "eventsource": "^1.1.0", - "morgan": "^1.9.1", - "superagent": "^7.1.3", - "validator": "^13.7.0" - }, - "dependencies": { - "eventsource": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/eventsource/-/eventsource-1.1.2.tgz", - "integrity": "sha512-xAH3zWhgO2/3KIniEKYPr8plNSzlGINOUqYj0m0u7AB81iRw8b/3E73W6AuU+6klLbaSFmZnaETQ2lXPfAydrA==" - } - } - }, - "string_decoder": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", - "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", - "requires": { - "safe-buffer": "~5.2.0" - }, - "dependencies": { - "safe-buffer": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", - "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==" - } - } - }, - "superagent": { - "version": "7.1.5", - "resolved": "https://registry.npmjs.org/superagent/-/superagent-7.1.5.tgz", - "integrity": "sha512-HQYyGuDRFGmZ6GNC4hq2f37KnsY9Lr0/R1marNZTgMweVDQLTLJJ6DGQ9Tj/xVVs5HEnop9EMmTbywb5P30aqw==", - "requires": { - "component-emitter": "^1.3.0", - "cookiejar": "^2.1.3", - "debug": "^4.3.4", - "fast-safe-stringify": "^2.1.1", - "form-data": "^4.0.0", - "formidable": "^2.0.1", - "methods": "^1.1.2", - "mime": "^2.5.0", - "qs": "^6.10.3", - "readable-stream": "^3.6.0", - "semver": "^7.3.7" - }, - "dependencies": { - "debug": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", - "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", - "requires": { - "ms": "2.1.2" - } - }, - "ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": 
"sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" - } - } - }, - "util-deprecate": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", - "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" - }, - "validator": { - "version": "13.7.0", - "resolved": "https://registry.npmjs.org/validator/-/validator-13.7.0.tgz", - "integrity": "sha512-nYXQLCBkpJ8X6ltALua9dRrZDHVYxjJ1wgskNt1lH9fzGjs3tgojGSCBjmEPwkWS1y29+DrizMTW19Pr9uB2nw==" - }, - "wrappy": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" - }, - "yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" - } - } -} diff --git a/tools/github/package.json b/tools/github/package.json deleted file mode 100644 index c6d329d..0000000 --- a/tools/github/package.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "name": "github-auto-sync", - "version": "1.0.0", - "description": "", - "main": "listen", - "scripts": { - "test": "echo \"Error: no test specified\" && exit 1" - }, - "author": "Matija Stepanic", - "license": "MIT", - "dependencies": { - "@octokit/webhooks": "^10.3.0", - "eventsource": "^2.0.2", - "smee-client": "^1.2.3" - }, - "devDependencies": { - "@types/node": "^18.11.0" - }, - "type": "commonjs" -} diff --git a/tools/hotreloader.sh b/tools/hotreloader.sh deleted file mode 100755 index beb4259..0000000 --- a/tools/hotreloader.sh +++ /dev/null @@ -1,22 +0,0 @@ -#!/bin/bash - -set -euo pipefail -PIDFILES=($(compgen -G "/tmp/flutter*.pid")) - -trap 'pkill -P $$; exit' SIGINT SIGTERM - -if [[ "${1-}" != "" && ${#PIDFILES[@]} > 0 ]]; then - echo $1 - PIDS="" - for pf in ${PIDFILES[@]}; do - PIDS+=$(cat $pf) - PIDS+=" " - done - if [[ "$1" =~ \/assets\/ || "$1" =~ \/state\/ || "$1" =~ backend ]]; then - echo "Flutter - Restarting ${PIDS}" - kill -USR2 $PIDS - else - echo "Flutter - Reloading ${PIDS}" - kill -USR1 $PIDS - fi -fi \ No newline at end of file diff --git a/tools/sync b/tools/sync deleted file mode 100755 index ab7908a..0000000 --- a/tools/sync +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/bash - -git config pull.rebase false -export GIT_MERGE_AUTOEDIT=no - -git add . -git commit -m "Local changes before sync" -git push - -if ! git pull origin flutterflow; then - exit 1 # break execution if GIT conflict occured -fi - -if [ "$DEEP" = true ] ; then - - flutter pub get - flutter packages pub run build_runner build --delete-conflicting-outputs - flutter pub run flutter_launcher_icons:main - -fi - -git add . -git commit -m "Local changes after sync" -git push \ No newline at end of file diff --git a/watch b/watch deleted file mode 100755 index 5062481..0000000 --- a/watch +++ /dev/null @@ -1,30 +0,0 @@ -#!/bin/bash - -# ------- CONFIGURATION ---------------- -export GITHUB_WEBHOOK_URL="https://smee.io/" -export GITHUB_WEBHOOK_SECRET="ff-my-github-webhook-secret" -# ------- END CONFIGURATION ------------ - -echo "Flutter hotreload watch for a DART files changes..." - -SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) - -trap ctrl_c INT -function ctrl_c() { - # echo "** Trapped CTRL-C" - kill -9 $child_1_pid - kill -9 "$$" -} - -# REQUIREMENTS -# 1. 
Run `npm install -- prefix ./tools/github` -# 2. Setup GITHUB_WEBHOOK_URL and GITHUB_WEBHOOK_SECRET in this script and in the GitHub Webhook settings -$SCRIPT_DIR/./tools/github/listen & -child_1_pid=$! - -while true -do - # 3. install `entr` with `brew install entr` - find "$SCRIPT_DIR/./lib/" -name '*.dart' | entr -dnp "$SCRIPT_DIR/./tools/hotreloader.sh" /_ - sleep 1 -done \ No newline at end of file From c3f0fe450f349400868cb0d5f4aa9125c38aa974 Mon Sep 17 00:00:00 2001 From: Matija Stepanic Date: Fri, 21 Oct 2022 01:19:29 +0200 Subject: [PATCH 05/11] Installed flutterflow-socket --- run | 13 + tools/github/.gitignore | 1 + tools/github/listen | 76 +++ tools/github/package-lock.json | 1028 ++++++++++++++++++++++++++++++++ tools/github/package.json | 20 + tools/hotreloader.sh | 22 + tools/sync | 24 + watch | 30 + 8 files changed, 1214 insertions(+) create mode 100755 run create mode 100644 tools/github/.gitignore create mode 100755 tools/github/listen create mode 100644 tools/github/package-lock.json create mode 100644 tools/github/package.json create mode 100755 tools/hotreloader.sh create mode 100755 tools/sync create mode 100755 watch diff --git a/run b/run new file mode 100755 index 0000000..ee3a3b5 --- /dev/null +++ b/run @@ -0,0 +1,13 @@ +#!/bin/bash + +# USAGE i.e. +# ./run 000 + +if [ "$#" -ne 1 ] +then + echo "Usage: ./run " + exit 1 +fi + +# $1 is the flutter device ID +flutter run -d "$1" --pid-file /tmp/flutter.pid \ No newline at end of file diff --git a/tools/github/.gitignore b/tools/github/.gitignore new file mode 100644 index 0000000..b512c09 --- /dev/null +++ b/tools/github/.gitignore @@ -0,0 +1 @@ +node_modules \ No newline at end of file diff --git a/tools/github/listen b/tools/github/listen new file mode 100755 index 0000000..a3aa989 --- /dev/null +++ b/tools/github/listen @@ -0,0 +1,76 @@ +#!/usr/bin/env node + +// START - config +// 1. create webhook proxy at https://smee.io/new +// 2. 
set webhook URL and SECRET at https://github.com///settings/hooks/new +const GITHUB_WEBHOOK_URL = process.env.GITHUB_WEBHOOK_URL; // replace with your own GitHub Webhook Proxy URL +const GITHUB_WEBHOOK_SECRET = process.env.GITHUB_WEBHOOK_SECRET; // replace with your own GitHub Webhook Secret +// END - config + +const EventSource = require("eventsource"); +const { Webhooks } = require("@octokit/webhooks"); +const { exec } = require("child_process"); + +const webhooks = new Webhooks({ + secret: GITHUB_WEBHOOK_SECRET +}); + +const WEBHOOK_NAME = "push"; +const WEBHOOK_GIT_REF = "refs/heads/flutterflow"; +const WEBHOOK_GIT_HEAD_COMMIT_AUTHOR_USERNAME = "flutterflow-github-app[bot]"; + +webhooks.onAny(({ id, name, payload }) => { + // console.log(name, "event received", id, payload); + + if (name === WEBHOOK_NAME) { + if (payload.ref === WEBHOOK_GIT_REF) { + if (payload.head_commit.author.username === WEBHOOK_GIT_HEAD_COMMIT_AUTHOR_USERNAME) { + console.log("Commit from FlutterFlow"); + const commitId = payload.head_commit.id; + const commitTimestamp = payload.head_commit.timestamp; + const commitMessage = payload.head_commit.message; + console.log("Commit ID: " + commitId); + console.log("Commit Timestamp: " + commitTimestamp); + console.log("Commit Message: " + commitMessage); + console.log(""); + + const isDeepRequired = commitMessage.includes("DEEP"); + let syncScriptPrefix = ''; + if (isDeepRequired) { + syncScriptPrefix = 'DEEP=true '; + } + + console.log("Running GIT sync script..."); + exec(`${syncScriptPrefix}${__dirname}/../sync`, (error, stdout, stderr) => { + if (error) { + console.log(`error: ${error.message}`); + return; + } + if (stderr) { + // console.log(`stderr: ${stderr}`); + // return; + } + // console.log(`stdout: ${stdout}`); + + console.log("GIT sync script completed."); + console.log(""); + }); + + } + } + } +}); + +const source = new EventSource(GITHUB_WEBHOOK_URL); +source.onmessage = (event) => { + const webhookEvent = JSON.parse(event.data); + webhooks + .verifyAndReceive({ + id: webhookEvent["x-request-id"], + name: webhookEvent["x-github-event"], + signature: webhookEvent["x-hub-signature"], + payload: webhookEvent.body, + }) + .catch(console.error); +}; +source.onopen = () => console.log("Listening for GIT commits from FlutterFlow...\n"); diff --git a/tools/github/package-lock.json b/tools/github/package-lock.json new file mode 100644 index 0000000..ac1155c --- /dev/null +++ b/tools/github/package-lock.json @@ -0,0 +1,1028 @@ +{ + "name": "github-auto-sync", + "version": "1.0.0", + "lockfileVersion": 2, + "requires": true, + "packages": { + "": { + "name": "github-auto-sync", + "version": "1.0.0", + "license": "MIT", + "dependencies": { + "@octokit/webhooks": "^10.3.0", + "eventsource": "^2.0.2", + "smee-client": "^1.2.3" + }, + "devDependencies": { + "@types/node": "^18.11.0" + } + }, + "node_modules/@octokit/openapi-types": { + "version": "14.0.0", + "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-14.0.0.tgz", + "integrity": "sha512-HNWisMYlR8VCnNurDU6os2ikx0s0VyEjDYHNS/h4cgb8DeOxQ0n72HyinUtdDVxJhFy3FWLGl0DJhfEWk3P5Iw==" + }, + "node_modules/@octokit/request-error": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-3.0.2.tgz", + "integrity": "sha512-WMNOFYrSaX8zXWoJg9u/pKgWPo94JXilMLb2VManNOby9EZxrQaBe/QSC4a1TzpAlpxofg2X/jMnCyZgL6y7eg==", + "dependencies": { + "@octokit/types": "^8.0.0", + "deprecation": "^2.0.0", + "once": "^1.4.0" + }, + "engines": { + "node": ">= 14" + } + 
}, + "node_modules/@octokit/types": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-8.0.0.tgz", + "integrity": "sha512-65/TPpOJP1i3K4lBJMnWqPUJ6zuOtzhtagDvydAWbEXpbFYA0oMKKyLb95NFZZP0lSh/4b6K+DQlzvYQJQQePg==", + "dependencies": { + "@octokit/openapi-types": "^14.0.0" + } + }, + "node_modules/@octokit/webhooks": { + "version": "10.3.0", + "resolved": "https://registry.npmjs.org/@octokit/webhooks/-/webhooks-10.3.0.tgz", + "integrity": "sha512-6k4dNsTjNhoJNJ56RKkWCMXEj32Cgvy4hIsV3fhK60hKbEfI1/rkx7tF87Gqtf1Xg5UVQfFmYygaBPGQCr0TmQ==", + "dependencies": { + "@octokit/request-error": "^3.0.0", + "@octokit/webhooks-methods": "^3.0.0", + "@octokit/webhooks-types": "6.5.0", + "aggregate-error": "^3.1.0" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/@octokit/webhooks-methods": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@octokit/webhooks-methods/-/webhooks-methods-3.0.1.tgz", + "integrity": "sha512-XftYVcBxtzC2G05kdBNn9IYLtQ+Cz6ufKkjZd0DU/qGaZEFTPzM2OabXAWG5tvL0q/I+Exio1JnRiPfetiMSEw==", + "engines": { + "node": ">= 14" + } + }, + "node_modules/@octokit/webhooks-types": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/@octokit/webhooks-types/-/webhooks-types-6.5.0.tgz", + "integrity": "sha512-SNx1YKBvi9IQvXo/SQ0p+9OKO2HMdzpCWcKsYxpGW1tlkE9TojYiGnFfxcXddyrsK4mC1UiyXY8+NYjWjtkVmA==" + }, + "node_modules/@types/node": { + "version": "18.11.0", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.11.0.tgz", + "integrity": "sha512-IOXCvVRToe7e0ny7HpT/X9Rb2RYtElG1a+VshjwT00HxrM2dWBApHQoqsI6WiY7Q03vdf2bCrIGzVrkF/5t10w==", + "dev": true + }, + "node_modules/aggregate-error": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz", + "integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==", + "dependencies": { + "clean-stack": "^2.0.0", + "indent-string": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/asap": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz", + "integrity": "sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA==" + }, + "node_modules/asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" + }, + "node_modules/basic-auth": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/basic-auth/-/basic-auth-2.0.1.tgz", + "integrity": "sha512-NF+epuEdnUYVlGuhaxbbq+dvJttwLnGY+YixlXlME5KpQ5W3CnXA5cVTneY3SPbPDRkcjMbifrwmFYcClgOZeg==", + "dependencies": { + "safe-buffer": "5.1.2" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/call-bind": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz", + "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==", + "dependencies": { + "function-bind": "^1.1.1", + "get-intrinsic": "^1.0.2" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/clean-stack": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz", + "integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==", + "engines": { + "node": ">=6" + } + }, + 
"node_modules/combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "dependencies": { + "delayed-stream": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/commander": { + "version": "2.20.3", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", + "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==" + }, + "node_modules/component-emitter": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.3.0.tgz", + "integrity": "sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg==" + }, + "node_modules/cookiejar": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/cookiejar/-/cookiejar-2.1.3.tgz", + "integrity": "sha512-JxbCBUdrfr6AQjOXrxoTvAMJO4HBTUIlBzslcJPAz+/KT8yk53fXun51u+RenNYvad/+Vc2DIz5o9UxlCDymFQ==" + }, + "node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/depd": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", + "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/deprecation": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz", + "integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==" + }, + "node_modules/dezalgo": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/dezalgo/-/dezalgo-1.0.3.tgz", + "integrity": "sha512-K7i4zNfT2kgQz3GylDw40ot9GAE47sFZ9EXHFSPP6zONLgH6kWXE0KWJchkbQJLBkRazq4APwZ4OwiFFlT95OQ==", + "dependencies": { + "asap": "^2.0.0", + "wrappy": "1" + } + }, + "node_modules/ee-first": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", + "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==" + }, + "node_modules/eventsource": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/eventsource/-/eventsource-2.0.2.tgz", + "integrity": "sha512-IzUmBGPR3+oUG9dUeXynyNmf91/3zUSJg1lCktzKw47OXuhco54U3r9B7O4XX+Rb1Itm9OZ2b0RkTs10bICOxA==", + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/fast-safe-stringify": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz", + "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==" + }, + "node_modules/form-data": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", + "integrity": 
"sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/formidable": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/formidable/-/formidable-2.0.1.tgz", + "integrity": "sha512-rjTMNbp2BpfQShhFbR3Ruk3qk2y9jKpvMW78nJgx8QKtxjDVrwbZG+wvDOmVbifHyOUOQJXxqEy6r0faRrPzTQ==", + "dependencies": { + "dezalgo": "1.0.3", + "hexoid": "1.0.0", + "once": "1.4.0", + "qs": "6.9.3" + }, + "funding": { + "url": "https://ko-fi.com/tunnckoCore/commissions" + } + }, + "node_modules/formidable/node_modules/qs": { + "version": "6.9.3", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.9.3.tgz", + "integrity": "sha512-EbZYNarm6138UKKq46tdx08Yo/q9ZhFoAXAI1meAFd2GtbRDhbZY2WQSICskT0c5q99aFzLG1D4nvTk9tqfXIw==", + "engines": { + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/function-bind": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", + "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==" + }, + "node_modules/get-intrinsic": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.3.tgz", + "integrity": "sha512-QJVz1Tj7MS099PevUG5jvnt9tSkXN8K14dxQlikJuPt4uD9hHAHjLyLBiLR5zELelBdD9QNRAXZzsJx0WaDL9A==", + "dependencies": { + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.3" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", + "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "dependencies": { + "function-bind": "^1.1.1" + }, + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/has-symbols": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", + "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/hexoid": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/hexoid/-/hexoid-1.0.0.tgz", + "integrity": "sha512-QFLV0taWQOZtvIRIAdBChesmogZrtuXvVWsFHZTk2SU+anspqZ2vMnoLg7IE1+Uk16N19APic1BuF8bC8c2m5g==", + "engines": { + "node": ">=8" + } + }, + "node_modules/indent-string": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", + "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==", + "engines": { + "node": ">=8" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + }, + "node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/methods": { + 
"version": "1.1.2", + "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", + "integrity": "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime": { + "version": "2.6.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-2.6.0.tgz", + "integrity": "sha512-USPkMeET31rOMiarsBNIHZKLGgvKc/LrjofAnBlOttf5ajRvqiRA8QsenbcooctK6d6Ts6aqZXBA+XbkKthiQg==", + "bin": { + "mime": "cli.js" + }, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/morgan": { + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/morgan/-/morgan-1.10.0.tgz", + "integrity": "sha512-AbegBVI4sh6El+1gNwvD5YIck7nSA36weD7xvIxG4in80j/UoK8AEGaWnnz8v1GxonMCltmlNs5ZKbGvl9b1XQ==", + "dependencies": { + "basic-auth": "~2.0.1", + "debug": "2.6.9", + "depd": "~2.0.0", + "on-finished": "~2.3.0", + "on-headers": "~1.0.2" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" + }, + "node_modules/object-inspect": { + "version": "1.12.2", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.2.tgz", + "integrity": "sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ==", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/on-finished": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz", + "integrity": "sha512-ikqdkGAAyf/X/gPhXGvfgAytDZtDbr+bkNUJ0N9h5MI/dmdgCs3l6hoHrcUv41sRKew3jIwrp4qQDXiK99Utww==", + "dependencies": { + "ee-first": "1.1.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/on-headers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/on-headers/-/on-headers-1.0.2.tgz", + "integrity": "sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/qs": { + "version": "6.11.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz", + "integrity": "sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==", + "dependencies": { + "side-channel": "^1.0.4" + }, + "engines": { + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/readable-stream": { + "version": "3.6.0", + "resolved": 
"https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" + }, + "node_modules/semver": { + "version": "7.3.8", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz", + "integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==", + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/side-channel": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", + "integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==", + "dependencies": { + "call-bind": "^1.0.0", + "get-intrinsic": "^1.0.2", + "object-inspect": "^1.9.0" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/smee-client": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/smee-client/-/smee-client-1.2.3.tgz", + "integrity": "sha512-uDrU8u9/Ln7aRXyzGHgVaNUS8onHZZeSwQjCdkMoSL7U85xI+l+Y2NgjibkMJAyXkW7IAbb8rw9RMHIjS6lAwA==", + "dependencies": { + "commander": "^2.19.0", + "eventsource": "^1.1.0", + "morgan": "^1.9.1", + "superagent": "^7.1.3", + "validator": "^13.7.0" + }, + "bin": { + "smee": "bin/smee.js" + } + }, + "node_modules/smee-client/node_modules/eventsource": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/eventsource/-/eventsource-1.1.2.tgz", + "integrity": "sha512-xAH3zWhgO2/3KIniEKYPr8plNSzlGINOUqYj0m0u7AB81iRw8b/3E73W6AuU+6klLbaSFmZnaETQ2lXPfAydrA==", + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/string_decoder": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", + "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "dependencies": { + "safe-buffer": "~5.2.0" + } + }, + "node_modules/string_decoder/node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/superagent": { + "version": "7.1.5", + "resolved": "https://registry.npmjs.org/superagent/-/superagent-7.1.5.tgz", + "integrity": "sha512-HQYyGuDRFGmZ6GNC4hq2f37KnsY9Lr0/R1marNZTgMweVDQLTLJJ6DGQ9Tj/xVVs5HEnop9EMmTbywb5P30aqw==", + "dependencies": { + "component-emitter": "^1.3.0", + "cookiejar": "^2.1.3", + "debug": "^4.3.4", + "fast-safe-stringify": "^2.1.1", + "form-data": "^4.0.0", + "formidable": "^2.0.1", + "methods": "^1.1.2", + "mime": "^2.5.0", + "qs": "^6.10.3", + "readable-stream": "^3.6.0", + "semver": "^7.3.7" + }, + "engines": { + "node": 
">=6.4.0 <13 || >=14" + } + }, + "node_modules/superagent/node_modules/debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/superagent/node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" + }, + "node_modules/validator": { + "version": "13.7.0", + "resolved": "https://registry.npmjs.org/validator/-/validator-13.7.0.tgz", + "integrity": "sha512-nYXQLCBkpJ8X6ltALua9dRrZDHVYxjJ1wgskNt1lH9fzGjs3tgojGSCBjmEPwkWS1y29+DrizMTW19Pr9uB2nw==", + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" + }, + "node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + } + }, + "dependencies": { + "@octokit/openapi-types": { + "version": "14.0.0", + "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-14.0.0.tgz", + "integrity": "sha512-HNWisMYlR8VCnNurDU6os2ikx0s0VyEjDYHNS/h4cgb8DeOxQ0n72HyinUtdDVxJhFy3FWLGl0DJhfEWk3P5Iw==" + }, + "@octokit/request-error": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-3.0.2.tgz", + "integrity": "sha512-WMNOFYrSaX8zXWoJg9u/pKgWPo94JXilMLb2VManNOby9EZxrQaBe/QSC4a1TzpAlpxofg2X/jMnCyZgL6y7eg==", + "requires": { + "@octokit/types": "^8.0.0", + "deprecation": "^2.0.0", + "once": "^1.4.0" + } + }, + "@octokit/types": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-8.0.0.tgz", + "integrity": "sha512-65/TPpOJP1i3K4lBJMnWqPUJ6zuOtzhtagDvydAWbEXpbFYA0oMKKyLb95NFZZP0lSh/4b6K+DQlzvYQJQQePg==", + "requires": { + "@octokit/openapi-types": "^14.0.0" + } + }, + "@octokit/webhooks": { + "version": "10.3.0", + "resolved": "https://registry.npmjs.org/@octokit/webhooks/-/webhooks-10.3.0.tgz", + "integrity": "sha512-6k4dNsTjNhoJNJ56RKkWCMXEj32Cgvy4hIsV3fhK60hKbEfI1/rkx7tF87Gqtf1Xg5UVQfFmYygaBPGQCr0TmQ==", + "requires": { + "@octokit/request-error": "^3.0.0", + "@octokit/webhooks-methods": "^3.0.0", + "@octokit/webhooks-types": "6.5.0", + "aggregate-error": "^3.1.0" + } + }, + "@octokit/webhooks-methods": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@octokit/webhooks-methods/-/webhooks-methods-3.0.1.tgz", + "integrity": "sha512-XftYVcBxtzC2G05kdBNn9IYLtQ+Cz6ufKkjZd0DU/qGaZEFTPzM2OabXAWG5tvL0q/I+Exio1JnRiPfetiMSEw==" + }, + "@octokit/webhooks-types": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/@octokit/webhooks-types/-/webhooks-types-6.5.0.tgz", + "integrity": 
"sha512-SNx1YKBvi9IQvXo/SQ0p+9OKO2HMdzpCWcKsYxpGW1tlkE9TojYiGnFfxcXddyrsK4mC1UiyXY8+NYjWjtkVmA==" + }, + "@types/node": { + "version": "18.11.0", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.11.0.tgz", + "integrity": "sha512-IOXCvVRToe7e0ny7HpT/X9Rb2RYtElG1a+VshjwT00HxrM2dWBApHQoqsI6WiY7Q03vdf2bCrIGzVrkF/5t10w==", + "dev": true + }, + "aggregate-error": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz", + "integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==", + "requires": { + "clean-stack": "^2.0.0", + "indent-string": "^4.0.0" + } + }, + "asap": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz", + "integrity": "sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA==" + }, + "asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" + }, + "basic-auth": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/basic-auth/-/basic-auth-2.0.1.tgz", + "integrity": "sha512-NF+epuEdnUYVlGuhaxbbq+dvJttwLnGY+YixlXlME5KpQ5W3CnXA5cVTneY3SPbPDRkcjMbifrwmFYcClgOZeg==", + "requires": { + "safe-buffer": "5.1.2" + } + }, + "call-bind": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz", + "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==", + "requires": { + "function-bind": "^1.1.1", + "get-intrinsic": "^1.0.2" + } + }, + "clean-stack": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz", + "integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==" + }, + "combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "requires": { + "delayed-stream": "~1.0.0" + } + }, + "commander": { + "version": "2.20.3", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", + "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==" + }, + "component-emitter": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.3.0.tgz", + "integrity": "sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg==" + }, + "cookiejar": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/cookiejar/-/cookiejar-2.1.3.tgz", + "integrity": "sha512-JxbCBUdrfr6AQjOXrxoTvAMJO4HBTUIlBzslcJPAz+/KT8yk53fXun51u+RenNYvad/+Vc2DIz5o9UxlCDymFQ==" + }, + "debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "requires": { + "ms": "2.0.0" + } + }, + "delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==" + }, + "depd": { + "version": "2.0.0", + "resolved": 
"https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", + "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==" + }, + "deprecation": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz", + "integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==" + }, + "dezalgo": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/dezalgo/-/dezalgo-1.0.3.tgz", + "integrity": "sha512-K7i4zNfT2kgQz3GylDw40ot9GAE47sFZ9EXHFSPP6zONLgH6kWXE0KWJchkbQJLBkRazq4APwZ4OwiFFlT95OQ==", + "requires": { + "asap": "^2.0.0", + "wrappy": "1" + } + }, + "ee-first": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", + "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==" + }, + "eventsource": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/eventsource/-/eventsource-2.0.2.tgz", + "integrity": "sha512-IzUmBGPR3+oUG9dUeXynyNmf91/3zUSJg1lCktzKw47OXuhco54U3r9B7O4XX+Rb1Itm9OZ2b0RkTs10bICOxA==" + }, + "fast-safe-stringify": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz", + "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==" + }, + "form-data": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", + "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", + "requires": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + } + }, + "formidable": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/formidable/-/formidable-2.0.1.tgz", + "integrity": "sha512-rjTMNbp2BpfQShhFbR3Ruk3qk2y9jKpvMW78nJgx8QKtxjDVrwbZG+wvDOmVbifHyOUOQJXxqEy6r0faRrPzTQ==", + "requires": { + "dezalgo": "1.0.3", + "hexoid": "1.0.0", + "once": "1.4.0", + "qs": "6.9.3" + }, + "dependencies": { + "qs": { + "version": "6.9.3", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.9.3.tgz", + "integrity": "sha512-EbZYNarm6138UKKq46tdx08Yo/q9ZhFoAXAI1meAFd2GtbRDhbZY2WQSICskT0c5q99aFzLG1D4nvTk9tqfXIw==" + } + } + }, + "function-bind": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", + "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==" + }, + "get-intrinsic": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.3.tgz", + "integrity": "sha512-QJVz1Tj7MS099PevUG5jvnt9tSkXN8K14dxQlikJuPt4uD9hHAHjLyLBiLR5zELelBdD9QNRAXZzsJx0WaDL9A==", + "requires": { + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.3" + } + }, + "has": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", + "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "requires": { + "function-bind": "^1.1.1" + } + }, + "has-symbols": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", + "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==" + }, + "hexoid": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/hexoid/-/hexoid-1.0.0.tgz", + "integrity": 
"sha512-QFLV0taWQOZtvIRIAdBChesmogZrtuXvVWsFHZTk2SU+anspqZ2vMnoLg7IE1+Uk16N19APic1BuF8bC8c2m5g==" + }, + "indent-string": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", + "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==" + }, + "inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + }, + "lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "requires": { + "yallist": "^4.0.0" + } + }, + "methods": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", + "integrity": "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==" + }, + "mime": { + "version": "2.6.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-2.6.0.tgz", + "integrity": "sha512-USPkMeET31rOMiarsBNIHZKLGgvKc/LrjofAnBlOttf5ajRvqiRA8QsenbcooctK6d6Ts6aqZXBA+XbkKthiQg==" + }, + "mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==" + }, + "mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "requires": { + "mime-db": "1.52.0" + } + }, + "morgan": { + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/morgan/-/morgan-1.10.0.tgz", + "integrity": "sha512-AbegBVI4sh6El+1gNwvD5YIck7nSA36weD7xvIxG4in80j/UoK8AEGaWnnz8v1GxonMCltmlNs5ZKbGvl9b1XQ==", + "requires": { + "basic-auth": "~2.0.1", + "debug": "2.6.9", + "depd": "~2.0.0", + "on-finished": "~2.3.0", + "on-headers": "~1.0.2" + } + }, + "ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" + }, + "object-inspect": { + "version": "1.12.2", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.2.tgz", + "integrity": "sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ==" + }, + "on-finished": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz", + "integrity": "sha512-ikqdkGAAyf/X/gPhXGvfgAytDZtDbr+bkNUJ0N9h5MI/dmdgCs3l6hoHrcUv41sRKew3jIwrp4qQDXiK99Utww==", + "requires": { + "ee-first": "1.1.1" + } + }, + "on-headers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/on-headers/-/on-headers-1.0.2.tgz", + "integrity": "sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA==" + }, + "once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "requires": { + "wrappy": "1" + } + }, + "qs": { + "version": "6.11.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz", + "integrity": 
"sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==", + "requires": { + "side-channel": "^1.0.4" + } + }, + "readable-stream": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "requires": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + } + }, + "safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" + }, + "semver": { + "version": "7.3.8", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz", + "integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==", + "requires": { + "lru-cache": "^6.0.0" + } + }, + "side-channel": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", + "integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==", + "requires": { + "call-bind": "^1.0.0", + "get-intrinsic": "^1.0.2", + "object-inspect": "^1.9.0" + } + }, + "smee-client": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/smee-client/-/smee-client-1.2.3.tgz", + "integrity": "sha512-uDrU8u9/Ln7aRXyzGHgVaNUS8onHZZeSwQjCdkMoSL7U85xI+l+Y2NgjibkMJAyXkW7IAbb8rw9RMHIjS6lAwA==", + "requires": { + "commander": "^2.19.0", + "eventsource": "^1.1.0", + "morgan": "^1.9.1", + "superagent": "^7.1.3", + "validator": "^13.7.0" + }, + "dependencies": { + "eventsource": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/eventsource/-/eventsource-1.1.2.tgz", + "integrity": "sha512-xAH3zWhgO2/3KIniEKYPr8plNSzlGINOUqYj0m0u7AB81iRw8b/3E73W6AuU+6klLbaSFmZnaETQ2lXPfAydrA==" + } + } + }, + "string_decoder": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", + "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "requires": { + "safe-buffer": "~5.2.0" + }, + "dependencies": { + "safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==" + } + } + }, + "superagent": { + "version": "7.1.5", + "resolved": "https://registry.npmjs.org/superagent/-/superagent-7.1.5.tgz", + "integrity": "sha512-HQYyGuDRFGmZ6GNC4hq2f37KnsY9Lr0/R1marNZTgMweVDQLTLJJ6DGQ9Tj/xVVs5HEnop9EMmTbywb5P30aqw==", + "requires": { + "component-emitter": "^1.3.0", + "cookiejar": "^2.1.3", + "debug": "^4.3.4", + "fast-safe-stringify": "^2.1.1", + "form-data": "^4.0.0", + "formidable": "^2.0.1", + "methods": "^1.1.2", + "mime": "^2.5.0", + "qs": "^6.10.3", + "readable-stream": "^3.6.0", + "semver": "^7.3.7" + }, + "dependencies": { + "debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "requires": { + "ms": "2.1.2" + } + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": 
"sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + } + } + }, + "util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" + }, + "validator": { + "version": "13.7.0", + "resolved": "https://registry.npmjs.org/validator/-/validator-13.7.0.tgz", + "integrity": "sha512-nYXQLCBkpJ8X6ltALua9dRrZDHVYxjJ1wgskNt1lH9fzGjs3tgojGSCBjmEPwkWS1y29+DrizMTW19Pr9uB2nw==" + }, + "wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" + }, + "yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + } + } +} diff --git a/tools/github/package.json b/tools/github/package.json new file mode 100644 index 0000000..c6d329d --- /dev/null +++ b/tools/github/package.json @@ -0,0 +1,20 @@ +{ + "name": "github-auto-sync", + "version": "1.0.0", + "description": "", + "main": "listen", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "author": "Matija Stepanic", + "license": "MIT", + "dependencies": { + "@octokit/webhooks": "^10.3.0", + "eventsource": "^2.0.2", + "smee-client": "^1.2.3" + }, + "devDependencies": { + "@types/node": "^18.11.0" + }, + "type": "commonjs" +} diff --git a/tools/hotreloader.sh b/tools/hotreloader.sh new file mode 100755 index 0000000..beb4259 --- /dev/null +++ b/tools/hotreloader.sh @@ -0,0 +1,22 @@ +#!/bin/bash + +set -euo pipefail +PIDFILES=($(compgen -G "/tmp/flutter*.pid")) + +trap 'pkill -P $$; exit' SIGINT SIGTERM + +if [[ "${1-}" != "" && ${#PIDFILES[@]} > 0 ]]; then + echo $1 + PIDS="" + for pf in ${PIDFILES[@]}; do + PIDS+=$(cat $pf) + PIDS+=" " + done + if [[ "$1" =~ \/assets\/ || "$1" =~ \/state\/ || "$1" =~ backend ]]; then + echo "Flutter - Restarting ${PIDS}" + kill -USR2 $PIDS + else + echo "Flutter - Reloading ${PIDS}" + kill -USR1 $PIDS + fi +fi \ No newline at end of file diff --git a/tools/sync b/tools/sync new file mode 100755 index 0000000..ab7908a --- /dev/null +++ b/tools/sync @@ -0,0 +1,24 @@ +#!/bin/bash + +git config pull.rebase false +export GIT_MERGE_AUTOEDIT=no + +git add . +git commit -m "Local changes before sync" +git push + +if ! git pull origin flutterflow; then + exit 1 # break execution if GIT conflict occured +fi + +if [ "$DEEP" = true ] ; then + + flutter pub get + flutter packages pub run build_runner build --delete-conflicting-outputs + flutter pub run flutter_launcher_icons:main + +fi + +git add . +git commit -m "Local changes after sync" +git push \ No newline at end of file diff --git a/watch b/watch new file mode 100755 index 0000000..5062481 --- /dev/null +++ b/watch @@ -0,0 +1,30 @@ +#!/bin/bash + +# ------- CONFIGURATION ---------------- +export GITHUB_WEBHOOK_URL="https://smee.io/" +export GITHUB_WEBHOOK_SECRET="ff-my-github-webhook-secret" +# ------- END CONFIGURATION ------------ + +echo "Flutter hotreload watch for a DART files changes..." + +SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) + +trap ctrl_c INT +function ctrl_c() { + # echo "** Trapped CTRL-C" + kill -9 $child_1_pid + kill -9 "$$" +} + +# REQUIREMENTS +# 1. 
Run `npm install --prefix ./tools/github` +# 2. Setup GITHUB_WEBHOOK_URL and GITHUB_WEBHOOK_SECRET in this script and in the GitHub Webhook settings +$SCRIPT_DIR/./tools/github/listen & +child_1_pid=$! + +while true +do + # 3. install `entr` with `brew install entr` + find "$SCRIPT_DIR/./lib/" -name '*.dart' | entr -dnp "$SCRIPT_DIR/./tools/hotreloader.sh" /_ + sleep 1 +done \ No newline at end of file From 780ca3a8164f6cc595de3b6144b7c39ee0ad8e26 Mon Sep 17 00:00:00 2001 From: Matija Stepanic Date: Fri, 21 Oct 2022 01:27:47 +0200 Subject: [PATCH 06/11] iOS first run --- .../plugins/GeneratedPluginRegistrant.java | 27 +- ios/Podfile.lock | 49 ++ ios/Runner.xcodeproj/project.pbxproj | 90 ++- .../contents.xcworkspacedata | 2 +- .../xcshareddata/xcschemes/Runner.xcscheme | 2 +- .../contents.xcworkspacedata | 3 + ios/Runner/GeneratedPluginRegistrant.h | 2 + ios/Runner/GeneratedPluginRegistrant.m | 33 +- ios/Runner/Info.plist | 2 + pubspec.lock | 577 ++++++++++++++++++ watch | 2 +- 11 files changed, 762 insertions(+), 27 deletions(-) create mode 100644 ios/Podfile.lock create mode 100644 pubspec.lock diff --git a/android/app/src/main/java/io/flutter/plugins/GeneratedPluginRegistrant.java b/android/app/src/main/java/io/flutter/plugins/GeneratedPluginRegistrant.java index 33c1b3a..e831f96 100644 --- a/android/app/src/main/java/io/flutter/plugins/GeneratedPluginRegistrant.java +++ b/android/app/src/main/java/io/flutter/plugins/GeneratedPluginRegistrant.java @@ -2,9 +2,9 @@ import androidx.annotation.Keep; import androidx.annotation.NonNull; +import io.flutter.Log; import io.flutter.embedding.engine.FlutterEngine; -import io.flutter.embedding.engine.plugins.shim.ShimPluginRegistry; /** * Generated file. Do not edit. @@ -13,10 +13,27 @@ */ @Keep public final class GeneratedPluginRegistrant { + private static final String TAG = "GeneratedPluginRegistrant"; public static void registerWith(@NonNull FlutterEngine flutterEngine) { - ShimPluginRegistry shimPluginRegistry = new ShimPluginRegistry(flutterEngine); - io.flutter.plugins.firebaseauth.FirebaseAuthPlugin.registerWith(shimPluginRegistry.registrarFor("io.flutter.plugins.firebaseauth.FirebaseAuthPlugin")); - flutterEngine.getPlugins().add(new io.flutter.plugins.firebase.core.FirebaseCorePlugin()); - io.flutter.plugins.googlesignin.GoogleSignInPlugin.registerWith(shimPluginRegistry.registrarFor("io.flutter.plugins.googlesignin.GoogleSignInPlugin")); + try { + flutterEngine.getPlugins().add(new io.flutter.plugins.pathprovider.PathProviderPlugin()); + } catch(Exception e) { + Log.e(TAG, "Error registering plugin path_provider_android, io.flutter.plugins.pathprovider.PathProviderPlugin", e); + } + try { + flutterEngine.getPlugins().add(new io.flutter.plugins.sharedpreferences.SharedPreferencesPlugin()); + } catch(Exception e) { + Log.e(TAG, "Error registering plugin shared_preferences_android, io.flutter.plugins.sharedpreferences.SharedPreferencesPlugin", e); + } + try { + flutterEngine.getPlugins().add(new com.tekartik.sqflite.SqflitePlugin()); + } catch(Exception e) { + Log.e(TAG, "Error registering plugin sqflite, com.tekartik.sqflite.SqflitePlugin", e); + } + try { + flutterEngine.getPlugins().add(new io.flutter.plugins.urllauncher.UrlLauncherPlugin()); + } catch(Exception e) { + Log.e(TAG, "Error registering plugin url_launcher_android, io.flutter.plugins.urllauncher.UrlLauncherPlugin", e); + } } } diff --git a/ios/Podfile.lock b/ios/Podfile.lock new file mode 100644 index 0000000..6f18195 --- /dev/null +++ b/ios/Podfile.lock @@ -0,0 +1,49 @@
+PODS: + - Flutter (1.0.0) + - FMDB (2.7.5): + - FMDB/standard (= 2.7.5) + - FMDB/standard (2.7.5) + - path_provider_ios (0.0.1): + - Flutter + - shared_preferences_ios (0.0.1): + - Flutter + - sqflite (0.0.2): + - Flutter + - FMDB (>= 2.7.5) + - url_launcher_ios (0.0.1): + - Flutter + +DEPENDENCIES: + - Flutter (from `Flutter`) + - path_provider_ios (from `.symlinks/plugins/path_provider_ios/ios`) + - shared_preferences_ios (from `.symlinks/plugins/shared_preferences_ios/ios`) + - sqflite (from `.symlinks/plugins/sqflite/ios`) + - url_launcher_ios (from `.symlinks/plugins/url_launcher_ios/ios`) + +SPEC REPOS: + trunk: + - FMDB + +EXTERNAL SOURCES: + Flutter: + :path: Flutter + path_provider_ios: + :path: ".symlinks/plugins/path_provider_ios/ios" + shared_preferences_ios: + :path: ".symlinks/plugins/shared_preferences_ios/ios" + sqflite: + :path: ".symlinks/plugins/sqflite/ios" + url_launcher_ios: + :path: ".symlinks/plugins/url_launcher_ios/ios" + +SPEC CHECKSUMS: + Flutter: f04841e97a9d0b0a8025694d0796dd46242b2854 + FMDB: 2ce00b547f966261cd18927a3ddb07cb6f3db82a + path_provider_ios: 14f3d2fd28c4fdb42f44e0f751d12861c43cee02 + shared_preferences_ios: 548a61f8053b9b8a49ac19c1ffbc8b92c50d68ad + sqflite: 6d358c025f5b867b29ed92fc697fd34924e11904 + url_launcher_ios: 839c58cdb4279282219f5e248c3321761ff3c4de + +PODFILE CHECKSUM: eae4152c3630e172b84de35121c0831e1f634c9d + +COCOAPODS: 1.11.3 diff --git a/ios/Runner.xcodeproj/project.pbxproj b/ios/Runner.xcodeproj/project.pbxproj index 388736b..b350464 100644 --- a/ios/Runner.xcodeproj/project.pbxproj +++ b/ios/Runner.xcodeproj/project.pbxproj @@ -3,14 +3,15 @@ archiveVersion = 1; classes = { }; - objectVersion = 46; + objectVersion = 50; objects = { /* Begin PBXBuildFile section */ 1498D2341E8E89220040F4C2 /* GeneratedPluginRegistrant.m in Sources */ = {isa = PBXBuildFile; fileRef = 1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m */; }; 3B3967161E833CAA004F5970 /* AppFrameworkInfo.plist in Resources */ = {isa = PBXBuildFile; fileRef = 3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */; }; - 74858FAF1ED2DC5600515810 /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 74858FAE1ED2DC5600515810 /* AppDelegate.swift */; }; + 529F0E8BE8D5D953F2628E94 /* Pods_Runner.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 16CAE80E8383B0069FF4A85E /* Pods_Runner.framework */; }; 6436409A27A31CD800820AF7 /* InfoPlist.strings in Resources */ = {isa = PBXBuildFile; fileRef = 6436409C27A31CD800820AF7 /* InfoPlist.strings */; }; + 74858FAF1ED2DC5600515810 /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 74858FAE1ED2DC5600515810 /* AppDelegate.swift */; }; 97C146FC1CF9000F007C117D /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FA1CF9000F007C117D /* Main.storyboard */; }; 97C146FE1CF9000F007C117D /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FD1CF9000F007C117D /* Assets.xcassets */; }; 97C147011CF9000F007C117D /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FF1CF9000F007C117D /* LaunchScreen.storyboard */; }; @@ -32,7 +33,9 @@ /* Begin PBXFileReference section */ 1498D2321E8E86230040F4C2 /* GeneratedPluginRegistrant.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = GeneratedPluginRegistrant.h; sourceTree = ""; }; 1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GeneratedPluginRegistrant.m; sourceTree = 
""; }; + 16CAE80E8383B0069FF4A85E /* Pods_Runner.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_Runner.framework; sourceTree = BUILT_PRODUCTS_DIR; }; 3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; name = AppFrameworkInfo.plist; path = Flutter/AppFrameworkInfo.plist; sourceTree = ""; }; + 62FB4BBDE513DEBAE75122AD /* Pods-Runner.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.debug.xcconfig"; path = "Target Support Files/Pods-Runner/Pods-Runner.debug.xcconfig"; sourceTree = ""; }; 74858FAD1ED2DC5600515810 /* Runner-Bridging-Header.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "Runner-Bridging-Header.h"; sourceTree = ""; }; 74858FAE1ED2DC5600515810 /* AppDelegate.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = ""; }; 7AFA3C8E1D35360C0083082E /* Release.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; name = Release.xcconfig; path = Flutter/Release.xcconfig; sourceTree = ""; }; @@ -43,6 +46,8 @@ 97C146FD1CF9000F007C117D /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; 97C147001CF9000F007C117D /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = ""; }; 97C147021CF9000F007C117D /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; + 9FEC031413496A2FAA2D84EF /* Pods-Runner.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.release.xcconfig"; path = "Target Support Files/Pods-Runner/Pods-Runner.release.xcconfig"; sourceTree = ""; }; + B514C7DAD1D4E817D73EAD29 /* Pods-Runner.profile.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.profile.xcconfig"; path = "Target Support Files/Pods-Runner/Pods-Runner.profile.xcconfig"; sourceTree = ""; }; /* End PBXFileReference section */ /* Begin PBXFrameworksBuildPhase section */ @@ -50,6 +55,7 @@ isa = PBXFrameworksBuildPhase; buildActionMask = 2147483647; files = ( + 529F0E8BE8D5D953F2628E94 /* Pods_Runner.framework in Frameworks */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -73,6 +79,8 @@ 9740EEB11CF90186004384FC /* Flutter */, 97C146F01CF9000F007C117D /* Runner */, 97C146EF1CF9000F007C117D /* Products */, + FF3916960E8A303B35FA7505 /* Pods */, + CEBD114BBB37F771F14F6CC9 /* Frameworks */, ); sourceTree = ""; }; @@ -100,6 +108,25 @@ path = Runner; sourceTree = ""; }; + CEBD114BBB37F771F14F6CC9 /* Frameworks */ = { + isa = PBXGroup; + children = ( + 16CAE80E8383B0069FF4A85E /* Pods_Runner.framework */, + ); + name = Frameworks; + sourceTree = ""; + }; + FF3916960E8A303B35FA7505 /* Pods */ = { + isa = PBXGroup; + children = ( + 62FB4BBDE513DEBAE75122AD /* Pods-Runner.debug.xcconfig */, + 9FEC031413496A2FAA2D84EF /* Pods-Runner.release.xcconfig */, + B514C7DAD1D4E817D73EAD29 /* Pods-Runner.profile.xcconfig */, + ); + name = Pods; + path = Pods; + sourceTree = ""; + }; /* End PBXGroup section */ /* Begin PBXNativeTarget section */ @@ -107,12 +134,14 @@ isa = PBXNativeTarget; buildConfigurationList = 97C147051CF9000F007C117D /* Build 
configuration list for PBXNativeTarget "Runner" */; buildPhases = ( + BAC14F11B4C78F676E04F220 /* [CP] Check Pods Manifest.lock */, 9740EEB61CF901F6004384FC /* Run Script */, 97C146EA1CF9000F007C117D /* Sources */, 97C146EB1CF9000F007C117D /* Frameworks */, 97C146EC1CF9000F007C117D /* Resources */, 9705A1C41CF9048500538489 /* Embed Frameworks */, 3B06AD1E1E4923F5004D2608 /* Thin Binary */, + E28FCB2BE2C1AB429D860308 /* [CP] Embed Pods Frameworks */, ); buildRules = ( ); @@ -129,7 +158,7 @@ 97C146E61CF9000F007C117D /* Project object */ = { isa = PBXProject; attributes = { - LastUpgradeCheck = 1020; + LastUpgradeCheck = 1300; ORGANIZATIONNAME = ""; TargetAttributes = { 97C146ED1CF9000F007C117D = { @@ -200,6 +229,45 @@ shellPath = /bin/sh; shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" build"; }; + BAC14F11B4C78F676E04F220 /* [CP] Check Pods Manifest.lock */ = { + isa = PBXShellScriptBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + inputFileListPaths = ( + ); + inputPaths = ( + "${PODS_PODFILE_DIR_PATH}/Podfile.lock", + "${PODS_ROOT}/Manifest.lock", + ); + name = "[CP] Check Pods Manifest.lock"; + outputFileListPaths = ( + ); + outputPaths = ( + "$(DERIVED_FILE_DIR)/Pods-Runner-checkManifestLockResult.txt", + ); + runOnlyForDeploymentPostprocessing = 0; + shellPath = /bin/sh; + shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n"; + showEnvVarsInLog = 0; + }; + E28FCB2BE2C1AB429D860308 /* [CP] Embed Pods Frameworks */ = { + isa = PBXShellScriptBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + inputFileListPaths = ( + "${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks-${CONFIGURATION}-input-files.xcfilelist", + ); + name = "[CP] Embed Pods Frameworks"; + outputFileListPaths = ( + "${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks-${CONFIGURATION}-output-files.xcfilelist", + ); + runOnlyForDeploymentPostprocessing = 0; + shellPath = /bin/sh; + shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks.sh\"\n"; + showEnvVarsInLog = 0; + }; /* End PBXShellScriptBuildPhase section */ /* Begin PBXSourcesBuildPhase section */ @@ -218,7 +286,6 @@ 6436409C27A31CD800820AF7 /* InfoPlist.strings */ = { isa = PBXVariantGroup; children = ( - ); name = InfoPlist.strings; sourceTree = ""; @@ -305,7 +372,10 @@ "$(PROJECT_DIR)/Flutter", ); INFOPLIST_FILE = Runner/Info.plist; - LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; + LD_RUNPATH_SEARCH_PATHS = ( + "$(inherited)", + "@executable_path/Frameworks", + ); LIBRARY_SEARCH_PATHS = ( "$(inherited)", "$(PROJECT_DIR)/Flutter", @@ -437,7 +507,10 @@ "$(PROJECT_DIR)/Flutter", ); INFOPLIST_FILE = Runner/Info.plist; - LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; + LD_RUNPATH_SEARCH_PATHS = ( + "$(inherited)", + "@executable_path/Frameworks", + ); LIBRARY_SEARCH_PATHS = ( "$(inherited)", "$(PROJECT_DIR)/Flutter", @@ -464,7 +537,10 @@ "$(PROJECT_DIR)/Flutter", ); INFOPLIST_FILE = Runner/Info.plist; - LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; + LD_RUNPATH_SEARCH_PATHS = ( + "$(inherited)", + 
"@executable_path/Frameworks", + ); LIBRARY_SEARCH_PATHS = ( "$(inherited)", "$(PROJECT_DIR)/Flutter", diff --git a/ios/Runner.xcodeproj/project.xcworkspace/contents.xcworkspacedata b/ios/Runner.xcodeproj/project.xcworkspace/contents.xcworkspacedata index 1d526a1..919434a 100644 --- a/ios/Runner.xcodeproj/project.xcworkspace/contents.xcworkspacedata +++ b/ios/Runner.xcodeproj/project.xcworkspace/contents.xcworkspacedata @@ -2,6 +2,6 @@ + location = "self:"> diff --git a/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme b/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme index a28140c..3db53b6 100644 --- a/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme +++ b/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme @@ -1,6 +1,6 @@ + + diff --git a/ios/Runner/GeneratedPluginRegistrant.h b/ios/Runner/GeneratedPluginRegistrant.h index ed9a5c6..7a89092 100644 --- a/ios/Runner/GeneratedPluginRegistrant.h +++ b/ios/Runner/GeneratedPluginRegistrant.h @@ -2,6 +2,8 @@ // Generated file. Do not edit. // +// clang-format off + #ifndef GeneratedPluginRegistrant_h #define GeneratedPluginRegistrant_h diff --git a/ios/Runner/GeneratedPluginRegistrant.m b/ios/Runner/GeneratedPluginRegistrant.m index 36b53fe..ea44c71 100644 --- a/ios/Runner/GeneratedPluginRegistrant.m +++ b/ios/Runner/GeneratedPluginRegistrant.m @@ -2,32 +2,41 @@ // Generated file. Do not edit. // +// clang-format off + #import "GeneratedPluginRegistrant.h" -#if __has_include() -#import +#if __has_include() +#import +#else +@import path_provider_ios; +#endif + +#if __has_include() +#import #else -@import firebase_auth; +@import shared_preferences_ios; #endif -#if __has_include() -#import +#if __has_include() +#import #else -@import firebase_core; +@import sqflite; #endif -#if __has_include() -#import +#if __has_include() +#import #else -@import google_sign_in; +@import url_launcher_ios; #endif @implementation GeneratedPluginRegistrant + (void)registerWithRegistry:(NSObject*)registry { - [FLTFirebaseAuthPlugin registerWithRegistrar:[registry registrarForPlugin:@"FLTFirebaseAuthPlugin"]]; - [FLTFirebaseCorePlugin registerWithRegistrar:[registry registrarForPlugin:@"FLTFirebaseCorePlugin"]]; - [FLTGoogleSignInPlugin registerWithRegistrar:[registry registrarForPlugin:@"FLTGoogleSignInPlugin"]]; + [FLTPathProviderPlugin registerWithRegistrar:[registry registrarForPlugin:@"FLTPathProviderPlugin"]]; + [FLTSharedPreferencesPlugin registerWithRegistrar:[registry registrarForPlugin:@"FLTSharedPreferencesPlugin"]]; + [SqflitePlugin registerWithRegistrar:[registry registrarForPlugin:@"SqflitePlugin"]]; + [FLTURLLauncherPlugin registerWithRegistrar:[registry registrarForPlugin:@"FLTURLLauncherPlugin"]]; } @end diff --git a/ios/Runner/Info.plist b/ios/Runner/Info.plist index 7a13b58..8e62e77 100644 --- a/ios/Runner/Info.plist +++ b/ios/Runner/Info.plist @@ -66,5 +66,7 @@ UIViewControllerBasedStatusBarAppearance + CADisableMinimumFrameDurationOnPhone + diff --git a/pubspec.lock b/pubspec.lock new file mode 100644 index 0000000..bb0c6c1 --- /dev/null +++ b/pubspec.lock @@ -0,0 +1,577 @@ +# Generated by pub +# See https://dart.dev/tools/pub/glossary#lockfile +packages: + async: + dependency: transitive + description: + name: async + url: "https://pub.dartlang.org" + source: hosted + version: "2.9.0" + auto_size_text: + dependency: "direct main" + description: + name: auto_size_text + url: "https://pub.dartlang.org" + source: hosted + version: "3.0.0" + boolean_selector: + dependency: transitive + description: + name: 
boolean_selector + url: "https://pub.dartlang.org" + source: hosted + version: "2.1.0" + cached_network_image: + dependency: "direct main" + description: + name: cached_network_image + url: "https://pub.dartlang.org" + source: hosted + version: "3.2.1" + cached_network_image_platform_interface: + dependency: transitive + description: + name: cached_network_image_platform_interface + url: "https://pub.dartlang.org" + source: hosted + version: "1.0.0" + cached_network_image_web: + dependency: transitive + description: + name: cached_network_image_web + url: "https://pub.dartlang.org" + source: hosted + version: "1.0.1" + characters: + dependency: transitive + description: + name: characters + url: "https://pub.dartlang.org" + source: hosted + version: "1.2.1" + clock: + dependency: transitive + description: + name: clock + url: "https://pub.dartlang.org" + source: hosted + version: "1.1.1" + collection: + dependency: transitive + description: + name: collection + url: "https://pub.dartlang.org" + source: hosted + version: "1.16.0" + crypto: + dependency: transitive + description: + name: crypto + url: "https://pub.dartlang.org" + source: hosted + version: "3.0.2" + cupertino_icons: + dependency: "direct main" + description: + name: cupertino_icons + url: "https://pub.dartlang.org" + source: hosted + version: "1.0.5" + fake_async: + dependency: transitive + description: + name: fake_async + url: "https://pub.dartlang.org" + source: hosted + version: "1.3.1" + ffi: + dependency: transitive + description: + name: ffi + url: "https://pub.dartlang.org" + source: hosted + version: "2.0.1" + file: + dependency: transitive + description: + name: file + url: "https://pub.dartlang.org" + source: hosted + version: "6.1.4" + flutter: + dependency: "direct main" + description: flutter + source: sdk + version: "0.0.0" + flutter_animate: + dependency: "direct main" + description: + name: flutter_animate + url: "https://pub.dartlang.org" + source: hosted + version: "1.0.0" + flutter_blurhash: + dependency: transitive + description: + name: flutter_blurhash + url: "https://pub.dartlang.org" + source: hosted + version: "0.7.0" + flutter_cache_manager: + dependency: transitive + description: + name: flutter_cache_manager + url: "https://pub.dartlang.org" + source: hosted + version: "3.3.0" + flutter_localizations: + dependency: "direct main" + description: flutter + source: sdk + version: "0.0.0" + flutter_test: + dependency: "direct dev" + description: flutter + source: sdk + version: "0.0.0" + flutter_web_plugins: + dependency: transitive + description: flutter + source: sdk + version: "0.0.0" + font_awesome_flutter: + dependency: "direct main" + description: + name: font_awesome_flutter + url: "https://pub.dartlang.org" + source: hosted + version: "10.1.0" + from_css_color: + dependency: "direct main" + description: + name: from_css_color + url: "https://pub.dartlang.org" + source: hosted + version: "2.0.0" + go_router: + dependency: "direct main" + description: + name: go_router + url: "https://pub.dartlang.org" + source: hosted + version: "3.1.0" + google_fonts: + dependency: "direct main" + description: + name: google_fonts + url: "https://pub.dartlang.org" + source: hosted + version: "3.0.1" + http: + dependency: transitive + description: + name: http + url: "https://pub.dartlang.org" + source: hosted + version: "0.13.5" + http_parser: + dependency: transitive + description: + name: http_parser + url: "https://pub.dartlang.org" + source: hosted + version: "4.0.2" + intl: + dependency: "direct main" + 
description: + name: intl + url: "https://pub.dartlang.org" + source: hosted + version: "0.17.0" + js: + dependency: transitive + description: + name: js + url: "https://pub.dartlang.org" + source: hosted + version: "0.6.4" + json_path: + dependency: "direct main" + description: + name: json_path + url: "https://pub.dartlang.org" + source: hosted + version: "0.4.1" + logging: + dependency: transitive + description: + name: logging + url: "https://pub.dartlang.org" + source: hosted + version: "1.1.0" + matcher: + dependency: transitive + description: + name: matcher + url: "https://pub.dartlang.org" + source: hosted + version: "0.12.12" + material_color_utilities: + dependency: transitive + description: + name: material_color_utilities + url: "https://pub.dartlang.org" + source: hosted + version: "0.1.5" + meta: + dependency: transitive + description: + name: meta + url: "https://pub.dartlang.org" + source: hosted + version: "1.8.0" + octo_image: + dependency: transitive + description: + name: octo_image + url: "https://pub.dartlang.org" + source: hosted + version: "1.0.2" + page_transition: + dependency: "direct main" + description: + name: page_transition + url: "https://pub.dartlang.org" + source: hosted + version: "2.0.4" + path: + dependency: transitive + description: + name: path + url: "https://pub.dartlang.org" + source: hosted + version: "1.8.2" + path_provider: + dependency: transitive + description: + name: path_provider + url: "https://pub.dartlang.org" + source: hosted + version: "2.0.11" + path_provider_android: + dependency: transitive + description: + name: path_provider_android + url: "https://pub.dartlang.org" + source: hosted + version: "2.0.20" + path_provider_ios: + dependency: transitive + description: + name: path_provider_ios + url: "https://pub.dartlang.org" + source: hosted + version: "2.0.11" + path_provider_linux: + dependency: transitive + description: + name: path_provider_linux + url: "https://pub.dartlang.org" + source: hosted + version: "2.1.7" + path_provider_macos: + dependency: transitive + description: + name: path_provider_macos + url: "https://pub.dartlang.org" + source: hosted + version: "2.0.6" + path_provider_platform_interface: + dependency: transitive + description: + name: path_provider_platform_interface + url: "https://pub.dartlang.org" + source: hosted + version: "2.0.5" + path_provider_windows: + dependency: transitive + description: + name: path_provider_windows + url: "https://pub.dartlang.org" + source: hosted + version: "2.1.3" + pedantic: + dependency: transitive + description: + name: pedantic + url: "https://pub.dartlang.org" + source: hosted + version: "1.11.1" + petitparser: + dependency: transitive + description: + name: petitparser + url: "https://pub.dartlang.org" + source: hosted + version: "5.0.0" + platform: + dependency: transitive + description: + name: platform + url: "https://pub.dartlang.org" + source: hosted + version: "3.1.0" + plugin_platform_interface: + dependency: transitive + description: + name: plugin_platform_interface + url: "https://pub.dartlang.org" + source: hosted + version: "2.1.3" + process: + dependency: transitive + description: + name: process + url: "https://pub.dartlang.org" + source: hosted + version: "4.2.4" + rfc_6901: + dependency: transitive + description: + name: rfc_6901 + url: "https://pub.dartlang.org" + source: hosted + version: "0.1.1" + rxdart: + dependency: transitive + description: + name: rxdart + url: "https://pub.dartlang.org" + source: hosted + version: "0.27.5" + shared_preferences: 
+ dependency: "direct main" + description: + name: shared_preferences + url: "https://pub.dartlang.org" + source: hosted + version: "2.0.15" + shared_preferences_android: + dependency: transitive + description: + name: shared_preferences_android + url: "https://pub.dartlang.org" + source: hosted + version: "2.0.14" + shared_preferences_ios: + dependency: transitive + description: + name: shared_preferences_ios + url: "https://pub.dartlang.org" + source: hosted + version: "2.1.1" + shared_preferences_linux: + dependency: transitive + description: + name: shared_preferences_linux + url: "https://pub.dartlang.org" + source: hosted + version: "2.1.1" + shared_preferences_macos: + dependency: transitive + description: + name: shared_preferences_macos + url: "https://pub.dartlang.org" + source: hosted + version: "2.0.4" + shared_preferences_platform_interface: + dependency: transitive + description: + name: shared_preferences_platform_interface + url: "https://pub.dartlang.org" + source: hosted + version: "2.1.0" + shared_preferences_web: + dependency: transitive + description: + name: shared_preferences_web + url: "https://pub.dartlang.org" + source: hosted + version: "2.0.4" + shared_preferences_windows: + dependency: transitive + description: + name: shared_preferences_windows + url: "https://pub.dartlang.org" + source: hosted + version: "2.1.1" + sky_engine: + dependency: transitive + description: flutter + source: sdk + version: "0.0.99" + source_span: + dependency: transitive + description: + name: source_span + url: "https://pub.dartlang.org" + source: hosted + version: "1.9.0" + sqflite: + dependency: transitive + description: + name: sqflite + url: "https://pub.dartlang.org" + source: hosted + version: "2.1.0+1" + sqflite_common: + dependency: transitive + description: + name: sqflite_common + url: "https://pub.dartlang.org" + source: hosted + version: "2.3.0" + stack_trace: + dependency: transitive + description: + name: stack_trace + url: "https://pub.dartlang.org" + source: hosted + version: "1.10.0" + stream_channel: + dependency: transitive + description: + name: stream_channel + url: "https://pub.dartlang.org" + source: hosted + version: "2.1.0" + string_scanner: + dependency: transitive + description: + name: string_scanner + url: "https://pub.dartlang.org" + source: hosted + version: "1.1.1" + synchronized: + dependency: transitive + description: + name: synchronized + url: "https://pub.dartlang.org" + source: hosted + version: "3.0.0+3" + term_glyph: + dependency: transitive + description: + name: term_glyph + url: "https://pub.dartlang.org" + source: hosted + version: "1.2.1" + test_api: + dependency: transitive + description: + name: test_api + url: "https://pub.dartlang.org" + source: hosted + version: "0.4.12" + timeago: + dependency: "direct main" + description: + name: timeago + url: "https://pub.dartlang.org" + source: hosted + version: "3.2.2" + typed_data: + dependency: transitive + description: + name: typed_data + url: "https://pub.dartlang.org" + source: hosted + version: "1.3.1" + url_launcher: + dependency: "direct main" + description: + name: url_launcher + url: "https://pub.dartlang.org" + source: hosted + version: "6.1.5" + url_launcher_android: + dependency: transitive + description: + name: url_launcher_android + url: "https://pub.dartlang.org" + source: hosted + version: "6.0.19" + url_launcher_ios: + dependency: transitive + description: + name: url_launcher_ios + url: "https://pub.dartlang.org" + source: hosted + version: "6.0.17" + url_launcher_linux: + 
dependency: transitive + description: + name: url_launcher_linux + url: "https://pub.dartlang.org" + source: hosted + version: "3.0.1" + url_launcher_macos: + dependency: transitive + description: + name: url_launcher_macos + url: "https://pub.dartlang.org" + source: hosted + version: "3.0.1" + url_launcher_platform_interface: + dependency: transitive + description: + name: url_launcher_platform_interface + url: "https://pub.dartlang.org" + source: hosted + version: "2.1.1" + url_launcher_web: + dependency: transitive + description: + name: url_launcher_web + url: "https://pub.dartlang.org" + source: hosted + version: "2.0.13" + url_launcher_windows: + dependency: transitive + description: + name: url_launcher_windows + url: "https://pub.dartlang.org" + source: hosted + version: "3.0.1" + uuid: + dependency: transitive + description: + name: uuid + url: "https://pub.dartlang.org" + source: hosted + version: "3.0.6" + vector_math: + dependency: transitive + description: + name: vector_math + url: "https://pub.dartlang.org" + source: hosted + version: "2.1.2" + win32: + dependency: transitive + description: + name: win32 + url: "https://pub.dartlang.org" + source: hosted + version: "3.0.1" + xdg_directories: + dependency: transitive + description: + name: xdg_directories + url: "https://pub.dartlang.org" + source: hosted + version: "0.2.0+2" +sdks: + dart: ">=2.18.0-0 <3.0.0" + flutter: ">=3.3.0-0" diff --git a/watch b/watch index 5062481..b584e06 100755 --- a/watch +++ b/watch @@ -1,7 +1,7 @@ #!/bin/bash # ------- CONFIGURATION ---------------- -export GITHUB_WEBHOOK_URL="https://smee.io/" +export GITHUB_WEBHOOK_URL="https://smee.io/aLVrM23iI4IPWC5" export GITHUB_WEBHOOK_SECRET="ff-my-github-webhook-secret" # ------- END CONFIGURATION ------------ From c36582ae984566549a1d96dd4222970fa6f6d3a6 Mon Sep 17 00:00:00 2001 From: Matija Stepanic Date: Fri, 21 Oct 2022 01:28:07 +0200 Subject: [PATCH 07/11] Installed flutterflow-socket --- tools/github/.gitignore | 1 + tools/github/package-lock.json | 1028 ++++++++++++++++++++++++++++++++ tools/github/package.json | 20 + watch | 2 +- 4 files changed, 1050 insertions(+), 1 deletion(-) create mode 100644 tools/github/.gitignore create mode 100644 tools/github/package-lock.json create mode 100644 tools/github/package.json diff --git a/tools/github/.gitignore b/tools/github/.gitignore new file mode 100644 index 0000000..b512c09 --- /dev/null +++ b/tools/github/.gitignore @@ -0,0 +1 @@ +node_modules \ No newline at end of file diff --git a/tools/github/package-lock.json b/tools/github/package-lock.json new file mode 100644 index 0000000..ac1155c --- /dev/null +++ b/tools/github/package-lock.json @@ -0,0 +1,1028 @@ +{ + "name": "github-auto-sync", + "version": "1.0.0", + "lockfileVersion": 2, + "requires": true, + "packages": { + "": { + "name": "github-auto-sync", + "version": "1.0.0", + "license": "MIT", + "dependencies": { + "@octokit/webhooks": "^10.3.0", + "eventsource": "^2.0.2", + "smee-client": "^1.2.3" + }, + "devDependencies": { + "@types/node": "^18.11.0" + } + }, + "node_modules/@octokit/openapi-types": { + "version": "14.0.0", + "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-14.0.0.tgz", + "integrity": "sha512-HNWisMYlR8VCnNurDU6os2ikx0s0VyEjDYHNS/h4cgb8DeOxQ0n72HyinUtdDVxJhFy3FWLGl0DJhfEWk3P5Iw==" + }, + "node_modules/@octokit/request-error": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-3.0.2.tgz", + "integrity": 
"sha512-WMNOFYrSaX8zXWoJg9u/pKgWPo94JXilMLb2VManNOby9EZxrQaBe/QSC4a1TzpAlpxofg2X/jMnCyZgL6y7eg==", + "dependencies": { + "@octokit/types": "^8.0.0", + "deprecation": "^2.0.0", + "once": "^1.4.0" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/@octokit/types": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-8.0.0.tgz", + "integrity": "sha512-65/TPpOJP1i3K4lBJMnWqPUJ6zuOtzhtagDvydAWbEXpbFYA0oMKKyLb95NFZZP0lSh/4b6K+DQlzvYQJQQePg==", + "dependencies": { + "@octokit/openapi-types": "^14.0.0" + } + }, + "node_modules/@octokit/webhooks": { + "version": "10.3.0", + "resolved": "https://registry.npmjs.org/@octokit/webhooks/-/webhooks-10.3.0.tgz", + "integrity": "sha512-6k4dNsTjNhoJNJ56RKkWCMXEj32Cgvy4hIsV3fhK60hKbEfI1/rkx7tF87Gqtf1Xg5UVQfFmYygaBPGQCr0TmQ==", + "dependencies": { + "@octokit/request-error": "^3.0.0", + "@octokit/webhooks-methods": "^3.0.0", + "@octokit/webhooks-types": "6.5.0", + "aggregate-error": "^3.1.0" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/@octokit/webhooks-methods": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@octokit/webhooks-methods/-/webhooks-methods-3.0.1.tgz", + "integrity": "sha512-XftYVcBxtzC2G05kdBNn9IYLtQ+Cz6ufKkjZd0DU/qGaZEFTPzM2OabXAWG5tvL0q/I+Exio1JnRiPfetiMSEw==", + "engines": { + "node": ">= 14" + } + }, + "node_modules/@octokit/webhooks-types": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/@octokit/webhooks-types/-/webhooks-types-6.5.0.tgz", + "integrity": "sha512-SNx1YKBvi9IQvXo/SQ0p+9OKO2HMdzpCWcKsYxpGW1tlkE9TojYiGnFfxcXddyrsK4mC1UiyXY8+NYjWjtkVmA==" + }, + "node_modules/@types/node": { + "version": "18.11.0", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.11.0.tgz", + "integrity": "sha512-IOXCvVRToe7e0ny7HpT/X9Rb2RYtElG1a+VshjwT00HxrM2dWBApHQoqsI6WiY7Q03vdf2bCrIGzVrkF/5t10w==", + "dev": true + }, + "node_modules/aggregate-error": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz", + "integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==", + "dependencies": { + "clean-stack": "^2.0.0", + "indent-string": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/asap": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz", + "integrity": "sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA==" + }, + "node_modules/asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" + }, + "node_modules/basic-auth": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/basic-auth/-/basic-auth-2.0.1.tgz", + "integrity": "sha512-NF+epuEdnUYVlGuhaxbbq+dvJttwLnGY+YixlXlME5KpQ5W3CnXA5cVTneY3SPbPDRkcjMbifrwmFYcClgOZeg==", + "dependencies": { + "safe-buffer": "5.1.2" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/call-bind": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz", + "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==", + "dependencies": { + "function-bind": "^1.1.1", + "get-intrinsic": "^1.0.2" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/clean-stack": { + "version": "2.2.0", + "resolved": 
"https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz", + "integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==", + "engines": { + "node": ">=6" + } + }, + "node_modules/combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "dependencies": { + "delayed-stream": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/commander": { + "version": "2.20.3", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", + "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==" + }, + "node_modules/component-emitter": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.3.0.tgz", + "integrity": "sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg==" + }, + "node_modules/cookiejar": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/cookiejar/-/cookiejar-2.1.3.tgz", + "integrity": "sha512-JxbCBUdrfr6AQjOXrxoTvAMJO4HBTUIlBzslcJPAz+/KT8yk53fXun51u+RenNYvad/+Vc2DIz5o9UxlCDymFQ==" + }, + "node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/depd": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", + "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/deprecation": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz", + "integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==" + }, + "node_modules/dezalgo": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/dezalgo/-/dezalgo-1.0.3.tgz", + "integrity": "sha512-K7i4zNfT2kgQz3GylDw40ot9GAE47sFZ9EXHFSPP6zONLgH6kWXE0KWJchkbQJLBkRazq4APwZ4OwiFFlT95OQ==", + "dependencies": { + "asap": "^2.0.0", + "wrappy": "1" + } + }, + "node_modules/ee-first": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", + "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==" + }, + "node_modules/eventsource": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/eventsource/-/eventsource-2.0.2.tgz", + "integrity": "sha512-IzUmBGPR3+oUG9dUeXynyNmf91/3zUSJg1lCktzKw47OXuhco54U3r9B7O4XX+Rb1Itm9OZ2b0RkTs10bICOxA==", + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/fast-safe-stringify": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz", + "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==" + }, + 
"node_modules/form-data": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", + "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/formidable": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/formidable/-/formidable-2.0.1.tgz", + "integrity": "sha512-rjTMNbp2BpfQShhFbR3Ruk3qk2y9jKpvMW78nJgx8QKtxjDVrwbZG+wvDOmVbifHyOUOQJXxqEy6r0faRrPzTQ==", + "dependencies": { + "dezalgo": "1.0.3", + "hexoid": "1.0.0", + "once": "1.4.0", + "qs": "6.9.3" + }, + "funding": { + "url": "https://ko-fi.com/tunnckoCore/commissions" + } + }, + "node_modules/formidable/node_modules/qs": { + "version": "6.9.3", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.9.3.tgz", + "integrity": "sha512-EbZYNarm6138UKKq46tdx08Yo/q9ZhFoAXAI1meAFd2GtbRDhbZY2WQSICskT0c5q99aFzLG1D4nvTk9tqfXIw==", + "engines": { + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/function-bind": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", + "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==" + }, + "node_modules/get-intrinsic": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.3.tgz", + "integrity": "sha512-QJVz1Tj7MS099PevUG5jvnt9tSkXN8K14dxQlikJuPt4uD9hHAHjLyLBiLR5zELelBdD9QNRAXZzsJx0WaDL9A==", + "dependencies": { + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.3" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", + "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "dependencies": { + "function-bind": "^1.1.1" + }, + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/has-symbols": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", + "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/hexoid": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/hexoid/-/hexoid-1.0.0.tgz", + "integrity": "sha512-QFLV0taWQOZtvIRIAdBChesmogZrtuXvVWsFHZTk2SU+anspqZ2vMnoLg7IE1+Uk16N19APic1BuF8bC8c2m5g==", + "engines": { + "node": ">=8" + } + }, + "node_modules/indent-string": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", + "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==", + "engines": { + "node": ">=8" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + }, + "node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": 
"sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/methods": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", + "integrity": "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime": { + "version": "2.6.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-2.6.0.tgz", + "integrity": "sha512-USPkMeET31rOMiarsBNIHZKLGgvKc/LrjofAnBlOttf5ajRvqiRA8QsenbcooctK6d6Ts6aqZXBA+XbkKthiQg==", + "bin": { + "mime": "cli.js" + }, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/morgan": { + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/morgan/-/morgan-1.10.0.tgz", + "integrity": "sha512-AbegBVI4sh6El+1gNwvD5YIck7nSA36weD7xvIxG4in80j/UoK8AEGaWnnz8v1GxonMCltmlNs5ZKbGvl9b1XQ==", + "dependencies": { + "basic-auth": "~2.0.1", + "debug": "2.6.9", + "depd": "~2.0.0", + "on-finished": "~2.3.0", + "on-headers": "~1.0.2" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" + }, + "node_modules/object-inspect": { + "version": "1.12.2", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.2.tgz", + "integrity": "sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ==", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/on-finished": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz", + "integrity": "sha512-ikqdkGAAyf/X/gPhXGvfgAytDZtDbr+bkNUJ0N9h5MI/dmdgCs3l6hoHrcUv41sRKew3jIwrp4qQDXiK99Utww==", + "dependencies": { + "ee-first": "1.1.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/on-headers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/on-headers/-/on-headers-1.0.2.tgz", + "integrity": "sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/qs": { + "version": "6.11.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz", + "integrity": "sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==", + "dependencies": { + "side-channel": "^1.0.4" + }, + "engines": { 
+ "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/readable-stream": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" + }, + "node_modules/semver": { + "version": "7.3.8", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz", + "integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==", + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/side-channel": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", + "integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==", + "dependencies": { + "call-bind": "^1.0.0", + "get-intrinsic": "^1.0.2", + "object-inspect": "^1.9.0" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/smee-client": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/smee-client/-/smee-client-1.2.3.tgz", + "integrity": "sha512-uDrU8u9/Ln7aRXyzGHgVaNUS8onHZZeSwQjCdkMoSL7U85xI+l+Y2NgjibkMJAyXkW7IAbb8rw9RMHIjS6lAwA==", + "dependencies": { + "commander": "^2.19.0", + "eventsource": "^1.1.0", + "morgan": "^1.9.1", + "superagent": "^7.1.3", + "validator": "^13.7.0" + }, + "bin": { + "smee": "bin/smee.js" + } + }, + "node_modules/smee-client/node_modules/eventsource": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/eventsource/-/eventsource-1.1.2.tgz", + "integrity": "sha512-xAH3zWhgO2/3KIniEKYPr8plNSzlGINOUqYj0m0u7AB81iRw8b/3E73W6AuU+6klLbaSFmZnaETQ2lXPfAydrA==", + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/string_decoder": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", + "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "dependencies": { + "safe-buffer": "~5.2.0" + } + }, + "node_modules/string_decoder/node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/superagent": { + "version": "7.1.5", + "resolved": "https://registry.npmjs.org/superagent/-/superagent-7.1.5.tgz", + "integrity": "sha512-HQYyGuDRFGmZ6GNC4hq2f37KnsY9Lr0/R1marNZTgMweVDQLTLJJ6DGQ9Tj/xVVs5HEnop9EMmTbywb5P30aqw==", + "dependencies": { + "component-emitter": "^1.3.0", + "cookiejar": "^2.1.3", + "debug": "^4.3.4", + "fast-safe-stringify": "^2.1.1", + "form-data": "^4.0.0", + 
"formidable": "^2.0.1", + "methods": "^1.1.2", + "mime": "^2.5.0", + "qs": "^6.10.3", + "readable-stream": "^3.6.0", + "semver": "^7.3.7" + }, + "engines": { + "node": ">=6.4.0 <13 || >=14" + } + }, + "node_modules/superagent/node_modules/debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/superagent/node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" + }, + "node_modules/validator": { + "version": "13.7.0", + "resolved": "https://registry.npmjs.org/validator/-/validator-13.7.0.tgz", + "integrity": "sha512-nYXQLCBkpJ8X6ltALua9dRrZDHVYxjJ1wgskNt1lH9fzGjs3tgojGSCBjmEPwkWS1y29+DrizMTW19Pr9uB2nw==", + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" + }, + "node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + } + }, + "dependencies": { + "@octokit/openapi-types": { + "version": "14.0.0", + "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-14.0.0.tgz", + "integrity": "sha512-HNWisMYlR8VCnNurDU6os2ikx0s0VyEjDYHNS/h4cgb8DeOxQ0n72HyinUtdDVxJhFy3FWLGl0DJhfEWk3P5Iw==" + }, + "@octokit/request-error": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-3.0.2.tgz", + "integrity": "sha512-WMNOFYrSaX8zXWoJg9u/pKgWPo94JXilMLb2VManNOby9EZxrQaBe/QSC4a1TzpAlpxofg2X/jMnCyZgL6y7eg==", + "requires": { + "@octokit/types": "^8.0.0", + "deprecation": "^2.0.0", + "once": "^1.4.0" + } + }, + "@octokit/types": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-8.0.0.tgz", + "integrity": "sha512-65/TPpOJP1i3K4lBJMnWqPUJ6zuOtzhtagDvydAWbEXpbFYA0oMKKyLb95NFZZP0lSh/4b6K+DQlzvYQJQQePg==", + "requires": { + "@octokit/openapi-types": "^14.0.0" + } + }, + "@octokit/webhooks": { + "version": "10.3.0", + "resolved": "https://registry.npmjs.org/@octokit/webhooks/-/webhooks-10.3.0.tgz", + "integrity": "sha512-6k4dNsTjNhoJNJ56RKkWCMXEj32Cgvy4hIsV3fhK60hKbEfI1/rkx7tF87Gqtf1Xg5UVQfFmYygaBPGQCr0TmQ==", + "requires": { + "@octokit/request-error": "^3.0.0", + "@octokit/webhooks-methods": "^3.0.0", + "@octokit/webhooks-types": "6.5.0", + "aggregate-error": "^3.1.0" + } + }, + "@octokit/webhooks-methods": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@octokit/webhooks-methods/-/webhooks-methods-3.0.1.tgz", + "integrity": "sha512-XftYVcBxtzC2G05kdBNn9IYLtQ+Cz6ufKkjZd0DU/qGaZEFTPzM2OabXAWG5tvL0q/I+Exio1JnRiPfetiMSEw==" + }, + "@octokit/webhooks-types": 
{ + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/@octokit/webhooks-types/-/webhooks-types-6.5.0.tgz", + "integrity": "sha512-SNx1YKBvi9IQvXo/SQ0p+9OKO2HMdzpCWcKsYxpGW1tlkE9TojYiGnFfxcXddyrsK4mC1UiyXY8+NYjWjtkVmA==" + }, + "@types/node": { + "version": "18.11.0", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.11.0.tgz", + "integrity": "sha512-IOXCvVRToe7e0ny7HpT/X9Rb2RYtElG1a+VshjwT00HxrM2dWBApHQoqsI6WiY7Q03vdf2bCrIGzVrkF/5t10w==", + "dev": true + }, + "aggregate-error": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz", + "integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==", + "requires": { + "clean-stack": "^2.0.0", + "indent-string": "^4.0.0" + } + }, + "asap": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz", + "integrity": "sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA==" + }, + "asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" + }, + "basic-auth": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/basic-auth/-/basic-auth-2.0.1.tgz", + "integrity": "sha512-NF+epuEdnUYVlGuhaxbbq+dvJttwLnGY+YixlXlME5KpQ5W3CnXA5cVTneY3SPbPDRkcjMbifrwmFYcClgOZeg==", + "requires": { + "safe-buffer": "5.1.2" + } + }, + "call-bind": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz", + "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==", + "requires": { + "function-bind": "^1.1.1", + "get-intrinsic": "^1.0.2" + } + }, + "clean-stack": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz", + "integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==" + }, + "combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "requires": { + "delayed-stream": "~1.0.0" + } + }, + "commander": { + "version": "2.20.3", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", + "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==" + }, + "component-emitter": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.3.0.tgz", + "integrity": "sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg==" + }, + "cookiejar": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/cookiejar/-/cookiejar-2.1.3.tgz", + "integrity": "sha512-JxbCBUdrfr6AQjOXrxoTvAMJO4HBTUIlBzslcJPAz+/KT8yk53fXun51u+RenNYvad/+Vc2DIz5o9UxlCDymFQ==" + }, + "debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "requires": { + "ms": "2.0.0" + } + }, + "delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": 
"sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==" + }, + "depd": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", + "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==" + }, + "deprecation": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz", + "integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==" + }, + "dezalgo": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/dezalgo/-/dezalgo-1.0.3.tgz", + "integrity": "sha512-K7i4zNfT2kgQz3GylDw40ot9GAE47sFZ9EXHFSPP6zONLgH6kWXE0KWJchkbQJLBkRazq4APwZ4OwiFFlT95OQ==", + "requires": { + "asap": "^2.0.0", + "wrappy": "1" + } + }, + "ee-first": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", + "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==" + }, + "eventsource": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/eventsource/-/eventsource-2.0.2.tgz", + "integrity": "sha512-IzUmBGPR3+oUG9dUeXynyNmf91/3zUSJg1lCktzKw47OXuhco54U3r9B7O4XX+Rb1Itm9OZ2b0RkTs10bICOxA==" + }, + "fast-safe-stringify": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz", + "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==" + }, + "form-data": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", + "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", + "requires": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + } + }, + "formidable": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/formidable/-/formidable-2.0.1.tgz", + "integrity": "sha512-rjTMNbp2BpfQShhFbR3Ruk3qk2y9jKpvMW78nJgx8QKtxjDVrwbZG+wvDOmVbifHyOUOQJXxqEy6r0faRrPzTQ==", + "requires": { + "dezalgo": "1.0.3", + "hexoid": "1.0.0", + "once": "1.4.0", + "qs": "6.9.3" + }, + "dependencies": { + "qs": { + "version": "6.9.3", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.9.3.tgz", + "integrity": "sha512-EbZYNarm6138UKKq46tdx08Yo/q9ZhFoAXAI1meAFd2GtbRDhbZY2WQSICskT0c5q99aFzLG1D4nvTk9tqfXIw==" + } + } + }, + "function-bind": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", + "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==" + }, + "get-intrinsic": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.3.tgz", + "integrity": "sha512-QJVz1Tj7MS099PevUG5jvnt9tSkXN8K14dxQlikJuPt4uD9hHAHjLyLBiLR5zELelBdD9QNRAXZzsJx0WaDL9A==", + "requires": { + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.3" + } + }, + "has": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", + "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "requires": { + "function-bind": "^1.1.1" + } + }, + "has-symbols": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", + "integrity": 
"sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==" + }, + "hexoid": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/hexoid/-/hexoid-1.0.0.tgz", + "integrity": "sha512-QFLV0taWQOZtvIRIAdBChesmogZrtuXvVWsFHZTk2SU+anspqZ2vMnoLg7IE1+Uk16N19APic1BuF8bC8c2m5g==" + }, + "indent-string": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", + "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==" + }, + "inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + }, + "lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "requires": { + "yallist": "^4.0.0" + } + }, + "methods": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", + "integrity": "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==" + }, + "mime": { + "version": "2.6.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-2.6.0.tgz", + "integrity": "sha512-USPkMeET31rOMiarsBNIHZKLGgvKc/LrjofAnBlOttf5ajRvqiRA8QsenbcooctK6d6Ts6aqZXBA+XbkKthiQg==" + }, + "mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==" + }, + "mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "requires": { + "mime-db": "1.52.0" + } + }, + "morgan": { + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/morgan/-/morgan-1.10.0.tgz", + "integrity": "sha512-AbegBVI4sh6El+1gNwvD5YIck7nSA36weD7xvIxG4in80j/UoK8AEGaWnnz8v1GxonMCltmlNs5ZKbGvl9b1XQ==", + "requires": { + "basic-auth": "~2.0.1", + "debug": "2.6.9", + "depd": "~2.0.0", + "on-finished": "~2.3.0", + "on-headers": "~1.0.2" + } + }, + "ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" + }, + "object-inspect": { + "version": "1.12.2", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.2.tgz", + "integrity": "sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ==" + }, + "on-finished": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz", + "integrity": "sha512-ikqdkGAAyf/X/gPhXGvfgAytDZtDbr+bkNUJ0N9h5MI/dmdgCs3l6hoHrcUv41sRKew3jIwrp4qQDXiK99Utww==", + "requires": { + "ee-first": "1.1.1" + } + }, + "on-headers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/on-headers/-/on-headers-1.0.2.tgz", + "integrity": "sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA==" + }, + "once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": 
"sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "requires": { + "wrappy": "1" + } + }, + "qs": { + "version": "6.11.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz", + "integrity": "sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==", + "requires": { + "side-channel": "^1.0.4" + } + }, + "readable-stream": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "requires": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + } + }, + "safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" + }, + "semver": { + "version": "7.3.8", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz", + "integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==", + "requires": { + "lru-cache": "^6.0.0" + } + }, + "side-channel": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", + "integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==", + "requires": { + "call-bind": "^1.0.0", + "get-intrinsic": "^1.0.2", + "object-inspect": "^1.9.0" + } + }, + "smee-client": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/smee-client/-/smee-client-1.2.3.tgz", + "integrity": "sha512-uDrU8u9/Ln7aRXyzGHgVaNUS8onHZZeSwQjCdkMoSL7U85xI+l+Y2NgjibkMJAyXkW7IAbb8rw9RMHIjS6lAwA==", + "requires": { + "commander": "^2.19.0", + "eventsource": "^1.1.0", + "morgan": "^1.9.1", + "superagent": "^7.1.3", + "validator": "^13.7.0" + }, + "dependencies": { + "eventsource": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/eventsource/-/eventsource-1.1.2.tgz", + "integrity": "sha512-xAH3zWhgO2/3KIniEKYPr8plNSzlGINOUqYj0m0u7AB81iRw8b/3E73W6AuU+6klLbaSFmZnaETQ2lXPfAydrA==" + } + } + }, + "string_decoder": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", + "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "requires": { + "safe-buffer": "~5.2.0" + }, + "dependencies": { + "safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==" + } + } + }, + "superagent": { + "version": "7.1.5", + "resolved": "https://registry.npmjs.org/superagent/-/superagent-7.1.5.tgz", + "integrity": "sha512-HQYyGuDRFGmZ6GNC4hq2f37KnsY9Lr0/R1marNZTgMweVDQLTLJJ6DGQ9Tj/xVVs5HEnop9EMmTbywb5P30aqw==", + "requires": { + "component-emitter": "^1.3.0", + "cookiejar": "^2.1.3", + "debug": "^4.3.4", + "fast-safe-stringify": "^2.1.1", + "form-data": "^4.0.0", + "formidable": "^2.0.1", + "methods": "^1.1.2", + "mime": "^2.5.0", + "qs": "^6.10.3", + "readable-stream": "^3.6.0", + "semver": "^7.3.7" + }, + "dependencies": { + "debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": 
"sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "requires": { + "ms": "2.1.2" + } + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + } + } + }, + "util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" + }, + "validator": { + "version": "13.7.0", + "resolved": "https://registry.npmjs.org/validator/-/validator-13.7.0.tgz", + "integrity": "sha512-nYXQLCBkpJ8X6ltALua9dRrZDHVYxjJ1wgskNt1lH9fzGjs3tgojGSCBjmEPwkWS1y29+DrizMTW19Pr9uB2nw==" + }, + "wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" + }, + "yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + } + } +} diff --git a/tools/github/package.json b/tools/github/package.json new file mode 100644 index 0000000..c6d329d --- /dev/null +++ b/tools/github/package.json @@ -0,0 +1,20 @@ +{ + "name": "github-auto-sync", + "version": "1.0.0", + "description": "", + "main": "listen", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "author": "Matija Stepanic", + "license": "MIT", + "dependencies": { + "@octokit/webhooks": "^10.3.0", + "eventsource": "^2.0.2", + "smee-client": "^1.2.3" + }, + "devDependencies": { + "@types/node": "^18.11.0" + }, + "type": "commonjs" +} diff --git a/watch b/watch index b584e06..5062481 100755 --- a/watch +++ b/watch @@ -1,7 +1,7 @@ #!/bin/bash # ------- CONFIGURATION ---------------- -export GITHUB_WEBHOOK_URL="https://smee.io/aLVrM23iI4IPWC5" +export GITHUB_WEBHOOK_URL="https://smee.io/" export GITHUB_WEBHOOK_SECRET="ff-my-github-webhook-secret" # ------- END CONFIGURATION ------------ From 3a7e6a99aad41bb4ea5ac61facf2af301ead0b2e Mon Sep 17 00:00:00 2001 From: Matija Stepanic Date: Fri, 21 Oct 2022 01:37:02 +0200 Subject: [PATCH 08/11] Local changes before sync --- node_modules/.bin/mime | 1 + node_modules/.bin/semver | 1 + node_modules/.bin/smee | 1 + node_modules/.package-lock.json | 599 + node_modules/@octokit/openapi-types/LICENSE | 7 + node_modules/@octokit/openapi-types/README.md | 17 + .../@octokit/openapi-types/package.json | 20 + .../@octokit/openapi-types/types.d.ts | 46693 ++++++++++++++++ node_modules/@octokit/request-error/LICENSE | 21 + node_modules/@octokit/request-error/README.md | 71 + .../@octokit/request-error/dist-node/index.js | 74 + .../request-error/dist-node/index.js.map | 1 + .../@octokit/request-error/dist-src/index.js | 55 + .../@octokit/request-error/dist-src/types.js | 1 + .../request-error/dist-types/index.d.ts | 33 + .../request-error/dist-types/types.d.ts | 9 + .../@octokit/request-error/dist-web/index.js | 59 + .../request-error/dist-web/index.js.map | 1 + .../@octokit/request-error/package.json | 49 + node_modules/@octokit/types/LICENSE | 7 + node_modules/@octokit/types/README.md | 65 + .../@octokit/types/dist-node/index.js | 8 + .../@octokit/types/dist-node/index.js.map | 1 + .../@octokit/types/dist-src/AuthInterface.js | 1 + 
.../types/dist-src/EndpointDefaults.js | 1 + .../types/dist-src/EndpointInterface.js | 1 + .../types/dist-src/EndpointOptions.js | 1 + node_modules/@octokit/types/dist-src/Fetch.js | 1 + .../GetResponseTypeFromEndpointMethod.js | 1 + .../types/dist-src/OctokitResponse.js | 1 + .../@octokit/types/dist-src/RequestError.js | 1 + .../@octokit/types/dist-src/RequestHeaders.js | 1 + .../types/dist-src/RequestInterface.js | 1 + .../@octokit/types/dist-src/RequestMethod.js | 1 + .../@octokit/types/dist-src/RequestOptions.js | 1 + .../types/dist-src/RequestParameters.js | 1 + .../types/dist-src/RequestRequestOptions.js | 1 + .../types/dist-src/ResponseHeaders.js | 1 + node_modules/@octokit/types/dist-src/Route.js | 1 + .../@octokit/types/dist-src/Signal.js | 1 + .../types/dist-src/StrategyInterface.js | 1 + node_modules/@octokit/types/dist-src/Url.js | 1 + .../@octokit/types/dist-src/VERSION.js | 1 + .../types/dist-src/generated/Endpoints.js | 1 + node_modules/@octokit/types/dist-src/index.js | 21 + .../types/dist-types/AuthInterface.d.ts | 31 + .../types/dist-types/EndpointDefaults.d.ts | 21 + .../types/dist-types/EndpointInterface.d.ts | 65 + .../types/dist-types/EndpointOptions.d.ts | 7 + .../@octokit/types/dist-types/Fetch.d.ts | 4 + .../GetResponseTypeFromEndpointMethod.d.ts | 5 + .../types/dist-types/OctokitResponse.d.ts | 17 + .../types/dist-types/RequestError.d.ts | 11 + .../types/dist-types/RequestHeaders.d.ts | 15 + .../types/dist-types/RequestInterface.d.ts | 34 + .../types/dist-types/RequestMethod.d.ts | 4 + .../types/dist-types/RequestOptions.d.ts | 14 + .../types/dist-types/RequestParameters.d.ts | 45 + .../dist-types/RequestRequestOptions.d.ts | 26 + .../types/dist-types/ResponseHeaders.d.ts | 20 + .../@octokit/types/dist-types/Route.d.ts | 4 + .../@octokit/types/dist-types/Signal.d.ts | 6 + .../types/dist-types/StrategyInterface.d.ts | 4 + .../@octokit/types/dist-types/Url.d.ts | 4 + .../@octokit/types/dist-types/VERSION.d.ts | 1 + .../types/dist-types/generated/Endpoints.d.ts | 3499 ++ .../@octokit/types/dist-types/index.d.ts | 21 + node_modules/@octokit/types/dist-web/index.js | 4 + .../@octokit/types/dist-web/index.js.map | 1 + node_modules/@octokit/types/package.json | 54 + .../@octokit/webhooks-methods/LICENSE | 9 + .../@octokit/webhooks-methods/README.md | 193 + .../webhooks-methods/dist-node/index.js | 70 + .../webhooks-methods/dist-node/index.js.map | 1 + .../webhooks-methods/dist-src/index.js | 2 + .../webhooks-methods/dist-src/node/sign.js | 21 + .../webhooks-methods/dist-src/node/verify.js | 21 + .../webhooks-methods/dist-src/types.js | 5 + .../webhooks-methods/dist-src/utils.js | 3 + .../webhooks-methods/dist-src/version.js | 1 + .../@octokit/webhooks-methods/dist-src/web.js | 57 + .../webhooks-methods/dist-types/index.d.ts | 2 + .../dist-types/node/sign.d.ts | 5 + .../dist-types/node/verify.d.ts | 4 + .../webhooks-methods/dist-types/types.d.ts | 9 + .../webhooks-methods/dist-types/utils.d.ts | 1 + .../webhooks-methods/dist-types/version.d.ts | 1 + .../webhooks-methods/dist-types/web.d.ts | 3 + .../webhooks-methods/dist-web/index.js | 68 + .../webhooks-methods/dist-web/index.js.map | 1 + .../@octokit/webhooks-methods/package.json | 47 + .../@octokit/webhooks-types/README.md | 46 + .../@octokit/webhooks-types/package.json | 20 + .../@octokit/webhooks-types/schema.d.ts | 7443 +++ node_modules/@octokit/webhooks/LICENSE.md | 20 + node_modules/@octokit/webhooks/README.md | 731 + .../@octokit/webhooks/dist-node/index.js | 374 + .../@octokit/webhooks/dist-node/index.js.map | 1 
+ .../webhooks/dist-src/createLogger.js | 7 + .../webhooks/dist-src/event-handler/index.js | 20 + .../webhooks/dist-src/event-handler/on.js | 28 + .../dist-src/event-handler/receive.js | 57 + .../dist-src/event-handler/remove-listener.js | 17 + .../event-handler/wrap-error-handler.js | 19 + .../dist-src/generated/webhook-names.js | 255 + .../@octokit/webhooks/dist-src/index.js | 30 + .../middleware/node/get-missing-headers.js | 9 + .../dist-src/middleware/node/get-payload.js | 25 + .../dist-src/middleware/node/index.js | 10 + .../dist-src/middleware/node/middleware.js | 76 + .../node/on-unhandled-request-default.js | 8 + .../dist-src/middleware/node/types.js | 1 + .../@octokit/webhooks/dist-src/sign.js | 5 + .../dist-src/to-normalized-json-string.js | 9 + .../@octokit/webhooks/dist-src/types.js | 1 + .../webhooks/dist-src/verify-and-receive.js | 19 + .../@octokit/webhooks/dist-src/verify.js | 5 + .../webhooks/dist-types/createLogger.d.ts | 7 + .../dist-types/event-handler/index.d.ts | 10 + .../webhooks/dist-types/event-handler/on.d.ts | 4 + .../dist-types/event-handler/receive.d.ts | 2 + .../event-handler/remove-listener.d.ts | 2 + .../event-handler/wrap-error-handler.d.ts | 1 + .../dist-types/generated/webhook-names.d.ts | 1 + .../@octokit/webhooks/dist-types/index.d.ts | 18 + .../middleware/node/get-missing-headers.d.ts | 3 + .../middleware/node/get-payload.d.ts | 4 + .../dist-types/middleware/node/index.d.ts | 3 + .../middleware/node/middleware.d.ts | 6 + .../node/on-unhandled-request-default.d.ts | 4 + .../dist-types/middleware/node/types.d.ts | 9 + .../@octokit/webhooks/dist-types/sign.d.ts | 1 + .../dist-types/to-normalized-json-string.d.ts | 4 + .../@octokit/webhooks/dist-types/types.d.ts | 62 + .../dist-types/verify-and-receive.d.ts | 4 + .../@octokit/webhooks/dist-types/verify.d.ts | 1 + .../@octokit/webhooks/dist-web/index.js | 594 + .../@octokit/webhooks/dist-web/index.js.map | 1 + node_modules/@octokit/webhooks/package.json | 53 + node_modules/@types/node/LICENSE | 21 + node_modules/@types/node/README.md | 16 + node_modules/@types/node/assert.d.ts | 911 + node_modules/@types/node/assert/strict.d.ts | 8 + node_modules/@types/node/async_hooks.d.ts | 501 + node_modules/@types/node/buffer.d.ts | 2258 + node_modules/@types/node/child_process.d.ts | 1369 + node_modules/@types/node/cluster.d.ts | 410 + node_modules/@types/node/console.d.ts | 412 + node_modules/@types/node/constants.d.ts | 18 + node_modules/@types/node/crypto.d.ts | 3964 ++ node_modules/@types/node/dgram.d.ts | 545 + .../@types/node/diagnostics_channel.d.ts | 153 + node_modules/@types/node/dns.d.ts | 659 + node_modules/@types/node/dns/promises.d.ts | 370 + node_modules/@types/node/dom-events.d.ts | 126 + node_modules/@types/node/domain.d.ts | 170 + node_modules/@types/node/events.d.ts | 678 + node_modules/@types/node/fs.d.ts | 3872 ++ node_modules/@types/node/fs/promises.d.ts | 1138 + node_modules/@types/node/globals.d.ts | 300 + node_modules/@types/node/globals.global.d.ts | 1 + node_modules/@types/node/http.d.ts | 1607 + node_modules/@types/node/http2.d.ts | 2134 + node_modules/@types/node/https.d.ts | 541 + node_modules/@types/node/index.d.ts | 133 + node_modules/@types/node/inspector.d.ts | 2741 + node_modules/@types/node/module.d.ts | 114 + node_modules/@types/node/net.d.ts | 869 + node_modules/@types/node/os.d.ts | 466 + node_modules/@types/node/package.json | 232 + node_modules/@types/node/path.d.ts | 191 + node_modules/@types/node/perf_hooks.d.ts | 625 + node_modules/@types/node/process.d.ts | 1482 + 
node_modules/@types/node/punycode.d.ts | 117 + node_modules/@types/node/querystring.d.ts | 131 + node_modules/@types/node/readline.d.ts | 653 + .../@types/node/readline/promises.d.ts | 143 + node_modules/@types/node/repl.d.ts | 424 + node_modules/@types/node/stream.d.ts | 1340 + .../@types/node/stream/consumers.d.ts | 12 + node_modules/@types/node/stream/promises.d.ts | 42 + node_modules/@types/node/stream/web.d.ts | 330 + node_modules/@types/node/string_decoder.d.ts | 67 + node_modules/@types/node/test.d.ts | 314 + node_modules/@types/node/timers.d.ts | 94 + node_modules/@types/node/timers/promises.d.ts | 68 + node_modules/@types/node/tls.d.ts | 1028 + node_modules/@types/node/trace_events.d.ts | 171 + node_modules/@types/node/ts4.8/assert.d.ts | 911 + .../@types/node/ts4.8/assert/strict.d.ts | 8 + .../@types/node/ts4.8/async_hooks.d.ts | 501 + node_modules/@types/node/ts4.8/buffer.d.ts | 2259 + .../@types/node/ts4.8/child_process.d.ts | 1369 + node_modules/@types/node/ts4.8/cluster.d.ts | 410 + node_modules/@types/node/ts4.8/console.d.ts | 412 + node_modules/@types/node/ts4.8/constants.d.ts | 18 + node_modules/@types/node/ts4.8/crypto.d.ts | 3964 ++ node_modules/@types/node/ts4.8/dgram.d.ts | 545 + .../node/ts4.8/diagnostics_channel.d.ts | 153 + node_modules/@types/node/ts4.8/dns.d.ts | 659 + .../@types/node/ts4.8/dns/promises.d.ts | 370 + .../@types/node/ts4.8/dom-events.d.ts | 126 + node_modules/@types/node/ts4.8/domain.d.ts | 170 + node_modules/@types/node/ts4.8/events.d.ts | 678 + node_modules/@types/node/ts4.8/fs.d.ts | 3872 ++ .../@types/node/ts4.8/fs/promises.d.ts | 1138 + node_modules/@types/node/ts4.8/globals.d.ts | 294 + .../@types/node/ts4.8/globals.global.d.ts | 1 + node_modules/@types/node/ts4.8/http.d.ts | 1607 + node_modules/@types/node/ts4.8/http2.d.ts | 2134 + node_modules/@types/node/ts4.8/https.d.ts | 541 + node_modules/@types/node/ts4.8/index.d.ts | 88 + node_modules/@types/node/ts4.8/inspector.d.ts | 2741 + node_modules/@types/node/ts4.8/module.d.ts | 114 + node_modules/@types/node/ts4.8/net.d.ts | 869 + node_modules/@types/node/ts4.8/os.d.ts | 466 + node_modules/@types/node/ts4.8/path.d.ts | 191 + .../@types/node/ts4.8/perf_hooks.d.ts | 625 + node_modules/@types/node/ts4.8/process.d.ts | 1482 + node_modules/@types/node/ts4.8/punycode.d.ts | 117 + .../@types/node/ts4.8/querystring.d.ts | 131 + node_modules/@types/node/ts4.8/readline.d.ts | 653 + .../@types/node/ts4.8/readline/promises.d.ts | 143 + node_modules/@types/node/ts4.8/repl.d.ts | 424 + node_modules/@types/node/ts4.8/stream.d.ts | 1340 + .../@types/node/ts4.8/stream/consumers.d.ts | 12 + .../@types/node/ts4.8/stream/promises.d.ts | 42 + .../@types/node/ts4.8/stream/web.d.ts | 330 + .../@types/node/ts4.8/string_decoder.d.ts | 67 + node_modules/@types/node/ts4.8/test.d.ts | 314 + node_modules/@types/node/ts4.8/timers.d.ts | 94 + .../@types/node/ts4.8/timers/promises.d.ts | 68 + node_modules/@types/node/ts4.8/tls.d.ts | 1028 + .../@types/node/ts4.8/trace_events.d.ts | 171 + node_modules/@types/node/ts4.8/tty.d.ts | 206 + node_modules/@types/node/ts4.8/url.d.ts | 897 + node_modules/@types/node/ts4.8/util.d.ts | 1850 + node_modules/@types/node/ts4.8/v8.d.ts | 396 + node_modules/@types/node/ts4.8/vm.d.ts | 509 + node_modules/@types/node/ts4.8/wasi.d.ts | 158 + .../@types/node/ts4.8/worker_threads.d.ts | 689 + node_modules/@types/node/ts4.8/zlib.d.ts | 517 + node_modules/@types/node/tty.d.ts | 206 + node_modules/@types/node/url.d.ts | 897 + node_modules/@types/node/util.d.ts | 1850 + 
node_modules/@types/node/v8.d.ts | 396 + node_modules/@types/node/vm.d.ts | 509 + node_modules/@types/node/wasi.d.ts | 158 + node_modules/@types/node/worker_threads.d.ts | 689 + node_modules/@types/node/zlib.d.ts | 517 + node_modules/aggregate-error/index.d.ts | 51 + node_modules/aggregate-error/index.js | 47 + node_modules/aggregate-error/license | 9 + node_modules/aggregate-error/package.json | 41 + node_modules/aggregate-error/readme.md | 61 + node_modules/asap/CHANGES.md | 70 + node_modules/asap/LICENSE.md | 21 + node_modules/asap/README.md | 237 + node_modules/asap/asap.js | 65 + node_modules/asap/browser-asap.js | 66 + node_modules/asap/browser-raw.js | 223 + node_modules/asap/package.json | 58 + node_modules/asap/raw.js | 101 + node_modules/asynckit/LICENSE | 21 + node_modules/asynckit/README.md | 233 + node_modules/asynckit/bench.js | 76 + node_modules/asynckit/index.js | 6 + node_modules/asynckit/lib/abort.js | 29 + node_modules/asynckit/lib/async.js | 34 + node_modules/asynckit/lib/defer.js | 26 + node_modules/asynckit/lib/iterate.js | 75 + .../asynckit/lib/readable_asynckit.js | 91 + .../asynckit/lib/readable_parallel.js | 25 + node_modules/asynckit/lib/readable_serial.js | 25 + .../asynckit/lib/readable_serial_ordered.js | 29 + node_modules/asynckit/lib/state.js | 37 + node_modules/asynckit/lib/streamify.js | 141 + node_modules/asynckit/lib/terminator.js | 29 + node_modules/asynckit/package.json | 63 + node_modules/asynckit/parallel.js | 43 + node_modules/asynckit/serial.js | 17 + node_modules/asynckit/serialOrdered.js | 75 + node_modules/asynckit/stream.js | 21 + node_modules/basic-auth/HISTORY.md | 52 + node_modules/basic-auth/LICENSE | 24 + node_modules/basic-auth/README.md | 113 + node_modules/basic-auth/index.js | 133 + node_modules/basic-auth/package.json | 41 + node_modules/call-bind/.eslintignore | 1 + node_modules/call-bind/.eslintrc | 17 + node_modules/call-bind/.github/FUNDING.yml | 12 + node_modules/call-bind/.nycrc | 13 + node_modules/call-bind/CHANGELOG.md | 42 + node_modules/call-bind/LICENSE | 21 + node_modules/call-bind/README.md | 2 + node_modules/call-bind/callBound.js | 15 + node_modules/call-bind/index.js | 47 + node_modules/call-bind/package.json | 80 + node_modules/call-bind/test/callBound.js | 55 + node_modules/call-bind/test/index.js | 66 + node_modules/clean-stack/index.d.ts | 47 + node_modules/clean-stack/index.js | 40 + node_modules/clean-stack/license | 9 + node_modules/clean-stack/package.json | 39 + node_modules/clean-stack/readme.md | 76 + node_modules/combined-stream/License | 19 + node_modules/combined-stream/Readme.md | 138 + .../combined-stream/lib/combined_stream.js | 208 + node_modules/combined-stream/package.json | 25 + node_modules/combined-stream/yarn.lock | 17 + node_modules/commander/CHANGELOG.md | 419 + node_modules/commander/LICENSE | 22 + node_modules/commander/Readme.md | 428 + node_modules/commander/index.js | 1224 + node_modules/commander/package.json | 38 + node_modules/commander/typings/index.d.ts | 310 + node_modules/component-emitter/History.md | 75 + node_modules/component-emitter/LICENSE | 24 + node_modules/component-emitter/Readme.md | 74 + node_modules/component-emitter/index.js | 175 + node_modules/component-emitter/package.json | 27 + node_modules/cookiejar/LICENSE | 9 + node_modules/cookiejar/cookiejar.js | 276 + node_modules/cookiejar/package.json | 26 + node_modules/cookiejar/readme.md | 60 + node_modules/debug/.coveralls.yml | 1 + node_modules/debug/.eslintrc | 11 + node_modules/debug/.npmignore | 9 + 
node_modules/debug/.travis.yml | 14 + node_modules/debug/CHANGELOG.md | 362 + node_modules/debug/LICENSE | 19 + node_modules/debug/Makefile | 50 + node_modules/debug/README.md | 312 + node_modules/debug/component.json | 19 + node_modules/debug/karma.conf.js | 70 + node_modules/debug/node.js | 1 + node_modules/debug/package.json | 49 + node_modules/debug/src/browser.js | 185 + node_modules/debug/src/debug.js | 202 + node_modules/debug/src/index.js | 10 + node_modules/debug/src/inspector-log.js | 15 + node_modules/debug/src/node.js | 248 + node_modules/delayed-stream/.npmignore | 1 + node_modules/delayed-stream/License | 19 + node_modules/delayed-stream/Makefile | 7 + node_modules/delayed-stream/Readme.md | 141 + .../delayed-stream/lib/delayed_stream.js | 107 + node_modules/delayed-stream/package.json | 27 + node_modules/depd/History.md | 103 + node_modules/depd/LICENSE | 22 + node_modules/depd/Readme.md | 280 + node_modules/depd/index.js | 538 + node_modules/depd/lib/browser/index.js | 77 + node_modules/depd/package.json | 45 + node_modules/deprecation/LICENSE | 15 + node_modules/deprecation/README.md | 77 + node_modules/deprecation/dist-node/index.js | 20 + node_modules/deprecation/dist-src/index.js | 14 + .../deprecation/dist-types/index.d.ts | 3 + node_modules/deprecation/dist-web/index.js | 16 + node_modules/deprecation/package.json | 34 + node_modules/dezalgo/.travis.yml | 7 + node_modules/dezalgo/LICENSE | 15 + node_modules/dezalgo/README.md | 29 + node_modules/dezalgo/dezalgo.js | 22 + node_modules/dezalgo/package.json | 42 + node_modules/dezalgo/test/basic.js | 29 + node_modules/ee-first/LICENSE | 22 + node_modules/ee-first/README.md | 80 + node_modules/ee-first/index.js | 95 + node_modules/ee-first/package.json | 29 + node_modules/eventsource/.editorconfig | 27 + node_modules/eventsource/CONTRIBUTING.md | 13 + node_modules/eventsource/HISTORY.md | 161 + node_modules/eventsource/LICENSE | 22 + node_modules/eventsource/README.md | 91 + .../example/eventsource-polyfill.js | 9736 ++++ node_modules/eventsource/example/index.html | 40 + .../eventsource/example/sse-client.js | 5 + .../eventsource/example/sse-server.js | 29 + .../eventsource/lib/eventsource-polyfill.js | 9 + node_modules/eventsource/lib/eventsource.js | 495 + node_modules/eventsource/package.json | 60 + node_modules/fast-safe-stringify/.travis.yml | 8 + node_modules/fast-safe-stringify/CHANGELOG.md | 17 + node_modules/fast-safe-stringify/LICENSE | 23 + node_modules/fast-safe-stringify/benchmark.js | 137 + node_modules/fast-safe-stringify/index.d.ts | 23 + node_modules/fast-safe-stringify/index.js | 229 + node_modules/fast-safe-stringify/package.json | 46 + node_modules/fast-safe-stringify/readme.md | 170 + .../fast-safe-stringify/test-stable.js | 404 + node_modules/fast-safe-stringify/test.js | 397 + node_modules/form-data/License | 19 + node_modules/form-data/README.md.bak | 358 + node_modules/form-data/Readme.md | 358 + node_modules/form-data/index.d.ts | 62 + node_modules/form-data/lib/browser.js | 2 + node_modules/form-data/lib/form_data.js | 501 + node_modules/form-data/lib/populate.js | 10 + node_modules/form-data/package.json | 68 + node_modules/formidable/LICENSE | 21 + node_modules/formidable/README.md | 826 + .../formidable/node_modules/qs/.editorconfig | 33 + .../formidable/node_modules/qs/.eslintignore | 1 + .../formidable/node_modules/qs/.eslintrc | 21 + .../node_modules/qs/.github/FUNDING.yml | 12 + .../qs/.github/workflows/rebase.yml | 15 + .../formidable/node_modules/qs/CHANGELOG.md | 339 + 
.../formidable/node_modules/qs/LICENSE.md | 29 + .../formidable/node_modules/qs/README.md | 604 + .../formidable/node_modules/qs/dist/qs.js | 832 + .../formidable/node_modules/qs/lib/formats.js | 26 + .../formidable/node_modules/qs/lib/index.js | 11 + .../formidable/node_modules/qs/lib/parse.js | 268 + .../node_modules/qs/lib/stringify.js | 279 + .../formidable/node_modules/qs/lib/utils.js | 236 + .../formidable/node_modules/qs/package.json | 69 + .../formidable/node_modules/qs/test/.eslintrc | 18 + .../formidable/node_modules/qs/test/index.js | 7 + .../formidable/node_modules/qs/test/parse.js | 772 + .../node_modules/qs/test/stringify.js | 738 + .../formidable/node_modules/qs/test/utils.js | 136 + node_modules/formidable/package.json | 98 + node_modules/formidable/src/Formidable.js | 617 + .../formidable/src/FormidableError.js | 45 + node_modules/formidable/src/PersistentFile.js | 87 + node_modules/formidable/src/VolatileFile.js | 82 + node_modules/formidable/src/index.js | 38 + node_modules/formidable/src/parsers/Dummy.js | 21 + node_modules/formidable/src/parsers/JSON.js | 35 + .../formidable/src/parsers/Multipart.js | 347 + .../formidable/src/parsers/OctetStream.js | 12 + .../formidable/src/parsers/Querystring.js | 38 + .../src/parsers/StreamingQuerystring.js | 121 + node_modules/formidable/src/parsers/index.js | 17 + node_modules/formidable/src/plugins/index.js | 13 + node_modules/formidable/src/plugins/json.js | 38 + .../formidable/src/plugins/multipart.js | 173 + .../formidable/src/plugins/octetstream.js | 86 + .../formidable/src/plugins/querystring.js | 42 + node_modules/function-bind/.editorconfig | 20 + node_modules/function-bind/.eslintrc | 15 + node_modules/function-bind/.jscs.json | 176 + node_modules/function-bind/.npmignore | 22 + node_modules/function-bind/.travis.yml | 168 + node_modules/function-bind/LICENSE | 20 + node_modules/function-bind/README.md | 48 + node_modules/function-bind/implementation.js | 52 + node_modules/function-bind/index.js | 5 + node_modules/function-bind/package.json | 63 + node_modules/function-bind/test/.eslintrc | 9 + node_modules/function-bind/test/index.js | 252 + node_modules/get-intrinsic/.eslintrc | 37 + .../get-intrinsic/.github/FUNDING.yml | 12 + node_modules/get-intrinsic/.nycrc | 9 + node_modules/get-intrinsic/CHANGELOG.md | 98 + node_modules/get-intrinsic/LICENSE | 21 + node_modules/get-intrinsic/README.md | 71 + node_modules/get-intrinsic/index.js | 334 + node_modules/get-intrinsic/package.json | 91 + .../get-intrinsic/test/GetIntrinsic.js | 274 + node_modules/github-auto-sync | 1 + node_modules/has-symbols/.eslintrc | 11 + node_modules/has-symbols/.github/FUNDING.yml | 12 + node_modules/has-symbols/.nycrc | 9 + node_modules/has-symbols/CHANGELOG.md | 75 + node_modules/has-symbols/LICENSE | 21 + node_modules/has-symbols/README.md | 46 + node_modules/has-symbols/index.js | 13 + node_modules/has-symbols/package.json | 101 + node_modules/has-symbols/shams.js | 42 + node_modules/has-symbols/test/index.js | 22 + .../has-symbols/test/shams/core-js.js | 28 + .../test/shams/get-own-property-symbols.js | 28 + node_modules/has-symbols/test/tests.js | 56 + node_modules/has/LICENSE-MIT | 22 + node_modules/has/README.md | 18 + node_modules/has/package.json | 48 + node_modules/has/src/index.js | 5 + node_modules/has/test/index.js | 10 + node_modules/hexoid/dist/index.js | 15 + node_modules/hexoid/dist/index.min.js | 1 + node_modules/hexoid/dist/index.mjs | 15 + node_modules/hexoid/hexoid.d.ts | 1 + node_modules/hexoid/license | 9 + 
node_modules/hexoid/package.json | 41 + node_modules/hexoid/readme.md | 119 + node_modules/indent-string/index.d.ts | 42 + node_modules/indent-string/index.js | 35 + node_modules/indent-string/license | 9 + node_modules/indent-string/package.json | 37 + node_modules/indent-string/readme.md | 70 + node_modules/inherits/LICENSE | 16 + node_modules/inherits/README.md | 42 + node_modules/inherits/inherits.js | 9 + node_modules/inherits/inherits_browser.js | 27 + node_modules/inherits/package.json | 29 + node_modules/lru-cache/LICENSE | 15 + node_modules/lru-cache/README.md | 166 + node_modules/lru-cache/index.js | 334 + node_modules/lru-cache/package.json | 34 + node_modules/methods/HISTORY.md | 29 + node_modules/methods/LICENSE | 24 + node_modules/methods/README.md | 51 + node_modules/methods/index.js | 69 + node_modules/methods/package.json | 36 + node_modules/mime-db/HISTORY.md | 507 + node_modules/mime-db/LICENSE | 23 + node_modules/mime-db/README.md | 100 + node_modules/mime-db/db.json | 8519 +++ node_modules/mime-db/index.js | 12 + node_modules/mime-db/package.json | 60 + node_modules/mime-types/HISTORY.md | 397 + node_modules/mime-types/LICENSE | 23 + node_modules/mime-types/README.md | 113 + node_modules/mime-types/index.js | 188 + node_modules/mime-types/package.json | 44 + node_modules/mime/CHANGELOG.md | 296 + node_modules/mime/LICENSE | 21 + node_modules/mime/Mime.js | 97 + node_modules/mime/README.md | 187 + node_modules/mime/cli.js | 46 + node_modules/mime/index.js | 4 + node_modules/mime/lite.js | 4 + node_modules/mime/package.json | 52 + node_modules/mime/types/other.js | 1 + node_modules/mime/types/standard.js | 1 + node_modules/morgan/HISTORY.md | 215 + node_modules/morgan/LICENSE | 23 + node_modules/morgan/README.md | 417 + node_modules/morgan/index.js | 544 + node_modules/morgan/package.json | 52 + node_modules/ms/index.js | 152 + node_modules/ms/license.md | 21 + node_modules/ms/package.json | 37 + node_modules/ms/readme.md | 51 + node_modules/object-inspect/.eslintrc | 53 + .../object-inspect/.github/FUNDING.yml | 12 + node_modules/object-inspect/.nycrc | 13 + node_modules/object-inspect/CHANGELOG.md | 360 + node_modules/object-inspect/LICENSE | 21 + node_modules/object-inspect/example/all.js | 23 + .../object-inspect/example/circular.js | 6 + node_modules/object-inspect/example/fn.js | 5 + .../object-inspect/example/inspect.js | 10 + node_modules/object-inspect/index.js | 512 + .../object-inspect/package-support.json | 20 + node_modules/object-inspect/package.json | 94 + node_modules/object-inspect/readme.markdown | 86 + node_modules/object-inspect/test-core-js.js | 26 + node_modules/object-inspect/test/bigint.js | 58 + .../object-inspect/test/browser/dom.js | 15 + node_modules/object-inspect/test/circular.js | 16 + node_modules/object-inspect/test/deep.js | 12 + node_modules/object-inspect/test/element.js | 53 + node_modules/object-inspect/test/err.js | 48 + node_modules/object-inspect/test/fakes.js | 29 + node_modules/object-inspect/test/fn.js | 76 + node_modules/object-inspect/test/has.js | 15 + node_modules/object-inspect/test/holes.js | 15 + .../object-inspect/test/indent-option.js | 271 + node_modules/object-inspect/test/inspect.js | 139 + node_modules/object-inspect/test/lowbyte.js | 12 + node_modules/object-inspect/test/number.js | 58 + .../object-inspect/test/quoteStyle.js | 17 + .../object-inspect/test/toStringTag.js | 40 + node_modules/object-inspect/test/undef.js | 12 + node_modules/object-inspect/test/values.js | 211 + 
node_modules/object-inspect/util.inspect.js | 1 + node_modules/on-finished/HISTORY.md | 88 + node_modules/on-finished/LICENSE | 23 + node_modules/on-finished/README.md | 154 + node_modules/on-finished/index.js | 196 + node_modules/on-finished/package.json | 31 + node_modules/on-headers/HISTORY.md | 21 + node_modules/on-headers/LICENSE | 22 + node_modules/on-headers/README.md | 81 + node_modules/on-headers/index.js | 132 + node_modules/on-headers/package.json | 42 + node_modules/once/LICENSE | 15 + node_modules/once/README.md | 79 + node_modules/once/once.js | 42 + node_modules/once/package.json | 33 + node_modules/prefix/.npmignore | 3 + node_modules/prefix/LICENSE | 23 + node_modules/prefix/Makefile | 10 + node_modules/prefix/README.md | 39 + node_modules/prefix/component.json | 9 + node_modules/prefix/index.js | 69 + node_modules/prefix/package.json | 15 + node_modules/prefix/test/index.html | 23 + node_modules/prefix/test/prefix.spec.js | 59 + node_modules/qs/.editorconfig | 43 + node_modules/qs/.eslintrc | 38 + node_modules/qs/.github/FUNDING.yml | 12 + node_modules/qs/.nycrc | 13 + node_modules/qs/CHANGELOG.md | 546 + node_modules/qs/LICENSE.md | 29 + node_modules/qs/README.md | 625 + node_modules/qs/dist/qs.js | 2054 + node_modules/qs/lib/formats.js | 23 + node_modules/qs/lib/index.js | 11 + node_modules/qs/lib/parse.js | 263 + node_modules/qs/lib/stringify.js | 326 + node_modules/qs/lib/utils.js | 252 + node_modules/qs/package.json | 77 + node_modules/qs/test/parse.js | 855 + node_modules/qs/test/stringify.js | 909 + node_modules/qs/test/utils.js | 136 + node_modules/readable-stream/CONTRIBUTING.md | 38 + node_modules/readable-stream/GOVERNANCE.md | 136 + node_modules/readable-stream/LICENSE | 47 + node_modules/readable-stream/README.md | 106 + .../readable-stream/errors-browser.js | 127 + node_modules/readable-stream/errors.js | 116 + .../readable-stream/experimentalWarning.js | 17 + .../readable-stream/lib/_stream_duplex.js | 139 + .../lib/_stream_passthrough.js | 39 + .../readable-stream/lib/_stream_readable.js | 1124 + .../readable-stream/lib/_stream_transform.js | 201 + .../readable-stream/lib/_stream_writable.js | 697 + .../lib/internal/streams/async_iterator.js | 207 + .../lib/internal/streams/buffer_list.js | 210 + .../lib/internal/streams/destroy.js | 105 + .../lib/internal/streams/end-of-stream.js | 104 + .../lib/internal/streams/from-browser.js | 3 + .../lib/internal/streams/from.js | 64 + .../lib/internal/streams/pipeline.js | 97 + .../lib/internal/streams/state.js | 27 + .../lib/internal/streams/stream-browser.js | 1 + .../lib/internal/streams/stream.js | 1 + node_modules/readable-stream/package.json | 68 + .../readable-stream/readable-browser.js | 9 + node_modules/readable-stream/readable.js | 16 + node_modules/safe-buffer/LICENSE | 21 + node_modules/safe-buffer/README.md | 584 + node_modules/safe-buffer/index.d.ts | 187 + node_modules/safe-buffer/index.js | 62 + node_modules/safe-buffer/package.json | 37 + node_modules/semver/LICENSE | 15 + node_modules/semver/README.md | 568 + node_modules/semver/bin/semver.js | 183 + node_modules/semver/classes/comparator.js | 136 + node_modules/semver/classes/index.js | 5 + node_modules/semver/classes/range.js | 522 + node_modules/semver/classes/semver.js | 287 + node_modules/semver/functions/clean.js | 6 + node_modules/semver/functions/cmp.js | 52 + node_modules/semver/functions/coerce.js | 52 + .../semver/functions/compare-build.js | 7 + .../semver/functions/compare-loose.js | 3 + node_modules/semver/functions/compare.js | 5 + 
node_modules/semver/functions/diff.js | 23 + node_modules/semver/functions/eq.js | 3 + node_modules/semver/functions/gt.js | 3 + node_modules/semver/functions/gte.js | 3 + node_modules/semver/functions/inc.js | 18 + node_modules/semver/functions/lt.js | 3 + node_modules/semver/functions/lte.js | 3 + node_modules/semver/functions/major.js | 3 + node_modules/semver/functions/minor.js | 3 + node_modules/semver/functions/neq.js | 3 + node_modules/semver/functions/parse.js | 33 + node_modules/semver/functions/patch.js | 3 + node_modules/semver/functions/prerelease.js | 6 + node_modules/semver/functions/rcompare.js | 3 + node_modules/semver/functions/rsort.js | 3 + node_modules/semver/functions/satisfies.js | 10 + node_modules/semver/functions/sort.js | 3 + node_modules/semver/functions/valid.js | 6 + node_modules/semver/index.js | 88 + node_modules/semver/internal/constants.js | 17 + node_modules/semver/internal/debug.js | 9 + node_modules/semver/internal/identifiers.js | 23 + node_modules/semver/internal/parse-options.js | 11 + node_modules/semver/internal/re.js | 182 + node_modules/semver/package.json | 86 + node_modules/semver/preload.js | 2 + node_modules/semver/range.bnf | 16 + node_modules/semver/ranges/gtr.js | 4 + node_modules/semver/ranges/intersects.js | 7 + node_modules/semver/ranges/ltr.js | 4 + node_modules/semver/ranges/max-satisfying.js | 25 + node_modules/semver/ranges/min-satisfying.js | 24 + node_modules/semver/ranges/min-version.js | 61 + node_modules/semver/ranges/outside.js | 80 + node_modules/semver/ranges/simplify.js | 47 + node_modules/semver/ranges/subset.js | 244 + node_modules/semver/ranges/to-comparators.js | 8 + node_modules/semver/ranges/valid.js | 11 + node_modules/side-channel/.eslintignore | 1 + node_modules/side-channel/.eslintrc | 11 + node_modules/side-channel/.github/FUNDING.yml | 12 + node_modules/side-channel/.nycrc | 13 + node_modules/side-channel/CHANGELOG.md | 65 + node_modules/side-channel/LICENSE | 21 + node_modules/side-channel/README.md | 2 + node_modules/side-channel/index.js | 124 + node_modules/side-channel/package.json | 67 + node_modules/side-channel/test/index.js | 78 + node_modules/smee-client/LICENSE | 15 + node_modules/smee-client/README.md | 42 + node_modules/smee-client/bin/smee.js | 35 + node_modules/smee-client/index.d.ts | 20 + node_modules/smee-client/index.js | 62 + .../node_modules/eventsource/.editorconfig | 27 + .../node_modules/eventsource/.travis.yml | 10 + .../node_modules/eventsource/CONTRIBUTING.md | 13 + .../node_modules/eventsource/HISTORY.md | 151 + .../node_modules/eventsource/LICENSE | 22 + .../node_modules/eventsource/README.md | 89 + .../example/eventsource-polyfill.js | 9726 ++++ .../eventsource/example/index.html | 40 + .../eventsource/example/sse-client.js | 5 + .../eventsource/example/sse-server.js | 29 + .../eventsource/lib/eventsource-polyfill.js | 9 + .../eventsource/lib/eventsource.js | 485 + .../node_modules/eventsource/package.json | 56 + node_modules/smee-client/package.json | 54 + node_modules/string_decoder/LICENSE | 48 + node_modules/string_decoder/README.md | 47 + .../string_decoder/lib/string_decoder.js | 296 + .../node_modules/safe-buffer/LICENSE | 21 + .../node_modules/safe-buffer/README.md | 584 + .../node_modules/safe-buffer/index.d.ts | 187 + .../node_modules/safe-buffer/index.js | 65 + .../node_modules/safe-buffer/package.json | 51 + node_modules/string_decoder/package.json | 34 + node_modules/superagent/HISTORY.md | 692 + node_modules/superagent/LICENSE | 22 + node_modules/superagent/README.md | 
274 + node_modules/superagent/dist/superagent.js | 3975 ++ .../superagent/dist/superagent.min.js | 1 + node_modules/superagent/lib/agent-base.js | 60 + node_modules/superagent/lib/client.js | 1052 + node_modules/superagent/lib/node/agent.js | 134 + .../superagent/lib/node/http2wrapper.js | 220 + node_modules/superagent/lib/node/index.js | 1433 + .../superagent/lib/node/parsers/image.js | 13 + .../superagent/lib/node/parsers/index.js | 12 + .../superagent/lib/node/parsers/json.js | 26 + .../superagent/lib/node/parsers/text.js | 11 + .../superagent/lib/node/parsers/urlencoded.js | 22 + node_modules/superagent/lib/node/response.js | 137 + node_modules/superagent/lib/node/unzip.js | 72 + node_modules/superagent/lib/request-base.js | 793 + node_modules/superagent/lib/response-base.js | 113 + node_modules/superagent/lib/utils.js | 142 + .../superagent/node_modules/debug/LICENSE | 20 + .../superagent/node_modules/debug/README.md | 481 + .../node_modules/debug/package.json | 59 + .../node_modules/debug/src/browser.js | 269 + .../node_modules/debug/src/common.js | 274 + .../node_modules/debug/src/index.js | 10 + .../superagent/node_modules/debug/src/node.js | 263 + .../superagent/node_modules/ms/index.js | 162 + .../superagent/node_modules/ms/license.md | 21 + .../superagent/node_modules/ms/package.json | 37 + .../superagent/node_modules/ms/readme.md | 60 + node_modules/superagent/package.json | 136 + node_modules/util-deprecate/History.md | 16 + node_modules/util-deprecate/LICENSE | 24 + node_modules/util-deprecate/README.md | 53 + node_modules/util-deprecate/browser.js | 67 + node_modules/util-deprecate/node.js | 6 + node_modules/util-deprecate/package.json | 27 + node_modules/validator/LICENSE | 20 + node_modules/validator/README.md | 274 + node_modules/validator/es/index.js | 198 + node_modules/validator/es/lib/alpha.js | 123 + node_modules/validator/es/lib/blacklist.js | 5 + node_modules/validator/es/lib/contains.js | 17 + node_modules/validator/es/lib/equals.js | 5 + node_modules/validator/es/lib/escape.js | 5 + node_modules/validator/es/lib/isAfter.js | 9 + node_modules/validator/es/lib/isAlpha.js | 26 + .../validator/es/lib/isAlphanumeric.js | 26 + node_modules/validator/es/lib/isAscii.js | 10 + node_modules/validator/es/lib/isBIC.js | 14 + node_modules/validator/es/lib/isBase32.js | 12 + node_modules/validator/es/lib/isBase58.js | 12 + node_modules/validator/es/lib/isBase64.js | 23 + node_modules/validator/es/lib/isBefore.js | 9 + node_modules/validator/es/lib/isBoolean.js | 16 + node_modules/validator/es/lib/isBtcAddress.js | 13 + node_modules/validator/es/lib/isByteLength.js | 22 + node_modules/validator/es/lib/isCreditCard.js | 40 + node_modules/validator/es/lib/isCurrency.js | 77 + node_modules/validator/es/lib/isDataURI.js | 39 + node_modules/validator/es/lib/isDate.js | 86 + node_modules/validator/es/lib/isDecimal.js | 26 + .../validator/es/lib/isDivisibleBy.js | 6 + node_modules/validator/es/lib/isEAN.js | 72 + node_modules/validator/es/lib/isEmail.js | 180 + node_modules/validator/es/lib/isEmpty.js | 10 + .../validator/es/lib/isEthereumAddress.js | 6 + node_modules/validator/es/lib/isFQDN.js | 74 + node_modules/validator/es/lib/isFloat.js | 16 + node_modules/validator/es/lib/isFullWidth.js | 6 + node_modules/validator/es/lib/isHSL.js | 14 + node_modules/validator/es/lib/isHalfWidth.js | 6 + node_modules/validator/es/lib/isHash.js | 21 + node_modules/validator/es/lib/isHexColor.js | 6 + .../validator/es/lib/isHexadecimal.js | 6 + node_modules/validator/es/lib/isIBAN.js | 138 + 
node_modules/validator/es/lib/isIMEI.js | 47 + node_modules/validator/es/lib/isIP.js | 62 + node_modules/validator/es/lib/isIPRange.js | 47 + node_modules/validator/es/lib/isISBN.js | 51 + node_modules/validator/es/lib/isISIN.js | 60 + .../validator/es/lib/isISO31661Alpha2.js | 8 + .../validator/es/lib/isISO31661Alpha3.js | 7 + node_modules/validator/es/lib/isISO4217.js | 8 + node_modules/validator/es/lib/isISO8601.js | 47 + node_modules/validator/es/lib/isISRC.js | 7 + node_modules/validator/es/lib/isISSN.js | 23 + .../validator/es/lib/isIdentityCard.js | 369 + node_modules/validator/es/lib/isIn.js | 28 + node_modules/validator/es/lib/isInt.js | 16 + node_modules/validator/es/lib/isJSON.js | 26 + node_modules/validator/es/lib/isJWT.js | 17 + node_modules/validator/es/lib/isLatLong.js | 22 + node_modules/validator/es/lib/isLength.js | 23 + .../validator/es/lib/isLicensePlate.js | 44 + node_modules/validator/es/lib/isLocale.js | 11 + node_modules/validator/es/lib/isLowercase.js | 5 + node_modules/validator/es/lib/isMACAddress.js | 16 + node_modules/validator/es/lib/isMD5.js | 6 + node_modules/validator/es/lib/isMagnetURI.js | 6 + node_modules/validator/es/lib/isMimeType.js | 39 + .../validator/es/lib/isMobilePhone.js | 187 + node_modules/validator/es/lib/isMongoId.js | 6 + node_modules/validator/es/lib/isMultibyte.js | 10 + node_modules/validator/es/lib/isNumeric.js | 12 + node_modules/validator/es/lib/isOctal.js | 6 + .../validator/es/lib/isPassportNumber.js | 126 + node_modules/validator/es/lib/isPort.js | 7 + node_modules/validator/es/lib/isPostalCode.js | 96 + node_modules/validator/es/lib/isRFC3339.js | 20 + node_modules/validator/es/lib/isRgbColor.js | 15 + node_modules/validator/es/lib/isSemVer.js | 14 + node_modules/validator/es/lib/isSlug.js | 6 + .../validator/es/lib/isStrongPassword.js | 101 + .../validator/es/lib/isSurrogatePair.js | 6 + node_modules/validator/es/lib/isTaxID.js | 1508 + node_modules/validator/es/lib/isURL.js | 193 + node_modules/validator/es/lib/isUUID.js | 14 + node_modules/validator/es/lib/isUppercase.js | 5 + node_modules/validator/es/lib/isVAT.js | 16 + .../validator/es/lib/isVariableWidth.js | 7 + .../validator/es/lib/isWhitelisted.js | 12 + node_modules/validator/es/lib/ltrim.js | 7 + node_modules/validator/es/lib/matches.js | 10 + .../validator/es/lib/normalizeEmail.js | 138 + node_modules/validator/es/lib/rtrim.js | 19 + node_modules/validator/es/lib/stripLow.js | 7 + node_modules/validator/es/lib/toBoolean.js | 10 + node_modules/validator/es/lib/toDate.js | 6 + node_modules/validator/es/lib/toFloat.js | 5 + node_modules/validator/es/lib/toInt.js | 5 + node_modules/validator/es/lib/trim.js | 5 + node_modules/validator/es/lib/unescape.js | 7 + .../validator/es/lib/util/algorithms.js | 88 + .../validator/es/lib/util/assertString.js | 12 + .../validator/es/lib/util/includes.js | 7 + node_modules/validator/es/lib/util/merge.js | 12 + .../validator/es/lib/util/multilineRegex.js | 12 + .../validator/es/lib/util/toString.js | 15 + node_modules/validator/es/lib/util/typeOf.js | 10 + node_modules/validator/es/lib/whitelist.js | 5 + node_modules/validator/index.js | 309 + node_modules/validator/lib/alpha.js | 137 + node_modules/validator/lib/blacklist.js | 18 + node_modules/validator/lib/contains.js | 33 + node_modules/validator/lib/equals.js | 18 + node_modules/validator/lib/escape.js | 18 + node_modules/validator/lib/isAfter.js | 23 + node_modules/validator/lib/isAlpha.js | 40 + node_modules/validator/lib/isAlphanumeric.js | 40 + 
node_modules/validator/lib/isAscii.js | 22 + node_modules/validator/lib/isBIC.js | 29 + node_modules/validator/lib/isBase32.js | 26 + node_modules/validator/lib/isBase58.js | 26 + node_modules/validator/lib/isBase64.js | 38 + node_modules/validator/lib/isBefore.js | 23 + node_modules/validator/lib/isBoolean.js | 30 + node_modules/validator/lib/isBtcAddress.js | 27 + node_modules/validator/lib/isByteLength.js | 34 + node_modules/validator/lib/isCreditCard.js | 52 + node_modules/validator/lib/isCurrency.js | 91 + node_modules/validator/lib/isDataURI.js | 53 + node_modules/validator/lib/isDate.js | 99 + node_modules/validator/lib/isDecimal.js | 42 + node_modules/validator/lib/isDivisibleBy.js | 20 + node_modules/validator/lib/isEAN.js | 85 + node_modules/validator/lib/isEmail.js | 197 + node_modules/validator/lib/isEmpty.js | 25 + .../validator/lib/isEthereumAddress.js | 20 + node_modules/validator/lib/isFQDN.js | 89 + node_modules/validator/lib/isFloat.js | 29 + node_modules/validator/lib/isFullWidth.js | 19 + node_modules/validator/lib/isHSL.js | 28 + node_modules/validator/lib/isHalfWidth.js | 19 + node_modules/validator/lib/isHash.js | 35 + node_modules/validator/lib/isHexColor.js | 20 + node_modules/validator/lib/isHexadecimal.js | 20 + node_modules/validator/lib/isIBAN.js | 150 + node_modules/validator/lib/isIMEI.js | 61 + node_modules/validator/lib/isIP.js | 75 + node_modules/validator/lib/isIPRange.js | 62 + node_modules/validator/lib/isISBN.js | 65 + node_modules/validator/lib/isISIN.js | 73 + .../validator/lib/isISO31661Alpha2.js | 22 + .../validator/lib/isISO31661Alpha3.js | 21 + node_modules/validator/lib/isISO4217.js | 22 + node_modules/validator/lib/isISO8601.js | 59 + node_modules/validator/lib/isISRC.js | 21 + node_modules/validator/lib/isISSN.js | 37 + node_modules/validator/lib/isIdentityCard.js | 384 + node_modules/validator/lib/isIn.js | 42 + node_modules/validator/lib/isInt.js | 30 + node_modules/validator/lib/isJSON.js | 41 + node_modules/validator/lib/isJWT.js | 31 + node_modules/validator/lib/isLatLong.js | 37 + node_modules/validator/lib/isLength.js | 35 + node_modules/validator/lib/isLicensePlate.js | 58 + node_modules/validator/lib/isLocale.js | 25 + node_modules/validator/lib/isLowercase.js | 18 + node_modules/validator/lib/isMACAddress.js | 30 + node_modules/validator/lib/isMD5.js | 20 + node_modules/validator/lib/isMagnetURI.js | 20 + node_modules/validator/lib/isMimeType.js | 51 + node_modules/validator/lib/isMobilePhone.js | 200 + node_modules/validator/lib/isMongoId.js | 20 + node_modules/validator/lib/isMultibyte.js | 22 + node_modules/validator/lib/isNumeric.js | 27 + node_modules/validator/lib/isOctal.js | 20 + .../validator/lib/isPassportNumber.js | 138 + node_modules/validator/lib/isPort.js | 20 + node_modules/validator/lib/isPostalCode.js | 109 + node_modules/validator/lib/isRFC3339.js | 33 + node_modules/validator/lib/isRgbColor.js | 29 + node_modules/validator/lib/isSemVer.js | 28 + node_modules/validator/lib/isSlug.js | 20 + .../validator/lib/isStrongPassword.js | 115 + node_modules/validator/lib/isSurrogatePair.js | 20 + node_modules/validator/lib/isTaxID.js | 1528 + node_modules/validator/lib/isURL.js | 208 + node_modules/validator/lib/isUUID.js | 28 + node_modules/validator/lib/isUppercase.js | 18 + node_modules/validator/lib/isVAT.js | 29 + node_modules/validator/lib/isVariableWidth.js | 22 + node_modules/validator/lib/isWhitelisted.js | 25 + node_modules/validator/lib/ltrim.js | 20 + node_modules/validator/lib/matches.js | 23 + 
node_modules/validator/lib/normalizeEmail.js | 151 + node_modules/validator/lib/rtrim.js | 32 + node_modules/validator/lib/stripLow.js | 21 + node_modules/validator/lib/toBoolean.js | 23 + node_modules/validator/lib/toDate.js | 19 + node_modules/validator/lib/toFloat.js | 18 + node_modules/validator/lib/toInt.js | 18 + node_modules/validator/lib/trim.js | 19 + node_modules/validator/lib/unescape.js | 20 + node_modules/validator/lib/util/algorithms.js | 101 + .../validator/lib/util/assertString.js | 22 + node_modules/validator/lib/util/includes.js | 17 + node_modules/validator/lib/util/merge.js | 22 + .../validator/lib/util/multilineRegex.js | 22 + node_modules/validator/lib/util/toString.js | 25 + node_modules/validator/lib/util/typeOf.js | 20 + node_modules/validator/lib/whitelist.js | 18 + node_modules/validator/package.json | 75 + node_modules/validator/validator.js | 5094 ++ node_modules/validator/validator.min.js | 23 + node_modules/wrappy/LICENSE | 15 + node_modules/wrappy/README.md | 36 + node_modules/wrappy/package.json | 29 + node_modules/wrappy/wrappy.js | 33 + node_modules/yallist/LICENSE | 15 + node_modules/yallist/README.md | 204 + node_modules/yallist/iterator.js | 8 + node_modules/yallist/package.json | 29 + node_modules/yallist/yallist.js | 426 + package-lock.json | 1055 + package.json | 6 + watch | 2 +- 988 files changed, 247249 insertions(+), 1 deletion(-) create mode 120000 node_modules/.bin/mime create mode 120000 node_modules/.bin/semver create mode 120000 node_modules/.bin/smee create mode 100644 node_modules/.package-lock.json create mode 100644 node_modules/@octokit/openapi-types/LICENSE create mode 100644 node_modules/@octokit/openapi-types/README.md create mode 100644 node_modules/@octokit/openapi-types/package.json create mode 100644 node_modules/@octokit/openapi-types/types.d.ts create mode 100644 node_modules/@octokit/request-error/LICENSE create mode 100644 node_modules/@octokit/request-error/README.md create mode 100644 node_modules/@octokit/request-error/dist-node/index.js create mode 100644 node_modules/@octokit/request-error/dist-node/index.js.map create mode 100644 node_modules/@octokit/request-error/dist-src/index.js create mode 100644 node_modules/@octokit/request-error/dist-src/types.js create mode 100644 node_modules/@octokit/request-error/dist-types/index.d.ts create mode 100644 node_modules/@octokit/request-error/dist-types/types.d.ts create mode 100644 node_modules/@octokit/request-error/dist-web/index.js create mode 100644 node_modules/@octokit/request-error/dist-web/index.js.map create mode 100644 node_modules/@octokit/request-error/package.json create mode 100644 node_modules/@octokit/types/LICENSE create mode 100644 node_modules/@octokit/types/README.md create mode 100644 node_modules/@octokit/types/dist-node/index.js create mode 100644 node_modules/@octokit/types/dist-node/index.js.map create mode 100644 node_modules/@octokit/types/dist-src/AuthInterface.js create mode 100644 node_modules/@octokit/types/dist-src/EndpointDefaults.js create mode 100644 node_modules/@octokit/types/dist-src/EndpointInterface.js create mode 100644 node_modules/@octokit/types/dist-src/EndpointOptions.js create mode 100644 node_modules/@octokit/types/dist-src/Fetch.js create mode 100644 node_modules/@octokit/types/dist-src/GetResponseTypeFromEndpointMethod.js create mode 100644 node_modules/@octokit/types/dist-src/OctokitResponse.js create mode 100644 node_modules/@octokit/types/dist-src/RequestError.js create mode 100644 
node_modules/@octokit/types/dist-src/RequestHeaders.js create mode 100644 node_modules/@octokit/types/dist-src/RequestInterface.js create mode 100644 node_modules/@octokit/types/dist-src/RequestMethod.js create mode 100644 node_modules/@octokit/types/dist-src/RequestOptions.js create mode 100644 node_modules/@octokit/types/dist-src/RequestParameters.js create mode 100644 node_modules/@octokit/types/dist-src/RequestRequestOptions.js create mode 100644 node_modules/@octokit/types/dist-src/ResponseHeaders.js create mode 100644 node_modules/@octokit/types/dist-src/Route.js create mode 100644 node_modules/@octokit/types/dist-src/Signal.js create mode 100644 node_modules/@octokit/types/dist-src/StrategyInterface.js create mode 100644 node_modules/@octokit/types/dist-src/Url.js create mode 100644 node_modules/@octokit/types/dist-src/VERSION.js create mode 100644 node_modules/@octokit/types/dist-src/generated/Endpoints.js create mode 100644 node_modules/@octokit/types/dist-src/index.js create mode 100644 node_modules/@octokit/types/dist-types/AuthInterface.d.ts create mode 100644 node_modules/@octokit/types/dist-types/EndpointDefaults.d.ts create mode 100644 node_modules/@octokit/types/dist-types/EndpointInterface.d.ts create mode 100644 node_modules/@octokit/types/dist-types/EndpointOptions.d.ts create mode 100644 node_modules/@octokit/types/dist-types/Fetch.d.ts create mode 100644 node_modules/@octokit/types/dist-types/GetResponseTypeFromEndpointMethod.d.ts create mode 100644 node_modules/@octokit/types/dist-types/OctokitResponse.d.ts create mode 100644 node_modules/@octokit/types/dist-types/RequestError.d.ts create mode 100644 node_modules/@octokit/types/dist-types/RequestHeaders.d.ts create mode 100644 node_modules/@octokit/types/dist-types/RequestInterface.d.ts create mode 100644 node_modules/@octokit/types/dist-types/RequestMethod.d.ts create mode 100644 node_modules/@octokit/types/dist-types/RequestOptions.d.ts create mode 100644 node_modules/@octokit/types/dist-types/RequestParameters.d.ts create mode 100644 node_modules/@octokit/types/dist-types/RequestRequestOptions.d.ts create mode 100644 node_modules/@octokit/types/dist-types/ResponseHeaders.d.ts create mode 100644 node_modules/@octokit/types/dist-types/Route.d.ts create mode 100644 node_modules/@octokit/types/dist-types/Signal.d.ts create mode 100644 node_modules/@octokit/types/dist-types/StrategyInterface.d.ts create mode 100644 node_modules/@octokit/types/dist-types/Url.d.ts create mode 100644 node_modules/@octokit/types/dist-types/VERSION.d.ts create mode 100644 node_modules/@octokit/types/dist-types/generated/Endpoints.d.ts create mode 100644 node_modules/@octokit/types/dist-types/index.d.ts create mode 100644 node_modules/@octokit/types/dist-web/index.js create mode 100644 node_modules/@octokit/types/dist-web/index.js.map create mode 100644 node_modules/@octokit/types/package.json create mode 100644 node_modules/@octokit/webhooks-methods/LICENSE create mode 100644 node_modules/@octokit/webhooks-methods/README.md create mode 100644 node_modules/@octokit/webhooks-methods/dist-node/index.js create mode 100644 node_modules/@octokit/webhooks-methods/dist-node/index.js.map create mode 100644 node_modules/@octokit/webhooks-methods/dist-src/index.js create mode 100644 node_modules/@octokit/webhooks-methods/dist-src/node/sign.js create mode 100644 node_modules/@octokit/webhooks-methods/dist-src/node/verify.js create mode 100644 node_modules/@octokit/webhooks-methods/dist-src/types.js create mode 100644 
node_modules/@octokit/webhooks-methods/dist-src/utils.js create mode 100644 node_modules/@octokit/webhooks-methods/dist-src/version.js create mode 100644 node_modules/@octokit/webhooks-methods/dist-src/web.js create mode 100644 node_modules/@octokit/webhooks-methods/dist-types/index.d.ts create mode 100644 node_modules/@octokit/webhooks-methods/dist-types/node/sign.d.ts create mode 100644 node_modules/@octokit/webhooks-methods/dist-types/node/verify.d.ts create mode 100644 node_modules/@octokit/webhooks-methods/dist-types/types.d.ts create mode 100644 node_modules/@octokit/webhooks-methods/dist-types/utils.d.ts create mode 100644 node_modules/@octokit/webhooks-methods/dist-types/version.d.ts create mode 100644 node_modules/@octokit/webhooks-methods/dist-types/web.d.ts create mode 100644 node_modules/@octokit/webhooks-methods/dist-web/index.js create mode 100644 node_modules/@octokit/webhooks-methods/dist-web/index.js.map create mode 100644 node_modules/@octokit/webhooks-methods/package.json create mode 100644 node_modules/@octokit/webhooks-types/README.md create mode 100644 node_modules/@octokit/webhooks-types/package.json create mode 100644 node_modules/@octokit/webhooks-types/schema.d.ts create mode 100644 node_modules/@octokit/webhooks/LICENSE.md create mode 100644 node_modules/@octokit/webhooks/README.md create mode 100644 node_modules/@octokit/webhooks/dist-node/index.js create mode 100644 node_modules/@octokit/webhooks/dist-node/index.js.map create mode 100644 node_modules/@octokit/webhooks/dist-src/createLogger.js create mode 100644 node_modules/@octokit/webhooks/dist-src/event-handler/index.js create mode 100644 node_modules/@octokit/webhooks/dist-src/event-handler/on.js create mode 100644 node_modules/@octokit/webhooks/dist-src/event-handler/receive.js create mode 100644 node_modules/@octokit/webhooks/dist-src/event-handler/remove-listener.js create mode 100644 node_modules/@octokit/webhooks/dist-src/event-handler/wrap-error-handler.js create mode 100644 node_modules/@octokit/webhooks/dist-src/generated/webhook-names.js create mode 100644 node_modules/@octokit/webhooks/dist-src/index.js create mode 100644 node_modules/@octokit/webhooks/dist-src/middleware/node/get-missing-headers.js create mode 100644 node_modules/@octokit/webhooks/dist-src/middleware/node/get-payload.js create mode 100644 node_modules/@octokit/webhooks/dist-src/middleware/node/index.js create mode 100644 node_modules/@octokit/webhooks/dist-src/middleware/node/middleware.js create mode 100644 node_modules/@octokit/webhooks/dist-src/middleware/node/on-unhandled-request-default.js create mode 100644 node_modules/@octokit/webhooks/dist-src/middleware/node/types.js create mode 100644 node_modules/@octokit/webhooks/dist-src/sign.js create mode 100644 node_modules/@octokit/webhooks/dist-src/to-normalized-json-string.js create mode 100644 node_modules/@octokit/webhooks/dist-src/types.js create mode 100644 node_modules/@octokit/webhooks/dist-src/verify-and-receive.js create mode 100644 node_modules/@octokit/webhooks/dist-src/verify.js create mode 100644 node_modules/@octokit/webhooks/dist-types/createLogger.d.ts create mode 100644 node_modules/@octokit/webhooks/dist-types/event-handler/index.d.ts create mode 100644 node_modules/@octokit/webhooks/dist-types/event-handler/on.d.ts create mode 100644 node_modules/@octokit/webhooks/dist-types/event-handler/receive.d.ts create mode 100644 node_modules/@octokit/webhooks/dist-types/event-handler/remove-listener.d.ts create mode 100644 
node_modules/@octokit/webhooks/dist-types/event-handler/wrap-error-handler.d.ts create mode 100644 node_modules/@octokit/webhooks/dist-types/generated/webhook-names.d.ts create mode 100644 node_modules/@octokit/webhooks/dist-types/index.d.ts create mode 100644 node_modules/@octokit/webhooks/dist-types/middleware/node/get-missing-headers.d.ts create mode 100644 node_modules/@octokit/webhooks/dist-types/middleware/node/get-payload.d.ts create mode 100644 node_modules/@octokit/webhooks/dist-types/middleware/node/index.d.ts create mode 100644 node_modules/@octokit/webhooks/dist-types/middleware/node/middleware.d.ts create mode 100644 node_modules/@octokit/webhooks/dist-types/middleware/node/on-unhandled-request-default.d.ts create mode 100644 node_modules/@octokit/webhooks/dist-types/middleware/node/types.d.ts create mode 100644 node_modules/@octokit/webhooks/dist-types/sign.d.ts create mode 100644 node_modules/@octokit/webhooks/dist-types/to-normalized-json-string.d.ts create mode 100644 node_modules/@octokit/webhooks/dist-types/types.d.ts create mode 100644 node_modules/@octokit/webhooks/dist-types/verify-and-receive.d.ts create mode 100644 node_modules/@octokit/webhooks/dist-types/verify.d.ts create mode 100644 node_modules/@octokit/webhooks/dist-web/index.js create mode 100644 node_modules/@octokit/webhooks/dist-web/index.js.map create mode 100644 node_modules/@octokit/webhooks/package.json create mode 100755 node_modules/@types/node/LICENSE create mode 100755 node_modules/@types/node/README.md create mode 100755 node_modules/@types/node/assert.d.ts create mode 100755 node_modules/@types/node/assert/strict.d.ts create mode 100755 node_modules/@types/node/async_hooks.d.ts create mode 100755 node_modules/@types/node/buffer.d.ts create mode 100755 node_modules/@types/node/child_process.d.ts create mode 100755 node_modules/@types/node/cluster.d.ts create mode 100755 node_modules/@types/node/console.d.ts create mode 100755 node_modules/@types/node/constants.d.ts create mode 100755 node_modules/@types/node/crypto.d.ts create mode 100755 node_modules/@types/node/dgram.d.ts create mode 100755 node_modules/@types/node/diagnostics_channel.d.ts create mode 100755 node_modules/@types/node/dns.d.ts create mode 100755 node_modules/@types/node/dns/promises.d.ts create mode 100755 node_modules/@types/node/dom-events.d.ts create mode 100755 node_modules/@types/node/domain.d.ts create mode 100755 node_modules/@types/node/events.d.ts create mode 100755 node_modules/@types/node/fs.d.ts create mode 100755 node_modules/@types/node/fs/promises.d.ts create mode 100755 node_modules/@types/node/globals.d.ts create mode 100755 node_modules/@types/node/globals.global.d.ts create mode 100755 node_modules/@types/node/http.d.ts create mode 100755 node_modules/@types/node/http2.d.ts create mode 100755 node_modules/@types/node/https.d.ts create mode 100755 node_modules/@types/node/index.d.ts create mode 100755 node_modules/@types/node/inspector.d.ts create mode 100755 node_modules/@types/node/module.d.ts create mode 100755 node_modules/@types/node/net.d.ts create mode 100755 node_modules/@types/node/os.d.ts create mode 100755 node_modules/@types/node/package.json create mode 100755 node_modules/@types/node/path.d.ts create mode 100755 node_modules/@types/node/perf_hooks.d.ts create mode 100755 node_modules/@types/node/process.d.ts create mode 100755 node_modules/@types/node/punycode.d.ts create mode 100755 node_modules/@types/node/querystring.d.ts create mode 100755 node_modules/@types/node/readline.d.ts create mode 
100755 node_modules/@types/node/readline/promises.d.ts create mode 100755 node_modules/@types/node/repl.d.ts create mode 100755 node_modules/@types/node/stream.d.ts create mode 100755 node_modules/@types/node/stream/consumers.d.ts create mode 100755 node_modules/@types/node/stream/promises.d.ts create mode 100755 node_modules/@types/node/stream/web.d.ts create mode 100755 node_modules/@types/node/string_decoder.d.ts create mode 100755 node_modules/@types/node/test.d.ts create mode 100755 node_modules/@types/node/timers.d.ts create mode 100755 node_modules/@types/node/timers/promises.d.ts create mode 100755 node_modules/@types/node/tls.d.ts create mode 100755 node_modules/@types/node/trace_events.d.ts create mode 100755 node_modules/@types/node/ts4.8/assert.d.ts create mode 100755 node_modules/@types/node/ts4.8/assert/strict.d.ts create mode 100755 node_modules/@types/node/ts4.8/async_hooks.d.ts create mode 100755 node_modules/@types/node/ts4.8/buffer.d.ts create mode 100755 node_modules/@types/node/ts4.8/child_process.d.ts create mode 100755 node_modules/@types/node/ts4.8/cluster.d.ts create mode 100755 node_modules/@types/node/ts4.8/console.d.ts create mode 100755 node_modules/@types/node/ts4.8/constants.d.ts create mode 100755 node_modules/@types/node/ts4.8/crypto.d.ts create mode 100755 node_modules/@types/node/ts4.8/dgram.d.ts create mode 100755 node_modules/@types/node/ts4.8/diagnostics_channel.d.ts create mode 100755 node_modules/@types/node/ts4.8/dns.d.ts create mode 100755 node_modules/@types/node/ts4.8/dns/promises.d.ts create mode 100755 node_modules/@types/node/ts4.8/dom-events.d.ts create mode 100755 node_modules/@types/node/ts4.8/domain.d.ts create mode 100755 node_modules/@types/node/ts4.8/events.d.ts create mode 100755 node_modules/@types/node/ts4.8/fs.d.ts create mode 100755 node_modules/@types/node/ts4.8/fs/promises.d.ts create mode 100755 node_modules/@types/node/ts4.8/globals.d.ts create mode 100755 node_modules/@types/node/ts4.8/globals.global.d.ts create mode 100755 node_modules/@types/node/ts4.8/http.d.ts create mode 100755 node_modules/@types/node/ts4.8/http2.d.ts create mode 100755 node_modules/@types/node/ts4.8/https.d.ts create mode 100755 node_modules/@types/node/ts4.8/index.d.ts create mode 100755 node_modules/@types/node/ts4.8/inspector.d.ts create mode 100755 node_modules/@types/node/ts4.8/module.d.ts create mode 100755 node_modules/@types/node/ts4.8/net.d.ts create mode 100755 node_modules/@types/node/ts4.8/os.d.ts create mode 100755 node_modules/@types/node/ts4.8/path.d.ts create mode 100755 node_modules/@types/node/ts4.8/perf_hooks.d.ts create mode 100755 node_modules/@types/node/ts4.8/process.d.ts create mode 100755 node_modules/@types/node/ts4.8/punycode.d.ts create mode 100755 node_modules/@types/node/ts4.8/querystring.d.ts create mode 100755 node_modules/@types/node/ts4.8/readline.d.ts create mode 100755 node_modules/@types/node/ts4.8/readline/promises.d.ts create mode 100755 node_modules/@types/node/ts4.8/repl.d.ts create mode 100755 node_modules/@types/node/ts4.8/stream.d.ts create mode 100755 node_modules/@types/node/ts4.8/stream/consumers.d.ts create mode 100755 node_modules/@types/node/ts4.8/stream/promises.d.ts create mode 100755 node_modules/@types/node/ts4.8/stream/web.d.ts create mode 100755 node_modules/@types/node/ts4.8/string_decoder.d.ts create mode 100755 node_modules/@types/node/ts4.8/test.d.ts create mode 100755 node_modules/@types/node/ts4.8/timers.d.ts create mode 100755 node_modules/@types/node/ts4.8/timers/promises.d.ts create mode 
100755 node_modules/@types/node/ts4.8/tls.d.ts create mode 100755 node_modules/@types/node/ts4.8/trace_events.d.ts create mode 100755 node_modules/@types/node/ts4.8/tty.d.ts create mode 100755 node_modules/@types/node/ts4.8/url.d.ts create mode 100755 node_modules/@types/node/ts4.8/util.d.ts create mode 100755 node_modules/@types/node/ts4.8/v8.d.ts create mode 100755 node_modules/@types/node/ts4.8/vm.d.ts create mode 100755 node_modules/@types/node/ts4.8/wasi.d.ts create mode 100755 node_modules/@types/node/ts4.8/worker_threads.d.ts create mode 100755 node_modules/@types/node/ts4.8/zlib.d.ts create mode 100755 node_modules/@types/node/tty.d.ts create mode 100755 node_modules/@types/node/url.d.ts create mode 100755 node_modules/@types/node/util.d.ts create mode 100755 node_modules/@types/node/v8.d.ts create mode 100755 node_modules/@types/node/vm.d.ts create mode 100755 node_modules/@types/node/wasi.d.ts create mode 100755 node_modules/@types/node/worker_threads.d.ts create mode 100755 node_modules/@types/node/zlib.d.ts create mode 100644 node_modules/aggregate-error/index.d.ts create mode 100644 node_modules/aggregate-error/index.js create mode 100644 node_modules/aggregate-error/license create mode 100644 node_modules/aggregate-error/package.json create mode 100644 node_modules/aggregate-error/readme.md create mode 100644 node_modules/asap/CHANGES.md create mode 100644 node_modules/asap/LICENSE.md create mode 100644 node_modules/asap/README.md create mode 100644 node_modules/asap/asap.js create mode 100644 node_modules/asap/browser-asap.js create mode 100644 node_modules/asap/browser-raw.js create mode 100644 node_modules/asap/package.json create mode 100644 node_modules/asap/raw.js create mode 100644 node_modules/asynckit/LICENSE create mode 100644 node_modules/asynckit/README.md create mode 100644 node_modules/asynckit/bench.js create mode 100644 node_modules/asynckit/index.js create mode 100644 node_modules/asynckit/lib/abort.js create mode 100644 node_modules/asynckit/lib/async.js create mode 100644 node_modules/asynckit/lib/defer.js create mode 100644 node_modules/asynckit/lib/iterate.js create mode 100644 node_modules/asynckit/lib/readable_asynckit.js create mode 100644 node_modules/asynckit/lib/readable_parallel.js create mode 100644 node_modules/asynckit/lib/readable_serial.js create mode 100644 node_modules/asynckit/lib/readable_serial_ordered.js create mode 100644 node_modules/asynckit/lib/state.js create mode 100644 node_modules/asynckit/lib/streamify.js create mode 100644 node_modules/asynckit/lib/terminator.js create mode 100644 node_modules/asynckit/package.json create mode 100644 node_modules/asynckit/parallel.js create mode 100644 node_modules/asynckit/serial.js create mode 100644 node_modules/asynckit/serialOrdered.js create mode 100644 node_modules/asynckit/stream.js create mode 100644 node_modules/basic-auth/HISTORY.md create mode 100644 node_modules/basic-auth/LICENSE create mode 100644 node_modules/basic-auth/README.md create mode 100644 node_modules/basic-auth/index.js create mode 100644 node_modules/basic-auth/package.json create mode 100644 node_modules/call-bind/.eslintignore create mode 100644 node_modules/call-bind/.eslintrc create mode 100644 node_modules/call-bind/.github/FUNDING.yml create mode 100644 node_modules/call-bind/.nycrc create mode 100644 node_modules/call-bind/CHANGELOG.md create mode 100644 node_modules/call-bind/LICENSE create mode 100644 node_modules/call-bind/README.md create mode 100644 node_modules/call-bind/callBound.js create mode 100644 
node_modules/call-bind/index.js create mode 100644 node_modules/call-bind/package.json create mode 100644 node_modules/call-bind/test/callBound.js create mode 100644 node_modules/call-bind/test/index.js create mode 100644 node_modules/clean-stack/index.d.ts create mode 100644 node_modules/clean-stack/index.js create mode 100644 node_modules/clean-stack/license create mode 100644 node_modules/clean-stack/package.json create mode 100644 node_modules/clean-stack/readme.md create mode 100644 node_modules/combined-stream/License create mode 100644 node_modules/combined-stream/Readme.md create mode 100644 node_modules/combined-stream/lib/combined_stream.js create mode 100644 node_modules/combined-stream/package.json create mode 100644 node_modules/combined-stream/yarn.lock create mode 100644 node_modules/commander/CHANGELOG.md create mode 100644 node_modules/commander/LICENSE create mode 100644 node_modules/commander/Readme.md create mode 100644 node_modules/commander/index.js create mode 100644 node_modules/commander/package.json create mode 100644 node_modules/commander/typings/index.d.ts create mode 100644 node_modules/component-emitter/History.md create mode 100644 node_modules/component-emitter/LICENSE create mode 100644 node_modules/component-emitter/Readme.md create mode 100644 node_modules/component-emitter/index.js create mode 100644 node_modules/component-emitter/package.json create mode 100644 node_modules/cookiejar/LICENSE create mode 100644 node_modules/cookiejar/cookiejar.js create mode 100644 node_modules/cookiejar/package.json create mode 100644 node_modules/cookiejar/readme.md create mode 100644 node_modules/debug/.coveralls.yml create mode 100644 node_modules/debug/.eslintrc create mode 100644 node_modules/debug/.npmignore create mode 100644 node_modules/debug/.travis.yml create mode 100644 node_modules/debug/CHANGELOG.md create mode 100644 node_modules/debug/LICENSE create mode 100644 node_modules/debug/Makefile create mode 100644 node_modules/debug/README.md create mode 100644 node_modules/debug/component.json create mode 100644 node_modules/debug/karma.conf.js create mode 100644 node_modules/debug/node.js create mode 100644 node_modules/debug/package.json create mode 100644 node_modules/debug/src/browser.js create mode 100644 node_modules/debug/src/debug.js create mode 100644 node_modules/debug/src/index.js create mode 100644 node_modules/debug/src/inspector-log.js create mode 100644 node_modules/debug/src/node.js create mode 100644 node_modules/delayed-stream/.npmignore create mode 100644 node_modules/delayed-stream/License create mode 100644 node_modules/delayed-stream/Makefile create mode 100644 node_modules/delayed-stream/Readme.md create mode 100644 node_modules/delayed-stream/lib/delayed_stream.js create mode 100644 node_modules/delayed-stream/package.json create mode 100644 node_modules/depd/History.md create mode 100644 node_modules/depd/LICENSE create mode 100644 node_modules/depd/Readme.md create mode 100644 node_modules/depd/index.js create mode 100644 node_modules/depd/lib/browser/index.js create mode 100644 node_modules/depd/package.json create mode 100644 node_modules/deprecation/LICENSE create mode 100644 node_modules/deprecation/README.md create mode 100644 node_modules/deprecation/dist-node/index.js create mode 100644 node_modules/deprecation/dist-src/index.js create mode 100644 node_modules/deprecation/dist-types/index.d.ts create mode 100644 node_modules/deprecation/dist-web/index.js create mode 100644 node_modules/deprecation/package.json create mode 
100644 node_modules/dezalgo/.travis.yml create mode 100644 node_modules/dezalgo/LICENSE create mode 100644 node_modules/dezalgo/README.md create mode 100644 node_modules/dezalgo/dezalgo.js create mode 100644 node_modules/dezalgo/package.json create mode 100644 node_modules/dezalgo/test/basic.js create mode 100644 node_modules/ee-first/LICENSE create mode 100644 node_modules/ee-first/README.md create mode 100644 node_modules/ee-first/index.js create mode 100644 node_modules/ee-first/package.json create mode 100644 node_modules/eventsource/.editorconfig create mode 100644 node_modules/eventsource/CONTRIBUTING.md create mode 100644 node_modules/eventsource/HISTORY.md create mode 100644 node_modules/eventsource/LICENSE create mode 100644 node_modules/eventsource/README.md create mode 100644 node_modules/eventsource/example/eventsource-polyfill.js create mode 100644 node_modules/eventsource/example/index.html create mode 100644 node_modules/eventsource/example/sse-client.js create mode 100644 node_modules/eventsource/example/sse-server.js create mode 100644 node_modules/eventsource/lib/eventsource-polyfill.js create mode 100644 node_modules/eventsource/lib/eventsource.js create mode 100644 node_modules/eventsource/package.json create mode 100644 node_modules/fast-safe-stringify/.travis.yml create mode 100644 node_modules/fast-safe-stringify/CHANGELOG.md create mode 100644 node_modules/fast-safe-stringify/LICENSE create mode 100644 node_modules/fast-safe-stringify/benchmark.js create mode 100644 node_modules/fast-safe-stringify/index.d.ts create mode 100644 node_modules/fast-safe-stringify/index.js create mode 100644 node_modules/fast-safe-stringify/package.json create mode 100644 node_modules/fast-safe-stringify/readme.md create mode 100644 node_modules/fast-safe-stringify/test-stable.js create mode 100644 node_modules/fast-safe-stringify/test.js create mode 100644 node_modules/form-data/License create mode 100644 node_modules/form-data/README.md.bak create mode 100644 node_modules/form-data/Readme.md create mode 100644 node_modules/form-data/index.d.ts create mode 100644 node_modules/form-data/lib/browser.js create mode 100644 node_modules/form-data/lib/form_data.js create mode 100644 node_modules/form-data/lib/populate.js create mode 100644 node_modules/form-data/package.json create mode 100644 node_modules/formidable/LICENSE create mode 100644 node_modules/formidable/README.md create mode 100644 node_modules/formidable/node_modules/qs/.editorconfig create mode 100644 node_modules/formidable/node_modules/qs/.eslintignore create mode 100644 node_modules/formidable/node_modules/qs/.eslintrc create mode 100644 node_modules/formidable/node_modules/qs/.github/FUNDING.yml create mode 100644 node_modules/formidable/node_modules/qs/.github/workflows/rebase.yml create mode 100644 node_modules/formidable/node_modules/qs/CHANGELOG.md create mode 100644 node_modules/formidable/node_modules/qs/LICENSE.md create mode 100644 node_modules/formidable/node_modules/qs/README.md create mode 100644 node_modules/formidable/node_modules/qs/dist/qs.js create mode 100644 node_modules/formidable/node_modules/qs/lib/formats.js create mode 100644 node_modules/formidable/node_modules/qs/lib/index.js create mode 100644 node_modules/formidable/node_modules/qs/lib/parse.js create mode 100644 node_modules/formidable/node_modules/qs/lib/stringify.js create mode 100644 node_modules/formidable/node_modules/qs/lib/utils.js create mode 100644 node_modules/formidable/node_modules/qs/package.json create mode 100644 
node_modules/formidable/node_modules/qs/test/.eslintrc create mode 100644 node_modules/formidable/node_modules/qs/test/index.js create mode 100644 node_modules/formidable/node_modules/qs/test/parse.js create mode 100644 node_modules/formidable/node_modules/qs/test/stringify.js create mode 100644 node_modules/formidable/node_modules/qs/test/utils.js create mode 100644 node_modules/formidable/package.json create mode 100644 node_modules/formidable/src/Formidable.js create mode 100644 node_modules/formidable/src/FormidableError.js create mode 100644 node_modules/formidable/src/PersistentFile.js create mode 100644 node_modules/formidable/src/VolatileFile.js create mode 100644 node_modules/formidable/src/index.js create mode 100644 node_modules/formidable/src/parsers/Dummy.js create mode 100644 node_modules/formidable/src/parsers/JSON.js create mode 100644 node_modules/formidable/src/parsers/Multipart.js create mode 100644 node_modules/formidable/src/parsers/OctetStream.js create mode 100644 node_modules/formidable/src/parsers/Querystring.js create mode 100644 node_modules/formidable/src/parsers/StreamingQuerystring.js create mode 100644 node_modules/formidable/src/parsers/index.js create mode 100644 node_modules/formidable/src/plugins/index.js create mode 100644 node_modules/formidable/src/plugins/json.js create mode 100644 node_modules/formidable/src/plugins/multipart.js create mode 100644 node_modules/formidable/src/plugins/octetstream.js create mode 100644 node_modules/formidable/src/plugins/querystring.js create mode 100644 node_modules/function-bind/.editorconfig create mode 100644 node_modules/function-bind/.eslintrc create mode 100644 node_modules/function-bind/.jscs.json create mode 100644 node_modules/function-bind/.npmignore create mode 100644 node_modules/function-bind/.travis.yml create mode 100644 node_modules/function-bind/LICENSE create mode 100644 node_modules/function-bind/README.md create mode 100644 node_modules/function-bind/implementation.js create mode 100644 node_modules/function-bind/index.js create mode 100644 node_modules/function-bind/package.json create mode 100644 node_modules/function-bind/test/.eslintrc create mode 100644 node_modules/function-bind/test/index.js create mode 100644 node_modules/get-intrinsic/.eslintrc create mode 100644 node_modules/get-intrinsic/.github/FUNDING.yml create mode 100644 node_modules/get-intrinsic/.nycrc create mode 100644 node_modules/get-intrinsic/CHANGELOG.md create mode 100644 node_modules/get-intrinsic/LICENSE create mode 100644 node_modules/get-intrinsic/README.md create mode 100644 node_modules/get-intrinsic/index.js create mode 100644 node_modules/get-intrinsic/package.json create mode 100644 node_modules/get-intrinsic/test/GetIntrinsic.js create mode 120000 node_modules/github-auto-sync create mode 100644 node_modules/has-symbols/.eslintrc create mode 100644 node_modules/has-symbols/.github/FUNDING.yml create mode 100644 node_modules/has-symbols/.nycrc create mode 100644 node_modules/has-symbols/CHANGELOG.md create mode 100644 node_modules/has-symbols/LICENSE create mode 100644 node_modules/has-symbols/README.md create mode 100644 node_modules/has-symbols/index.js create mode 100644 node_modules/has-symbols/package.json create mode 100644 node_modules/has-symbols/shams.js create mode 100644 node_modules/has-symbols/test/index.js create mode 100644 node_modules/has-symbols/test/shams/core-js.js create mode 100644 node_modules/has-symbols/test/shams/get-own-property-symbols.js create mode 100644 
node_modules/has-symbols/test/tests.js create mode 100644 node_modules/has/LICENSE-MIT create mode 100644 node_modules/has/README.md create mode 100644 node_modules/has/package.json create mode 100644 node_modules/has/src/index.js create mode 100644 node_modules/has/test/index.js create mode 100644 node_modules/hexoid/dist/index.js create mode 100644 node_modules/hexoid/dist/index.min.js create mode 100644 node_modules/hexoid/dist/index.mjs create mode 100644 node_modules/hexoid/hexoid.d.ts create mode 100644 node_modules/hexoid/license create mode 100644 node_modules/hexoid/package.json create mode 100644 node_modules/hexoid/readme.md create mode 100644 node_modules/indent-string/index.d.ts create mode 100644 node_modules/indent-string/index.js create mode 100644 node_modules/indent-string/license create mode 100644 node_modules/indent-string/package.json create mode 100644 node_modules/indent-string/readme.md create mode 100644 node_modules/inherits/LICENSE create mode 100644 node_modules/inherits/README.md create mode 100644 node_modules/inherits/inherits.js create mode 100644 node_modules/inherits/inherits_browser.js create mode 100644 node_modules/inherits/package.json create mode 100644 node_modules/lru-cache/LICENSE create mode 100644 node_modules/lru-cache/README.md create mode 100644 node_modules/lru-cache/index.js create mode 100644 node_modules/lru-cache/package.json create mode 100644 node_modules/methods/HISTORY.md create mode 100644 node_modules/methods/LICENSE create mode 100644 node_modules/methods/README.md create mode 100644 node_modules/methods/index.js create mode 100644 node_modules/methods/package.json create mode 100644 node_modules/mime-db/HISTORY.md create mode 100644 node_modules/mime-db/LICENSE create mode 100644 node_modules/mime-db/README.md create mode 100644 node_modules/mime-db/db.json create mode 100644 node_modules/mime-db/index.js create mode 100644 node_modules/mime-db/package.json create mode 100644 node_modules/mime-types/HISTORY.md create mode 100644 node_modules/mime-types/LICENSE create mode 100644 node_modules/mime-types/README.md create mode 100644 node_modules/mime-types/index.js create mode 100644 node_modules/mime-types/package.json create mode 100644 node_modules/mime/CHANGELOG.md create mode 100644 node_modules/mime/LICENSE create mode 100644 node_modules/mime/Mime.js create mode 100644 node_modules/mime/README.md create mode 100755 node_modules/mime/cli.js create mode 100644 node_modules/mime/index.js create mode 100644 node_modules/mime/lite.js create mode 100644 node_modules/mime/package.json create mode 100644 node_modules/mime/types/other.js create mode 100644 node_modules/mime/types/standard.js create mode 100644 node_modules/morgan/HISTORY.md create mode 100644 node_modules/morgan/LICENSE create mode 100644 node_modules/morgan/README.md create mode 100644 node_modules/morgan/index.js create mode 100644 node_modules/morgan/package.json create mode 100644 node_modules/ms/index.js create mode 100644 node_modules/ms/license.md create mode 100644 node_modules/ms/package.json create mode 100644 node_modules/ms/readme.md create mode 100644 node_modules/object-inspect/.eslintrc create mode 100644 node_modules/object-inspect/.github/FUNDING.yml create mode 100644 node_modules/object-inspect/.nycrc create mode 100644 node_modules/object-inspect/CHANGELOG.md create mode 100644 node_modules/object-inspect/LICENSE create mode 100644 node_modules/object-inspect/example/all.js create mode 100644 node_modules/object-inspect/example/circular.js create 
mode 100644 node_modules/object-inspect/example/fn.js create mode 100644 node_modules/object-inspect/example/inspect.js create mode 100644 node_modules/object-inspect/index.js create mode 100644 node_modules/object-inspect/package-support.json create mode 100644 node_modules/object-inspect/package.json create mode 100644 node_modules/object-inspect/readme.markdown create mode 100644 node_modules/object-inspect/test-core-js.js create mode 100644 node_modules/object-inspect/test/bigint.js create mode 100644 node_modules/object-inspect/test/browser/dom.js create mode 100644 node_modules/object-inspect/test/circular.js create mode 100644 node_modules/object-inspect/test/deep.js create mode 100644 node_modules/object-inspect/test/element.js create mode 100644 node_modules/object-inspect/test/err.js create mode 100644 node_modules/object-inspect/test/fakes.js create mode 100644 node_modules/object-inspect/test/fn.js create mode 100644 node_modules/object-inspect/test/has.js create mode 100644 node_modules/object-inspect/test/holes.js create mode 100644 node_modules/object-inspect/test/indent-option.js create mode 100644 node_modules/object-inspect/test/inspect.js create mode 100644 node_modules/object-inspect/test/lowbyte.js create mode 100644 node_modules/object-inspect/test/number.js create mode 100644 node_modules/object-inspect/test/quoteStyle.js create mode 100644 node_modules/object-inspect/test/toStringTag.js create mode 100644 node_modules/object-inspect/test/undef.js create mode 100644 node_modules/object-inspect/test/values.js create mode 100644 node_modules/object-inspect/util.inspect.js create mode 100644 node_modules/on-finished/HISTORY.md create mode 100644 node_modules/on-finished/LICENSE create mode 100644 node_modules/on-finished/README.md create mode 100644 node_modules/on-finished/index.js create mode 100644 node_modules/on-finished/package.json create mode 100644 node_modules/on-headers/HISTORY.md create mode 100644 node_modules/on-headers/LICENSE create mode 100644 node_modules/on-headers/README.md create mode 100644 node_modules/on-headers/index.js create mode 100644 node_modules/on-headers/package.json create mode 100644 node_modules/once/LICENSE create mode 100644 node_modules/once/README.md create mode 100644 node_modules/once/once.js create mode 100644 node_modules/once/package.json create mode 100644 node_modules/prefix/.npmignore create mode 100644 node_modules/prefix/LICENSE create mode 100644 node_modules/prefix/Makefile create mode 100644 node_modules/prefix/README.md create mode 100644 node_modules/prefix/component.json create mode 100644 node_modules/prefix/index.js create mode 100644 node_modules/prefix/package.json create mode 100644 node_modules/prefix/test/index.html create mode 100644 node_modules/prefix/test/prefix.spec.js create mode 100644 node_modules/qs/.editorconfig create mode 100644 node_modules/qs/.eslintrc create mode 100644 node_modules/qs/.github/FUNDING.yml create mode 100644 node_modules/qs/.nycrc create mode 100644 node_modules/qs/CHANGELOG.md create mode 100644 node_modules/qs/LICENSE.md create mode 100644 node_modules/qs/README.md create mode 100644 node_modules/qs/dist/qs.js create mode 100644 node_modules/qs/lib/formats.js create mode 100644 node_modules/qs/lib/index.js create mode 100644 node_modules/qs/lib/parse.js create mode 100644 node_modules/qs/lib/stringify.js create mode 100644 node_modules/qs/lib/utils.js create mode 100644 node_modules/qs/package.json create mode 100644 node_modules/qs/test/parse.js create mode 100644 
node_modules/qs/test/stringify.js create mode 100644 node_modules/qs/test/utils.js create mode 100644 node_modules/readable-stream/CONTRIBUTING.md create mode 100644 node_modules/readable-stream/GOVERNANCE.md create mode 100644 node_modules/readable-stream/LICENSE create mode 100644 node_modules/readable-stream/README.md create mode 100644 node_modules/readable-stream/errors-browser.js create mode 100644 node_modules/readable-stream/errors.js create mode 100644 node_modules/readable-stream/experimentalWarning.js create mode 100644 node_modules/readable-stream/lib/_stream_duplex.js create mode 100644 node_modules/readable-stream/lib/_stream_passthrough.js create mode 100644 node_modules/readable-stream/lib/_stream_readable.js create mode 100644 node_modules/readable-stream/lib/_stream_transform.js create mode 100644 node_modules/readable-stream/lib/_stream_writable.js create mode 100644 node_modules/readable-stream/lib/internal/streams/async_iterator.js create mode 100644 node_modules/readable-stream/lib/internal/streams/buffer_list.js create mode 100644 node_modules/readable-stream/lib/internal/streams/destroy.js create mode 100644 node_modules/readable-stream/lib/internal/streams/end-of-stream.js create mode 100644 node_modules/readable-stream/lib/internal/streams/from-browser.js create mode 100644 node_modules/readable-stream/lib/internal/streams/from.js create mode 100644 node_modules/readable-stream/lib/internal/streams/pipeline.js create mode 100644 node_modules/readable-stream/lib/internal/streams/state.js create mode 100644 node_modules/readable-stream/lib/internal/streams/stream-browser.js create mode 100644 node_modules/readable-stream/lib/internal/streams/stream.js create mode 100644 node_modules/readable-stream/package.json create mode 100644 node_modules/readable-stream/readable-browser.js create mode 100644 node_modules/readable-stream/readable.js create mode 100644 node_modules/safe-buffer/LICENSE create mode 100644 node_modules/safe-buffer/README.md create mode 100644 node_modules/safe-buffer/index.d.ts create mode 100644 node_modules/safe-buffer/index.js create mode 100644 node_modules/safe-buffer/package.json create mode 100644 node_modules/semver/LICENSE create mode 100644 node_modules/semver/README.md create mode 100755 node_modules/semver/bin/semver.js create mode 100644 node_modules/semver/classes/comparator.js create mode 100644 node_modules/semver/classes/index.js create mode 100644 node_modules/semver/classes/range.js create mode 100644 node_modules/semver/classes/semver.js create mode 100644 node_modules/semver/functions/clean.js create mode 100644 node_modules/semver/functions/cmp.js create mode 100644 node_modules/semver/functions/coerce.js create mode 100644 node_modules/semver/functions/compare-build.js create mode 100644 node_modules/semver/functions/compare-loose.js create mode 100644 node_modules/semver/functions/compare.js create mode 100644 node_modules/semver/functions/diff.js create mode 100644 node_modules/semver/functions/eq.js create mode 100644 node_modules/semver/functions/gt.js create mode 100644 node_modules/semver/functions/gte.js create mode 100644 node_modules/semver/functions/inc.js create mode 100644 node_modules/semver/functions/lt.js create mode 100644 node_modules/semver/functions/lte.js create mode 100644 node_modules/semver/functions/major.js create mode 100644 node_modules/semver/functions/minor.js create mode 100644 node_modules/semver/functions/neq.js create mode 100644 node_modules/semver/functions/parse.js create mode 100644 
node_modules/semver/functions/patch.js create mode 100644 node_modules/semver/functions/prerelease.js create mode 100644 node_modules/semver/functions/rcompare.js create mode 100644 node_modules/semver/functions/rsort.js create mode 100644 node_modules/semver/functions/satisfies.js create mode 100644 node_modules/semver/functions/sort.js create mode 100644 node_modules/semver/functions/valid.js create mode 100644 node_modules/semver/index.js create mode 100644 node_modules/semver/internal/constants.js create mode 100644 node_modules/semver/internal/debug.js create mode 100644 node_modules/semver/internal/identifiers.js create mode 100644 node_modules/semver/internal/parse-options.js create mode 100644 node_modules/semver/internal/re.js create mode 100644 node_modules/semver/package.json create mode 100644 node_modules/semver/preload.js create mode 100644 node_modules/semver/range.bnf create mode 100644 node_modules/semver/ranges/gtr.js create mode 100644 node_modules/semver/ranges/intersects.js create mode 100644 node_modules/semver/ranges/ltr.js create mode 100644 node_modules/semver/ranges/max-satisfying.js create mode 100644 node_modules/semver/ranges/min-satisfying.js create mode 100644 node_modules/semver/ranges/min-version.js create mode 100644 node_modules/semver/ranges/outside.js create mode 100644 node_modules/semver/ranges/simplify.js create mode 100644 node_modules/semver/ranges/subset.js create mode 100644 node_modules/semver/ranges/to-comparators.js create mode 100644 node_modules/semver/ranges/valid.js create mode 100644 node_modules/side-channel/.eslintignore create mode 100644 node_modules/side-channel/.eslintrc create mode 100644 node_modules/side-channel/.github/FUNDING.yml create mode 100644 node_modules/side-channel/.nycrc create mode 100644 node_modules/side-channel/CHANGELOG.md create mode 100644 node_modules/side-channel/LICENSE create mode 100644 node_modules/side-channel/README.md create mode 100644 node_modules/side-channel/index.js create mode 100644 node_modules/side-channel/package.json create mode 100644 node_modules/side-channel/test/index.js create mode 100644 node_modules/smee-client/LICENSE create mode 100644 node_modules/smee-client/README.md create mode 100755 node_modules/smee-client/bin/smee.js create mode 100644 node_modules/smee-client/index.d.ts create mode 100644 node_modules/smee-client/index.js create mode 100644 node_modules/smee-client/node_modules/eventsource/.editorconfig create mode 100644 node_modules/smee-client/node_modules/eventsource/.travis.yml create mode 100644 node_modules/smee-client/node_modules/eventsource/CONTRIBUTING.md create mode 100644 node_modules/smee-client/node_modules/eventsource/HISTORY.md create mode 100644 node_modules/smee-client/node_modules/eventsource/LICENSE create mode 100644 node_modules/smee-client/node_modules/eventsource/README.md create mode 100644 node_modules/smee-client/node_modules/eventsource/example/eventsource-polyfill.js create mode 100644 node_modules/smee-client/node_modules/eventsource/example/index.html create mode 100644 node_modules/smee-client/node_modules/eventsource/example/sse-client.js create mode 100644 node_modules/smee-client/node_modules/eventsource/example/sse-server.js create mode 100644 node_modules/smee-client/node_modules/eventsource/lib/eventsource-polyfill.js create mode 100644 node_modules/smee-client/node_modules/eventsource/lib/eventsource.js create mode 100644 node_modules/smee-client/node_modules/eventsource/package.json create mode 100644 
node_modules/smee-client/package.json create mode 100644 node_modules/string_decoder/LICENSE create mode 100644 node_modules/string_decoder/README.md create mode 100644 node_modules/string_decoder/lib/string_decoder.js create mode 100644 node_modules/string_decoder/node_modules/safe-buffer/LICENSE create mode 100644 node_modules/string_decoder/node_modules/safe-buffer/README.md create mode 100644 node_modules/string_decoder/node_modules/safe-buffer/index.d.ts create mode 100644 node_modules/string_decoder/node_modules/safe-buffer/index.js create mode 100644 node_modules/string_decoder/node_modules/safe-buffer/package.json create mode 100644 node_modules/string_decoder/package.json create mode 100644 node_modules/superagent/HISTORY.md create mode 100644 node_modules/superagent/LICENSE create mode 100644 node_modules/superagent/README.md create mode 100644 node_modules/superagent/dist/superagent.js create mode 100644 node_modules/superagent/dist/superagent.min.js create mode 100644 node_modules/superagent/lib/agent-base.js create mode 100644 node_modules/superagent/lib/client.js create mode 100644 node_modules/superagent/lib/node/agent.js create mode 100644 node_modules/superagent/lib/node/http2wrapper.js create mode 100644 node_modules/superagent/lib/node/index.js create mode 100644 node_modules/superagent/lib/node/parsers/image.js create mode 100644 node_modules/superagent/lib/node/parsers/index.js create mode 100644 node_modules/superagent/lib/node/parsers/json.js create mode 100644 node_modules/superagent/lib/node/parsers/text.js create mode 100644 node_modules/superagent/lib/node/parsers/urlencoded.js create mode 100644 node_modules/superagent/lib/node/response.js create mode 100644 node_modules/superagent/lib/node/unzip.js create mode 100644 node_modules/superagent/lib/request-base.js create mode 100644 node_modules/superagent/lib/response-base.js create mode 100644 node_modules/superagent/lib/utils.js create mode 100644 node_modules/superagent/node_modules/debug/LICENSE create mode 100644 node_modules/superagent/node_modules/debug/README.md create mode 100644 node_modules/superagent/node_modules/debug/package.json create mode 100644 node_modules/superagent/node_modules/debug/src/browser.js create mode 100644 node_modules/superagent/node_modules/debug/src/common.js create mode 100644 node_modules/superagent/node_modules/debug/src/index.js create mode 100644 node_modules/superagent/node_modules/debug/src/node.js create mode 100644 node_modules/superagent/node_modules/ms/index.js create mode 100644 node_modules/superagent/node_modules/ms/license.md create mode 100644 node_modules/superagent/node_modules/ms/package.json create mode 100644 node_modules/superagent/node_modules/ms/readme.md create mode 100644 node_modules/superagent/package.json create mode 100644 node_modules/util-deprecate/History.md create mode 100644 node_modules/util-deprecate/LICENSE create mode 100644 node_modules/util-deprecate/README.md create mode 100644 node_modules/util-deprecate/browser.js create mode 100644 node_modules/util-deprecate/node.js create mode 100644 node_modules/util-deprecate/package.json create mode 100644 node_modules/validator/LICENSE create mode 100644 node_modules/validator/README.md create mode 100644 node_modules/validator/es/index.js create mode 100644 node_modules/validator/es/lib/alpha.js create mode 100644 node_modules/validator/es/lib/blacklist.js create mode 100644 node_modules/validator/es/lib/contains.js create mode 100644 node_modules/validator/es/lib/equals.js create mode 100644 
node_modules/validator/es/lib/escape.js create mode 100644 node_modules/validator/es/lib/isAfter.js create mode 100644 node_modules/validator/es/lib/isAlpha.js create mode 100644 node_modules/validator/es/lib/isAlphanumeric.js create mode 100644 node_modules/validator/es/lib/isAscii.js create mode 100644 node_modules/validator/es/lib/isBIC.js create mode 100644 node_modules/validator/es/lib/isBase32.js create mode 100644 node_modules/validator/es/lib/isBase58.js create mode 100644 node_modules/validator/es/lib/isBase64.js create mode 100644 node_modules/validator/es/lib/isBefore.js create mode 100644 node_modules/validator/es/lib/isBoolean.js create mode 100644 node_modules/validator/es/lib/isBtcAddress.js create mode 100644 node_modules/validator/es/lib/isByteLength.js create mode 100644 node_modules/validator/es/lib/isCreditCard.js create mode 100755 node_modules/validator/es/lib/isCurrency.js create mode 100644 node_modules/validator/es/lib/isDataURI.js create mode 100644 node_modules/validator/es/lib/isDate.js create mode 100644 node_modules/validator/es/lib/isDecimal.js create mode 100644 node_modules/validator/es/lib/isDivisibleBy.js create mode 100644 node_modules/validator/es/lib/isEAN.js create mode 100644 node_modules/validator/es/lib/isEmail.js create mode 100644 node_modules/validator/es/lib/isEmpty.js create mode 100644 node_modules/validator/es/lib/isEthereumAddress.js create mode 100644 node_modules/validator/es/lib/isFQDN.js create mode 100644 node_modules/validator/es/lib/isFloat.js create mode 100644 node_modules/validator/es/lib/isFullWidth.js create mode 100644 node_modules/validator/es/lib/isHSL.js create mode 100644 node_modules/validator/es/lib/isHalfWidth.js create mode 100644 node_modules/validator/es/lib/isHash.js create mode 100644 node_modules/validator/es/lib/isHexColor.js create mode 100644 node_modules/validator/es/lib/isHexadecimal.js create mode 100644 node_modules/validator/es/lib/isIBAN.js create mode 100644 node_modules/validator/es/lib/isIMEI.js create mode 100644 node_modules/validator/es/lib/isIP.js create mode 100644 node_modules/validator/es/lib/isIPRange.js create mode 100644 node_modules/validator/es/lib/isISBN.js create mode 100644 node_modules/validator/es/lib/isISIN.js create mode 100644 node_modules/validator/es/lib/isISO31661Alpha2.js create mode 100644 node_modules/validator/es/lib/isISO31661Alpha3.js create mode 100644 node_modules/validator/es/lib/isISO4217.js create mode 100644 node_modules/validator/es/lib/isISO8601.js create mode 100644 node_modules/validator/es/lib/isISRC.js create mode 100644 node_modules/validator/es/lib/isISSN.js create mode 100644 node_modules/validator/es/lib/isIdentityCard.js create mode 100644 node_modules/validator/es/lib/isIn.js create mode 100644 node_modules/validator/es/lib/isInt.js create mode 100644 node_modules/validator/es/lib/isJSON.js create mode 100644 node_modules/validator/es/lib/isJWT.js create mode 100644 node_modules/validator/es/lib/isLatLong.js create mode 100644 node_modules/validator/es/lib/isLength.js create mode 100644 node_modules/validator/es/lib/isLicensePlate.js create mode 100644 node_modules/validator/es/lib/isLocale.js create mode 100644 node_modules/validator/es/lib/isLowercase.js create mode 100644 node_modules/validator/es/lib/isMACAddress.js create mode 100644 node_modules/validator/es/lib/isMD5.js create mode 100644 node_modules/validator/es/lib/isMagnetURI.js create mode 100644 node_modules/validator/es/lib/isMimeType.js create mode 100644 
node_modules/validator/es/lib/isMobilePhone.js create mode 100644 node_modules/validator/es/lib/isMongoId.js create mode 100644 node_modules/validator/es/lib/isMultibyte.js create mode 100644 node_modules/validator/es/lib/isNumeric.js create mode 100644 node_modules/validator/es/lib/isOctal.js create mode 100644 node_modules/validator/es/lib/isPassportNumber.js create mode 100644 node_modules/validator/es/lib/isPort.js create mode 100644 node_modules/validator/es/lib/isPostalCode.js create mode 100644 node_modules/validator/es/lib/isRFC3339.js create mode 100644 node_modules/validator/es/lib/isRgbColor.js create mode 100644 node_modules/validator/es/lib/isSemVer.js create mode 100644 node_modules/validator/es/lib/isSlug.js create mode 100644 node_modules/validator/es/lib/isStrongPassword.js create mode 100644 node_modules/validator/es/lib/isSurrogatePair.js create mode 100644 node_modules/validator/es/lib/isTaxID.js create mode 100644 node_modules/validator/es/lib/isURL.js create mode 100644 node_modules/validator/es/lib/isUUID.js create mode 100644 node_modules/validator/es/lib/isUppercase.js create mode 100644 node_modules/validator/es/lib/isVAT.js create mode 100644 node_modules/validator/es/lib/isVariableWidth.js create mode 100644 node_modules/validator/es/lib/isWhitelisted.js create mode 100644 node_modules/validator/es/lib/ltrim.js create mode 100644 node_modules/validator/es/lib/matches.js create mode 100644 node_modules/validator/es/lib/normalizeEmail.js create mode 100644 node_modules/validator/es/lib/rtrim.js create mode 100644 node_modules/validator/es/lib/stripLow.js create mode 100644 node_modules/validator/es/lib/toBoolean.js create mode 100644 node_modules/validator/es/lib/toDate.js create mode 100644 node_modules/validator/es/lib/toFloat.js create mode 100644 node_modules/validator/es/lib/toInt.js create mode 100644 node_modules/validator/es/lib/trim.js create mode 100644 node_modules/validator/es/lib/unescape.js create mode 100644 node_modules/validator/es/lib/util/algorithms.js create mode 100644 node_modules/validator/es/lib/util/assertString.js create mode 100644 node_modules/validator/es/lib/util/includes.js create mode 100644 node_modules/validator/es/lib/util/merge.js create mode 100644 node_modules/validator/es/lib/util/multilineRegex.js create mode 100644 node_modules/validator/es/lib/util/toString.js create mode 100644 node_modules/validator/es/lib/util/typeOf.js create mode 100644 node_modules/validator/es/lib/whitelist.js create mode 100644 node_modules/validator/index.js create mode 100644 node_modules/validator/lib/alpha.js create mode 100644 node_modules/validator/lib/blacklist.js create mode 100644 node_modules/validator/lib/contains.js create mode 100644 node_modules/validator/lib/equals.js create mode 100644 node_modules/validator/lib/escape.js create mode 100644 node_modules/validator/lib/isAfter.js create mode 100644 node_modules/validator/lib/isAlpha.js create mode 100644 node_modules/validator/lib/isAlphanumeric.js create mode 100644 node_modules/validator/lib/isAscii.js create mode 100644 node_modules/validator/lib/isBIC.js create mode 100644 node_modules/validator/lib/isBase32.js create mode 100644 node_modules/validator/lib/isBase58.js create mode 100644 node_modules/validator/lib/isBase64.js create mode 100644 node_modules/validator/lib/isBefore.js create mode 100644 node_modules/validator/lib/isBoolean.js create mode 100644 node_modules/validator/lib/isBtcAddress.js create mode 100644 node_modules/validator/lib/isByteLength.js create mode 100644 
node_modules/validator/lib/isCreditCard.js create mode 100755 node_modules/validator/lib/isCurrency.js create mode 100644 node_modules/validator/lib/isDataURI.js create mode 100644 node_modules/validator/lib/isDate.js create mode 100644 node_modules/validator/lib/isDecimal.js create mode 100644 node_modules/validator/lib/isDivisibleBy.js create mode 100644 node_modules/validator/lib/isEAN.js create mode 100644 node_modules/validator/lib/isEmail.js create mode 100644 node_modules/validator/lib/isEmpty.js create mode 100644 node_modules/validator/lib/isEthereumAddress.js create mode 100644 node_modules/validator/lib/isFQDN.js create mode 100644 node_modules/validator/lib/isFloat.js create mode 100644 node_modules/validator/lib/isFullWidth.js create mode 100644 node_modules/validator/lib/isHSL.js create mode 100644 node_modules/validator/lib/isHalfWidth.js create mode 100644 node_modules/validator/lib/isHash.js create mode 100644 node_modules/validator/lib/isHexColor.js create mode 100644 node_modules/validator/lib/isHexadecimal.js create mode 100644 node_modules/validator/lib/isIBAN.js create mode 100644 node_modules/validator/lib/isIMEI.js create mode 100644 node_modules/validator/lib/isIP.js create mode 100644 node_modules/validator/lib/isIPRange.js create mode 100644 node_modules/validator/lib/isISBN.js create mode 100644 node_modules/validator/lib/isISIN.js create mode 100644 node_modules/validator/lib/isISO31661Alpha2.js create mode 100644 node_modules/validator/lib/isISO31661Alpha3.js create mode 100644 node_modules/validator/lib/isISO4217.js create mode 100644 node_modules/validator/lib/isISO8601.js create mode 100644 node_modules/validator/lib/isISRC.js create mode 100644 node_modules/validator/lib/isISSN.js create mode 100644 node_modules/validator/lib/isIdentityCard.js create mode 100644 node_modules/validator/lib/isIn.js create mode 100644 node_modules/validator/lib/isInt.js create mode 100644 node_modules/validator/lib/isJSON.js create mode 100644 node_modules/validator/lib/isJWT.js create mode 100644 node_modules/validator/lib/isLatLong.js create mode 100644 node_modules/validator/lib/isLength.js create mode 100644 node_modules/validator/lib/isLicensePlate.js create mode 100644 node_modules/validator/lib/isLocale.js create mode 100644 node_modules/validator/lib/isLowercase.js create mode 100644 node_modules/validator/lib/isMACAddress.js create mode 100644 node_modules/validator/lib/isMD5.js create mode 100644 node_modules/validator/lib/isMagnetURI.js create mode 100644 node_modules/validator/lib/isMimeType.js create mode 100644 node_modules/validator/lib/isMobilePhone.js create mode 100644 node_modules/validator/lib/isMongoId.js create mode 100644 node_modules/validator/lib/isMultibyte.js create mode 100644 node_modules/validator/lib/isNumeric.js create mode 100644 node_modules/validator/lib/isOctal.js create mode 100644 node_modules/validator/lib/isPassportNumber.js create mode 100644 node_modules/validator/lib/isPort.js create mode 100644 node_modules/validator/lib/isPostalCode.js create mode 100644 node_modules/validator/lib/isRFC3339.js create mode 100644 node_modules/validator/lib/isRgbColor.js create mode 100644 node_modules/validator/lib/isSemVer.js create mode 100644 node_modules/validator/lib/isSlug.js create mode 100644 node_modules/validator/lib/isStrongPassword.js create mode 100644 node_modules/validator/lib/isSurrogatePair.js create mode 100644 node_modules/validator/lib/isTaxID.js create mode 100644 node_modules/validator/lib/isURL.js create mode 100644 
node_modules/validator/lib/isUUID.js create mode 100644 node_modules/validator/lib/isUppercase.js create mode 100644 node_modules/validator/lib/isVAT.js create mode 100644 node_modules/validator/lib/isVariableWidth.js create mode 100644 node_modules/validator/lib/isWhitelisted.js create mode 100644 node_modules/validator/lib/ltrim.js create mode 100644 node_modules/validator/lib/matches.js create mode 100644 node_modules/validator/lib/normalizeEmail.js create mode 100644 node_modules/validator/lib/rtrim.js create mode 100644 node_modules/validator/lib/stripLow.js create mode 100644 node_modules/validator/lib/toBoolean.js create mode 100644 node_modules/validator/lib/toDate.js create mode 100644 node_modules/validator/lib/toFloat.js create mode 100644 node_modules/validator/lib/toInt.js create mode 100644 node_modules/validator/lib/trim.js create mode 100644 node_modules/validator/lib/unescape.js create mode 100644 node_modules/validator/lib/util/algorithms.js create mode 100644 node_modules/validator/lib/util/assertString.js create mode 100644 node_modules/validator/lib/util/includes.js create mode 100644 node_modules/validator/lib/util/merge.js create mode 100644 node_modules/validator/lib/util/multilineRegex.js create mode 100644 node_modules/validator/lib/util/toString.js create mode 100644 node_modules/validator/lib/util/typeOf.js create mode 100644 node_modules/validator/lib/whitelist.js create mode 100644 node_modules/validator/package.json create mode 100644 node_modules/validator/validator.js create mode 100644 node_modules/validator/validator.min.js create mode 100644 node_modules/wrappy/LICENSE create mode 100644 node_modules/wrappy/README.md create mode 100644 node_modules/wrappy/package.json create mode 100644 node_modules/wrappy/wrappy.js create mode 100644 node_modules/yallist/LICENSE create mode 100644 node_modules/yallist/README.md create mode 100644 node_modules/yallist/iterator.js create mode 100644 node_modules/yallist/package.json create mode 100644 node_modules/yallist/yallist.js create mode 100644 package-lock.json create mode 100644 package.json diff --git a/node_modules/.bin/mime b/node_modules/.bin/mime new file mode 120000 index 0000000..fbb7ee0 --- /dev/null +++ b/node_modules/.bin/mime @@ -0,0 +1 @@ +../mime/cli.js \ No newline at end of file diff --git a/node_modules/.bin/semver b/node_modules/.bin/semver new file mode 120000 index 0000000..5aaadf4 --- /dev/null +++ b/node_modules/.bin/semver @@ -0,0 +1 @@ +../semver/bin/semver.js \ No newline at end of file diff --git a/node_modules/.bin/smee b/node_modules/.bin/smee new file mode 120000 index 0000000..e705766 --- /dev/null +++ b/node_modules/.bin/smee @@ -0,0 +1 @@ +../smee-client/bin/smee.js \ No newline at end of file diff --git a/node_modules/.package-lock.json b/node_modules/.package-lock.json new file mode 100644 index 0000000..d007e54 --- /dev/null +++ b/node_modules/.package-lock.json @@ -0,0 +1,599 @@ +{ + "name": "flutterflow-socket-example", + "lockfileVersion": 2, + "requires": true, + "packages": { + "node_modules/@octokit/openapi-types": { + "version": "14.0.0", + "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-14.0.0.tgz", + "integrity": "sha512-HNWisMYlR8VCnNurDU6os2ikx0s0VyEjDYHNS/h4cgb8DeOxQ0n72HyinUtdDVxJhFy3FWLGl0DJhfEWk3P5Iw==" + }, + "node_modules/@octokit/request-error": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-3.0.2.tgz", + "integrity": 
"sha512-WMNOFYrSaX8zXWoJg9u/pKgWPo94JXilMLb2VManNOby9EZxrQaBe/QSC4a1TzpAlpxofg2X/jMnCyZgL6y7eg==", + "dependencies": { + "@octokit/types": "^8.0.0", + "deprecation": "^2.0.0", + "once": "^1.4.0" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/@octokit/types": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-8.0.0.tgz", + "integrity": "sha512-65/TPpOJP1i3K4lBJMnWqPUJ6zuOtzhtagDvydAWbEXpbFYA0oMKKyLb95NFZZP0lSh/4b6K+DQlzvYQJQQePg==", + "dependencies": { + "@octokit/openapi-types": "^14.0.0" + } + }, + "node_modules/@octokit/webhooks": { + "version": "10.3.0", + "resolved": "https://registry.npmjs.org/@octokit/webhooks/-/webhooks-10.3.0.tgz", + "integrity": "sha512-6k4dNsTjNhoJNJ56RKkWCMXEj32Cgvy4hIsV3fhK60hKbEfI1/rkx7tF87Gqtf1Xg5UVQfFmYygaBPGQCr0TmQ==", + "dependencies": { + "@octokit/request-error": "^3.0.0", + "@octokit/webhooks-methods": "^3.0.0", + "@octokit/webhooks-types": "6.5.0", + "aggregate-error": "^3.1.0" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/@octokit/webhooks-methods": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@octokit/webhooks-methods/-/webhooks-methods-3.0.1.tgz", + "integrity": "sha512-XftYVcBxtzC2G05kdBNn9IYLtQ+Cz6ufKkjZd0DU/qGaZEFTPzM2OabXAWG5tvL0q/I+Exio1JnRiPfetiMSEw==", + "engines": { + "node": ">= 14" + } + }, + "node_modules/@octokit/webhooks-types": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/@octokit/webhooks-types/-/webhooks-types-6.5.0.tgz", + "integrity": "sha512-SNx1YKBvi9IQvXo/SQ0p+9OKO2HMdzpCWcKsYxpGW1tlkE9TojYiGnFfxcXddyrsK4mC1UiyXY8+NYjWjtkVmA==" + }, + "node_modules/@types/node": { + "version": "18.11.3", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.11.3.tgz", + "integrity": "sha512-fNjDQzzOsZeKZu5NATgXUPsaFaTxeRgFXoosrHivTl8RGeV733OLawXsGfEk9a8/tySyZUyiZ6E8LcjPFZ2y1A==", + "dev": true + }, + "node_modules/aggregate-error": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz", + "integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==", + "dependencies": { + "clean-stack": "^2.0.0", + "indent-string": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/asap": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz", + "integrity": "sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA==" + }, + "node_modules/asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" + }, + "node_modules/basic-auth": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/basic-auth/-/basic-auth-2.0.1.tgz", + "integrity": "sha512-NF+epuEdnUYVlGuhaxbbq+dvJttwLnGY+YixlXlME5KpQ5W3CnXA5cVTneY3SPbPDRkcjMbifrwmFYcClgOZeg==", + "dependencies": { + "safe-buffer": "5.1.2" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/call-bind": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz", + "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==", + "dependencies": { + "function-bind": "^1.1.1", + "get-intrinsic": "^1.0.2" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/clean-stack": { + "version": "2.2.0", + "resolved": 
"https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz", + "integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==", + "engines": { + "node": ">=6" + } + }, + "node_modules/combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "dependencies": { + "delayed-stream": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/commander": { + "version": "2.20.3", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", + "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==" + }, + "node_modules/component-emitter": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.3.0.tgz", + "integrity": "sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg==" + }, + "node_modules/cookiejar": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/cookiejar/-/cookiejar-2.1.3.tgz", + "integrity": "sha512-JxbCBUdrfr6AQjOXrxoTvAMJO4HBTUIlBzslcJPAz+/KT8yk53fXun51u+RenNYvad/+Vc2DIz5o9UxlCDymFQ==" + }, + "node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/depd": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", + "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/deprecation": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz", + "integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==" + }, + "node_modules/dezalgo": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/dezalgo/-/dezalgo-1.0.3.tgz", + "integrity": "sha512-K7i4zNfT2kgQz3GylDw40ot9GAE47sFZ9EXHFSPP6zONLgH6kWXE0KWJchkbQJLBkRazq4APwZ4OwiFFlT95OQ==", + "dependencies": { + "asap": "^2.0.0", + "wrappy": "1" + } + }, + "node_modules/ee-first": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", + "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==" + }, + "node_modules/eventsource": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/eventsource/-/eventsource-2.0.2.tgz", + "integrity": "sha512-IzUmBGPR3+oUG9dUeXynyNmf91/3zUSJg1lCktzKw47OXuhco54U3r9B7O4XX+Rb1Itm9OZ2b0RkTs10bICOxA==", + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/fast-safe-stringify": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz", + "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==" + }, + 
"node_modules/form-data": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", + "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/formidable": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/formidable/-/formidable-2.0.1.tgz", + "integrity": "sha512-rjTMNbp2BpfQShhFbR3Ruk3qk2y9jKpvMW78nJgx8QKtxjDVrwbZG+wvDOmVbifHyOUOQJXxqEy6r0faRrPzTQ==", + "dependencies": { + "dezalgo": "1.0.3", + "hexoid": "1.0.0", + "once": "1.4.0", + "qs": "6.9.3" + }, + "funding": { + "url": "https://ko-fi.com/tunnckoCore/commissions" + } + }, + "node_modules/formidable/node_modules/qs": { + "version": "6.9.3", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.9.3.tgz", + "integrity": "sha512-EbZYNarm6138UKKq46tdx08Yo/q9ZhFoAXAI1meAFd2GtbRDhbZY2WQSICskT0c5q99aFzLG1D4nvTk9tqfXIw==", + "engines": { + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/function-bind": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", + "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==" + }, + "node_modules/get-intrinsic": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.3.tgz", + "integrity": "sha512-QJVz1Tj7MS099PevUG5jvnt9tSkXN8K14dxQlikJuPt4uD9hHAHjLyLBiLR5zELelBdD9QNRAXZzsJx0WaDL9A==", + "dependencies": { + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.3" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/github-auto-sync": { + "resolved": "tools/github", + "link": true + }, + "node_modules/has": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", + "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "dependencies": { + "function-bind": "^1.1.1" + }, + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/has-symbols": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", + "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/hexoid": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/hexoid/-/hexoid-1.0.0.tgz", + "integrity": "sha512-QFLV0taWQOZtvIRIAdBChesmogZrtuXvVWsFHZTk2SU+anspqZ2vMnoLg7IE1+Uk16N19APic1BuF8bC8c2m5g==", + "engines": { + "node": ">=8" + } + }, + "node_modules/indent-string": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", + "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==", + "engines": { + "node": ">=8" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + }, + "node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + 
"integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/methods": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", + "integrity": "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime": { + "version": "2.6.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-2.6.0.tgz", + "integrity": "sha512-USPkMeET31rOMiarsBNIHZKLGgvKc/LrjofAnBlOttf5ajRvqiRA8QsenbcooctK6d6Ts6aqZXBA+XbkKthiQg==", + "bin": { + "mime": "cli.js" + }, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/morgan": { + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/morgan/-/morgan-1.10.0.tgz", + "integrity": "sha512-AbegBVI4sh6El+1gNwvD5YIck7nSA36weD7xvIxG4in80j/UoK8AEGaWnnz8v1GxonMCltmlNs5ZKbGvl9b1XQ==", + "dependencies": { + "basic-auth": "~2.0.1", + "debug": "2.6.9", + "depd": "~2.0.0", + "on-finished": "~2.3.0", + "on-headers": "~1.0.2" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" + }, + "node_modules/object-inspect": { + "version": "1.12.2", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.2.tgz", + "integrity": "sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ==", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/on-finished": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz", + "integrity": "sha512-ikqdkGAAyf/X/gPhXGvfgAytDZtDbr+bkNUJ0N9h5MI/dmdgCs3l6hoHrcUv41sRKew3jIwrp4qQDXiK99Utww==", + "dependencies": { + "ee-first": "1.1.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/on-headers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/on-headers/-/on-headers-1.0.2.tgz", + "integrity": "sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/prefix": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/prefix/-/prefix-1.0.0.tgz", + "integrity": "sha512-KtGt8TV4nQkYgO6wJ5phkBGAECZW5cs5vQhqnz1LlxGGd8y4LwpW3Bq5pofULjdiMXACDCxisDUhwtFXb7vHfQ==" + }, + "node_modules/qs": { + "version": 
"6.11.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz", + "integrity": "sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==", + "dependencies": { + "side-channel": "^1.0.4" + }, + "engines": { + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/readable-stream": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" + }, + "node_modules/semver": { + "version": "7.3.8", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz", + "integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==", + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/side-channel": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", + "integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==", + "dependencies": { + "call-bind": "^1.0.0", + "get-intrinsic": "^1.0.2", + "object-inspect": "^1.9.0" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/smee-client": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/smee-client/-/smee-client-1.2.3.tgz", + "integrity": "sha512-uDrU8u9/Ln7aRXyzGHgVaNUS8onHZZeSwQjCdkMoSL7U85xI+l+Y2NgjibkMJAyXkW7IAbb8rw9RMHIjS6lAwA==", + "dependencies": { + "commander": "^2.19.0", + "eventsource": "^1.1.0", + "morgan": "^1.9.1", + "superagent": "^7.1.3", + "validator": "^13.7.0" + }, + "bin": { + "smee": "bin/smee.js" + } + }, + "node_modules/smee-client/node_modules/eventsource": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/eventsource/-/eventsource-1.1.2.tgz", + "integrity": "sha512-xAH3zWhgO2/3KIniEKYPr8plNSzlGINOUqYj0m0u7AB81iRw8b/3E73W6AuU+6klLbaSFmZnaETQ2lXPfAydrA==", + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/string_decoder": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", + "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "dependencies": { + "safe-buffer": "~5.2.0" + } + }, + "node_modules/string_decoder/node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/superagent": { + "version": "7.1.5", + "resolved": "https://registry.npmjs.org/superagent/-/superagent-7.1.5.tgz", + "integrity": 
"sha512-HQYyGuDRFGmZ6GNC4hq2f37KnsY9Lr0/R1marNZTgMweVDQLTLJJ6DGQ9Tj/xVVs5HEnop9EMmTbywb5P30aqw==", + "dependencies": { + "component-emitter": "^1.3.0", + "cookiejar": "^2.1.3", + "debug": "^4.3.4", + "fast-safe-stringify": "^2.1.1", + "form-data": "^4.0.0", + "formidable": "^2.0.1", + "methods": "^1.1.2", + "mime": "^2.5.0", + "qs": "^6.10.3", + "readable-stream": "^3.6.0", + "semver": "^7.3.7" + }, + "engines": { + "node": ">=6.4.0 <13 || >=14" + } + }, + "node_modules/superagent/node_modules/debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/superagent/node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" + }, + "node_modules/validator": { + "version": "13.7.0", + "resolved": "https://registry.npmjs.org/validator/-/validator-13.7.0.tgz", + "integrity": "sha512-nYXQLCBkpJ8X6ltALua9dRrZDHVYxjJ1wgskNt1lH9fzGjs3tgojGSCBjmEPwkWS1y29+DrizMTW19Pr9uB2nw==", + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" + }, + "node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + }, + "tools/github": { + "version": "1.0.0", + "license": "MIT", + "dependencies": { + "@octokit/webhooks": "^10.3.0", + "eventsource": "^2.0.2", + "smee-client": "^1.2.3" + }, + "devDependencies": { + "@types/node": "^18.11.0" + } + } + } +} diff --git a/node_modules/@octokit/openapi-types/LICENSE b/node_modules/@octokit/openapi-types/LICENSE new file mode 100644 index 0000000..c61fbbe --- /dev/null +++ b/node_modules/@octokit/openapi-types/LICENSE @@ -0,0 +1,7 @@ +Copyright 2020 Gregor Martynus + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/node_modules/@octokit/openapi-types/README.md b/node_modules/@octokit/openapi-types/README.md new file mode 100644 index 0000000..9da833c --- /dev/null +++ b/node_modules/@octokit/openapi-types/README.md @@ -0,0 +1,17 @@ +# @octokit/openapi-types + +> Generated TypeScript definitions based on GitHub's OpenAPI spec + +This package is continously updated based on [GitHub's OpenAPI specification](https://github.com/github/rest-api-description/) + +## Usage + +```ts +import { components } from "@octokit/openapi-types"; + +type Repository = components["schemas"]["full-repository"]; +``` + +## License + +[MIT](LICENSE) diff --git a/node_modules/@octokit/openapi-types/package.json b/node_modules/@octokit/openapi-types/package.json new file mode 100644 index 0000000..3983580 --- /dev/null +++ b/node_modules/@octokit/openapi-types/package.json @@ -0,0 +1,20 @@ +{ + "name": "@octokit/openapi-types", + "description": "Generated TypeScript definitions based on GitHub's OpenAPI spec for api.github.com", + "repository": { + "type": "git", + "url": "https://github.com/octokit/openapi-types.ts.git", + "directory": "packages/openapi-types" + }, + "publishConfig": { + "access": "public" + }, + "version": "14.0.0", + "main": "", + "types": "types.d.ts", + "author": "Gregor Martynus (https://twitter.com/gr2m)", + "license": "MIT", + "octokit": { + "openapi-version": "8.0.0" + } +} diff --git a/node_modules/@octokit/openapi-types/types.d.ts b/node_modules/@octokit/openapi-types/types.d.ts new file mode 100644 index 0000000..f4bd1f0 --- /dev/null +++ b/node_modules/@octokit/openapi-types/types.d.ts @@ -0,0 +1,46693 @@ +/** + * This file was auto-generated by openapi-typescript. + * Do not make direct changes to the file. + */ + +export interface paths { + "/": { + /** Get Hypermedia links to resources accessible in GitHub's REST API */ + get: operations["meta/root"]; + }; + "/app": { + /** + * Returns the GitHub App associated with the authentication credentials used. To see how many app installations are associated with this GitHub App, see the `installations_count` in the response. For more details about your app's installations, see the "[List installations for the authenticated app](https://docs.github.com/rest/reference/apps#list-installations-for-the-authenticated-app)" endpoint. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + get: operations["apps/get-authenticated"]; + }; + "/app-manifests/{code}/conversions": { + /** Use this endpoint to complete the handshake necessary when implementing the [GitHub App Manifest flow](https://docs.github.com/apps/building-github-apps/creating-github-apps-from-a-manifest/). When you create a GitHub App with the manifest flow, you receive a temporary `code` used to retrieve the GitHub App's `id`, `pem` (private key), and `webhook_secret`. */ + post: operations["apps/create-from-manifest"]; + }; + "/app/hook/config": { + /** + * Returns the webhook configuration for a GitHub App. For more information about configuring a webhook for your app, see "[Creating a GitHub App](/developers/apps/creating-a-github-app)." 
+ * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + get: operations["apps/get-webhook-config-for-app"]; + /** + * Updates the webhook configuration for a GitHub App. For more information about configuring a webhook for your app, see "[Creating a GitHub App](/developers/apps/creating-a-github-app)." + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + patch: operations["apps/update-webhook-config-for-app"]; + }; + "/app/hook/deliveries": { + /** + * Returns a list of webhook deliveries for the webhook configured for a GitHub App. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + get: operations["apps/list-webhook-deliveries"]; + }; + "/app/hook/deliveries/{delivery_id}": { + /** + * Returns a delivery for the webhook configured for a GitHub App. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + get: operations["apps/get-webhook-delivery"]; + }; + "/app/hook/deliveries/{delivery_id}/attempts": { + /** + * Redeliver a delivery for the webhook configured for a GitHub App. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + post: operations["apps/redeliver-webhook-delivery"]; + }; + "/app/installations": { + /** + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + * + * The permissions the installation has are included under the `permissions` key. + */ + get: operations["apps/list-installations"]; + }; + "/app/installations/{installation_id}": { + /** + * Enables an authenticated GitHub App to find an installation's information using the installation id. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + get: operations["apps/get-installation"]; + /** + * Uninstalls a GitHub App on a user, organization, or business account. If you prefer to temporarily suspend an app's access to your account's resources, then we recommend the "[Suspend an app installation](https://docs.github.com/rest/reference/apps/#suspend-an-app-installation)" endpoint. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + delete: operations["apps/delete-installation"]; + }; + "/app/installations/{installation_id}/access_tokens": { + /** + * Creates an installation access token that enables a GitHub App to make authenticated API requests for the app's installation on an organization or individual account. Installation tokens expire one hour from the time you create them. Using an expired token produces a status code of `401 - Unauthorized`, and requires creating a new installation token. By default the installation token has access to all repositories that the installation can access. 
To restrict the access to specific repositories, you can provide the `repository_ids` when creating the token. When you omit `repository_ids`, the response does not contain the `repositories` key. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + post: operations["apps/create-installation-access-token"]; + }; + "/app/installations/{installation_id}/suspended": { + /** + * Suspends a GitHub App on a user, organization, or business account, which blocks the app from accessing the account's resources. When a GitHub App is suspended, the app's access to the GitHub API or webhook events is blocked for that account. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + put: operations["apps/suspend-installation"]; + /** + * Removes a GitHub App installation suspension. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + delete: operations["apps/unsuspend-installation"]; + }; + "/applications/{client_id}/grant": { + /** + * OAuth application owners can revoke a grant for their OAuth application and a specific user. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) when accessing this endpoint, using the OAuth application's `client_id` and `client_secret` as the username and password. You must also provide a valid OAuth `access_token` as an input parameter and the grant for the token's owner will be deleted. + * Deleting an OAuth application's grant will also delete all OAuth tokens associated with the application for the user. Once deleted, the application will have no access to the user's account and will no longer be listed on [the application authorizations settings screen within GitHub](https://github.com/settings/applications#authorized). + */ + delete: operations["apps/delete-authorization"]; + }; + "/applications/{client_id}/token": { + /** OAuth applications can use a special API method for checking OAuth token validity without exceeding the normal rate limits for failed login attempts. Authentication works differently with this particular endpoint. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) to use this endpoint, where the username is the OAuth application `client_id` and the password is its `client_secret`. Invalid tokens will return `404 NOT FOUND`. */ + post: operations["apps/check-token"]; + /** OAuth application owners can revoke a single token for an OAuth application. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) when accessing this endpoint, using the OAuth application's `client_id` and `client_secret` as the username and password. */ + delete: operations["apps/delete-token"]; + /** OAuth applications can use this API method to reset a valid OAuth token without end-user involvement. Applications must save the "token" property in the response because changes take effect immediately. 
You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) when accessing this endpoint, using the OAuth application's `client_id` and `client_secret` as the username and password. Invalid tokens will return `404 NOT FOUND`. */ + patch: operations["apps/reset-token"]; + }; + "/applications/{client_id}/token/scoped": { + /** Use a non-scoped user-to-server OAuth access token to create a repository scoped and/or permission scoped user-to-server OAuth access token. You can specify which repositories the token can access and which permissions are granted to the token. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) when accessing this endpoint, using the OAuth application's `client_id` and `client_secret` as the username and password. Invalid tokens will return `404 NOT FOUND`. */ + post: operations["apps/scope-token"]; + }; + "/apps/{app_slug}": { + /** + * **Note**: The `:app_slug` is just the URL-friendly name of your GitHub App. You can find this on the settings page for your GitHub App (e.g., `https://github.com/settings/apps/:app_slug`). + * + * If the GitHub App you specify is public, you can access this endpoint without authenticating. If the GitHub App you specify is private, you must authenticate with a [personal access token](https://docs.github.com/articles/creating-a-personal-access-token-for-the-command-line/) or an [installation access token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-an-installation) to access this endpoint. + */ + get: operations["apps/get-by-slug"]; + }; + "/codes_of_conduct": { + get: operations["codes-of-conduct/get-all-codes-of-conduct"]; + }; + "/codes_of_conduct/{key}": { + get: operations["codes-of-conduct/get-conduct-code"]; + }; + "/emojis": { + /** Lists all the emojis available to use on GitHub. */ + get: operations["emojis/get"]; + }; + "/enterprise-installation/{enterprise_or_org}/server-statistics": { + /** + * Returns aggregate usage metrics for your GitHub Enterprise Server 3.5+ instance for a specified time period up to 365 days. + * + * To use this endpoint, your GitHub Enterprise Server instance must be connected to GitHub Enterprise Cloud using GitHub Connect. You must enable Server Statistics, and for the API request provide your enterprise account name or organization name connected to the GitHub Enterprise Server. For more information, see "[Enabling Server Statistics for your enterprise](/admin/configuration/configuring-github-connect/enabling-server-statistics-for-your-enterprise)" in the GitHub Enterprise Server documentation. + * + * You'll need to use a personal access token: + * - If you connected your GitHub Enterprise Server to an enterprise account and enabled Server Statistics, you'll need a personal access token with the `read:enterprise` permission. + * - If you connected your GitHub Enterprise Server to an organization account and enabled Server Statistics, you'll need a personal access token with the `read:org` permission. + * + * For more information on creating a personal access token, see "[Creating a personal access token](/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token)." + */ + get: operations["enterprise-admin/get-server-statistics"]; + }; + "/enterprises/{enterprise}/actions/cache/usage": { + /** + * Gets the total GitHub Actions cache usage for an enterprise. 
+ * The data fetched using this API is refreshed approximately every 5 minutes, so values returned from this endpoint may take at least 5 minutes to get updated. + * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. + */ + get: operations["actions/get-actions-cache-usage-for-enterprise"]; + }; + "/enterprises/{enterprise}/actions/permissions": { + /** + * Gets the GitHub Actions permissions policy for organizations and allowed actions and reusable workflows in an enterprise. + * + * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. + */ + get: operations["enterprise-admin/get-github-actions-permissions-enterprise"]; + /** + * Sets the GitHub Actions permissions policy for organizations and allowed actions and reusable workflows in an enterprise. + * + * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. + */ + put: operations["enterprise-admin/set-github-actions-permissions-enterprise"]; + }; + "/enterprises/{enterprise}/actions/permissions/organizations": { + /** + * Lists the organizations that are selected to have GitHub Actions enabled in an enterprise. To use this endpoint, the enterprise permission policy for `enabled_organizations` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an enterprise](#set-github-actions-permissions-for-an-enterprise)." + * + * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. + */ + get: operations["enterprise-admin/list-selected-organizations-enabled-github-actions-enterprise"]; + /** + * Replaces the list of selected organizations that are enabled for GitHub Actions in an enterprise. To use this endpoint, the enterprise permission policy for `enabled_organizations` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an enterprise](#set-github-actions-permissions-for-an-enterprise)." + * + * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. + */ + put: operations["enterprise-admin/set-selected-organizations-enabled-github-actions-enterprise"]; + }; + "/enterprises/{enterprise}/actions/permissions/organizations/{org_id}": { + /** + * Adds an organization to the list of selected organizations that are enabled for GitHub Actions in an enterprise. To use this endpoint, the enterprise permission policy for `enabled_organizations` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an enterprise](#set-github-actions-permissions-for-an-enterprise)." + * + * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. + */ + put: operations["enterprise-admin/enable-selected-organization-github-actions-enterprise"]; + /** + * Removes an organization from the list of selected organizations that are enabled for GitHub Actions in an enterprise. To use this endpoint, the enterprise permission policy for `enabled_organizations` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an enterprise](#set-github-actions-permissions-for-an-enterprise)." + * + * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. 
+ */ + delete: operations["enterprise-admin/disable-selected-organization-github-actions-enterprise"]; + }; + "/enterprises/{enterprise}/actions/permissions/selected-actions": { + /** + * Gets the selected actions and reusable workflows that are allowed in an enterprise. To use this endpoint, the enterprise permission policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an enterprise](#set-github-actions-permissions-for-an-enterprise)." + * + * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. + */ + get: operations["enterprise-admin/get-allowed-actions-enterprise"]; + /** + * Sets the actions and reusable workflows that are allowed in an enterprise. To use this endpoint, the enterprise permission policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an enterprise](#set-github-actions-permissions-for-an-enterprise)." + * + * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. + */ + put: operations["enterprise-admin/set-allowed-actions-enterprise"]; + }; + "/enterprises/{enterprise}/actions/permissions/workflow": { + /** + * Gets the default workflow permissions granted to the `GITHUB_TOKEN` when running workflows in an enterprise, + * as well as whether GitHub Actions can submit approving pull request reviews. For more information, see + * "[Enforcing a policy for workflow permissions in your enterprise](https://docs.github.com/enterprise-cloud@latest/admin/policies/enforcing-policies-for-your-enterprise/enforcing-policies-for-github-actions-in-your-enterprise#enforcing-a-policy-for-workflow-permissions-in-your-enterprise)." + * + * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. + * GitHub Apps must have the `enterprise_administration:write` permission to use this endpoint. + */ + get: operations["actions/get-github-actions-default-workflow-permissions-enterprise"]; + /** + * Sets the default workflow permissions granted to the `GITHUB_TOKEN` when running workflows in an enterprise, and sets + * whether GitHub Actions can submit approving pull request reviews. For more information, see + * "[Enforcing a policy for workflow permissions in your enterprise](https://docs.github.com/enterprise-cloud@latest/admin/policies/enforcing-policies-for-your-enterprise/enforcing-policies-for-github-actions-in-your-enterprise#enforcing-a-policy-for-workflow-permissions-in-your-enterprise)." + * + * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. + * GitHub Apps must have the `enterprise_administration:write` permission to use this endpoint. + */ + put: operations["actions/set-github-actions-default-workflow-permissions-enterprise"]; + }; + "/enterprises/{enterprise}/actions/runner-groups": { + /** + * Lists all self-hosted runner groups for an enterprise. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + get: operations["enterprise-admin/list-self-hosted-runner-groups-for-enterprise"]; + /** + * Creates a new self-hosted runner group for an enterprise. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. 
+ */ + post: operations["enterprise-admin/create-self-hosted-runner-group-for-enterprise"]; + }; + "/enterprises/{enterprise}/actions/runner-groups/{runner_group_id}": { + /** + * Gets a specific self-hosted runner group for an enterprise. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + get: operations["enterprise-admin/get-self-hosted-runner-group-for-enterprise"]; + /** + * Deletes a self-hosted runner group for an enterprise. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + delete: operations["enterprise-admin/delete-self-hosted-runner-group-from-enterprise"]; + /** + * Updates the `name` and `visibility` of a self-hosted runner group in an enterprise. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + patch: operations["enterprise-admin/update-self-hosted-runner-group-for-enterprise"]; + }; + "/enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations": { + /** + * Lists the organizations with access to a self-hosted runner group. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + get: operations["enterprise-admin/list-org-access-to-self-hosted-runner-group-in-enterprise"]; + /** + * Replaces the list of organizations that have access to a self-hosted runner configured in an enterprise. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + put: operations["enterprise-admin/set-org-access-to-self-hosted-runner-group-in-enterprise"]; + }; + "/enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations/{org_id}": { + /** + * Adds an organization to the list of selected organizations that can access a self-hosted runner group. The runner group must have `visibility` set to `selected`. For more information, see "[Create a self-hosted runner group for an enterprise](#create-a-self-hosted-runner-group-for-an-enterprise)." + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + put: operations["enterprise-admin/add-org-access-to-self-hosted-runner-group-in-enterprise"]; + /** + * Removes an organization from the list of selected organizations that can access a self-hosted runner group. The runner group must have `visibility` set to `selected`. For more information, see "[Create a self-hosted runner group for an enterprise](#create-a-self-hosted-runner-group-for-an-enterprise)." + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + delete: operations["enterprise-admin/remove-org-access-to-self-hosted-runner-group-in-enterprise"]; + }; + "/enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners": { + /** + * Lists the self-hosted runners that are in a specific enterprise group. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + get: operations["enterprise-admin/list-self-hosted-runners-in-group-for-enterprise"]; + /** + * Replaces the list of self-hosted runners that are part of an enterprise runner group. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. 
+ */ + put: operations["enterprise-admin/set-self-hosted-runners-in-group-for-enterprise"]; + }; + "/enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners/{runner_id}": { + /** + * Adds a self-hosted runner to a runner group configured in an enterprise. + * + * You must authenticate using an access token with the `manage_runners:enterprise` + * scope to use this endpoint. + */ + put: operations["enterprise-admin/add-self-hosted-runner-to-group-for-enterprise"]; + /** + * Removes a self-hosted runner from a group configured in an enterprise. The runner is then returned to the default group. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + delete: operations["enterprise-admin/remove-self-hosted-runner-from-group-for-enterprise"]; + }; + "/enterprises/{enterprise}/actions/runners": { + /** + * Lists all self-hosted runners configured for an enterprise. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + get: operations["enterprise-admin/list-self-hosted-runners-for-enterprise"]; + }; + "/enterprises/{enterprise}/actions/runners/downloads": { + /** + * Lists binaries for the runner application that you can download and run. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + get: operations["enterprise-admin/list-runner-applications-for-enterprise"]; + }; + "/enterprises/{enterprise}/actions/runners/registration-token": { + /** + * Returns a token that you can pass to the `config` script. The token expires after one hour. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + * + * #### Example using registration token + * + * Configure your self-hosted runner, replacing `TOKEN` with the registration token provided by this endpoint. + * + * ``` + * ./config.sh --url https://github.com/enterprises/octo-enterprise --token TOKEN + * ``` + */ + post: operations["enterprise-admin/create-registration-token-for-enterprise"]; + }; + "/enterprises/{enterprise}/actions/runners/remove-token": { + /** + * Returns a token that you can pass to the `config` script to remove a self-hosted runner from an enterprise. The token expires after one hour. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + * + * #### Example using remove token + * + * To remove your self-hosted runner from an enterprise, replace `TOKEN` with the remove token provided by this + * endpoint. + * + * ``` + * ./config.sh remove --token TOKEN + * ``` + */ + post: operations["enterprise-admin/create-remove-token-for-enterprise"]; + }; + "/enterprises/{enterprise}/actions/runners/{runner_id}": { + /** + * Gets a specific self-hosted runner configured in an enterprise. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + get: operations["enterprise-admin/get-self-hosted-runner-for-enterprise"]; + /** + * Forces the removal of a self-hosted runner from an enterprise. You can use this endpoint to completely remove the runner when the machine you were using no longer exists. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. 
+ */ + delete: operations["enterprise-admin/delete-self-hosted-runner-from-enterprise"]; + }; + "/enterprises/{enterprise}/actions/runners/{runner_id}/labels": { + /** + * Lists all labels for a self-hosted runner configured in an enterprise. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + get: operations["enterprise-admin/list-labels-for-self-hosted-runner-for-enterprise"]; + /** + * Remove all previous custom labels and set the new custom labels for a specific + * self-hosted runner configured in an enterprise. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + put: operations["enterprise-admin/set-custom-labels-for-self-hosted-runner-for-enterprise"]; + /** + * Add custom labels to a self-hosted runner configured in an enterprise. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + post: operations["enterprise-admin/add-custom-labels-to-self-hosted-runner-for-enterprise"]; + /** + * Remove all custom labels from a self-hosted runner configured in an + * enterprise. Returns the remaining read-only labels from the runner. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + delete: operations["enterprise-admin/remove-all-custom-labels-from-self-hosted-runner-for-enterprise"]; + }; + "/enterprises/{enterprise}/actions/runners/{runner_id}/labels/{name}": { + /** + * Remove a custom label from a self-hosted runner configured + * in an enterprise. Returns the remaining labels from the runner. + * + * This endpoint returns a `404 Not Found` status if the custom label is not + * present on the runner. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + delete: operations["enterprise-admin/remove-custom-label-from-self-hosted-runner-for-enterprise"]; + }; + "/enterprises/{enterprise}/code-scanning/alerts": { + /** + * Lists code scanning alerts for the default branch for all eligible repositories in an enterprise. Eligible repositories are repositories that are owned by organizations that you own or for which you are a security manager. For more information, see "[Managing security managers in your organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization)." + * + * To use this endpoint, you must be a member of the enterprise, + * and you must use an access token with the `repo` scope or `security_events` scope. + */ + get: operations["code-scanning/list-alerts-for-enterprise"]; + }; + "/enterprises/{enterprise}/secret-scanning/alerts": { + /** + * Lists secret scanning alerts for eligible repositories in an enterprise, from newest to oldest. + * To use this endpoint, you must be a member of the enterprise, and you must use an access token with the `repo` scope or `security_events` scope. Alerts are only returned for organizations in the enterprise for which you are an organization owner or a [security manager](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization). 
+ */ + get: operations["secret-scanning/list-alerts-for-enterprise"]; + }; + "/enterprises/{enterprise}/settings/billing/advanced-security": { + /** + * Gets the GitHub Advanced Security active committers for an enterprise per repository. + * + * Each distinct user login across all repositories is counted as a single Advanced Security seat, so the `total_advanced_security_committers` is not the sum of active_users for each repository. + * + * The total number of repositories with committer information is tracked by the `total_count` field. + */ + get: operations["billing/get-github-advanced-security-billing-ghe"]; + }; + "/events": { + /** We delay the public events feed by five minutes, which means the most recent event returned by the public events API actually occurred at least five minutes ago. */ + get: operations["activity/list-public-events"]; + }; + "/feeds": { + /** + * GitHub provides several timeline resources in [Atom](http://en.wikipedia.org/wiki/Atom_(standard)) format. The Feeds API lists all the feeds available to the authenticated user: + * + * * **Timeline**: The GitHub global public timeline + * * **User**: The public timeline for any user, using [URI template](https://docs.github.com/rest/overview/resources-in-the-rest-api#hypermedia) + * * **Current user public**: The public timeline for the authenticated user + * * **Current user**: The private timeline for the authenticated user + * * **Current user actor**: The private timeline for activity created by the authenticated user + * * **Current user organizations**: The private timeline for the organizations the authenticated user is a member of. + * * **Security advisories**: A collection of public announcements that provide information about security-related vulnerabilities in software on GitHub. + * + * **Note**: Private feeds are only returned when [authenticating via Basic Auth](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) since current feed URIs use the older, non revocable auth tokens. + */ + get: operations["activity/get-feeds"]; + }; + "/gists": { + /** Lists the authenticated user's gists or if called anonymously, this endpoint returns all public gists: */ + get: operations["gists/list"]; + /** + * Allows you to add a new gist with one or more files. + * + * **Note:** Don't name your files "gistfile" with a numerical suffix. This is the format of the automatic naming scheme that Gist uses internally. + */ + post: operations["gists/create"]; + }; + "/gists/public": { + /** + * List public gists sorted by most recently updated to least recently updated. + * + * Note: With [pagination](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination), you can fetch up to 3000 gists. For example, you can fetch 100 pages with 30 gists per page or 30 pages with 100 gists per page. + */ + get: operations["gists/list-public"]; + }; + "/gists/starred": { + /** List the authenticated user's starred gists: */ + get: operations["gists/list-starred"]; + }; + "/gists/{gist_id}": { + get: operations["gists/get"]; + delete: operations["gists/delete"]; + /** Allows you to update or delete a gist file and rename gist files. Files from the previous version of the gist that aren't explicitly changed during an edit are unchanged. 
*/ + patch: operations["gists/update"]; + }; + "/gists/{gist_id}/comments": { + get: operations["gists/list-comments"]; + post: operations["gists/create-comment"]; + }; + "/gists/{gist_id}/comments/{comment_id}": { + get: operations["gists/get-comment"]; + delete: operations["gists/delete-comment"]; + patch: operations["gists/update-comment"]; + }; + "/gists/{gist_id}/commits": { + get: operations["gists/list-commits"]; + }; + "/gists/{gist_id}/forks": { + get: operations["gists/list-forks"]; + post: operations["gists/fork"]; + }; + "/gists/{gist_id}/star": { + get: operations["gists/check-is-starred"]; + /** Note that you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." */ + put: operations["gists/star"]; + delete: operations["gists/unstar"]; + }; + "/gists/{gist_id}/{sha}": { + get: operations["gists/get-revision"]; + }; + "/gitignore/templates": { + /** List all templates available to pass as an option when [creating a repository](https://docs.github.com/rest/reference/repos#create-a-repository-for-the-authenticated-user). */ + get: operations["gitignore/get-all-templates"]; + }; + "/gitignore/templates/{name}": { + /** + * The API also allows fetching the source of a single template. + * Use the raw [media type](https://docs.github.com/rest/overview/media-types/) to get the raw contents. + */ + get: operations["gitignore/get-template"]; + }; + "/installation/repositories": { + /** + * List repositories that an app installation can access. + * + * You must use an [installation access token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-an-installation) to access this endpoint. + */ + get: operations["apps/list-repos-accessible-to-installation"]; + }; + "/installation/token": { + /** + * Revokes the installation token you're using to authenticate as an installation and access this endpoint. + * + * Once an installation token is revoked, the token is invalidated and cannot be used. Other endpoints that require the revoked installation token must have a new installation token to work. You can create a new token using the "[Create an installation access token for an app](https://docs.github.com/rest/reference/apps#create-an-installation-access-token-for-an-app)" endpoint. + * + * You must use an [installation access token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-an-installation) to access this endpoint. + */ + delete: operations["apps/revoke-installation-access-token"]; + }; + "/issues": { + /** + * List issues assigned to the authenticated user across all visible repositories including owned repositories, member + * repositories, and organization repositories. You can use the `filter` query parameter to fetch issues that are not + * necessarily assigned to you. + * + * + * **Note**: GitHub's REST API v3 considers every pull request an issue, but not every issue is a pull request. For this + * reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by + * the `pull_request` key. Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull + * request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint. 
+ */ + get: operations["issues/list"]; + }; + "/licenses": { + get: operations["licenses/get-all-commonly-used"]; + }; + "/licenses/{license}": { + get: operations["licenses/get"]; + }; + "/markdown": { + post: operations["markdown/render"]; + }; + "/markdown/raw": { + /** You must send Markdown as plain text (using a `Content-Type` header of `text/plain` or `text/x-markdown`) to this endpoint, rather than using JSON format. In raw mode, [GitHub Flavored Markdown](https://github.github.com/gfm/) is not supported and Markdown will be rendered in plain format like a README.md file. Markdown content must be 400 KB or less. */ + post: operations["markdown/render-raw"]; + }; + "/marketplace_listing/accounts/{account_id}": { + /** + * Shows whether the user or organization account actively subscribes to a plan listed by the authenticated GitHub App. When someone submits a plan change that won't be processed until the end of their billing cycle, you will also see the upcoming pending change. + * + * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. + */ + get: operations["apps/get-subscription-plan-for-account"]; + }; + "/marketplace_listing/plans": { + /** + * Lists all plans that are part of your GitHub Marketplace listing. + * + * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. + */ + get: operations["apps/list-plans"]; + }; + "/marketplace_listing/plans/{plan_id}/accounts": { + /** + * Returns user and organization accounts associated with the specified plan, including free plans. For per-seat pricing, you see the list of accounts that have purchased the plan, including the number of seats purchased. When someone submits a plan change that won't be processed until the end of their billing cycle, you will also see the upcoming pending change. + * + * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. + */ + get: operations["apps/list-accounts-for-plan"]; + }; + "/marketplace_listing/stubbed/accounts/{account_id}": { + /** + * Shows whether the user or organization account actively subscribes to a plan listed by the authenticated GitHub App. When someone submits a plan change that won't be processed until the end of their billing cycle, you will also see the upcoming pending change. + * + * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. 
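The `/markdown/raw` description above asks for plain text rather than JSON; here is a minimal sketch using Node 18+ `fetch`. Authentication is omitted (a token only raises rate limits here), and the sample Markdown is an assumption.

```ts
// POST plain text (not JSON) to the raw Markdown renderer.
const res = await fetch("https://api.github.com/markdown/raw", {
  method: "POST",
  headers: {
    "Content-Type": "text/plain", // required by this endpoint
    Accept: "application/vnd.github+json",
  },
  body: "**Hello** from the raw Markdown endpoint",
});
console.log(await res.text()); // rendered HTML fragment
```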
+ */ + get: operations["apps/get-subscription-plan-for-account-stubbed"]; + }; + "/marketplace_listing/stubbed/plans": { + /** + * Lists all plans that are part of your GitHub Marketplace listing. + * + * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. + */ + get: operations["apps/list-plans-stubbed"]; + }; + "/marketplace_listing/stubbed/plans/{plan_id}/accounts": { + /** + * Returns repository and organization accounts associated with the specified plan, including free plans. For per-seat pricing, you see the list of accounts that have purchased the plan, including the number of seats purchased. When someone submits a plan change that won't be processed until the end of their billing cycle, you will also see the upcoming pending change. + * + * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. + */ + get: operations["apps/list-accounts-for-plan-stubbed"]; + }; + "/meta": { + /** + * Returns meta information about GitHub, including a list of GitHub's IP addresses. For more information, see "[About GitHub's IP addresses](https://docs.github.com/articles/about-github-s-ip-addresses/)." + * + * **Note:** The IP addresses shown in the documentation's response are only example values. You must always query the API directly to get the latest list of IP addresses. + */ + get: operations["meta/get"]; + }; + "/networks/{owner}/{repo}/events": { + get: operations["activity/list-public-events-for-repo-network"]; + }; + "/notifications": { + /** List all notifications for the current user, sorted by most recently updated. */ + get: operations["activity/list-notifications-for-authenticated-user"]; + /** Marks all notifications as "read" removes it from the [default view on GitHub](https://github.com/notifications). If the number of notifications is too large to complete in one request, you will receive a `202 Accepted` status and GitHub will run an asynchronous process to mark notifications as "read." To check whether any "unread" notifications remain, you can use the [List notifications for the authenticated user](https://docs.github.com/rest/reference/activity#list-notifications-for-the-authenticated-user) endpoint and pass the query parameter `all=false`. */ + put: operations["activity/mark-notifications-as-read"]; + }; + "/notifications/threads/{thread_id}": { + get: operations["activity/get-thread"]; + patch: operations["activity/mark-thread-as-read"]; + }; + "/notifications/threads/{thread_id}/subscription": { + /** + * This checks to see if the current user is subscribed to a thread. You can also [get a repository subscription](https://docs.github.com/rest/reference/activity#get-a-repository-subscription). + * + * Note that subscriptions are only generated if a user is participating in a conversation--for example, they've replied to the thread, were **@mentioned**, or manually subscribe to a thread. 
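To make the `202 Accepted` behaviour described above concrete, the sketch below marks notifications as read and, when GitHub defers the work, re-checks the list endpoint. It assumes Node 18+ `fetch` and a token in `GITHUB_TOKEN`; the `last_read_at` value is simply "now".

```ts
const headers = {
  Authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
  Accept: "application/vnd.github+json",
};

// Mark everything up to the current time as read.
const res = await fetch("https://api.github.com/notifications", {
  method: "PUT",
  headers: { ...headers, "Content-Type": "application/json" },
  body: JSON.stringify({ last_read_at: new Date().toISOString() }),
});

if (res.status === 202) {
  // Too many notifications to process in one request; GitHub finishes asynchronously.
  // Per the description above, poll the list endpoint to see whether unread items remain.
  const unread = await fetch("https://api.github.com/notifications?all=false", { headers });
  console.log("still unread:", ((await unread.json()) as unknown[]).length);
} else {
  console.log("marked as read:", res.status);
}
```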
+ */ + get: operations["activity/get-thread-subscription-for-authenticated-user"]; + /** + * If you are watching a repository, you receive notifications for all threads by default. Use this endpoint to ignore future notifications for threads until you comment on the thread or get an **@mention**. + * + * You can also use this endpoint to subscribe to threads that you are currently not receiving notifications for or to subscribed to threads that you have previously ignored. + * + * Unsubscribing from a conversation in a repository that you are not watching is functionally equivalent to the [Delete a thread subscription](https://docs.github.com/rest/reference/activity#delete-a-thread-subscription) endpoint. + */ + put: operations["activity/set-thread-subscription"]; + /** Mutes all future notifications for a conversation until you comment on the thread or get an **@mention**. If you are watching the repository of the thread, you will still receive notifications. To ignore future notifications for a repository you are watching, use the [Set a thread subscription](https://docs.github.com/rest/reference/activity#set-a-thread-subscription) endpoint and set `ignore` to `true`. */ + delete: operations["activity/delete-thread-subscription"]; + }; + "/octocat": { + /** Get the octocat as ASCII art */ + get: operations["meta/get-octocat"]; + }; + "/organizations": { + /** + * Lists all organizations, in the order that they were created on GitHub. + * + * **Note:** Pagination is powered exclusively by the `since` parameter. Use the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header) to get the URL for the next page of organizations. + */ + get: operations["orgs/list"]; + }; + "/organizations/{organization_id}/custom_roles": { + /** + * List the custom repository roles available in this organization. In order to see custom + * repository roles in an organization, the authenticated user must be an organization owner. + * + * To use this endpoint the authenticated user must be an administrator for the organization or of an repository of the organizaiton and must use an access token with `admin:org repo` scope. + * GitHub Apps must have the `organization_custom_roles:read` organization permission to use this endpoint. + * + * For more information on custom repository roles, see "[Managing custom repository roles for an organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-custom-repository-roles-for-an-organization)". + */ + get: operations["orgs/list-custom-roles"]; + }; + "/organizations/{org}/codespaces/secrets": { + /** + * Lists all Codespaces secrets available at the organization-level without revealing their encrypted values. + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + get: operations["codespaces/list-org-secrets"]; + }; + "/organizations/{org}/codespaces/secrets/public-key": { + /** Gets a public key for an organization, which is required in order to encrypt secrets. You need to encrypt the value of a secret before you can create or update secrets. You must authenticate using an access token with the `admin:org` scope to use this endpoint. */ + get: operations["codespaces/get-org-public-key"]; + }; + "/organizations/{org}/codespaces/secrets/{secret_name}": { + /** + * Gets an organization secret without revealing its encrypted value. + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. 
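Because `/organizations` is paginated exclusively by `since` and the description points at the `Link` header, the following sketch walks a few pages by following `rel="next"`. Node 18+ `fetch` is assumed; unauthenticated requests work but are tightly rate limited.

```ts
const headers = { Accept: "application/vnd.github+json" };
let url: string | null = "https://api.github.com/organizations?per_page=100";
let pages = 0;

// Follow the Link header's rel="next", which already carries the correct `since` cursor.
while (url && pages++ < 3) {
  const res = await fetch(url, { headers });
  const orgs = (await res.json()) as Array<{ id: number; login: string }>;
  for (const org of orgs) console.log(org.id, org.login);

  const match = /<([^>]+)>;\s*rel="next"/.exec(res.headers.get("link") ?? "");
  url = match ? match[1] : null;
}
```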
+ */ + get: operations["codespaces/get-org-secret"]; + /** + * Creates or updates an organization secret with an encrypted value. Encrypt your secret using + * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access + * token with the `admin:org` scope to use this endpoint. + * + * #### Example encrypting a secret using Node.js + * + * Encrypt your secret using the [libsodium-wrappers](https://www.npmjs.com/package/libsodium-wrappers) library. + * + * ``` + * // Written with ❤️ by PSJ and free to use under The Unlicense. + * const sodium=require('libsodium-wrappers') + * const secret = 'plain-text-secret' // replace with secret before running the script. + * const key = 'base64-encoded-public-key' // replace with the Base64 encoded public key. + * + * //Check if libsodium is ready and then proceed. + * + * sodium.ready.then( ()=>{ + * + * // Convert Secret & Base64 key to Uint8Array. + * let binkey= sodium.from_base64(key, sodium.base64_variants.ORIGINAL) //Equivalent of Buffer.from(key, 'base64') + * let binsec= sodium.from_string(secret) // Equivalent of Buffer.from(secret) + * + * //Encrypt the secret using LibSodium + * let encBytes= sodium.crypto_box_seal(binsec,binkey) // Similar to tweetsodium.seal(binsec,binkey) + * + * // Convert encrypted Uint8Array to Base64 + * let output=sodium.to_base64(encBytes, sodium.base64_variants.ORIGINAL) //Equivalent of Buffer.from(encBytes).toString('base64') + * + * console.log(output) + * }); + * ``` + * + * #### Example encrypting a secret using Python + * + * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/latest/public/#nacl-public-sealedbox) with Python 3. + * + * ``` + * from base64 import b64encode + * from nacl import encoding, public + * + * def encrypt(public_key: str, secret_value: str) -> str: + * """Encrypt a Unicode string using the public key.""" + * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) + * sealed_box = public.SealedBox(public_key) + * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) + * return b64encode(encrypted).decode("utf-8") + * ``` + * + * #### Example encrypting a secret using C# + * + * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. + * + * ``` + * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); + * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); + * + * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); + * + * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); + * ``` + * + * #### Example encrypting a secret using Ruby + * + * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. + * + * ```ruby + * require "rbnacl" + * require "base64" + * + * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") + * public_key = RbNaCl::PublicKey.new(key) + * + * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) + * encrypted_secret = box.encrypt("my_secret") + * + * # Print the base64 encoded secret + * puts Base64.strict_encode64(encrypted_secret) + * ``` + */ + put: operations["codespaces/create-or-update-org-secret"]; + /** Deletes an organization secret using the secret name. You must authenticate using an access token with the `admin:org` scope to use this endpoint. 
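Tying the Node encryption example above to the create-or-update call, the sketch below seals a value with `libsodium-wrappers` and PUTs it as an organization Codespaces secret. The org name, secret name, `visibility` value, and the `GITHUB_TOKEN` env var are assumptions; the request shape mirrors the public-key and secret endpoints listed in this section.

```ts
import sodium from "libsodium-wrappers";

const ORG = "my-org";                       // placeholder
const SECRET_NAME = "MY_CODESPACES_SECRET"; // placeholder
const headers = {
  Authorization: `Bearer ${process.env.GITHUB_TOKEN}`, // token with admin:org scope
  Accept: "application/vnd.github+json",
};

// 1. Get the organization public key used to seal secret values.
const keyRes = await fetch(
  `https://api.github.com/organizations/${ORG}/codespaces/secrets/public-key`,
  { headers },
);
const { key, key_id } = (await keyRes.json()) as { key: string; key_id: string };

// 2. Seal the plaintext, exactly as in the libsodium example above.
await sodium.ready;
const sealed = sodium.crypto_box_seal(
  sodium.from_string("plain-text-secret"),
  sodium.from_base64(key, sodium.base64_variants.ORIGINAL),
);
const encrypted_value = sodium.to_base64(sealed, sodium.base64_variants.ORIGINAL);

// 3. Create or update the secret with the sealed value and the key id.
const putRes = await fetch(
  `https://api.github.com/organizations/${ORG}/codespaces/secrets/${SECRET_NAME}`,
  {
    method: "PUT",
    headers: { ...headers, "Content-Type": "application/json" },
    body: JSON.stringify({ encrypted_value, key_id, visibility: "all" }),
  },
);
console.log(putRes.status); // 201 when created, 204 when updated
```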
*/ + delete: operations["codespaces/delete-org-secret"]; + }; + "/organizations/{org}/codespaces/secrets/{secret_name}/repositories": { + /** Lists all repositories that have been selected when the `visibility` for repository access to a secret is set to `selected`. You must authenticate using an access token with the `admin:org` scope to use this endpoint. */ + get: operations["codespaces/list-selected-repos-for-org-secret"]; + /** Replaces all repositories for an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/codespaces#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. */ + put: operations["codespaces/set-selected-repos-for-org-secret"]; + }; + "/organizations/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}": { + /** Adds a repository to an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/codespaces#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. */ + put: operations["codespaces/add-selected-repo-to-org-secret"]; + /** Removes a repository from an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/codespaces#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. */ + delete: operations["codespaces/remove-selected-repo-from-org-secret"]; + }; + "/orgs/{org}": { + /** + * To see many of the organization response values, you need to be an authenticated organization owner with the `admin:org` scope. When the value of `two_factor_requirement_enabled` is `true`, the organization requires all members, billing managers, and outside collaborators to enable [two-factor authentication](https://docs.github.com/articles/securing-your-account-with-two-factor-authentication-2fa/). + * + * GitHub Apps with the `Organization plan` permission can use this endpoint to retrieve information about an organization's GitHub plan. See "[Authenticating with GitHub Apps](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/)" for details. For an example response, see 'Response with GitHub plan information' below." + */ + get: operations["orgs/get"]; + /** + * **Parameter Deprecation Notice:** GitHub will replace and discontinue `members_allowed_repository_creation_type` in favor of more granular permissions. The new input parameters are `members_can_create_public_repositories`, `members_can_create_private_repositories` for all organizations and `members_can_create_internal_repositories` for organizations associated with an enterprise account using GitHub Enterprise Cloud or GitHub Enterprise Server 2.20+. For more information, see the [blog post](https://developer.github.com/changes/2019-12-03-internal-visibility-changes). + * + * Enables an authenticated organization owner with the `admin:org` scope to update the organization's profile and member privileges. 
+ */ + patch: operations["orgs/update"]; + }; + "/orgs/{org}/actions/cache/usage": { + /** + * Gets the total GitHub Actions cache usage for an organization. + * The data fetched using this API is refreshed approximately every 5 minutes, so values returned from this endpoint may take at least 5 minutes to get updated. + * You must authenticate using an access token with the `read:org` scope to use this endpoint. GitHub Apps must have the `organization_admistration:read` permission to use this endpoint. + */ + get: operations["actions/get-actions-cache-usage-for-org"]; + }; + "/orgs/{org}/actions/cache/usage-by-repository": { + /** + * Lists repositories and their GitHub Actions cache usage for an organization. + * The data fetched using this API is refreshed approximately every 5 minutes, so values returned from this endpoint may take at least 5 minutes to get updated. + * You must authenticate using an access token with the `read:org` scope to use this endpoint. GitHub Apps must have the `organization_admistration:read` permission to use this endpoint. + */ + get: operations["actions/get-actions-cache-usage-by-repo-for-org"]; + }; + "/orgs/{org}/actions/permissions": { + /** + * Gets the GitHub Actions permissions policy for repositories and allowed actions and reusable workflows in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + get: operations["actions/get-github-actions-permissions-organization"]; + /** + * Sets the GitHub Actions permissions policy for repositories and allowed actions and reusable workflows in an organization. + * + * If the organization belongs to an enterprise that has set restrictive permissions at the enterprise level, such as `allowed_actions` to `selected` actions and reusable workflows, then you cannot override them for the organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + put: operations["actions/set-github-actions-permissions-organization"]; + }; + "/orgs/{org}/actions/permissions/repositories": { + /** + * Lists the selected repositories that are enabled for GitHub Actions in an organization. To use this endpoint, the organization permission policy for `enabled_repositories` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)." + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + get: operations["actions/list-selected-repositories-enabled-github-actions-organization"]; + /** + * Replaces the list of selected repositories that are enabled for GitHub Actions in an organization. To use this endpoint, the organization permission policy for `enabled_repositories` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)." + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. 
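For the Actions permissions policy described above, here is a small sketch that restricts an organization to selected repositories and selected actions and reusable workflows. The org name and token are placeholders; the two body fields correspond to the `enabled_repositories` and `allowed_actions` settings this section refers to.

```ts
// Restrict Actions to selected repositories and selected actions/reusable workflows.
const res = await fetch("https://api.github.com/orgs/my-org/actions/permissions", {
  method: "PUT",
  headers: {
    Authorization: `Bearer ${process.env.GITHUB_TOKEN}`, // token with admin:org scope
    Accept: "application/vnd.github+json",
    "Content-Type": "application/json",
  },
  body: JSON.stringify({
    enabled_repositories: "selected", // then manage the list via .../permissions/repositories
    allowed_actions: "selected",      // then manage patterns via .../permissions/selected-actions
  }),
});
console.log(res.status); // 204 on success
```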
+ */ + put: operations["actions/set-selected-repositories-enabled-github-actions-organization"]; + }; + "/orgs/{org}/actions/permissions/repositories/{repository_id}": { + /** + * Adds a repository to the list of selected repositories that are enabled for GitHub Actions in an organization. To use this endpoint, the organization permission policy for `enabled_repositories` must be must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)." + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + put: operations["actions/enable-selected-repository-github-actions-organization"]; + /** + * Removes a repository from the list of selected repositories that are enabled for GitHub Actions in an organization. To use this endpoint, the organization permission policy for `enabled_repositories` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)." + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + delete: operations["actions/disable-selected-repository-github-actions-organization"]; + }; + "/orgs/{org}/actions/permissions/selected-actions": { + /** + * Gets the selected actions and reusable workflows that are allowed in an organization. To use this endpoint, the organization permission policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)."" + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + get: operations["actions/get-allowed-actions-organization"]; + /** + * Sets the actions and reusable workflows that are allowed in an organization. To use this endpoint, the organization permission policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)." + * + * If the organization belongs to an enterprise that has `selected` actions and reusable workflows set at the enterprise level, then you cannot override any of the enterprise's allowed actions and reusable workflows settings. + * + * To use the `patterns_allowed` setting for private repositories, the organization must belong to an enterprise. If the organization does not belong to an enterprise, then the `patterns_allowed` setting only applies to public repositories in the organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + put: operations["actions/set-allowed-actions-organization"]; + }; + "/orgs/{org}/actions/permissions/workflow": { + /** + * Gets the default workflow permissions granted to the `GITHUB_TOKEN` when running workflows in an organization, + * as well as whether GitHub Actions can submit approving pull request reviews. 
For more information, see + * "[Setting the permissions of the GITHUB_TOKEN for your organization](https://docs.github.com/organizations/managing-organization-settings/disabling-or-limiting-github-actions-for-your-organization#setting-the-permissions-of-the-github_token-for-your-organization)." + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + get: operations["actions/get-github-actions-default-workflow-permissions-organization"]; + /** + * Sets the default workflow permissions granted to the `GITHUB_TOKEN` when running workflows in an organization, and sets if GitHub Actions + * can submit approving pull request reviews. For more information, see + * "[Setting the permissions of the GITHUB_TOKEN for your organization](https://docs.github.com/organizations/managing-organization-settings/disabling-or-limiting-github-actions-for-your-organization#setting-the-permissions-of-the-github_token-for-your-organization)." + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + put: operations["actions/set-github-actions-default-workflow-permissions-organization"]; + }; + "/orgs/{org}/actions/runner-groups": { + /** + * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." + * + * Lists all self-hosted runner groups configured in an organization and inherited from an enterprise. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + get: operations["actions/list-self-hosted-runner-groups-for-org"]; + /** + * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud and GitHub Enterprise Server. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." + * + * Creates a new self-hosted runner group for an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + post: operations["actions/create-self-hosted-runner-group-for-org"]; + }; + "/orgs/{org}/actions/runner-groups/{runner_group_id}": { + /** + * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." + * + * Gets a specific self-hosted runner group for an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + get: operations["actions/get-self-hosted-runner-group-for-org"]; + /** + * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." + * + * Deletes a self-hosted runner group for an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + delete: operations["actions/delete-self-hosted-runner-group-from-org"]; + /** + * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. 
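For the default `GITHUB_TOKEN` workflow permissions just described, a sketch (same placeholder org and token) that sets read-only defaults and disallows Actions from approving pull request reviews:

```ts
const res = await fetch("https://api.github.com/orgs/my-org/actions/permissions/workflow", {
  method: "PUT",
  headers: {
    Authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
    Accept: "application/vnd.github+json",
    "Content-Type": "application/json",
  },
  body: JSON.stringify({
    default_workflow_permissions: "read",      // GITHUB_TOKEN gets read-only repo access
    can_approve_pull_request_reviews: false,   // Actions may not approve pull requests
  }),
});
console.log(res.status); // 204 on success
```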
For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." + * + * Updates the `name` and `visibility` of a self-hosted runner group in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + patch: operations["actions/update-self-hosted-runner-group-for-org"]; + }; + "/orgs/{org}/actions/runner-groups/{runner_group_id}/repositories": { + /** + * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud and GitHub Enterprise Server. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." + * + * Lists the repositories with access to a self-hosted runner group configured in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + get: operations["actions/list-repo-access-to-self-hosted-runner-group-in-org"]; + /** + * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." + * + * Replaces the list of repositories that have access to a self-hosted runner group configured in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + put: operations["actions/set-repo-access-to-self-hosted-runner-group-in-org"]; + }; + "/orgs/{org}/actions/runner-groups/{runner_group_id}/repositories/{repository_id}": { + /** + * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." + * + * + * Removes a repository from the list of selected repositories that can access a self-hosted runner group. The runner group must have `visibility` set to `selected`. For more information, see "[Create a self-hosted runner group for an organization](#create-a-self-hosted-runner-group-for-an-organization)." + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + delete: operations["actions/remove-repo-access-to-self-hosted-runner-group-in-org"]; + }; + "/orgs/{org}/actions/runner-groups/{runner_group_id}/runners": { + /** + * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." + * + * Lists self-hosted runners that are in a specific organization group. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + get: operations["actions/list-self-hosted-runners-in-group-for-org"]; + /** + * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." + * + * Replaces the list of self-hosted runners that are part of an organization runner group. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. 
+ */ + put: operations["actions/set-self-hosted-runners-in-group-for-org"]; + }; + "/orgs/{org}/actions/runner-groups/{runner_group_id}/runners/{runner_id}": { + /** + * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." + * + * + * Adds a self-hosted runner to a runner group configured in an organization. + * + * You must authenticate using an access token with the `admin:org` + * scope to use this endpoint. + */ + put: operations["actions/add-self-hosted-runner-to-group-for-org"]; + /** + * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." + * + * + * Removes a self-hosted runner from a group configured in an organization. The runner is then returned to the default group. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + delete: operations["actions/remove-self-hosted-runner-from-group-for-org"]; + }; + "/orgs/{org}/actions/runners": { + /** + * Lists all self-hosted runners configured in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + get: operations["actions/list-self-hosted-runners-for-org"]; + }; + "/orgs/{org}/actions/runners/downloads": { + /** + * Lists binaries for the runner application that you can download and run. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + get: operations["actions/list-runner-applications-for-org"]; + }; + "/orgs/{org}/actions/runners/registration-token": { + /** + * Returns a token that you can pass to the `config` script. The token expires after one hour. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + * + * #### Example using registration token + * + * Configure your self-hosted runner, replacing `TOKEN` with the registration token provided by this endpoint. + * + * ``` + * ./config.sh --url https://github.com/octo-org --token TOKEN + * ``` + */ + post: operations["actions/create-registration-token-for-org"]; + }; + "/orgs/{org}/actions/runners/remove-token": { + /** + * Returns a token that you can pass to the `config` script to remove a self-hosted runner from an organization. The token expires after one hour. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + * + * #### Example using remove token + * + * To remove your self-hosted runner from an organization, replace `TOKEN` with the remove token provided by this + * endpoint. + * + * ``` + * ./config.sh remove --token TOKEN + * ``` + */ + post: operations["actions/create-remove-token-for-org"]; + }; + "/orgs/{org}/actions/runners/{runner_id}": { + /** + * Gets a specific self-hosted runner configured in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + get: operations["actions/get-self-hosted-runner-for-org"]; + /** + * Forces the removal of a self-hosted runner from an organization. You can use this endpoint to completely remove the runner when the machine you were using no longer exists. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. 
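The registration-token flow above ends in the `./config.sh` command; the sketch below requests a token and prints that command. The org name and token env var are placeholders, and the returned token expires after one hour as noted.

```ts
const res = await fetch(
  "https://api.github.com/orgs/my-org/actions/runners/registration-token",
  {
    method: "POST",
    headers: {
      Authorization: `Bearer ${process.env.GITHUB_TOKEN}`, // token with admin:org scope
      Accept: "application/vnd.github+json",
    },
  },
);
const { token, expires_at } = (await res.json()) as { token: string; expires_at: string };
// Paste this on the self-hosted runner machine, as in the example above.
console.log(`./config.sh --url https://github.com/my-org --token ${token}  # expires ${expires_at}`);
```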
+ */ + delete: operations["actions/delete-self-hosted-runner-from-org"]; + }; + "/orgs/{org}/actions/runners/{runner_id}/labels": { + /** + * Lists all labels for a self-hosted runner configured in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + get: operations["actions/list-labels-for-self-hosted-runner-for-org"]; + /** + * Remove all previous custom labels and set the new custom labels for a specific + * self-hosted runner configured in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + put: operations["actions/set-custom-labels-for-self-hosted-runner-for-org"]; + /** + * Add custom labels to a self-hosted runner configured in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + post: operations["actions/add-custom-labels-to-self-hosted-runner-for-org"]; + /** + * Remove all custom labels from a self-hosted runner configured in an + * organization. Returns the remaining read-only labels from the runner. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + delete: operations["actions/remove-all-custom-labels-from-self-hosted-runner-for-org"]; + }; + "/orgs/{org}/actions/runners/{runner_id}/labels/{name}": { + /** + * Remove a custom label from a self-hosted runner configured + * in an organization. Returns the remaining labels from the runner. + * + * This endpoint returns a `404 Not Found` status if the custom label is not + * present on the runner. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + delete: operations["actions/remove-custom-label-from-self-hosted-runner-for-org"]; + }; + "/orgs/{org}/actions/secrets": { + /** Lists all secrets available in an organization without revealing their encrypted values. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. */ + get: operations["actions/list-org-secrets"]; + }; + "/orgs/{org}/actions/secrets/public-key": { + /** Gets your public key, which you need to encrypt secrets. You need to encrypt a secret before you can create or update secrets. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. */ + get: operations["actions/get-org-public-key"]; + }; + "/orgs/{org}/actions/secrets/{secret_name}": { + /** Gets a single organization secret without revealing its encrypted value. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. */ + get: operations["actions/get-org-secret"]; + /** + * Creates or updates an organization secret with an encrypted value. Encrypt your secret using + * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access + * token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to + * use this endpoint. + * + * #### Example encrypting a secret using Node.js + * + * Encrypt your secret using the [tweetsodium](https://github.com/github/tweetsodium) library. 
+ * + * ``` + * const sodium = require('tweetsodium'); + * + * const key = "base64-encoded-public-key"; + * const value = "plain-text-secret"; + * + * // Convert the message and key to Uint8Array's (Buffer implements that interface) + * const messageBytes = Buffer.from(value); + * const keyBytes = Buffer.from(key, 'base64'); + * + * // Encrypt using LibSodium. + * const encryptedBytes = sodium.seal(messageBytes, keyBytes); + * + * // Base64 the encrypted secret + * const encrypted = Buffer.from(encryptedBytes).toString('base64'); + * + * console.log(encrypted); + * ``` + * + * + * #### Example encrypting a secret using Python + * + * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/latest/public/#nacl-public-sealedbox) with Python 3. + * + * ``` + * from base64 import b64encode + * from nacl import encoding, public + * + * def encrypt(public_key: str, secret_value: str) -> str: + * """Encrypt a Unicode string using the public key.""" + * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) + * sealed_box = public.SealedBox(public_key) + * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) + * return b64encode(encrypted).decode("utf-8") + * ``` + * + * #### Example encrypting a secret using C# + * + * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. + * + * ``` + * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); + * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); + * + * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); + * + * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); + * ``` + * + * #### Example encrypting a secret using Ruby + * + * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. + * + * ```ruby + * require "rbnacl" + * require "base64" + * + * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") + * public_key = RbNaCl::PublicKey.new(key) + * + * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) + * encrypted_secret = box.encrypt("my_secret") + * + * # Print the base64 encoded secret + * puts Base64.strict_encode64(encrypted_secret) + * ``` + */ + put: operations["actions/create-or-update-org-secret"]; + /** Deletes a secret in an organization using the secret name. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. */ + delete: operations["actions/delete-org-secret"]; + }; + "/orgs/{org}/actions/secrets/{secret_name}/repositories": { + /** Lists all repositories that have been selected when the `visibility` for repository access to a secret is set to `selected`. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. */ + get: operations["actions/list-selected-repos-for-org-secret"]; + /** Replaces all repositories for an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/actions#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. 
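Once a secret's visibility is `selected`, the endpoint described just above replaces its repository list in a single call; a hedged sketch follows, with the org, secret name, and repository IDs as placeholders.

```ts
// Point an organization Actions secret at an explicit list of repositories.
const res = await fetch(
  "https://api.github.com/orgs/my-org/actions/secrets/MY_SECRET/repositories",
  {
    method: "PUT",
    headers: {
      Authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
      Accept: "application/vnd.github+json",
      "Content-Type": "application/json",
    },
    body: JSON.stringify({ selected_repository_ids: [1296269, 1296270] }),
  },
);
console.log(res.status); // 204 on success
```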
*/ + put: operations["actions/set-selected-repos-for-org-secret"]; + }; + "/orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}": { + /** Adds a repository to an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/actions#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. */ + put: operations["actions/add-selected-repo-to-org-secret"]; + /** Removes a repository from an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/actions#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. */ + delete: operations["actions/remove-selected-repo-from-org-secret"]; + }; + "/orgs/{org}/blocks": { + /** List the users blocked by an organization. */ + get: operations["orgs/list-blocked-users"]; + }; + "/orgs/{org}/blocks/{username}": { + get: operations["orgs/check-blocked-user"]; + put: operations["orgs/block-user"]; + delete: operations["orgs/unblock-user"]; + }; + "/orgs/{org}/code-scanning/alerts": { + /** + * Lists code scanning alerts for the default branch for all eligible repositories in an organization. Eligible repositories are repositories that are owned by organizations that you own or for which you are a security manager. For more information, see "[Managing security managers in your organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization)." + * + * To use this endpoint, you must be an owner or security manager for the organization, and you must use an access token with the `repo` scope or `security_events` scope. + * + * For public repositories, you may instead use the `public_repo` scope. + * + * GitHub Apps must have the `security_events` read permission to use this endpoint. + */ + get: operations["code-scanning/list-alerts-for-org"]; + }; + "/orgs/{org}/codespaces": { + /** + * Lists the codespaces associated to a specified organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + get: operations["codespaces/list-in-organization"]; + }; + "/orgs/{org}/custom_roles": { + /** + * **Note**: This operation is in beta and is subject to change. + * + * Creates a custom repository role that can be used by all repositories owned by the organization. + * + * To use this endpoint the authenticated user must be an administrator for the organization and must use an access token with `admin:org` scope. + * GitHub Apps must have the `organization_custom_roles:write` organization permission to use this endpoint. + * + * For more information on custom repository roles, see "[Managing custom repository roles for an organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-custom-repository-roles-for-an-organization)." 
+ */ + post: operations["orgs/create-custom-role"]; + }; + "/orgs/{org}/custom_roles/{role_id}": { + /** + * **Note**: This operation is in beta and is subject to change. + * + * Deletes a custom role from an organization. Once the custom role has been deleted, any + * user, team, or invitation with the deleted custom role will be reassigned the inherited role. + * + * To use this endpoint the authenticated user must be an administrator for the organization and must use an access token with `admin:org` scope. + * GitHub Apps must have the `organization_custom_roles:write` organization permission to use this endpoint. + * + * For more information about custom repository roles, see "[Managing custom repository roles for an organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-custom-repository-roles-for-an-organization)." + */ + delete: operations["orgs/delete-custom-role"]; + /** + * **Note**: This operation is in beta and subject to change. + * + * Updates a custom repository role that can be used by all repositories owned by the organization. + * + * To use this endpoint the authenticated user must be an administrator for the organization and must use an access token with `admin:org` scope. + * GitHub Apps must have the `organization_custom_roles:write` organization permission to use this endpoint. + * + * For more information about custom repository roles, see "[Managing custom repository roles for an organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-custom-repository-roles-for-an-organization)." + */ + patch: operations["orgs/update-custom-role"]; + }; + "/orgs/{org}/dependabot/secrets": { + /** Lists all secrets available in an organization without revealing their encrypted values. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. */ + get: operations["dependabot/list-org-secrets"]; + }; + "/orgs/{org}/dependabot/secrets/public-key": { + /** Gets your public key, which you need to encrypt secrets. You need to encrypt a secret before you can create or update secrets. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. */ + get: operations["dependabot/get-org-public-key"]; + }; + "/orgs/{org}/dependabot/secrets/{secret_name}": { + /** Gets a single organization secret without revealing its encrypted value. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. */ + get: operations["dependabot/get-org-secret"]; + /** + * Creates or updates an organization secret with an encrypted value. Encrypt your secret using + * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access + * token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization + * permission to use this endpoint. + * + * #### Example encrypting a secret using Node.js + * + * Encrypt your secret using the [tweetsodium](https://github.com/github/tweetsodium) library. 
+ * + * ``` + * const sodium = require('tweetsodium'); + * + * const key = "base64-encoded-public-key"; + * const value = "plain-text-secret"; + * + * // Convert the message and key to Uint8Array's (Buffer implements that interface) + * const messageBytes = Buffer.from(value); + * const keyBytes = Buffer.from(key, 'base64'); + * + * // Encrypt using LibSodium. + * const encryptedBytes = sodium.seal(messageBytes, keyBytes); + * + * // Base64 the encrypted secret + * const encrypted = Buffer.from(encryptedBytes).toString('base64'); + * + * console.log(encrypted); + * ``` + * + * + * #### Example encrypting a secret using Python + * + * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/latest/public/#nacl-public-sealedbox) with Python 3. + * + * ``` + * from base64 import b64encode + * from nacl import encoding, public + * + * def encrypt(public_key: str, secret_value: str) -> str: + * """Encrypt a Unicode string using the public key.""" + * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) + * sealed_box = public.SealedBox(public_key) + * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) + * return b64encode(encrypted).decode("utf-8") + * ``` + * + * #### Example encrypting a secret using C# + * + * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. + * + * ``` + * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); + * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); + * + * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); + * + * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); + * ``` + * + * #### Example encrypting a secret using Ruby + * + * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. + * + * ```ruby + * require "rbnacl" + * require "base64" + * + * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") + * public_key = RbNaCl::PublicKey.new(key) + * + * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) + * encrypted_secret = box.encrypt("my_secret") + * + * # Print the base64 encoded secret + * puts Base64.strict_encode64(encrypted_secret) + * ``` + */ + put: operations["dependabot/create-or-update-org-secret"]; + /** Deletes a secret in an organization using the secret name. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. */ + delete: operations["dependabot/delete-org-secret"]; + }; + "/orgs/{org}/dependabot/secrets/{secret_name}/repositories": { + /** Lists all repositories that have been selected when the `visibility` for repository access to a secret is set to `selected`. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. */ + get: operations["dependabot/list-selected-repos-for-org-secret"]; + /** Replaces all repositories for an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/dependabot#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. 
GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. */ + put: operations["dependabot/set-selected-repos-for-org-secret"]; + }; + "/orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}": { + /** Adds a repository to an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/dependabot#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. */ + put: operations["dependabot/add-selected-repo-to-org-secret"]; + /** Removes a repository from an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/dependabot#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. */ + delete: operations["dependabot/remove-selected-repo-from-org-secret"]; + }; + "/orgs/{org}/events": { + get: operations["activity/list-public-org-events"]; + }; + "/orgs/{org}/failed_invitations": { + /** The return hash contains `failed_at` and `failed_reason` fields which represent the time at which the invitation failed and the reason for the failure. */ + get: operations["orgs/list-failed-invitations"]; + }; + "/orgs/{org}/fine_grained_permissions": { + /** + * **Note**: This operation is in beta and subject to change. + * + * Lists the fine-grained permissions available for an organization. + * + * To use this endpoint the authenticated user must be an administrator for the organization or of an repository of the organizaiton and must use an access token with `admin:org repo` scope. + * GitHub Apps must have the `organization_custom_roles:read` organization permission to use this endpoint. + */ + get: operations["orgs/list-fine-grained-permissions"]; + }; + "/orgs/{org}/hooks": { + get: operations["orgs/list-webhooks"]; + /** Here's how you can create a hook that posts payloads in JSON format: */ + post: operations["orgs/create-webhook"]; + }; + "/orgs/{org}/hooks/{hook_id}": { + /** Returns a webhook configured in an organization. To get only the webhook `config` properties, see "[Get a webhook configuration for an organization](/rest/reference/orgs#get-a-webhook-configuration-for-an-organization)." */ + get: operations["orgs/get-webhook"]; + delete: operations["orgs/delete-webhook"]; + /** Updates a webhook configured in an organization. When you update a webhook, the `secret` will be overwritten. If you previously had a `secret` set, you must provide the same `secret` or set a new `secret` or the secret will be removed. If you are only updating individual webhook `config` properties, use "[Update a webhook configuration for an organization](/rest/reference/orgs#update-a-webhook-configuration-for-an-organization)." */ + patch: operations["orgs/update-webhook"]; + }; + "/orgs/{org}/hooks/{hook_id}/config": { + /** + * Returns the webhook configuration for an organization. To get more information about the webhook, including the `active` state and `events`, use "[Get an organization webhook ](/rest/reference/orgs#get-an-organization-webhook)." 
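The hooks section above says "Here's how you can create a hook that posts payloads in JSON format"; the sketch below does that against the REST endpoint. The payload URL, events, and secret are placeholders, and the token needs the `admin:org_hook` scope mentioned for webhook configuration.

```ts
const res = await fetch("https://api.github.com/orgs/my-org/hooks", {
  method: "POST",
  headers: {
    Authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
    Accept: "application/vnd.github+json",
    "Content-Type": "application/json",
  },
  body: JSON.stringify({
    name: "web", // organization webhooks use the name "web"
    active: true,
    events: ["push", "pull_request"],
    config: {
      url: "https://example.com/webhooks/github", // placeholder payload URL
      content_type: "json",                       // deliver payloads as JSON
      secret: process.env.WEBHOOK_SECRET,
    },
  }),
});
console.log(res.status); // 201, with the new hook in the response body
```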
+ * + * Access tokens must have the `admin:org_hook` scope, and GitHub Apps must have the `organization_hooks:read` permission. + */ + get: operations["orgs/get-webhook-config-for-org"]; + /** + * Updates the webhook configuration for an organization. To update more information about the webhook, including the `active` state and `events`, use "[Update an organization webhook ](/rest/reference/orgs#update-an-organization-webhook)." + * + * Access tokens must have the `admin:org_hook` scope, and GitHub Apps must have the `organization_hooks:write` permission. + */ + patch: operations["orgs/update-webhook-config-for-org"]; + }; + "/orgs/{org}/hooks/{hook_id}/deliveries": { + /** Returns a list of webhook deliveries for a webhook configured in an organization. */ + get: operations["orgs/list-webhook-deliveries"]; + }; + "/orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}": { + /** Returns a delivery for a webhook configured in an organization. */ + get: operations["orgs/get-webhook-delivery"]; + }; + "/orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts": { + /** Redeliver a delivery for a webhook configured in an organization. */ + post: operations["orgs/redeliver-webhook-delivery"]; + }; + "/orgs/{org}/hooks/{hook_id}/pings": { + /** This will trigger a [ping event](https://docs.github.com/webhooks/#ping-event) to be sent to the hook. */ + post: operations["orgs/ping-webhook"]; + }; + "/orgs/{org}/installation": { + /** + * Enables an authenticated GitHub App to find the organization's installation information. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + get: operations["apps/get-org-installation"]; + }; + "/orgs/{org}/installations": { + /** Lists all GitHub Apps in an organization. The installation count includes all GitHub Apps installed on repositories in the organization. You must be an organization owner with `admin:read` scope to use this endpoint. */ + get: operations["orgs/list-app-installations"]; + }; + "/orgs/{org}/interaction-limits": { + /** Shows which type of GitHub user can interact with this organization and when the restriction expires. If there is no restrictions, you will see an empty response. */ + get: operations["interactions/get-restrictions-for-org"]; + /** Temporarily restricts interactions to a certain type of GitHub user in any public repository in the given organization. You must be an organization owner to set these restrictions. Setting the interaction limit at the organization level will overwrite any interaction limits that are set for individual repositories owned by the organization. */ + put: operations["interactions/set-restrictions-for-org"]; + /** Removes all interaction restrictions from public repositories in the given organization. You must be an organization owner to remove restrictions. */ + delete: operations["interactions/remove-restrictions-for-org"]; + }; + "/orgs/{org}/invitations": { + /** The return hash contains a `role` field which refers to the Organization Invitation role and will be one of the following values: `direct_member`, `admin`, `billing_manager`, `hiring_manager`, or `reinstate`. If the invitee is not a GitHub member, the `login` field in the return hash will be `null`. */ + get: operations["orgs/list-pending-invitations"]; + /** + * Invite people to an organization by using their GitHub user ID or their email address. 
In order to create invitations in an organization, the authenticated user must be an organization owner. + * + * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + post: operations["orgs/create-invitation"]; + }; + "/orgs/{org}/invitations/{invitation_id}": { + /** + * Cancel an organization invitation. In order to cancel an organization invitation, the authenticated user must be an organization owner. + * + * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). + */ + delete: operations["orgs/cancel-invitation"]; + }; + "/orgs/{org}/invitations/{invitation_id}/teams": { + /** List all teams associated with an invitation. In order to see invitations in an organization, the authenticated user must be an organization owner. */ + get: operations["orgs/list-invitation-teams"]; + }; + "/orgs/{org}/issues": { + /** + * List issues in an organization assigned to the authenticated user. + * + * **Note**: GitHub's REST API v3 considers every pull request an issue, but not every issue is a pull request. For this + * reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by + * the `pull_request` key. Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull + * request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint. + */ + get: operations["issues/list-for-org"]; + }; + "/orgs/{org}/members": { + /** List all users who are members of an organization. If the authenticated user is also a member of this organization then both concealed and public members will be returned. */ + get: operations["orgs/list-members"]; + }; + "/orgs/{org}/members/{username}": { + /** Check if a user is, publicly or privately, a member of the organization. */ + get: operations["orgs/check-membership-for-user"]; + /** Removing a user from this list will remove them from all teams and they will no longer have any access to the organization's repositories. */ + delete: operations["orgs/remove-member"]; + }; + "/orgs/{org}/members/{username}/codespaces/{codespace_name}": { + /** + * Deletes a user's codespace. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + delete: operations["codespaces/delete-from-organization"]; + }; + "/orgs/{org}/members/{username}/codespaces/{codespace_name}/stop": { + /** + * Stops a user's codespace. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + post: operations["codespaces/stop-in-organization"]; + }; + "/orgs/{org}/memberships/{username}": { + /** In order to get a user's membership with an organization, the authenticated user must be an organization member. The `state` parameter in the response can be used to identify the user's membership status. 
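+ *
+ * A minimal usage sketch (not part of the generated file; assumes an authenticated `octokit` client from `@octokit/core` and placeholder names):
+ *
+ * ```
+ * // Hypothetical example; "example-org" and "octocat" are placeholders.
+ * const { data: membership } = await octokit.request(
+ *   "GET /orgs/{org}/memberships/{username}",
+ *   { org: "example-org", username: "octocat" }
+ * );
+ * console.log(membership.state, membership.role);
+ * ```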
*/ + get: operations["orgs/get-membership-for-user"]; + /** + * Only authenticated organization owners can add a member to the organization or update the member's role. + * + * * If the authenticated user is _adding_ a member to the organization, the invited user will receive an email inviting them to the organization. The user's [membership status](https://docs.github.com/rest/reference/orgs#get-organization-membership-for-a-user) will be `pending` until they accept the invitation. + * + * * Authenticated users can _update_ a user's membership by passing the `role` parameter. If the authenticated user changes a member's role to `admin`, the affected user will receive an email notifying them that they've been made an organization owner. If the authenticated user changes an owner's role to `member`, no email will be sent. + * + * **Rate limits** + * + * To prevent abuse, the authenticated user is limited to 50 organization invitations per 24 hour period. If the organization is more than one month old or on a paid plan, the limit is 500 invitations per 24 hour period. + */ + put: operations["orgs/set-membership-for-user"]; + /** + * In order to remove a user's membership with an organization, the authenticated user must be an organization owner. + * + * If the specified user is an active member of the organization, this will remove them from the organization. If the specified user has been invited to the organization, this will cancel their invitation. The specified user will receive an email notification in both cases. + */ + delete: operations["orgs/remove-membership-for-user"]; + }; + "/orgs/{org}/migrations": { + /** Lists the most recent migrations. */ + get: operations["migrations/list-for-org"]; + /** Initiates the generation of a migration archive. */ + post: operations["migrations/start-for-org"]; + }; + "/orgs/{org}/migrations/{migration_id}": { + /** + * Fetches the status of a migration. + * + * The `state` of a migration can be one of the following values: + * + * * `pending`, which means the migration hasn't started yet. + * * `exporting`, which means the migration is in progress. + * * `exported`, which means the migration finished successfully. + * * `failed`, which means the migration failed. + */ + get: operations["migrations/get-status-for-org"]; + }; + "/orgs/{org}/migrations/{migration_id}/archive": { + /** Fetches the URL to a migration archive. */ + get: operations["migrations/download-archive-for-org"]; + /** Deletes a previous migration archive. Migration archives are automatically deleted after seven days. */ + delete: operations["migrations/delete-archive-for-org"]; + }; + "/orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock": { + /** Unlocks a repository that was locked for migration. You should unlock each migrated repository and [delete them](https://docs.github.com/rest/reference/repos#delete-a-repository) when the migration is complete and you no longer need the source data. */ + delete: operations["migrations/unlock-repo-for-org"]; + }; + "/orgs/{org}/migrations/{migration_id}/repositories": { + /** List all the repositories for this organization migration. */ + get: operations["migrations/list-repos-for-org"]; + }; + "/orgs/{org}/outside_collaborators": { + /** List all users who are outside collaborators of an organization. 
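+ *
+ * For illustration only (assumes an authenticated `octokit` client from `@octokit/core`; the org name is a placeholder), the list could be fetched like this:
+ *
+ * ```
+ * // Hypothetical sketch using the standard per_page query parameter.
+ * const { data: collaborators } = await octokit.request(
+ *   "GET /orgs/{org}/outside_collaborators",
+ *   { org: "example-org", per_page: 100 }
+ * );
+ * ```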
*/ + get: operations["orgs/list-outside-collaborators"]; + }; + "/orgs/{org}/outside_collaborators/{username}": { + /** When an organization member is converted to an outside collaborator, they'll only have access to the repositories that their current team membership allows. The user will no longer be a member of the organization. For more information, see "[Converting an organization member to an outside collaborator](https://docs.github.com/articles/converting-an-organization-member-to-an-outside-collaborator/)". Converting an organization member to an outside collaborator may be restricted by enterprise administrators. For more information, see "[Enforcing repository management policies in your enterprise](https://docs.github.com/enterprise-cloud@latest/admin/policies/enforcing-policies-for-your-enterprise/enforcing-repository-management-policies-in-your-enterprise#enforcing-a-policy-for-inviting-outside-collaborators-to-repositories)." */ + put: operations["orgs/convert-member-to-outside-collaborator"]; + /** Removing a user from this list will remove them from all the organization's repositories. */ + delete: operations["orgs/remove-outside-collaborator"]; + }; + "/orgs/{org}/packages": { + /** + * Lists all packages in an organization readable by the user. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + get: operations["packages/list-packages-for-organization"]; + }; + "/orgs/{org}/packages/{package_type}/{package_name}": { + /** + * Gets a specific package in an organization. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + get: operations["packages/get-package-for-organization"]; + /** + * Deletes an entire package in an organization. You cannot delete a public package if any version of the package has more than 5,000 downloads. In this scenario, contact GitHub support for further assistance. + * + * To use this endpoint, you must have admin permissions in the organization and authenticate using an access token with the `packages:read` and `packages:delete` scopes. In addition: + * - If `package_type` is not `container`, your token must also include the `repo` scope. + * - If `package_type` is `container`, you must also have admin permissions to the container you want to delete. + */ + delete: operations["packages/delete-package-for-org"]; + }; + "/orgs/{org}/packages/{package_type}/{package_name}/restore": { + /** + * Restores an entire package in an organization. + * + * You can restore a deleted package under the following conditions: + * - The package was deleted within the last 30 days. + * - The same package namespace and version is still available and not reused for a new package. If the same package namespace is not available, you will not be able to restore your package. In this scenario, to restore the deleted package, you must delete the new package that uses the deleted package's namespace first. + * + * To use this endpoint, you must have admin permissions in the organization and authenticate using an access token with the `packages:read` and `packages:write` scopes. In addition: + * - If `package_type` is not `container`, your token must also include the `repo` scope. 
+ * - If `package_type` is `container`, you must also have admin permissions to the container that you want to restore. + */ + post: operations["packages/restore-package-for-org"]; + }; + "/orgs/{org}/packages/{package_type}/{package_name}/versions": { + /** + * Lists package versions for a package owned by an organization. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + get: operations["packages/get-all-package-versions-for-package-owned-by-org"]; + }; + "/orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}": { + /** + * Gets a specific package version in an organization. + * + * You must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + get: operations["packages/get-package-version-for-organization"]; + /** + * Deletes a specific package version in an organization. If the package is public and the package version has more than 5,000 downloads, you cannot delete the package version. In this scenario, contact GitHub support for further assistance. + * + * To use this endpoint, you must have admin permissions in the organization and authenticate using an access token with the `packages:read` and `packages:delete` scopes. In addition: + * - If `package_type` is not `container`, your token must also include the `repo` scope. + * - If `package_type` is `container`, you must also have admin permissions to the container you want to delete. + */ + delete: operations["packages/delete-package-version-for-org"]; + }; + "/orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore": { + /** + * Restores a specific package version in an organization. + * + * You can restore a deleted package under the following conditions: + * - The package was deleted within the last 30 days. + * - The same package namespace and version is still available and not reused for a new package. If the same package namespace is not available, you will not be able to restore your package. In this scenario, to restore the deleted package, you must delete the new package that uses the deleted package's namespace first. + * + * To use this endpoint, you must have admin permissions in the organization and authenticate using an access token with the `packages:read` and `packages:write` scopes. In addition: + * - If `package_type` is not `container`, your token must also include the `repo` scope. + * - If `package_type` is `container`, you must also have admin permissions to the container that you want to restore. + */ + post: operations["packages/restore-package-version-for-org"]; + }; + "/orgs/{org}/projects": { + /** Lists the projects in an organization. Returns a `404 Not Found` status if projects are disabled in the organization. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. */ + get: operations["projects/list-for-org"]; + /** Creates an organization project board. Returns a `410 Gone` status if projects are disabled in the organization or if the organization does not have existing classic projects. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. 
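+ *
+ * A hedged usage sketch (not part of the generated types; assumes an authenticated `octokit` client from `@octokit/core` and placeholder values):
+ *
+ * ```
+ * // Hypothetical example; the project name and body are illustrative only.
+ * const { data: project } = await octokit.request("POST /orgs/{org}/projects", {
+ *   org: "example-org",
+ *   name: "Roadmap",
+ *   body: "Planning board for the next release",
+ * });
+ * ```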
*/ + post: operations["projects/create-for-org"]; + }; + "/orgs/{org}/public_members": { + /** Members of an organization can choose to have their membership publicized or not. */ + get: operations["orgs/list-public-members"]; + }; + "/orgs/{org}/public_members/{username}": { + get: operations["orgs/check-public-membership-for-user"]; + /** + * The user can publicize their own membership. (A user cannot publicize the membership for another user.) + * + * Note that you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." + */ + put: operations["orgs/set-public-membership-for-authenticated-user"]; + delete: operations["orgs/remove-public-membership-for-authenticated-user"]; + }; + "/orgs/{org}/repos": { + /** Lists repositories for the specified organization. */ + get: operations["repos/list-for-org"]; + /** + * Creates a new repository in the specified organization. The authenticated user must be a member of the organization. + * + * **OAuth scope requirements** + * + * When using [OAuth](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/), authorizations must include: + * + * * `public_repo` scope or `repo` scope to create a public repository. Note: For GitHub AE, use `repo` scope to create an internal repository. + * * `repo` scope to create a private repository. + */ + post: operations["repos/create-in-org"]; + }; + "/orgs/{org}/secret-scanning/alerts": { + /** + * Lists secret scanning alerts for eligible repositories in an organization, from newest to oldest. + * To use this endpoint, you must be an administrator or security manager for the organization, and you must use an access token with the `repo` scope or `security_events` scope. + * For public repositories, you may instead use the `public_repo` scope. + * + * GitHub Apps must have the `secret_scanning_alerts` read permission to use this endpoint. + */ + get: operations["secret-scanning/list-alerts-for-org"]; + }; + "/orgs/{org}/security-managers": { + /** + * Lists teams that are security managers for an organization. For more information, see "[Managing security managers in your organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization)." + * + * To use this endpoint, you must be an administrator or security manager for the organization, and you must use an access token with the `read:org` scope. + * + * GitHub Apps must have the `administration` organization read permission to use this endpoint. + */ + get: operations["orgs/list-security-manager-teams"]; + }; + "/orgs/{org}/security-managers/teams/{team_slug}": { + /** + * Adds a team as a security manager for an organization. For more information, see "[Managing security managers in your organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization)." + * + * To use this endpoint, you must be an administrator for the organization, and you must use an access token with the `write:org` scope. + * + * GitHub Apps must have the `administration` organization read-write permission to use this endpoint. + */ + put: operations["orgs/add-security-manager-team"]; + /** + * Removes the security manager role from a team for an organization.
For more information, see "[Managing security managers in your organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization)." + * + * To use this endpoint, you must be an administrator for the organization, and you must use an access token with the `admin:org` scope. + * + * GitHub Apps must have the `administration` organization read-write permission to use this endpoint. + */ + delete: operations["orgs/remove-security-manager-team"]; + }; + "/orgs/{org}/settings/billing/actions": { + /** + * Gets the summary of the free and paid GitHub Actions minutes used. + * + * Paid minutes only apply to workflows in private repositories that use GitHub-hosted runners. Minutes used is listed for each GitHub-hosted runner operating system. Any job re-runs are also included in the usage. The usage returned includes any minute multipliers for macOS and Windows runners, and is rounded up to the nearest whole minute. For more information, see "[Managing billing for GitHub Actions](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-actions)". + * + * Access tokens must have the `repo` or `admin:org` scope. + */ + get: operations["billing/get-github-actions-billing-org"]; + }; + "/orgs/{org}/settings/billing/advanced-security": { + /** + * Gets the GitHub Advanced Security active committers for an organization per repository. + * + * Each distinct user login across all repositories is counted as a single Advanced Security seat, so the `total_advanced_security_committers` is not the sum of `advanced_security_committers` for each repository. + * + * If this organization defers to an enterprise for billing, the `total_advanced_security_committers` returned from the organization API may include some users that are in more than one organization, so they will only consume a single Advanced Security seat at the enterprise level. + * + * The total number of repositories with committer information is tracked by the `total_count` field. + */ + get: operations["billing/get-github-advanced-security-billing-org"]; + }; + "/orgs/{org}/settings/billing/packages": { + /** + * Gets the free and paid storage used for GitHub Packages in gigabytes. + * + * Paid minutes only apply to packages stored for private repositories. For more information, see "[Managing billing for GitHub Packages](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-packages)." + * + * Access tokens must have the `repo` or `admin:org` scope. + */ + get: operations["billing/get-github-packages-billing-org"]; + }; + "/orgs/{org}/settings/billing/shared-storage": { + /** + * Gets the estimated paid and estimated total storage used for GitHub Actions and GitHub Packages. + * + * Paid minutes only apply to packages stored for private repositories. For more information, see "[Managing billing for GitHub Packages](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-packages)." + * + * Access tokens must have the `repo` or `admin:org` scope. + */ + get: operations["billing/get-shared-storage-billing-org"]; + }; + "/orgs/{org}/teams": { + /** Lists all teams in an organization that are visible to the authenticated user. */ + get: operations["teams/list"]; + /** + * To create a team, the authenticated user must be a member or owner of `{org}`.
By default, organization members can create teams. Organization owners can limit team creation to organization owners. For more information, see "[Setting team creation permissions](https://docs.github.com/en/articles/setting-team-creation-permissions-in-your-organization)." + * + * When you create a new team, you automatically become a team maintainer without explicitly adding yourself to the optional array of `maintainers`. For more information, see "[About teams](https://docs.github.com/en/github/setting-up-and-managing-organizations-and-teams/about-teams)". + */ + post: operations["teams/create"]; + }; + "/orgs/{org}/teams/{team_slug}": { + /** + * Gets a team using the team's `slug`. GitHub generates the `slug` from the team `name`. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}`. + */ + get: operations["teams/get-by-name"]; + /** + * To delete a team, the authenticated user must be an organization owner or team maintainer. + * + * If you are an organization owner, deleting a parent team will delete all of its child teams as well. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}`. + */ + delete: operations["teams/delete-in-org"]; + /** + * To edit a team, the authenticated user must either be an organization owner or a team maintainer. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PATCH /organizations/{org_id}/team/{team_id}`. + */ + patch: operations["teams/update-in-org"]; + }; + "/orgs/{org}/teams/{team_slug}/discussions": { + /** + * List all discussions on a team's page. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/discussions`. + */ + get: operations["teams/list-discussions-in-org"]; + /** + * Creates a new discussion post on a team's page. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `POST /organizations/{org_id}/team/{team_id}/discussions`. + */ + post: operations["teams/create-discussion-in-org"]; + }; + "/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}": { + /** + * Get a specific discussion on a team's page. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}`. + */ + get: operations["teams/get-discussion-in-org"]; + /** + * Delete a discussion from a team's page. 
OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}`. + */ + delete: operations["teams/delete-discussion-in-org"]; + /** + * Edits the title and body text of a discussion post. Only the parameters you provide are updated. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PATCH /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}`. + */ + patch: operations["teams/update-discussion-in-org"]; + }; + "/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments": { + /** + * List all comments on a team discussion. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}/comments`. + */ + get: operations["teams/list-discussion-comments-in-org"]; + /** + * Creates a new comment on a team discussion. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `POST /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}/comments`. + */ + post: operations["teams/create-discussion-comment-in-org"]; + }; + "/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}": { + /** + * Get a specific comment on a team discussion. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}/comments/{comment_number}`. + */ + get: operations["teams/get-discussion-comment-in-org"]; + /** + * Deletes a comment on a team discussion. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}/comments/{comment_number}`. + */ + delete: operations["teams/delete-discussion-comment-in-org"]; + /** + * Edits the body text of a discussion comment. 
OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PATCH /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}/comments/{comment_number}`. + */ + patch: operations["teams/update-discussion-comment-in-org"]; + }; + "/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions": { + /** + * List the reactions to a [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments/). OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/:org_id/team/:team_id/discussions/:discussion_number/comments/:comment_number/reactions`. + */ + get: operations["reactions/list-for-team-discussion-comment-in-org"]; + /** + * Create a reaction to a [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). A response with an HTTP `200` status means that you already added the reaction type to this team discussion comment. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `POST /organizations/:org_id/team/:team_id/discussions/:discussion_number/comments/:comment_number/reactions`. + */ + post: operations["reactions/create-for-team-discussion-comment-in-org"]; + }; + "/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}": { + /** + * **Note:** You can also specify a team or organization with `team_id` and `org_id` using the route `DELETE /organizations/:org_id/team/:team_id/discussions/:discussion_number/comments/:comment_number/reactions/:reaction_id`. + * + * Delete a reaction to a [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + delete: operations["reactions/delete-for-team-discussion-comment"]; + }; + "/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions": { + /** + * List the reactions to a [team discussion](https://docs.github.com/rest/reference/teams#discussions). OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/:org_id/team/:team_id/discussions/:discussion_number/reactions`. + */ + get: operations["reactions/list-for-team-discussion-in-org"]; + /** + * Create a reaction to a [team discussion](https://docs.github.com/rest/reference/teams#discussions). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). A response with an HTTP `200` status means that you already added the reaction type to this team discussion. 
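+ *
+ * As an illustrative sketch only (assumes an authenticated `octokit` client from `@octokit/core`; all identifiers are placeholders):
+ *
+ * ```
+ * // Hypothetical example; `content` must be one of the supported reaction types.
+ * await octokit.request(
+ *   "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions",
+ *   { org: "example-org", team_slug: "example-team", discussion_number: 1, content: "+1" }
+ * );
+ * ```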
+ * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `POST /organizations/:org_id/team/:team_id/discussions/:discussion_number/reactions`. + */ + post: operations["reactions/create-for-team-discussion-in-org"]; + }; + "/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}": { + /** + * **Note:** You can also specify a team or organization with `team_id` and `org_id` using the route `DELETE /organizations/:org_id/team/:team_id/discussions/:discussion_number/reactions/:reaction_id`. + * + * Delete a reaction to a [team discussion](https://docs.github.com/rest/reference/teams#discussions). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + delete: operations["reactions/delete-for-team-discussion"]; + }; + "/orgs/{org}/teams/{team_slug}/invitations": { + /** + * The return hash contains a `role` field which refers to the Organization Invitation role and will be one of the following values: `direct_member`, `admin`, `billing_manager`, `hiring_manager`, or `reinstate`. If the invitee is not a GitHub member, the `login` field in the return hash will be `null`. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/invitations`. + */ + get: operations["teams/list-pending-invitations-in-org"]; + }; + "/orgs/{org}/teams/{team_slug}/members": { + /** + * Team members will include the members of child teams. + * + * To list members in a team, the team must be visible to the authenticated user. + */ + get: operations["teams/list-members-in-org"]; + }; + "/orgs/{org}/teams/{team_slug}/memberships/{username}": { + /** + * Team members will include the members of child teams. + * + * To get a user's membership with a team, the team must be visible to the authenticated user. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/memberships/{username}`. + * + * **Note:** + * The response contains the `state` of the membership and the member's `role`. + * + * The `role` for organization owners is set to `maintainer`. For more information about `maintainer` roles, see [Create a team](https://docs.github.com/rest/reference/teams#create-a-team). + */ + get: operations["teams/get-membership-for-user-in-org"]; + /** + * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Adds an organization member to a team. An authenticated organization owner or team maintainer can add organization members to a team. + * + * **Note:** When you have team synchronization set up for a team with your organization's identity provider (IdP), you will see an error if you attempt to use the API for making changes to the team's membership. If you have access to manage group membership in your IdP, you can manage GitHub team membership through your identity provider, which automatically adds and removes team members in an organization. For more information, see "[Synchronizing teams between your identity provider and GitHub](https://docs.github.com/articles/synchronizing-teams-between-your-identity-provider-and-github/)." + * + * An organization owner can add someone who is not part of the team's organization to a team.
When an organization owner adds someone to a team who is not an organization member, this endpoint will send an invitation to the person via email. This newly-created membership will be in the "pending" state until the person accepts the invitation, at which point the membership will transition to the "active" state and the user will be added as a member of the team. + * + * If the user is already a member of the team, this endpoint will update the team member's role. To update the membership of a team member, the authenticated user must be an organization owner or a team maintainer. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PUT /organizations/{org_id}/team/{team_id}/memberships/{username}`. + */ + put: operations["teams/add-or-update-membership-for-user-in-org"]; + /** + * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * To remove a membership between a user and a team, the authenticated user must have `admin` permissions to the team or be an owner of the organization that the team is associated with. Removing team membership does not delete the user; it just removes their membership from the team. + * + * **Note:** When you have team synchronization set up for a team with your organization's identity provider (IdP), you will see an error if you attempt to use the API for making changes to the team's membership. If you have access to manage group membership in your IdP, you can manage GitHub team membership through your identity provider, which automatically adds and removes team members in an organization. For more information, see "[Synchronizing teams between your identity provider and GitHub](https://docs.github.com/articles/synchronizing-teams-between-your-identity-provider-and-github/)." + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}/memberships/{username}`. + */ + delete: operations["teams/remove-membership-for-user-in-org"]; + }; + "/orgs/{org}/teams/{team_slug}/projects": { + /** + * Lists the organization projects for a team. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/projects`. + */ + get: operations["teams/list-projects-in-org"]; + }; + "/orgs/{org}/teams/{team_slug}/projects/{project_id}": { + /** + * Checks whether a team has `read`, `write`, or `admin` permissions for an organization project. The response includes projects inherited from a parent team. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/projects/{project_id}`. + */ + get: operations["teams/check-permissions-for-project-in-org"]; + /** + * Adds an organization project to a team. To add a project to a team or update the team's permission on a project, the authenticated user must have `admin` permissions for the project. The project and team must be part of the same organization. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PUT /organizations/{org_id}/team/{team_id}/projects/{project_id}`. + */ + put: operations["teams/add-or-update-project-permissions-in-org"]; + /** + * Removes an organization project from a team.
An organization owner or a team maintainer can remove any project from the team. To remove a project from a team as an organization member, the authenticated user must have `read` access to both the team and project, or `admin` access to the team or project. This endpoint removes the project from the team, but does not delete the project. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}/projects/{project_id}`. + */ + delete: operations["teams/remove-project-in-org"]; + }; + "/orgs/{org}/teams/{team_slug}/repos": { + /** + * Lists a team's repositories visible to the authenticated user. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/repos`. + */ + get: operations["teams/list-repos-in-org"]; + }; + "/orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}": { + /** + * Checks whether a team has `admin`, `push`, `maintain`, `triage`, or `pull` permission for a repository. Repositories inherited through a parent team will also be checked. + * + * You can also get information about the specified repository, including what permissions the team grants on it, by passing the following custom [media type](https://docs.github.com/rest/overview/media-types/) via the `application/vnd.github.v3.repository+json` accept header. + * + * If a team doesn't have permission for the repository, you will receive a `404 Not Found` response status. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/repos/{owner}/{repo}`. + */ + get: operations["teams/check-permissions-for-repo-in-org"]; + /** + * To add a repository to a team or update the team's permission on a repository, the authenticated user must have admin access to the repository, and must be able to see the team. The repository must be owned by the organization, or a direct fork of a repository owned by the organization. You will get a `422 Unprocessable Entity` status if you attempt to add a repository to a team that is not owned by the organization. Note that, if you choose not to pass any parameters, you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PUT /organizations/{org_id}/team/{team_id}/repos/{owner}/{repo}`. + * + * For more information about the permission levels, see "[Repository permission levels for an organization](https://docs.github.com/en/github/setting-up-and-managing-organizations-and-teams/repository-permission-levels-for-an-organization#permission-levels-for-repositories-owned-by-an-organization)". + */ + put: operations["teams/add-or-update-repo-permissions-in-org"]; + /** + * If the authenticated user is an organization owner or a team maintainer, they can remove any repositories from the team. To remove a repository from a team as an organization member, the authenticated user must have admin access to the repository and must be able to see the team. This does not delete the repository, it just removes it from the team. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}/repos/{owner}/{repo}`. 
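+ *
+ * A non-normative usage sketch (assumes an authenticated `octokit` client from `@octokit/core`; names are placeholders):
+ *
+ * ```
+ * // Hypothetical example; removes the repository from the team without deleting it.
+ * await octokit.request(
+ *   "DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}",
+ *   { org: "example-org", team_slug: "example-team", owner: "example-org", repo: "example-repo" }
+ * );
+ * ```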
+ */ + delete: operations["teams/remove-repo-in-org"]; + }; + "/orgs/{org}/teams/{team_slug}/teams": { + /** + * Lists the child teams of the team specified by `{team_slug}`. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/teams`. + */ + get: operations["teams/list-child-in-org"]; + }; + "/orgs/{org}/{security_product}/{enablement}": { + /** + * Enables or disables the specified security feature for all repositories in an organization. + * + * To use this endpoint, you must be an organization owner or be a member of a team with the security manager role. + * A token with the `write:org` scope is also required. + * + * GitHub Apps must have the `organization_administration:write` permission to use this endpoint. + * + * For more information, see "[Managing security managers in your organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization)." + */ + post: operations["orgs/enable-or-disable-security-product-on-all-org-repos"]; + }; + "/projects/columns/cards/{card_id}": { + get: operations["projects/get-card"]; + delete: operations["projects/delete-card"]; + patch: operations["projects/update-card"]; + }; + "/projects/columns/cards/{card_id}/moves": { + post: operations["projects/move-card"]; + }; + "/projects/columns/{column_id}": { + get: operations["projects/get-column"]; + delete: operations["projects/delete-column"]; + patch: operations["projects/update-column"]; + }; + "/projects/columns/{column_id}/cards": { + get: operations["projects/list-cards"]; + post: operations["projects/create-card"]; + }; + "/projects/columns/{column_id}/moves": { + post: operations["projects/move-column"]; + }; + "/projects/{project_id}": { + /** Gets a project by its `id`. Returns a `404 Not Found` status if projects are disabled. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. */ + get: operations["projects/get"]; + /** Deletes a project board. Returns a `404 Not Found` status if projects are disabled. */ + delete: operations["projects/delete"]; + /** Updates a project board's information. Returns a `404 Not Found` status if projects are disabled. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. */ + patch: operations["projects/update"]; + }; + "/projects/{project_id}/collaborators": { + /** Lists the collaborators for an organization project. For a project, the list of collaborators includes outside collaborators, organization members that are direct collaborators, organization members with access through team memberships, organization members with access through default organization permissions, and organization owners. You must be an organization owner or a project `admin` to list collaborators. */ + get: operations["projects/list-collaborators"]; + }; + "/projects/{project_id}/collaborators/{username}": { + /** Adds a collaborator to an organization project and sets their permission level. You must be an organization owner or a project `admin` to add a collaborator. */ + put: operations["projects/add-collaborator"]; + /** Removes a collaborator from an organization project. You must be an organization owner or a project `admin` to remove a collaborator.
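+ *
+ * Illustrative sketch only (assumes an authenticated `octokit` client from `@octokit/core`; the identifiers are placeholders):
+ *
+ * ```
+ * // Hypothetical example removing a collaborator from a classic project.
+ * await octokit.request("DELETE /projects/{project_id}/collaborators/{username}", {
+ *   project_id: 1002604,
+ *   username: "octocat",
+ * });
+ * ```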
*/ + delete: operations["projects/remove-collaborator"]; + }; + "/projects/{project_id}/collaborators/{username}/permission": { + /** Returns the collaborator's permission level for an organization project. Possible values for the `permission` key: `admin`, `write`, `read`, `none`. You must be an organization owner or a project `admin` to review a user's permission level. */ + get: operations["projects/get-permission-for-user"]; + }; + "/projects/{project_id}/columns": { + get: operations["projects/list-columns"]; + post: operations["projects/create-column"]; + }; + "/rate_limit": { + /** + * **Note:** Accessing this endpoint does not count against your REST API rate limit. + * + * **Note:** The `rate` object is deprecated. If you're writing new API client code or updating existing code, you should use the `core` object instead of the `rate` object. The `core` object contains the same information that is present in the `rate` object. + */ + get: operations["rate-limit/get"]; + }; + "/repos/{owner}/{repo}": { + /** The `parent` and `source` objects are present when the repository is a fork. `parent` is the repository this repository was forked from, `source` is the ultimate source for the network. */ + get: operations["repos/get"]; + /** + * Deleting a repository requires admin access. If OAuth is used, the `delete_repo` scope is required. + * + * If an organization owner has configured the organization to prevent members from deleting organization-owned + * repositories, you will get a `403 Forbidden` response. + */ + delete: operations["repos/delete"]; + /** **Note**: To edit a repository's topics, use the [Replace all repository topics](https://docs.github.com/rest/reference/repos#replace-all-repository-topics) endpoint. */ + patch: operations["repos/update"]; + }; + "/repos/{owner}/{repo}/actions/artifacts": { + /** Lists all artifacts for a repository. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. */ + get: operations["actions/list-artifacts-for-repo"]; + }; + "/repos/{owner}/{repo}/actions/artifacts/{artifact_id}": { + /** Gets a specific artifact for a workflow run. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. */ + get: operations["actions/get-artifact"]; + /** Deletes an artifact for a workflow run. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. */ + delete: operations["actions/delete-artifact"]; + }; + "/repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}": { + /** + * Gets a redirect URL to download an archive for a repository. This URL expires after 1 minute. Look for `Location:` in + * the response header to find the URL for the download. The `:archive_format` must be `zip`. Anyone with read access to + * the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. + * GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + get: operations["actions/download-artifact"]; + }; + "/repos/{owner}/{repo}/actions/cache/usage": { + /** + * Gets GitHub Actions cache usage for a repository. 
+ * The data fetched using this API is refreshed approximately every 5 minutes, so values returned from this endpoint may take at least 5 minutes to get updated. + * Anyone with read access to the repository can use this endpoint. If the repository is private, you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + get: operations["actions/get-actions-cache-usage"]; + }; + "/repos/{owner}/{repo}/actions/caches": { + /** + * Lists the GitHub Actions caches for a repository. + * You must authenticate using an access token with the `repo` scope to use this endpoint. + * GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + get: operations["actions/get-actions-cache-list"]; + /** + * Deletes one or more GitHub Actions caches for a repository, using a complete cache key. By default, all caches that match the provided key are deleted, but you can optionally provide a Git ref to restrict deletions to caches that match both the provided key and the Git ref. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. + * + * GitHub Apps must have the `actions:write` permission to use this endpoint. + */ + delete: operations["actions/delete-actions-cache-by-key"]; + }; + "/repos/{owner}/{repo}/actions/caches/{cache_id}": { + /** + * Deletes a GitHub Actions cache for a repository, using a cache ID. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. + * + * GitHub Apps must have the `actions:write` permission to use this endpoint. + */ + delete: operations["actions/delete-actions-cache-by-id"]; + }; + "/repos/{owner}/{repo}/actions/jobs/{job_id}": { + /** Gets a specific job in a workflow run. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. */ + get: operations["actions/get-job-for-workflow-run"]; + }; + "/repos/{owner}/{repo}/actions/jobs/{job_id}/logs": { + /** + * Gets a redirect URL to download a plain text file of logs for a workflow job. This link expires after 1 minute. Look + * for `Location:` in the response header to find the URL for the download. Anyone with read access to the repository can + * use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must + * have the `actions:read` permission to use this endpoint. + */ + get: operations["actions/download-job-logs-for-workflow-run"]; + }; + "/repos/{owner}/{repo}/actions/jobs/{job_id}/rerun": { + /** Re-run a job and its dependent jobs in a workflow run. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. */ + post: operations["actions/re-run-job-for-workflow-run"]; + }; + "/repos/{owner}/{repo}/actions/permissions": { + /** + * Gets the GitHub Actions permissions policy for a repository, including whether GitHub Actions is enabled and the actions and reusable workflows allowed to run in the repository. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration` repository permission to use this API. 
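+ *
+ * A minimal, non-normative sketch (assumes an authenticated `octokit` client from `@octokit/core`; owner and repo are placeholders):
+ *
+ * ```
+ * // Hypothetical example; `enabled` and `allowed_actions` come back in the response body.
+ * const { data: permissions } = await octokit.request(
+ *   "GET /repos/{owner}/{repo}/actions/permissions",
+ *   { owner: "example-org", repo: "example-repo" }
+ * );
+ * console.log(permissions.enabled, permissions.allowed_actions);
+ * ```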
+ */ + get: operations["actions/get-github-actions-permissions-repository"]; + /** + * Sets the GitHub Actions permissions policy for enabling GitHub Actions and allowed actions and reusable workflows in the repository. + * + * If the repository belongs to an organization or enterprise that has set restrictive permissions at the organization or enterprise levels, such as `allowed_actions` to `selected` actions and reusable workflows, then you cannot override them for the repository. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration` repository permission to use this API. + */ + put: operations["actions/set-github-actions-permissions-repository"]; + }; + "/repos/{owner}/{repo}/actions/permissions/access": { + /** + * Gets the level of access that workflows outside of the repository have to actions and reusable workflows in the repository. + * This endpoint only applies to internal repositories. For more information, see "[Managing GitHub Actions settings for a repository](https://docs.github.com/repositories/managing-your-repositorys-settings-and-features/enabling-features-for-your-repository/managing-github-actions-settings-for-a-repository#allowing-access-to-components-in-an-internal-repository)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the + * repository `administration` permission to use this endpoint. + */ + get: operations["actions/get-workflow-access-to-repository"]; + /** + * Sets the level of access that workflows outside of the repository have to actions and reusable workflows in the repository. + * This endpoint only applies to internal repositories. For more information, see "[Managing GitHub Actions settings for a repository](https://docs.github.com/repositories/managing-your-repositorys-settings-and-features/enabling-features-for-your-repository/managing-github-actions-settings-for-a-repository#allowing-access-to-components-in-an-internal-repository)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the + * repository `administration` permission to use this endpoint. + */ + put: operations["actions/set-workflow-access-to-repository"]; + }; + "/repos/{owner}/{repo}/actions/permissions/selected-actions": { + /** + * Gets the settings for selected actions and reusable workflows that are allowed in a repository. To use this endpoint, the repository policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for a repository](#set-github-actions-permissions-for-a-repository)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration` repository permission to use this API. + */ + get: operations["actions/get-allowed-actions-repository"]; + /** + * Sets the actions and reusable workflows that are allowed in a repository. To use this endpoint, the repository permission policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for a repository](#set-github-actions-permissions-for-a-repository)." + * + * If the repository belongs to an organization or enterprise that has `selected` actions and reusable workflows set at the organization or enterprise levels, then you cannot override any of the allowed actions and reusable workflows settings. 
+ * + * To use the `patterns_allowed` setting for private repositories, the repository must belong to an enterprise. If the repository does not belong to an enterprise, then the `patterns_allowed` setting only applies to public repositories. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration` repository permission to use this API. + */ + put: operations["actions/set-allowed-actions-repository"]; + }; + "/repos/{owner}/{repo}/actions/permissions/workflow": { + /** + * Gets the default workflow permissions granted to the `GITHUB_TOKEN` when running workflows in a repository, + * as well as if GitHub Actions can submit approving pull request reviews. + * For more information, see "[Setting the permissions of the GITHUB_TOKEN for your repository](https://docs.github.com/repositories/managing-your-repositorys-settings-and-features/enabling-features-for-your-repository/managing-github-actions-settings-for-a-repository#setting-the-permissions-of-the-github_token-for-your-repository)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the repository `administration` permission to use this API. + */ + get: operations["actions/get-github-actions-default-workflow-permissions-repository"]; + /** + * Sets the default workflow permissions granted to the `GITHUB_TOKEN` when running workflows in a repository, and sets if GitHub Actions + * can submit approving pull request reviews. + * For more information, see "[Setting the permissions of the GITHUB_TOKEN for your repository](https://docs.github.com/repositories/managing-your-repositorys-settings-and-features/enabling-features-for-your-repository/managing-github-actions-settings-for-a-repository#setting-the-permissions-of-the-github_token-for-your-repository)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the repository `administration` permission to use this API. + */ + put: operations["actions/set-github-actions-default-workflow-permissions-repository"]; + }; + "/repos/{owner}/{repo}/actions/runners": { + /** Lists all self-hosted runners configured in a repository. You must authenticate using an access token with the `repo` scope to use this endpoint. */ + get: operations["actions/list-self-hosted-runners-for-repo"]; + }; + "/repos/{owner}/{repo}/actions/runners/downloads": { + /** + * Lists binaries for the runner application that you can download and run. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. + */ + get: operations["actions/list-runner-applications-for-repo"]; + }; + "/repos/{owner}/{repo}/actions/runners/registration-token": { + /** + * Returns a token that you can pass to the `config` script. The token expires after one hour. You must authenticate + * using an access token with the `repo` scope to use this endpoint. + * + * #### Example using registration token + * + * Configure your self-hosted runner, replacing `TOKEN` with the registration token provided by this endpoint. + * + * ``` + * ./config.sh --url https://github.com/octo-org/octo-repo-artifacts --token TOKEN + * ``` + */ + post: operations["actions/create-registration-token-for-repo"]; + }; + "/repos/{owner}/{repo}/actions/runners/remove-token": { + /** + * Returns a token that you can pass to remove a self-hosted runner from a repository. The token expires after one hour. 
+ * You must authenticate using an access token with the `repo` scope to use this endpoint. + * + * #### Example using remove token + * + * To remove your self-hosted runner from a repository, replace TOKEN with the remove token provided by this endpoint. + * + * ``` + * ./config.sh remove --token TOKEN + * ``` + */ + post: operations["actions/create-remove-token-for-repo"]; + }; + "/repos/{owner}/{repo}/actions/runners/{runner_id}": { + /** + * Gets a specific self-hosted runner configured in a repository. + * + * You must authenticate using an access token with the `repo` scope to use this + * endpoint. + */ + get: operations["actions/get-self-hosted-runner-for-repo"]; + /** + * Forces the removal of a self-hosted runner from a repository. You can use this endpoint to completely remove the runner when the machine you were using no longer exists. + * + * You must authenticate using an access token with the `repo` + * scope to use this endpoint. + */ + delete: operations["actions/delete-self-hosted-runner-from-repo"]; + }; + "/repos/{owner}/{repo}/actions/runners/{runner_id}/labels": { + /** + * Lists all labels for a self-hosted runner configured in a repository. + * + * You must authenticate using an access token with the `repo` scope to use this + * endpoint. + */ + get: operations["actions/list-labels-for-self-hosted-runner-for-repo"]; + /** + * Remove all previous custom labels and set the new custom labels for a specific + * self-hosted runner configured in a repository. + * + * You must authenticate using an access token with the `repo` scope to use this + * endpoint. + */ + put: operations["actions/set-custom-labels-for-self-hosted-runner-for-repo"]; + /** + * Add custom labels to a self-hosted runner configured in a repository. + * + * You must authenticate using an access token with the `repo` scope to use this + * endpoint. + */ + post: operations["actions/add-custom-labels-to-self-hosted-runner-for-repo"]; + /** + * Remove all custom labels from a self-hosted runner configured in a + * repository. Returns the remaining read-only labels from the runner. + * + * You must authenticate using an access token with the `repo` scope to use this + * endpoint. + */ + delete: operations["actions/remove-all-custom-labels-from-self-hosted-runner-for-repo"]; + }; + "/repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}": { + /** + * Remove a custom label from a self-hosted runner configured + * in a repository. Returns the remaining labels from the runner. + * + * This endpoint returns a `404 Not Found` status if the custom label is not + * present on the runner. + * + * You must authenticate using an access token with the `repo` scope to use this + * endpoint. + */ + delete: operations["actions/remove-custom-label-from-self-hosted-runner-for-repo"]; + }; + "/repos/{owner}/{repo}/actions/runs": { + /** + * Lists all workflow runs for a repository. You can use parameters to narrow the list of results. For more information about using parameters, see [Parameters](https://docs.github.com/rest/overview/resources-in-the-rest-api#parameters). + * + * Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + get: operations["actions/list-workflow-runs-for-repo"]; + }; + "/repos/{owner}/{repo}/actions/runs/{run_id}": { + /** Gets a specific workflow run. Anyone with read access to the repository can use this endpoint. 
If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. */ + get: operations["actions/get-workflow-run"]; + /** + * Delete a specific workflow run. Anyone with write access to the repository can use this endpoint. If the repository is + * private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:write` permission to use + * this endpoint. + */ + delete: operations["actions/delete-workflow-run"]; + }; + "/repos/{owner}/{repo}/actions/runs/{run_id}/approvals": { + /** Anyone with read access to the repository can use this endpoint. If the repository is private, you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. */ + get: operations["actions/get-reviews-for-run"]; + }; + "/repos/{owner}/{repo}/actions/runs/{run_id}/approve": { + /** + * Approves a workflow run for a pull request from a public fork of a first time contributor. For more information, see ["Approving workflow runs from public forks](https://docs.github.com/actions/managing-workflow-runs/approving-workflow-runs-from-public-forks)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. + */ + post: operations["actions/approve-workflow-run"]; + }; + "/repos/{owner}/{repo}/actions/runs/{run_id}/artifacts": { + /** Lists artifacts for a workflow run. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. */ + get: operations["actions/list-workflow-run-artifacts"]; + }; + "/repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}": { + /** + * Gets a specific workflow run attempt. Anyone with read access to the repository + * can use this endpoint. If the repository is private you must use an access token + * with the `repo` scope. GitHub Apps must have the `actions:read` permission to + * use this endpoint. + */ + get: operations["actions/get-workflow-run-attempt"]; + }; + "/repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs": { + /** Lists jobs for a specific workflow run attempt. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. You can use parameters to narrow the list of results. For more information about using parameters, see [Parameters](https://docs.github.com/rest/overview/resources-in-the-rest-api#parameters). */ + get: operations["actions/list-jobs-for-workflow-run-attempt"]; + }; + "/repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs": { + /** + * Gets a redirect URL to download an archive of log files for a specific workflow run attempt. This link expires after + * 1 minute. Look for `Location:` in the response header to find the URL for the download. Anyone with read access to + * the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. + * GitHub Apps must have the `actions:read` permission to use this endpoint. 
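+ *
+ * #### Example reading the redirect URL
+ *
+ * A minimal sketch using the built-in `fetch` of Node.js 18+ with redirects disabled so the `Location` header stays visible; the token and run identifiers are placeholders, not values from this project.
+ *
+ * ```
+ * const response = await fetch(
+ *   "https://api.github.com/repos/octo-org/octo-repo/actions/runs/42/attempts/1/logs",
+ *   {
+ *     headers: { authorization: `Bearer ${process.env.GITHUB_TOKEN}` }, // token with `repo` scope
+ *     redirect: "manual", // keep the 302 so the Location header can be read
+ *   }
+ * );
+ *
+ * // Time-limited URL of the log archive; download it within one minute.
+ * console.log(response.headers.get("location"));
+ * ```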
+ */ + get: operations["actions/download-workflow-run-attempt-logs"]; + }; + "/repos/{owner}/{repo}/actions/runs/{run_id}/cancel": { + /** Cancels a workflow run using its `id`. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. */ + post: operations["actions/cancel-workflow-run"]; + }; + "/repos/{owner}/{repo}/actions/runs/{run_id}/jobs": { + /** Lists jobs for a workflow run. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. You can use parameters to narrow the list of results. For more information about using parameters, see [Parameters](https://docs.github.com/rest/overview/resources-in-the-rest-api#parameters). */ + get: operations["actions/list-jobs-for-workflow-run"]; + }; + "/repos/{owner}/{repo}/actions/runs/{run_id}/logs": { + /** + * Gets a redirect URL to download an archive of log files for a workflow run. This link expires after 1 minute. Look for + * `Location:` in the response header to find the URL for the download. Anyone with read access to the repository can use + * this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have + * the `actions:read` permission to use this endpoint. + */ + get: operations["actions/download-workflow-run-logs"]; + /** Deletes all logs for a workflow run. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. */ + delete: operations["actions/delete-workflow-run-logs"]; + }; + "/repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments": { + /** + * Get all deployment environments for a workflow run that are waiting for protection rules to pass. + * + * Anyone with read access to the repository can use this endpoint. If the repository is private, you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + get: operations["actions/get-pending-deployments-for-run"]; + /** + * Approve or reject pending deployments that are waiting on approval by a required reviewer. + * + * Required reviewers with read access to the repository contents and deployments can use this endpoint. Required reviewers must authenticate using an access token with the `repo` scope to use this endpoint. + */ + post: operations["actions/review-pending-deployments-for-run"]; + }; + "/repos/{owner}/{repo}/actions/runs/{run_id}/rerun": { + /** Re-runs your workflow run using its `id`. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. */ + post: operations["actions/re-run-workflow"]; + }; + "/repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs": { + /** Re-run all of the failed jobs and their dependent jobs in a workflow run using the `id` of the workflow run. You must authenticate using an access token with the `repo` scope to use this endpoint. */ + post: operations["actions/re-run-workflow-failed-jobs"]; + }; + "/repos/{owner}/{repo}/actions/runs/{run_id}/timing": { + /** + * Gets the number of billable minutes and total run time for a specific workflow run. 
Billable minutes only apply to workflows in private repositories that use GitHub-hosted runners. Usage is listed for each GitHub-hosted runner operating system in milliseconds. Any job re-runs are also included in the usage. The usage does not include the multiplier for macOS and Windows runners and is not rounded up to the nearest whole minute. For more information, see "[Managing billing for GitHub Actions](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-actions)". + * + * Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + get: operations["actions/get-workflow-run-usage"]; + }; + "/repos/{owner}/{repo}/actions/secrets": { + /** Lists all secrets available in a repository without revealing their encrypted values. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. */ + get: operations["actions/list-repo-secrets"]; + }; + "/repos/{owner}/{repo}/actions/secrets/public-key": { + /** Gets your public key, which you need to encrypt secrets. You need to encrypt a secret before you can create or update secrets. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `secrets` repository permission to use this endpoint. */ + get: operations["actions/get-repo-public-key"]; + }; + "/repos/{owner}/{repo}/actions/secrets/{secret_name}": { + /** Gets a single repository secret without revealing its encrypted value. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. */ + get: operations["actions/get-repo-secret"]; + /** + * Creates or updates a repository secret with an encrypted value. Encrypt your secret using + * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access + * token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use + * this endpoint. + * + * #### Example encrypting a secret using Node.js + * + * Encrypt your secret using the [tweetsodium](https://github.com/github/tweetsodium) library. + * + * ``` + * const sodium = require('tweetsodium'); + * + * const key = "base64-encoded-public-key"; + * const value = "plain-text-secret"; + * + * // Convert the message and key to Uint8Array's (Buffer implements that interface) + * const messageBytes = Buffer.from(value); + * const keyBytes = Buffer.from(key, 'base64'); + * + * // Encrypt using LibSodium. + * const encryptedBytes = sodium.seal(messageBytes, keyBytes); + * + * // Base64 the encrypted secret + * const encrypted = Buffer.from(encryptedBytes).toString('base64'); + * + * console.log(encrypted); + * ``` + * + * + * #### Example encrypting a secret using Python + * + * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/latest/public/#nacl-public-sealedbox) with Python 3. 
+ * + * ``` + * from base64 import b64encode + * from nacl import encoding, public + * + * def encrypt(public_key: str, secret_value: str) -> str: + * """Encrypt a Unicode string using the public key.""" + * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) + * sealed_box = public.SealedBox(public_key) + * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) + * return b64encode(encrypted).decode("utf-8") + * ``` + * + * #### Example encrypting a secret using C# + * + * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. + * + * ``` + * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); + * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); + * + * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); + * + * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); + * ``` + * + * #### Example encrypting a secret using Ruby + * + * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. + * + * ```ruby + * require "rbnacl" + * require "base64" + * + * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") + * public_key = RbNaCl::PublicKey.new(key) + * + * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) + * encrypted_secret = box.encrypt("my_secret") + * + * # Print the base64 encoded secret + * puts Base64.strict_encode64(encrypted_secret) + * ``` + */ + put: operations["actions/create-or-update-repo-secret"]; + /** Deletes a secret in a repository using the secret name. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. */ + delete: operations["actions/delete-repo-secret"]; + }; + "/repos/{owner}/{repo}/actions/workflows": { + /** Lists the workflows in a repository. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. */ + get: operations["actions/list-repo-workflows"]; + }; + "/repos/{owner}/{repo}/actions/workflows/{workflow_id}": { + /** Gets a specific workflow. You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. */ + get: operations["actions/get-workflow"]; + }; + "/repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable": { + /** + * Disables a workflow and sets the `state` of the workflow to `disabled_manually`. You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. + */ + put: operations["actions/disable-workflow"]; + }; + "/repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches": { + /** + * You can use this endpoint to manually trigger a GitHub Actions workflow run. You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. 
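+ *
+ * #### Example triggering a workflow dispatch using octokit.js
+ *
+ * A minimal sketch of the call; the `octokit` npm package, the token, and the repository, ref, and input values are placeholders for illustration, not part of this project.
+ *
+ * ```
+ * import { Octokit } from "octokit";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN }); // token with `repo` scope
+ *
+ * await octokit.request("POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches", {
+ *   owner: "octo-org",
+ *   repo: "octo-repo",
+ *   workflow_id: "main.yaml",            // or the numeric workflow ID
+ *   ref: "main",                         // branch or tag to run the workflow on
+ *   inputs: { environment: "staging" },  // must match the workflow's `workflow_dispatch` inputs
+ * });
+ * ```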
+ * + * You must configure your GitHub Actions workflow to run when the [`workflow_dispatch` webhook](/developers/webhooks-and-events/webhook-events-and-payloads#workflow_dispatch) event occurs. The `inputs` are configured in the workflow file. For more information about how to configure the `workflow_dispatch` event in the workflow file, see "[Events that trigger workflows](/actions/reference/events-that-trigger-workflows#workflow_dispatch)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. For more information, see "[Creating a personal access token for the command line](https://docs.github.com/articles/creating-a-personal-access-token-for-the-command-line)." + */ + post: operations["actions/create-workflow-dispatch"]; + }; + "/repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable": { + /** + * Enables a workflow and sets the `state` of the workflow to `active`. You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. + */ + put: operations["actions/enable-workflow"]; + }; + "/repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs": { + /** + * List all workflow runs for a workflow. You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. You can use parameters to narrow the list of results. For more information about using parameters, see [Parameters](https://docs.github.com/rest/overview/resources-in-the-rest-api#parameters). + * + * Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. + */ + get: operations["actions/list-workflow-runs"]; + }; + "/repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing": { + /** + * Gets the number of billable minutes used by a specific workflow during the current billing cycle. Billable minutes only apply to workflows in private repositories that use GitHub-hosted runners. Usage is listed for each GitHub-hosted runner operating system in milliseconds. Any job re-runs are also included in the usage. The usage does not include the multiplier for macOS and Windows runners and is not rounded up to the nearest whole minute. For more information, see "[Managing billing for GitHub Actions](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-actions)". + * + * You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + get: operations["actions/get-workflow-usage"]; + }; + "/repos/{owner}/{repo}/assignees": { + /** Lists the [available assignees](https://docs.github.com/articles/assigning-issues-and-pull-requests-to-other-github-users/) for issues in a repository. */ + get: operations["issues/list-assignees"]; + }; + "/repos/{owner}/{repo}/assignees/{assignee}": { + /** + * Checks if a user has permission to be assigned to an issue in this repository. + * + * If the `assignee` can be assigned to issues in the repository, a `204` header with no content is returned. 
+ * + * Otherwise a `404` status code is returned. + */ + get: operations["issues/check-user-can-be-assigned"]; + }; + "/repos/{owner}/{repo}/autolinks": { + /** + * This returns a list of autolinks configured for the given repository. + * + * Information about autolinks are only available to repository administrators. + */ + get: operations["repos/list-autolinks"]; + /** Users with admin access to the repository can create an autolink. */ + post: operations["repos/create-autolink"]; + }; + "/repos/{owner}/{repo}/autolinks/{autolink_id}": { + /** + * This returns a single autolink reference by ID that was configured for the given repository. + * + * Information about autolinks are only available to repository administrators. + */ + get: operations["repos/get-autolink"]; + /** + * This deletes a single autolink reference by ID that was configured for the given repository. + * + * Information about autolinks are only available to repository administrators. + */ + delete: operations["repos/delete-autolink"]; + }; + "/repos/{owner}/{repo}/automated-security-fixes": { + /** Enables automated security fixes for a repository. The authenticated user must have admin access to the repository. For more information, see "[Configuring automated security fixes](https://docs.github.com/en/articles/configuring-automated-security-fixes)". */ + put: operations["repos/enable-automated-security-fixes"]; + /** Disables automated security fixes for a repository. The authenticated user must have admin access to the repository. For more information, see "[Configuring automated security fixes](https://docs.github.com/en/articles/configuring-automated-security-fixes)". */ + delete: operations["repos/disable-automated-security-fixes"]; + }; + "/repos/{owner}/{repo}/branches": { + get: operations["repos/list-branches"]; + }; + "/repos/{owner}/{repo}/branches/{branch}": { + get: operations["repos/get-branch"]; + }; + "/repos/{owner}/{repo}/branches/{branch}/protection": { + /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ + get: operations["repos/get-branch-protection"]; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Protecting a branch requires admin or owner permissions to the repository. + * + * **Note**: Passing new arrays of `users` and `teams` replaces their previous values. + * + * **Note**: The list of users, apps, and teams in total is limited to 100 items. + */ + put: operations["repos/update-branch-protection"]; + /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. 
*/ + delete: operations["repos/delete-branch-protection"]; + }; + "/repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins": { + /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ + get: operations["repos/get-admin-branch-protection"]; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Adding admin enforcement requires admin or owner permissions to the repository and branch protection to be enabled. + */ + post: operations["repos/set-admin-branch-protection"]; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Removing admin enforcement requires admin or owner permissions to the repository and branch protection to be enabled. + */ + delete: operations["repos/delete-admin-branch-protection"]; + }; + "/repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews": { + /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ + get: operations["repos/get-pull-request-review-protection"]; + /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ + delete: operations["repos/delete-pull-request-review-protection"]; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Updating pull request review enforcement requires admin or owner permissions to the repository and branch protection to be enabled. + * + * **Note**: Passing new arrays of `users` and `teams` replaces their previous values. 
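+ *
+ * #### Example updating pull request review enforcement using octokit.js
+ *
+ * A minimal sketch of the call; the `octokit` npm package, the token, and the repository and review settings shown are placeholders for illustration, not part of this project.
+ *
+ * ```
+ * import { Octokit } from "octokit";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN }); // token with admin access
+ *
+ * await octokit.request(
+ *   "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews",
+ *   {
+ *     owner: "octo-org",
+ *     repo: "octo-repo",
+ *     branch: "main",
+ *     dismiss_stale_reviews: true,
+ *     require_code_owner_reviews: true,
+ *     required_approving_review_count: 2,
+ *   }
+ * );
+ * ```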
+ */ + patch: operations["repos/update-pull-request-review-protection"]; + }; + "/repos/{owner}/{repo}/branches/{branch}/protection/required_signatures": { + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * When authenticated with admin or owner permissions to the repository, you can use this endpoint to check whether a branch requires signed commits. An enabled status of `true` indicates you must sign commits on this branch. For more information, see [Signing commits with GPG](https://docs.github.com/articles/signing-commits-with-gpg) in GitHub Help. + * + * **Note**: You must enable branch protection to require signed commits. + */ + get: operations["repos/get-commit-signature-protection"]; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * When authenticated with admin or owner permissions to the repository, you can use this endpoint to require signed commits on a branch. You must enable branch protection to require signed commits. + */ + post: operations["repos/create-commit-signature-protection"]; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * When authenticated with admin or owner permissions to the repository, you can use this endpoint to disable required signed commits on a branch. You must enable branch protection to require signed commits. + */ + delete: operations["repos/delete-commit-signature-protection"]; + }; + "/repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks": { + /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ + get: operations["repos/get-status-checks-protection"]; + /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. 
*/ + delete: operations["repos/remove-status-check-protection"]; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Updating required status checks requires admin or owner permissions to the repository and branch protection to be enabled. + */ + patch: operations["repos/update-status-check-protection"]; + }; + "/repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts": { + /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ + get: operations["repos/get-all-status-check-contexts"]; + /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ + put: operations["repos/set-status-check-contexts"]; + /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ + post: operations["repos/add-status-check-contexts"]; + /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ + delete: operations["repos/remove-status-check-contexts"]; + }; + "/repos/{owner}/{repo}/branches/{branch}/protection/restrictions": { + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Lists who has access to this protected branch. + * + * **Note**: Users, apps, and teams `restrictions` are only available for organization-owned repositories. + */ + get: operations["repos/get-access-restrictions"]; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. 
For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Disables the ability to restrict who can push to this branch. + */ + delete: operations["repos/delete-access-restrictions"]; + }; + "/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps": { + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Lists the GitHub Apps that have push access to this branch. Only installed GitHub Apps with `write` access to the `contents` permission can be added as authorized actors on a protected branch. + */ + get: operations["repos/get-apps-with-access-to-protected-branch"]; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Replaces the list of apps that have push access to this branch. This removes all apps that previously had push access and grants push access to the new list of apps. Only installed GitHub Apps with `write` access to the `contents` permission can be added as authorized actors on a protected branch. + * + * | Type | Description | + * | ------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------- | + * | `array` | The GitHub Apps that have push access to this branch. Use the app's `slug`. **Note**: The list of users, apps, and teams in total is limited to 100 items. | + */ + put: operations["repos/set-app-access-restrictions"]; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Grants the specified apps push access for this branch. Only installed GitHub Apps with `write` access to the `contents` permission can be added as authorized actors on a protected branch. + * + * | Type | Description | + * | ------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------- | + * | `array` | The GitHub Apps that have push access to this branch. Use the app's `slug`. **Note**: The list of users, apps, and teams in total is limited to 100 items. | + */ + post: operations["repos/add-app-access-restrictions"]; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. 
For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Removes the ability of an app to push to this branch. Only installed GitHub Apps with `write` access to the `contents` permission can be added as authorized actors on a protected branch. + * + * | Type | Description | + * | ------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------- | + * | `array` | The GitHub Apps that have push access to this branch. Use the app's `slug`. **Note**: The list of users, apps, and teams in total is limited to 100 items. | + */ + delete: operations["repos/remove-app-access-restrictions"]; + }; + "/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams": { + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Lists the teams who have push access to this branch. The list includes child teams. + */ + get: operations["repos/get-teams-with-access-to-protected-branch"]; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Replaces the list of teams that have push access to this branch. This removes all teams that previously had push access and grants push access to the new list of teams. Team restrictions include child teams. + * + * | Type | Description | + * | ------- | ------------------------------------------------------------------------------------------------------------------------------------------ | + * | `array` | The teams that can have push access. Use the team's `slug`. **Note**: The list of users, apps, and teams in total is limited to 100 items. | + */ + put: operations["repos/set-team-access-restrictions"]; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Grants the specified teams push access for this branch. You can also give push access to child teams. + * + * | Type | Description | + * | ------- | ------------------------------------------------------------------------------------------------------------------------------------------ | + * | `array` | The teams that can have push access. Use the team's `slug`. **Note**: The list of users, apps, and teams in total is limited to 100 items. 
| + */ + post: operations["repos/add-team-access-restrictions"]; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Removes the ability of a team to push to this branch. You can also remove push access for child teams. + * + * | Type | Description | + * | ------- | --------------------------------------------------------------------------------------------------------------------------------------------------- | + * | `array` | Teams that should no longer have push access. Use the team's `slug`. **Note**: The list of users, apps, and teams in total is limited to 100 items. | + */ + delete: operations["repos/remove-team-access-restrictions"]; + }; + "/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users": { + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Lists the people who have push access to this branch. + */ + get: operations["repos/get-users-with-access-to-protected-branch"]; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Replaces the list of people that have push access to this branch. This removes all people that previously had push access and grants push access to the new list of people. + * + * | Type | Description | + * | ------- | ----------------------------------------------------------------------------------------------------------------------------- | + * | `array` | Usernames for people who can have push access. **Note**: The list of users, apps, and teams in total is limited to 100 items. | + */ + put: operations["repos/set-user-access-restrictions"]; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Grants the specified people push access for this branch. + * + * | Type | Description | + * | ------- | ----------------------------------------------------------------------------------------------------------------------------- | + * | `array` | Usernames for people who can have push access. **Note**: The list of users, apps, and teams in total is limited to 100 items. 
| + */ + post: operations["repos/add-user-access-restrictions"]; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Removes the ability of a user to push to this branch. + * + * | Type | Description | + * | ------- | --------------------------------------------------------------------------------------------------------------------------------------------- | + * | `array` | Usernames of the people who should no longer have push access. **Note**: The list of users, apps, and teams in total is limited to 100 items. | + */ + delete: operations["repos/remove-user-access-restrictions"]; + }; + "/repos/{owner}/{repo}/branches/{branch}/rename": { + /** + * Renames a branch in a repository. + * + * **Note:** Although the API responds immediately, the branch rename process might take some extra time to complete in the background. You won't be able to push to the old branch name while the rename process is in progress. For more information, see "[Renaming a branch](https://docs.github.com/github/administering-a-repository/renaming-a-branch)". + * + * The permissions required to use this endpoint depends on whether you are renaming the default branch. + * + * To rename a non-default branch: + * + * * Users must have push access. + * * GitHub Apps must have the `contents:write` repository permission. + * + * To rename the default branch: + * + * * Users must have admin or owner permissions. + * * GitHub Apps must have the `administration:write` repository permission. + */ + post: operations["repos/rename-branch"]; + }; + "/repos/{owner}/{repo}/check-runs": { + /** + * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array. + * + * Creates a new check run for a specific commit in a repository. Your GitHub App must have the `checks:write` permission to create check runs. + * + * In a check suite, GitHub limits the number of check runs with the same name to 1000. Once these check runs exceed 1000, GitHub will start to automatically delete older check runs. + */ + post: operations["checks/create"]; + }; + "/repos/{owner}/{repo}/check-runs/{check_run_id}": { + /** + * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array. + * + * Gets a single check run using its `id`. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to get check runs. OAuth Apps and authenticated users must have the `repo` scope to get check runs in a private repository. + */ + get: operations["checks/get"]; + /** + * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array. + * + * Updates a check run for a specific commit in a repository. Your GitHub App must have the `checks:write` permission to edit check runs. 
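+ *
+ * #### Example marking a check run as completed using octokit.js
+ *
+ * A minimal sketch of the call; the `octokit` and `@octokit/auth-app` npm packages and every value shown (app ID, private key, installation ID, repository, check run ID) are placeholders for illustration, not part of this project.
+ *
+ * ```
+ * import { Octokit } from "octokit";
+ * import { createAppAuth } from "@octokit/auth-app";
+ *
+ * // Check runs can only be written by a GitHub App, so authenticate as an installation.
+ * const octokit = new Octokit({
+ *   authStrategy: createAppAuth,
+ *   auth: { appId: 12345, privateKey: process.env.APP_PRIVATE_KEY, installationId: 67890 },
+ * });
+ *
+ * await octokit.request("PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}", {
+ *   owner: "octo-org",
+ *   repo: "octo-repo",
+ *   check_run_id: 4,
+ *   status: "completed",
+ *   conclusion: "success", // required once status is "completed"
+ *   output: { title: "Lint results", summary: "No problems found." },
+ * });
+ * ```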
+ */ + patch: operations["checks/update"]; + }; + "/repos/{owner}/{repo}/check-runs/{check_run_id}/annotations": { + /** Lists annotations for a check run using the annotation `id`. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to get annotations for a check run. OAuth Apps and authenticated users must have the `repo` scope to get annotations for a check run in a private repository. */ + get: operations["checks/list-annotations"]; + }; + "/repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest": { + /** + * Triggers GitHub to rerequest an existing check run, without pushing new code to a repository. This endpoint will trigger the [`check_run` webhook](https://docs.github.com/webhooks/event-payloads/#check_run) event with the action `rerequested`. When a check run is `rerequested`, its `status` is reset to `queued` and the `conclusion` is cleared. + * + * To rerequest a check run, your GitHub App must have the `checks:read` permission on a private repository or pull access to a public repository. + */ + post: operations["checks/rerequest-run"]; + }; + "/repos/{owner}/{repo}/check-suites": { + /** + * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array and a `null` value for `head_branch`. + * + * By default, check suites are automatically created when you create a [check run](https://docs.github.com/rest/reference/checks#check-runs). You only need to use this endpoint for manually creating check suites when you've disabled automatic creation using "[Update repository preferences for check suites](https://docs.github.com/rest/reference/checks#update-repository-preferences-for-check-suites)". Your GitHub App must have the `checks:write` permission to create check suites. + */ + post: operations["checks/create-suite"]; + }; + "/repos/{owner}/{repo}/check-suites/preferences": { + /** Changes the default automatic flow when creating check suites. By default, a check suite is automatically created each time code is pushed to a repository. When you disable the automatic creation of check suites, you can manually [Create a check suite](https://docs.github.com/rest/reference/checks#create-a-check-suite). You must have admin permissions in the repository to set preferences for check suites. */ + patch: operations["checks/set-suites-preferences"]; + }; + "/repos/{owner}/{repo}/check-suites/{check_suite_id}": { + /** + * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array and a `null` value for `head_branch`. + * + * Gets a single check suite using its `id`. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to get check suites. OAuth Apps and authenticated users must have the `repo` scope to get check suites in a private repository. + */ + get: operations["checks/get-suite"]; + }; + "/repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs": { + /** + * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array. + * + * Lists check runs for a check suite using its `id`. 
GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to get check runs. OAuth Apps and authenticated users must have the `repo` scope to get check runs in a private repository. + */ + get: operations["checks/list-for-suite"]; + }; + "/repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest": { + /** + * Triggers GitHub to rerequest an existing check suite, without pushing new code to a repository. This endpoint will trigger the [`check_suite` webhook](https://docs.github.com/webhooks/event-payloads/#check_suite) event with the action `rerequested`. When a check suite is `rerequested`, its `status` is reset to `queued` and the `conclusion` is cleared. + * + * To rerequest a check suite, your GitHub App must have the `checks:read` permission on a private repository or pull access to a public repository. + */ + post: operations["checks/rerequest-suite"]; + }; + "/repos/{owner}/{repo}/code-scanning/alerts": { + /** + * Lists all open code scanning alerts for the default branch (usually `main` + * or `master`). You must use an access token with the `security_events` scope to use + * this endpoint with private repos, the `public_repo` scope also grants permission to read + * security events on public repos only. GitHub Apps must have the `security_events` read + * permission to use this endpoint. + * + * The response includes a `most_recent_instance` object. + * This provides details of the most recent instance of this alert + * for the default branch or for the specified Git reference + * (if you used `ref` in the request). + */ + get: operations["code-scanning/list-alerts-for-repo"]; + }; + "/repos/{owner}/{repo}/code-scanning/alerts/{alert_number}": { + /** + * Gets a single code scanning alert. You must use an access token with the `security_events` scope to use this endpoint with private repos, the `public_repo` scope also grants permission to read security events on public repos only. GitHub Apps must have the `security_events` read permission to use this endpoint. + * + * **Deprecation notice**: + * The instances field is deprecated and will, in future, not be included in the response for this endpoint. The example response reflects this change. The same information can now be retrieved via a GET request to the URL specified by `instances_url`. + */ + get: operations["code-scanning/get-alert"]; + /** Updates the status of a single code scanning alert. You must use an access token with the `security_events` scope to use this endpoint with private repositories. You can also use tokens with the `public_repo` scope for public repositories only. GitHub Apps must have the `security_events` write permission to use this endpoint. */ + patch: operations["code-scanning/update-alert"]; + }; + "/repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances": { + /** + * Lists all instances of the specified code scanning alert. + * You must use an access token with the `security_events` scope to use this endpoint with private repos, + * the `public_repo` scope also grants permission to read security events on public repos only. + * GitHub Apps must have the `security_events` read permission to use this endpoint. + */ + get: operations["code-scanning/list-alert-instances"]; + }; + "/repos/{owner}/{repo}/code-scanning/analyses": { + /** + * Lists the details of all code scanning analyses for a repository, + * starting with the most recent. 
+ * The response is paginated and you can use the `page` and `per_page` parameters + * to list the analyses you're interested in. + * By default 30 analyses are listed per page. + * + * The `rules_count` field in the response give the number of rules + * that were run in the analysis. + * For very old analyses this data is not available, + * and `0` is returned in this field. + * + * You must use an access token with the `security_events` scope to use this endpoint with private repos, + * the `public_repo` scope also grants permission to read security events on public repos only. + * GitHub Apps must have the `security_events` read permission to use this endpoint. + * + * **Deprecation notice**: + * The `tool_name` field is deprecated and will, in future, not be included in the response for this endpoint. The example response reflects this change. The tool name can now be found inside the `tool` field. + */ + get: operations["code-scanning/list-recent-analyses"]; + }; + "/repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}": { + /** + * Gets a specified code scanning analysis for a repository. + * You must use an access token with the `security_events` scope to use this endpoint with private repos, + * the `public_repo` scope also grants permission to read security events on public repos only. + * GitHub Apps must have the `security_events` read permission to use this endpoint. + * + * The default JSON response contains fields that describe the analysis. + * This includes the Git reference and commit SHA to which the analysis relates, + * the datetime of the analysis, the name of the code scanning tool, + * and the number of alerts. + * + * The `rules_count` field in the default response give the number of rules + * that were run in the analysis. + * For very old analyses this data is not available, + * and `0` is returned in this field. + * + * If you use the Accept header `application/sarif+json`, + * the response contains the analysis data that was uploaded. + * This is formatted as + * [SARIF version 2.1.0](https://docs.oasis-open.org/sarif/sarif/v2.1.0/cs01/sarif-v2.1.0-cs01.html). + */ + get: operations["code-scanning/get-analysis"]; + /** + * Deletes a specified code scanning analysis from a repository. For + * private repositories, you must use an access token with the `repo` scope. For public repositories, + * you must use an access token with `public_repo` scope. + * GitHub Apps must have the `security_events` write permission to use this endpoint. + * + * You can delete one analysis at a time. + * To delete a series of analyses, start with the most recent analysis and work backwards. + * Conceptually, the process is similar to the undo function in a text editor. + * + * When you list the analyses for a repository, + * one or more will be identified as deletable in the response: + * + * ``` + * "deletable": true + * ``` + * + * An analysis is deletable when it's the most recent in a set of analyses. + * Typically, a repository will have multiple sets of analyses + * for each enabled code scanning tool, + * where a set is determined by a unique combination of analysis values: + * + * * `ref` + * * `tool` + * * `category` + * + * If you attempt to delete an analysis that is not the most recent in a set, + * you'll get a 400 response with the message: + * + * ``` + * Analysis specified is not deletable. 
+ * ``` + * + * The response from a successful `DELETE` operation provides you with + * two alternative URLs for deleting the next analysis in the set: + * `next_analysis_url` and `confirm_delete_url`. + * Use the `next_analysis_url` URL if you want to avoid accidentally deleting the final analysis + * in a set. This is a useful option if you want to preserve at least one analysis + * for the specified tool in your repository. + * Use the `confirm_delete_url` URL if you are content to remove all analyses for a tool. + * When you delete the last analysis in a set, the value of `next_analysis_url` and `confirm_delete_url` + * in the 200 response is `null`. + * + * As an example of the deletion process, + * let's imagine that you added a workflow that configured a particular code scanning tool + * to analyze the code in a repository. This tool has added 15 analyses: + * 10 on the default branch, and another 5 on a topic branch. + * You therefore have two separate sets of analyses for this tool. + * You've now decided that you want to remove all of the analyses for the tool. + * To do this you must make 15 separate deletion requests. + * To start, you must find an analysis that's identified as deletable. + * Each set of analyses always has one that's identified as deletable. + * Having found the deletable analysis for one of the two sets, + * delete this analysis and then continue deleting the next analysis in the set until they're all deleted. + * Then repeat the process for the second set. + * The procedure therefore consists of a nested loop: + * + * **Outer loop**: + * * List the analyses for the repository, filtered by tool. + * * Parse this list to find a deletable analysis. If found: + * + * **Inner loop**: + * * Delete the identified analysis. + * * Parse the response for the value of `confirm_delete_url` and, if found, use this in the next iteration. + * + * The above process assumes that you want to remove all trace of the tool's analyses from the GitHub user interface, for the specified repository, and it therefore uses the `confirm_delete_url` value. Alternatively, you could use the `next_analysis_url` value, which would leave the last analysis in each set undeleted to avoid removing a tool's analysis entirely. + */ + delete: operations["code-scanning/delete-analysis"]; + }; + "/repos/{owner}/{repo}/code-scanning/codeql/databases": { + /** + * Lists the CodeQL databases that are available in a repository. + * + * For private repositories, you must use an access token with the `security_events` scope. + * For public repositories, you can use tokens with the `security_events` or `public_repo` scope. + * GitHub Apps must have the `contents` read permission to use this endpoint. + */ + get: operations["code-scanning/list-codeql-databases"]; + }; + "/repos/{owner}/{repo}/code-scanning/codeql/databases/{language}": { + /** + * Gets a CodeQL database for a language in a repository. + * + * By default this endpoint returns JSON metadata about the CodeQL database. To + * download the CodeQL database binary content, set the `Accept` header of the request + * to [`application/zip`](https://docs.github.com/rest/overview/media-types), and make sure + * your HTTP client is configured to follow redirects or use the `Location` header + * to make a second request to get the redirect URL. + * + * For private repositories, you must use an access token with the `security_events` scope. + * For public repositories, you can use tokens with the `security_events` or `public_repo` scope. 
+ * GitHub Apps must have the `contents` read permission to use this endpoint. + */ + get: operations["code-scanning/get-codeql-database"]; + }; + "/repos/{owner}/{repo}/code-scanning/sarifs": { + /** + * Uploads SARIF data containing the results of a code scanning analysis to make the results available in a repository. You must use an access token with the `security_events` scope to use this endpoint for private repositories. You can also use tokens with the `public_repo` scope for public repositories only. GitHub Apps must have the `security_events` write permission to use this endpoint. + * + * There are two places where you can upload code scanning results. + * - If you upload to a pull request, for example `--ref refs/pull/42/merge` or `--ref refs/pull/42/head`, then the results appear as alerts in a pull request check. For more information, see "[Triaging code scanning alerts in pull requests](/code-security/secure-coding/triaging-code-scanning-alerts-in-pull-requests)." + * - If you upload to a branch, for example `--ref refs/heads/my-branch`, then the results appear in the **Security** tab for your repository. For more information, see "[Managing code scanning alerts for your repository](/code-security/secure-coding/managing-code-scanning-alerts-for-your-repository#viewing-the-alerts-for-a-repository)." + * + * You must compress the SARIF-formatted analysis data that you want to upload, using `gzip`, and then encode it as a Base64 format string. For example: + * + * ``` + * gzip -c analysis-data.sarif | base64 -w0 + * ``` + * + * SARIF upload supports a maximum of 5000 results per analysis run. Any results over this limit are ignored and any SARIF uploads with more than 25,000 results are rejected. Typically, but not necessarily, a SARIF file contains a single run of a single tool. If a code scanning tool generates too many results, you should update the analysis configuration to run only the most important rules or queries. + * + * The `202 Accepted` response includes an `id` value. + * You can use this ID to check the status of the upload by using it with the `/sarifs/{sarif_id}` endpoint. + * For more information, see "[Get information about a SARIF upload](/rest/reference/code-scanning#get-information-about-a-sarif-upload)." + */ + post: operations["code-scanning/upload-sarif"]; + }; + "/repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}": { + /** Gets information about a SARIF upload, including the status and the URL of the analysis that was uploaded so that you can retrieve details of the analysis. For more information, see "[Get a code scanning analysis for a repository](/rest/reference/code-scanning#get-a-code-scanning-analysis-for-a-repository)." You must use an access token with the `security_events` scope to use this endpoint with private repos, the `public_repo` scope also grants permission to read security events on public repos only. GitHub Apps must have the `security_events` read permission to use this endpoint. */ + get: operations["code-scanning/get-sarif"]; + }; + "/repos/{owner}/{repo}/codeowners/errors": { + /** + * List any syntax errors that are detected in the CODEOWNERS + * file. + * + * For more information about the correct CODEOWNERS syntax, + * see "[About code owners](https://docs.github.com/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-code-owners)."
+ */ + get: operations["repos/codeowners-errors"]; + }; + "/repos/{owner}/{repo}/codespaces": { + /** + * Lists the codespaces associated to a specified repository and the authenticated user. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces` repository permission to use this endpoint. + */ + get: operations["codespaces/list-in-repository-for-authenticated-user"]; + /** + * Creates a codespace owned by the authenticated user in the specified repository. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces` repository permission to use this endpoint. + */ + post: operations["codespaces/create-with-repo-for-authenticated-user"]; + }; + "/repos/{owner}/{repo}/codespaces/devcontainers": { + /** + * Lists the devcontainer.json files associated with a specified repository and the authenticated user. These files + * specify launchpoint configurations for codespaces created within the repository. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces_metadata` repository permission to use this endpoint. + */ + get: operations["codespaces/list-devcontainers-in-repository-for-authenticated-user"]; + }; + "/repos/{owner}/{repo}/codespaces/machines": { + /** + * List the machine types available for a given repository based on its configuration. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces_metadata` repository permission to use this endpoint. + */ + get: operations["codespaces/repo-machines-for-authenticated-user"]; + }; + "/repos/{owner}/{repo}/codespaces/new": { + /** + * Gets the default attributes for codespaces created by the user with the repository. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces` repository permission to use this endpoint. + */ + get: operations["codespaces/pre-flight-with-repo-for-authenticated-user"]; + }; + "/repos/{owner}/{repo}/codespaces/secrets": { + /** Lists all secrets available in a repository without revealing their encrypted values. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `codespaces_secrets` repository permission to use this endpoint. */ + get: operations["codespaces/list-repo-secrets"]; + }; + "/repos/{owner}/{repo}/codespaces/secrets/public-key": { + /** Gets your public key, which you need to encrypt secrets. You need to encrypt a secret before you can create or update secrets. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `codespaces_secrets` repository permission to use this endpoint. */ + get: operations["codespaces/get-repo-public-key"]; + }; + "/repos/{owner}/{repo}/codespaces/secrets/{secret_name}": { + /** Gets a single repository secret without revealing its encrypted value. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `codespaces_secrets` repository permission to use this endpoint. 
*/ + get: operations["codespaces/get-repo-secret"]; + /** + * Creates or updates a repository secret with an encrypted value. Encrypt your secret using + * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access + * token with the `repo` scope to use this endpoint. GitHub Apps must have the `codespaces_secrets` repository + * permission to use this endpoint. + * + * #### Example of encrypting a secret using Node.js + * + * Encrypt your secret using the [tweetsodium](https://github.com/github/tweetsodium) library. + * + * ``` + * const sodium = require('tweetsodium'); + * + * const key = "base64-encoded-public-key"; + * const value = "plain-text-secret"; + * + * // Convert the message and key to Uint8Array's (Buffer implements that interface) + * const messageBytes = Buffer.from(value); + * const keyBytes = Buffer.from(key, 'base64'); + * + * // Encrypt using LibSodium. + * const encryptedBytes = sodium.seal(messageBytes, keyBytes); + * + * // Base64 the encrypted secret + * const encrypted = Buffer.from(encryptedBytes).toString('base64'); + * + * console.log(encrypted); + * ``` + * + * + * #### Example of encrypting a secret using Python + * + * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/latest/public/#nacl-public-sealedbox) with Python 3. + * + * ``` + * from base64 import b64encode + * from nacl import encoding, public + * + * def encrypt(public_key: str, secret_value: str) -> str: + * """Encrypt a Unicode string using the public key.""" + * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) + * sealed_box = public.SealedBox(public_key) + * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) + * return b64encode(encrypted).decode("utf-8") + * ``` + * + * #### Example of encrypting a secret using C# + * + * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. + * + * ``` + * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); + * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); + * + * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); + * + * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); + * ``` + * + * #### Example of encrypting a secret using Ruby + * + * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. + * + * ```ruby + * require "rbnacl" + * require "base64" + * + * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") + * public_key = RbNaCl::PublicKey.new(key) + * + * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) + * encrypted_secret = box.encrypt("my_secret") + * + * # Print the base64 encoded secret + * puts Base64.strict_encode64(encrypted_secret) + * ``` + */ + put: operations["codespaces/create-or-update-repo-secret"]; + /** Deletes a secret in a repository using the secret name. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `codespaces_secrets` repository permission to use this endpoint. 
*/ + delete: operations["codespaces/delete-repo-secret"]; + }; + "/repos/{owner}/{repo}/collaborators": { + /** + * For organization-owned repositories, the list of collaborators includes outside collaborators, organization members that are direct collaborators, organization members with access through team memberships, organization members with access through default organization permissions, and organization owners. + * Organization members with write, maintain, or admin privileges on the organization-owned repository can use this endpoint. + * + * Team members will include the members of child teams. + * + * You must authenticate using an access token with the `read:org` and `repo` scopes with push access to use this + * endpoint. GitHub Apps must have the `members` organization permission and `metadata` repository permission to use this + * endpoint. + */ + get: operations["repos/list-collaborators"]; + }; + "/repos/{owner}/{repo}/collaborators/{username}": { + /** + * For organization-owned repositories, the list of collaborators includes outside collaborators, organization members that are direct collaborators, organization members with access through team memberships, organization members with access through default organization permissions, and organization owners. + * + * Team members will include the members of child teams. + * + * You must authenticate using an access token with the `read:org` and `repo` scopes with push access to use this + * endpoint. GitHub Apps must have the `members` organization permission and `metadata` repository permission to use this + * endpoint. + */ + get: operations["repos/check-collaborator"]; + /** + * This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + * + * Adding an outside collaborator may be restricted by enterprise administrators. For more information, see "[Enforcing repository management policies in your enterprise](https://docs.github.com/enterprise-cloud@latest/admin/policies/enforcing-policies-for-your-enterprise/enforcing-repository-management-policies-in-your-enterprise#enforcing-a-policy-for-inviting-outside-collaborators-to-repositories)." + * + * For more information on permission levels, see "[Repository permission levels for an organization](https://docs.github.com/github/setting-up-and-managing-organizations-and-teams/repository-permission-levels-for-an-organization#permission-levels-for-repositories-owned-by-an-organization)". There are restrictions on which permissions can be granted to organization members when an organization base role is in place. In this case, the permission being given must be equal to or higher than the org base permission. Otherwise, the request will fail with: + * + * ``` + * Cannot assign {member} permission of {role name} + * ``` + * + * Note that, if you choose not to pass any parameters, you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." 
+ * + * The invitee will receive a notification that they have been invited to the repository, which they must accept or decline. They may do this via the notifications page, the email they receive, or by using the [repository invitations API endpoints](https://docs.github.com/rest/reference/repos#invitations). + * + * **Updating an existing collaborator's permission level** + * + * The endpoint can also be used to change the permissions of an existing collaborator without first removing and re-adding the collaborator. To change the permissions, use the same endpoint and pass a different `permission` parameter. The response will be a `204`, with no other indication that the permission level changed. + * + * **Rate limits** + * + * You are limited to sending 50 invitations to a repository per 24 hour period. Note there is no limit if you are inviting organization members to an organization repository. + */ + put: operations["repos/add-collaborator"]; + delete: operations["repos/remove-collaborator"]; + }; + "/repos/{owner}/{repo}/collaborators/{username}/permission": { + /** Checks the repository permission of a collaborator. The possible repository permissions are `admin`, `write`, `read`, and `none`. */ + get: operations["repos/get-collaborator-permission-level"]; + }; + "/repos/{owner}/{repo}/comments": { + /** + * Commit Comments use [these custom media types](https://docs.github.com/rest/reference/repos#custom-media-types). You can read more about the use of media types in the API [here](https://docs.github.com/rest/overview/media-types/). + * + * Comments are ordered by ascending ID. + */ + get: operations["repos/list-commit-comments-for-repo"]; + }; + "/repos/{owner}/{repo}/comments/{comment_id}": { + get: operations["repos/get-commit-comment"]; + delete: operations["repos/delete-commit-comment"]; + patch: operations["repos/update-commit-comment"]; + }; + "/repos/{owner}/{repo}/comments/{comment_id}/reactions": { + /** List the reactions to a [commit comment](https://docs.github.com/rest/reference/repos#comments). */ + get: operations["reactions/list-for-commit-comment"]; + /** Create a reaction to a [commit comment](https://docs.github.com/rest/reference/repos#comments). A response with an HTTP `200` status means that you already added the reaction type to this commit comment. */ + post: operations["reactions/create-for-commit-comment"]; + }; + "/repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}": { + /** + * **Note:** You can also specify a repository by `repository_id` using the route `DELETE /repositories/:repository_id/comments/:comment_id/reactions/:reaction_id`. + * + * Delete a reaction to a [commit comment](https://docs.github.com/rest/reference/repos#comments). + */ + delete: operations["reactions/delete-for-commit-comment"]; + }; + "/repos/{owner}/{repo}/commits": { + /** + * **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. 
| + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. | + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. | + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. | + */ + get: operations["repos/list-commits"]; + }; + "/repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head": { + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Returns all branches where the given commit SHA is the HEAD, or latest commit for the branch. + */ + get: operations["repos/list-branches-for-head-commit"]; + }; + "/repos/{owner}/{repo}/commits/{commit_sha}/comments": { + /** Use the `:commit_sha` to specify the commit that will have its comments listed. */ + get: operations["repos/list-comments-for-commit"]; + /** + * Create a comment for a commit using its `:commit_sha`. + * + * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + post: operations["repos/create-commit-comment"]; + }; + "/repos/{owner}/{repo}/commits/{commit_sha}/pulls": { + /** Lists the merged pull request that introduced the commit to the repository. If the commit is not present in the default branch, additionally returns open pull requests associated with the commit. The results may include open and closed pull requests. */ + get: operations["repos/list-pull-requests-associated-with-commit"]; + }; + "/repos/{owner}/{repo}/commits/{ref}": { + /** + * Returns the contents of a single commit reference. 
You must have `read` access for the repository to use this endpoint. + * + * **Note:** If there are more than 300 files in the commit diff, the response will include pagination link headers for the remaining files, up to a limit of 3000 files. Each page contains the static commit information, and the only changes are to the file listing. + * + * You can pass the appropriate [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) to fetch `diff` and `patch` formats. Diffs with binary data will have no `patch` property. + * + * To return only the SHA-1 hash of the commit reference, you can provide the `sha` custom [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) in the `Accept` header. You can use this endpoint to check if a remote reference's SHA-1 hash is the same as your local reference's SHA-1 hash by providing the local SHA-1 reference as the ETag. + * + * **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. | + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. | + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. | + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. | + */ + get: operations["repos/get-commit"]; + }; + "/repos/{owner}/{repo}/commits/{ref}/check-runs": { + /** + * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array. + * + * Lists check runs for a commit ref. The `ref` can be a SHA, branch name, or a tag name. 
GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to get check runs. OAuth Apps and authenticated users must have the `repo` scope to get check runs in a private repository. + */ + get: operations["checks/list-for-ref"]; + }; + "/repos/{owner}/{repo}/commits/{ref}/check-suites": { + /** + * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array and a `null` value for `head_branch`. + * + * Lists check suites for a commit `ref`. The `ref` can be a SHA, branch name, or a tag name. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to list check suites. OAuth Apps and authenticated users must have the `repo` scope to get check suites in a private repository. + */ + get: operations["checks/list-suites-for-ref"]; + }; + "/repos/{owner}/{repo}/commits/{ref}/status": { + /** + * Users with pull access in a repository can access a combined view of commit statuses for a given ref. The ref can be a SHA, a branch name, or a tag name. + * + * + * Additionally, a combined `state` is returned. The `state` is one of: + * + * * **failure** if any of the contexts report as `error` or `failure` + * * **pending** if there are no statuses or a context is `pending` + * * **success** if the latest status for all contexts is `success` + */ + get: operations["repos/get-combined-status-for-ref"]; + }; + "/repos/{owner}/{repo}/commits/{ref}/statuses": { + /** + * Users with pull access in a repository can view commit statuses for a given ref. The ref can be a SHA, a branch name, or a tag name. Statuses are returned in reverse chronological order. The first status in the list will be the latest one. + * + * This resource is also available via a legacy route: `GET /repos/:owner/:repo/statuses/:ref`. + */ + get: operations["repos/list-commit-statuses-for-ref"]; + }; + "/repos/{owner}/{repo}/community/profile": { + /** + * Returns all community profile metrics for a repository. The repository must be public, and cannot be a fork. + * + * The returned metrics include an overall health score, the repository description, the presence of documentation, the + * detected code of conduct, the detected license, and the presence of ISSUE\_TEMPLATE, PULL\_REQUEST\_TEMPLATE, + * README, and CONTRIBUTING files. + * + * The `health_percentage` score is defined as a percentage of how many of + * these four documents are present: README, CONTRIBUTING, LICENSE, and + * CODE_OF_CONDUCT. For example, if all four documents are present, then + * the `health_percentage` is `100`. If only one is present, then the + * `health_percentage` is `25`. + * + * `content_reports_enabled` is only returned for organization-owned repositories. + */ + get: operations["repos/get-community-profile-metrics"]; + }; + "/repos/{owner}/{repo}/compare/{basehead}": { + /** + * The `basehead` param is comprised of two parts: `base` and `head`. Both must be branch names in `repo`. To compare branches across other repositories in the same network as `repo`, use the format `:branch`. + * + * The response from the API is equivalent to running the `git log base..head` command; however, commits are returned in chronological order. 
Pass the appropriate [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) to fetch diff and patch formats. + * + * The response also includes details on the files that were changed between the two commits. This includes the status of the change (for example, if a file was added, removed, modified, or renamed), and details of the change itself. For example, files with a `renamed` status have a `previous_filename` field showing the previous filename of the file, and files with a `modified` status have a `patch` field showing the changes made to the file. + * + * **Working with large comparisons** + * + * To process a response with a large number of commits, you can use (`per_page` or `page`) to paginate the results. When using paging, the list of changed files is only returned with page 1, but includes all changed files for the entire comparison. For more information on working with pagination, see "[Traversing with pagination](/rest/guides/traversing-with-pagination)." + * + * When calling this API without any paging parameters (`per_page` or `page`), the returned list is limited to 250 commits and the last commit in the list is the most recent of the entire comparison. When a paging parameter is specified, the first commit in the returned list of each page is the earliest. + * + * **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. | + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. | + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. | + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. 
| + */ + get: operations["repos/compare-commits-with-basehead"]; + }; + "/repos/{owner}/{repo}/contents/{path}": { + /** + * Gets the contents of a file or directory in a repository. Specify the file path or directory in `:path`. If you omit + * `:path`, you will receive the contents of the repository's root directory. See the description below regarding what the API response includes for directories. + * + * Files and symlinks support [a custom media type](https://docs.github.com/rest/reference/repos#custom-media-types) for + * retrieving the raw content or rendered HTML (when supported). All content types support [a custom media + * type](https://docs.github.com/rest/reference/repos#custom-media-types) to ensure the content is returned in a consistent + * object format. + * + * **Notes**: + * * To get a repository's contents recursively, you can [recursively get the tree](https://docs.github.com/rest/reference/git#trees). + * * This API has an upper limit of 1,000 files for a directory. If you need to retrieve more files, use the [Git Trees + * API](https://docs.github.com/rest/reference/git#get-a-tree). + * * Download URLs expire and are meant to be used just once. To ensure the download URL does not expire, please use the contents API to obtain a fresh download URL for each download. + * #### Size limits + * If the requested file's size is: + * * 1 MB or smaller: All features of this endpoint are supported. + * * Between 1-100 MB: Only the `raw` or `object` [custom media types](https://docs.github.com/rest/repos/contents#custom-media-types-for-repository-contents) are supported. Both will work as normal, except that when using the `object` media type, the `content` field will be an empty string and the `encoding` field will be `"none"`. To get the contents of these larger files, use the `raw` media type. + * * Greater than 100 MB: This endpoint is not supported. + * + * #### If the content is a directory + * The response will be an array of objects, one object for each item in the directory. + * When listing the contents of a directory, submodules have their "type" specified as "file". Logically, the value + * _should_ be "submodule". This behavior exists in API v3 [for backwards compatibility purposes](https://git.io/v1YCW). + * In the next major version of the API, the type will be returned as "submodule". + * + * #### If the content is a symlink + * If the requested `:path` points to a symlink, and the symlink's target is a normal file in the repository, then the + * API responds with the content of the file (in the format shown in the example). Otherwise, the API responds with an object + * describing the symlink itself. + * + * #### If the content is a submodule + * The `submodule_git_url` identifies the location of the submodule repository, and the `sha` identifies a specific + * commit within the submodule repository. Git uses the given URL when cloning the submodule repository, and checks out + * the submodule at that specific commit. + * + * If the submodule repository is not hosted on github.com, the Git URLs (`git_url` and `_links["git"]`) and the + * github.com URLs (`html_url` and `_links["html"]`) will have null values. + */ + get: operations["repos/get-content"]; + /** + * Creates a new file or replaces an existing file in a repository. You must authenticate using an access token with the `workflow` scope to use this endpoint.
+ * + * **Note:** If you use this endpoint and the "[Delete a file](https://docs.github.com/rest/reference/repos/#delete-file)" endpoint in parallel, the concurrent requests will conflict and you will receive errors. You must use these endpoints serially instead. + */ + put: operations["repos/create-or-update-file-contents"]; + /** + * Deletes a file in a repository. + * + * You can provide an additional `committer` parameter, which is an object containing information about the committer. Or, you can provide an `author` parameter, which is an object containing information about the author. + * + * The `author` section is optional and is filled in with the `committer` information if omitted. If the `committer` information is omitted, the authenticated user's information is used. + * + * You must provide values for both `name` and `email`, whether you choose to use `author` or `committer`. Otherwise, you'll receive a `422` status code. + * + * **Note:** If you use this endpoint and the "[Create or update file contents](https://docs.github.com/rest/reference/repos/#create-or-update-file-contents)" endpoint in parallel, the concurrent requests will conflict and you will receive errors. You must use these endpoints serially instead. + */ + delete: operations["repos/delete-file"]; + }; + "/repos/{owner}/{repo}/contributors": { + /** + * Lists contributors to the specified repository and sorts them by the number of commits per contributor in descending order. This endpoint may return information that is a few hours old because the GitHub REST API v3 caches contributor data to improve performance. + * + * GitHub identifies contributors by author email address. This endpoint groups contribution counts by GitHub user, which includes all associated email addresses. To improve performance, only the first 500 author email addresses in the repository link to GitHub users. The rest will appear as anonymous contributors without associated GitHub user information. + */ + get: operations["repos/list-contributors"]; + }; + "/repos/{owner}/{repo}/dependabot/alerts": { + /** + * You must use an access token with the `security_events` scope to use this endpoint with private repositories. + * You can also use tokens with the `public_repo` scope for public repositories only. + * GitHub Apps must have **Dependabot alerts** read permission to use this endpoint. + */ + get: operations["dependabot/list-alerts-for-repo"]; + }; + "/repos/{owner}/{repo}/dependabot/alerts/{alert_number}": { + /** + * You must use an access token with the `security_events` scope to use this endpoint with private repositories. + * You can also use tokens with the `public_repo` scope for public repositories only. + * GitHub Apps must have **Dependabot alerts** read permission to use this endpoint. + */ + get: operations["dependabot/get-alert"]; + /** + * You must use an access token with the `security_events` scope to use this endpoint with private repositories. + * You can also use tokens with the `public_repo` scope for public repositories only. + * GitHub Apps must have **Dependabot alerts** write permission to use this endpoint. + */ + patch: operations["dependabot/update-alert"]; + }; + "/repos/{owner}/{repo}/dependabot/secrets": { + /** Lists all secrets available in a repository without revealing their encrypted values. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` repository permission to use this endpoint. 
*/ + get: operations["dependabot/list-repo-secrets"]; + }; + "/repos/{owner}/{repo}/dependabot/secrets/public-key": { + /** Gets your public key, which you need to encrypt secrets. You need to encrypt a secret before you can create or update secrets. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `dependabot_secrets` repository permission to use this endpoint. */ + get: operations["dependabot/get-repo-public-key"]; + }; + "/repos/{owner}/{repo}/dependabot/secrets/{secret_name}": { + /** Gets a single repository secret without revealing its encrypted value. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` repository permission to use this endpoint. */ + get: operations["dependabot/get-repo-secret"]; + /** + * Creates or updates a repository secret with an encrypted value. Encrypt your secret using + * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access + * token with the `repo` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` repository + * permission to use this endpoint. + * + * #### Example encrypting a secret using Node.js + * + * Encrypt your secret using the [tweetsodium](https://github.com/github/tweetsodium) library. + * + * ``` + * const sodium = require('tweetsodium'); + * + * const key = "base64-encoded-public-key"; + * const value = "plain-text-secret"; + * + * // Convert the message and key to Uint8Array's (Buffer implements that interface) + * const messageBytes = Buffer.from(value); + * const keyBytes = Buffer.from(key, 'base64'); + * + * // Encrypt using LibSodium. + * const encryptedBytes = sodium.seal(messageBytes, keyBytes); + * + * // Base64 the encrypted secret + * const encrypted = Buffer.from(encryptedBytes).toString('base64'); + * + * console.log(encrypted); + * ``` + * + * + * #### Example encrypting a secret using Python + * + * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/latest/public/#nacl-public-sealedbox) with Python 3. + * + * ``` + * from base64 import b64encode + * from nacl import encoding, public + * + * def encrypt(public_key: str, secret_value: str) -> str: + * """Encrypt a Unicode string using the public key.""" + * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) + * sealed_box = public.SealedBox(public_key) + * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) + * return b64encode(encrypted).decode("utf-8") + * ``` + * + * #### Example encrypting a secret using C# + * + * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. + * + * ``` + * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); + * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); + * + * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); + * + * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); + * ``` + * + * #### Example encrypting a secret using Ruby + * + * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. 
+ * + * ```ruby + * require "rbnacl" + * require "base64" + * + * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") + * public_key = RbNaCl::PublicKey.new(key) + * + * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) + * encrypted_secret = box.encrypt("my_secret") + * + * # Print the base64 encoded secret + * puts Base64.strict_encode64(encrypted_secret) + * ``` + */ + put: operations["dependabot/create-or-update-repo-secret"]; + /** Deletes a secret in a repository using the secret name. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` repository permission to use this endpoint. */ + delete: operations["dependabot/delete-repo-secret"]; + }; + "/repos/{owner}/{repo}/dependency-graph/compare/{basehead}": { + /** Gets the diff of the dependency changes between two commits of a repository, based on the changes to the dependency manifests made in those commits. */ + get: operations["dependency-graph/diff-range"]; + }; + "/repos/{owner}/{repo}/dependency-graph/snapshots": { + /** Create a new snapshot of a repository's dependencies. You must authenticate using an access token with the `repo` scope to use this endpoint for a repository that the requesting user has access to. */ + post: operations["dependency-graph/create-repository-snapshot"]; + }; + "/repos/{owner}/{repo}/deployments": { + /** Simple filtering of deployments is available via query parameters: */ + get: operations["repos/list-deployments"]; + /** + * Deployments offer a few configurable parameters with certain defaults. + * + * The `ref` parameter can be any named branch, tag, or SHA. At GitHub we often deploy branches and verify them + * before we merge a pull request. + * + * The `environment` parameter allows deployments to be issued to different runtime environments. Teams often have + * multiple environments for verifying their applications, such as `production`, `staging`, and `qa`. This parameter + * makes it easier to track which environments have requested deployments. The default environment is `production`. + * + * The `auto_merge` parameter is used to ensure that the requested ref is not behind the repository's default branch. If + * the ref _is_ behind the default branch for the repository, we will attempt to merge it for you. If the merge succeeds, + * the API will return a successful merge commit. If merge conflicts prevent the merge from succeeding, the API will + * return a failure response. + * + * By default, [commit statuses](https://docs.github.com/rest/commits/statuses) for every submitted context must be in a `success` + * state. The `required_contexts` parameter allows you to specify a subset of contexts that must be `success`, or to + * specify contexts that have not yet been submitted. You are not required to use commit statuses to deploy. If you do + * not require any contexts or create any commit statuses, the deployment will always succeed. + * + * The `payload` parameter is available for any extra information that a deployment system might need. It is a JSON text + * field that will be passed on when a deployment event is dispatched. + * + * The `task` parameter is used by the deployment system to allow different execution paths. In the web world this might + * be `deploy:migrations` to run schema changes on the system. In the compiled world this could be a flag to compile an + * application with debugging enabled. 
+ * + * Users with `repo` or `repo_deployment` scopes can create a deployment for a given ref. + * + * #### Merged branch response + * You will see this response when GitHub automatically merges the base branch into the topic branch instead of creating + * a deployment. This auto-merge happens when: + * * Auto-merge option is enabled in the repository + * * Topic branch does not include the latest changes on the base branch, which is `master` in the response example + * * There are no merge conflicts + * + * If there are no new commits in the base branch, a new request to create a deployment should give a successful + * response. + * + * #### Merge conflict response + * This error happens when the `auto_merge` option is enabled and when the default branch (in this case `master`) can't + * be merged into the branch that's being deployed (in this case `topic-branch`), due to merge conflicts. + * + * #### Failed commit status checks + * This error happens when the `required_contexts` parameter indicates that one or more contexts need to have a `success` + * status for the commit to be deployed, but one or more of the required contexts do not have a state of `success`. + */ + post: operations["repos/create-deployment"]; + }; + "/repos/{owner}/{repo}/deployments/{deployment_id}": { + get: operations["repos/get-deployment"]; + /** + * If the repository only has one deployment, you can delete the deployment regardless of its status. If the repository has more than one deployment, you can only delete inactive deployments. This ensures that repositories with multiple deployments will always have an active deployment. Anyone with `repo` or `repo_deployment` scopes can delete a deployment. + * + * To set a deployment as inactive, you must: + * + * * Create a new deployment that is active so that the system has a record of the current state, then delete the previously active deployment. + * * Mark the active deployment as inactive by adding any non-successful deployment status. + * + * For more information, see "[Create a deployment](https://docs.github.com/rest/reference/repos/#create-a-deployment)" and "[Create a deployment status](https://docs.github.com/rest/reference/repos#create-a-deployment-status)." + */ + delete: operations["repos/delete-deployment"]; + }; + "/repos/{owner}/{repo}/deployments/{deployment_id}/statuses": { + /** Users with pull access can view deployment statuses for a deployment: */ + get: operations["repos/list-deployment-statuses"]; + /** + * Users with `push` access can create deployment statuses for a given deployment. + * + * GitHub Apps require `read & write` access to "Deployments" and `read-only` access to "Repo contents" (for private repos). OAuth Apps require the `repo_deployment` scope. + */ + post: operations["repos/create-deployment-status"]; + }; + "/repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}": { + /** Users with pull access can view a deployment status for a deployment: */ + get: operations["repos/get-deployment-status"]; + }; + "/repos/{owner}/{repo}/dispatches": { + /** + * You can use this endpoint to trigger a webhook event called `repository_dispatch` when you want activity that happens outside of GitHub to trigger a GitHub Actions workflow or GitHub App webhook. You must configure your GitHub Actions workflow or GitHub App to run when the `repository_dispatch` event occurs. For an example `repository_dispatch` webhook payload, see "[RepositoryDispatchEvent](https://docs.github.com/webhooks/event-payloads/#repository_dispatch)."
+ * + * The `client_payload` parameter is available for any extra information that your workflow might need. This parameter is a JSON payload that will be passed on when the webhook event is dispatched. For example, the `client_payload` can include a message that a user would like to send using a GitHub Actions workflow. Or the `client_payload` can be used as a test to debug your workflow. + * + * This endpoint requires write access to the repository by providing either: + * + * - Personal access tokens with `repo` scope. For more information, see "[Creating a personal access token for the command line](https://docs.github.com/articles/creating-a-personal-access-token-for-the-command-line)" in the GitHub Help documentation. + * - GitHub Apps with both `metadata:read` and `contents:read&write` permissions. + * + * This input example shows how you can use the `client_payload` as a test to debug your workflow. + */ + post: operations["repos/create-dispatch-event"]; + }; + "/repos/{owner}/{repo}/environments": { + /** + * Lists the environments for a repository. + * + * Anyone with read access to the repository can use this endpoint. If the repository is private, you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + get: operations["repos/get-all-environments"]; + }; + "/repos/{owner}/{repo}/environments/{environment_name}": { + /** + * **Note:** To get information about name patterns that branches must match in order to deploy to this environment, see "[Get a deployment branch policy](/rest/deployments/branch-policies#get-a-deployment-branch-policy)." + * + * Anyone with read access to the repository can use this endpoint. If the + * repository is private, you must use an access token with the `repo` scope. GitHub + * Apps must have the `actions:read` permission to use this endpoint. + */ + get: operations["repos/get-environment"]; + /** + * Create or update an environment with protection rules, such as required reviewers. For more information about environment protection rules, see "[Environments](/actions/reference/environments#environment-protection-rules)." + * + * **Note:** To create or update name patterns that branches must match in order to deploy to this environment, see "[Deployment branch policies](/rest/deployments/branch-policies)." + * + * **Note:** To create or update secrets for an environment, see "[Secrets](/rest/reference/actions#secrets)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration:write` permission for the repository to use this endpoint. + */ + put: operations["repos/create-or-update-environment"]; + /** You must authenticate using an access token with the repo scope to use this endpoint. */ + delete: operations["repos/delete-an-environment"]; + }; + "/repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies": { + /** + * Lists the deployment branch policies for an environment. + * + * Anyone with read access to the repository can use this endpoint. If the repository is private, you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + get: operations["repos/list-deployment-branch-policies"]; + /** + * Creates a deployment branch policy for an environment. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. 
GitHub Apps must have the `administration:write` permission for the repository to use this endpoint. + */ + post: operations["repos/create-deployment-branch-policy"]; + }; + "/repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}": { + /** + * Gets a deployment branch policy for an environment. + * + * Anyone with read access to the repository can use this endpoint. If the repository is private, you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + get: operations["repos/get-deployment-branch-policy"]; + /** + * Updates a deployment branch policy for an environment. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration:write` permission for the repository to use this endpoint. + */ + put: operations["repos/update-deployment-branch-policy"]; + /** + * Deletes a deployment branch policy for an environment. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration:write` permission for the repository to use this endpoint. + */ + delete: operations["repos/delete-deployment-branch-policy"]; + }; + "/repos/{owner}/{repo}/events": { + get: operations["activity/list-repo-events"]; + }; + "/repos/{owner}/{repo}/forks": { + get: operations["repos/list-forks"]; + /** + * Create a fork for the authenticated user. + * + * **Note**: Forking a Repository happens asynchronously. You may have to wait a short period of time before you can access the git objects. If this takes longer than 5 minutes, be sure to contact [GitHub Support](https://support.github.com/contact?tags=dotcom-rest-api). + */ + post: operations["repos/create-fork"]; + }; + "/repos/{owner}/{repo}/git/blobs": { + post: operations["git/create-blob"]; + }; + "/repos/{owner}/{repo}/git/blobs/{file_sha}": { + /** + * The `content` in the response will always be Base64 encoded. + * + * _Note_: This API supports blobs up to 100 megabytes in size. + */ + get: operations["git/get-blob"]; + }; + "/repos/{owner}/{repo}/git/commits": { + /** + * Creates a new Git [commit object](https://git-scm.com/book/en/v1/Git-Internals-Git-Objects#Commit-Objects). + * + * **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in the table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. | + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. | + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. 
| + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. | + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. | + */ + post: operations["git/create-commit"]; + }; + "/repos/{owner}/{repo}/git/commits/{commit_sha}": { + /** + * Gets a Git [commit object](https://git-scm.com/book/en/v1/Git-Internals-Git-Objects#Commit-Objects). + * + * **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in the table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. | + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. | + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. | + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. | + */ + get: operations["git/get-commit"]; + }; + "/repos/{owner}/{repo}/git/matching-refs/{ref}": { + /** + * Returns an array of references from your Git database that match the supplied name. 
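As a quick illustration of the `verification` object documented above for `git/get-commit`, the sketch below fetches a single commit and inspects `verified` and `reason`. It assumes the same `@octokit/core` client; the owner, repo and commit SHA are placeholders.

// Sketch: read the signature-verification result of one commit (assumes @octokit/core).
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

const { data: commit } = await octokit.request(
  "GET /repos/{owner}/{repo}/git/commits/{commit_sha}",
  { owner: "my-org", repo: "my-repo", commit_sha: "0123456789abcdef0123456789abcdef01234567" }, // placeholders
);

// `reason` is one of the values tabulated above, e.g. "valid", "unsigned", "expired_key".
const { verified, reason } = commit.verification;
console.log(`signature verified: ${verified} (reason: ${reason})`);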
The `:ref` in the URL must be formatted as `heads/` for branches and `tags/` for tags. If the `:ref` doesn't exist in the repository, but existing refs start with `:ref`, they will be returned as an array. + * + * When you use this endpoint without providing a `:ref`, it will return an array of all the references from your Git database, including notes and stashes if they exist on the server. Anything in the namespace is returned, not just `heads` and `tags`. + * + * **Note:** You need to explicitly [request a pull request](https://docs.github.com/rest/reference/pulls#get-a-pull-request) to trigger a test merge commit, which checks the mergeability of pull requests. For more information, see "[Checking mergeability of pull requests](https://docs.github.com/rest/guides/getting-started-with-the-git-database-api#checking-mergeability-of-pull-requests)". + * + * If you request matching references for a branch named `feature` but the branch `feature` doesn't exist, the response can still include other matching head refs that start with the word `feature`, such as `featureA` and `featureB`. + */ + get: operations["git/list-matching-refs"]; + }; + "/repos/{owner}/{repo}/git/ref/{ref}": { + /** + * Returns a single reference from your Git database. The `:ref` in the URL must be formatted as `heads/` for branches and `tags/` for tags. If the `:ref` doesn't match an existing ref, a `404` is returned. + * + * **Note:** You need to explicitly [request a pull request](https://docs.github.com/rest/reference/pulls#get-a-pull-request) to trigger a test merge commit, which checks the mergeability of pull requests. For more information, see "[Checking mergeability of pull requests](https://docs.github.com/rest/guides/getting-started-with-the-git-database-api#checking-mergeability-of-pull-requests)". + */ + get: operations["git/get-ref"]; + }; + "/repos/{owner}/{repo}/git/refs": { + /** Creates a reference for your repository. You are unable to create new references for empty repositories, even if the commit SHA-1 hash used exists. Empty repositories are repositories without branches. */ + post: operations["git/create-ref"]; + }; + "/repos/{owner}/{repo}/git/refs/{ref}": { + delete: operations["git/delete-ref"]; + patch: operations["git/update-ref"]; + }; + "/repos/{owner}/{repo}/git/tags": { + /** + * Note that creating a tag object does not create the reference that makes a tag in Git. If you want to create an annotated tag in Git, you have to do this call to create the tag object, and then [create](https://docs.github.com/rest/reference/git#create-a-reference) the `refs/tags/[tag]` reference. If you want to create a lightweight tag, you only have to [create](https://docs.github.com/rest/reference/git#create-a-reference) the tag reference - this call would be unnecessary. + * + * **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. 
| + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. | + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. | + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. | + */ + post: operations["git/create-tag"]; + }; + "/repos/{owner}/{repo}/git/tags/{tag_sha}": { + /** + * **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. | + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. | + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. 
| + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. | + */ + get: operations["git/get-tag"]; + }; + "/repos/{owner}/{repo}/git/trees": { + /** + * The tree creation API accepts nested entries. If you specify both a tree and a nested path modifying that tree, this endpoint will overwrite the contents of the tree with the new path contents, and create a new tree structure. + * + * If you use this endpoint to add, delete, or modify the file contents in a tree, you will need to commit the tree and then update a branch to point to the commit. For more information see "[Create a commit](https://docs.github.com/rest/reference/git#create-a-commit)" and "[Update a reference](https://docs.github.com/rest/reference/git#update-a-reference)." + * + * Returns an error if you try to delete a file that does not exist. + */ + post: operations["git/create-tree"]; + }; + "/repos/{owner}/{repo}/git/trees/{tree_sha}": { + /** + * Returns a single tree using the SHA1 value for that tree. + * + * If `truncated` is `true` in the response then the number of items in the `tree` array exceeded our maximum limit. If you need to fetch more items, use the non-recursive method of fetching trees, and fetch one sub-tree at a time. + * + * + * **Note**: The limit for the `tree` array is 100,000 entries with a maximum size of 7 MB when using the `recursive` parameter. + */ + get: operations["git/get-tree"]; + }; + "/repos/{owner}/{repo}/hooks": { + /** Lists webhooks for a repository. `last response` may return null if there have not been any deliveries within 30 days. */ + get: operations["repos/list-webhooks"]; + /** + * Repositories can have multiple webhooks installed. Each webhook should have a unique `config`. Multiple webhooks can + * share the same `config` as long as those webhooks do not have any `events` that overlap. + */ + post: operations["repos/create-webhook"]; + }; + "/repos/{owner}/{repo}/hooks/{hook_id}": { + /** Returns a webhook configured in a repository. To get only the webhook `config` properties, see "[Get a webhook configuration for a repository](/rest/reference/repos#get-a-webhook-configuration-for-a-repository)." */ + get: operations["repos/get-webhook"]; + delete: operations["repos/delete-webhook"]; + /** Updates a webhook configured in a repository. If you previously had a `secret` set, you must provide the same `secret` or set a new `secret` or the secret will be removed. If you are only updating individual webhook `config` properties, use "[Update a webhook configuration for a repository](/rest/reference/repos#update-a-webhook-configuration-for-a-repository)." */ + patch: operations["repos/update-webhook"]; + }; + "/repos/{owner}/{repo}/hooks/{hook_id}/config": { + /** + * Returns the webhook configuration for a repository. To get more information about the webhook, including the `active` state and `events`, use "[Get a repository webhook](/rest/reference/orgs#get-a-repository-webhook)." + * + * Access tokens must have the `read:repo_hook` or `repo` scope, and GitHub Apps must have the `repository_hooks:read` permission. + */ + get: operations["repos/get-webhook-config-for-repo"]; + /** + * Updates the webhook configuration for a repository. To update more information about the webhook, including the `active` state and `events`, use "[Update a repository webhook](/rest/reference/orgs#update-a-repository-webhook)." 
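The `git/create-tree` description above notes that after creating a tree you still have to create a commit and move a branch ref to it. Below is a minimal sketch of that blob, tree, commit, update-ref sequence; it assumes `@octokit/core`, the branch name and file contents are placeholders, and field names not quoted in the text follow the public request schema for these endpoints.

// Sketch: low-level "commit one file" flow via the Git database endpoints (assumes @octokit/core).
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
const owner = "my-org", repo = "my-repo", branch = "main"; // placeholders

// 1. Find the branch tip and the tree it points at.
const { data: ref } = await octokit.request("GET /repos/{owner}/{repo}/git/ref/{ref}", {
  owner, repo, ref: `heads/${branch}`,
});
const { data: parent } = await octokit.request("GET /repos/{owner}/{repo}/git/commits/{commit_sha}", {
  owner, repo, commit_sha: ref.object.sha,
});

// 2. Blob with the new file contents.
const { data: blob } = await octokit.request("POST /repos/{owner}/{repo}/git/blobs", {
  owner, repo, content: "hello from the API\n", encoding: "utf-8",
});

// 3. Tree that places the blob at a path, based on the parent commit's tree.
const { data: tree } = await octokit.request("POST /repos/{owner}/{repo}/git/trees", {
  owner, repo, base_tree: parent.tree.sha,
  tree: [{ path: "hello.txt", mode: "100644", type: "blob", sha: blob.sha }],
});

// 4. Commit pointing at the new tree, then move the branch ref to that commit.
const { data: commit } = await octokit.request("POST /repos/{owner}/{repo}/git/commits", {
  owner, repo, message: "Add hello.txt via the Git database API", tree: tree.sha, parents: [ref.object.sha],
});
await octokit.request("PATCH /repos/{owner}/{repo}/git/refs/{ref}", {
  owner, repo, ref: `heads/${branch}`, sha: commit.sha,
});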
+ * + * Access tokens must have the `write:repo_hook` or `repo` scope, and GitHub Apps must have the `repository_hooks:write` permission. + */ + patch: operations["repos/update-webhook-config-for-repo"]; + }; + "/repos/{owner}/{repo}/hooks/{hook_id}/deliveries": { + /** Returns a list of webhook deliveries for a webhook configured in a repository. */ + get: operations["repos/list-webhook-deliveries"]; + }; + "/repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}": { + /** Returns a delivery for a webhook configured in a repository. */ + get: operations["repos/get-webhook-delivery"]; + }; + "/repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts": { + /** Redeliver a webhook delivery for a webhook configured in a repository. */ + post: operations["repos/redeliver-webhook-delivery"]; + }; + "/repos/{owner}/{repo}/hooks/{hook_id}/pings": { + /** This will trigger a [ping event](https://docs.github.com/webhooks/#ping-event) to be sent to the hook. */ + post: operations["repos/ping-webhook"]; + }; + "/repos/{owner}/{repo}/hooks/{hook_id}/tests": { + /** + * This will trigger the hook with the latest push to the current repository if the hook is subscribed to `push` events. If the hook is not subscribed to `push` events, the server will respond with 204 but no test POST will be generated. + * + * **Note**: Previously `/repos/:owner/:repo/hooks/:hook_id/test` + */ + post: operations["repos/test-push-webhook"]; + }; + "/repos/{owner}/{repo}/import": { + /** + * View the progress of an import. + * + * **Import status** + * + * This section includes details about the possible values of the `status` field of the Import Progress response. + * + * An import that does not have errors will progress through these steps: + * + * * `detecting` - the "detection" step of the import is in progress because the request did not include a `vcs` parameter. The import is identifying the type of source control present at the URL. + * * `importing` - the "raw" step of the import is in progress. This is where commit data is fetched from the original repository. The import progress response will include `commit_count` (the total number of raw commits that will be imported) and `percent` (0 - 100, the current progress through the import). + * * `mapping` - the "rewrite" step of the import is in progress. This is where SVN branches are converted to Git branches, and where author updates are applied. The import progress response does not include progress information. + * * `pushing` - the "push" step of the import is in progress. This is where the importer updates the repository on GitHub. The import progress response will include `push_percent`, which is the percent value reported by `git push` when it is "Writing objects". + * * `complete` - the import is complete, and the repository is ready on GitHub. + * + * If there are problems, you will see one of these in the `status` field: + * + * * `auth_failed` - the import requires authentication in order to connect to the original repository. To update authentication for the import, please see the [Update an import](https://docs.github.com/rest/reference/migrations#update-an-import) section. + * * `error` - the import encountered an error. The import progress response will include the `failed_step` and an error message. Contact [GitHub Support](https://support.github.com/contact?tags=dotcom-rest-api) for more information. + * * `detection_needs_auth` - the importer requires authentication for the originating repository to continue detection. 
To update authentication for the import, please see the [Update an import](https://docs.github.com/rest/reference/migrations#update-an-import) section. + * * `detection_found_nothing` - the importer didn't recognize any source control at the URL. To resolve, [Cancel the import](https://docs.github.com/rest/reference/migrations#cancel-an-import) and [retry](https://docs.github.com/rest/reference/migrations#start-an-import) with the correct URL. + * * `detection_found_multiple` - the importer found several projects or repositories at the provided URL. When this is the case, the Import Progress response will also include a `project_choices` field with the possible project choices as values. To update project choice, please see the [Update an import](https://docs.github.com/rest/reference/migrations#update-an-import) section. + * + * **The project_choices field** + * + * When multiple projects are found at the provided URL, the response hash will include a `project_choices` field, the value of which is an array of hashes each representing a project choice. The exact key/value pairs of the project hashes will differ depending on the version control type. + * + * **Git LFS related fields** + * + * This section includes details about Git LFS related fields that may be present in the Import Progress response. + * + * * `use_lfs` - describes whether the import has been opted in or out of using Git LFS. The value can be `opt_in`, `opt_out`, or `undecided` if no action has been taken. + * * `has_large_files` - the boolean value describing whether files larger than 100MB were found during the `importing` step. + * * `large_files_size` - the total size in gigabytes of files larger than 100MB found in the originating repository. + * * `large_files_count` - the total number of files larger than 100MB found in the originating repository. To see a list of these files, make a "Get Large Files" request. + */ + get: operations["migrations/get-import-status"]; + /** Start a source import to a GitHub repository using GitHub Importer. */ + put: operations["migrations/start-import"]; + /** Stop an import for a repository. */ + delete: operations["migrations/cancel-import"]; + /** + * An import can be updated with credentials or a project choice by passing in the appropriate parameters in this API + * request. If no parameters are provided, the import will be restarted. + * + * Some servers (e.g. TFS servers) can have several projects at a single URL. In those cases the import progress will + * have the status `detection_found_multiple` and the Import Progress response will include a `project_choices` array. + * You can select the project to import by providing one of the objects in the `project_choices` array in the update request. + */ + patch: operations["migrations/update-import"]; + }; + "/repos/{owner}/{repo}/import/authors": { + /** + * Each type of source control system represents authors in a different way. For example, a Git commit author has a display name and an email address, but a Subversion commit author just has a username. The GitHub Importer will make the author information valid, but the author might not be correct. For example, it will change the bare Subversion username `hubot` into something like `hubot `. + * + * This endpoint and the [Map a commit author](https://docs.github.com/rest/reference/migrations#map-a-commit-author) endpoint allow you to provide correct Git author information. 
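To make the import status values listed above concrete, here is a small sketch that starts a source import and then polls `GET /repos/{owner}/{repo}/import` until it reports `complete` or one of the error states. It assumes `@octokit/core`; the source URL and repository names are placeholders, and `vcs_url` follows the public request schema rather than the excerpt above.

// Sketch: start an import and poll its status (assumes @octokit/core).
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
const owner = "my-org", repo = "my-repo"; // placeholders

await octokit.request("PUT /repos/{owner}/{repo}/import", {
  owner, repo,
  vcs_url: "https://example.com/svn/project", // placeholder source repository
});

// Poll until the import leaves the in-progress states described above.
for (;;) {
  const { data } = await octokit.request("GET /repos/{owner}/{repo}/import", { owner, repo });
  console.log(`import status: ${data.status}`);
  if (data.status === "complete") break;
  if (["auth_failed", "error", "detection_needs_auth", "detection_found_nothing", "detection_found_multiple"].includes(data.status)) {
    throw new Error(`import stopped with status ${data.status}`);
  }
  await new Promise((resolve) => setTimeout(resolve, 5_000)); // wait before re-checking
}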
+ */ + get: operations["migrations/get-commit-authors"]; + }; + "/repos/{owner}/{repo}/import/authors/{author_id}": { + /** Update an author's identity for the import. Your application can continue updating authors any time before you push new commits to the repository. */ + patch: operations["migrations/map-commit-author"]; + }; + "/repos/{owner}/{repo}/import/large_files": { + /** List files larger than 100MB found during the import */ + get: operations["migrations/get-large-files"]; + }; + "/repos/{owner}/{repo}/import/lfs": { + /** You can import repositories from Subversion, Mercurial, and TFS that include files larger than 100MB. This ability is powered by [Git LFS](https://git-lfs.github.com). You can learn more about our LFS feature and working with large files [on our help site](https://docs.github.com/articles/versioning-large-files/). */ + patch: operations["migrations/set-lfs-preference"]; + }; + "/repos/{owner}/{repo}/installation": { + /** + * Enables an authenticated GitHub App to find the repository's installation information. The installation's account type will be either an organization or a user account, depending which account the repository belongs to. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + get: operations["apps/get-repo-installation"]; + }; + "/repos/{owner}/{repo}/interaction-limits": { + /** Shows which type of GitHub user can interact with this repository and when the restriction expires. If there are no restrictions, you will see an empty response. */ + get: operations["interactions/get-restrictions-for-repo"]; + /** Temporarily restricts interactions to a certain type of GitHub user within the given repository. You must have owner or admin access to set these restrictions. If an interaction limit is set for the user or organization that owns this repository, you will receive a `409 Conflict` response and will not be able to use this endpoint to change the interaction limit for a single repository. */ + put: operations["interactions/set-restrictions-for-repo"]; + /** Removes all interaction restrictions from the given repository. You must have owner or admin access to remove restrictions. If the interaction limit is set for the user or organization that owns this repository, you will receive a `409 Conflict` response and will not be able to use this endpoint to change the interaction limit for a single repository. */ + delete: operations["interactions/remove-restrictions-for-repo"]; + }; + "/repos/{owner}/{repo}/invitations": { + /** When authenticating as a user with admin rights to a repository, this endpoint will list all currently open repository invitations. */ + get: operations["repos/list-invitations"]; + }; + "/repos/{owner}/{repo}/invitations/{invitation_id}": { + delete: operations["repos/delete-invitation"]; + patch: operations["repos/update-invitation"]; + }; + "/repos/{owner}/{repo}/issues": { + /** + * List issues in a repository. + * + * **Note**: GitHub's REST API v3 considers every pull request an issue, but not every issue is a pull request. For this + * reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by + * the `pull_request` key. Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. 
To find out the pull + * request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint. + */ + get: operations["issues/list-for-repo"]; + /** + * Any user with pull access to a repository can create an issue. If [issues are disabled in the repository](https://docs.github.com/articles/disabling-issues/), the API returns a `410 Gone` status. + * + * This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + post: operations["issues/create"]; + }; + "/repos/{owner}/{repo}/issues/comments": { + /** By default, Issue Comments are ordered by ascending ID. */ + get: operations["issues/list-comments-for-repo"]; + }; + "/repos/{owner}/{repo}/issues/comments/{comment_id}": { + get: operations["issues/get-comment"]; + delete: operations["issues/delete-comment"]; + patch: operations["issues/update-comment"]; + }; + "/repos/{owner}/{repo}/issues/comments/{comment_id}/reactions": { + /** List the reactions to an [issue comment](https://docs.github.com/rest/reference/issues#comments). */ + get: operations["reactions/list-for-issue-comment"]; + /** Create a reaction to an [issue comment](https://docs.github.com/rest/reference/issues#comments). A response with an HTTP `200` status means that you already added the reaction type to this issue comment. */ + post: operations["reactions/create-for-issue-comment"]; + }; + "/repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}": { + /** + * **Note:** You can also specify a repository by `repository_id` using the route `DELETE delete /repositories/:repository_id/issues/comments/:comment_id/reactions/:reaction_id`. + * + * Delete a reaction to an [issue comment](https://docs.github.com/rest/reference/issues#comments). + */ + delete: operations["reactions/delete-for-issue-comment"]; + }; + "/repos/{owner}/{repo}/issues/events": { + get: operations["issues/list-events-for-repo"]; + }; + "/repos/{owner}/{repo}/issues/events/{event_id}": { + get: operations["issues/get-event"]; + }; + "/repos/{owner}/{repo}/issues/{issue_number}": { + /** + * The API returns a [`301 Moved Permanently` status](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-redirects-redirects) if the issue was + * [transferred](https://docs.github.com/articles/transferring-an-issue-to-another-repository/) to another repository. If + * the issue was transferred to or deleted from a repository where the authenticated user lacks read access, the API + * returns a `404 Not Found` status. If the issue was deleted from a repository where the authenticated user has read + * access, the API returns a `410 Gone` status. To receive webhook events for transferred and deleted issues, subscribe + * to the [`issues`](https://docs.github.com/webhooks/event-payloads/#issues) webhook. + * + * **Note**: GitHub's REST API v3 considers every pull request an issue, but not every issue is a pull request. For this + * reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by + * the `pull_request` key. 
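Because the issues endpoints return pull requests as well, as explained above, a caller that only wants plain issues has to filter on the `pull_request` key itself. A minimal sketch, again assuming `@octokit/core` and placeholder repository names:

// Sketch: list open issues and drop the entries that are really pull requests (assumes @octokit/core).
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

const { data: items } = await octokit.request("GET /repos/{owner}/{repo}/issues", {
  owner: "my-org", repo: "my-repo", state: "open", per_page: 100, // placeholders and common query params
});

// Entries with a `pull_request` key are pull requests; everything else is a plain issue.
const issuesOnly = items.filter((item) => !("pull_request" in item));
console.log(`${issuesOnly.length} issues, ${items.length - issuesOnly.length} pull requests skipped`);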
Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull + * request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint. + */ + get: operations["issues/get"]; + /** Issue owners and users with push access can edit an issue. */ + patch: operations["issues/update"]; + }; + "/repos/{owner}/{repo}/issues/{issue_number}/assignees": { + /** Adds up to 10 assignees to an issue. Users already assigned to an issue are not replaced. */ + post: operations["issues/add-assignees"]; + /** Removes one or more assignees from an issue. */ + delete: operations["issues/remove-assignees"]; + }; + "/repos/{owner}/{repo}/issues/{issue_number}/comments": { + /** Issue Comments are ordered by ascending ID. */ + get: operations["issues/list-comments"]; + /** This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. */ + post: operations["issues/create-comment"]; + }; + "/repos/{owner}/{repo}/issues/{issue_number}/events": { + get: operations["issues/list-events"]; + }; + "/repos/{owner}/{repo}/issues/{issue_number}/labels": { + get: operations["issues/list-labels-on-issue"]; + /** Removes any previous labels and sets the new labels for an issue. */ + put: operations["issues/set-labels"]; + post: operations["issues/add-labels"]; + delete: operations["issues/remove-all-labels"]; + }; + "/repos/{owner}/{repo}/issues/{issue_number}/labels/{name}": { + /** Removes the specified label from the issue, and returns the remaining labels on the issue. This endpoint returns a `404 Not Found` status if the label does not exist. */ + delete: operations["issues/remove-label"]; + }; + "/repos/{owner}/{repo}/issues/{issue_number}/lock": { + /** + * Users with push access can lock an issue or pull request's conversation. + * + * Note that, if you choose not to pass any parameters, you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." + */ + put: operations["issues/lock"]; + /** Users with push access can unlock an issue's conversation. */ + delete: operations["issues/unlock"]; + }; + "/repos/{owner}/{repo}/issues/{issue_number}/reactions": { + /** List the reactions to an [issue](https://docs.github.com/rest/reference/issues). */ + get: operations["reactions/list-for-issue"]; + /** Create a reaction to an [issue](https://docs.github.com/rest/reference/issues/). A response with an HTTP `200` status means that you already added the reaction type to this issue. */ + post: operations["reactions/create-for-issue"]; + }; + "/repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}": { + /** + * **Note:** You can also specify a repository by `repository_id` using the route `DELETE /repositories/:repository_id/issues/:issue_number/reactions/:reaction_id`. + * + * Delete a reaction to an [issue](https://docs.github.com/rest/reference/issues/). 
+ */ + delete: operations["reactions/delete-for-issue"]; + }; + "/repos/{owner}/{repo}/issues/{issue_number}/timeline": { + get: operations["issues/list-events-for-timeline"]; + }; + "/repos/{owner}/{repo}/keys": { + get: operations["repos/list-deploy-keys"]; + /** You can create a read-only deploy key. */ + post: operations["repos/create-deploy-key"]; + }; + "/repos/{owner}/{repo}/keys/{key_id}": { + get: operations["repos/get-deploy-key"]; + /** Deploy keys are immutable. If you need to update a key, remove the key and create a new one instead. */ + delete: operations["repos/delete-deploy-key"]; + }; + "/repos/{owner}/{repo}/labels": { + get: operations["issues/list-labels-for-repo"]; + post: operations["issues/create-label"]; + }; + "/repos/{owner}/{repo}/labels/{name}": { + get: operations["issues/get-label"]; + delete: operations["issues/delete-label"]; + patch: operations["issues/update-label"]; + }; + "/repos/{owner}/{repo}/languages": { + /** Lists languages for the specified repository. The value shown for each language is the number of bytes of code written in that language. */ + get: operations["repos/list-languages"]; + }; + "/repos/{owner}/{repo}/lfs": { + put: operations["repos/enable-lfs-for-repo"]; + delete: operations["repos/disable-lfs-for-repo"]; + }; + "/repos/{owner}/{repo}/license": { + /** + * This method returns the contents of the repository's license file, if one is detected. + * + * Similar to [Get repository content](https://docs.github.com/rest/reference/repos#get-repository-content), this method also supports [custom media types](https://docs.github.com/rest/overview/media-types) for retrieving the raw license content or rendered license HTML. + */ + get: operations["licenses/get-for-repo"]; + }; + "/repos/{owner}/{repo}/merge-upstream": { + /** Sync a branch of a forked repository to keep it up-to-date with the upstream repository. */ + post: operations["repos/merge-upstream"]; + }; + "/repos/{owner}/{repo}/merges": { + post: operations["repos/merge"]; + }; + "/repos/{owner}/{repo}/milestones": { + get: operations["issues/list-milestones"]; + post: operations["issues/create-milestone"]; + }; + "/repos/{owner}/{repo}/milestones/{milestone_number}": { + get: operations["issues/get-milestone"]; + delete: operations["issues/delete-milestone"]; + patch: operations["issues/update-milestone"]; + }; + "/repos/{owner}/{repo}/milestones/{milestone_number}/labels": { + get: operations["issues/list-labels-for-milestone"]; + }; + "/repos/{owner}/{repo}/notifications": { + /** List all notifications for the current user. */ + get: operations["activity/list-repo-notifications-for-authenticated-user"]; + /** Marks all notifications in a repository as "read" removes them from the [default view on GitHub](https://github.com/notifications). If the number of notifications is too large to complete in one request, you will receive a `202 Accepted` status and GitHub will run an asynchronous process to mark notifications as "read." To check whether any "unread" notifications remain, you can use the [List repository notifications for the authenticated user](https://docs.github.com/rest/reference/activity#list-repository-notifications-for-the-authenticated-user) endpoint and pass the query parameter `all=false`. */ + put: operations["activity/mark-repo-notifications-as-read"]; + }; + "/repos/{owner}/{repo}/pages": { + get: operations["repos/get-pages"]; + /** Updates information for a GitHub Pages site. 
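The `repos/merge-upstream` entry above is the one-call way to keep a fork's branch in sync with its upstream repository. A minimal sketch, assuming `@octokit/core`; the fork owner, repository and branch names are placeholders, and `branch` follows the public request schema.

// Sketch: sync a fork's branch with its upstream repository (assumes @octokit/core).
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

const { data } = await octokit.request("POST /repos/{owner}/{repo}/merge-upstream", {
  owner: "my-user", // placeholder: owner of the fork
  repo: "my-fork",  // placeholder: the forked repository
  branch: "main",   // placeholder: branch of the fork to bring up to date
});
console.log(data.message); // short description of how the branch was updated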
For more information, see "[About GitHub Pages](/github/working-with-github-pages/about-github-pages). */ + put: operations["repos/update-information-about-pages-site"]; + /** Configures a GitHub Pages site. For more information, see "[About GitHub Pages](/github/working-with-github-pages/about-github-pages)." */ + post: operations["repos/create-pages-site"]; + delete: operations["repos/delete-pages-site"]; + }; + "/repos/{owner}/{repo}/pages/builds": { + get: operations["repos/list-pages-builds"]; + /** + * You can request that your site be built from the latest revision on the default branch. This has the same effect as pushing a commit to your default branch, but does not require an additional commit. Manually triggering page builds can be helpful when diagnosing build warnings and failures. + * + * Build requests are limited to one concurrent build per repository and one concurrent build per requester. If you request a build while another is still in progress, the second request will be queued until the first completes. + */ + post: operations["repos/request-pages-build"]; + }; + "/repos/{owner}/{repo}/pages/builds/latest": { + get: operations["repos/get-latest-pages-build"]; + }; + "/repos/{owner}/{repo}/pages/builds/{build_id}": { + get: operations["repos/get-pages-build"]; + }; + "/repos/{owner}/{repo}/pages/deployment": { + /** + * Create a GitHub Pages deployment for a repository. + * + * Users must have write permissions. GitHub Apps must have the `pages:write` permission to use this endpoint. + */ + post: operations["repos/create-pages-deployment"]; + }; + "/repos/{owner}/{repo}/pages/health": { + /** + * Gets a health check of the DNS settings for the `CNAME` record configured for a repository's GitHub Pages. + * + * The first request to this endpoint returns a `202 Accepted` status and starts an asynchronous background task to get the results for the domain. After the background task completes, subsequent requests to this endpoint return a `200 OK` status with the health check results in the response. + * + * Users must have admin or owner permissions. GitHub Apps must have the `pages:write` and `administration:write` permission to use this endpoint. + */ + get: operations["repos/get-pages-health-check"]; + }; + "/repos/{owner}/{repo}/projects": { + /** Lists the projects in a repository. Returns a `404 Not Found` status if projects are disabled in the repository. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. */ + get: operations["projects/list-for-repo"]; + /** Creates a repository project board. Returns a `410 Gone` status if projects are disabled in the repository or if the repository does not have existing classic projects. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. */ + post: operations["projects/create-for-repo"]; + }; + "/repos/{owner}/{repo}/pulls": { + /** Draft pull requests are available in public repositories with GitHub Free and GitHub Free for organizations, GitHub Pro, and legacy per-repository billing plans, and in public and private repositories with GitHub Team and GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. 
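The page-build endpoints above can be driven the same way: request a build of the default branch, then read back the latest build to see whether it succeeded. A minimal sketch assuming `@octokit/core` and placeholder names:

// Sketch: trigger a GitHub Pages build and check the latest build result (assumes @octokit/core).
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
const owner = "my-org", repo = "my-repo"; // placeholders

// Queue a build from the latest revision on the default branch.
await octokit.request("POST /repos/{owner}/{repo}/pages/builds", { owner, repo });

// Later, inspect the most recent build (one concurrent build per repository, as noted above).
const { data: build } = await octokit.request("GET /repos/{owner}/{repo}/pages/builds/latest", { owner, repo });
console.log(`latest pages build: ${build.status}`);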
*/ + get: operations["pulls/list"]; + /** + * Draft pull requests are available in public repositories with GitHub Free and GitHub Free for organizations, GitHub Pro, and legacy per-repository billing plans, and in public and private repositories with GitHub Team and GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * To open or update a pull request in a public repository, you must have write access to the head or the source branch. For organization-owned repositories, you must be a member of the organization that owns the repository to open or update a pull request. + * + * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-rate-limits)" for details. + */ + post: operations["pulls/create"]; + }; + "/repos/{owner}/{repo}/pulls/comments": { + /** Lists review comments for all pull requests in a repository. By default, review comments are in ascending order by ID. */ + get: operations["pulls/list-review-comments-for-repo"]; + }; + "/repos/{owner}/{repo}/pulls/comments/{comment_id}": { + /** Provides details for a review comment. */ + get: operations["pulls/get-review-comment"]; + /** Deletes a review comment. */ + delete: operations["pulls/delete-review-comment"]; + /** Enables you to edit a review comment. */ + patch: operations["pulls/update-review-comment"]; + }; + "/repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions": { + /** List the reactions to a [pull request review comment](https://docs.github.com/rest/reference/pulls#review-comments). */ + get: operations["reactions/list-for-pull-request-review-comment"]; + /** Create a reaction to a [pull request review comment](https://docs.github.com/rest/reference/pulls#comments). A response with an HTTP `200` status means that you already added the reaction type to this pull request review comment. */ + post: operations["reactions/create-for-pull-request-review-comment"]; + }; + "/repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}": { + /** + * **Note:** You can also specify a repository by `repository_id` using the route `DELETE /repositories/:repository_id/pulls/comments/:comment_id/reactions/:reaction_id.` + * + * Delete a reaction to a [pull request review comment](https://docs.github.com/rest/reference/pulls#review-comments). + */ + delete: operations["reactions/delete-for-pull-request-comment"]; + }; + "/repos/{owner}/{repo}/pulls/{pull_number}": { + /** + * Draft pull requests are available in public repositories with GitHub Free and GitHub Free for organizations, GitHub Pro, and legacy per-repository billing plans, and in public and private repositories with GitHub Team and GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Lists details of a pull request by providing its number. 
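For `pulls/create`, the request needs a title plus the head and base branches; everything else in this sketch (names, body text) is hypothetical, and the field names follow the public request schema rather than the excerpt above. Assumes `@octokit/core`.

// Sketch: open a pull request from a feature branch into the default branch (assumes @octokit/core).
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

const { data: pr } = await octokit.request("POST /repos/{owner}/{repo}/pulls", {
  owner: "my-org",                // placeholder
  repo: "my-repo",                // placeholder
  title: "Update generated code", // hypothetical
  head: "feature-branch",         // branch with the changes (hypothetical)
  base: "main",                   // branch to merge into (hypothetical)
  body: "Automated update.",      // hypothetical
});
console.log(`opened pull request #${pr.number}`);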
+ * + * When you get, [create](https://docs.github.com/rest/reference/pulls/#create-a-pull-request), or [edit](https://docs.github.com/rest/reference/pulls#update-a-pull-request) a pull request, GitHub creates a merge commit to test whether the pull request can be automatically merged into the base branch. This test commit is not added to the base branch or the head branch. You can review the status of the test commit using the `mergeable` key. For more information, see "[Checking mergeability of pull requests](https://docs.github.com/rest/guides/getting-started-with-the-git-database-api#checking-mergeability-of-pull-requests)". + * + * The value of the `mergeable` attribute can be `true`, `false`, or `null`. If the value is `null`, then GitHub has started a background job to compute the mergeability. After giving the job time to complete, resubmit the request. When the job finishes, you will see a non-`null` value for the `mergeable` attribute in the response. If `mergeable` is `true`, then `merge_commit_sha` will be the SHA of the _test_ merge commit. + * + * The value of the `merge_commit_sha` attribute changes depending on the state of the pull request. Before merging a pull request, the `merge_commit_sha` attribute holds the SHA of the _test_ merge commit. After merging a pull request, the `merge_commit_sha` attribute changes depending on how you merged the pull request: + * + * * If merged as a [merge commit](https://docs.github.com/articles/about-merge-methods-on-github/), `merge_commit_sha` represents the SHA of the merge commit. + * * If merged via a [squash](https://docs.github.com/articles/about-merge-methods-on-github/#squashing-your-merge-commits), `merge_commit_sha` represents the SHA of the squashed commit on the base branch. + * * If [rebased](https://docs.github.com/articles/about-merge-methods-on-github/#rebasing-and-merging-your-commits), `merge_commit_sha` represents the commit that the base branch was updated to. + * + * Pass the appropriate [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) to fetch diff and patch formats. + */ + get: operations["pulls/get"]; + /** + * Draft pull requests are available in public repositories with GitHub Free and GitHub Free for organizations, GitHub Pro, and legacy per-repository billing plans, and in public and private repositories with GitHub Team and GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * To open or update a pull request in a public repository, you must have write access to the head or the source branch. For organization-owned repositories, you must be a member of the organization that owns the repository to open or update a pull request. + */ + patch: operations["pulls/update"]; + }; + "/repos/{owner}/{repo}/pulls/{pull_number}/codespaces": { + /** + * Creates a codespace owned by the authenticated user for the specified pull request. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces` repository permission to use this endpoint. + */ + post: operations["codespaces/create-with-pr-for-authenticated-user"]; + }; + "/repos/{owner}/{repo}/pulls/{pull_number}/comments": { + /** Lists all review comments for a pull request. By default, review comments are in ascending order by ID. 
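The `mergeable` behaviour described above (null while GitHub computes the test merge, then true or false) suggests a simple poll-until-known loop. A sketch assuming `@octokit/core`; the retry count and delay are arbitrary and the identifiers are placeholders.

// Sketch: re-fetch a pull request until `mergeable` is no longer null (assumes @octokit/core).
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
const owner = "my-org", repo = "my-repo", pull_number = 42; // placeholders

let mergeable: boolean | null = null;
for (let attempt = 0; attempt < 10 && mergeable === null; attempt++) {
  const { data: pr } = await octokit.request(
    "GET /repos/{owner}/{repo}/pulls/{pull_number}",
    { owner, repo, pull_number },
  );
  mergeable = pr.mergeable; // null means the background job is still running
  if (mergeable === null) await new Promise((r) => setTimeout(r, 2_000));
  else console.log(`mergeable: ${mergeable}, test merge commit: ${pr.merge_commit_sha}`);
}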
*/ + get: operations["pulls/list-review-comments"]; + /** + * Creates a review comment in the pull request diff. To add a regular comment to a pull request timeline, see "[Create an issue comment](https://docs.github.com/rest/reference/issues#create-an-issue-comment)." We recommend creating a review comment using `line`, `side`, and optionally `start_line` and `start_side` if your comment applies to more than one line in the pull request diff. + * + * The `position` parameter is deprecated. If you use `position`, the `line`, `side`, `start_line`, and `start_side` parameters are not required. + * + * **Note:** The position value equals the number of lines down from the first "@@" hunk header in the file you want to add a comment. The line just below the "@@" line is position 1, the next line is position 2, and so on. The position in the diff continues to increase through lines of whitespace and additional hunks until the beginning of a new file. + * + * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + post: operations["pulls/create-review-comment"]; + }; + "/repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies": { + /** + * Creates a reply to a review comment for a pull request. For the `comment_id`, provide the ID of the review comment you are replying to. This must be the ID of a _top-level review comment_, not a reply to that comment. Replies to replies are not supported. + * + * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + post: operations["pulls/create-reply-for-review-comment"]; + }; + "/repos/{owner}/{repo}/pulls/{pull_number}/commits": { + /** Lists a maximum of 250 commits for a pull request. To receive a complete commit list for pull requests with more than 250 commits, use the [List commits](https://docs.github.com/rest/reference/repos#list-commits) endpoint. */ + get: operations["pulls/list-commits"]; + }; + "/repos/{owner}/{repo}/pulls/{pull_number}/files": { + /** **Note:** Responses include a maximum of 3000 files. The paginated response returns 30 files per page by default. */ + get: operations["pulls/list-files"]; + }; + "/repos/{owner}/{repo}/pulls/{pull_number}/merge": { + get: operations["pulls/check-if-merged"]; + /** This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. 
See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. */ + put: operations["pulls/merge"]; + }; + "/repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers": { + /** Gets the users or teams whose review is requested for a pull request. Once a requested reviewer submits a review, they are no longer considered a requested reviewer. Their review will instead be returned by the [List reviews for a pull request](https://docs.github.com/rest/pulls/reviews#list-reviews-for-a-pull-request) operation. */ + get: operations["pulls/list-requested-reviewers"]; + /** This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. */ + post: operations["pulls/request-reviewers"]; + delete: operations["pulls/remove-requested-reviewers"]; + }; + "/repos/{owner}/{repo}/pulls/{pull_number}/reviews": { + /** The list of reviews returns in chronological order. */ + get: operations["pulls/list-reviews"]; + /** + * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + * + * Pull request reviews created in the `PENDING` state are not submitted and therefore do not include the `submitted_at` property in the response. To create a pending review for a pull request, leave the `event` parameter blank. For more information about submitting a `PENDING` review, see "[Submit a review for a pull request](https://docs.github.com/rest/pulls#submit-a-review-for-a-pull-request)." + * + * **Note:** To comment on a specific line in a file, you need to first determine the _position_ of that line in the diff. The GitHub REST API v3 offers the `application/vnd.github.v3.diff` [media type](https://docs.github.com/rest/overview/media-types#commits-commit-comparison-and-pull-requests). To see a pull request diff, add this media type to the `Accept` header of a call to the [single pull request](https://docs.github.com/rest/reference/pulls#get-a-pull-request) endpoint. + * + * The `position` value equals the number of lines down from the first "@@" hunk header in the file you want to add a comment. The line just below the "@@" line is position 1, the next line is position 2, and so on. The position in the diff continues to increase through lines of whitespace and additional hunks until the beginning of a new file. + */ + post: operations["pulls/create-review"]; + }; + "/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}": { + get: operations["pulls/get-review"]; + /** Update the review summary comment with new text. 
*/ + put: operations["pulls/update-review"]; + delete: operations["pulls/delete-pending-review"]; + }; + "/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments": { + /** List comments for a specific pull request review. */ + get: operations["pulls/list-comments-for-review"]; + }; + "/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals": { + /** **Note:** To dismiss a pull request review on a [protected branch](https://docs.github.com/rest/reference/repos#branches), you must be a repository administrator or be included in the list of people or teams who can dismiss pull request reviews. */ + put: operations["pulls/dismiss-review"]; + }; + "/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events": { + /** Submits a pending review for a pull request. For more information about creating a pending review for a pull request, see "[Create a review for a pull request](https://docs.github.com/rest/pulls#create-a-review-for-a-pull-request)." */ + post: operations["pulls/submit-review"]; + }; + "/repos/{owner}/{repo}/pulls/{pull_number}/update-branch": { + /** Updates the pull request branch with the latest upstream changes by merging HEAD from the base branch into the pull request branch. */ + put: operations["pulls/update-branch"]; + }; + "/repos/{owner}/{repo}/readme": { + /** + * Gets the preferred README for a repository. + * + * READMEs support [custom media types](https://docs.github.com/rest/reference/repos#custom-media-types) for retrieving the raw content or rendered HTML. + */ + get: operations["repos/get-readme"]; + }; + "/repos/{owner}/{repo}/readme/{dir}": { + /** + * Gets the README from a repository directory. + * + * READMEs support [custom media types](https://docs.github.com/rest/reference/repos#custom-media-types) for retrieving the raw content or rendered HTML. + */ + get: operations["repos/get-readme-in-directory"]; + }; + "/repos/{owner}/{repo}/releases": { + /** + * This returns a list of releases, which does not include regular Git tags that have not been associated with a release. To get a list of Git tags, use the [Repository Tags API](https://docs.github.com/rest/reference/repos#list-repository-tags). + * + * Information about published releases are available to everyone. Only users with push access will receive listings for draft releases. + */ + get: operations["repos/list-releases"]; + /** + * Users with push access to the repository can create a release. + * + * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + post: operations["repos/create-release"]; + }; + "/repos/{owner}/{repo}/releases/assets/{asset_id}": { + /** To download the asset's binary content, set the `Accept` header of the request to [`application/octet-stream`](https://docs.github.com/rest/overview/media-types). The API will either redirect the client to the location, or stream it directly if possible. API clients should handle both a `200` or `302` response. 
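Per the note above, downloading an asset's binary content is the same GET with an `application/octet-stream` Accept header, and the client has to tolerate either a direct 200 or a 302 redirect. A sketch assuming `@octokit/core` (which follows redirects) and placeholder identifiers:

// Sketch: download a release asset's raw bytes (assumes @octokit/core).
import { Octokit } from "@octokit/core";
import { writeFile } from "node:fs/promises";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

const response = await octokit.request("GET /repos/{owner}/{repo}/releases/assets/{asset_id}", {
  owner: "my-org", repo: "my-repo", asset_id: 1, // placeholders
  headers: { accept: "application/octet-stream" }, // ask for the binary, not the JSON metadata
});

// With the octet-stream media type the payload is the asset itself
// (recent Octokit versions surface binary bodies as an ArrayBuffer).
await writeFile("asset.bin", Buffer.from(response.data as unknown as ArrayBuffer));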
*/ + get: operations["repos/get-release-asset"]; + delete: operations["repos/delete-release-asset"]; + /** Users with push access to the repository can edit a release asset. */ + patch: operations["repos/update-release-asset"]; + }; + "/repos/{owner}/{repo}/releases/generate-notes": { + /** Generate a name and body describing a [release](https://docs.github.com/rest/reference/repos#releases). The body content will be markdown formatted and contain information like the changes since last release and users who contributed. The generated release notes are not saved anywhere. They are intended to be generated and used when creating a new release. */ + post: operations["repos/generate-release-notes"]; + }; + "/repos/{owner}/{repo}/releases/latest": { + /** + * View the latest published full release for the repository. + * + * The latest release is the most recent non-prerelease, non-draft release, sorted by the `created_at` attribute. The `created_at` attribute is the date of the commit used for the release, and not the date when the release was drafted or published. + */ + get: operations["repos/get-latest-release"]; + }; + "/repos/{owner}/{repo}/releases/tags/{tag}": { + /** Get a published release with the specified tag. */ + get: operations["repos/get-release-by-tag"]; + }; + "/repos/{owner}/{repo}/releases/{release_id}": { + /** **Note:** This returns an `upload_url` key corresponding to the endpoint for uploading release assets. This key is a [hypermedia resource](https://docs.github.com/rest/overview/resources-in-the-rest-api#hypermedia). */ + get: operations["repos/get-release"]; + /** Users with push access to the repository can delete a release. */ + delete: operations["repos/delete-release"]; + /** Users with push access to the repository can edit a release. */ + patch: operations["repos/update-release"]; + }; + "/repos/{owner}/{repo}/releases/{release_id}/assets": { + get: operations["repos/list-release-assets"]; + /** + * This endpoint makes use of [a Hypermedia relation](https://docs.github.com/rest/overview/resources-in-the-rest-api#hypermedia) to determine which URL to access. The endpoint you call to upload release assets is specific to your release. Use the `upload_url` returned in + * the response of the [Create a release endpoint](https://docs.github.com/rest/reference/repos#create-a-release) to upload a release asset. + * + * You need to use an HTTP client which supports [SNI](http://en.wikipedia.org/wiki/Server_Name_Indication) to make calls to this endpoint. + * + * Most libraries will set the required `Content-Length` header automatically. Use the required `Content-Type` header to provide the media type of the asset. For a list of media types, see [Media Types](https://www.iana.org/assignments/media-types/media-types.xhtml). For example: + * + * `application/zip` + * + * GitHub expects the asset data in its raw binary form, rather than JSON. You will send the raw binary content of the asset as the request body. Everything else about the endpoint is the same as the rest of the API. For example, + * you'll still need to pass your authentication to be able to upload an asset. + * + * When an upstream failure occurs, you will receive a `502 Bad Gateway` status. This may leave an empty asset with a state of `starter`. It can be safely deleted. + * + * **Notes:** + * * GitHub renames asset filenames that have special characters, non-alphanumeric characters, and leading or trailing periods. 
The "[List assets for a release](https://docs.github.com/rest/reference/repos#list-assets-for-a-release)" + * endpoint lists the renamed filenames. For more information and help, contact [GitHub Support](https://support.github.com/contact?tags=dotcom-rest-api). + * * If you upload an asset with the same filename as another uploaded asset, you'll receive an error and must delete the old file before you can re-upload the new asset. + */ + post: operations["repos/upload-release-asset"]; + }; + "/repos/{owner}/{repo}/releases/{release_id}/reactions": { + /** List the reactions to a [release](https://docs.github.com/rest/reference/repos#releases). */ + get: operations["reactions/list-for-release"]; + /** Create a reaction to a [release](https://docs.github.com/rest/reference/repos#releases). A response with a `Status: 200 OK` means that you already added the reaction type to this release. */ + post: operations["reactions/create-for-release"]; + }; + "/repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}": { + /** + * **Note:** You can also specify a repository by `repository_id` using the route `DELETE delete /repositories/:repository_id/releases/:release_id/reactions/:reaction_id`. + * + * Delete a reaction to a [release](https://docs.github.com/rest/reference/repos#releases). + */ + delete: operations["reactions/delete-for-release"]; + }; + "/repos/{owner}/{repo}/secret-scanning/alerts": { + /** + * Lists secret scanning alerts for an eligible repository, from newest to oldest. + * To use this endpoint, you must be an administrator for the repository or for the organization that owns the repository, and you must use a personal access token with the `repo` scope or `security_events` scope. + * For public repositories, you may instead use the `public_repo` scope. + * + * GitHub Apps must have the `secret_scanning_alerts` read permission to use this endpoint. + */ + get: operations["secret-scanning/list-alerts-for-repo"]; + }; + "/repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}": { + /** + * Gets a single secret scanning alert detected in an eligible repository. + * To use this endpoint, you must be an administrator for the repository or for the organization that owns the repository, and you must use a personal access token with the `repo` scope or `security_events` scope. + * For public repositories, you may instead use the `public_repo` scope. + * + * GitHub Apps must have the `secret_scanning_alerts` read permission to use this endpoint. + */ + get: operations["secret-scanning/get-alert"]; + /** + * Updates the status of a secret scanning alert in an eligible repository. + * To use this endpoint, you must be an administrator for the repository or for the organization that owns the repository, and you must use a personal access token with the `repo` scope or `security_events` scope. + * For public repositories, you may instead use the `public_repo` scope. + * + * GitHub Apps must have the `secret_scanning_alerts` write permission to use this endpoint. + */ + patch: operations["secret-scanning/update-alert"]; + }; + "/repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations": { + /** + * Lists all locations for a given secret scanning alert for an eligible repository. + * To use this endpoint, you must be an administrator for the repository or for the organization that owns the repository, and you must use a personal access token with the `repo` scope or `security_events` scope. + * For public repositories, you may instead use the `public_repo` scope. 
+ * + * GitHub Apps must have the `secret_scanning_alerts` read permission to use this endpoint. + */ + get: operations["secret-scanning/list-locations-for-alert"]; + }; + "/repos/{owner}/{repo}/stargazers": { + /** + * Lists the people that have starred the repository. + * + * You can also find out _when_ stars were created by passing the following custom [media type](https://docs.github.com/rest/overview/media-types/) via the `Accept` header: `application/vnd.github.star+json`. + */ + get: operations["activity/list-stargazers-for-repo"]; + }; + "/repos/{owner}/{repo}/stats/code_frequency": { + /** Returns a weekly aggregate of the number of additions and deletions pushed to a repository. */ + get: operations["repos/get-code-frequency-stats"]; + }; + "/repos/{owner}/{repo}/stats/commit_activity": { + /** Returns the last year of commit activity grouped by week. The `days` array is a group of commits per day, starting on `Sunday`. */ + get: operations["repos/get-commit-activity-stats"]; + }; + "/repos/{owner}/{repo}/stats/contributors": { + /** + * Returns the `total` number of commits authored by the contributor. In addition, the response includes a Weekly Hash (`weeks` array) with the following information: + * + * * `w` - Start of the week, given as a [Unix timestamp](http://en.wikipedia.org/wiki/Unix_time). + * * `a` - Number of additions + * * `d` - Number of deletions + * * `c` - Number of commits + */ + get: operations["repos/get-contributors-stats"]; + }; + "/repos/{owner}/{repo}/stats/participation": { + /** + * Returns the total commit counts for the `owner` and total commit counts in `all`. `all` is everyone combined, including the `owner` in the last 52 weeks. If you'd like to get the commit counts for non-owners, you can subtract `owner` from `all`. + * + * The array order is oldest week (index 0) to most recent week. + */ + get: operations["repos/get-participation-stats"]; + }; + "/repos/{owner}/{repo}/stats/punch_card": { + /** + * Each array contains the day number, hour number, and number of commits: + * + * * `0-6`: Sunday - Saturday + * * `0-23`: Hour of day + * * Number of commits + * + * For example, `[2, 14, 25]` indicates that there were 25 total commits, during the 2:00pm hour on Tuesdays. All times are based on the time zone of individual commits. + */ + get: operations["repos/get-punch-card-stats"]; + }; + "/repos/{owner}/{repo}/statuses/{sha}": { + /** + * Users with push access in a repository can create commit statuses for a given SHA. + * + * Note: there is a limit of 1000 statuses per `sha` and `context` within a repository. Attempts to create more than 1000 statuses will result in a validation error. + */ + post: operations["repos/create-commit-status"]; + }; + "/repos/{owner}/{repo}/subscribers": { + /** Lists the people watching the specified repository. */ + get: operations["activity/list-watchers-for-repo"]; + }; + "/repos/{owner}/{repo}/subscription": { + get: operations["activity/get-repo-subscription"]; + /** If you would like to watch a repository, set `subscribed` to `true`. If you would like to ignore notifications made within a repository, set `ignored` to `true`. If you would like to stop watching a repository, [delete the repository's subscription](https://docs.github.com/rest/reference/activity#delete-a-repository-subscription) completely. */ + put: operations["activity/set-repo-subscription"]; + /** This endpoint should only be used to stop watching a repository. 
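The punch-card triples described above are easy to misread, so here is a small illustrative sketch (repository name and the unauthenticated call are placeholders) that prints each `[day, hour, commits]` entry with readable labels:

```typescript
const DAYS = ["Sunday", "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday"];

// Each entry is [dayOfWeek 0-6, hourOfDay 0-23, commitCount], as described above.
type PunchCardEntry = [number, number, number];

async function printPunchCard(owner: string, repo: string): Promise<void> {
  // Note: the stats endpoints can return 202 while GitHub computes the data; a retry may be needed.
  const res = await fetch(`https://api.github.com/repos/${owner}/${repo}/stats/punch_card`);
  const entries = (await res.json()) as PunchCardEntry[];
  for (const [day, hour, commits] of entries) {
    if (commits > 0) console.log(`${DAYS[day]} ${hour}:00 -> ${commits} commit(s)`);
  }
}

printPunchCard("octocat", "Hello-World");
```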
To control whether or not you wish to receive notifications from a repository, [set the repository's subscription manually](https://docs.github.com/rest/reference/activity#set-a-repository-subscription). */ + delete: operations["activity/delete-repo-subscription"]; + }; + "/repos/{owner}/{repo}/tags": { + get: operations["repos/list-tags"]; + }; + "/repos/{owner}/{repo}/tags/protection": { + /** + * This returns the tag protection states of a repository. + * + * This information is only available to repository administrators. + */ + get: operations["repos/list-tag-protection"]; + /** + * This creates a tag protection state for a repository. + * This endpoint is only available to repository administrators. + */ + post: operations["repos/create-tag-protection"]; + }; + "/repos/{owner}/{repo}/tags/protection/{tag_protection_id}": { + /** + * This deletes a tag protection state for a repository. + * This endpoint is only available to repository administrators. + */ + delete: operations["repos/delete-tag-protection"]; + }; + "/repos/{owner}/{repo}/tarball/{ref}": { + /** + * Gets a redirect URL to download a tar archive for a repository. If you omit `:ref`, the repository’s default branch (usually + * `master`) will be used. Please make sure your HTTP framework is configured to follow redirects or you will need to use + * the `Location` header to make a second `GET` request. + * **Note**: For private repositories, these links are temporary and expire after five minutes. + */ + get: operations["repos/download-tarball-archive"]; + }; + "/repos/{owner}/{repo}/teams": { + get: operations["repos/list-teams"]; + }; + "/repos/{owner}/{repo}/topics": { + get: operations["repos/get-all-topics"]; + put: operations["repos/replace-all-topics"]; + }; + "/repos/{owner}/{repo}/traffic/clones": { + /** Get the total number of clones and breakdown per day or week for the last 14 days. Timestamps are aligned to UTC midnight of the beginning of the day or week. Week begins on Monday. */ + get: operations["repos/get-clones"]; + }; + "/repos/{owner}/{repo}/traffic/popular/paths": { + /** Get the top 10 popular contents over the last 14 days. */ + get: operations["repos/get-top-paths"]; + }; + "/repos/{owner}/{repo}/traffic/popular/referrers": { + /** Get the top 10 referrers over the last 14 days. */ + get: operations["repos/get-top-referrers"]; + }; + "/repos/{owner}/{repo}/traffic/views": { + /** Get the total number of views and breakdown per day or week for the last 14 days. Timestamps are aligned to UTC midnight of the beginning of the day or week. Week begins on Monday. */ + get: operations["repos/get-views"]; + }; + "/repos/{owner}/{repo}/transfer": { + /** A transfer request will need to be accepted by the new owner when transferring a personal repository to another user. The response will contain the original `owner`, and the transfer will continue asynchronously. For more details on the requirements to transfer personal and organization-owned repositories, see [about repository transfers](https://docs.github.com/articles/about-repository-transfers/). */ + post: operations["repos/transfer"]; + }; + "/repos/{owner}/{repo}/vulnerability-alerts": { + /** Shows whether dependency alerts are enabled or disabled for a repository. The authenticated user must have admin read access to the repository. For more information, see "[About security alerts for vulnerable dependencies](https://docs.github.com/en/articles/about-security-alerts-for-vulnerable-dependencies)". 
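For the traffic endpoints above, a rough sketch of reading the 14-day breakdown follows; the `per=week` query parameter and the response field names are assumptions to verify against the current API reference:

```typescript
async function weeklyViews(owner: string, repo: string, token: string): Promise<void> {
  // Push access is required for traffic data; `per` is assumed to accept "day" or "week".
  const res = await fetch(
    `https://api.github.com/repos/${owner}/${repo}/traffic/views?per=week`,
    { headers: { Authorization: `Bearer ${token}` } }
  );
  const data = await res.json();
  console.log(`Total views: ${data.count} (${data.uniques} unique)`);
  for (const bucket of data.views) {
    // Timestamps are aligned to UTC midnight at the start of each week (Monday), per the note above.
    console.log(`${bucket.timestamp}: ${bucket.count} views, ${bucket.uniques} unique`);
  }
}

weeklyViews("octocat", "Hello-World", process.env.GITHUB_TOKEN ?? "");
```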
*/ + get: operations["repos/check-vulnerability-alerts"]; + /** Enables dependency alerts and the dependency graph for a repository. The authenticated user must have admin access to the repository. For more information, see "[About security alerts for vulnerable dependencies](https://docs.github.com/en/articles/about-security-alerts-for-vulnerable-dependencies)". */ + put: operations["repos/enable-vulnerability-alerts"]; + /** Disables dependency alerts and the dependency graph for a repository. The authenticated user must have admin access to the repository. For more information, see "[About security alerts for vulnerable dependencies](https://docs.github.com/en/articles/about-security-alerts-for-vulnerable-dependencies)". */ + delete: operations["repos/disable-vulnerability-alerts"]; + }; + "/repos/{owner}/{repo}/zipball/{ref}": { + /** + * Gets a redirect URL to download a zip archive for a repository. If you omit `:ref`, the repository’s default branch (usually + * `master`) will be used. Please make sure your HTTP framework is configured to follow redirects or you will need to use + * the `Location` header to make a second `GET` request. + * + * **Note**: For private repositories, these links are temporary and expire after five minutes. If the repository is empty, you will receive a 404 when you follow the redirect. + */ + get: operations["repos/download-zipball-archive"]; + }; + "/repos/{template_owner}/{template_repo}/generate": { + /** + * Creates a new repository using a repository template. Use the `template_owner` and `template_repo` route parameters to specify the repository to use as the template. If the repository is not public, the authenticated user must own or be a member of an organization that owns the repository. To check if a repository is available to use as a template, get the repository's information using the [Get a repository](https://docs.github.com/rest/reference/repos#get-a-repository) endpoint and check that the `is_template` key is `true`. + * + * **OAuth scope requirements** + * + * When using [OAuth](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/), authorizations must include: + * + * * `public_repo` scope or `repo` scope to create a public repository. Note: For GitHub AE, use `repo` scope to create an internal repository. + * * `repo` scope to create a private repository + */ + post: operations["repos/create-using-template"]; + }; + "/repositories": { + /** + * Lists all public repositories in the order that they were created. + * + * Note: + * - For GitHub Enterprise Server, this endpoint will only list repositories available to all users on the enterprise. + * - Pagination is powered exclusively by the `since` parameter. Use the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header) to get the URL for the next page of repositories. + */ + get: operations["repos/list-public"]; + }; + "/repositories/{repository_id}/environments/{environment_name}/secrets": { + /** Lists all secrets available in an environment without revealing their encrypted values. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. */ + get: operations["actions/list-environment-secrets"]; + }; + "/repositories/{repository_id}/environments/{environment_name}/secrets/public-key": { + /** Get the public key for an environment, which you need to encrypt environment secrets. 
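Because pagination for `/repositories` works only through `since` and the `Link` header, as noted above, the sketch below simply follows `rel="next"` rather than computing offsets; it is illustrative only:

```typescript
async function listPublicRepos(maxPages = 2): Promise<void> {
  let url: string | null = "https://api.github.com/repositories";
  let page = 0;
  while (url && page < maxPages) {
    const res = await fetch(url, { headers: { Accept: "application/vnd.github+json" } });
    const repos: Array<{ full_name: string }> = await res.json();
    repos.forEach((r) => console.log(r.full_name));
    // The next page URL (carrying the `since` cursor) comes from the Link header.
    const match = res.headers.get("link")?.match(/<([^>]+)>;\s*rel="next"/);
    url = match ? match[1] : null;
    page++;
  }
}

listPublicRepos();
```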
You need to encrypt a secret before you can create or update secrets. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `secrets` repository permission to use this endpoint. */ + get: operations["actions/get-environment-public-key"]; + }; + "/repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}": { + /** Gets a single environment secret without revealing its encrypted value. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. */ + get: operations["actions/get-environment-secret"]; + /** + * Creates or updates an environment secret with an encrypted value. Encrypt your secret using + * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access + * token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use + * this endpoint. + * + * #### Example encrypting a secret using Node.js + * + * Encrypt your secret using the [tweetsodium](https://github.com/github/tweetsodium) library. + * + * ``` + * const sodium = require('tweetsodium'); + * + * const key = "base64-encoded-public-key"; + * const value = "plain-text-secret"; + * + * // Convert the message and key to Uint8Array's (Buffer implements that interface) + * const messageBytes = Buffer.from(value); + * const keyBytes = Buffer.from(key, 'base64'); + * + * // Encrypt using LibSodium. + * const encryptedBytes = sodium.seal(messageBytes, keyBytes); + * + * // Base64 the encrypted secret + * const encrypted = Buffer.from(encryptedBytes).toString('base64'); + * + * console.log(encrypted); + * ``` + * + * + * #### Example encrypting a secret using Python + * + * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/latest/public/#nacl-public-sealedbox) with Python 3. + * + * ``` + * from base64 import b64encode + * from nacl import encoding, public + * + * def encrypt(public_key: str, secret_value: str) -> str: + * """Encrypt a Unicode string using the public key.""" + * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) + * sealed_box = public.SealedBox(public_key) + * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) + * return b64encode(encrypted).decode("utf-8") + * ``` + * + * #### Example encrypting a secret using C# + * + * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. + * + * ``` + * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); + * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); + * + * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); + * + * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); + * ``` + * + * #### Example encrypting a secret using Ruby + * + * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. 
+ * + * ```ruby + * require "rbnacl" + * require "base64" + * + * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") + * public_key = RbNaCl::PublicKey.new(key) + * + * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) + * encrypted_secret = box.encrypt("my_secret") + * + * # Print the base64 encoded secret + * puts Base64.strict_encode64(encrypted_secret) + * ``` + */ + put: operations["actions/create-or-update-environment-secret"]; + /** Deletes a secret in an environment using the secret name. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. */ + delete: operations["actions/delete-environment-secret"]; + }; + "/search/code": { + /** + * Searches for query terms inside of a file. This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). + * + * When searching for code, you can get text match metadata for the file **content** and file **path** fields when you pass the `text-match` media type. For more details about how to receive highlighted search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). + * + * For example, if you want to find the definition of the `addClass` function inside [jQuery](https://github.com/jquery/jquery) repository, your query would look something like this: + * + * `q=addClass+in:file+language:js+repo:jquery/jquery` + * + * This query searches for the keyword `addClass` within a file's contents. The query limits the search to files where the language is JavaScript in the `jquery/jquery` repository. + * + * #### Considerations for code search + * + * Due to the complexity of searching code, there are a few restrictions on how searches are performed: + * + * * Only the _default branch_ is considered. In most cases, this will be the `master` branch. + * * Only files smaller than 384 KB are searchable. + * * You must always include at least one search term when searching source code. For example, searching for [`language:go`](https://github.com/search?utf8=%E2%9C%93&q=language%3Ago&type=Code) is not valid, while [`amazing + * language:go`](https://github.com/search?utf8=%E2%9C%93&q=amazing+language%3Ago&type=Code) is. + */ + get: operations["search/code"]; + }; + "/search/commits": { + /** + * Find commits via various criteria on the default branch (usually `master`). This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). + * + * When searching for commits, you can get text match metadata for the **message** field when you provide the `text-match` media type. For more details about how to receive highlighted search results, see [Text match + * metadata](https://docs.github.com/rest/reference/search#text-match-metadata). + * + * For example, if you want to find commits related to CSS in the [octocat/Spoon-Knife](https://github.com/octocat/Spoon-Knife) repository. Your query would look something like this: + * + * `q=repo:octocat/Spoon-Knife+css` + */ + get: operations["search/commits"]; + }; + "/search/issues": { + /** + * Find issues by state and keyword. This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). 
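As a small illustration of the code-search example above (`q=addClass+in:file+language:js+repo:jquery/jquery`), this sketch builds the same query with `URLSearchParams`; the exact `text-match` media type string is an assumption based on the linked docs:

```typescript
// Reproduces the documented example query: addClass in:file language:js repo:jquery/jquery
const q = ["addClass", "in:file", "language:js", "repo:jquery/jquery"].join(" ");

async function searchCode(): Promise<void> {
  const params = new URLSearchParams({ q, per_page: "10" });
  const res = await fetch(`https://api.github.com/search/code?${params}`, {
    headers: { Accept: "application/vnd.github.text-match+json" }, // include text match metadata
  });
  const data = await res.json();
  console.log(`${data.total_count} matching files`);
  for (const item of data.items) {
    console.log(`${item.repository.full_name}: ${item.path}`);
  }
}

searchCode();
```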
+ * + * When searching for issues, you can get text match metadata for the issue **title**, issue **body**, and issue **comment body** fields when you pass the `text-match` media type. For more details about how to receive highlighted + * search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). + * + * For example, if you want to find the oldest unresolved Python bugs on Windows. Your query might look something like this. + * + * `q=windows+label:bug+language:python+state:open&sort=created&order=asc` + * + * This query searches for the keyword `windows`, within any open issue that is labeled as `bug`. The search runs across repositories whose primary language is Python. The results are sorted by creation date in ascending order, which means the oldest issues appear first in the search results. + * + * **Note:** For [user-to-server](https://docs.github.com/developers/apps/identifying-and-authorizing-users-for-github-apps#user-to-server-requests) GitHub App requests, you can't retrieve a combination of issues and pull requests in a single query. Requests that don't include the `is:issue` or `is:pull-request` qualifier will receive an HTTP `422 Unprocessable Entity` response. To get results for both issues and pull requests, you must send separate queries for issues and pull requests. For more information about the `is` qualifier, see "[Searching only issues or pull requests](https://docs.github.com/github/searching-for-information-on-github/searching-issues-and-pull-requests#search-only-issues-or-pull-requests)." + */ + get: operations["search/issues-and-pull-requests"]; + }; + "/search/labels": { + /** + * Find labels in a repository with names or descriptions that match search keywords. Returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). + * + * When searching for labels, you can get text match metadata for the label **name** and **description** fields when you pass the `text-match` media type. For more details about how to receive highlighted search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). + * + * For example, if you want to find labels in the `linguist` repository that match `bug`, `defect`, or `enhancement`. Your query might look like this: + * + * `q=bug+defect+enhancement&repository_id=64778136` + * + * The labels that best match the query appear first in the search results. + */ + get: operations["search/labels"]; + }; + "/search/repositories": { + /** + * Find repositories via various criteria. This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). + * + * When searching for repositories, you can get text match metadata for the **name** and **description** fields when you pass the `text-match` media type. For more details about how to receive highlighted search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). + * + * For example, if you want to search for popular Tetris repositories written in assembly code, your query might look like this: + * + * `q=tetris+language:assembly&sort=stars&order=desc` + * + * This query searches for repositories with the word `tetris` in the name, the description, or the README. The results are limited to repositories where the primary language is assembly. 
The results are sorted by stars in descending order, so that the most popular repositories appear first in the search results. + */ + get: operations["search/repos"]; + }; + "/search/topics": { + /** + * Find topics via various criteria. Results are sorted by best match. This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). See "[Searching topics](https://docs.github.com/articles/searching-topics/)" for a detailed list of qualifiers. + * + * When searching for topics, you can get text match metadata for the topic's **short\_description**, **description**, **name**, or **display\_name** field when you pass the `text-match` media type. For more details about how to receive highlighted search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). + * + * For example, if you want to search for topics related to Ruby that are featured on https://github.com/topics. Your query might look like this: + * + * `q=ruby+is:featured` + * + * This query searches for topics with the keyword `ruby` and limits the results to find only topics that are featured. The topics that are the best match for the query appear first in the search results. + */ + get: operations["search/topics"]; + }; + "/search/users": { + /** + * Find users via various criteria. This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). + * + * When searching for users, you can get text match metadata for the issue **login**, public **email**, and **name** fields when you pass the `text-match` media type. For more details about highlighting search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). For more details about how to receive highlighted search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). + * + * For example, if you're looking for a list of popular users, you might try this query: + * + * `q=tom+repos:%3E42+followers:%3E1000` + * + * This query searches for users with the name `tom`. The results are restricted to users with more than 42 repositories and over 1,000 followers. + */ + get: operations["search/users"]; + }; + "/teams/{team_id}": { + /** **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the [Get a team by name](https://docs.github.com/rest/reference/teams#get-a-team-by-name) endpoint. */ + get: operations["teams/get-legacy"]; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Delete a team](https://docs.github.com/rest/reference/teams#delete-a-team) endpoint. + * + * To delete a team, the authenticated user must be an organization owner or team maintainer. + * + * If you are an organization owner, deleting a parent team will delete all of its child teams as well. + */ + delete: operations["teams/delete-legacy"]; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Update a team](https://docs.github.com/rest/reference/teams#update-a-team) endpoint. + * + * To edit a team, the authenticated user must either be an organization owner or a team maintainer. 
+ * + * **Note:** With nested teams, the `privacy` for parent teams cannot be `secret`. + */ + patch: operations["teams/update-legacy"]; + }; + "/teams/{team_id}/discussions": { + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List discussions`](https://docs.github.com/rest/reference/teams#list-discussions) endpoint. + * + * List all discussions on a team's page. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + get: operations["teams/list-discussions-legacy"]; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`Create a discussion`](https://docs.github.com/rest/reference/teams#create-a-discussion) endpoint. + * + * Creates a new discussion post on a team's page. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + post: operations["teams/create-discussion-legacy"]; + }; + "/teams/{team_id}/discussions/{discussion_number}": { + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Get a discussion](https://docs.github.com/rest/reference/teams#get-a-discussion) endpoint. + * + * Get a specific discussion on a team's page. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + get: operations["teams/get-discussion-legacy"]; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`Delete a discussion`](https://docs.github.com/rest/reference/teams#delete-a-discussion) endpoint. + * + * Delete a discussion from a team's page. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + delete: operations["teams/delete-discussion-legacy"]; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Update a discussion](https://docs.github.com/rest/reference/teams#update-a-discussion) endpoint. + * + * Edits the title and body text of a discussion post. Only the parameters you provide are updated. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). 
+ */ + patch: operations["teams/update-discussion-legacy"]; + }; + "/teams/{team_id}/discussions/{discussion_number}/comments": { + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [List discussion comments](https://docs.github.com/rest/reference/teams#list-discussion-comments) endpoint. + * + * List all comments on a team discussion. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + get: operations["teams/list-discussion-comments-legacy"]; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Create a discussion comment](https://docs.github.com/rest/reference/teams#create-a-discussion-comment) endpoint. + * + * Creates a new comment on a team discussion. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + post: operations["teams/create-discussion-comment-legacy"]; + }; + "/teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}": { + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Get a discussion comment](https://docs.github.com/rest/reference/teams#get-a-discussion-comment) endpoint. + * + * Get a specific comment on a team discussion. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + get: operations["teams/get-discussion-comment-legacy"]; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Delete a discussion comment](https://docs.github.com/rest/reference/teams#delete-a-discussion-comment) endpoint. + * + * Deletes a comment on a team discussion. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + delete: operations["teams/delete-discussion-comment-legacy"]; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Update a discussion comment](https://docs.github.com/rest/reference/teams#update-a-discussion-comment) endpoint. + * + * Edits the body text of a discussion comment. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). 
+ */ + patch: operations["teams/update-discussion-comment-legacy"]; + }; + "/teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions": { + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List reactions for a team discussion comment`](https://docs.github.com/rest/reference/reactions#list-reactions-for-a-team-discussion-comment) endpoint. + * + * List the reactions to a [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments). OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + get: operations["reactions/list-for-team-discussion-comment-legacy"]; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new "[Create reaction for a team discussion comment](https://docs.github.com/rest/reference/reactions#create-reaction-for-a-team-discussion-comment)" endpoint. + * + * Create a reaction to a [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). A response with an HTTP `200` status means that you already added the reaction type to this team discussion comment. + */ + post: operations["reactions/create-for-team-discussion-comment-legacy"]; + }; + "/teams/{team_id}/discussions/{discussion_number}/reactions": { + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List reactions for a team discussion`](https://docs.github.com/rest/reference/reactions#list-reactions-for-a-team-discussion) endpoint. + * + * List the reactions to a [team discussion](https://docs.github.com/rest/reference/teams#discussions). OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + get: operations["reactions/list-for-team-discussion-legacy"]; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`Create reaction for a team discussion`](https://docs.github.com/rest/reference/reactions#create-reaction-for-a-team-discussion) endpoint. + * + * Create a reaction to a [team discussion](https://docs.github.com/rest/reference/teams#discussions). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). A response with an HTTP `200` status means that you already added the reaction type to this team discussion. + */ + post: operations["reactions/create-for-team-discussion-legacy"]; + }; + "/teams/{team_id}/invitations": { + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List pending team invitations`](https://docs.github.com/rest/reference/teams#list-pending-team-invitations) endpoint. 
+ * + * The return hash contains a `role` field which refers to the Organization Invitation role and will be one of the following values: `direct_member`, `admin`, `billing_manager`, `hiring_manager`, or `reinstate`. If the invitee is not a GitHub member, the `login` field in the return hash will be `null`. + */ + get: operations["teams/list-pending-invitations-legacy"]; + }; + "/teams/{team_id}/members": { + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List team members`](https://docs.github.com/rest/reference/teams#list-team-members) endpoint. + * + * Team members will include the members of child teams. + */ + get: operations["teams/list-members-legacy"]; + }; + "/teams/{team_id}/members/{username}": { + /** + * The "Get team member" endpoint (described below) is deprecated. + * + * We recommend using the [Get team membership for a user](https://docs.github.com/rest/reference/teams#get-team-membership-for-a-user) endpoint instead. It allows you to get both active and pending memberships. + * + * To list members in a team, the team must be visible to the authenticated user. + */ + get: operations["teams/get-member-legacy"]; + /** + * The "Add team member" endpoint (described below) is deprecated. + * + * We recommend using the [Add or update team membership for a user](https://docs.github.com/rest/reference/teams#add-or-update-team-membership-for-a-user) endpoint instead. It allows you to invite new organization members to your teams. + * + * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * To add someone to a team, the authenticated user must be an organization owner or a team maintainer in the team they're changing. The person being added to the team must be a member of the team's organization. + * + * **Note:** When you have team synchronization set up for a team with your organization's identity provider (IdP), you will see an error if you attempt to use the API for making changes to the team's membership. If you have access to manage group membership in your IdP, you can manage GitHub team membership through your identity provider, which automatically adds and removes team members in an organization. For more information, see "[Synchronizing teams between your identity provider and GitHub](https://docs.github.com/articles/synchronizing-teams-between-your-identity-provider-and-github/)." + * + * Note that you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." + */ + put: operations["teams/add-member-legacy"]; + /** + * The "Remove team member" endpoint (described below) is deprecated. + * + * We recommend using the [Remove team membership for a user](https://docs.github.com/rest/reference/teams#remove-team-membership-for-a-user) endpoint instead. It allows you to remove both active and pending memberships. + * + * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. 
+ * + * To remove a team member, the authenticated user must have 'admin' permissions to the team or be an owner of the org that the team is associated with. Removing a team member does not delete the user, it just removes them from the team. + * + * **Note:** When you have team synchronization set up for a team with your organization's identity provider (IdP), you will see an error if you attempt to use the API for making changes to the team's membership. If you have access to manage group membership in your IdP, you can manage GitHub team membership through your identity provider, which automatically adds and removes team members in an organization. For more information, see "[Synchronizing teams between your identity provider and GitHub](https://docs.github.com/articles/synchronizing-teams-between-your-identity-provider-and-github/)." + */ + delete: operations["teams/remove-member-legacy"]; + }; + "/teams/{team_id}/memberships/{username}": { + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Get team membership for a user](https://docs.github.com/rest/reference/teams#get-team-membership-for-a-user) endpoint. + * + * Team members will include the members of child teams. + * + * To get a user's membership with a team, the team must be visible to the authenticated user. + * + * **Note:** + * The response contains the `state` of the membership and the member's `role`. + * + * The `role` for organization owners is set to `maintainer`. For more information about `maintainer` roles, see [Create a team](https://docs.github.com/rest/reference/teams#create-a-team). + */ + get: operations["teams/get-membership-for-user-legacy"]; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Add or update team membership for a user](https://docs.github.com/rest/reference/teams#add-or-update-team-membership-for-a-user) endpoint. + * + * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * If the user is already a member of the team's organization, this endpoint will add the user to the team. To add a membership between an organization member and a team, the authenticated user must be an organization owner or a team maintainer. + * + * **Note:** When you have team synchronization set up for a team with your organization's identity provider (IdP), you will see an error if you attempt to use the API for making changes to the team's membership. If you have access to manage group membership in your IdP, you can manage GitHub team membership through your identity provider, which automatically adds and removes team members in an organization. For more information, see "[Synchronizing teams between your identity provider and GitHub](https://docs.github.com/articles/synchronizing-teams-between-your-identity-provider-and-github/)." + * + * If the user is unaffiliated with the team's organization, this endpoint will send an invitation to the user via email. This newly-created membership will be in the "pending" state until the user accepts the invitation, at which point the membership will transition to the "active" state and the user will be added as a member of the team. 
To add a membership between an unaffiliated user and a team, the authenticated user must be an organization owner. + * + * If the user is already a member of the team, this endpoint will update the role of the team member's role. To update the membership of a team member, the authenticated user must be an organization owner or a team maintainer. + */ + put: operations["teams/add-or-update-membership-for-user-legacy"]; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Remove team membership for a user](https://docs.github.com/rest/reference/teams#remove-team-membership-for-a-user) endpoint. + * + * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * To remove a membership between a user and a team, the authenticated user must have 'admin' permissions to the team or be an owner of the organization that the team is associated with. Removing team membership does not delete the user, it just removes their membership from the team. + * + * **Note:** When you have team synchronization set up for a team with your organization's identity provider (IdP), you will see an error if you attempt to use the API for making changes to the team's membership. If you have access to manage group membership in your IdP, you can manage GitHub team membership through your identity provider, which automatically adds and removes team members in an organization. For more information, see "[Synchronizing teams between your identity provider and GitHub](https://docs.github.com/articles/synchronizing-teams-between-your-identity-provider-and-github/)." + */ + delete: operations["teams/remove-membership-for-user-legacy"]; + }; + "/teams/{team_id}/projects": { + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List team projects`](https://docs.github.com/rest/reference/teams#list-team-projects) endpoint. + * + * Lists the organization projects for a team. + */ + get: operations["teams/list-projects-legacy"]; + }; + "/teams/{team_id}/projects/{project_id}": { + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Check team permissions for a project](https://docs.github.com/rest/reference/teams#check-team-permissions-for-a-project) endpoint. + * + * Checks whether a team has `read`, `write`, or `admin` permissions for an organization project. The response includes projects inherited from a parent team. + */ + get: operations["teams/check-permissions-for-project-legacy"]; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Add or update team project permissions](https://docs.github.com/rest/reference/teams#add-or-update-team-project-permissions) endpoint. + * + * Adds an organization project to a team. To add a project to a team or update the team's permission on a project, the authenticated user must have `admin` permissions for the project. The project and team must be part of the same organization. 
+ */ + put: operations["teams/add-or-update-project-permissions-legacy"]; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Remove a project from a team](https://docs.github.com/rest/reference/teams#remove-a-project-from-a-team) endpoint. + * + * Removes an organization project from a team. An organization owner or a team maintainer can remove any project from the team. To remove a project from a team as an organization member, the authenticated user must have `read` access to both the team and project, or `admin` access to the team or project. **Note:** This endpoint removes the project from the team, but does not delete it. + */ + delete: operations["teams/remove-project-legacy"]; + }; + "/teams/{team_id}/repos": { + /** **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [List team repositories](https://docs.github.com/rest/reference/teams#list-team-repositories) endpoint. */ + get: operations["teams/list-repos-legacy"]; + }; + "/teams/{team_id}/repos/{owner}/{repo}": { + /** + * **Note**: Repositories inherited through a parent team will also be checked. + * + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Check team permissions for a repository](https://docs.github.com/rest/reference/teams#check-team-permissions-for-a-repository) endpoint. + * + * You can also get information about the specified repository, including what permissions the team grants on it, by passing the following custom [media type](https://docs.github.com/rest/overview/media-types/) via the `Accept` header: + */ + get: operations["teams/check-permissions-for-repo-legacy"]; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new "[Add or update team repository permissions](https://docs.github.com/rest/reference/teams#add-or-update-team-repository-permissions)" endpoint. + * + * To add a repository to a team or update the team's permission on a repository, the authenticated user must have admin access to the repository, and must be able to see the team. The repository must be owned by the organization, or a direct fork of a repository owned by the organization. You will get a `422 Unprocessable Entity` status if you attempt to add a repository to a team that is not owned by the organization. + * + * Note that, if you choose not to pass any parameters, you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." + */ + put: operations["teams/add-or-update-repo-permissions-legacy"]; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Remove a repository from a team](https://docs.github.com/rest/reference/teams#remove-a-repository-from-a-team) endpoint. + * + * If the authenticated user is an organization owner or a team maintainer, they can remove any repositories from the team. To remove a repository from a team as an organization member, the authenticated user must have admin access to the repository and must be able to see the team. 
NOTE: This does not delete the repository, it just removes it from the team. + */ + delete: operations["teams/remove-repo-legacy"]; + }; + "/teams/{team_id}/teams": { + /** **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List child teams`](https://docs.github.com/rest/reference/teams#list-child-teams) endpoint. */ + get: operations["teams/list-child-legacy"]; + }; + "/user": { + /** + * If the authenticated user is authenticated through basic authentication or OAuth with the `user` scope, then the response lists public and private profile information. + * + * If the authenticated user is authenticated through OAuth without the `user` scope, then the response lists only public profile information. + */ + get: operations["users/get-authenticated"]; + /** **Note:** If your email is set to private and you send an `email` parameter as part of this request to update your profile, your privacy settings are still enforced: the email address will not be displayed on your public profile or via the API. */ + patch: operations["users/update-authenticated"]; + }; + "/user/blocks": { + /** List the users you've blocked on your personal account. */ + get: operations["users/list-blocked-by-authenticated-user"]; + }; + "/user/blocks/{username}": { + get: operations["users/check-blocked"]; + put: operations["users/block"]; + delete: operations["users/unblock"]; + }; + "/user/codespaces": { + /** + * Lists the authenticated user's codespaces. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces` repository permission to use this endpoint. + */ + get: operations["codespaces/list-for-authenticated-user"]; + /** + * Creates a new codespace, owned by the authenticated user. + * + * This endpoint requires either a `repository_id` OR a `pull_request` but not both. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces` repository permission to use this endpoint. + */ + post: operations["codespaces/create-for-authenticated-user"]; + }; + "/user/codespaces/secrets": { + /** + * Lists all secrets available for a user's Codespaces without revealing their + * encrypted values. + * + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces_user_secrets` user permission to use this endpoint. + */ + get: operations["codespaces/list-secrets-for-authenticated-user"]; + }; + "/user/codespaces/secrets/public-key": { + /** + * Gets your public key, which you need to encrypt secrets. You need to encrypt a secret before you can create or update secrets. + * + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces_user_secrets` user permission to use this endpoint. + */ + get: operations["codespaces/get-public-key-for-authenticated-user"]; + }; + "/user/codespaces/secrets/{secret_name}": { + /** + * Gets a secret available to a user's codespaces without revealing its encrypted value. 
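Before creating or updating a Codespaces user secret, the public key described above has to be fetched; a minimal sketch follows. The `key` and `key_id` response field names mirror the other secrets endpoints and are assumptions to verify:

```typescript
const token = process.env.GITHUB_TOKEN;

// Fetch the public key used to encrypt Codespaces user secrets.
async function getCodespacesPublicKey(): Promise<{ key: string; key_id: string }> {
  const res = await fetch("https://api.github.com/user/codespaces/secrets/public-key", {
    headers: {
      Authorization: `Bearer ${token}`,
      Accept: "application/vnd.github+json",
    },
  });
  if (!res.ok) throw new Error(`Failed to fetch public key: ${res.status}`);
  return res.json() as Promise<{ key: string; key_id: string }>;
}

getCodespacesPublicKey().then(({ key_id }) => console.log(`Encrypt against key ${key_id}`));
```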
+ * + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces_user_secrets` user permission to use this endpoint. + */ + get: operations["codespaces/get-secret-for-authenticated-user"]; + /** + * Creates or updates a secret for a user's codespace with an encrypted value. Encrypt your secret using + * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). + * + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must also have Codespaces access to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces_user_secrets` user permission and `codespaces_secrets` repository permission on all referenced repositories to use this endpoint. + * + * #### Example encrypting a secret using Node.js + * + * Encrypt your secret using the [tweetsodium](https://github.com/github/tweetsodium) library. + * + * ``` + * const sodium = require('tweetsodium'); + * + * const key = "base64-encoded-public-key"; + * const value = "plain-text-secret"; + * + * // Convert the message and key to Uint8Array's (Buffer implements that interface) + * const messageBytes = Buffer.from(value); + * const keyBytes = Buffer.from(key, 'base64'); + * + * // Encrypt using LibSodium. + * const encryptedBytes = sodium.seal(messageBytes, keyBytes); + * + * // Base64 the encrypted secret + * const encrypted = Buffer.from(encryptedBytes).toString('base64'); + * + * console.log(encrypted); + * ``` + * + * + * #### Example encrypting a secret using Python + * + * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/latest/public/#nacl-public-sealedbox) with Python 3. + * + * ``` + * from base64 import b64encode + * from nacl import encoding, public + * + * def encrypt(public_key: str, secret_value: str) -> str: + * """Encrypt a Unicode string using the public key.""" + * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) + * sealed_box = public.SealedBox(public_key) + * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) + * return b64encode(encrypted).decode("utf-8") + * ``` + * + * #### Example encrypting a secret using C# + * + * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. + * + * ``` + * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); + * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); + * + * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); + * + * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); + * ``` + * + * #### Example encrypting a secret using Ruby + * + * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. + * + * ```ruby + * require "rbnacl" + * require "base64" + * + * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") + * public_key = RbNaCl::PublicKey.new(key) + * + * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) + * encrypted_secret = box.encrypt("my_secret") + * + * # Print the base64 encoded secret + * puts Base64.strict_encode64(encrypted_secret) + * ``` + */ + put: operations["codespaces/create-or-update-secret-for-authenticated-user"]; + /** + * Deletes a secret from a user's codespaces using the secret name. 
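The multi-language snippets above stop at the encryption step; as a follow-up sketch, the encrypted string is then sent with a PUT. The body field names (`encrypted_value`, `key_id`, `selected_repository_ids`) mirror the other secrets endpoints and are assumptions to confirm against the schema:

```typescript
const token = process.env.GITHUB_TOKEN;

// Placeholder values produced by the encryption examples above.
const secretName = "MY_CODESPACE_SECRET";
const encryptedValue = "c2VjcmV0..."; // base64 output of the sealed box
const keyId = "012345678912345678"; // key_id returned with the public key

async function putCodespacesSecret(): Promise<void> {
  const res = await fetch(`https://api.github.com/user/codespaces/secrets/${secretName}`, {
    method: "PUT",
    headers: {
      Authorization: `Bearer ${token}`,
      Accept: "application/vnd.github+json",
      "Content-Type": "application/json",
    },
    body: JSON.stringify({
      encrypted_value: encryptedValue,
      key_id: keyId,
      selected_repository_ids: [1296269], // repositories allowed to use the secret (illustrative ID; exact format to verify)
    }),
  });
  console.log(res.status); // typically 201 when created, 204 when updated
}

putCodespacesSecret();
```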
Deleting the secret will remove access from all codespaces that were allowed to access the secret. + * + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces_user_secrets` user permission to use this endpoint. + */ + delete: operations["codespaces/delete-secret-for-authenticated-user"]; + }; + "/user/codespaces/secrets/{secret_name}/repositories": { + /** + * List the repositories that have been granted the ability to use a user's codespace secret. + * + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces_user_secrets` user permission and write access to the `codespaces_secrets` repository permission on all referenced repositories to use this endpoint. + */ + get: operations["codespaces/list-repositories-for-secret-for-authenticated-user"]; + /** + * Select the repositories that will use a user's codespace secret. + * + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces_user_secrets` user permission and write access to the `codespaces_secrets` repository permission on all referenced repositories to use this endpoint. + */ + put: operations["codespaces/set-repositories-for-secret-for-authenticated-user"]; + }; + "/user/codespaces/secrets/{secret_name}/repositories/{repository_id}": { + /** + * Adds a repository to the selected repositories for a user's codespace secret. + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. + * GitHub Apps must have write access to the `codespaces_user_secrets` user permission and write access to the `codespaces_secrets` repository permission on the referenced repository to use this endpoint. + */ + put: operations["codespaces/add-repository-for-secret-for-authenticated-user"]; + /** + * Removes a repository from the selected repositories for a user's codespace secret. + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. + * GitHub Apps must have write access to the `codespaces_user_secrets` user permission to use this endpoint. + */ + delete: operations["codespaces/remove-repository-for-secret-for-authenticated-user"]; + }; + "/user/codespaces/{codespace_name}": { + /** + * Gets information about a user's codespace. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces` repository permission to use this endpoint. + */ + get: operations["codespaces/get-for-authenticated-user"]; + /** + * Deletes a user's codespace. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces` repository permission to use this endpoint. + */ + delete: operations["codespaces/delete-for-authenticated-user"]; + /** + * Updates a codespace owned by the authenticated user. 
Currently only the codespace's machine type and recent folders can be modified using this endpoint. + * + * If you specify a new machine type it will be applied the next time your codespace is started. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces` repository permission to use this endpoint. + */ + patch: operations["codespaces/update-for-authenticated-user"]; + }; + "/user/codespaces/{codespace_name}/exports": { + /** + * Triggers an export of the specified codespace and returns a URL and ID where the status of the export can be monitored. + * + * You must authenticate using a personal access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces_lifecycle_admin` repository permission to use this endpoint. + */ + post: operations["codespaces/export-for-authenticated-user"]; + }; + "/user/codespaces/{codespace_name}/exports/{export_id}": { + /** + * Gets information about an export of a codespace. + * + * You must authenticate using a personal access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces_lifecycle_admin` repository permission to use this endpoint. + */ + get: operations["codespaces/get-export-details-for-authenticated-user"]; + }; + "/user/codespaces/{codespace_name}/machines": { + /** + * List the machine types a codespace can transition to use. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces_metadata` repository permission to use this endpoint. + */ + get: operations["codespaces/codespace-machines-for-authenticated-user"]; + }; + "/user/codespaces/{codespace_name}/start": { + /** + * Starts a user's codespace. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces_lifecycle_admin` repository permission to use this endpoint. + */ + post: operations["codespaces/start-for-authenticated-user"]; + }; + "/user/codespaces/{codespace_name}/stop": { + /** + * Stops a user's codespace. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces_lifecycle_admin` repository permission to use this endpoint. + */ + post: operations["codespaces/stop-for-authenticated-user"]; + }; + "/user/email/visibility": { + /** Sets the visibility for your primary email addresses. */ + patch: operations["users/set-primary-email-visibility-for-authenticated-user"]; + }; + "/user/emails": { + /** Lists all of your email addresses, and specifies which one is visible to the public. This endpoint is accessible with the `user:email` scope. */ + get: operations["users/list-emails-for-authenticated-user"]; + /** This endpoint is accessible with the `user` scope. */ + post: operations["users/add-email-for-authenticated-user"]; + /** This endpoint is accessible with the `user` scope. */ + delete: operations["users/delete-email-for-authenticated-user"]; + }; + "/user/followers": { + /** Lists the people following the authenticated user. */ + get: operations["users/list-followers-for-authenticated-user"]; + }; + "/user/following": { + /** Lists the people who the authenticated user follows. 
*/ + get: operations["users/list-followed-by-authenticated-user"]; + }; + "/user/following/{username}": { + get: operations["users/check-person-is-followed-by-authenticated"]; + /** + * Note that you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." + * + * Following a user requires the user to be logged in and authenticated with basic auth or OAuth with the `user:follow` scope. + */ + put: operations["users/follow"]; + /** Unfollowing a user requires the user to be logged in and authenticated with basic auth or OAuth with the `user:follow` scope. */ + delete: operations["users/unfollow"]; + }; + "/user/gpg_keys": { + /** Lists the current user's GPG keys. Requires that you are authenticated via Basic Auth or via OAuth with at least `read:gpg_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). */ + get: operations["users/list-gpg-keys-for-authenticated-user"]; + /** Adds a GPG key to the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth, or OAuth with at least `write:gpg_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). */ + post: operations["users/create-gpg-key-for-authenticated-user"]; + }; + "/user/gpg_keys/{gpg_key_id}": { + /** View extended details for a single GPG key. Requires that you are authenticated via Basic Auth or via OAuth with at least `read:gpg_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). */ + get: operations["users/get-gpg-key-for-authenticated-user"]; + /** Removes a GPG key from the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth or via OAuth with at least `admin:gpg_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). */ + delete: operations["users/delete-gpg-key-for-authenticated-user"]; + }; + "/user/installations": { + /** + * Lists installations of your GitHub App that the authenticated user has explicit permission (`:read`, `:write`, or `:admin`) to access. + * + * You must use a [user-to-server OAuth access token](https://docs.github.com/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps/#identifying-users-on-your-site), created for a user who has authorized your GitHub App, to access this endpoint. + * + * The authenticated user has explicit permission to access repositories they own, repositories where they are a collaborator, and repositories that they can access through an organization membership. + * + * You can find the permissions for the installation under the `permissions` key. + */ + get: operations["apps/list-installations-for-authenticated-user"]; + }; + "/user/installations/{installation_id}/repositories": { + /** + * List repositories that the authenticated user has explicit permission (`:read`, `:write`, or `:admin`) to access for an installation. + * + * The authenticated user has explicit permission to access repositories they own, repositories where they are a collaborator, and repositories that they can access through an organization membership. 
+ * + * You must use a [user-to-server OAuth access token](https://docs.github.com/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps/#identifying-users-on-your-site), created for a user who has authorized your GitHub App, to access this endpoint. + * + * The access the user has to each repository is included in the hash under the `permissions` key. + */ + get: operations["apps/list-installation-repos-for-authenticated-user"]; + }; + "/user/installations/{installation_id}/repositories/{repository_id}": { + /** + * Add a single repository to an installation. The authenticated user must have admin access to the repository. + * + * You must use a personal access token (which you can create via the [command line](https://docs.github.com/github/authenticating-to-github/creating-a-personal-access-token) or [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication)) to access this endpoint. + */ + put: operations["apps/add-repo-to-installation-for-authenticated-user"]; + /** + * Remove a single repository from an installation. The authenticated user must have admin access to the repository. + * + * You must use a personal access token (which you can create via the [command line](https://docs.github.com/github/authenticating-to-github/creating-a-personal-access-token) or [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication)) to access this endpoint. + */ + delete: operations["apps/remove-repo-from-installation-for-authenticated-user"]; + }; + "/user/interaction-limits": { + /** Shows which type of GitHub user can interact with your public repositories and when the restriction expires. */ + get: operations["interactions/get-restrictions-for-authenticated-user"]; + /** Temporarily restricts which type of GitHub user can interact with your public repositories. Setting the interaction limit at the user level will overwrite any interaction limits that are set for individual repositories owned by the user. */ + put: operations["interactions/set-restrictions-for-authenticated-user"]; + /** Removes any interaction restrictions from your public repositories. */ + delete: operations["interactions/remove-restrictions-for-authenticated-user"]; + }; + "/user/issues": { + /** + * List issues across owned and member repositories assigned to the authenticated user. + * + * **Note**: GitHub's REST API v3 considers every pull request an issue, but not every issue is a pull request. For this + * reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by + * the `pull_request` key. Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull + * request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint. + */ + get: operations["issues/list-for-authenticated-user"]; + }; + "/user/keys": { + /** Lists the public SSH keys for the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth or via OAuth with at least `read:public_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). */ + get: operations["users/list-public-ssh-keys-for-authenticated-user"]; + /** Adds a public SSH key to the authenticated user's GitHub account. 
Requires that you are authenticated via Basic Auth, or OAuth with at least `write:public_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). */ + post: operations["users/create-public-ssh-key-for-authenticated-user"]; + }; + "/user/keys/{key_id}": { + /** View extended details for a single public SSH key. Requires that you are authenticated via Basic Auth or via OAuth with at least `read:public_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). */ + get: operations["users/get-public-ssh-key-for-authenticated-user"]; + /** Removes a public SSH key from the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth or via OAuth with at least `admin:public_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). */ + delete: operations["users/delete-public-ssh-key-for-authenticated-user"]; + }; + "/user/marketplace_purchases": { + /** Lists the active subscriptions for the authenticated user. You must use a [user-to-server OAuth access token](https://docs.github.com/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps/#identifying-users-on-your-site), created for a user who has authorized your GitHub App, to access this endpoint. OAuth Apps must authenticate using an [OAuth token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/). */ + get: operations["apps/list-subscriptions-for-authenticated-user"]; + }; + "/user/marketplace_purchases/stubbed": { + /** Lists the active subscriptions for the authenticated user. You must use a [user-to-server OAuth access token](https://docs.github.com/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps/#identifying-users-on-your-site), created for a user who has authorized your GitHub App, to access this endpoint. OAuth Apps must authenticate using an [OAuth token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/). */ + get: operations["apps/list-subscriptions-for-authenticated-user-stubbed"]; + }; + "/user/memberships/orgs": { + get: operations["orgs/list-memberships-for-authenticated-user"]; + }; + "/user/memberships/orgs/{org}": { + get: operations["orgs/get-membership-for-authenticated-user"]; + patch: operations["orgs/update-membership-for-authenticated-user"]; + }; + "/user/migrations": { + /** Lists all migrations a user has started. */ + get: operations["migrations/list-for-authenticated-user"]; + /** Initiates the generation of a user migration archive. */ + post: operations["migrations/start-for-authenticated-user"]; + }; + "/user/migrations/{migration_id}": { + /** + * Fetches a single user migration. The response includes the `state` of the migration, which can be one of the following values: + * + * * `pending` - the migration hasn't started yet. + * * `exporting` - the migration is in progress. + * * `exported` - the migration finished successfully. + * * `failed` - the migration failed. + * + * Once the migration has been `exported` you can [download the migration archive](https://docs.github.com/rest/reference/migrations#download-a-user-migration-archive). + */ + get: operations["migrations/get-status-for-authenticated-user"]; + }; + "/user/migrations/{migration_id}/archive": { + /** + * Fetches the URL to download the migration archive as a `tar.gz` file.

Depending on the resources your repository uses, the migration archive can contain JSON files with data for these objects: + * + * * attachments + * * bases + * * commit\_comments + * * issue\_comments + * * issue\_events + * * issues + * * milestones + * * organizations + * * projects + * * protected\_branches + * * pull\_request\_reviews + * * pull\_requests + * * releases + * * repositories + * * review\_comments + * * schema + * * users + * + * The archive will also contain an `attachments` directory that includes all attachment files uploaded to GitHub.com and a `repositories` directory that contains the repository's Git data. + */ + get: operations["migrations/get-archive-for-authenticated-user"]; + /** Deletes a previous migration archive. Downloadable migration archives are automatically deleted after seven days. Migration metadata, which is returned in the [List user migrations](https://docs.github.com/rest/reference/migrations#list-user-migrations) and [Get a user migration status](https://docs.github.com/rest/reference/migrations#get-a-user-migration-status) endpoints, will continue to be available even after an archive is deleted. */ + delete: operations["migrations/delete-archive-for-authenticated-user"]; + }; + "/user/migrations/{migration_id}/repos/{repo_name}/lock": { + /** Unlocks a repository. You can lock repositories when you [start a user migration](https://docs.github.com/rest/reference/migrations#start-a-user-migration). Once the migration is complete you can unlock each repository to begin using it again or [delete the repository](https://docs.github.com/rest/reference/repos#delete-a-repository) if you no longer need the source data. Returns a status of `404 Not Found` if the repository is not locked. */ + delete: operations["migrations/unlock-repo-for-authenticated-user"]; + }; + "/user/migrations/{migration_id}/repositories": { + /** Lists all the repositories for this user migration. */ + get: operations["migrations/list-repos-for-authenticated-user"]; + }; + "/user/orgs": { + /** + * List organizations for the authenticated user. + * + * **OAuth scope requirements** + * + * This only lists organizations that your authorization allows you to operate on in some way (e.g., you can list teams with `read:org` scope, you can publicize your organization membership with `user` scope, etc.). Therefore, this API requires at least `user` or `read:org` scope. OAuth requests with insufficient scope receive a `403 Forbidden` response. + */ + get: operations["orgs/list-for-authenticated-user"]; + }; + "/user/packages": { + /** + * Lists packages owned by the authenticated user within the user's namespace. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + get: operations["packages/list-packages-for-authenticated-user"]; + }; + "/user/packages/{package_type}/{package_name}": { + /** + * Gets a specific package for a package owned by the authenticated user. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + get: operations["packages/get-package-for-authenticated-user"]; + /** + * Deletes a package owned by the authenticated user. You cannot delete a public package if any version of the package has more than 5,000 downloads. 
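As a rough illustration of the archive endpoint described above: the call answers with a redirect to a short-lived download URL, which `fetch` follows by default. This sketch assumes Node 18+, a suitably scoped token in `GITHUB_TOKEN`, and a placeholder migration id:

```typescript
// Hedged sketch: download a user migration archive as migration.tar.gz.
// The migration id (42) is a placeholder; fetch follows the redirect for us.
import { writeFile } from "node:fs/promises";

const resp = await fetch("https://api.github.com/user/migrations/42/archive", {
  headers: {
    Authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
    Accept: "application/vnd.github+json",
  },
});
await writeFile("migration.tar.gz", Buffer.from(await resp.arrayBuffer()));
```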
In this scenario, contact GitHub support for further assistance. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` and `packages:delete` scopes. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + delete: operations["packages/delete-package-for-authenticated-user"]; + }; + "/user/packages/{package_type}/{package_name}/restore": { + /** + * Restores a package owned by the authenticated user. + * + * You can restore a deleted package under the following conditions: + * - The package was deleted within the last 30 days. + * - The same package namespace and version is still available and not reused for a new package. If the same package namespace is not available, you will not be able to restore your package. In this scenario, to restore the deleted package, you must delete the new package that uses the deleted package's namespace first. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` and `packages:write` scopes. If `package_type` is not `container`, your token must also include the `repo` scope. + */ + post: operations["packages/restore-package-for-authenticated-user"]; + }; + "/user/packages/{package_type}/{package_name}/versions": { + /** + * Lists package versions for a package owned by the authenticated user. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + get: operations["packages/get-all-package-versions-for-package-owned-by-authenticated-user"]; + }; + "/user/packages/{package_type}/{package_name}/versions/{package_version_id}": { + /** + * Gets a specific package version for a package owned by the authenticated user. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + get: operations["packages/get-package-version-for-authenticated-user"]; + /** + * Deletes a specific package version for a package owned by the authenticated user. If the package is public and the package version has more than 5,000 downloads, you cannot delete the package version. In this scenario, contact GitHub support for further assistance. + * + * To use this endpoint, you must have admin permissions in the organization and authenticate using an access token with the `packages:read` and `packages:delete` scopes. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + delete: operations["packages/delete-package-version-for-authenticated-user"]; + }; + "/user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore": { + /** + * Restores a package version owned by the authenticated user. + * + * You can restore a deleted package version under the following conditions: + * - The package was deleted within the last 30 days. + * - The same package namespace and version is still available and not reused for a new package. If the same package namespace is not available, you will not be able to restore your package. In this scenario, to restore the deleted package, you must delete the new package that uses the deleted package's namespace first. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` and `packages:write` scopes. 
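A hedged sketch of the delete-and-restore flow described above for a package owned by the authenticated user; the `npm` package type and package name are illustrative, and the token is assumed to carry the `packages:read`, `packages:write`, and `packages:delete` scopes:

```typescript
// Hedged sketch: delete a package, then restore it within the 30-day window.
const headers = {
  Authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
  Accept: "application/vnd.github+json",
};
const base = "https://api.github.com/user/packages/npm/my-package"; // illustrative name

// Delete the package (only allowed if no version has more than 5,000 downloads).
await fetch(base, { method: "DELETE", headers });

// Restore it, provided the namespace and version have not been reused.
await fetch(`${base}/restore`, { method: "POST", headers });
```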
If `package_type` is not `container`, your token must also include the `repo` scope. + */ + post: operations["packages/restore-package-version-for-authenticated-user"]; + }; + "/user/projects": { + /** Creates a user project board. Returns a `410 Gone` status if the user does not have existing classic projects. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. */ + post: operations["projects/create-for-authenticated-user"]; + }; + "/user/public_emails": { + /** Lists your publicly visible email address, which you can set with the [Set primary email visibility for the authenticated user](https://docs.github.com/rest/reference/users#set-primary-email-visibility-for-the-authenticated-user) endpoint. This endpoint is accessible with the `user:email` scope. */ + get: operations["users/list-public-emails-for-authenticated-user"]; + }; + "/user/repos": { + /** + * Lists repositories that the authenticated user has explicit permission (`:read`, `:write`, or `:admin`) to access. + * + * The authenticated user has explicit permission to access repositories they own, repositories where they are a collaborator, and repositories that they can access through an organization membership. + */ + get: operations["repos/list-for-authenticated-user"]; + /** + * Creates a new repository for the authenticated user. + * + * **OAuth scope requirements** + * + * When using [OAuth](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/), authorizations must include: + * + * * `public_repo` scope or `repo` scope to create a public repository. Note: For GitHub AE, use `repo` scope to create an internal repository. + * * `repo` scope to create a private repository. + */ + post: operations["repos/create-for-authenticated-user"]; + }; + "/user/repository_invitations": { + /** When authenticating as a user, this endpoint will list all currently open repository invitations for that user. */ + get: operations["repos/list-invitations-for-authenticated-user"]; + }; + "/user/repository_invitations/{invitation_id}": { + delete: operations["repos/decline-invitation-for-authenticated-user"]; + patch: operations["repos/accept-invitation-for-authenticated-user"]; + }; + "/user/ssh_signing_keys": { + /** Lists the SSH signing keys for the authenticated user's GitHub account. You must authenticate with Basic Authentication, or you must authenticate with OAuth with at least `read:ssh_signing_key` scope. For more information, see "[Understanding scopes for OAuth apps](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/)." */ + get: operations["users/list-ssh-signing-keys-for-authenticated-user"]; + /** Creates an SSH signing key for the authenticated user's GitHub account. You must authenticate with Basic Authentication, or you must authenticate with OAuth with at least `write:ssh_signing_key` scope. For more information, see "[Understanding scopes for OAuth apps](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/)." */ + post: operations["users/create-ssh-signing-key-for-authenticated-user"]; + }; + "/user/ssh_signing_keys/{ssh_signing_key_id}": { + /** Gets extended details for an SSH signing key. You must authenticate with Basic Authentication, or you must authenticate with OAuth with at least `read:ssh_signing_key` scope. 
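For the repository-creation endpoint described above, a minimal sketch might be the following; it assumes Node 18+, a token with the `repo` scope in `GITHUB_TOKEN`, and uses only a small subset of the documented request fields:

```typescript
// Hedged sketch: create a private repository for the authenticated user.
// `public_repo` scope is enough for public repositories; `repo` covers both.
const resp = await fetch("https://api.github.com/user/repos", {
  method: "POST",
  headers: {
    Authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
    Accept: "application/vnd.github+json",
    "Content-Type": "application/json",
  },
  body: JSON.stringify({ name: "hello-world", private: true, auto_init: true }),
});
console.log(resp.status); // 201 on success
```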
For more information, see "[Understanding scopes for OAuth apps](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/)." */ + get: operations["users/get-ssh-signing-key-for-authenticated-user"]; + /** Deletes an SSH signing key from the authenticated user's GitHub account. You must authenticate with Basic Authentication, or you must authenticate with OAuth with at least `admin:ssh_signing_key` scope. For more information, see "[Understanding scopes for OAuth apps](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/)." */ + delete: operations["users/delete-ssh-signing-key-for-authenticated-user"]; + }; + "/user/starred": { + /** + * Lists repositories the authenticated user has starred. + * + * You can also find out _when_ stars were created by passing the following custom [media type](https://docs.github.com/rest/overview/media-types/) via the `Accept` header: `application/vnd.github.star+json`. + */ + get: operations["activity/list-repos-starred-by-authenticated-user"]; + }; + "/user/starred/{owner}/{repo}": { + get: operations["activity/check-repo-is-starred-by-authenticated-user"]; + /** Note that you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." */ + put: operations["activity/star-repo-for-authenticated-user"]; + delete: operations["activity/unstar-repo-for-authenticated-user"]; + }; + "/user/subscriptions": { + /** Lists repositories the authenticated user is watching. */ + get: operations["activity/list-watched-repos-for-authenticated-user"]; + }; + "/user/teams": { + /** List all of the teams across all of the organizations to which the authenticated user belongs. This method requires `user`, `repo`, or `read:org` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/) when authenticating via [OAuth](https://docs.github.com/apps/building-oauth-apps/). */ + get: operations["teams/list-for-authenticated-user"]; + }; + "/users": { + /** + * Lists all users, in the order that they signed up on GitHub. This list includes personal user accounts and organization accounts. + * + * Note: Pagination is powered exclusively by the `since` parameter. Use the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header) to get the URL for the next page of users. + */ + get: operations["users/list"]; + }; + "/users/{username}": { + /** + * Provides publicly available information about someone with a GitHub account. + * + * GitHub Apps with the `Plan` user permission can use this endpoint to retrieve information about a user's GitHub plan. The GitHub App must be authenticated as a user. See "[Identifying and authorizing users for GitHub Apps](https://docs.github.com/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps/)" for details about authentication. For an example response, see 'Response with GitHub plan information' below" + * + * The `email` key in the following response is the publicly visible email address from your GitHub [profile page](https://github.com/settings/profile). When setting up your profile, you can select a primary email address to be “public” which provides an email entry for this endpoint. If you do not set a public email address for `email`, then it will have a value of `null`. You only see publicly visible email addresses when authenticated with GitHub. 
For more information, see [Authentication](https://docs.github.com/rest/overview/resources-in-the-rest-api#authentication). + * + * The Emails API enables you to list all of your email addresses, and toggle a primary email to be visible publicly. For more information, see "[Emails API](https://docs.github.com/rest/reference/users#emails)". + */ + get: operations["users/get-by-username"]; + }; + "/users/{username}/events": { + /** If you are authenticated as the given user, you will see your private events. Otherwise, you'll only see public events. */ + get: operations["activity/list-events-for-authenticated-user"]; + }; + "/users/{username}/events/orgs/{org}": { + /** This is the user's organization dashboard. You must be authenticated as the user to view this. */ + get: operations["activity/list-org-events-for-authenticated-user"]; + }; + "/users/{username}/events/public": { + get: operations["activity/list-public-events-for-user"]; + }; + "/users/{username}/followers": { + /** Lists the people following the specified user. */ + get: operations["users/list-followers-for-user"]; + }; + "/users/{username}/following": { + /** Lists the people who the specified user follows. */ + get: operations["users/list-following-for-user"]; + }; + "/users/{username}/following/{target_user}": { + get: operations["users/check-following-for-user"]; + }; + "/users/{username}/gists": { + /** Lists public gists for the specified user: */ + get: operations["gists/list-for-user"]; + }; + "/users/{username}/gpg_keys": { + /** Lists the GPG keys for a user. This information is accessible by anyone. */ + get: operations["users/list-gpg-keys-for-user"]; + }; + "/users/{username}/hovercard": { + /** + * Provides hovercard information when authenticated through basic auth or OAuth with the `repo` scope. You can find out more about someone in relation to their pull requests, issues, repositories, and organizations. + * + * The `subject_type` and `subject_id` parameters provide context for the person's hovercard, which returns more information than without the parameters. For example, if you wanted to find out more about `octocat` who owns the `Spoon-Knife` repository via cURL, it would look like this: + * + * ```shell + * curl -u username:token + * https://api.github.com/users/octocat/hovercard?subject_type=repository&subject_id=1300192 + * ``` + */ + get: operations["users/get-context-for-user"]; + }; + "/users/{username}/installation": { + /** + * Enables an authenticated GitHub App to find the user’s installation information. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + get: operations["apps/get-user-installation"]; + }; + "/users/{username}/keys": { + /** Lists the _verified_ public SSH keys for a user. This is accessible by anyone. */ + get: operations["users/list-public-keys-for-user"]; + }; + "/users/{username}/orgs": { + /** + * List [public organization memberships](https://docs.github.com/articles/publicizing-or-concealing-organization-membership) for the specified user. + * + * This method only lists _public_ memberships, regardless of authentication. If you need to fetch all of the organization memberships (public and private) for the authenticated user, use the [List organizations for the authenticated user](https://docs.github.com/rest/reference/orgs#list-organizations-for-the-authenticated-user) API instead. 
+ */ + get: operations["orgs/list-for-user"]; + }; + "/users/{username}/packages": { + /** + * Lists all packages in a user's namespace for which the requesting user has access. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + get: operations["packages/list-packages-for-user"]; + }; + "/users/{username}/packages/{package_type}/{package_name}": { + /** + * Gets a specific package metadata for a public package owned by a user. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + get: operations["packages/get-package-for-user"]; + /** + * Deletes an entire package for a user. You cannot delete a public package if any version of the package has more than 5,000 downloads. In this scenario, contact GitHub support for further assistance. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` and `packages:delete` scopes. In addition: + * - If `package_type` is not `container`, your token must also include the `repo` scope. + * - If `package_type` is `container`, you must also have admin permissions to the container you want to delete. + */ + delete: operations["packages/delete-package-for-user"]; + }; + "/users/{username}/packages/{package_type}/{package_name}/restore": { + /** + * Restores an entire package for a user. + * + * You can restore a deleted package under the following conditions: + * - The package was deleted within the last 30 days. + * - The same package namespace and version is still available and not reused for a new package. If the same package namespace is not available, you will not be able to restore your package. In this scenario, to restore the deleted package, you must delete the new package that uses the deleted package's namespace first. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` and `packages:write` scopes. In addition: + * - If `package_type` is not `container`, your token must also include the `repo` scope. + * - If `package_type` is `container`, you must also have admin permissions to the container that you want to restore. + */ + post: operations["packages/restore-package-for-user"]; + }; + "/users/{username}/packages/{package_type}/{package_name}/versions": { + /** + * Lists package versions for a public package owned by a specified user. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + get: operations["packages/get-all-package-versions-for-package-owned-by-user"]; + }; + "/users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}": { + /** + * Gets a specific package version for a public package owned by a specified user. + * + * At this time, to use this endpoint, you must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + get: operations["packages/get-package-version-for-user"]; + /** + * Deletes a specific package version for a user. If the package is public and the package version has more than 5,000 downloads, you cannot delete the package version. 
In this scenario, contact GitHub support for further assistance. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` and `packages:delete` scopes. In addition: + * - If `package_type` is not `container`, your token must also include the `repo` scope. + * - If `package_type` is `container`, you must also have admin permissions to the container you want to delete. + */ + delete: operations["packages/delete-package-version-for-user"]; + }; + "/users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore": { + /** + * Restores a specific package version for a user. + * + * You can restore a deleted package under the following conditions: + * - The package was deleted within the last 30 days. + * - The same package namespace and version is still available and not reused for a new package. If the same package namespace is not available, you will not be able to restore your package. In this scenario, to restore the deleted package, you must delete the new package that uses the deleted package's namespace first. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` and `packages:write` scopes. In addition: + * - If `package_type` is not `container`, your token must also include the `repo` scope. + * - If `package_type` is `container`, you must also have admin permissions to the container that you want to restore. + */ + post: operations["packages/restore-package-version-for-user"]; + }; + "/users/{username}/projects": { + get: operations["projects/list-for-user"]; + }; + "/users/{username}/received_events": { + /** These are events that you've received by watching repos and following users. If you are authenticated as the given user, you will see private events. Otherwise, you'll only see public events. */ + get: operations["activity/list-received-events-for-user"]; + }; + "/users/{username}/received_events/public": { + get: operations["activity/list-received-public-events-for-user"]; + }; + "/users/{username}/repos": { + /** Lists public repositories for the specified user. Note: For GitHub AE, this endpoint will list internal repositories for the specified user. */ + get: operations["repos/list-for-user"]; + }; + "/users/{username}/settings/billing/actions": { + /** + * Gets the summary of the free and paid GitHub Actions minutes used. + * + * Paid minutes only apply to workflows in private repositories that use GitHub-hosted runners. Minutes used is listed for each GitHub-hosted runner operating system. Any job re-runs are also included in the usage. The usage returned includes any minute multipliers for macOS and Windows runners, and is rounded up to the nearest whole minute. For more information, see "[Managing billing for GitHub Actions](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-actions)". + * + * Access tokens must have the `user` scope. + */ + get: operations["billing/get-github-actions-billing-user"]; + }; + "/users/{username}/settings/billing/packages": { + /** + * Gets the free and paid storage used for GitHub Packages in gigabytes. + * + * Paid minutes only apply to packages stored for private repositories. For more information, see "[Managing billing for GitHub Packages](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-packages)." + * + * Access tokens must have the `user` scope. 
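A small sketch of reading the billing summaries described above; the username, and the response fields echoed here (`total_minutes_used`, `included_minutes`, `total_gigabytes_bandwidth_used`), are assumptions based on the documented schemas for these endpoints:

```typescript
// Hedged sketch: read the Actions and Packages billing summaries for a user.
// Assumes Node 18+ and a token with the `user` scope in GITHUB_TOKEN.
const headers = {
  Authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
  Accept: "application/vnd.github+json",
};

const actions = await fetch(
  "https://api.github.com/users/octocat/settings/billing/actions",
  { headers }
).then((r) => r.json());
console.log(`Minutes used: ${actions.total_minutes_used} of ${actions.included_minutes} included`);

const packages = await fetch(
  "https://api.github.com/users/octocat/settings/billing/packages",
  { headers }
).then((r) => r.json());
console.log(`Packages bandwidth used (GB): ${packages.total_gigabytes_bandwidth_used}`);
```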
+ */ + get: operations["billing/get-github-packages-billing-user"]; + }; + "/users/{username}/settings/billing/shared-storage": { + /** + * Gets the estimated paid and estimated total storage used for GitHub Actions and GitHub Packages. + * + * Paid minutes only apply to packages stored for private repositories. For more information, see "[Managing billing for GitHub Packages](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-packages)." + * + * Access tokens must have the `user` scope. + */ + get: operations["billing/get-shared-storage-billing-user"]; + }; + "/users/{username}/ssh_signing_keys": { + /** Lists the SSH signing keys for a user. This operation is accessible by anyone. */ + get: operations["users/list-ssh-signing-keys-for-user"]; + }; + "/users/{username}/starred": { + /** + * Lists repositories a user has starred. + * + * You can also find out _when_ stars were created by passing the following custom [media type](https://docs.github.com/rest/overview/media-types/) via the `Accept` header: `application/vnd.github.star+json`. + */ + get: operations["activity/list-repos-starred-by-user"]; + }; + "/users/{username}/subscriptions": { + /** Lists repositories a user is watching. */ + get: operations["activity/list-repos-watched-by-user"]; + }; + "/zen": { + /** Get a random sentence from the Zen of GitHub */ + get: operations["meta/get-zen"]; + }; + "/repos/{owner}/{repo}/compare/{base}...{head}": { + /** + * **Deprecated**: Use `repos.compareCommitsWithBasehead()` (`GET /repos/{owner}/{repo}/compare/{basehead}`) instead. Both `:base` and `:head` must be branch names in `:repo`. To compare branches across other repositories in the same network as `:repo`, use the format `:branch`. + * + * The response from the API is equivalent to running the `git log base..head` command; however, commits are returned in chronological order. Pass the appropriate [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) to fetch diff and patch formats. + * + * The response also includes details on the files that were changed between the two commits. This includes the status of the change (for example, if a file was added, removed, modified, or renamed), and details of the change itself. For example, files with a `renamed` status have a `previous_filename` field showing the previous filename of the file, and files with a `modified` status have a `patch` field showing the changes made to the file. + * + * **Working with large comparisons** + * + * To process a response with a large number of commits, you can use (`per_page` or `page`) to paginate the results. When using paging, the list of changed files is only returned with page 1, but includes all changed files for the entire comparison. For more information on working with pagination, see "[Traversing with pagination](/rest/guides/traversing-with-pagination)." + * + * When calling this API without any paging parameters (`per_page` or `page`), the returned list is limited to 250 commits and the last commit in the list is the most recent of the entire comparison. When a paging parameter is specified, the first commit in the returned list of each page is the earliest. + * + * **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. 
The following fields are included in the `verification` object: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for the verified value. Possible values and their meanings are enumerated in the table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. | + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. | + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on their account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. | + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified.
| + */ + get: operations["repos/compare-commits"]; + }; +} + +export interface components { + schemas: { + root: { + /** Format: uri-template */ + current_user_url: string; + /** Format: uri-template */ + current_user_authorizations_html_url: string; + /** Format: uri-template */ + authorizations_url: string; + /** Format: uri-template */ + code_search_url: string; + /** Format: uri-template */ + commit_search_url: string; + /** Format: uri-template */ + emails_url: string; + /** Format: uri-template */ + emojis_url: string; + /** Format: uri-template */ + events_url: string; + /** Format: uri-template */ + feeds_url: string; + /** Format: uri-template */ + followers_url: string; + /** Format: uri-template */ + following_url: string; + /** Format: uri-template */ + gists_url: string; + /** Format: uri-template */ + hub_url: string; + /** Format: uri-template */ + issue_search_url: string; + /** Format: uri-template */ + issues_url: string; + /** Format: uri-template */ + keys_url: string; + /** Format: uri-template */ + label_search_url: string; + /** Format: uri-template */ + notifications_url: string; + /** Format: uri-template */ + organization_url: string; + /** Format: uri-template */ + organization_repositories_url: string; + /** Format: uri-template */ + organization_teams_url: string; + /** Format: uri-template */ + public_gists_url: string; + /** Format: uri-template */ + rate_limit_url: string; + /** Format: uri-template */ + repository_url: string; + /** Format: uri-template */ + repository_search_url: string; + /** Format: uri-template */ + current_user_repositories_url: string; + /** Format: uri-template */ + starred_url: string; + /** Format: uri-template */ + starred_gists_url: string; + /** Format: uri-template */ + topic_search_url?: string; + /** Format: uri-template */ + user_url: string; + /** Format: uri-template */ + user_organizations_url: string; + /** Format: uri-template */ + user_repositories_url: string; + /** Format: uri-template */ + user_search_url: string; + }; + /** + * Simple User + * @description Simple User + */ + "nullable-simple-user": { + name?: string | null; + email?: string | null; + /** @example octocat */ + login: string; + /** @example 1 */ + id: number; + /** @example MDQ6VXNlcjE= */ + node_id: string; + /** + * Format: uri + * @example https://github.com/images/error/octocat_happy.gif + */ + avatar_url: string; + /** @example 41d064eb2195891e12d0413f63227ea7 */ + gravatar_id: string | null; + /** + * Format: uri + * @example https://api.github.com/users/octocat + */ + url: string; + /** + * Format: uri + * @example https://github.com/octocat + */ + html_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/followers + */ + followers_url: string; + /** @example https://api.github.com/users/octocat/following{/other_user} */ + following_url: string; + /** @example https://api.github.com/users/octocat/gists{/gist_id} */ + gists_url: string; + /** @example https://api.github.com/users/octocat/starred{/owner}{/repo} */ + starred_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/subscriptions + */ + subscriptions_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/orgs + */ + organizations_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/repos + */ + repos_url: string; + /** @example https://api.github.com/users/octocat/events{/privacy} */ + events_url: string; + /** + * Format: uri + * @example 
https://api.github.com/users/octocat/received_events + */ + received_events_url: string; + /** @example User */ + type: string; + site_admin: boolean; + /** @example "2020-07-09T00:17:55Z" */ + starred_at?: string; + } | null; + /** + * GitHub app + * @description GitHub apps are a new way to extend GitHub. They can be installed directly on organizations and user accounts and granted access to specific repositories. They come with granular permissions and built-in webhooks. GitHub apps are first class actors within GitHub. + */ + integration: { + /** + * @description Unique identifier of the GitHub app + * @example 37 + */ + id: number; + /** + * @description The slug name of the GitHub app + * @example probot-owners + */ + slug?: string; + /** @example MDExOkludGVncmF0aW9uMQ== */ + node_id: string; + owner: components["schemas"]["nullable-simple-user"]; + /** + * @description The name of the GitHub app + * @example Probot Owners + */ + name: string; + /** @example The description of the app. */ + description: string | null; + /** + * Format: uri + * @example https://example.com + */ + external_url: string; + /** + * Format: uri + * @example https://github.com/apps/super-ci + */ + html_url: string; + /** + * Format: date-time + * @example 2017-07-08T16:18:44-04:00 + */ + created_at: string; + /** + * Format: date-time + * @example 2017-07-08T16:18:44-04:00 + */ + updated_at: string; + /** + * @description The set of permissions for the GitHub app + * @example { + * "issues": "read", + * "deployments": "write" + * } + */ + permissions: { + issues?: string; + checks?: string; + metadata?: string; + contents?: string; + deployments?: string; + } & { [key: string]: string }; + /** + * @description The list of events for the GitHub app + * @example [ + * "label", + * "deployment" + * ] + */ + events: string[]; + /** + * @description The number of installations associated with the GitHub app + * @example 5 + */ + installations_count?: number; + /** @example "Iv1.25b5d1e65ffc4022" */ + client_id?: string; + /** @example "1d4b2097ac622ba702d19de498f005747a8b21d3" */ + client_secret?: string; + /** @example "6fba8f2fc8a7e8f2cca5577eddd82ca7586b3b6b" */ + webhook_secret?: string | null; + /** @example "-----BEGIN RSA PRIVATE 
KEY-----\nMIIEogIBAAKCAQEArYxrNYD/iT5CZVpRJu4rBKmmze3PVmT/gCo2ATUvDvZTPTey\nxcGJ3vvrJXazKk06pN05TN29o98jrYz4cengG3YGsXPNEpKsIrEl8NhbnxapEnM9\nJCMRe0P5JcPsfZlX6hmiT7136GRWiGOUba2X9+HKh8QJVLG5rM007TBER9/z9mWm\nrJuNh+m5l320oBQY/Qq3A7wzdEfZw8qm/mIN0FCeoXH1L6B8xXWaAYBwhTEh6SSn\nZHlO1Xu1JWDmAvBCi0RO5aRSKM8q9QEkvvHP4yweAtK3N8+aAbZ7ovaDhyGz8r6r\nzhU1b8Uo0Z2ysf503WqzQgIajr7Fry7/kUwpgQIDAQABAoIBADwJp80Ko1xHPZDy\nfcCKBDfIuPvkmSW6KumbsLMaQv1aGdHDwwTGv3t0ixSay8CGlxMRtRDyZPib6SvQ\n6OH/lpfpbMdW2ErkksgtoIKBVrDilfrcAvrNZu7NxRNbhCSvN8q0s4ICecjbbVQh\nnueSdlA6vGXbW58BHMq68uRbHkP+k+mM9U0mDJ1HMch67wlg5GbayVRt63H7R2+r\nVxcna7B80J/lCEjIYZznawgiTvp3MSanTglqAYi+m1EcSsP14bJIB9vgaxS79kTu\noiSo93leJbBvuGo8QEiUqTwMw4tDksmkLsoqNKQ1q9P7LZ9DGcujtPy4EZsamSJT\ny8OJt0ECgYEA2lxOxJsQk2kI325JgKFjo92mQeUObIvPfSNWUIZQDTjniOI6Gv63\nGLWVFrZcvQBWjMEQraJA9xjPbblV8PtfO87MiJGLWCHFxmPz2dzoedN+2Coxom8m\nV95CLz8QUShuao6u/RYcvUaZEoYs5bHcTmy5sBK80JyEmafJPtCQVxMCgYEAy3ar\nZr3yv4xRPEPMat4rseswmuMooSaK3SKub19WFI5IAtB/e7qR1Rj9JhOGcZz+OQrl\nT78O2OFYlgOIkJPvRMrPpK5V9lslc7tz1FSh3BZMRGq5jSyD7ETSOQ0c8T2O/s7v\nbeEPbVbDe4mwvM24XByH0GnWveVxaDl51ABD65sCgYB3ZAspUkOA5egVCh8kNpnd\nSd6SnuQBE3ySRlT2WEnCwP9Ph6oPgn+oAfiPX4xbRqkL8q/k0BdHQ4h+zNwhk7+h\nWtPYRAP1Xxnc/F+jGjb+DVaIaKGU18MWPg7f+FI6nampl3Q0KvfxwX0GdNhtio8T\nTj1E+SnFwh56SRQuxSh2gwKBgHKjlIO5NtNSflsUYFM+hyQiPiqnHzddfhSG+/3o\nm5nNaSmczJesUYreH5San7/YEy2UxAugvP7aSY2MxB+iGsiJ9WD2kZzTUlDZJ7RV\nUzWsoqBR+eZfVJ2FUWWvy8TpSG6trh4dFxImNtKejCR1TREpSiTV3Zb1dmahK9GV\nrK9NAoGAbBxRLoC01xfxCTgt5BDiBcFVh4fp5yYKwavJPLzHSpuDOrrI9jDn1oKN\nonq5sDU1i391zfQvdrbX4Ova48BN+B7p63FocP/MK5tyyBoT8zQEk2+vWDOw7H/Z\nu5dTCPxTIsoIwUw1I+7yIxqJzLPFgR2gVBwY1ra/8iAqCj+zeBw=\n-----END RSA PRIVATE KEY-----\n" */ + pem?: string; + }; + /** + * Basic Error + * @description Basic Error + */ + "basic-error": { + message?: string; + documentation_url?: string; + url?: string; + status?: string; + }; + /** + * Validation Error Simple + * @description Validation Error Simple + */ + "validation-error-simple": { + message: string; + documentation_url: string; + errors?: string[]; + }; + /** + * Format: uri + * @description The URL to which the payloads will be delivered. + * @example https://example.com/webhook + */ + "webhook-config-url": string; + /** + * @description The media type used to serialize the payloads. Supported values include `json` and `form`. The default is `form`. + * @example "json" + */ + "webhook-config-content-type": string; + /** + * @description If provided, the `secret` will be used as the `key` to generate the HMAC hex digest value for [delivery signature headers](https://docs.github.com/webhooks/event-payloads/#delivery-headers). + * @example "********" + */ + "webhook-config-secret": string; + "webhook-config-insecure-ssl": string | number; + /** + * Webhook Configuration + * @description Configuration object of the webhook + */ + "webhook-config": { + url?: components["schemas"]["webhook-config-url"]; + content_type?: components["schemas"]["webhook-config-content-type"]; + secret?: components["schemas"]["webhook-config-secret"]; + insecure_ssl?: components["schemas"]["webhook-config-insecure-ssl"]; + }; + /** + * Simple webhook delivery + * @description Delivery made by a webhook, without request and response information. + */ + "hook-delivery-item": { + /** + * @description Unique identifier of the webhook delivery. + * @example 42 + */ + id: number; + /** + * @description Unique identifier for the event (shared with all deliveries for all webhooks that subscribe to this event). 
+ * @example 58474f00-b361-11eb-836d-0e4f3503ccbe + */ + guid: string; + /** + * Format: date-time + * @description Time when the webhook delivery occurred. + * @example 2021-05-12T20:33:44Z + */ + delivered_at: string; + /** + * @description Whether the webhook delivery is a redelivery. + * @example false + */ + redelivery: boolean; + /** + * @description Time spent delivering. + * @example 0.03 + */ + duration: number; + /** + * @description Describes the response returned after attempting the delivery. + * @example failed to connect + */ + status: string; + /** + * @description Status code received when delivery was made. + * @example 502 + */ + status_code: number; + /** + * @description The event that triggered the delivery. + * @example issues + */ + event: string; + /** + * @description The type of activity for the event that triggered the delivery. + * @example opened + */ + action: string | null; + /** + * @description The id of the GitHub App installation associated with this event. + * @example 123 + */ + installation_id: number | null; + /** + * @description The id of the repository associated with this event. + * @example 123 + */ + repository_id: number | null; + }; + /** + * Scim Error + * @description Scim Error + */ + "scim-error": { + message?: string | null; + documentation_url?: string | null; + detail?: string | null; + status?: number; + scimType?: string | null; + schemas?: string[]; + }; + /** + * Validation Error + * @description Validation Error + */ + "validation-error": { + message: string; + documentation_url: string; + errors?: { + resource?: string; + field?: string; + message?: string; + code: string; + index?: number; + value?: (string | null) | (number | null) | (string[] | null); + }[]; + }; + /** + * Webhook delivery + * @description Delivery made by a webhook. + */ + "hook-delivery": { + /** + * @description Unique identifier of the delivery. + * @example 42 + */ + id: number; + /** + * @description Unique identifier for the event (shared with all deliveries for all webhooks that subscribe to this event). + * @example 58474f00-b361-11eb-836d-0e4f3503ccbe + */ + guid: string; + /** + * Format: date-time + * @description Time when the delivery was delivered. + * @example 2021-05-12T20:33:44Z + */ + delivered_at: string; + /** + * @description Whether the delivery is a redelivery. + * @example false + */ + redelivery: boolean; + /** + * @description Time spent delivering. + * @example 0.03 + */ + duration: number; + /** + * @description Description of the status of the attempted delivery + * @example failed to connect + */ + status: string; + /** + * @description Status code received when delivery was made. + * @example 502 + */ + status_code: number; + /** + * @description The event that triggered the delivery. + * @example issues + */ + event: string; + /** + * @description The type of activity for the event that triggered the delivery. + * @example opened + */ + action: string | null; + /** + * @description The id of the GitHub App installation associated with this event. + * @example 123 + */ + installation_id: number | null; + /** + * @description The id of the repository associated with this event. + * @example 123 + */ + repository_id: number | null; + /** + * @description The URL target of the delivery. + * @example https://www.example.com + */ + url?: string; + request: { + /** @description The request headers sent with the webhook delivery. */ + headers: { [key: string]: unknown } | null; + /** @description The webhook payload. 
*/ + payload: { [key: string]: unknown } | null; + }; + response: { + /** @description The response headers received when the delivery was made. */ + headers: { [key: string]: unknown } | null; + /** @description The response payload received. */ + payload: string | null; + }; + }; + /** + * Simple User + * @description Simple User + */ + "simple-user": { + name?: string | null; + email?: string | null; + /** @example octocat */ + login: string; + /** @example 1 */ + id: number; + /** @example MDQ6VXNlcjE= */ + node_id: string; + /** + * Format: uri + * @example https://github.com/images/error/octocat_happy.gif + */ + avatar_url: string; + /** @example 41d064eb2195891e12d0413f63227ea7 */ + gravatar_id: string | null; + /** + * Format: uri + * @example https://api.github.com/users/octocat + */ + url: string; + /** + * Format: uri + * @example https://github.com/octocat + */ + html_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/followers + */ + followers_url: string; + /** @example https://api.github.com/users/octocat/following{/other_user} */ + following_url: string; + /** @example https://api.github.com/users/octocat/gists{/gist_id} */ + gists_url: string; + /** @example https://api.github.com/users/octocat/starred{/owner}{/repo} */ + starred_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/subscriptions + */ + subscriptions_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/orgs + */ + organizations_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/repos + */ + repos_url: string; + /** @example https://api.github.com/users/octocat/events{/privacy} */ + events_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/received_events + */ + received_events_url: string; + /** @example User */ + type: string; + site_admin: boolean; + /** @example "2020-07-09T00:17:55Z" */ + starred_at?: string; + }; + /** + * Enterprise + * @description An enterprise account + */ + enterprise: { + /** @description A short description of the enterprise. */ + description?: string | null; + /** + * Format: uri + * @example https://github.com/enterprises/octo-business + */ + html_url: string; + /** + * Format: uri + * @description The enterprise's website URL. + */ + website_url?: string | null; + /** + * @description Unique identifier of the enterprise + * @example 42 + */ + id: number; + /** @example MDEwOlJlcG9zaXRvcnkxMjk2MjY5 */ + node_id: string; + /** + * @description The name of the enterprise. + * @example Octo Business + */ + name: string; + /** + * @description The slug url identifier for the enterprise. + * @example octo-business + */ + slug: string; + /** + * Format: date-time + * @example 2019-01-26T19:01:12Z + */ + created_at: string | null; + /** + * Format: date-time + * @example 2019-01-26T19:14:43Z + */ + updated_at: string | null; + /** Format: uri */ + avatar_url: string; + }; + /** + * App Permissions + * @description The permissions granted to the user-to-server access token. + * @example { + * "contents": "read", + * "issues": "read", + * "deployments": "write", + * "single_file": "read" + * } + */ + "app-permissions": { + /** + * @description The level of permission to grant the access token for GitHub Actions workflows, workflow runs, and artifacts. 
+ * @enum {string} + */ + actions?: "read" | "write"; + /** + * @description The level of permission to grant the access token for repository creation, deletion, settings, teams, and collaborators creation. + * @enum {string} + */ + administration?: "read" | "write"; + /** + * @description The level of permission to grant the access token for checks on code. + * @enum {string} + */ + checks?: "read" | "write"; + /** + * @description The level of permission to grant the access token for repository contents, commits, branches, downloads, releases, and merges. + * @enum {string} + */ + contents?: "read" | "write"; + /** + * @description The level of permission to grant the access token for deployments and deployment statuses. + * @enum {string} + */ + deployments?: "read" | "write"; + /** + * @description The level of permission to grant the access token for managing repository environments. + * @enum {string} + */ + environments?: "read" | "write"; + /** + * @description The level of permission to grant the access token for issues and related comments, assignees, labels, and milestones. + * @enum {string} + */ + issues?: "read" | "write"; + /** + * @description The level of permission to grant the access token to search repositories, list collaborators, and access repository metadata. + * @enum {string} + */ + metadata?: "read" | "write"; + /** + * @description The level of permission to grant the access token for packages published to GitHub Packages. + * @enum {string} + */ + packages?: "read" | "write"; + /** + * @description The level of permission to grant the access token to retrieve Pages statuses, configuration, and builds, as well as create new builds. + * @enum {string} + */ + pages?: "read" | "write"; + /** + * @description The level of permission to grant the access token for pull requests and related comments, assignees, labels, milestones, and merges. + * @enum {string} + */ + pull_requests?: "read" | "write"; + /** + * @description The level of permission to grant the access token to manage the post-receive hooks for a repository. + * @enum {string} + */ + repository_hooks?: "read" | "write"; + /** + * @description The level of permission to grant the access token to manage repository projects, columns, and cards. + * @enum {string} + */ + repository_projects?: "read" | "write" | "admin"; + /** + * @description The level of permission to grant the access token to view and manage secret scanning alerts. + * @enum {string} + */ + secret_scanning_alerts?: "read" | "write"; + /** + * @description The level of permission to grant the access token to manage repository secrets. + * @enum {string} + */ + secrets?: "read" | "write"; + /** + * @description The level of permission to grant the access token to view and manage security events like code scanning alerts. + * @enum {string} + */ + security_events?: "read" | "write"; + /** + * @description The level of permission to grant the access token to manage just a single file. + * @enum {string} + */ + single_file?: "read" | "write"; + /** + * @description The level of permission to grant the access token for commit statuses. + * @enum {string} + */ + statuses?: "read" | "write"; + /** + * @description The level of permission to grant the access token to manage Dependabot alerts. + * @enum {string} + */ + vulnerability_alerts?: "read" | "write"; + /** + * @description The level of permission to grant the access token to update GitHub Actions workflow files. 
+ * @enum {string} + */ + workflows?: "write"; + /** + * @description The level of permission to grant the access token for organization teams and members. + * @enum {string} + */ + members?: "read" | "write"; + /** + * @description The level of permission to grant the access token to manage access to an organization. + * @enum {string} + */ + organization_administration?: "read" | "write"; + /** + * @description The level of permission to grant the access token for custom roles management. This property is in beta and is subject to change. + * @enum {string} + */ + organization_custom_roles?: "read" | "write"; + /** + * @description The level of permission to grant the access token to manage the post-receive hooks for an organization. + * @enum {string} + */ + organization_hooks?: "read" | "write"; + /** + * @description The level of permission to grant the access token for viewing an organization's plan. + * @enum {string} + */ + organization_plan?: "read"; + /** + * @description The level of permission to grant the access token to manage organization projects and projects beta (where available). + * @enum {string} + */ + organization_projects?: "read" | "write" | "admin"; + /** + * @description The level of permission to grant the access token for organization packages published to GitHub Packages. + * @enum {string} + */ + organization_packages?: "read" | "write"; + /** + * @description The level of permission to grant the access token to manage organization secrets. + * @enum {string} + */ + organization_secrets?: "read" | "write"; + /** + * @description The level of permission to grant the access token to view and manage GitHub Actions self-hosted runners available to an organization. + * @enum {string} + */ + organization_self_hosted_runners?: "read" | "write"; + /** + * @description The level of permission to grant the access token to view and manage users blocked by the organization. + * @enum {string} + */ + organization_user_blocking?: "read" | "write"; + /** + * @description The level of permission to grant the access token to manage team discussions and related comments. + * @enum {string} + */ + team_discussions?: "read" | "write"; + }; + /** + * Installation + * @description Installation + */ + installation: { + /** + * @description The ID of the installation. + * @example 1 + */ + id: number; + account: + | (Partial & + Partial) + | null; + /** + * @description Describe whether all repositories have been selected or there's a selection involved + * @enum {string} + */ + repository_selection: "all" | "selected"; + /** + * Format: uri + * @example https://api.github.com/installations/1/access_tokens + */ + access_tokens_url: string; + /** + * Format: uri + * @example https://api.github.com/installation/repositories + */ + repositories_url: string; + /** + * Format: uri + * @example https://github.com/organizations/github/settings/installations/1 + */ + html_url: string; + /** @example 1 */ + app_id: number; + /** @description The ID of the user or organization this token is being scoped to. 
*/ + target_id: number; + /** @example Organization */ + target_type: string; + permissions: components["schemas"]["app-permissions"]; + events: string[]; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + /** @example config.yaml */ + single_file_name: string | null; + /** @example true */ + has_multiple_single_files?: boolean; + /** + * @example [ + * "config.yml", + * ".github/issue_TEMPLATE.md" + * ] + */ + single_file_paths?: string[]; + /** @example github-actions */ + app_slug: string; + suspended_by: components["schemas"]["nullable-simple-user"]; + /** Format: date-time */ + suspended_at: string | null; + /** @example "test_13f1e99741e3e004@d7e1eb0bc0a1ba12.com" */ + contact_email?: string | null; + }; + /** + * License Simple + * @description License Simple + */ + "nullable-license-simple": { + /** @example mit */ + key: string; + /** @example MIT License */ + name: string; + /** + * Format: uri + * @example https://api.github.com/licenses/mit + */ + url: string | null; + /** @example MIT */ + spdx_id: string | null; + /** @example MDc6TGljZW5zZW1pdA== */ + node_id: string; + /** Format: uri */ + html_url?: string; + } | null; + /** + * Repository + * @description A git repository + */ + repository: { + /** + * @description Unique identifier of the repository + * @example 42 + */ + id: number; + /** @example MDEwOlJlcG9zaXRvcnkxMjk2MjY5 */ + node_id: string; + /** + * @description The name of the repository. + * @example Team Environment + */ + name: string; + /** @example octocat/Hello-World */ + full_name: string; + license: components["schemas"]["nullable-license-simple"]; + organization?: components["schemas"]["nullable-simple-user"]; + forks: number; + permissions?: { + admin: boolean; + pull: boolean; + triage?: boolean; + push: boolean; + maintain?: boolean; + }; + owner: components["schemas"]["simple-user"]; + /** + * @description Whether the repository is private or public. + * @default false + */ + private: boolean; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World + */ + html_url: string; + /** @example This your first repo! 
*/ + description: string | null; + fork: boolean; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World + */ + url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref} */ + archive_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/assignees{/user} */ + assignees_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/blobs{/sha} */ + blobs_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/branches{/branch} */ + branches_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator} */ + collaborators_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/comments{/number} */ + comments_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/commits{/sha} */ + commits_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/compare/{base}...{head} */ + compare_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/contents/{+path} */ + contents_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/contributors + */ + contributors_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/deployments + */ + deployments_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/downloads + */ + downloads_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/events + */ + events_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/forks + */ + forks_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/commits{/sha} */ + git_commits_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/refs{/sha} */ + git_refs_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/tags{/sha} */ + git_tags_url: string; + /** @example git:github.com/octocat/Hello-World.git */ + git_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/issues/comments{/number} */ + issue_comment_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/issues/events{/number} */ + issue_events_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/issues{/number} */ + issues_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/keys{/key_id} */ + keys_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/labels{/name} */ + labels_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/languages + */ + languages_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/merges + */ + merges_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/milestones{/number} */ + milestones_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating} */ + notifications_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/pulls{/number} */ + pulls_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/releases{/id} */ + releases_url: string; + /** @example git@github.com:octocat/Hello-World.git */ + ssh_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/stargazers 
+ */ + stargazers_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/statuses/{sha} */ + statuses_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/subscribers + */ + subscribers_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/subscription + */ + subscription_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/tags + */ + tags_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/teams + */ + teams_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/trees{/sha} */ + trees_url: string; + /** @example https://github.com/octocat/Hello-World.git */ + clone_url: string; + /** + * Format: uri + * @example git:git.example.com/octocat/Hello-World + */ + mirror_url: string | null; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/hooks + */ + hooks_url: string; + /** + * Format: uri + * @example https://svn.github.com/octocat/Hello-World + */ + svn_url: string; + /** + * Format: uri + * @example https://github.com + */ + homepage: string | null; + language: string | null; + /** @example 9 */ + forks_count: number; + /** @example 80 */ + stargazers_count: number; + /** @example 80 */ + watchers_count: number; + /** + * @description The size of the repository. Size is calculated hourly. When a repository is initially created, the size is 0. + * @example 108 + */ + size: number; + /** + * @description The default branch of the repository. + * @example master + */ + default_branch: string; + /** @example 0 */ + open_issues_count: number; + /** + * @description Whether this repository acts as a template that can be used to generate new repositories. + * @default false + * @example true + */ + is_template?: boolean; + topics?: string[]; + /** + * @description Whether issues are enabled. + * @default true + * @example true + */ + has_issues: boolean; + /** + * @description Whether projects are enabled. + * @default true + * @example true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + * @example true + */ + has_wiki: boolean; + has_pages: boolean; + /** + * @description Whether downloads are enabled. + * @default true + * @example true + */ + has_downloads: boolean; + /** + * @description Whether the repository is archived. + * @default false + */ + archived: boolean; + /** @description Returns whether or not this repository disabled. */ + disabled: boolean; + /** + * @description The repository visibility: public, private, or internal. + * @default public + */ + visibility?: string; + /** + * Format: date-time + * @example 2011-01-26T19:06:43Z + */ + pushed_at: string | null; + /** + * Format: date-time + * @example 2011-01-26T19:01:12Z + */ + created_at: string | null; + /** + * Format: date-time + * @example 2011-01-26T19:14:43Z + */ + updated_at: string | null; + /** + * @description Whether to allow rebase merges for pull requests. 
+ * @default true + * @example true + */ + allow_rebase_merge?: boolean; + template_repository?: { + id?: number; + node_id?: string; + name?: string; + full_name?: string; + owner?: { + login?: string; + id?: number; + node_id?: string; + avatar_url?: string; + gravatar_id?: string; + url?: string; + html_url?: string; + followers_url?: string; + following_url?: string; + gists_url?: string; + starred_url?: string; + subscriptions_url?: string; + organizations_url?: string; + repos_url?: string; + events_url?: string; + received_events_url?: string; + type?: string; + site_admin?: boolean; + }; + private?: boolean; + html_url?: string; + description?: string; + fork?: boolean; + url?: string; + archive_url?: string; + assignees_url?: string; + blobs_url?: string; + branches_url?: string; + collaborators_url?: string; + comments_url?: string; + commits_url?: string; + compare_url?: string; + contents_url?: string; + contributors_url?: string; + deployments_url?: string; + downloads_url?: string; + events_url?: string; + forks_url?: string; + git_commits_url?: string; + git_refs_url?: string; + git_tags_url?: string; + git_url?: string; + issue_comment_url?: string; + issue_events_url?: string; + issues_url?: string; + keys_url?: string; + labels_url?: string; + languages_url?: string; + merges_url?: string; + milestones_url?: string; + notifications_url?: string; + pulls_url?: string; + releases_url?: string; + ssh_url?: string; + stargazers_url?: string; + statuses_url?: string; + subscribers_url?: string; + subscription_url?: string; + tags_url?: string; + teams_url?: string; + trees_url?: string; + clone_url?: string; + mirror_url?: string; + hooks_url?: string; + svn_url?: string; + homepage?: string; + language?: string; + forks_count?: number; + stargazers_count?: number; + watchers_count?: number; + size?: number; + default_branch?: string; + open_issues_count?: number; + is_template?: boolean; + topics?: string[]; + has_issues?: boolean; + has_projects?: boolean; + has_wiki?: boolean; + has_pages?: boolean; + has_downloads?: boolean; + archived?: boolean; + disabled?: boolean; + visibility?: string; + pushed_at?: string; + created_at?: string; + updated_at?: string; + permissions?: { + admin?: boolean; + maintain?: boolean; + push?: boolean; + triage?: boolean; + pull?: boolean; + }; + allow_rebase_merge?: boolean; + temp_clone_token?: string; + allow_squash_merge?: boolean; + allow_auto_merge?: boolean; + delete_branch_on_merge?: boolean; + allow_update_branch?: boolean; + use_squash_pr_title_as_default?: boolean; + /** + * @description The default value for a squash merge commit title: + * + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + /** + * @description The default value for a squash merge commit message: + * + * - `PR_BODY` - default to the pull request's body. + * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: "PR_BODY" | "COMMIT_MESSAGES" | "BLANK"; + /** + * @description The default value for a merge commit title. + * + * - `PR_TITLE` - default to the pull request's title. + * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name). 
+ * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** + * @description The default value for a merge commit message. + * + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + allow_merge_commit?: boolean; + subscribers_count?: number; + network_count?: number; + } | null; + temp_clone_token?: string; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + * @example true + */ + allow_squash_merge?: boolean; + /** + * @description Whether to allow Auto-merge to be used on pull requests. + * @default false + * @example false + */ + allow_auto_merge?: boolean; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + * @example false + */ + delete_branch_on_merge?: boolean; + /** + * @description Whether or not a pull request head branch that is behind its base branch can always be updated even if it is not required to be up to date before merging. + * @default false + * @example false + */ + allow_update_branch?: boolean; + /** + * @deprecated + * @description Whether a squash merge commit can use the pull request title as default. **This property has been deprecated. Please use `squash_merge_commit_title` instead. + * @default false + */ + use_squash_pr_title_as_default?: boolean; + /** + * @description The default value for a squash merge commit title: + * + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + /** + * @description The default value for a squash merge commit message: + * + * - `PR_BODY` - default to the pull request's body. + * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: "PR_BODY" | "COMMIT_MESSAGES" | "BLANK"; + /** + * @description The default value for a merge commit title. + * + * - `PR_TITLE` - default to the pull request's title. + * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name). + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** + * @description The default value for a merge commit message. + * + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + /** + * @description Whether to allow merge commits for pull requests. 
+ * @default true + * @example true + */ + allow_merge_commit?: boolean; + /** @description Whether to allow forking this repo */ + allow_forking?: boolean; + /** + * @description Whether to require contributors to sign off on web-based commits + * @default false + */ + web_commit_signoff_required?: boolean; + subscribers_count?: number; + network_count?: number; + open_issues: number; + watchers: number; + master_branch?: string; + /** @example "2020-07-09T00:17:42Z" */ + starred_at?: string; + /** @description Whether anonymous git access is enabled for this repository */ + anonymous_access_enabled?: boolean; + }; + /** + * Installation Token + * @description Authentication token for a GitHub App installed on a user or org. + */ + "installation-token": { + token: string; + expires_at: string; + permissions?: components["schemas"]["app-permissions"]; + /** @enum {string} */ + repository_selection?: "all" | "selected"; + repositories?: components["schemas"]["repository"][]; + /** @example README.md */ + single_file?: string; + /** @example true */ + has_multiple_single_files?: boolean; + /** + * @example [ + * "config.yml", + * ".github/issue_TEMPLATE.md" + * ] + */ + single_file_paths?: string[]; + }; + /** Scoped Installation */ + "nullable-scoped-installation": { + permissions: components["schemas"]["app-permissions"]; + /** + * @description Describe whether all repositories have been selected or there's a selection involved + * @enum {string} + */ + repository_selection: "all" | "selected"; + /** @example config.yaml */ + single_file_name: string | null; + /** @example true */ + has_multiple_single_files?: boolean; + /** + * @example [ + * "config.yml", + * ".github/issue_TEMPLATE.md" + * ] + */ + single_file_paths?: string[]; + /** + * Format: uri + * @example https://api.github.com/users/octocat/repos + */ + repositories_url: string; + account: components["schemas"]["simple-user"]; + } | null; + /** + * Authorization + * @description The authorization for an OAuth app, GitHub App, or a Personal Access Token. + */ + authorization: { + id: number; + /** Format: uri */ + url: string; + /** @description A list of scopes that this authorization is in. 
*/ + scopes: string[] | null; + token: string; + token_last_eight: string | null; + hashed_token: string | null; + app: { + client_id: string; + name: string; + /** Format: uri */ + url: string; + }; + note: string | null; + /** Format: uri */ + note_url: string | null; + /** Format: date-time */ + updated_at: string; + /** Format: date-time */ + created_at: string; + fingerprint: string | null; + user?: components["schemas"]["nullable-simple-user"]; + installation?: components["schemas"]["nullable-scoped-installation"]; + /** Format: date-time */ + expires_at: string | null; + }; + /** + * Code Of Conduct + * @description Code Of Conduct + */ + "code-of-conduct": { + /** @example contributor_covenant */ + key: string; + /** @example Contributor Covenant */ + name: string; + /** + * Format: uri + * @example https://api.github.com/codes_of_conduct/contributor_covenant + */ + url: string; + /** + * @example # Contributor Covenant Code of Conduct + * + * ## Our Pledge + * + * In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation. + * + * ## Our Standards + * + * Examples of behavior that contributes to creating a positive environment include: + * + * * Using welcoming and inclusive language + * * Being respectful of differing viewpoints and experiences + * * Gracefully accepting constructive criticism + * * Focusing on what is best for the community + * * Showing empathy towards other community members + * + * Examples of unacceptable behavior by participants include: + * + * * The use of sexualized language or imagery and unwelcome sexual attention or advances + * * Trolling, insulting/derogatory comments, and personal or political attacks + * * Public or private harassment + * * Publishing others' private information, such as a physical or electronic address, without explicit permission + * * Other conduct which could reasonably be considered inappropriate in a professional setting + * + * ## Our Responsibilities + * + * Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response + * to any instances of unacceptable behavior. + * + * Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful. + * + * ## Scope + * + * This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, + * posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers. + * + * ## Enforcement + * + * Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at [EMAIL]. 
The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately. + * + * Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership. + * + * ## Attribution + * + * This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version] + * + * [homepage]: http://contributor-covenant.org + * [version]: http://contributor-covenant.org/version/1/4/ + */ + body?: string; + /** Format: uri */ + html_url: string | null; + }; + /** + * Server Statistics Proxy Endpoint + * @description Response of S4 Proxy endpoint that provides GHES statistics + */ + "server-statistics": { + server_id?: string; + collection_date?: string; + schema_version?: string; + ghes_version?: string; + host_name?: string; + github_connect?: { + features_enabled?: string[]; + }; + ghe_stats?: { + comments?: { + total_commit_comments?: number; + total_gist_comments?: number; + total_issue_comments?: number; + total_pull_request_comments?: number; + }; + gists?: { + total_gists?: number; + private_gists?: number; + public_gists?: number; + }; + hooks?: { + total_hooks?: number; + active_hooks?: number; + inactive_hooks?: number; + }; + issues?: { + total_issues?: number; + open_issues?: number; + closed_issues?: number; + }; + milestones?: { + total_milestones?: number; + open_milestones?: number; + closed_milestones?: number; + }; + orgs?: { + total_orgs?: number; + disabled_orgs?: number; + total_teams?: number; + total_team_members?: number; + }; + pages?: { + total_pages?: number; + }; + pulls?: { + total_pulls?: number; + merged_pulls?: number; + mergeable_pulls?: number; + unmergeable_pulls?: number; + }; + repos?: { + total_repos?: number; + root_repos?: number; + fork_repos?: number; + org_repos?: number; + total_pushes?: number; + total_wikis?: number; + }; + users?: { + total_users?: number; + admin_users?: number; + suspended_users?: number; + }; + }; + dormant_users?: { + total_dormant_users?: number; + dormancy_threshold?: string; + }; + }[]; + "actions-cache-usage-org-enterprise": { + /** @description The count of active caches across all repositories of an enterprise or an organization. */ + total_active_caches_count: number; + /** @description The total size in bytes of all active cache items across all repositories of an enterprise or an organization. */ + total_active_caches_size_in_bytes: number; + }; + /** + * @description The policy that controls the organizations in the enterprise that are allowed to run GitHub Actions. + * @enum {string} + */ + "enabled-organizations": "all" | "none" | "selected"; + /** + * @description The permissions policy that controls the actions and reusable workflows that are allowed to run. + * @enum {string} + */ + "allowed-actions": "all" | "local_only" | "selected"; + /** @description The API URL to use to get or set the actions and reusable workflows that are allowed to run, when `allowed_actions` is set to `selected`. 
*/ + "selected-actions-url": string; + "actions-enterprise-permissions": { + enabled_organizations: components["schemas"]["enabled-organizations"]; + /** @description The API URL to use to get or set the selected organizations that are allowed to run GitHub Actions, when `enabled_organizations` is set to `selected`. */ + selected_organizations_url?: string; + allowed_actions?: components["schemas"]["allowed-actions"]; + selected_actions_url?: components["schemas"]["selected-actions-url"]; + }; + /** + * Organization Simple + * @description Organization Simple + */ + "organization-simple": { + /** @example github */ + login: string; + /** @example 1 */ + id: number; + /** @example MDEyOk9yZ2FuaXphdGlvbjE= */ + node_id: string; + /** + * Format: uri + * @example https://api.github.com/orgs/github + */ + url: string; + /** + * Format: uri + * @example https://api.github.com/orgs/github/repos + */ + repos_url: string; + /** + * Format: uri + * @example https://api.github.com/orgs/github/events + */ + events_url: string; + /** @example https://api.github.com/orgs/github/hooks */ + hooks_url: string; + /** @example https://api.github.com/orgs/github/issues */ + issues_url: string; + /** @example https://api.github.com/orgs/github/members{/member} */ + members_url: string; + /** @example https://api.github.com/orgs/github/public_members{/member} */ + public_members_url: string; + /** @example https://github.com/images/error/octocat_happy.gif */ + avatar_url: string; + /** @example A great organization */ + description: string | null; + }; + "selected-actions": { + /** @description Whether GitHub-owned actions are allowed. For example, this includes the actions in the `actions` organization. */ + github_owned_allowed?: boolean; + /** @description Whether actions from GitHub Marketplace verified creators are allowed. Set to `true` to allow all actions by GitHub Marketplace verified creators. */ + verified_allowed?: boolean; + /** @description Specifies a list of string-matching patterns to allow specific action(s) and reusable workflow(s). Wildcards, tags, and SHAs are allowed. For example, `monalisa/octocat@*`, `monalisa/octocat@v2`, `monalisa/*`." */ + patterns_allowed?: string[]; + }; + /** + * @description The default workflow permissions granted to the GITHUB_TOKEN when running workflows. + * @enum {string} + */ + "actions-default-workflow-permissions": "read" | "write"; + /** @description Whether GitHub Actions can approve pull requests. Enabling this can be a security risk. */ + "actions-can-approve-pull-request-reviews": boolean; + "actions-get-default-workflow-permissions": { + default_workflow_permissions: components["schemas"]["actions-default-workflow-permissions"]; + can_approve_pull_request_reviews: components["schemas"]["actions-can-approve-pull-request-reviews"]; + }; + "actions-set-default-workflow-permissions": { + default_workflow_permissions?: components["schemas"]["actions-default-workflow-permissions"]; + can_approve_pull_request_reviews?: components["schemas"]["actions-can-approve-pull-request-reviews"]; + }; + "runner-groups-enterprise": { + id: number; + name: string; + visibility: string; + default: boolean; + selected_organizations_url?: string; + runners_url: string; + allows_public_repositories: boolean; + /** + * @description If `true`, the `restricted_to_workflows` and `selected_workflows` fields cannot be modified. 
+ * @default false + */ + workflow_restrictions_read_only?: boolean; + /** + * @description If `true`, the runner group will be restricted to running only the workflows specified in the `selected_workflows` array. + * @default false + */ + restricted_to_workflows?: boolean; + /** @description List of workflows the runner group should be allowed to run. This setting will be ignored unless `restricted_to_workflows` is set to `true`. */ + selected_workflows?: string[]; + }; + /** + * Self hosted runner label + * @description A label for a self hosted runner + */ + "runner-label": { + /** @description Unique identifier of the label. */ + id?: number; + /** @description Name of the label. */ + name: string; + /** + * @description The type of label. Read-only labels are applied automatically when the runner is configured. + * @enum {string} + */ + type?: "read-only" | "custom"; + }; + /** + * Self hosted runners + * @description A self hosted runner + */ + runner: { + /** + * @description The id of the runner. + * @example 5 + */ + id: number; + /** + * @description The name of the runner. + * @example iMac + */ + name: string; + /** + * @description The Operating System of the runner. + * @example macos + */ + os: string; + /** + * @description The status of the runner. + * @example online + */ + status: string; + busy: boolean; + labels: components["schemas"]["runner-label"][]; + }; + /** + * Runner Application + * @description Runner Application + */ + "runner-application": { + os: string; + architecture: string; + download_url: string; + filename: string; + /** @description A short lived bearer token used to download the runner, if needed. */ + temp_download_token?: string; + sha256_checksum?: string; + }; + /** + * Authentication Token + * @description Authentication Token + */ + "authentication-token": { + /** + * @description The token used for authentication + * @example v1.1f699f1069f60xxx + */ + token: string; + /** + * Format: date-time + * @description The time this token expires + * @example 2016-07-11T22:14:10Z + */ + expires_at: string; + /** + * @example { + * "issues": "read", + * "deployments": "write" + * } + */ + permissions?: { [key: string]: unknown }; + /** @description The repositories this token has access to */ + repositories?: components["schemas"]["repository"][]; + /** @example config.yaml */ + single_file?: string | null; + /** + * @description Describe whether all repositories have been selected or there's a selection involved + * @enum {string} + */ + repository_selection?: "all" | "selected"; + }; + /** @description The name of the tool used to generate the code scanning analysis. */ + "code-scanning-analysis-tool-name": string; + /** @description The GUID of the tool used to generate the code scanning analysis, if provided in the uploaded SARIF data. */ + "code-scanning-analysis-tool-guid": string | null; + /** + * @description State of a code scanning alert. + * @enum {string} + */ + "code-scanning-alert-state": "open" | "closed" | "dismissed" | "fixed"; + /** @description The security alert number. */ + "alert-number": number; + /** + * Format: date-time + * @description The time that the alert was created in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + "alert-created-at": string; + /** + * Format: date-time + * @description The time that the alert was last updated in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + "alert-updated-at": string; + /** + * Format: uri + * @description The REST API URL of the alert resource. 
+ */ + "alert-url": string; + /** + * Format: uri + * @description The GitHub URL of the alert resource. + */ + "alert-html-url": string; + /** + * Format: uri + * @description The REST API URL for fetching the list of instances for an alert. + */ + "alert-instances-url": string; + /** + * Format: date-time + * @description The time that the alert was no longer detected and was considered fixed in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + "code-scanning-alert-fixed-at": string | null; + /** + * Format: date-time + * @description The time that the alert was dismissed in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + "code-scanning-alert-dismissed-at": string | null; + /** + * @description **Required when the state is dismissed.** The reason for dismissing or closing the alert. + * @enum {string|null} + */ + "code-scanning-alert-dismissed-reason": + | (null | "false positive" | "won't fix" | "used in tests") + | null; + /** @description The dismissal comment associated with the dismissal of the alert. */ + "code-scanning-alert-dismissed-comment": string | null; + "code-scanning-alert-rule": { + /** @description A unique identifier for the rule used to detect the alert. */ + id?: string | null; + /** @description The name of the rule used to detect the alert. */ + name?: string; + /** + * @description The severity of the alert. + * @enum {string|null} + */ + severity?: ("none" | "note" | "warning" | "error") | null; + /** + * @description The security severity of the alert. + * @enum {string|null} + */ + security_severity_level?: ("low" | "medium" | "high" | "critical") | null; + /** @description A short description of the rule used to detect the alert. */ + description?: string; + /** @description description of the rule used to detect the alert. */ + full_description?: string; + /** @description A set of tags applicable for the rule. */ + tags?: string[] | null; + /** @description Detailed documentation for the rule as GitHub Flavored Markdown. */ + help?: string | null; + /** @description A link to the documentation for the rule used to detect the alert. */ + help_uri?: string | null; + }; + /** @description The version of the tool used to generate the code scanning analysis. */ + "code-scanning-analysis-tool-version": string | null; + "code-scanning-analysis-tool": { + name?: components["schemas"]["code-scanning-analysis-tool-name"]; + version?: components["schemas"]["code-scanning-analysis-tool-version"]; + guid?: components["schemas"]["code-scanning-analysis-tool-guid"]; + }; + /** + * @description The full Git reference, formatted as `refs/heads/`, + * `refs/pull//merge`, or `refs/pull//head`. + */ + "code-scanning-ref": string; + /** @description Identifies the configuration under which the analysis was executed. For example, in GitHub Actions this includes the workflow filename and job name. */ + "code-scanning-analysis-analysis-key": string; + /** @description Identifies the variable values associated with the environment in which the analysis that generated this alert instance was performed, such as the language that was analyzed. */ + "code-scanning-alert-environment": string; + /** @description Identifies the configuration under which the analysis was executed. Used to distinguish between multiple analyses for the same tool and commit, but performed on different languages or different parts of the code. */ + "code-scanning-analysis-category": string; + /** @description Describe a region within a file for the alert. 
*/ + "code-scanning-alert-location": { + path?: string; + start_line?: number; + end_line?: number; + start_column?: number; + end_column?: number; + }; + /** + * @description A classification of the file. For example to identify it as generated. + * @enum {string|null} + */ + "code-scanning-alert-classification": + | ("source" | "generated" | "test" | "library") + | null; + "code-scanning-alert-instance": { + ref?: components["schemas"]["code-scanning-ref"]; + analysis_key?: components["schemas"]["code-scanning-analysis-analysis-key"]; + environment?: components["schemas"]["code-scanning-alert-environment"]; + category?: components["schemas"]["code-scanning-analysis-category"]; + state?: components["schemas"]["code-scanning-alert-state"]; + commit_sha?: string; + message?: { + text?: string; + }; + location?: components["schemas"]["code-scanning-alert-location"]; + html_url?: string; + /** + * @description Classifications that have been applied to the file that triggered the alert. + * For example identifying it as documentation, or a generated file. + */ + classifications?: components["schemas"]["code-scanning-alert-classification"][]; + }; + /** + * Simple Repository + * @description Simple Repository + */ + "simple-repository": { + /** + * @description A unique identifier of the repository. + * @example 1296269 + */ + id: number; + /** + * @description The GraphQL identifier of the repository. + * @example MDEwOlJlcG9zaXRvcnkxMjk2MjY5 + */ + node_id: string; + /** + * @description The name of the repository. + * @example Hello-World + */ + name: string; + /** + * @description The full, globally unique, name of the repository. + * @example octocat/Hello-World + */ + full_name: string; + owner: components["schemas"]["simple-user"]; + /** @description Whether the repository is private. */ + private: boolean; + /** + * Format: uri + * @description The URL to view the repository on GitHub.com. + * @example https://github.com/octocat/Hello-World + */ + html_url: string; + /** + * @description The repository description. + * @example This your first repo! + */ + description: string | null; + /** @description Whether the repository is a fork. */ + fork: boolean; + /** + * Format: uri + * @description The URL to get more information about the repository from the GitHub API. + * @example https://api.github.com/repos/octocat/Hello-World + */ + url: string; + /** + * @description A template for the API URL to download the repository as an archive. + * @example https://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref} + */ + archive_url: string; + /** + * @description A template for the API URL to list the available assignees for issues in the repository. + * @example https://api.github.com/repos/octocat/Hello-World/assignees{/user} + */ + assignees_url: string; + /** + * @description A template for the API URL to create or retrieve a raw Git blob in the repository. + * @example https://api.github.com/repos/octocat/Hello-World/git/blobs{/sha} + */ + blobs_url: string; + /** + * @description A template for the API URL to get information about branches in the repository. + * @example https://api.github.com/repos/octocat/Hello-World/branches{/branch} + */ + branches_url: string; + /** + * @description A template for the API URL to get information about collaborators of the repository. 
+ * @example https://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator} + */ + collaborators_url: string; + /** + * @description A template for the API URL to get information about comments on the repository. + * @example https://api.github.com/repos/octocat/Hello-World/comments{/number} + */ + comments_url: string; + /** + * @description A template for the API URL to get information about commits on the repository. + * @example https://api.github.com/repos/octocat/Hello-World/commits{/sha} + */ + commits_url: string; + /** + * @description A template for the API URL to compare two commits or refs. + * @example https://api.github.com/repos/octocat/Hello-World/compare/{base}...{head} + */ + compare_url: string; + /** + * @description A template for the API URL to get the contents of the repository. + * @example https://api.github.com/repos/octocat/Hello-World/contents/{+path} + */ + contents_url: string; + /** + * Format: uri + * @description A template for the API URL to list the contributors to the repository. + * @example https://api.github.com/repos/octocat/Hello-World/contributors + */ + contributors_url: string; + /** + * Format: uri + * @description The API URL to list the deployments of the repository. + * @example https://api.github.com/repos/octocat/Hello-World/deployments + */ + deployments_url: string; + /** + * Format: uri + * @description The API URL to list the downloads on the repository. + * @example https://api.github.com/repos/octocat/Hello-World/downloads + */ + downloads_url: string; + /** + * Format: uri + * @description The API URL to list the events of the repository. + * @example https://api.github.com/repos/octocat/Hello-World/events + */ + events_url: string; + /** + * Format: uri + * @description The API URL to list the forks of the repository. + * @example https://api.github.com/repos/octocat/Hello-World/forks + */ + forks_url: string; + /** + * @description A template for the API URL to get information about Git commits of the repository. + * @example https://api.github.com/repos/octocat/Hello-World/git/commits{/sha} + */ + git_commits_url: string; + /** + * @description A template for the API URL to get information about Git refs of the repository. + * @example https://api.github.com/repos/octocat/Hello-World/git/refs{/sha} + */ + git_refs_url: string; + /** + * @description A template for the API URL to get information about Git tags of the repository. + * @example https://api.github.com/repos/octocat/Hello-World/git/tags{/sha} + */ + git_tags_url: string; + /** + * @description A template for the API URL to get information about issue comments on the repository. + * @example https://api.github.com/repos/octocat/Hello-World/issues/comments{/number} + */ + issue_comment_url: string; + /** + * @description A template for the API URL to get information about issue events on the repository. + * @example https://api.github.com/repos/octocat/Hello-World/issues/events{/number} + */ + issue_events_url: string; + /** + * @description A template for the API URL to get information about issues on the repository. + * @example https://api.github.com/repos/octocat/Hello-World/issues{/number} + */ + issues_url: string; + /** + * @description A template for the API URL to get information about deploy keys on the repository. + * @example https://api.github.com/repos/octocat/Hello-World/keys{/key_id} + */ + keys_url: string; + /** + * @description A template for the API URL to get information about labels of the repository. 
+ * @example https://api.github.com/repos/octocat/Hello-World/labels{/name} + */ + labels_url: string; + /** + * Format: uri + * @description The API URL to get information about the languages of the repository. + * @example https://api.github.com/repos/octocat/Hello-World/languages + */ + languages_url: string; + /** + * Format: uri + * @description The API URL to merge branches in the repository. + * @example https://api.github.com/repos/octocat/Hello-World/merges + */ + merges_url: string; + /** + * @description A template for the API URL to get information about milestones of the repository. + * @example https://api.github.com/repos/octocat/Hello-World/milestones{/number} + */ + milestones_url: string; + /** + * @description A template for the API URL to get information about notifications on the repository. + * @example https://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating} + */ + notifications_url: string; + /** + * @description A template for the API URL to get information about pull requests on the repository. + * @example https://api.github.com/repos/octocat/Hello-World/pulls{/number} + */ + pulls_url: string; + /** + * @description A template for the API URL to get information about releases on the repository. + * @example https://api.github.com/repos/octocat/Hello-World/releases{/id} + */ + releases_url: string; + /** + * Format: uri + * @description The API URL to list the stargazers on the repository. + * @example https://api.github.com/repos/octocat/Hello-World/stargazers + */ + stargazers_url: string; + /** + * @description A template for the API URL to get information about statuses of a commit. + * @example https://api.github.com/repos/octocat/Hello-World/statuses/{sha} + */ + statuses_url: string; + /** + * Format: uri + * @description The API URL to list the subscribers on the repository. + * @example https://api.github.com/repos/octocat/Hello-World/subscribers + */ + subscribers_url: string; + /** + * Format: uri + * @description The API URL to subscribe to notifications for this repository. + * @example https://api.github.com/repos/octocat/Hello-World/subscription + */ + subscription_url: string; + /** + * Format: uri + * @description The API URL to get information about tags on the repository. + * @example https://api.github.com/repos/octocat/Hello-World/tags + */ + tags_url: string; + /** + * Format: uri + * @description The API URL to list the teams on the repository. + * @example https://api.github.com/repos/octocat/Hello-World/teams + */ + teams_url: string; + /** + * @description A template for the API URL to create or retrieve a raw Git tree of the repository. + * @example https://api.github.com/repos/octocat/Hello-World/git/trees{/sha} + */ + trees_url: string; + /** + * Format: uri + * @description The API URL to list the hooks on the repository. 
+ * @example https://api.github.com/repos/octocat/Hello-World/hooks + */ + hooks_url: string; + }; + "code-scanning-organization-alert-items": { + number: components["schemas"]["alert-number"]; + created_at: components["schemas"]["alert-created-at"]; + updated_at?: components["schemas"]["alert-updated-at"]; + url: components["schemas"]["alert-url"]; + html_url: components["schemas"]["alert-html-url"]; + instances_url: components["schemas"]["alert-instances-url"]; + state: components["schemas"]["code-scanning-alert-state"]; + fixed_at?: components["schemas"]["code-scanning-alert-fixed-at"]; + dismissed_by: components["schemas"]["nullable-simple-user"]; + dismissed_at: components["schemas"]["code-scanning-alert-dismissed-at"]; + dismissed_reason: components["schemas"]["code-scanning-alert-dismissed-reason"]; + dismissed_comment?: components["schemas"]["code-scanning-alert-dismissed-comment"]; + rule: components["schemas"]["code-scanning-alert-rule"]; + tool: components["schemas"]["code-scanning-analysis-tool"]; + most_recent_instance: components["schemas"]["code-scanning-alert-instance"]; + repository: components["schemas"]["simple-repository"]; + }; + /** + * Format: date-time + * @description The time that the alert was last updated in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + "nullable-alert-updated-at": string | null; + /** + * @description Sets the state of the secret scanning alert. You must provide `resolution` when you set the state to `resolved`. + * @enum {string} + */ + "secret-scanning-alert-state": "open" | "resolved"; + /** + * @description **Required when the `state` is `resolved`.** The reason for resolving the alert. + * @enum {string|null} + */ + "secret-scanning-alert-resolution": + | (null | "false_positive" | "wont_fix" | "revoked" | "used_in_tests") + | null; + "organization-secret-scanning-alert": { + number?: components["schemas"]["alert-number"]; + created_at?: components["schemas"]["alert-created-at"]; + updated_at?: components["schemas"]["nullable-alert-updated-at"]; + url?: components["schemas"]["alert-url"]; + html_url?: components["schemas"]["alert-html-url"]; + /** + * Format: uri + * @description The REST API URL of the code locations for this alert. + */ + locations_url?: string; + state?: components["schemas"]["secret-scanning-alert-state"]; + resolution?: components["schemas"]["secret-scanning-alert-resolution"]; + /** + * Format: date-time + * @description The time that the alert was resolved in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + resolved_at?: string | null; + resolved_by?: components["schemas"]["nullable-simple-user"]; + /** @description The type of secret that secret scanning detected. */ + secret_type?: string; + /** + * @description User-friendly name for the detected secret, matching the `secret_type`. + * For a list of built-in patterns, see "[Secret scanning patterns](https://docs.github.com/code-security/secret-scanning/secret-scanning-patterns#supported-secrets-for-advanced-security)." + */ + secret_type_display_name?: string; + /** @description The secret that was detected. */ + secret?: string; + repository?: components["schemas"]["simple-repository"]; + /** @description Whether push protection was bypassed for the detected secret. */ + push_protection_bypassed?: boolean | null; + push_protection_bypassed_by?: components["schemas"]["nullable-simple-user"]; + /** + * Format: date-time + * @description The time that push protection was bypassed in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. 
+ */ + push_protection_bypassed_at?: string | null; + /** @description The comment that was optionally added when this alert was closed */ + resolution_comment?: string | null; + }; + "advanced-security-active-committers-user": { + user_login: string; + /** @example 2021-11-03 */ + last_pushed_date: string; + }; + "advanced-security-active-committers-repository": { + /** @example octocat/Hello-World */ + name: string; + /** @example 25 */ + advanced_security_committers: number; + advanced_security_committers_breakdown: components["schemas"]["advanced-security-active-committers-user"][]; + }; + "advanced-security-active-committers": { + /** @example 25 */ + total_advanced_security_committers?: number; + /** @example 2 */ + total_count?: number; + repositories: components["schemas"]["advanced-security-active-committers-repository"][]; + }; + /** + * Actor + * @description Actor + */ + actor: { + id: number; + login: string; + display_login?: string; + gravatar_id: string | null; + /** Format: uri */ + url: string; + /** Format: uri */ + avatar_url: string; + }; + /** + * Milestone + * @description A collection of related issues and pull requests. + */ + "nullable-milestone": { + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/milestones/1 + */ + url: string; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/milestones/v1.0 + */ + html_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/milestones/1/labels + */ + labels_url: string; + /** @example 1002604 */ + id: number; + /** @example MDk6TWlsZXN0b25lMTAwMjYwNA== */ + node_id: string; + /** + * @description The number of the milestone. + * @example 42 + */ + number: number; + /** + * @description The state of the milestone. + * @default open + * @example open + * @enum {string} + */ + state: "open" | "closed"; + /** + * @description The title of the milestone. + * @example v1.0 + */ + title: string; + /** @example Tracking milestone for version 1.0 */ + description: string | null; + creator: components["schemas"]["nullable-simple-user"]; + /** @example 4 */ + open_issues: number; + /** @example 8 */ + closed_issues: number; + /** + * Format: date-time + * @example 2011-04-10T20:09:31Z + */ + created_at: string; + /** + * Format: date-time + * @example 2014-03-03T18:58:10Z + */ + updated_at: string; + /** + * Format: date-time + * @example 2013-02-12T13:22:01Z + */ + closed_at: string | null; + /** + * Format: date-time + * @example 2012-10-09T23:39:01Z + */ + due_on: string | null; + } | null; + /** + * GitHub app + * @description GitHub apps are a new way to extend GitHub. They can be installed directly on organizations and user accounts and granted access to specific repositories. They come with granular permissions and built-in webhooks. GitHub apps are first class actors within GitHub. + */ + "nullable-integration": { + /** + * @description Unique identifier of the GitHub app + * @example 37 + */ + id: number; + /** + * @description The slug name of the GitHub app + * @example probot-owners + */ + slug?: string; + /** @example MDExOkludGVncmF0aW9uMQ== */ + node_id: string; + owner: components["schemas"]["nullable-simple-user"]; + /** + * @description The name of the GitHub app + * @example Probot Owners + */ + name: string; + /** @example The description of the app. 
*/ + description: string | null; + /** + * Format: uri + * @example https://example.com + */ + external_url: string; + /** + * Format: uri + * @example https://github.com/apps/super-ci + */ + html_url: string; + /** + * Format: date-time + * @example 2017-07-08T16:18:44-04:00 + */ + created_at: string; + /** + * Format: date-time + * @example 2017-07-08T16:18:44-04:00 + */ + updated_at: string; + /** + * @description The set of permissions for the GitHub app + * @example { + * "issues": "read", + * "deployments": "write" + * } + */ + permissions: { + issues?: string; + checks?: string; + metadata?: string; + contents?: string; + deployments?: string; + } & { [key: string]: string }; + /** + * @description The list of events for the GitHub app + * @example [ + * "label", + * "deployment" + * ] + */ + events: string[]; + /** + * @description The number of installations associated with the GitHub app + * @example 5 + */ + installations_count?: number; + /** @example "Iv1.25b5d1e65ffc4022" */ + client_id?: string; + /** @example "1d4b2097ac622ba702d19de498f005747a8b21d3" */ + client_secret?: string; + /** @example "6fba8f2fc8a7e8f2cca5577eddd82ca7586b3b6b" */ + webhook_secret?: string | null; + /** @example "-----BEGIN RSA PRIVATE KEY-----\nMIIEogIBAAKCAQEArYxrNYD/iT5CZVpRJu4rBKmmze3PVmT/gCo2ATUvDvZTPTey\nxcGJ3vvrJXazKk06pN05TN29o98jrYz4cengG3YGsXPNEpKsIrEl8NhbnxapEnM9\nJCMRe0P5JcPsfZlX6hmiT7136GRWiGOUba2X9+HKh8QJVLG5rM007TBER9/z9mWm\nrJuNh+m5l320oBQY/Qq3A7wzdEfZw8qm/mIN0FCeoXH1L6B8xXWaAYBwhTEh6SSn\nZHlO1Xu1JWDmAvBCi0RO5aRSKM8q9QEkvvHP4yweAtK3N8+aAbZ7ovaDhyGz8r6r\nzhU1b8Uo0Z2ysf503WqzQgIajr7Fry7/kUwpgQIDAQABAoIBADwJp80Ko1xHPZDy\nfcCKBDfIuPvkmSW6KumbsLMaQv1aGdHDwwTGv3t0ixSay8CGlxMRtRDyZPib6SvQ\n6OH/lpfpbMdW2ErkksgtoIKBVrDilfrcAvrNZu7NxRNbhCSvN8q0s4ICecjbbVQh\nnueSdlA6vGXbW58BHMq68uRbHkP+k+mM9U0mDJ1HMch67wlg5GbayVRt63H7R2+r\nVxcna7B80J/lCEjIYZznawgiTvp3MSanTglqAYi+m1EcSsP14bJIB9vgaxS79kTu\noiSo93leJbBvuGo8QEiUqTwMw4tDksmkLsoqNKQ1q9P7LZ9DGcujtPy4EZsamSJT\ny8OJt0ECgYEA2lxOxJsQk2kI325JgKFjo92mQeUObIvPfSNWUIZQDTjniOI6Gv63\nGLWVFrZcvQBWjMEQraJA9xjPbblV8PtfO87MiJGLWCHFxmPz2dzoedN+2Coxom8m\nV95CLz8QUShuao6u/RYcvUaZEoYs5bHcTmy5sBK80JyEmafJPtCQVxMCgYEAy3ar\nZr3yv4xRPEPMat4rseswmuMooSaK3SKub19WFI5IAtB/e7qR1Rj9JhOGcZz+OQrl\nT78O2OFYlgOIkJPvRMrPpK5V9lslc7tz1FSh3BZMRGq5jSyD7ETSOQ0c8T2O/s7v\nbeEPbVbDe4mwvM24XByH0GnWveVxaDl51ABD65sCgYB3ZAspUkOA5egVCh8kNpnd\nSd6SnuQBE3ySRlT2WEnCwP9Ph6oPgn+oAfiPX4xbRqkL8q/k0BdHQ4h+zNwhk7+h\nWtPYRAP1Xxnc/F+jGjb+DVaIaKGU18MWPg7f+FI6nampl3Q0KvfxwX0GdNhtio8T\nTj1E+SnFwh56SRQuxSh2gwKBgHKjlIO5NtNSflsUYFM+hyQiPiqnHzddfhSG+/3o\nm5nNaSmczJesUYreH5San7/YEy2UxAugvP7aSY2MxB+iGsiJ9WD2kZzTUlDZJ7RV\nUzWsoqBR+eZfVJ2FUWWvy8TpSG6trh4dFxImNtKejCR1TREpSiTV3Zb1dmahK9GV\nrK9NAoGAbBxRLoC01xfxCTgt5BDiBcFVh4fp5yYKwavJPLzHSpuDOrrI9jDn1oKN\nonq5sDU1i391zfQvdrbX4Ova48BN+B7p63FocP/MK5tyyBoT8zQEk2+vWDOw7H/Z\nu5dTCPxTIsoIwUw1I+7yIxqJzLPFgR2gVBwY1ra/8iAqCj+zeBw=\n-----END RSA PRIVATE KEY-----\n" */ + pem?: string; + } | null; + /** + * author_association + * @description How the author is associated with the repository. 
+ * @example OWNER + * @enum {string} + */ + "author-association": + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; + /** Reaction Rollup */ + "reaction-rollup": { + /** Format: uri */ + url: string; + total_count: number; + "+1": number; + "-1": number; + laugh: number; + confused: number; + heart: number; + hooray: number; + eyes: number; + rocket: number; + }; + /** + * Issue + * @description Issues are a great way to keep track of tasks, enhancements, and bugs for your projects. + */ + issue: { + id: number; + node_id: string; + /** + * Format: uri + * @description URL for the issue + * @example https://api.github.com/repositories/42/issues/1 + */ + url: string; + /** Format: uri */ + repository_url: string; + labels_url: string; + /** Format: uri */ + comments_url: string; + /** Format: uri */ + events_url: string; + /** Format: uri */ + html_url: string; + /** + * @description Number uniquely identifying the issue within its repository + * @example 42 + */ + number: number; + /** + * @description State of the issue; either 'open' or 'closed' + * @example open + */ + state: string; + /** + * @description The reason for the current state + * @example not_planned + * @enum {string|null} + */ + state_reason?: ("completed" | "reopened" | "not_planned") | null; + /** + * @description Title of the issue + * @example Widget creation fails in Safari on OS X 10.8 + */ + title: string; + /** + * @description Contents of the issue + * @example It looks like the new widget form is broken on Safari. When I try and create the widget, Safari crashes. This is reproducible on 10.8, but not 10.9. Maybe a browser bug? + */ + body?: string | null; + user: components["schemas"]["nullable-simple-user"]; + /** + * @description Labels to associate with this issue; pass one or more label names to replace the set of labels on this issue; send an empty array to clear all labels from the issue; note that the labels are silently dropped for users without push access to the repository + * @example [ + * "bug", + * "registration" + * ] + */ + labels: ( + | string + | { + /** Format: int64 */ + id?: number; + node_id?: string; + /** Format: uri */ + url?: string; + name?: string; + description?: string | null; + color?: string | null; + default?: boolean; + } + )[]; + assignee: components["schemas"]["nullable-simple-user"]; + assignees?: components["schemas"]["simple-user"][] | null; + milestone: components["schemas"]["nullable-milestone"]; + locked: boolean; + active_lock_reason?: string | null; + comments: number; + pull_request?: { + /** Format: date-time */ + merged_at?: string | null; + /** Format: uri */ + diff_url: string | null; + /** Format: uri */ + html_url: string | null; + /** Format: uri */ + patch_url: string | null; + /** Format: uri */ + url: string | null; + }; + /** Format: date-time */ + closed_at: string | null; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + draft?: boolean; + closed_by?: components["schemas"]["nullable-simple-user"]; + body_html?: string; + body_text?: string; + /** Format: uri */ + timeline_url?: string; + repository?: components["schemas"]["repository"]; + performed_via_github_app?: components["schemas"]["nullable-integration"]; + author_association: components["schemas"]["author-association"]; + reactions?: components["schemas"]["reaction-rollup"]; + }; + /** + * Issue Comment + * @description Comments provide a way for people to 
collaborate on an issue. + */ + "issue-comment": { + /** + * @description Unique identifier of the issue comment + * @example 42 + */ + id: number; + node_id: string; + /** + * Format: uri + * @description URL for the issue comment + * @example https://api.github.com/repositories/42/issues/comments/1 + */ + url: string; + /** + * @description Contents of the issue comment + * @example What version of Safari were you using when you observed this bug? + */ + body?: string; + body_text?: string; + body_html?: string; + /** Format: uri */ + html_url: string; + user: components["schemas"]["nullable-simple-user"]; + /** + * Format: date-time + * @example 2011-04-14T16:00:49Z + */ + created_at: string; + /** + * Format: date-time + * @example 2011-04-14T16:00:49Z + */ + updated_at: string; + /** Format: uri */ + issue_url: string; + author_association: components["schemas"]["author-association"]; + performed_via_github_app?: components["schemas"]["nullable-integration"]; + reactions?: components["schemas"]["reaction-rollup"]; + }; + /** + * Event + * @description Event + */ + event: { + id: string; + type: string | null; + actor: components["schemas"]["actor"]; + repo: { + id: number; + name: string; + /** Format: uri */ + url: string; + }; + org?: components["schemas"]["actor"]; + payload: { + action?: string; + issue?: components["schemas"]["issue"]; + comment?: components["schemas"]["issue-comment"]; + pages?: { + page_name?: string; + title?: string; + summary?: string | null; + action?: string; + sha?: string; + html_url?: string; + }[]; + }; + public: boolean; + /** Format: date-time */ + created_at: string | null; + }; + /** + * Link With Type + * @description Hypermedia Link with Type + */ + "link-with-type": { + href: string; + type: string; + }; + /** + * Feed + * @description Feed + */ + feed: { + /** @example https://github.com/timeline */ + timeline_url: string; + /** @example https://github.com/{user} */ + user_url: string; + /** @example https://github.com/octocat */ + current_user_public_url?: string; + /** @example https://github.com/octocat.private?token=abc123 */ + current_user_url?: string; + /** @example https://github.com/octocat.private.actor?token=abc123 */ + current_user_actor_url?: string; + /** @example https://github.com/octocat-org */ + current_user_organization_url?: string; + /** + * @example [ + * "https://github.com/organizations/github/octocat.private.atom?token=abc123" + * ] + */ + current_user_organization_urls?: string[]; + /** @example https://github.com/security-advisories */ + security_advisories_url?: string; + _links: { + timeline: components["schemas"]["link-with-type"]; + user: components["schemas"]["link-with-type"]; + security_advisories?: components["schemas"]["link-with-type"]; + current_user?: components["schemas"]["link-with-type"]; + current_user_public?: components["schemas"]["link-with-type"]; + current_user_actor?: components["schemas"]["link-with-type"]; + current_user_organization?: components["schemas"]["link-with-type"]; + current_user_organizations?: components["schemas"]["link-with-type"][]; + }; + }; + /** + * Base Gist + * @description Base Gist + */ + "base-gist": { + /** Format: uri */ + url: string; + /** Format: uri */ + forks_url: string; + /** Format: uri */ + commits_url: string; + id: string; + node_id: string; + /** Format: uri */ + git_pull_url: string; + /** Format: uri */ + git_push_url: string; + /** Format: uri */ + html_url: string; + files: { + [key: string]: { + filename?: string; + type?: string; + language?: string; 
+ raw_url?: string; + size?: number; + }; + }; + public: boolean; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + description: string | null; + comments: number; + user: components["schemas"]["nullable-simple-user"]; + /** Format: uri */ + comments_url: string; + owner?: components["schemas"]["simple-user"]; + truncated?: boolean; + forks?: unknown[]; + history?: unknown[]; + }; + /** + * Public User + * @description Public User + */ + "public-user": { + login: string; + id: number; + node_id: string; + /** Format: uri */ + avatar_url: string; + gravatar_id: string | null; + /** Format: uri */ + url: string; + /** Format: uri */ + html_url: string; + /** Format: uri */ + followers_url: string; + following_url: string; + gists_url: string; + starred_url: string; + /** Format: uri */ + subscriptions_url: string; + /** Format: uri */ + organizations_url: string; + /** Format: uri */ + repos_url: string; + events_url: string; + /** Format: uri */ + received_events_url: string; + type: string; + site_admin: boolean; + name: string | null; + company: string | null; + blog: string | null; + location: string | null; + /** Format: email */ + email: string | null; + hireable: boolean | null; + bio: string | null; + twitter_username?: string | null; + public_repos: number; + public_gists: number; + followers: number; + following: number; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + plan?: { + collaborators: number; + name: string; + space: number; + private_repos: number; + }; + /** Format: date-time */ + suspended_at?: string | null; + /** @example 1 */ + private_gists?: number; + /** @example 2 */ + total_private_repos?: number; + /** @example 2 */ + owned_private_repos?: number; + /** @example 1 */ + disk_usage?: number; + /** @example 3 */ + collaborators?: number; + }; + /** + * Gist History + * @description Gist History + */ + "gist-history": { + user?: components["schemas"]["nullable-simple-user"]; + version?: string; + /** Format: date-time */ + committed_at?: string; + change_status?: { + total?: number; + additions?: number; + deletions?: number; + }; + /** Format: uri */ + url?: string; + }; + /** + * Gist Simple + * @description Gist Simple + */ + "gist-simple": { + /** @deprecated */ + forks?: + | { + id?: string; + /** Format: uri */ + url?: string; + user?: components["schemas"]["public-user"]; + /** Format: date-time */ + created_at?: string; + /** Format: date-time */ + updated_at?: string; + }[] + | null; + /** @deprecated */ + history?: components["schemas"]["gist-history"][] | null; + /** + * Gist + * @description Gist + */ + fork_of?: { + /** Format: uri */ + url: string; + /** Format: uri */ + forks_url: string; + /** Format: uri */ + commits_url: string; + id: string; + node_id: string; + /** Format: uri */ + git_pull_url: string; + /** Format: uri */ + git_push_url: string; + /** Format: uri */ + html_url: string; + files: { + [key: string]: { + filename?: string; + type?: string; + language?: string; + raw_url?: string; + size?: number; + }; + }; + public: boolean; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + description: string | null; + comments: number; + user: components["schemas"]["nullable-simple-user"]; + /** Format: uri */ + comments_url: string; + owner?: components["schemas"]["nullable-simple-user"]; + truncated?: boolean; + forks?: unknown[]; + history?: unknown[]; + } | null; + url?: string; + forks_url?: 
string; + commits_url?: string; + id?: string; + node_id?: string; + git_pull_url?: string; + git_push_url?: string; + html_url?: string; + files?: { + [key: string]: { + filename?: string; + type?: string; + language?: string; + raw_url?: string; + size?: number; + truncated?: boolean; + content?: string; + } | null; + }; + public?: boolean; + created_at?: string; + updated_at?: string; + description?: string | null; + comments?: number; + user?: string | null; + comments_url?: string; + owner?: components["schemas"]["simple-user"]; + truncated?: boolean; + }; + /** + * Gist Comment + * @description A comment made to a gist. + */ + "gist-comment": { + /** @example 1 */ + id: number; + /** @example MDExOkdpc3RDb21tZW50MQ== */ + node_id: string; + /** + * Format: uri + * @example https://api.github.com/gists/a6db0bec360bb87e9418/comments/1 + */ + url: string; + /** + * @description The comment text. + * @example Body of the attachment + */ + body: string; + user: components["schemas"]["nullable-simple-user"]; + /** + * Format: date-time + * @example 2011-04-18T23:23:56Z + */ + created_at: string; + /** + * Format: date-time + * @example 2011-04-18T23:23:56Z + */ + updated_at: string; + author_association: components["schemas"]["author-association"]; + }; + /** + * Gist Commit + * @description Gist Commit + */ + "gist-commit": { + /** + * Format: uri + * @example https://api.github.com/gists/aa5a315d61ae9438b18d/57a7f021a713b1c5a6a199b54cc514735d2d462f + */ + url: string; + /** @example 57a7f021a713b1c5a6a199b54cc514735d2d462f */ + version: string; + user: components["schemas"]["nullable-simple-user"]; + change_status: { + total?: number; + additions?: number; + deletions?: number; + }; + /** + * Format: date-time + * @example 2010-04-14T02:15:15Z + */ + committed_at: string; + }; + /** + * Gitignore Template + * @description Gitignore Template + */ + "gitignore-template": { + /** @example C */ + name: string; + /** + * @example # Object files + * *.o + * + * # Libraries + * *.lib + * *.a + * + * # Shared objects (inc. Windows DLLs) + * *.dll + * *.so + * *.so.* + * *.dylib + * + * # Executables + * *.exe + * *.out + * *.app + */ + source: string; + }; + /** + * License Simple + * @description License Simple + */ + "license-simple": { + /** @example mit */ + key: string; + /** @example MIT License */ + name: string; + /** + * Format: uri + * @example https://api.github.com/licenses/mit + */ + url: string | null; + /** @example MIT */ + spdx_id: string | null; + /** @example MDc6TGljZW5zZW1pdA== */ + node_id: string; + /** Format: uri */ + html_url?: string; + }; + /** + * License + * @description License + */ + license: { + /** @example mit */ + key: string; + /** @example MIT License */ + name: string; + /** @example MIT */ + spdx_id: string | null; + /** + * Format: uri + * @example https://api.github.com/licenses/mit + */ + url: string | null; + /** @example MDc6TGljZW5zZW1pdA== */ + node_id: string; + /** + * Format: uri + * @example http://choosealicense.com/licenses/mit/ + */ + html_url: string; + /** @example A permissive license that is short and to the point. It lets people do anything with your code with proper attribution and without warranty. */ + description: string; + /** @example Create a text file (typically named LICENSE or LICENSE.txt) in the root of your source code and copy the text of the license into the file. Replace [year] with the current year and [fullname] with the name (or names) of the copyright holders. 
*/ + implementation: string; + /** + * @example [ + * "commercial-use", + * "modifications", + * "distribution", + * "sublicense", + * "private-use" + * ] + */ + permissions: string[]; + /** + * @example [ + * "include-copyright" + * ] + */ + conditions: string[]; + /** + * @example [ + * "no-liability" + * ] + */ + limitations: string[]; + /** + * @example + * + * The MIT License (MIT) + * + * Copyright (c) [year] [fullname] + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + body: string; + /** @example true */ + featured: boolean; + }; + /** + * Marketplace Listing Plan + * @description Marketplace Listing Plan + */ + "marketplace-listing-plan": { + /** + * Format: uri + * @example https://api.github.com/marketplace_listing/plans/1313 + */ + url: string; + /** + * Format: uri + * @example https://api.github.com/marketplace_listing/plans/1313/accounts + */ + accounts_url: string; + /** @example 1313 */ + id: number; + /** @example 3 */ + number: number; + /** @example Pro */ + name: string; + /** @example A professional-grade CI solution */ + description: string; + /** @example 1099 */ + monthly_price_in_cents: number; + /** @example 11870 */ + yearly_price_in_cents: number; + /** @example flat-rate */ + price_model: string; + /** @example true */ + has_free_trial: boolean; + unit_name: string | null; + /** @example published */ + state: string; + /** + * @example [ + * "Up to 25 private repositories", + * "11 concurrent builds" + * ] + */ + bullets: string[]; + }; + /** + * Marketplace Purchase + * @description Marketplace Purchase + */ + "marketplace-purchase": { + url: string; + type: string; + id: number; + login: string; + organization_billing_email?: string; + email?: string | null; + marketplace_pending_change?: { + is_installed?: boolean; + effective_date?: string; + unit_count?: number | null; + id?: number; + plan?: components["schemas"]["marketplace-listing-plan"]; + } | null; + marketplace_purchase: { + billing_cycle?: string; + next_billing_date?: string | null; + is_installed?: boolean; + unit_count?: number | null; + on_free_trial?: boolean; + free_trial_ends_on?: string | null; + updated_at?: string; + plan?: components["schemas"]["marketplace-listing-plan"]; + }; + }; + /** + * Api Overview + * @description Api Overview + */ + "api-overview": { + /** @example true */ + verifiable_password_authentication: boolean; + ssh_key_fingerprints?: { + SHA256_RSA?: string; + SHA256_DSA?: string; + SHA256_ECDSA?: string; + SHA256_ED25519?: string; + }; + 
/** + * @example [ + * "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIOMqqnkVzrm0SdG6UOoqKLsabgH5C9okWi0dh2l9GKJl" + * ] + */ + ssh_keys?: string[]; + /** + * @example [ + * "127.0.0.1/32" + * ] + */ + hooks?: string[]; + /** + * @example [ + * "127.0.0.1/32" + * ] + */ + web?: string[]; + /** + * @example [ + * "127.0.0.1/32" + * ] + */ + api?: string[]; + /** + * @example [ + * "127.0.0.1/32" + * ] + */ + git?: string[]; + /** + * @example [ + * "13.65.0.0/16", + * "157.55.204.33/32", + * "2a01:111:f403:f90c::/62" + * ] + */ + packages?: string[]; + /** + * @example [ + * "192.30.252.153/32", + * "192.30.252.154/32" + * ] + */ + pages?: string[]; + /** + * @example [ + * "54.158.161.132", + * "54.226.70.38" + * ] + */ + importer?: string[]; + /** + * @example [ + * "13.64.0.0/16", + * "13.65.0.0/16" + * ] + */ + actions?: string[]; + /** + * @example [ + * "192.168.7.15/32", + * "192.168.7.16/32" + * ] + */ + dependabot?: string[]; + }; + /** + * Repository + * @description A git repository + */ + "nullable-repository": { + /** + * @description Unique identifier of the repository + * @example 42 + */ + id: number; + /** @example MDEwOlJlcG9zaXRvcnkxMjk2MjY5 */ + node_id: string; + /** + * @description The name of the repository. + * @example Team Environment + */ + name: string; + /** @example octocat/Hello-World */ + full_name: string; + license: components["schemas"]["nullable-license-simple"]; + organization?: components["schemas"]["nullable-simple-user"]; + forks: number; + permissions?: { + admin: boolean; + pull: boolean; + triage?: boolean; + push: boolean; + maintain?: boolean; + }; + owner: components["schemas"]["simple-user"]; + /** + * @description Whether the repository is private or public. + * @default false + */ + private: boolean; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World + */ + html_url: string; + /** @example This your first repo! 
*/ + description: string | null; + fork: boolean; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World + */ + url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref} */ + archive_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/assignees{/user} */ + assignees_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/blobs{/sha} */ + blobs_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/branches{/branch} */ + branches_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator} */ + collaborators_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/comments{/number} */ + comments_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/commits{/sha} */ + commits_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/compare/{base}...{head} */ + compare_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/contents/{+path} */ + contents_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/contributors + */ + contributors_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/deployments + */ + deployments_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/downloads + */ + downloads_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/events + */ + events_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/forks + */ + forks_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/commits{/sha} */ + git_commits_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/refs{/sha} */ + git_refs_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/tags{/sha} */ + git_tags_url: string; + /** @example git:github.com/octocat/Hello-World.git */ + git_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/issues/comments{/number} */ + issue_comment_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/issues/events{/number} */ + issue_events_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/issues{/number} */ + issues_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/keys{/key_id} */ + keys_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/labels{/name} */ + labels_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/languages + */ + languages_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/merges + */ + merges_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/milestones{/number} */ + milestones_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating} */ + notifications_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/pulls{/number} */ + pulls_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/releases{/id} */ + releases_url: string; + /** @example git@github.com:octocat/Hello-World.git */ + ssh_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/stargazers 
+ */ + stargazers_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/statuses/{sha} */ + statuses_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/subscribers + */ + subscribers_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/subscription + */ + subscription_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/tags + */ + tags_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/teams + */ + teams_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/trees{/sha} */ + trees_url: string; + /** @example https://github.com/octocat/Hello-World.git */ + clone_url: string; + /** + * Format: uri + * @example git:git.example.com/octocat/Hello-World + */ + mirror_url: string | null; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/hooks + */ + hooks_url: string; + /** + * Format: uri + * @example https://svn.github.com/octocat/Hello-World + */ + svn_url: string; + /** + * Format: uri + * @example https://github.com + */ + homepage: string | null; + language: string | null; + /** @example 9 */ + forks_count: number; + /** @example 80 */ + stargazers_count: number; + /** @example 80 */ + watchers_count: number; + /** + * @description The size of the repository. Size is calculated hourly. When a repository is initially created, the size is 0. + * @example 108 + */ + size: number; + /** + * @description The default branch of the repository. + * @example master + */ + default_branch: string; + /** @example 0 */ + open_issues_count: number; + /** + * @description Whether this repository acts as a template that can be used to generate new repositories. + * @default false + * @example true + */ + is_template?: boolean; + topics?: string[]; + /** + * @description Whether issues are enabled. + * @default true + * @example true + */ + has_issues: boolean; + /** + * @description Whether projects are enabled. + * @default true + * @example true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + * @example true + */ + has_wiki: boolean; + has_pages: boolean; + /** + * @description Whether downloads are enabled. + * @default true + * @example true + */ + has_downloads: boolean; + /** + * @description Whether the repository is archived. + * @default false + */ + archived: boolean; + /** @description Returns whether or not this repository disabled. */ + disabled: boolean; + /** + * @description The repository visibility: public, private, or internal. + * @default public + */ + visibility?: string; + /** + * Format: date-time + * @example 2011-01-26T19:06:43Z + */ + pushed_at: string | null; + /** + * Format: date-time + * @example 2011-01-26T19:01:12Z + */ + created_at: string | null; + /** + * Format: date-time + * @example 2011-01-26T19:14:43Z + */ + updated_at: string | null; + /** + * @description Whether to allow rebase merges for pull requests. 
+ * @default true + * @example true + */ + allow_rebase_merge?: boolean; + template_repository?: { + id?: number; + node_id?: string; + name?: string; + full_name?: string; + owner?: { + login?: string; + id?: number; + node_id?: string; + avatar_url?: string; + gravatar_id?: string; + url?: string; + html_url?: string; + followers_url?: string; + following_url?: string; + gists_url?: string; + starred_url?: string; + subscriptions_url?: string; + organizations_url?: string; + repos_url?: string; + events_url?: string; + received_events_url?: string; + type?: string; + site_admin?: boolean; + }; + private?: boolean; + html_url?: string; + description?: string; + fork?: boolean; + url?: string; + archive_url?: string; + assignees_url?: string; + blobs_url?: string; + branches_url?: string; + collaborators_url?: string; + comments_url?: string; + commits_url?: string; + compare_url?: string; + contents_url?: string; + contributors_url?: string; + deployments_url?: string; + downloads_url?: string; + events_url?: string; + forks_url?: string; + git_commits_url?: string; + git_refs_url?: string; + git_tags_url?: string; + git_url?: string; + issue_comment_url?: string; + issue_events_url?: string; + issues_url?: string; + keys_url?: string; + labels_url?: string; + languages_url?: string; + merges_url?: string; + milestones_url?: string; + notifications_url?: string; + pulls_url?: string; + releases_url?: string; + ssh_url?: string; + stargazers_url?: string; + statuses_url?: string; + subscribers_url?: string; + subscription_url?: string; + tags_url?: string; + teams_url?: string; + trees_url?: string; + clone_url?: string; + mirror_url?: string; + hooks_url?: string; + svn_url?: string; + homepage?: string; + language?: string; + forks_count?: number; + stargazers_count?: number; + watchers_count?: number; + size?: number; + default_branch?: string; + open_issues_count?: number; + is_template?: boolean; + topics?: string[]; + has_issues?: boolean; + has_projects?: boolean; + has_wiki?: boolean; + has_pages?: boolean; + has_downloads?: boolean; + archived?: boolean; + disabled?: boolean; + visibility?: string; + pushed_at?: string; + created_at?: string; + updated_at?: string; + permissions?: { + admin?: boolean; + maintain?: boolean; + push?: boolean; + triage?: boolean; + pull?: boolean; + }; + allow_rebase_merge?: boolean; + temp_clone_token?: string; + allow_squash_merge?: boolean; + allow_auto_merge?: boolean; + delete_branch_on_merge?: boolean; + allow_update_branch?: boolean; + use_squash_pr_title_as_default?: boolean; + /** + * @description The default value for a squash merge commit title: + * + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + /** + * @description The default value for a squash merge commit message: + * + * - `PR_BODY` - default to the pull request's body. + * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: "PR_BODY" | "COMMIT_MESSAGES" | "BLANK"; + /** + * @description The default value for a merge commit title. + * + * - `PR_TITLE` - default to the pull request's title. + * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name). 
+ * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** + * @description The default value for a merge commit message. + * + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + allow_merge_commit?: boolean; + subscribers_count?: number; + network_count?: number; + } | null; + temp_clone_token?: string; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + * @example true + */ + allow_squash_merge?: boolean; + /** + * @description Whether to allow Auto-merge to be used on pull requests. + * @default false + * @example false + */ + allow_auto_merge?: boolean; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + * @example false + */ + delete_branch_on_merge?: boolean; + /** + * @description Whether or not a pull request head branch that is behind its base branch can always be updated even if it is not required to be up to date before merging. + * @default false + * @example false + */ + allow_update_branch?: boolean; + /** + * @deprecated + * @description Whether a squash merge commit can use the pull request title as default. **This property has been deprecated. Please use `squash_merge_commit_title` instead. + * @default false + */ + use_squash_pr_title_as_default?: boolean; + /** + * @description The default value for a squash merge commit title: + * + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + /** + * @description The default value for a squash merge commit message: + * + * - `PR_BODY` - default to the pull request's body. + * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: "PR_BODY" | "COMMIT_MESSAGES" | "BLANK"; + /** + * @description The default value for a merge commit title. + * + * - `PR_TITLE` - default to the pull request's title. + * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name). + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** + * @description The default value for a merge commit message. + * + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + /** + * @description Whether to allow merge commits for pull requests. 
+ * @default true + * @example true + */ + allow_merge_commit?: boolean; + /** @description Whether to allow forking this repo */ + allow_forking?: boolean; + /** + * @description Whether to require contributors to sign off on web-based commits + * @default false + */ + web_commit_signoff_required?: boolean; + subscribers_count?: number; + network_count?: number; + open_issues: number; + watchers: number; + master_branch?: string; + /** @example "2020-07-09T00:17:42Z" */ + starred_at?: string; + /** @description Whether anonymous git access is enabled for this repository */ + anonymous_access_enabled?: boolean; + } | null; + /** + * Minimal Repository + * @description Minimal Repository + */ + "minimal-repository": { + /** @example 1296269 */ + id: number; + /** @example MDEwOlJlcG9zaXRvcnkxMjk2MjY5 */ + node_id: string; + /** @example Hello-World */ + name: string; + /** @example octocat/Hello-World */ + full_name: string; + owner: components["schemas"]["simple-user"]; + private: boolean; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World + */ + html_url: string; + /** @example This your first repo! */ + description: string | null; + fork: boolean; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World + */ + url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref} */ + archive_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/assignees{/user} */ + assignees_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/blobs{/sha} */ + blobs_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/branches{/branch} */ + branches_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator} */ + collaborators_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/comments{/number} */ + comments_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/commits{/sha} */ + commits_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/compare/{base}...{head} */ + compare_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/contents/{+path} */ + contents_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/contributors + */ + contributors_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/deployments + */ + deployments_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/downloads + */ + downloads_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/events + */ + events_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/forks + */ + forks_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/commits{/sha} */ + git_commits_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/refs{/sha} */ + git_refs_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/tags{/sha} */ + git_tags_url: string; + git_url?: string; + /** @example http://api.github.com/repos/octocat/Hello-World/issues/comments{/number} */ + issue_comment_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/issues/events{/number} */ + issue_events_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/issues{/number} */ + 
issues_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/keys{/key_id} */ + keys_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/labels{/name} */ + labels_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/languages + */ + languages_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/merges + */ + merges_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/milestones{/number} */ + milestones_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating} */ + notifications_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/pulls{/number} */ + pulls_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/releases{/id} */ + releases_url: string; + ssh_url?: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/stargazers + */ + stargazers_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/statuses/{sha} */ + statuses_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/subscribers + */ + subscribers_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/subscription + */ + subscription_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/tags + */ + tags_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/teams + */ + teams_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/trees{/sha} */ + trees_url: string; + clone_url?: string; + mirror_url?: string | null; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/hooks + */ + hooks_url: string; + svn_url?: string; + homepage?: string | null; + language?: string | null; + forks_count?: number; + stargazers_count?: number; + watchers_count?: number; + /** @description The size of the repository. Size is calculated hourly. When a repository is initially created, the size is 0. 
*/ + size?: number; + default_branch?: string; + open_issues_count?: number; + is_template?: boolean; + topics?: string[]; + has_issues?: boolean; + has_projects?: boolean; + has_wiki?: boolean; + has_pages?: boolean; + has_downloads?: boolean; + archived?: boolean; + disabled?: boolean; + visibility?: string; + /** + * Format: date-time + * @example 2011-01-26T19:06:43Z + */ + pushed_at?: string | null; + /** + * Format: date-time + * @example 2011-01-26T19:01:12Z + */ + created_at?: string | null; + /** + * Format: date-time + * @example 2011-01-26T19:14:43Z + */ + updated_at?: string | null; + permissions?: { + admin?: boolean; + maintain?: boolean; + push?: boolean; + triage?: boolean; + pull?: boolean; + }; + /** @example admin */ + role_name?: string; + template_repository?: components["schemas"]["nullable-repository"]; + temp_clone_token?: string; + delete_branch_on_merge?: boolean; + subscribers_count?: number; + network_count?: number; + code_of_conduct?: components["schemas"]["code-of-conduct"]; + license?: { + key?: string; + name?: string; + spdx_id?: string; + url?: string; + node_id?: string; + } | null; + /** @example 0 */ + forks?: number; + /** @example 0 */ + open_issues?: number; + /** @example 0 */ + watchers?: number; + allow_forking?: boolean; + /** @example false */ + web_commit_signoff_required?: boolean; + }; + /** + * Thread + * @description Thread + */ + thread: { + id: string; + repository: components["schemas"]["minimal-repository"]; + subject: { + title: string; + url: string; + latest_comment_url: string; + type: string; + }; + reason: string; + unread: boolean; + updated_at: string; + last_read_at: string | null; + url: string; + /** @example https://api.github.com/notifications/threads/2/subscription */ + subscription_url: string; + }; + /** + * Thread Subscription + * @description Thread Subscription + */ + "thread-subscription": { + /** @example true */ + subscribed: boolean; + ignored: boolean; + reason: string | null; + /** + * Format: date-time + * @example 2012-10-06T21:34:12Z + */ + created_at: string | null; + /** + * Format: uri + * @example https://api.github.com/notifications/threads/1/subscription + */ + url: string; + /** + * Format: uri + * @example https://api.github.com/notifications/threads/1 + */ + thread_url?: string; + /** + * Format: uri + * @example https://api.github.com/repos/1 + */ + repository_url?: string; + }; + /** + * Organization Custom Repository Role + * @description Custom repository roles created by organization administrators + */ + "organization-custom-repository-role": { + /** @description The unique identifier of the custom role. */ + id: number; + /** @description The name of the custom role. */ + name: string; + /** @description A short description about who this role is for or what permissions it grants. */ + description?: string | null; + /** + * @description The system role from which this role inherits permissions. + * @enum {string} + */ + base_role?: "read" | "triage" | "write" | "maintain"; + /** @description A list of additional permissions included in this role. */ + permissions?: string[]; + organization?: components["schemas"]["simple-user"]; + /** Format: date-time */ + created_at?: string; + /** Format: date-time */ + updated_at?: string; + }; + /** + * Codespaces Secret + * @description Secrets for a GitHub Codespace. 
+ */ + "codespaces-org-secret": { + /** + * @description The name of the secret + * @example SECRET_NAME + */ + name: string; + /** + * Format: date-time + * @description The date and time at which the secret was created, in ISO 8601 format':' YYYY-MM-DDTHH:MM:SSZ. + */ + created_at: string; + /** + * Format: date-time + * @description The date and time at which the secret was created, in ISO 8601 format':' YYYY-MM-DDTHH:MM:SSZ. + */ + updated_at: string; + /** + * @description The type of repositories in the organization that the secret is visible to + * @enum {string} + */ + visibility: "all" | "private" | "selected"; + /** + * Format: uri + * @description The API URL at which the list of repositories this secret is visible to can be retrieved + * @example https://api.github.com/orgs/ORGANIZATION/codespaces/secrets/SECRET_NAME/repositories + */ + selected_repositories_url?: string; + }; + /** + * CodespacesPublicKey + * @description The public key used for setting Codespaces secrets. + */ + "codespaces-public-key": { + /** + * @description The identifier for the key. + * @example 1234567 + */ + key_id: string; + /** + * @description The Base64 encoded public key. + * @example hBT5WZEj8ZoOv6TYJsfWq7MxTEQopZO5/IT3ZCVQPzs= + */ + key: string; + /** @example 2 */ + id?: number; + /** @example https://api.github.com/user/keys/2 */ + url?: string; + /** @example ssh-rsa AAAAB3NzaC1yc2EAAA */ + title?: string; + /** @example 2011-01-26T19:01:12Z */ + created_at?: string; + }; + /** + * Empty Object + * @description An object without any properties. + */ + "empty-object": { [key: string]: unknown }; + /** + * Organization Full + * @description Organization Full + */ + "organization-full": { + /** @example github */ + login: string; + /** @example 1 */ + id: number; + /** @example MDEyOk9yZ2FuaXphdGlvbjE= */ + node_id: string; + /** + * Format: uri + * @example https://api.github.com/orgs/github + */ + url: string; + /** + * Format: uri + * @example https://api.github.com/orgs/github/repos + */ + repos_url: string; + /** + * Format: uri + * @example https://api.github.com/orgs/github/events + */ + events_url: string; + /** @example https://api.github.com/orgs/github/hooks */ + hooks_url: string; + /** @example https://api.github.com/orgs/github/issues */ + issues_url: string; + /** @example https://api.github.com/orgs/github/members{/member} */ + members_url: string; + /** @example https://api.github.com/orgs/github/public_members{/member} */ + public_members_url: string; + /** @example https://github.com/images/error/octocat_happy.gif */ + avatar_url: string; + /** @example A great organization */ + description: string | null; + /** @example github */ + name?: string; + /** @example GitHub */ + company?: string; + /** + * Format: uri + * @example https://github.com/blog + */ + blog?: string; + /** @example San Francisco */ + location?: string; + /** + * Format: email + * @example octocat@github.com + */ + email?: string; + /** @example github */ + twitter_username?: string | null; + /** @example true */ + is_verified?: boolean; + /** @example true */ + has_organization_projects: boolean; + /** @example true */ + has_repository_projects: boolean; + /** @example 2 */ + public_repos: number; + /** @example 1 */ + public_gists: number; + /** @example 20 */ + followers: number; + /** @example 0 */ + following: number; + /** + * Format: uri + * @example https://github.com/octocat + */ + html_url: string; + /** + * Format: date-time + * @example 2008-01-14T04:33:35Z + */ + created_at: string; + /** 
@example Organization */ + type: string; + /** @example 100 */ + total_private_repos?: number; + /** @example 100 */ + owned_private_repos?: number; + /** @example 81 */ + private_gists?: number | null; + /** @example 10000 */ + disk_usage?: number | null; + /** @example 8 */ + collaborators?: number | null; + /** + * Format: email + * @example org@example.com + */ + billing_email?: string | null; + plan?: { + name: string; + space: number; + private_repos: number; + filled_seats?: number; + seats?: number; + }; + default_repository_permission?: string | null; + /** @example true */ + members_can_create_repositories?: boolean | null; + /** @example true */ + two_factor_requirement_enabled?: boolean | null; + /** @example all */ + members_allowed_repository_creation_type?: string; + /** @example true */ + members_can_create_public_repositories?: boolean; + /** @example true */ + members_can_create_private_repositories?: boolean; + /** @example true */ + members_can_create_internal_repositories?: boolean; + /** @example true */ + members_can_create_pages?: boolean; + /** @example true */ + members_can_create_public_pages?: boolean; + /** @example true */ + members_can_create_private_pages?: boolean; + /** @example false */ + members_can_fork_private_repositories?: boolean | null; + /** @example false */ + web_commit_signoff_required?: boolean; + /** Format: date-time */ + updated_at: string; + /** + * @description Whether GitHub Advanced Security is enabled for new repositories and repositories transferred to this organization. + * + * This field is only visible to organization owners or members of a team with the security manager role. + * @example false + */ + advanced_security_enabled_for_new_repositories?: boolean; + /** + * @description Whether GitHub Advanced Security is automatically enabled for new repositories and repositories transferred to + * this organization. + * + * This field is only visible to organization owners or members of a team with the security manager role. + * @example false + */ + dependabot_alerts_enabled_for_new_repositories?: boolean; + /** + * @description Whether dependabot security updates are automatically enabled for new repositories and repositories transferred + * to this organization. + * + * This field is only visible to organization owners or members of a team with the security manager role. + * @example false + */ + dependabot_security_updates_enabled_for_new_repositories?: boolean; + /** + * @description Whether dependency graph is automatically enabled for new repositories and repositories transferred to this + * organization. + * + * This field is only visible to organization owners or members of a team with the security manager role. + * @example false + */ + dependency_graph_enabled_for_new_repositories?: boolean; + /** + * @description Whether secret scanning is automatically enabled for new repositories and repositories transferred to this + * organization. + * + * This field is only visible to organization owners or members of a team with the security manager role. + * @example false + */ + secret_scanning_enabled_for_new_repositories?: boolean; + /** + * @description Whether secret scanning push protection is automatically enabled for new repositories and repositories + * transferred to this organization. + * + * This field is only visible to organization owners or members of a team with the security manager role. 
+ * @example false + */ + secret_scanning_push_protection_enabled_for_new_repositories?: boolean; + }; + /** + * Actions Cache Usage by repository + * @description GitHub Actions Cache Usage by repository. + */ + "actions-cache-usage-by-repository": { + /** + * @description The repository owner and name for the cache usage being shown. + * @example octo-org/Hello-World + */ + full_name: string; + /** + * @description The sum of the size in bytes of all the active cache items in the repository. + * @example 2322142 + */ + active_caches_size_in_bytes: number; + /** + * @description The number of active caches in the repository. + * @example 3 + */ + active_caches_count: number; + }; + /** + * @description The policy that controls the repositories in the organization that are allowed to run GitHub Actions. + * @enum {string} + */ + "enabled-repositories": "all" | "none" | "selected"; + "actions-organization-permissions": { + enabled_repositories: components["schemas"]["enabled-repositories"]; + /** @description The API URL to use to get or set the selected repositories that are allowed to run GitHub Actions, when `enabled_repositories` is set to `selected`. */ + selected_repositories_url?: string; + allowed_actions?: components["schemas"]["allowed-actions"]; + selected_actions_url?: components["schemas"]["selected-actions-url"]; + }; + "runner-groups-org": { + id: number; + name: string; + visibility: string; + default: boolean; + /** @description Link to the selected repositories resource for this runner group. Not present unless visibility was set to `selected` */ + selected_repositories_url?: string; + runners_url: string; + inherited: boolean; + inherited_allows_public_repositories?: boolean; + allows_public_repositories: boolean; + /** + * @description If `true`, the `restricted_to_workflows` and `selected_workflows` fields cannot be modified. + * @default false + */ + workflow_restrictions_read_only?: boolean; + /** + * @description If `true`, the runner group will be restricted to running only the workflows specified in the `selected_workflows` array. + * @default false + */ + restricted_to_workflows?: boolean; + /** @description List of workflows the runner group should be allowed to run. This setting will be ignored unless `restricted_to_workflows` is set to `true`. */ + selected_workflows?: string[]; + }; + /** + * Actions Secret for an Organization + * @description Secrets for GitHub Actions for an organization. + */ + "organization-actions-secret": { + /** + * @description The name of the secret. + * @example SECRET_TOKEN + */ + name: string; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + /** + * @description Visibility of a secret + * @enum {string} + */ + visibility: "all" | "private" | "selected"; + /** + * Format: uri + * @example https://api.github.com/organizations/org/secrets/my_secret/repositories + */ + selected_repositories_url?: string; + }; + /** + * ActionsPublicKey + * @description The public key used for setting Actions Secrets. + */ + "actions-public-key": { + /** + * @description The identifier for the key. + * @example 1234567 + */ + key_id: string; + /** + * @description The Base64 encoded public key. 
+ * @example hBT5WZEj8ZoOv6TYJsfWq7MxTEQopZO5/IT3ZCVQPzs= + */ + key: string; + /** @example 2 */ + id?: number; + /** @example https://api.github.com/user/keys/2 */ + url?: string; + /** @example ssh-rsa AAAAB3NzaC1yc2EAAA */ + title?: string; + /** @example 2011-01-26T19:01:12Z */ + created_at?: string; + }; + /** + * Codespace machine + * @description A description of the machine powering a codespace. + */ + "nullable-codespace-machine": { + /** + * @description The name of the machine. + * @example standardLinux + */ + name: string; + /** + * @description The display name of the machine includes cores, memory, and storage. + * @example 4 cores, 8 GB RAM, 64 GB storage + */ + display_name: string; + /** + * @description The operating system of the machine. + * @example linux + */ + operating_system: string; + /** + * @description How much storage is available to the codespace. + * @example 68719476736 + */ + storage_in_bytes: number; + /** + * @description How much memory is available to the codespace. + * @example 8589934592 + */ + memory_in_bytes: number; + /** + * @description How many cores are available to the codespace. + * @example 4 + */ + cpus: number; + /** + * @description Whether a prebuild is currently available when creating a codespace for this machine and repository. If a branch was not specified as a ref, the default branch will be assumed. Value will be "null" if prebuilds are not supported or prebuild availability could not be determined. Value will be "none" if no prebuild is available. Latest values "ready" and "in_progress" indicate the prebuild availability status. + * @example ready + * @enum {string|null} + */ + prebuild_availability: ("none" | "ready" | "in_progress") | null; + } | null; + /** + * Codespace + * @description A codespace. + */ + codespace: { + /** @example 1 */ + id: number; + /** + * @description Automatically generated name of this codespace. + * @example monalisa-octocat-hello-world-g4wpq6h95q + */ + name: string; + /** + * @description Display name for this codespace. + * @example bookish space pancake + */ + display_name?: string | null; + /** + * @description UUID identifying this codespace's environment. + * @example 26a7c758-7299-4a73-b978-5a92a7ae98a0 + */ + environment_id: string | null; + owner: components["schemas"]["simple-user"]; + billable_owner: components["schemas"]["simple-user"]; + repository: components["schemas"]["minimal-repository"]; + machine: components["schemas"]["nullable-codespace-machine"]; + /** + * @description Path to devcontainer.json from repo root used to create Codespace. + * @example .devcontainer/example/devcontainer.json + */ + devcontainer_path?: string | null; + /** + * @description Whether the codespace was created from a prebuild. + * @example false + */ + prebuild: boolean | null; + /** + * Format: date-time + * @example 2011-01-26T19:01:12Z + */ + created_at: string; + /** + * Format: date-time + * @example 2011-01-26T19:01:12Z + */ + updated_at: string; + /** + * Format: date-time + * @description Last known time this codespace was started. + * @example 2011-01-26T19:01:12Z + */ + last_used_at: string; + /** + * @description State of this codespace. 
+ * @example Available + * @enum {string} + */ + state: + | "Unknown" + | "Created" + | "Queued" + | "Provisioning" + | "Available" + | "Awaiting" + | "Unavailable" + | "Deleted" + | "Moved" + | "Shutdown" + | "Archived" + | "Starting" + | "ShuttingDown" + | "Failed" + | "Exporting" + | "Updating" + | "Rebuilding"; + /** + * Format: uri + * @description API URL for this codespace. + */ + url: string; + /** @description Details about the codespace's git repository. */ + git_status: { + /** + * @description The number of commits the local repository is ahead of the remote. + * @example 0 + */ + ahead?: number; + /** + * @description The number of commits the local repository is behind the remote. + * @example 0 + */ + behind?: number; + /** @description Whether the local repository has unpushed changes. */ + has_unpushed_changes?: boolean; + /** @description Whether the local repository has uncommitted changes. */ + has_uncommitted_changes?: boolean; + /** + * @description The current branch (or SHA if in detached HEAD state) of the local repository. + * @example main + */ + ref?: string; + }; + /** + * @description The Azure region where this codespace is located. + * @example WestUs2 + * @enum {string} + */ + location: "EastUs" | "SouthEastAsia" | "WestEurope" | "WestUs2"; + /** + * @description The number of minutes of inactivity after which this codespace will be automatically stopped. + * @example 60 + */ + idle_timeout_minutes: number | null; + /** + * Format: uri + * @description URL to access this codespace on the web. + */ + web_url: string; + /** + * Format: uri + * @description API URL to access available alternate machine types for this codespace. + */ + machines_url: string; + /** + * Format: uri + * @description API URL to start this codespace. + */ + start_url: string; + /** + * Format: uri + * @description API URL to stop this codespace. + */ + stop_url: string; + /** + * Format: uri + * @description API URL for the Pull Request associated with this codespace, if any. + */ + pulls_url: string | null; + recent_folders: string[]; + runtime_constraints?: { + /** @description The privacy settings a user can select from when forwarding a port. */ + allowed_port_privacy_settings?: string[] | null; + }; + /** @description Whether or not a codespace has a pending async operation. This would mean that the codespace is temporarily unavailable. The only thing that you can do with a codespace in this state is delete it. */ + pending_operation?: boolean | null; + /** @description Text to show user when codespace is disabled by a pending operation */ + pending_operation_disabled_reason?: string | null; + /** @description Text to show user when codespace idle timeout minutes has been overriden by an organization policy */ + idle_timeout_notice?: string | null; + /** + * @description Duration in minutes after codespace has gone idle in which it will be deleted. Must be integer minutes between 0 and 43200 (30 days). + * @example 60 + */ + retention_period_minutes?: number | null; + /** + * Format: date-time + * @description When a codespace will be auto-deleted based on the "retention_period_minutes" and "last_used_at" + * @example 2011-01-26T20:01:12Z + */ + retention_expires_at?: string | null; + /** + * @description The text to display to a user when a codespace has been stopped for a potentially actionable reason. 
+ * @example you've used 100% of your spending limit for Codespaces + */ + last_known_stop_notice?: string | null; + }; + /** + * Dependabot Secret for an Organization + * @description Secrets for GitHub Dependabot for an organization. + */ + "organization-dependabot-secret": { + /** + * @description The name of the secret. + * @example SECRET_TOKEN + */ + name: string; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + /** + * @description Visibility of a secret + * @enum {string} + */ + visibility: "all" | "private" | "selected"; + /** + * Format: uri + * @example https://api.github.com/organizations/org/dependabot/secrets/my_secret/repositories + */ + selected_repositories_url?: string; + }; + /** + * DependabotPublicKey + * @description The public key used for setting Dependabot Secrets. + */ + "dependabot-public-key": { + /** + * @description The identifier for the key. + * @example 1234567 + */ + key_id: string; + /** + * @description The Base64 encoded public key. + * @example hBT5WZEj8ZoOv6TYJsfWq7MxTEQopZO5/IT3ZCVQPzs= + */ + key: string; + }; + /** + * Organization Invitation + * @description Organization Invitation + */ + "organization-invitation": { + id: number; + login: string | null; + email: string | null; + role: string; + created_at: string; + failed_at?: string | null; + failed_reason?: string | null; + inviter: components["schemas"]["simple-user"]; + team_count: number; + /** @example "MDIyOk9yZ2FuaXphdGlvbkludml0YXRpb24x" */ + node_id: string; + /** @example "https://api.github.com/organizations/16/invitations/1/teams" */ + invitation_teams_url: string; + }; + /** + * Organization Fine-Grained Permission + * @description Fine-grained permissions available for the organization + */ + "organization-fine-grained-permission": { + name: string; + description: string; + }; + /** + * Org Hook + * @description Org Hook + */ + "org-hook": { + /** @example 1 */ + id: number; + /** + * Format: uri + * @example https://api.github.com/orgs/octocat/hooks/1 + */ + url: string; + /** + * Format: uri + * @example https://api.github.com/orgs/octocat/hooks/1/pings + */ + ping_url: string; + /** + * Format: uri + * @example https://api.github.com/orgs/octocat/hooks/1/deliveries + */ + deliveries_url?: string; + /** @example web */ + name: string; + /** + * @example [ + * "push", + * "pull_request" + * ] + */ + events: string[]; + /** @example true */ + active: boolean; + config: { + /** @example "http://example.com/2" */ + url?: string; + /** @example "0" */ + insecure_ssl?: string; + /** @example "form" */ + content_type?: string; + /** @example "********" */ + secret?: string; + }; + /** + * Format: date-time + * @example 2011-09-06T20:39:23Z + */ + updated_at: string; + /** + * Format: date-time + * @example 2011-09-06T17:26:27Z + */ + created_at: string; + type: string; + }; + /** + * @description The type of GitHub user that can comment, open issues, or create pull requests while the interaction limit is in effect. + * @example collaborators_only + * @enum {string} + */ + "interaction-group": + | "existing_users" + | "contributors_only" + | "collaborators_only"; + /** + * Interaction Limits + * @description Interaction limit settings. + */ + "interaction-limit-response": { + limit: components["schemas"]["interaction-group"]; + /** @example repository */ + origin: string; + /** + * Format: date-time + * @example 2018-08-17T04:18:39Z + */ + expires_at: string; + }; + /** + * @description The duration of the interaction restriction. 
Default: `one_day`. + * @example one_month + * @enum {string} + */ + "interaction-expiry": + | "one_day" + | "three_days" + | "one_week" + | "one_month" + | "six_months"; + /** + * Interaction Restrictions + * @description Limit interactions to a specific type of user for a specified duration + */ + "interaction-limit": { + limit: components["schemas"]["interaction-group"]; + expiry?: components["schemas"]["interaction-expiry"]; + }; + /** + * Team Simple + * @description Groups of organization members that gives permissions on specified repositories. + */ + "nullable-team-simple": { + /** + * @description Unique identifier of the team + * @example 1 + */ + id: number; + /** @example MDQ6VGVhbTE= */ + node_id: string; + /** + * Format: uri + * @description URL for the team + * @example https://api.github.com/organizations/1/team/1 + */ + url: string; + /** @example https://api.github.com/organizations/1/team/1/members{/member} */ + members_url: string; + /** + * @description Name of the team + * @example Justice League + */ + name: string; + /** + * @description Description of the team + * @example A great team. + */ + description: string | null; + /** + * @description Permission that the team will have for its repositories + * @example admin + */ + permission: string; + /** + * @description The level of privacy this team should have + * @example closed + */ + privacy?: string; + /** + * Format: uri + * @example https://github.com/orgs/rails/teams/core + */ + html_url: string; + /** + * Format: uri + * @example https://api.github.com/organizations/1/team/1/repos + */ + repositories_url: string; + /** @example justice-league */ + slug: string; + /** + * @description Distinguished Name (DN) that team maps to within LDAP environment + * @example uid=example,ou=users,dc=github,dc=com + */ + ldap_dn?: string; + } | null; + /** + * Team + * @description Groups of organization members that gives permissions on specified repositories. + */ + team: { + id: number; + node_id: string; + name: string; + slug: string; + description: string | null; + privacy?: string; + permission: string; + permissions?: { + pull: boolean; + triage: boolean; + push: boolean; + maintain: boolean; + admin: boolean; + }; + /** Format: uri */ + url: string; + /** + * Format: uri + * @example https://github.com/orgs/rails/teams/core + */ + html_url: string; + members_url: string; + /** Format: uri */ + repositories_url: string; + parent: components["schemas"]["nullable-team-simple"]; + }; + /** + * Org Membership + * @description Org Membership + */ + "org-membership": { + /** + * Format: uri + * @example https://api.github.com/orgs/octocat/memberships/defunkt + */ + url: string; + /** + * @description The state of the member in the organization. The `pending` state indicates the user has not yet accepted an invitation. + * @example active + * @enum {string} + */ + state: "active" | "pending"; + /** + * @description The user's membership type in the organization. + * @example admin + * @enum {string} + */ + role: "admin" | "member" | "billing_manager"; + /** + * Format: uri + * @example https://api.github.com/orgs/octocat + */ + organization_url: string; + organization: components["schemas"]["organization-simple"]; + user: components["schemas"]["nullable-simple-user"]; + permissions?: { + can_create_repository: boolean; + }; + }; + /** + * Migration + * @description A migration. 
+ */ + migration: { + /** @example 79 */ + id: number; + owner: components["schemas"]["nullable-simple-user"]; + /** @example 0b989ba4-242f-11e5-81e1-c7b6966d2516 */ + guid: string; + /** @example pending */ + state: string; + /** @example true */ + lock_repositories: boolean; + exclude_metadata: boolean; + exclude_git_data: boolean; + exclude_attachments: boolean; + exclude_releases: boolean; + exclude_owner_projects: boolean; + org_metadata_only: boolean; + repositories: components["schemas"]["repository"][]; + /** + * Format: uri + * @example https://api.github.com/orgs/octo-org/migrations/79 + */ + url: string; + /** + * Format: date-time + * @example 2015-07-06T15:33:38-07:00 + */ + created_at: string; + /** + * Format: date-time + * @example 2015-07-06T15:33:38-07:00 + */ + updated_at: string; + node_id: string; + /** Format: uri */ + archive_url?: string; + exclude?: unknown[]; + }; + /** + * Minimal Repository + * @description Minimal Repository + */ + "nullable-minimal-repository": { + /** @example 1296269 */ + id: number; + /** @example MDEwOlJlcG9zaXRvcnkxMjk2MjY5 */ + node_id: string; + /** @example Hello-World */ + name: string; + /** @example octocat/Hello-World */ + full_name: string; + owner: components["schemas"]["simple-user"]; + private: boolean; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World + */ + html_url: string; + /** @example This your first repo! */ + description: string | null; + fork: boolean; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World + */ + url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref} */ + archive_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/assignees{/user} */ + assignees_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/blobs{/sha} */ + blobs_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/branches{/branch} */ + branches_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator} */ + collaborators_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/comments{/number} */ + comments_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/commits{/sha} */ + commits_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/compare/{base}...{head} */ + compare_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/contents/{+path} */ + contents_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/contributors + */ + contributors_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/deployments + */ + deployments_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/downloads + */ + downloads_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/events + */ + events_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/forks + */ + forks_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/commits{/sha} */ + git_commits_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/refs{/sha} */ + git_refs_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/tags{/sha} */ + git_tags_url: string; + git_url?: string; + /** @example 
http://api.github.com/repos/octocat/Hello-World/issues/comments{/number} */ + issue_comment_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/issues/events{/number} */ + issue_events_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/issues{/number} */ + issues_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/keys{/key_id} */ + keys_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/labels{/name} */ + labels_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/languages + */ + languages_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/merges + */ + merges_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/milestones{/number} */ + milestones_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating} */ + notifications_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/pulls{/number} */ + pulls_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/releases{/id} */ + releases_url: string; + ssh_url?: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/stargazers + */ + stargazers_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/statuses/{sha} */ + statuses_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/subscribers + */ + subscribers_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/subscription + */ + subscription_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/tags + */ + tags_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/teams + */ + teams_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/trees{/sha} */ + trees_url: string; + clone_url?: string; + mirror_url?: string | null; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/hooks + */ + hooks_url: string; + svn_url?: string; + homepage?: string | null; + language?: string | null; + forks_count?: number; + stargazers_count?: number; + watchers_count?: number; + /** @description The size of the repository. Size is calculated hourly. When a repository is initially created, the size is 0. 
*/ + size?: number; + default_branch?: string; + open_issues_count?: number; + is_template?: boolean; + topics?: string[]; + has_issues?: boolean; + has_projects?: boolean; + has_wiki?: boolean; + has_pages?: boolean; + has_downloads?: boolean; + archived?: boolean; + disabled?: boolean; + visibility?: string; + /** + * Format: date-time + * @example 2011-01-26T19:06:43Z + */ + pushed_at?: string | null; + /** + * Format: date-time + * @example 2011-01-26T19:01:12Z + */ + created_at?: string | null; + /** + * Format: date-time + * @example 2011-01-26T19:14:43Z + */ + updated_at?: string | null; + permissions?: { + admin?: boolean; + maintain?: boolean; + push?: boolean; + triage?: boolean; + pull?: boolean; + }; + /** @example admin */ + role_name?: string; + template_repository?: components["schemas"]["nullable-repository"]; + temp_clone_token?: string; + delete_branch_on_merge?: boolean; + subscribers_count?: number; + network_count?: number; + code_of_conduct?: components["schemas"]["code-of-conduct"]; + license?: { + key?: string; + name?: string; + spdx_id?: string; + url?: string; + node_id?: string; + } | null; + /** @example 0 */ + forks?: number; + /** @example 0 */ + open_issues?: number; + /** @example 0 */ + watchers?: number; + allow_forking?: boolean; + /** @example false */ + web_commit_signoff_required?: boolean; + } | null; + /** + * Package + * @description A software package + */ + package: { + /** + * @description Unique identifier of the package. + * @example 1 + */ + id: number; + /** + * @description The name of the package. + * @example super-linter + */ + name: string; + /** + * @example docker + * @enum {string} + */ + package_type: + | "npm" + | "maven" + | "rubygems" + | "docker" + | "nuget" + | "container"; + /** @example https://api.github.com/orgs/github/packages/container/super-linter */ + url: string; + /** @example https://github.com/orgs/github/packages/container/package/super-linter */ + html_url: string; + /** + * @description The number of versions of the package. + * @example 1 + */ + version_count: number; + /** + * @example private + * @enum {string} + */ + visibility: "private" | "public"; + owner?: components["schemas"]["nullable-simple-user"]; + repository?: components["schemas"]["nullable-minimal-repository"]; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + }; + /** + * Package Version + * @description A version of a software package + */ + "package-version": { + /** + * @description Unique identifier of the package version. + * @example 1 + */ + id: number; + /** + * @description The name of the package version. 
+ * @example latest + */ + name: string; + /** @example https://api.github.com/orgs/github/packages/container/super-linter/versions/786068 */ + url: string; + /** @example https://github.com/orgs/github/packages/container/package/super-linter */ + package_html_url: string; + /** @example https://github.com/orgs/github/packages/container/super-linter/786068 */ + html_url?: string; + /** @example MIT */ + license?: string; + description?: string; + /** + * Format: date-time + * @example 2011-04-10T20:09:31Z + */ + created_at: string; + /** + * Format: date-time + * @example 2014-03-03T18:58:10Z + */ + updated_at: string; + /** + * Format: date-time + * @example 2014-03-03T18:58:10Z + */ + deleted_at?: string; + /** Package Version Metadata */ + metadata?: { + /** + * @example docker + * @enum {string} + */ + package_type: + | "npm" + | "maven" + | "rubygems" + | "docker" + | "nuget" + | "container"; + /** Container Metadata */ + container?: { + tags: string[]; + }; + /** Docker Metadata */ + docker?: { + tag?: string[]; + } & { + tags: unknown; + }; + }; + }; + /** + * Project + * @description Projects are a way to organize columns and cards of work. + */ + project: { + /** + * Format: uri + * @example https://api.github.com/repos/api-playground/projects-test + */ + owner_url: string; + /** + * Format: uri + * @example https://api.github.com/projects/1002604 + */ + url: string; + /** + * Format: uri + * @example https://github.com/api-playground/projects-test/projects/12 + */ + html_url: string; + /** + * Format: uri + * @example https://api.github.com/projects/1002604/columns + */ + columns_url: string; + /** @example 1002604 */ + id: number; + /** @example MDc6UHJvamVjdDEwMDI2MDQ= */ + node_id: string; + /** + * @description Name of the project + * @example Week One Sprint + */ + name: string; + /** + * @description Body of the project + * @example This project represents the sprint of the first week in January + */ + body: string | null; + /** @example 1 */ + number: number; + /** + * @description State of the project; either 'open' or 'closed' + * @example open + */ + state: string; + creator: components["schemas"]["nullable-simple-user"]; + /** + * Format: date-time + * @example 2011-04-10T20:09:31Z + */ + created_at: string; + /** + * Format: date-time + * @example 2014-03-03T18:58:10Z + */ + updated_at: string; + /** + * @description The baseline permission that all organization members have on this project. Only present if owner is an organization. + * @enum {string} + */ + organization_permission?: "read" | "write" | "admin" | "none"; + /** @description Whether or not this project can be seen by everyone. Only present if owner is an organization. */ + private?: boolean; + }; + /** + * Team Simple + * @description Groups of organization members that gives permissions on specified repositories. + */ + "team-simple": { + /** + * @description Unique identifier of the team + * @example 1 + */ + id: number; + /** @example MDQ6VGVhbTE= */ + node_id: string; + /** + * Format: uri + * @description URL for the team + * @example https://api.github.com/organizations/1/team/1 + */ + url: string; + /** @example https://api.github.com/organizations/1/team/1/members{/member} */ + members_url: string; + /** + * @description Name of the team + * @example Justice League + */ + name: string; + /** + * @description Description of the team + * @example A great team. 
+ */ + description: string | null; + /** + * @description Permission that the team will have for its repositories + * @example admin + */ + permission: string; + /** + * @description The level of privacy this team should have + * @example closed + */ + privacy?: string; + /** + * Format: uri + * @example https://github.com/orgs/rails/teams/core + */ + html_url: string; + /** + * Format: uri + * @example https://api.github.com/organizations/1/team/1/repos + */ + repositories_url: string; + /** @example justice-league */ + slug: string; + /** + * @description Distinguished Name (DN) that team maps to within LDAP environment + * @example uid=example,ou=users,dc=github,dc=com + */ + ldap_dn?: string; + }; + "actions-billing-usage": { + /** @description The sum of the free and paid GitHub Actions minutes used. */ + total_minutes_used: number; + /** @description The total paid GitHub Actions minutes used. */ + total_paid_minutes_used: number; + /** @description The amount of free GitHub Actions minutes available. */ + included_minutes: number; + minutes_used_breakdown: { + /** @description Total minutes used on Ubuntu runner machines. */ + UBUNTU?: number; + /** @description Total minutes used on macOS runner machines. */ + MACOS?: number; + /** @description Total minutes used on Windows runner machines. */ + WINDOWS?: number; + /** @description Total minutes used on Ubuntu 4 core runner machines. */ + ubuntu_4_core?: number; + /** @description Total minutes used on Ubuntu 8 core runner machines. */ + ubuntu_8_core?: number; + /** @description Total minutes used on Ubuntu 16 core runner machines. */ + ubuntu_16_core?: number; + /** @description Total minutes used on Ubuntu 32 core runner machines. */ + ubuntu_32_core?: number; + /** @description Total minutes used on Ubuntu 64 core runner machines. */ + ubuntu_64_core?: number; + /** @description Total minutes used on Windows 4 core runner machines. */ + windows_4_core?: number; + /** @description Total minutes used on Windows 8 core runner machines. */ + windows_8_core?: number; + /** @description Total minutes used on Windows 16 core runner machines. */ + windows_16_core?: number; + /** @description Total minutes used on Windows 32 core runner machines. */ + windows_32_core?: number; + /** @description Total minutes used on Windows 64 core runner machines. */ + windows_64_core?: number; + /** @description Total minutes used on all runner machines. */ + total?: number; + }; + }; + "packages-billing-usage": { + /** @description Sum of the free and paid storage space (GB) for GitHuub Packages. */ + total_gigabytes_bandwidth_used: number; + /** @description Total paid storage space (GB) for GitHuub Packages. */ + total_paid_gigabytes_bandwidth_used: number; + /** @description Free storage space (GB) for GitHub Packages. */ + included_gigabytes_bandwidth: number; + }; + "combined-billing-usage": { + /** @description Numbers of days left in billing cycle. */ + days_left_in_billing_cycle: number; + /** @description Estimated storage space (GB) used in billing cycle. */ + estimated_paid_storage_for_month: number; + /** @description Estimated sum of free and paid storage space (GB) used in billing cycle. 
*/ + estimated_storage_for_month: number; + }; + /** + * Team Organization + * @description Team Organization + */ + "team-organization": { + /** @example github */ + login: string; + /** @example 1 */ + id: number; + /** @example MDEyOk9yZ2FuaXphdGlvbjE= */ + node_id: string; + /** + * Format: uri + * @example https://api.github.com/orgs/github + */ + url: string; + /** + * Format: uri + * @example https://api.github.com/orgs/github/repos + */ + repos_url: string; + /** + * Format: uri + * @example https://api.github.com/orgs/github/events + */ + events_url: string; + /** @example https://api.github.com/orgs/github/hooks */ + hooks_url: string; + /** @example https://api.github.com/orgs/github/issues */ + issues_url: string; + /** @example https://api.github.com/orgs/github/members{/member} */ + members_url: string; + /** @example https://api.github.com/orgs/github/public_members{/member} */ + public_members_url: string; + /** @example https://github.com/images/error/octocat_happy.gif */ + avatar_url: string; + /** @example A great organization */ + description: string | null; + /** @example github */ + name?: string; + /** @example GitHub */ + company?: string; + /** + * Format: uri + * @example https://github.com/blog + */ + blog?: string; + /** @example San Francisco */ + location?: string; + /** + * Format: email + * @example octocat@github.com + */ + email?: string; + /** @example github */ + twitter_username?: string | null; + /** @example true */ + is_verified?: boolean; + /** @example true */ + has_organization_projects: boolean; + /** @example true */ + has_repository_projects: boolean; + /** @example 2 */ + public_repos: number; + /** @example 1 */ + public_gists: number; + /** @example 20 */ + followers: number; + /** @example 0 */ + following: number; + /** + * Format: uri + * @example https://github.com/octocat + */ + html_url: string; + /** + * Format: date-time + * @example 2008-01-14T04:33:35Z + */ + created_at: string; + /** @example Organization */ + type: string; + /** @example 100 */ + total_private_repos?: number; + /** @example 100 */ + owned_private_repos?: number; + /** @example 81 */ + private_gists?: number | null; + /** @example 10000 */ + disk_usage?: number | null; + /** @example 8 */ + collaborators?: number | null; + /** + * Format: email + * @example org@example.com + */ + billing_email?: string | null; + plan?: { + name: string; + space: number; + private_repos: number; + filled_seats?: number; + seats?: number; + }; + default_repository_permission?: string | null; + /** @example true */ + members_can_create_repositories?: boolean | null; + /** @example true */ + two_factor_requirement_enabled?: boolean | null; + /** @example all */ + members_allowed_repository_creation_type?: string; + /** @example true */ + members_can_create_public_repositories?: boolean; + /** @example true */ + members_can_create_private_repositories?: boolean; + /** @example true */ + members_can_create_internal_repositories?: boolean; + /** @example true */ + members_can_create_pages?: boolean; + /** @example true */ + members_can_create_public_pages?: boolean; + /** @example true */ + members_can_create_private_pages?: boolean; + /** @example false */ + members_can_fork_private_repositories?: boolean | null; + /** @example false */ + web_commit_signoff_required?: boolean; + /** Format: date-time */ + updated_at: string; + }; + /** + * Full Team + * @description Groups of organization members that gives permissions on specified repositories. 
+ */ + "team-full": { + /** + * @description Unique identifier of the team + * @example 42 + */ + id: number; + /** @example MDQ6VGVhbTE= */ + node_id: string; + /** + * Format: uri + * @description URL for the team + * @example https://api.github.com/organizations/1/team/1 + */ + url: string; + /** + * Format: uri + * @example https://github.com/orgs/rails/teams/core + */ + html_url: string; + /** + * @description Name of the team + * @example Developers + */ + name: string; + /** @example justice-league */ + slug: string; + /** @example A great team. */ + description: string | null; + /** + * @description The level of privacy this team should have + * @example closed + * @enum {string} + */ + privacy?: "closed" | "secret"; + /** + * @description Permission that the team will have for its repositories + * @example push + */ + permission: string; + /** @example https://api.github.com/organizations/1/team/1/members{/member} */ + members_url: string; + /** + * Format: uri + * @example https://api.github.com/organizations/1/team/1/repos + */ + repositories_url: string; + parent?: components["schemas"]["nullable-team-simple"]; + /** @example 3 */ + members_count: number; + /** @example 10 */ + repos_count: number; + /** + * Format: date-time + * @example 2017-07-14T16:53:42Z + */ + created_at: string; + /** + * Format: date-time + * @example 2017-08-17T12:37:15Z + */ + updated_at: string; + organization: components["schemas"]["team-organization"]; + /** + * @description Distinguished Name (DN) that team maps to within LDAP environment + * @example uid=example,ou=users,dc=github,dc=com + */ + ldap_dn?: string; + }; + /** + * Team Discussion + * @description A team discussion is a persistent record of a free-form conversation within a team. + */ + "team-discussion": { + author: components["schemas"]["nullable-simple-user"]; + /** + * @description The main text of the discussion. + * @example Please suggest improvements to our workflow in comments. + */ + body: string; + /** @example
<p>Hi! This is an area for us to collaborate as a team</p>
*/ + body_html: string; + /** + * @description The current version of the body content. If provided, this update operation will be rejected if the given version does not match the latest version on the server. + * @example 0307116bbf7ced493b8d8a346c650b71 + */ + body_version: string; + /** @example 0 */ + comments_count: number; + /** + * Format: uri + * @example https://api.github.com/organizations/1/team/2343027/discussions/1/comments + */ + comments_url: string; + /** + * Format: date-time + * @example 2018-01-25T18:56:31Z + */ + created_at: string; + /** Format: date-time */ + last_edited_at: string | null; + /** + * Format: uri + * @example https://github.com/orgs/github/teams/justice-league/discussions/1 + */ + html_url: string; + /** @example MDE0OlRlYW1EaXNjdXNzaW9uMQ== */ + node_id: string; + /** + * @description The unique sequence number of a team discussion. + * @example 42 + */ + number: number; + /** + * @description Whether or not this discussion should be pinned for easy retrieval. + * @example true + */ + pinned: boolean; + /** + * @description Whether or not this discussion should be restricted to team members and organization administrators. + * @example true + */ + private: boolean; + /** + * Format: uri + * @example https://api.github.com/organizations/1/team/2343027 + */ + team_url: string; + /** + * @description The title of the discussion. + * @example How can we improve our workflow? + */ + title: string; + /** + * Format: date-time + * @example 2018-01-25T18:56:31Z + */ + updated_at: string; + /** + * Format: uri + * @example https://api.github.com/organizations/1/team/2343027/discussions/1 + */ + url: string; + reactions?: components["schemas"]["reaction-rollup"]; + }; + /** + * Team Discussion Comment + * @description A reply to a discussion within a team. + */ + "team-discussion-comment": { + author: components["schemas"]["nullable-simple-user"]; + /** + * @description The main text of the comment. + * @example I agree with this suggestion. + */ + body: string; + /** @example
<p>Do you like apples?</p>
*/ + body_html: string; + /** + * @description The current version of the body content. If provided, this update operation will be rejected if the given version does not match the latest version on the server. + * @example 0307116bbf7ced493b8d8a346c650b71 + */ + body_version: string; + /** + * Format: date-time + * @example 2018-01-15T23:53:58Z + */ + created_at: string; + /** Format: date-time */ + last_edited_at: string | null; + /** + * Format: uri + * @example https://api.github.com/organizations/1/team/2403582/discussions/1 + */ + discussion_url: string; + /** + * Format: uri + * @example https://github.com/orgs/github/teams/justice-league/discussions/1/comments/1 + */ + html_url: string; + /** @example MDIxOlRlYW1EaXNjdXNzaW9uQ29tbWVudDE= */ + node_id: string; + /** + * @description The unique sequence number of a team discussion comment. + * @example 42 + */ + number: number; + /** + * Format: date-time + * @example 2018-01-15T23:53:58Z + */ + updated_at: string; + /** + * Format: uri + * @example https://api.github.com/organizations/1/team/2403582/discussions/1/comments/1 + */ + url: string; + reactions?: components["schemas"]["reaction-rollup"]; + }; + /** + * Reaction + * @description Reactions to conversations provide a way to help people express their feelings more simply and effectively. + */ + reaction: { + /** @example 1 */ + id: number; + /** @example MDg6UmVhY3Rpb24x */ + node_id: string; + user: components["schemas"]["nullable-simple-user"]; + /** + * @description The reaction to use + * @example heart + * @enum {string} + */ + content: + | "+1" + | "-1" + | "laugh" + | "confused" + | "heart" + | "hooray" + | "rocket" + | "eyes"; + /** + * Format: date-time + * @example 2016-05-20T20:09:31Z + */ + created_at: string; + }; + /** + * Team Membership + * @description Team Membership + */ + "team-membership": { + /** Format: uri */ + url: string; + /** + * @description The role of the user in the team. + * @default member + * @example member + * @enum {string} + */ + role: "member" | "maintainer"; + /** + * @description The state of the user's membership in the team. + * @enum {string} + */ + state: "active" | "pending"; + }; + /** + * Team Project + * @description A team's access to a project. + */ + "team-project": { + owner_url: string; + url: string; + html_url: string; + columns_url: string; + id: number; + node_id: string; + name: string; + body: string | null; + number: number; + state: string; + creator: components["schemas"]["simple-user"]; + created_at: string; + updated_at: string; + /** @description The organization permission for this project. Only present when owner is an organization. */ + organization_permission?: string; + /** @description Whether the project is private or not. Only present when owner is an organization. */ + private?: boolean; + permissions: { + read: boolean; + write: boolean; + admin: boolean; + }; + }; + /** + * Team Repository + * @description A team's access to a repository. + */ + "team-repository": { + /** + * @description Unique identifier of the repository + * @example 42 + */ + id: number; + /** @example MDEwOlJlcG9zaXRvcnkxMjk2MjY5 */ + node_id: string; + /** + * @description The name of the repository. 
+ * @example Team Environment + */ + name: string; + /** @example octocat/Hello-World */ + full_name: string; + license: components["schemas"]["nullable-license-simple"]; + forks: number; + permissions?: { + admin: boolean; + pull: boolean; + triage?: boolean; + push: boolean; + maintain?: boolean; + }; + /** @example admin */ + role_name?: string; + owner: components["schemas"]["nullable-simple-user"]; + /** + * @description Whether the repository is private or public. + * @default false + */ + private: boolean; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World + */ + html_url: string; + /** @example This your first repo! */ + description: string | null; + fork: boolean; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World + */ + url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref} */ + archive_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/assignees{/user} */ + assignees_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/blobs{/sha} */ + blobs_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/branches{/branch} */ + branches_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator} */ + collaborators_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/comments{/number} */ + comments_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/commits{/sha} */ + commits_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/compare/{base}...{head} */ + compare_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/contents/{+path} */ + contents_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/contributors + */ + contributors_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/deployments + */ + deployments_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/downloads + */ + downloads_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/events + */ + events_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/forks + */ + forks_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/commits{/sha} */ + git_commits_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/refs{/sha} */ + git_refs_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/tags{/sha} */ + git_tags_url: string; + /** @example git:github.com/octocat/Hello-World.git */ + git_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/issues/comments{/number} */ + issue_comment_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/issues/events{/number} */ + issue_events_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/issues{/number} */ + issues_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/keys{/key_id} */ + keys_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/labels{/name} */ + labels_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/languages + */ + languages_url: string; + /** + * Format: uri + * @example 
http://api.github.com/repos/octocat/Hello-World/merges + */ + merges_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/milestones{/number} */ + milestones_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating} */ + notifications_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/pulls{/number} */ + pulls_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/releases{/id} */ + releases_url: string; + /** @example git@github.com:octocat/Hello-World.git */ + ssh_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/stargazers + */ + stargazers_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/statuses/{sha} */ + statuses_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/subscribers + */ + subscribers_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/subscription + */ + subscription_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/tags + */ + tags_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/teams + */ + teams_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/trees{/sha} */ + trees_url: string; + /** @example https://github.com/octocat/Hello-World.git */ + clone_url: string; + /** + * Format: uri + * @example git:git.example.com/octocat/Hello-World + */ + mirror_url: string | null; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/hooks + */ + hooks_url: string; + /** + * Format: uri + * @example https://svn.github.com/octocat/Hello-World + */ + svn_url: string; + /** + * Format: uri + * @example https://github.com + */ + homepage: string | null; + language: string | null; + /** @example 9 */ + forks_count: number; + /** @example 80 */ + stargazers_count: number; + /** @example 80 */ + watchers_count: number; + /** @example 108 */ + size: number; + /** + * @description The default branch of the repository. + * @example master + */ + default_branch: string; + /** @example 0 */ + open_issues_count: number; + /** + * @description Whether this repository acts as a template that can be used to generate new repositories. + * @default false + * @example true + */ + is_template?: boolean; + topics?: string[]; + /** + * @description Whether issues are enabled. + * @default true + * @example true + */ + has_issues: boolean; + /** + * @description Whether projects are enabled. + * @default true + * @example true + */ + has_projects: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + * @example true + */ + has_wiki: boolean; + has_pages: boolean; + /** + * @description Whether downloads are enabled. + * @default true + * @example true + */ + has_downloads: boolean; + /** + * @description Whether the repository is archived. + * @default false + */ + archived: boolean; + /** @description Returns whether or not this repository disabled. */ + disabled: boolean; + /** + * @description The repository visibility: public, private, or internal. 
+ * @default public + */ + visibility?: string; + /** + * Format: date-time + * @example 2011-01-26T19:06:43Z + */ + pushed_at: string | null; + /** + * Format: date-time + * @example 2011-01-26T19:01:12Z + */ + created_at: string | null; + /** + * Format: date-time + * @example 2011-01-26T19:14:43Z + */ + updated_at: string | null; + /** + * @description Whether to allow rebase merges for pull requests. + * @default true + * @example true + */ + allow_rebase_merge?: boolean; + template_repository?: components["schemas"]["nullable-repository"]; + temp_clone_token?: string; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + * @example true + */ + allow_squash_merge?: boolean; + /** + * @description Whether to allow Auto-merge to be used on pull requests. + * @default false + * @example false + */ + allow_auto_merge?: boolean; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + * @example false + */ + delete_branch_on_merge?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + * @example true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow forking this repo + * @default false + * @example false + */ + allow_forking?: boolean; + /** + * @description Whether to require contributors to sign off on web-based commits + * @default false + * @example false + */ + web_commit_signoff_required?: boolean; + subscribers_count?: number; + network_count?: number; + open_issues: number; + watchers: number; + master_branch?: string; + }; + /** + * Project Card + * @description Project cards represent a scope of work. + */ + "project-card": { + /** + * Format: uri + * @example https://api.github.com/projects/columns/cards/1478 + */ + url: string; + /** + * @description The project card's ID + * @example 42 + */ + id: number; + /** @example MDExOlByb2plY3RDYXJkMTQ3OA== */ + node_id: string; + /** @example Add payload for delete Project column */ + note: string | null; + creator: components["schemas"]["nullable-simple-user"]; + /** + * Format: date-time + * @example 2016-09-05T14:21:06Z + */ + created_at: string; + /** + * Format: date-time + * @example 2016-09-05T14:20:22Z + */ + updated_at: string; + /** + * @description Whether or not the card is archived + * @example false + */ + archived?: boolean; + column_name?: string; + project_id?: string; + /** + * Format: uri + * @example https://api.github.com/projects/columns/367 + */ + column_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/api-playground/projects-test/issues/3 + */ + content_url?: string; + /** + * Format: uri + * @example https://api.github.com/projects/120 + */ + project_url: string; + }; + /** + * Project Column + * @description Project columns contain cards of work. 
+ */ + "project-column": { + /** + * Format: uri + * @example https://api.github.com/projects/columns/367 + */ + url: string; + /** + * Format: uri + * @example https://api.github.com/projects/120 + */ + project_url: string; + /** + * Format: uri + * @example https://api.github.com/projects/columns/367/cards + */ + cards_url: string; + /** + * @description The unique identifier of the project column + * @example 42 + */ + id: number; + /** @example MDEzOlByb2plY3RDb2x1bW4zNjc= */ + node_id: string; + /** + * @description Name of the project column + * @example Remaining tasks + */ + name: string; + /** + * Format: date-time + * @example 2016-09-05T14:18:44Z + */ + created_at: string; + /** + * Format: date-time + * @example 2016-09-05T14:22:28Z + */ + updated_at: string; + }; + /** + * Project Collaborator Permission + * @description Project Collaborator Permission + */ + "project-collaborator-permission": { + permission: string; + user: components["schemas"]["nullable-simple-user"]; + }; + /** Rate Limit */ + "rate-limit": { + limit: number; + remaining: number; + reset: number; + used: number; + }; + /** + * Rate Limit Overview + * @description Rate Limit Overview + */ + "rate-limit-overview": { + resources: { + core: components["schemas"]["rate-limit"]; + graphql?: components["schemas"]["rate-limit"]; + search: components["schemas"]["rate-limit"]; + source_import?: components["schemas"]["rate-limit"]; + integration_manifest?: components["schemas"]["rate-limit"]; + code_scanning_upload?: components["schemas"]["rate-limit"]; + actions_runner_registration?: components["schemas"]["rate-limit"]; + scim?: components["schemas"]["rate-limit"]; + dependency_snapshots?: components["schemas"]["rate-limit"]; + }; + rate: components["schemas"]["rate-limit"]; + }; + /** + * Code Of Conduct Simple + * @description Code of Conduct Simple + */ + "code-of-conduct-simple": { + /** + * Format: uri + * @example https://api.github.com/repos/github/docs/community/code_of_conduct + */ + url: string; + /** @example citizen_code_of_conduct */ + key: string; + /** @example Citizen Code of Conduct */ + name: string; + /** + * Format: uri + * @example https://github.com/github/docs/blob/main/CODE_OF_CONDUCT.md + */ + html_url: string | null; + }; + "security-and-analysis": { + advanced_security?: { + /** @enum {string} */ + status?: "enabled" | "disabled"; + }; + secret_scanning?: { + /** @enum {string} */ + status?: "enabled" | "disabled"; + }; + secret_scanning_push_protection?: { + /** @enum {string} */ + status?: "enabled" | "disabled"; + }; + } | null; + /** + * Full Repository + * @description Full Repository + */ + "full-repository": { + /** @example 1296269 */ + id: number; + /** @example MDEwOlJlcG9zaXRvcnkxMjk2MjY5 */ + node_id: string; + /** @example Hello-World */ + name: string; + /** @example octocat/Hello-World */ + full_name: string; + owner: components["schemas"]["simple-user"]; + private: boolean; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World + */ + html_url: string; + /** @example This your first repo! 
*/ + description: string | null; + fork: boolean; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World + */ + url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref} */ + archive_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/assignees{/user} */ + assignees_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/blobs{/sha} */ + blobs_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/branches{/branch} */ + branches_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator} */ + collaborators_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/comments{/number} */ + comments_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/commits{/sha} */ + commits_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/compare/{base}...{head} */ + compare_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/contents/{+path} */ + contents_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/contributors + */ + contributors_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/deployments + */ + deployments_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/downloads + */ + downloads_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/events + */ + events_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/forks + */ + forks_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/commits{/sha} */ + git_commits_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/refs{/sha} */ + git_refs_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/tags{/sha} */ + git_tags_url: string; + /** @example git:github.com/octocat/Hello-World.git */ + git_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/issues/comments{/number} */ + issue_comment_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/issues/events{/number} */ + issue_events_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/issues{/number} */ + issues_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/keys{/key_id} */ + keys_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/labels{/name} */ + labels_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/languages + */ + languages_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/merges + */ + merges_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/milestones{/number} */ + milestones_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating} */ + notifications_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/pulls{/number} */ + pulls_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/releases{/id} */ + releases_url: string; + /** @example git@github.com:octocat/Hello-World.git */ + ssh_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/stargazers 
+ */ + stargazers_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/statuses/{sha} */ + statuses_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/subscribers + */ + subscribers_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/subscription + */ + subscription_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/tags + */ + tags_url: string; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/teams + */ + teams_url: string; + /** @example http://api.github.com/repos/octocat/Hello-World/git/trees{/sha} */ + trees_url: string; + /** @example https://github.com/octocat/Hello-World.git */ + clone_url: string; + /** + * Format: uri + * @example git:git.example.com/octocat/Hello-World + */ + mirror_url: string | null; + /** + * Format: uri + * @example http://api.github.com/repos/octocat/Hello-World/hooks + */ + hooks_url: string; + /** + * Format: uri + * @example https://svn.github.com/octocat/Hello-World + */ + svn_url: string; + /** + * Format: uri + * @example https://github.com + */ + homepage: string | null; + language: string | null; + /** @example 9 */ + forks_count: number; + /** @example 80 */ + stargazers_count: number; + /** @example 80 */ + watchers_count: number; + /** + * @description The size of the repository. Size is calculated hourly. When a repository is initially created, the size is 0. + * @example 108 + */ + size: number; + /** @example master */ + default_branch: string; + /** @example 0 */ + open_issues_count: number; + /** @example true */ + is_template?: boolean; + /** + * @example [ + * "octocat", + * "atom", + * "electron", + * "API" + * ] + */ + topics?: string[]; + /** @example true */ + has_issues: boolean; + /** @example true */ + has_projects: boolean; + /** @example true */ + has_wiki: boolean; + has_pages: boolean; + /** @example true */ + has_downloads: boolean; + archived: boolean; + /** @description Returns whether or not this repository disabled. */ + disabled: boolean; + /** + * @description The repository visibility: public, private, or internal. + * @example public + */ + visibility?: string; + /** + * Format: date-time + * @example 2011-01-26T19:06:43Z + */ + pushed_at: string; + /** + * Format: date-time + * @example 2011-01-26T19:01:12Z + */ + created_at: string; + /** + * Format: date-time + * @example 2011-01-26T19:14:43Z + */ + updated_at: string; + permissions?: { + admin: boolean; + maintain?: boolean; + push: boolean; + triage?: boolean; + pull: boolean; + }; + /** @example true */ + allow_rebase_merge?: boolean; + template_repository?: components["schemas"]["nullable-repository"]; + temp_clone_token?: string | null; + /** @example true */ + allow_squash_merge?: boolean; + /** @example false */ + allow_auto_merge?: boolean; + /** @example false */ + delete_branch_on_merge?: boolean; + /** @example true */ + allow_merge_commit?: boolean; + /** @example true */ + allow_update_branch?: boolean; + /** @example false */ + use_squash_pr_title_as_default?: boolean; + /** + * @description The default value for a squash merge commit title: + * + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). 
+ * @example PR_TITLE + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + /** + * @description The default value for a squash merge commit message: + * + * - `PR_BODY` - default to the pull request's body. + * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @example PR_BODY + * @enum {string} + */ + squash_merge_commit_message?: "PR_BODY" | "COMMIT_MESSAGES" | "BLANK"; + /** + * @description The default value for a merge commit title. + * + * - `PR_TITLE` - default to the pull request's title. + * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name). + * @example PR_TITLE + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** + * @description The default value for a merge commit message. + * + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. + * @example PR_BODY + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + /** @example true */ + allow_forking?: boolean; + /** @example false */ + web_commit_signoff_required?: boolean; + /** @example 42 */ + subscribers_count: number; + /** @example 0 */ + network_count: number; + license: components["schemas"]["nullable-license-simple"]; + organization?: components["schemas"]["nullable-simple-user"]; + parent?: components["schemas"]["repository"]; + source?: components["schemas"]["repository"]; + forks: number; + master_branch?: string; + open_issues: number; + watchers: number; + /** + * @description Whether anonymous git access is allowed. + * @default true + */ + anonymous_access_enabled?: boolean; + code_of_conduct?: components["schemas"]["code-of-conduct-simple"]; + security_and_analysis?: components["schemas"]["security-and-analysis"]; + }; + /** + * Artifact + * @description An artifact + */ + artifact: { + /** @example 5 */ + id: number; + /** @example MDEwOkNoZWNrU3VpdGU1 */ + node_id: string; + /** + * @description The name of the artifact. + * @example AdventureWorks.Framework + */ + name: string; + /** + * @description The size in bytes of the artifact. + * @example 12345 + */ + size_in_bytes: number; + /** @example https://api.github.com/repos/github/hello-world/actions/artifacts/5 */ + url: string; + /** @example https://api.github.com/repos/github/hello-world/actions/artifacts/5/zip */ + archive_download_url: string; + /** @description Whether or not the artifact has expired. 
*/ + expired: boolean; + /** Format: date-time */ + created_at: string | null; + /** Format: date-time */ + expires_at: string | null; + /** Format: date-time */ + updated_at: string | null; + workflow_run?: { + /** @example 10 */ + id?: number; + /** @example 42 */ + repository_id?: number; + /** @example 42 */ + head_repository_id?: number; + /** @example main */ + head_branch?: string; + /** @example 009b8a3a9ccbb128af87f9b1c0f4c62e8a304f6d */ + head_sha?: string; + } | null; + }; + /** + * Repository actions caches + * @description Repository actions caches + */ + "actions-cache-list": { + /** + * @description Total number of caches + * @example 2 + */ + total_count: number; + /** @description Array of caches */ + actions_caches: { + /** @example 2 */ + id?: number; + /** @example refs/heads/main */ + ref?: string; + /** @example Linux-node-958aff96db2d75d67787d1e634ae70b659de937b */ + key?: string; + /** @example 73885106f58cc52a7df9ec4d4a5622a5614813162cb516c759a30af6bf56e6f0 */ + version?: string; + /** + * Format: date-time + * @example 2019-01-24T22:45:36.000Z + */ + last_accessed_at?: string; + /** + * Format: date-time + * @example 2019-01-24T22:45:36.000Z + */ + created_at?: string; + /** @example 1024 */ + size_in_bytes?: number; + }[]; + }; + /** + * Job + * @description Information of a job execution in a workflow run + */ + job: { + /** + * @description The id of the job. + * @example 21 + */ + id: number; + /** + * @description The id of the associated workflow run. + * @example 5 + */ + run_id: number; + /** @example https://api.github.com/repos/github/hello-world/actions/runs/5 */ + run_url: string; + /** + * @description Attempt number of the associated workflow run, 1 for first attempt and higher if the workflow was re-run. + * @example 1 + */ + run_attempt?: number; + /** @example MDg6Q2hlY2tSdW40 */ + node_id: string; + /** + * @description The SHA of the commit that is being run. + * @example 009b8a3a9ccbb128af87f9b1c0f4c62e8a304f6d + */ + head_sha: string; + /** @example https://api.github.com/repos/github/hello-world/actions/jobs/21 */ + url: string; + /** @example https://github.com/github/hello-world/runs/4 */ + html_url: string | null; + /** + * @description The phase of the lifecycle that the job is currently in. + * @example queued + * @enum {string} + */ + status: "queued" | "in_progress" | "completed"; + /** + * @description The outcome of the job. + * @example success + * @enum {string|null} + */ + conclusion: + | ( + | "success" + | "failure" + | "neutral" + | "cancelled" + | "skipped" + | "timed_out" + | "action_required" + ) + | null; + /** + * Format: date-time + * @description The time that the job started, in ISO 8601 format. + * @example 2019-08-08T08:00:00-07:00 + */ + started_at: string; + /** + * Format: date-time + * @description The time that the job finished, in ISO 8601 format. + * @example 2019-08-08T08:00:00-07:00 + */ + completed_at: string | null; + /** + * @description The name of the job. + * @example test-coverage + */ + name: string; + /** @description Steps in this job. */ + steps?: { + /** + * @description The phase of the lifecycle that the job is currently in. + * @example queued + * @enum {string} + */ + status: "queued" | "in_progress" | "completed"; + /** + * @description The outcome of the job. + * @example success + */ + conclusion: string | null; + /** + * @description The name of the job. 
+ * @example test-coverage + */ + name: string; + /** @example 1 */ + number: number; + /** + * Format: date-time + * @description The time that the step started, in ISO 8601 format. + * @example 2019-08-08T08:00:00-07:00 + */ + started_at?: string | null; + /** + * Format: date-time + * @description The time that the job finished, in ISO 8601 format. + * @example 2019-08-08T08:00:00-07:00 + */ + completed_at?: string | null; + }[]; + /** @example https://api.github.com/repos/github/hello-world/check-runs/4 */ + check_run_url: string; + /** + * @description Labels for the workflow job. Specified by the "runs_on" attribute in the action's workflow file. + * @example [ + * "self-hosted", + * "foo", + * "bar" + * ] + */ + labels: string[]; + /** + * @description The ID of the runner to which this job has been assigned. (If a runner hasn't yet been assigned, this will be null.) + * @example 1 + */ + runner_id: number | null; + /** + * @description The name of the runner to which this job has been assigned. (If a runner hasn't yet been assigned, this will be null.) + * @example my runner + */ + runner_name: string | null; + /** + * @description The ID of the runner group to which this job has been assigned. (If a runner hasn't yet been assigned, this will be null.) + * @example 2 + */ + runner_group_id: number | null; + /** + * @description The name of the runner group to which this job has been assigned. (If a runner hasn't yet been assigned, this will be null.) + * @example my runner group + */ + runner_group_name: string | null; + }; + /** @description Whether GitHub Actions is enabled on the repository. */ + "actions-enabled": boolean; + "actions-repository-permissions": { + enabled: components["schemas"]["actions-enabled"]; + allowed_actions?: components["schemas"]["allowed-actions"]; + selected_actions_url?: components["schemas"]["selected-actions-url"]; + }; + "actions-workflow-access-to-repository": { + /** + * @description Defines the level of access that workflows outside of the repository have to actions and reusable workflows within the + * repository. `none` means access is only possible from workflows in this repository. + * @enum {string} + */ + access_level: "none" | "organization" | "enterprise"; + }; + /** + * Referenced workflow + * @description A workflow referenced/reused by the initial caller workflow + */ + "referenced-workflow": { + path: string; + sha: string; + ref?: string; + }; + /** Pull Request Minimal */ + "pull-request-minimal": { + id: number; + number: number; + url: string; + head: { + ref: string; + sha: string; + repo: { + id: number; + url: string; + name: string; + }; + }; + base: { + ref: string; + sha: string; + repo: { + id: number; + url: string; + name: string; + }; + }; + }; + /** + * Simple Commit + * @description Simple Commit + */ + "nullable-simple-commit": { + id: string; + tree_id: string; + message: string; + /** Format: date-time */ + timestamp: string; + author: { + name: string; + email: string; + } | null; + committer: { + name: string; + email: string; + } | null; + } | null; + /** + * Workflow Run + * @description An invocation of a workflow + */ + "workflow-run": { + /** + * @description The ID of the workflow run. + * @example 5 + */ + id: number; + /** + * @description The name of the workflow run. + * @example Build + */ + name?: string | null; + /** @example MDEwOkNoZWNrU3VpdGU1 */ + node_id: string; + /** + * @description The ID of the associated check suite. 
+ * @example 42 + */ + check_suite_id?: number; + /** + * @description The node ID of the associated check suite. + * @example MDEwOkNoZWNrU3VpdGU0Mg== + */ + check_suite_node_id?: string; + /** @example master */ + head_branch: string | null; + /** + * @description The SHA of the head commit that points to the version of the workflow being run. + * @example 009b8a3a9ccbb128af87f9b1c0f4c62e8a304f6d + */ + head_sha: string; + /** + * @description The full path of the workflow + * @example octocat/octo-repo/.github/workflows/ci.yml@main + */ + path: string; + /** + * @description The auto incrementing run number for the workflow run. + * @example 106 + */ + run_number: number; + /** + * @description Attempt number of the run, 1 for first attempt and higher if the workflow was re-run. + * @example 1 + */ + run_attempt?: number; + referenced_workflows?: + | components["schemas"]["referenced-workflow"][] + | null; + /** @example push */ + event: string; + /** @example completed */ + status: string | null; + /** @example neutral */ + conclusion: string | null; + /** + * @description The ID of the parent workflow. + * @example 5 + */ + workflow_id: number; + /** + * @description The URL to the workflow run. + * @example https://api.github.com/repos/github/hello-world/actions/runs/5 + */ + url: string; + /** @example https://github.com/github/hello-world/suites/4 */ + html_url: string; + pull_requests: components["schemas"]["pull-request-minimal"][] | null; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + actor?: components["schemas"]["simple-user"]; + triggering_actor?: components["schemas"]["simple-user"]; + /** + * Format: date-time + * @description The start time of the latest run. Resets on re-run. + */ + run_started_at?: string; + /** + * @description The URL to the jobs for the workflow run. + * @example https://api.github.com/repos/github/hello-world/actions/runs/5/jobs + */ + jobs_url: string; + /** + * @description The URL to download the logs for the workflow run. + * @example https://api.github.com/repos/github/hello-world/actions/runs/5/logs + */ + logs_url: string; + /** + * @description The URL to the associated check suite. + * @example https://api.github.com/repos/github/hello-world/check-suites/12 + */ + check_suite_url: string; + /** + * @description The URL to the artifacts for the workflow run. + * @example https://api.github.com/repos/github/hello-world/actions/runs/5/rerun/artifacts + */ + artifacts_url: string; + /** + * @description The URL to cancel the workflow run. + * @example https://api.github.com/repos/github/hello-world/actions/runs/5/cancel + */ + cancel_url: string; + /** + * @description The URL to rerun the workflow run. + * @example https://api.github.com/repos/github/hello-world/actions/runs/5/rerun + */ + rerun_url: string; + /** + * @description The URL to the previous attempted run of this workflow, if one exists. + * @example https://api.github.com/repos/github/hello-world/actions/runs/5/attempts/3 + */ + previous_attempt_url?: string | null; + /** + * @description The URL to the workflow. 
+ * @example https://api.github.com/repos/github/hello-world/actions/workflows/main.yaml + */ + workflow_url: string; + head_commit: components["schemas"]["nullable-simple-commit"]; + repository: components["schemas"]["minimal-repository"]; + head_repository: components["schemas"]["minimal-repository"]; + /** @example 5 */ + head_repository_id?: number; + /** + * @description The event-specific title associated with the run or the run-name if set, or the value of `run-name` if it is set in the workflow. + * @example Simple Workflow + */ + display_title: string; + }; + /** + * Environment Approval + * @description An entry in the reviews log for environment deployments + */ + "environment-approvals": { + /** @description The list of environments that were approved or rejected */ + environments: { + /** + * @description The id of the environment. + * @example 56780428 + */ + id?: number; + /** @example MDExOkVudmlyb25tZW50NTY3ODA0Mjg= */ + node_id?: string; + /** + * @description The name of the environment. + * @example staging + */ + name?: string; + /** @example https://api.github.com/repos/github/hello-world/environments/staging */ + url?: string; + /** @example https://github.com/github/hello-world/deployments/activity_log?environments_filter=staging */ + html_url?: string; + /** + * Format: date-time + * @description The time that the environment was created, in ISO 8601 format. + * @example 2020-11-23T22:00:40Z + */ + created_at?: string; + /** + * Format: date-time + * @description The time that the environment was last updated, in ISO 8601 format. + * @example 2020-11-23T22:00:40Z + */ + updated_at?: string; + }[]; + /** + * @description Whether deployment to the environment(s) was approved or rejected + * @example approved + * @enum {string} + */ + state: "approved" | "rejected"; + user: components["schemas"]["simple-user"]; + /** + * @description The comment submitted with the deployment review + * @example Ship it! + */ + comment: string; + }; + /** + * @description The type of reviewer. + * @example User + * @enum {string} + */ + "deployment-reviewer-type": "User" | "Team"; + /** + * Pending Deployment + * @description Details of a deployment that is waiting for protection rules to pass + */ + "pending-deployment": { + environment: { + /** + * @description The id of the environment. + * @example 56780428 + */ + id?: number; + /** @example MDExOkVudmlyb25tZW50NTY3ODA0Mjg= */ + node_id?: string; + /** + * @description The name of the environment. + * @example staging + */ + name?: string; + /** @example https://api.github.com/repos/github/hello-world/environments/staging */ + url?: string; + /** @example https://github.com/github/hello-world/deployments/activity_log?environments_filter=staging */ + html_url?: string; + }; + /** + * @description The set duration of the wait timer + * @example 30 + */ + wait_timer: number; + /** + * Format: date-time + * @description The time that the wait timer began. + * @example 2020-11-23T22:00:40Z + */ + wait_timer_started_at: string | null; + /** + * @description Whether the currently authenticated user can approve the deployment + * @example true + */ + current_user_can_approve: boolean; + /** @description The people or teams that may approve jobs that reference the environment. You can list up to six users or teams as reviewers. The reviewers must have at least read access to the repository. Only one of the required reviewers needs to approve the job for it to proceed. 
*/ + reviewers: { + type?: components["schemas"]["deployment-reviewer-type"]; + reviewer?: Partial & + Partial; + }[]; + }; + /** + * Deployment + * @description A request for a specific ref(branch,sha,tag) to be deployed + */ + deployment: { + /** + * Format: uri + * @example https://api.github.com/repos/octocat/example/deployments/1 + */ + url: string; + /** + * @description Unique identifier of the deployment + * @example 42 + */ + id: number; + /** @example MDEwOkRlcGxveW1lbnQx */ + node_id: string; + /** @example a84d88e7554fc1fa21bcbc4efae3c782a70d2b9d */ + sha: string; + /** + * @description The ref to deploy. This can be a branch, tag, or sha. + * @example topic-branch + */ + ref: string; + /** + * @description Parameter to specify a task to execute + * @example deploy + */ + task: string; + payload: { [key: string]: unknown } | string; + /** @example staging */ + original_environment?: string; + /** + * @description Name for the target deployment environment. + * @example production + */ + environment: string; + /** @example Deploy request from hubot */ + description: string | null; + creator: components["schemas"]["nullable-simple-user"]; + /** + * Format: date-time + * @example 2012-07-20T01:19:13Z + */ + created_at: string; + /** + * Format: date-time + * @example 2012-07-20T01:19:13Z + */ + updated_at: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/example/deployments/1/statuses + */ + statuses_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/example + */ + repository_url: string; + /** + * @description Specifies if the given environment is will no longer exist at some point in the future. Default: false. + * @example true + */ + transient_environment?: boolean; + /** + * @description Specifies if the given environment is one that end-users directly interact with. Default: false. + * @example true + */ + production_environment?: boolean; + performed_via_github_app?: components["schemas"]["nullable-integration"]; + }; + /** + * Workflow Run Usage + * @description Workflow Run Usage + */ + "workflow-run-usage": { + billable: { + UBUNTU?: { + total_ms: number; + jobs: number; + job_runs?: { + job_id: number; + duration_ms: number; + }[]; + }; + MACOS?: { + total_ms: number; + jobs: number; + job_runs?: { + job_id: number; + duration_ms: number; + }[]; + }; + WINDOWS?: { + total_ms: number; + jobs: number; + job_runs?: { + job_id: number; + duration_ms: number; + }[]; + }; + }; + run_duration_ms?: number; + }; + /** + * Actions Secret + * @description Set secrets for GitHub Actions. + */ + "actions-secret": { + /** + * @description The name of the secret. 
+ * @example SECRET_TOKEN + */ + name: string; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + }; + /** + * Workflow + * @description A GitHub Actions workflow + */ + workflow: { + /** @example 5 */ + id: number; + /** @example MDg6V29ya2Zsb3cxMg== */ + node_id: string; + /** @example CI */ + name: string; + /** @example ruby.yaml */ + path: string; + /** + * @example active + * @enum {string} + */ + state: + | "active" + | "deleted" + | "disabled_fork" + | "disabled_inactivity" + | "disabled_manually"; + /** + * Format: date-time + * @example 2019-12-06T14:20:20.000Z + */ + created_at: string; + /** + * Format: date-time + * @example 2019-12-06T14:20:20.000Z + */ + updated_at: string; + /** @example https://api.github.com/repos/actions/setup-ruby/workflows/5 */ + url: string; + /** @example https://github.com/actions/setup-ruby/blob/master/.github/workflows/ruby.yaml */ + html_url: string; + /** @example https://github.com/actions/setup-ruby/workflows/CI/badge.svg */ + badge_url: string; + /** + * Format: date-time + * @example 2019-12-06T14:20:20.000Z + */ + deleted_at?: string; + }; + /** + * Workflow Usage + * @description Workflow Usage + */ + "workflow-usage": { + billable: { + UBUNTU?: { + total_ms?: number; + }; + MACOS?: { + total_ms?: number; + }; + WINDOWS?: { + total_ms?: number; + }; + }; + }; + /** + * Autolink reference + * @description An autolink reference. + */ + autolink: { + /** @example 3 */ + id: number; + /** + * @description The prefix of a key that is linkified. + * @example TICKET- + */ + key_prefix: string; + /** + * @description A template for the target URL that is generated if a key was found. + * @example https://example.com/TICKET?query= + */ + url_template: string; + /** + * @description Whether this autolink reference matches alphanumeric characters. If false, this autolink reference only matches numeric characters. + * @example true + */ + is_alphanumeric: boolean; + }; + /** + * Protected Branch Required Status Check + * @description Protected Branch Required Status Check + */ + "protected-branch-required-status-check": { + url?: string; + enforcement_level?: string; + contexts: string[]; + checks: { + context: string; + app_id: number | null; + }[]; + contexts_url?: string; + strict?: boolean; + }; + /** + * Protected Branch Admin Enforced + * @description Protected Branch Admin Enforced + */ + "protected-branch-admin-enforced": { + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/branches/master/protection/enforce_admins + */ + url: string; + /** @example true */ + enabled: boolean; + }; + /** + * Protected Branch Pull Request Review + * @description Protected Branch Pull Request Review + */ + "protected-branch-pull-request-review": { + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/branches/master/protection/dismissal_restrictions + */ + url?: string; + dismissal_restrictions?: { + /** @description The list of users with review dismissal access. */ + users?: components["schemas"]["simple-user"][]; + /** @description The list of teams with review dismissal access. */ + teams?: components["schemas"]["team"][]; + /** @description The list of apps with review dismissal access. 
*/ + apps?: components["schemas"]["integration"][]; + /** @example "https://api.github.com/repos/the-org/an-org-repo/branches/master/protection/dismissal_restrictions" */ + url?: string; + /** @example "https://api.github.com/repos/the-org/an-org-repo/branches/master/protection/dismissal_restrictions/users" */ + users_url?: string; + /** @example "https://api.github.com/repos/the-org/an-org-repo/branches/master/protection/dismissal_restrictions/teams" */ + teams_url?: string; + }; + /** @description Allow specific users, teams, or apps to bypass pull request requirements. */ + bypass_pull_request_allowances?: { + /** @description The list of users allowed to bypass pull request requirements. */ + users?: components["schemas"]["simple-user"][]; + /** @description The list of teams allowed to bypass pull request requirements. */ + teams?: components["schemas"]["team"][]; + /** @description The list of apps allowed to bypass pull request requirements. */ + apps?: components["schemas"]["integration"][]; + }; + /** @example true */ + dismiss_stale_reviews: boolean; + /** @example true */ + require_code_owner_reviews: boolean; + /** @example 2 */ + required_approving_review_count?: number; + }; + /** + * Branch Restriction Policy + * @description Branch Restriction Policy + */ + "branch-restriction-policy": { + /** Format: uri */ + url: string; + /** Format: uri */ + users_url: string; + /** Format: uri */ + teams_url: string; + /** Format: uri */ + apps_url: string; + users: { + login?: string; + id?: number; + node_id?: string; + avatar_url?: string; + gravatar_id?: string; + url?: string; + html_url?: string; + followers_url?: string; + following_url?: string; + gists_url?: string; + starred_url?: string; + subscriptions_url?: string; + organizations_url?: string; + repos_url?: string; + events_url?: string; + received_events_url?: string; + type?: string; + site_admin?: boolean; + }[]; + teams: { + id?: number; + node_id?: string; + url?: string; + html_url?: string; + name?: string; + slug?: string; + description?: string | null; + privacy?: string; + permission?: string; + members_url?: string; + repositories_url?: string; + parent?: string | null; + }[]; + apps: { + id?: number; + slug?: string; + node_id?: string; + owner?: { + login?: string; + id?: number; + node_id?: string; + url?: string; + repos_url?: string; + events_url?: string; + hooks_url?: string; + issues_url?: string; + members_url?: string; + public_members_url?: string; + avatar_url?: string; + description?: string; + /** @example "" */ + gravatar_id?: string; + /** @example "https://github.com/testorg-ea8ec76d71c3af4b" */ + html_url?: string; + /** @example "https://api.github.com/users/testorg-ea8ec76d71c3af4b/followers" */ + followers_url?: string; + /** @example "https://api.github.com/users/testorg-ea8ec76d71c3af4b/following{/other_user}" */ + following_url?: string; + /** @example "https://api.github.com/users/testorg-ea8ec76d71c3af4b/gists{/gist_id}" */ + gists_url?: string; + /** @example "https://api.github.com/users/testorg-ea8ec76d71c3af4b/starred{/owner}{/repo}" */ + starred_url?: string; + /** @example "https://api.github.com/users/testorg-ea8ec76d71c3af4b/subscriptions" */ + subscriptions_url?: string; + /** @example "https://api.github.com/users/testorg-ea8ec76d71c3af4b/orgs" */ + organizations_url?: string; + /** @example "https://api.github.com/users/testorg-ea8ec76d71c3af4b/received_events" */ + received_events_url?: string; + /** @example "Organization" */ + type?: string; + /** @example false */ + 
site_admin?: boolean; + }; + name?: string; + description?: string; + external_url?: string; + html_url?: string; + created_at?: string; + updated_at?: string; + permissions?: { + metadata?: string; + contents?: string; + issues?: string; + single_file?: string; + }; + events?: string[]; + }[]; + }; + /** + * Branch Protection + * @description Branch Protection + */ + "branch-protection": { + url?: string; + enabled?: boolean; + required_status_checks?: components["schemas"]["protected-branch-required-status-check"]; + enforce_admins?: components["schemas"]["protected-branch-admin-enforced"]; + required_pull_request_reviews?: components["schemas"]["protected-branch-pull-request-review"]; + restrictions?: components["schemas"]["branch-restriction-policy"]; + required_linear_history?: { + enabled?: boolean; + }; + allow_force_pushes?: { + enabled?: boolean; + }; + allow_deletions?: { + enabled?: boolean; + }; + block_creations?: { + enabled?: boolean; + }; + required_conversation_resolution?: { + enabled?: boolean; + }; + /** @example "branch/with/protection" */ + name?: string; + /** @example "https://api.github.com/repos/owner-79e94e2d36b3fd06a32bb213/AAA_Public_Repo/branches/branch/with/protection/protection" */ + protection_url?: string; + required_signatures?: { + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/branches/master/protection/required_signatures + */ + url: string; + /** @example true */ + enabled: boolean; + }; + }; + /** + * Short Branch + * @description Short Branch + */ + "short-branch": { + name: string; + commit: { + sha: string; + /** Format: uri */ + url: string; + }; + protected: boolean; + protection?: components["schemas"]["branch-protection"]; + /** Format: uri */ + protection_url?: string; + }; + /** + * Git User + * @description Metaproperties for Git author/committer information. 
+ */ + "nullable-git-user": { + /** @example "Chris Wanstrath" */ + name?: string; + /** @example "chris@ozmm.org" */ + email?: string; + /** @example "2007-10-29T02:42:39.000-07:00" */ + date?: string; + } | null; + /** Verification */ + verification: { + verified: boolean; + reason: string; + payload: string | null; + signature: string | null; + }; + /** + * Diff Entry + * @description Diff Entry + */ + "diff-entry": { + /** @example bbcd538c8e72b8c175046e27cc8f907076331401 */ + sha: string; + /** @example file1.txt */ + filename: string; + /** + * @example added + * @enum {string} + */ + status: + | "added" + | "removed" + | "modified" + | "renamed" + | "copied" + | "changed" + | "unchanged"; + /** @example 103 */ + additions: number; + /** @example 21 */ + deletions: number; + /** @example 124 */ + changes: number; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/blob/6dcb09b5b57875f334f61aebed695e2e4193db5e/file1.txt + */ + blob_url: string; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/raw/6dcb09b5b57875f334f61aebed695e2e4193db5e/file1.txt + */ + raw_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/contents/file1.txt?ref=6dcb09b5b57875f334f61aebed695e2e4193db5e + */ + contents_url: string; + /** @example @@ -132,7 +132,7 @@ module Test @@ -1000,7 +1000,7 @@ module Test */ + patch?: string; + /** @example file.txt */ + previous_filename?: string; + }; + /** + * Commit + * @description Commit + */ + commit: { + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/commits/6dcb09b5b57875f334f61aebed695e2e4193db5e + */ + url: string; + /** @example 6dcb09b5b57875f334f61aebed695e2e4193db5e */ + sha: string; + /** @example MDY6Q29tbWl0NmRjYjA5YjViNTc4NzVmMzM0ZjYxYWViZWQ2OTVlMmU0MTkzZGI1ZQ== */ + node_id: string; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/commit/6dcb09b5b57875f334f61aebed695e2e4193db5e + */ + html_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/commits/6dcb09b5b57875f334f61aebed695e2e4193db5e/comments + */ + comments_url: string; + commit: { + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/commits/6dcb09b5b57875f334f61aebed695e2e4193db5e + */ + url: string; + author: components["schemas"]["nullable-git-user"]; + committer: components["schemas"]["nullable-git-user"]; + /** @example Fix all the bugs */ + message: string; + /** @example 0 */ + comment_count: number; + tree: { + /** @example 827efc6d56897b048c772eb4087f854f46256132 */ + sha: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/tree/827efc6d56897b048c772eb4087f854f46256132 + */ + url: string; + }; + verification?: components["schemas"]["verification"]; + }; + author: components["schemas"]["nullable-simple-user"]; + committer: components["schemas"]["nullable-simple-user"]; + parents: { + /** @example 7638417db6d59f3c431d3e1f261cc637155684cd */ + sha: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/commits/7638417db6d59f3c431d3e1f261cc637155684cd + */ + url: string; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/commit/7638417db6d59f3c431d3e1f261cc637155684cd + */ + html_url?: string; + }[]; + stats?: { + additions?: number; + deletions?: number; + total?: number; + }; + files?: components["schemas"]["diff-entry"][]; + }; + /** + * Branch With Protection + * 
@description Branch With Protection + */ + "branch-with-protection": { + name: string; + commit: components["schemas"]["commit"]; + _links: { + html: string; + /** Format: uri */ + self: string; + }; + protected: boolean; + protection: components["schemas"]["branch-protection"]; + /** Format: uri */ + protection_url: string; + /** @example "mas*" */ + pattern?: string; + /** @example 1 */ + required_approving_review_count?: number; + }; + /** + * Status Check Policy + * @description Status Check Policy + */ + "status-check-policy": { + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/branches/master/protection/required_status_checks + */ + url: string; + /** @example true */ + strict: boolean; + /** + * @example [ + * "continuous-integration/travis-ci" + * ] + */ + contexts: string[]; + checks: { + /** @example continuous-integration/travis-ci */ + context: string; + app_id: number | null; + }[]; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/branches/master/protection/required_status_checks/contexts + */ + contexts_url: string; + }; + /** + * Protected Branch + * @description Branch protections protect branches + */ + "protected-branch": { + /** Format: uri */ + url: string; + required_status_checks?: components["schemas"]["status-check-policy"]; + required_pull_request_reviews?: { + /** Format: uri */ + url: string; + dismiss_stale_reviews?: boolean; + require_code_owner_reviews?: boolean; + required_approving_review_count?: number; + dismissal_restrictions?: { + /** Format: uri */ + url: string; + /** Format: uri */ + users_url: string; + /** Format: uri */ + teams_url: string; + users: components["schemas"]["simple-user"][]; + teams: components["schemas"]["team"][]; + apps?: components["schemas"]["integration"][]; + }; + bypass_pull_request_allowances?: { + users: components["schemas"]["simple-user"][]; + teams: components["schemas"]["team"][]; + apps?: components["schemas"]["integration"][]; + }; + }; + required_signatures?: { + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/branches/master/protection/required_signatures + */ + url: string; + /** @example true */ + enabled: boolean; + }; + enforce_admins?: { + /** Format: uri */ + url: string; + enabled: boolean; + }; + required_linear_history?: { + enabled: boolean; + }; + allow_force_pushes?: { + enabled: boolean; + }; + allow_deletions?: { + enabled: boolean; + }; + restrictions?: components["schemas"]["branch-restriction-policy"]; + required_conversation_resolution?: { + enabled?: boolean; + }; + block_creations?: { + enabled: boolean; + }; + }; + /** + * Deployment + * @description A deployment created as the result of an Actions check run from a workflow that references an environment + */ + "deployment-simple": { + /** + * Format: uri + * @example https://api.github.com/repos/octocat/example/deployments/1 + */ + url: string; + /** + * @description Unique identifier of the deployment + * @example 42 + */ + id: number; + /** @example MDEwOkRlcGxveW1lbnQx */ + node_id: string; + /** + * @description Parameter to specify a task to execute + * @example deploy + */ + task: string; + /** @example staging */ + original_environment?: string; + /** + * @description Name for the target deployment environment. 
+ * @example production + */ + environment: string; + /** @example Deploy request from hubot */ + description: string | null; + /** + * Format: date-time + * @example 2012-07-20T01:19:13Z + */ + created_at: string; + /** + * Format: date-time + * @example 2012-07-20T01:19:13Z + */ + updated_at: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/example/deployments/1/statuses + */ + statuses_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/example + */ + repository_url: string; + /** + * @description Specifies if the given environment is will no longer exist at some point in the future. Default: false. + * @example true + */ + transient_environment?: boolean; + /** + * @description Specifies if the given environment is one that end-users directly interact with. Default: false. + * @example true + */ + production_environment?: boolean; + performed_via_github_app?: components["schemas"]["nullable-integration"]; + }; + /** + * CheckRun + * @description A check performed on the code of a given code change + */ + "check-run": { + /** + * @description The id of the check. + * @example 21 + */ + id: number; + /** + * @description The SHA of the commit that is being checked. + * @example 009b8a3a9ccbb128af87f9b1c0f4c62e8a304f6d + */ + head_sha: string; + /** @example MDg6Q2hlY2tSdW40 */ + node_id: string; + /** @example 42 */ + external_id: string | null; + /** @example https://api.github.com/repos/github/hello-world/check-runs/4 */ + url: string; + /** @example https://github.com/github/hello-world/runs/4 */ + html_url: string | null; + /** @example https://example.com */ + details_url: string | null; + /** + * @description The phase of the lifecycle that the check is currently in. + * @example queued + * @enum {string} + */ + status: "queued" | "in_progress" | "completed"; + /** + * @example neutral + * @enum {string|null} + */ + conclusion: + | ( + | "success" + | "failure" + | "neutral" + | "cancelled" + | "skipped" + | "timed_out" + | "action_required" + ) + | null; + /** + * Format: date-time + * @example 2018-05-04T01:14:52Z + */ + started_at: string | null; + /** + * Format: date-time + * @example 2018-05-04T01:14:52Z + */ + completed_at: string | null; + output: { + title: string | null; + summary: string | null; + text: string | null; + annotations_count: number; + /** Format: uri */ + annotations_url: string; + }; + /** + * @description The name of the check. + * @example test-coverage + */ + name: string; + check_suite: { + id: number; + } | null; + app: components["schemas"]["nullable-integration"]; + pull_requests: components["schemas"]["pull-request-minimal"][]; + deployment?: components["schemas"]["deployment-simple"]; + }; + /** + * Check Annotation + * @description Check Annotation + */ + "check-annotation": { + /** @example README.md */ + path: string; + /** @example 2 */ + start_line: number; + /** @example 2 */ + end_line: number; + /** @example 5 */ + start_column: number | null; + /** @example 10 */ + end_column: number | null; + /** @example warning */ + annotation_level: string | null; + /** @example Spell Checker */ + title: string | null; + /** @example Check your spelling for 'banaas'. */ + message: string | null; + /** @example Do you mean 'bananas' or 'banana'? 
*/ + raw_details: string | null; + blob_href: string; + }; + /** + * Simple Commit + * @description Simple Commit + */ + "simple-commit": { + id: string; + tree_id: string; + message: string; + /** Format: date-time */ + timestamp: string; + author: { + name: string; + email: string; + } | null; + committer: { + name: string; + email: string; + } | null; + }; + /** + * CheckSuite + * @description A suite of checks performed on the code of a given code change + */ + "check-suite": { + /** @example 5 */ + id: number; + /** @example MDEwOkNoZWNrU3VpdGU1 */ + node_id: string; + /** @example master */ + head_branch: string | null; + /** + * @description The SHA of the head commit that is being checked. + * @example 009b8a3a9ccbb128af87f9b1c0f4c62e8a304f6d + */ + head_sha: string; + /** + * @example completed + * @enum {string|null} + */ + status: ("queued" | "in_progress" | "completed") | null; + /** + * @example neutral + * @enum {string|null} + */ + conclusion: + | ( + | "success" + | "failure" + | "neutral" + | "cancelled" + | "skipped" + | "timed_out" + | "action_required" + ) + | null; + /** @example https://api.github.com/repos/github/hello-world/check-suites/5 */ + url: string | null; + /** @example 146e867f55c26428e5f9fade55a9bbf5e95a7912 */ + before: string | null; + /** @example d6fde92930d4715a2b49857d24b940956b26d2d3 */ + after: string | null; + pull_requests: components["schemas"]["pull-request-minimal"][] | null; + app: components["schemas"]["nullable-integration"]; + repository: components["schemas"]["minimal-repository"]; + /** Format: date-time */ + created_at: string | null; + /** Format: date-time */ + updated_at: string | null; + head_commit: components["schemas"]["simple-commit"]; + latest_check_runs_count: number; + check_runs_url: string; + rerequestable?: boolean; + runs_rerequestable?: boolean; + }; + /** + * Check Suite Preference + * @description Check suite configuration preferences for a repository. + */ + "check-suite-preference": { + preferences: { + auto_trigger_checks?: { + app_id: number; + setting: boolean; + }[]; + }; + repository: components["schemas"]["minimal-repository"]; + }; + "code-scanning-alert-rule-summary": { + /** @description A unique identifier for the rule used to detect the alert. */ + id?: string | null; + /** @description The name of the rule used to detect the alert. */ + name?: string; + /** @description A set of tags applicable for the rule. */ + tags?: string[] | null; + /** + * @description The severity of the alert. + * @enum {string|null} + */ + severity?: ("none" | "note" | "warning" | "error") | null; + /** @description A short description of the rule used to detect the alert. 
*/ + description?: string; + }; + "code-scanning-alert-items": { + number: components["schemas"]["alert-number"]; + created_at: components["schemas"]["alert-created-at"]; + updated_at?: components["schemas"]["alert-updated-at"]; + url: components["schemas"]["alert-url"]; + html_url: components["schemas"]["alert-html-url"]; + instances_url: components["schemas"]["alert-instances-url"]; + state: components["schemas"]["code-scanning-alert-state"]; + fixed_at?: components["schemas"]["code-scanning-alert-fixed-at"]; + dismissed_by: components["schemas"]["nullable-simple-user"]; + dismissed_at: components["schemas"]["code-scanning-alert-dismissed-at"]; + dismissed_reason: components["schemas"]["code-scanning-alert-dismissed-reason"]; + dismissed_comment?: components["schemas"]["code-scanning-alert-dismissed-comment"]; + rule: components["schemas"]["code-scanning-alert-rule-summary"]; + tool: components["schemas"]["code-scanning-analysis-tool"]; + most_recent_instance: components["schemas"]["code-scanning-alert-instance"]; + }; + "code-scanning-alert": { + number: components["schemas"]["alert-number"]; + created_at: components["schemas"]["alert-created-at"]; + updated_at?: components["schemas"]["alert-updated-at"]; + url: components["schemas"]["alert-url"]; + html_url: components["schemas"]["alert-html-url"]; + instances_url: components["schemas"]["alert-instances-url"]; + state: components["schemas"]["code-scanning-alert-state"]; + fixed_at?: components["schemas"]["code-scanning-alert-fixed-at"]; + dismissed_by: components["schemas"]["nullable-simple-user"]; + dismissed_at: components["schemas"]["code-scanning-alert-dismissed-at"]; + dismissed_reason: components["schemas"]["code-scanning-alert-dismissed-reason"]; + dismissed_comment?: components["schemas"]["code-scanning-alert-dismissed-comment"]; + rule: components["schemas"]["code-scanning-alert-rule"]; + tool: components["schemas"]["code-scanning-analysis-tool"]; + most_recent_instance: components["schemas"]["code-scanning-alert-instance"]; + }; + /** + * @description Sets the state of the code scanning alert. You must provide `dismissed_reason` when you set the state to `dismissed`. + * @enum {string} + */ + "code-scanning-alert-set-state": "open" | "dismissed"; + /** + * @description An identifier for the upload. + * @example 6c81cd8e-b078-4ac3-a3be-1dad7dbd0b53 + */ + "code-scanning-analysis-sarif-id": string; + /** @description The SHA of the commit to which the analysis you are uploading relates. */ + "code-scanning-analysis-commit-sha": string; + /** @description Identifies the variable values associated with the environment in which this analysis was performed. */ + "code-scanning-analysis-environment": string; + /** + * Format: date-time + * @description The time that the analysis was created in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + "code-scanning-analysis-created-at": string; + /** + * Format: uri + * @description The REST API URL of the analysis resource. 
+ */ + "code-scanning-analysis-url": string; + "code-scanning-analysis": { + ref: components["schemas"]["code-scanning-ref"]; + commit_sha: components["schemas"]["code-scanning-analysis-commit-sha"]; + analysis_key: components["schemas"]["code-scanning-analysis-analysis-key"]; + environment: components["schemas"]["code-scanning-analysis-environment"]; + category?: components["schemas"]["code-scanning-analysis-category"]; + /** @example error reading field xyz */ + error: string; + created_at: components["schemas"]["code-scanning-analysis-created-at"]; + /** @description The total number of results in the analysis. */ + results_count: number; + /** @description The total number of rules used in the analysis. */ + rules_count: number; + /** @description Unique identifier for this analysis. */ + id: number; + url: components["schemas"]["code-scanning-analysis-url"]; + sarif_id: components["schemas"]["code-scanning-analysis-sarif-id"]; + tool: components["schemas"]["code-scanning-analysis-tool"]; + deletable: boolean; + /** + * @description Warning generated when processing the analysis + * @example 123 results were ignored + */ + warning: string; + }; + /** + * Analysis deletion + * @description Successful deletion of a code scanning analysis + */ + "code-scanning-analysis-deletion": { + /** + * Format: uri + * @description Next deletable analysis in chain, without last analysis deletion confirmation + */ + next_analysis_url: string | null; + /** + * Format: uri + * @description Next deletable analysis in chain, with last analysis deletion confirmation + */ + confirm_delete_url: string | null; + }; + /** + * CodeQL Database + * @description A CodeQL database. + */ + "code-scanning-codeql-database": { + /** @description The ID of the CodeQL database. */ + id: number; + /** @description The name of the CodeQL database. */ + name: string; + /** @description The language of the CodeQL database. */ + language: string; + uploader: components["schemas"]["simple-user"]; + /** @description The MIME type of the CodeQL database file. */ + content_type: string; + /** @description The size of the CodeQL database file in bytes. */ + size: number; + /** + * Format: date-time + * @description The date and time at which the CodeQL database was created, in ISO 8601 format':' YYYY-MM-DDTHH:MM:SSZ. + */ + created_at: string; + /** + * Format: date-time + * @description The date and time at which the CodeQL database was last updated, in ISO 8601 format':' YYYY-MM-DDTHH:MM:SSZ. + */ + updated_at: string; + /** + * Format: uri + * @description The URL at which to download the CodeQL database. The `Accept` header must be set to the value of the `content_type` property. + */ + url: string; + }; + /** @description A Base64 string representing the SARIF file to upload. You must first compress your SARIF file using [`gzip`](http://www.gnu.org/software/gzip/manual/gzip.html) and then translate the contents of the file into a Base64 encoding string. For more information, see "[SARIF support for code scanning](https://docs.github.com/code-security/secure-coding/sarif-support-for-code-scanning)." */ + "code-scanning-analysis-sarif-file": string; + "code-scanning-sarifs-receipt": { + id?: components["schemas"]["code-scanning-analysis-sarif-id"]; + /** + * Format: uri + * @description The REST API URL for checking the status of the upload. 
+       */
+      url?: string;
+    };
+    "code-scanning-sarifs-status": {
+      /**
+       * @description `pending` files have not yet been processed, while `complete` means results from the SARIF have been stored. `failed` files have either not been processed at all, or could only be partially processed.
+       * @enum {string}
+       */
+      processing_status?: "pending" | "complete" | "failed";
+      /**
+       * Format: uri
+       * @description The REST API URL for getting the analyses associated with the upload.
+       */
+      analyses_url?: string | null;
+      /** @description Any errors that occurred during processing of the delivery. */
+      errors?: string[] | null;
+    };
+    /**
+     * CODEOWNERS errors
+     * @description A list of errors found in a repo's CODEOWNERS file
+     */
+    "codeowners-errors": {
+      errors: {
+        /**
+         * @description The line number where this error occurs.
+         * @example 7
+         */
+        line: number;
+        /**
+         * @description The column number where this error occurs.
+         * @example 3
+         */
+        column: number;
+        /**
+         * @description The contents of the line where the error occurs.
+         * @example * user
+         */
+        source?: string;
+        /**
+         * @description The type of error.
+         * @example Invalid owner
+         */
+        kind: string;
+        /**
+         * @description Suggested action to fix the error. This will usually be `null`, but is provided for some common errors.
+         * @example The pattern `/` will never match anything, did you mean `*` instead?
+         */
+        suggestion?: string | null;
+        /**
+         * @description A human-readable description of the error, combining information from multiple fields, laid out for display in a monospaced typeface (for example, a command-line setting).
+         * @example Invalid owner on line 7:
+         *
+         * * user
+         * ^
+         */
+        message: string;
+        /**
+         * @description The path of the file where the error occurred.
+         * @example .github/CODEOWNERS
+         */
+        path: string;
+      }[];
+    };
+    /**
+     * Codespace machine
+     * @description A description of the machine powering a codespace.
+     */
+    "codespace-machine": {
+      /**
+       * @description The name of the machine.
+       * @example standardLinux
+       */
+      name: string;
+      /**
+       * @description The display name of the machine includes cores, memory, and storage.
+       * @example 4 cores, 8 GB RAM, 64 GB storage
+       */
+      display_name: string;
+      /**
+       * @description The operating system of the machine.
+       * @example linux
+       */
+      operating_system: string;
+      /**
+       * @description How much storage is available to the codespace.
+       * @example 68719476736
+       */
+      storage_in_bytes: number;
+      /**
+       * @description How much memory is available to the codespace.
+       * @example 8589934592
+       */
+      memory_in_bytes: number;
+      /**
+       * @description How many cores are available to the codespace.
+       * @example 4
+       */
+      cpus: number;
+      /**
+       * @description Whether a prebuild is currently available when creating a codespace for this machine and repository. If a branch was not specified as a ref, the default branch will be assumed. Value will be "null" if prebuilds are not supported or prebuild availability could not be determined. Value will be "none" if no prebuild is available. Latest values "ready" and "in_progress" indicate the prebuild availability status.
+       * @example ready
+       * @enum {string|null}
+       */
+      prebuild_availability: ("none" | "ready" | "in_progress") | null;
+    };
+    /**
+     * Codespaces Secret
+     * @description Set repository secrets for GitHub Codespaces.
+     */
+    "repo-codespaces-secret": {
+      /**
+       * @description The name of the secret.
+ * @example SECRET_TOKEN + */ + name: string; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + }; + /** + * Collaborator + * @description Collaborator + */ + collaborator: { + /** @example octocat */ + login: string; + /** @example 1 */ + id: number; + email?: string | null; + name?: string | null; + /** @example MDQ6VXNlcjE= */ + node_id: string; + /** + * Format: uri + * @example https://github.com/images/error/octocat_happy.gif + */ + avatar_url: string; + /** @example 41d064eb2195891e12d0413f63227ea7 */ + gravatar_id: string | null; + /** + * Format: uri + * @example https://api.github.com/users/octocat + */ + url: string; + /** + * Format: uri + * @example https://github.com/octocat + */ + html_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/followers + */ + followers_url: string; + /** @example https://api.github.com/users/octocat/following{/other_user} */ + following_url: string; + /** @example https://api.github.com/users/octocat/gists{/gist_id} */ + gists_url: string; + /** @example https://api.github.com/users/octocat/starred{/owner}{/repo} */ + starred_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/subscriptions + */ + subscriptions_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/orgs + */ + organizations_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/repos + */ + repos_url: string; + /** @example https://api.github.com/users/octocat/events{/privacy} */ + events_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/received_events + */ + received_events_url: string; + /** @example User */ + type: string; + site_admin: boolean; + permissions?: { + pull: boolean; + triage?: boolean; + push: boolean; + maintain?: boolean; + admin: boolean; + }; + /** @example admin */ + role_name: string; + }; + /** + * Repository Invitation + * @description Repository invitations let you manage who you collaborate with. + */ + "repository-invitation": { + /** + * @description Unique identifier of the repository invitation. + * @example 42 + */ + id: number; + repository: components["schemas"]["minimal-repository"]; + invitee: components["schemas"]["nullable-simple-user"]; + inviter: components["schemas"]["nullable-simple-user"]; + /** + * @description The permission associated with the invitation. 
+ * @example read + * @enum {string} + */ + permissions: "read" | "write" | "admin" | "triage" | "maintain"; + /** + * Format: date-time + * @example 2016-06-13T14:52:50-05:00 + */ + created_at: string; + /** @description Whether or not the invitation has expired */ + expired?: boolean; + /** + * @description URL for the repository invitation + * @example https://api.github.com/user/repository-invitations/1 + */ + url: string; + /** @example https://github.com/octocat/Hello-World/invitations */ + html_url: string; + node_id: string; + }; + /** + * Collaborator + * @description Collaborator + */ + "nullable-collaborator": { + /** @example octocat */ + login: string; + /** @example 1 */ + id: number; + email?: string | null; + name?: string | null; + /** @example MDQ6VXNlcjE= */ + node_id: string; + /** + * Format: uri + * @example https://github.com/images/error/octocat_happy.gif + */ + avatar_url: string; + /** @example 41d064eb2195891e12d0413f63227ea7 */ + gravatar_id: string | null; + /** + * Format: uri + * @example https://api.github.com/users/octocat + */ + url: string; + /** + * Format: uri + * @example https://github.com/octocat + */ + html_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/followers + */ + followers_url: string; + /** @example https://api.github.com/users/octocat/following{/other_user} */ + following_url: string; + /** @example https://api.github.com/users/octocat/gists{/gist_id} */ + gists_url: string; + /** @example https://api.github.com/users/octocat/starred{/owner}{/repo} */ + starred_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/subscriptions + */ + subscriptions_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/orgs + */ + organizations_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/repos + */ + repos_url: string; + /** @example https://api.github.com/users/octocat/events{/privacy} */ + events_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/received_events + */ + received_events_url: string; + /** @example User */ + type: string; + site_admin: boolean; + permissions?: { + pull: boolean; + triage?: boolean; + push: boolean; + maintain?: boolean; + admin: boolean; + }; + /** @example admin */ + role_name: string; + } | null; + /** + * Repository Collaborator Permission + * @description Repository Collaborator Permission + */ + "repository-collaborator-permission": { + permission: string; + /** @example admin */ + role_name: string; + user: components["schemas"]["nullable-collaborator"]; + }; + /** + * Commit Comment + * @description Commit Comment + */ + "commit-comment": { + /** Format: uri */ + html_url: string; + /** Format: uri */ + url: string; + id: number; + node_id: string; + body: string; + path: string | null; + position: number | null; + line: number | null; + commit_id: string; + user: components["schemas"]["nullable-simple-user"]; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + author_association: components["schemas"]["author-association"]; + reactions?: components["schemas"]["reaction-rollup"]; + }; + /** + * Branch Short + * @description Branch Short + */ + "branch-short": { + name: string; + commit: { + sha: string; + url: string; + }; + protected: boolean; + }; + /** + * Link + * @description Hypermedia Link + */ + link: { + href: string; + }; + /** + * Auto merge + * @description The status of auto merging a 
pull request. + */ + "auto-merge": { + enabled_by: components["schemas"]["simple-user"]; + /** + * @description The merge method to use. + * @enum {string} + */ + merge_method: "merge" | "squash" | "rebase"; + /** @description Title for the merge commit message. */ + commit_title: string; + /** @description Commit message for the merge commit. */ + commit_message: string; + } | null; + /** + * Pull Request Simple + * @description Pull Request Simple + */ + "pull-request-simple": { + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/pulls/1347 + */ + url: string; + /** @example 1 */ + id: number; + /** @example MDExOlB1bGxSZXF1ZXN0MQ== */ + node_id: string; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/pull/1347 + */ + html_url: string; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/pull/1347.diff + */ + diff_url: string; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/pull/1347.patch + */ + patch_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/issues/1347 + */ + issue_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/pulls/1347/commits + */ + commits_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/pulls/1347/comments + */ + review_comments_url: string; + /** @example https://api.github.com/repos/octocat/Hello-World/pulls/comments{/number} */ + review_comment_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/issues/1347/comments + */ + comments_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/statuses/6dcb09b5b57875f334f61aebed695e2e4193db5e + */ + statuses_url: string; + /** @example 1347 */ + number: number; + /** @example open */ + state: string; + /** @example true */ + locked: boolean; + /** @example new-feature */ + title: string; + user: components["schemas"]["nullable-simple-user"]; + /** @example Please pull these awesome changes */ + body: string | null; + labels: { + /** Format: int64 */ + id: number; + node_id: string; + url: string; + name: string; + description: string; + color: string; + default: boolean; + }[]; + milestone: components["schemas"]["nullable-milestone"]; + /** @example too heated */ + active_lock_reason?: string | null; + /** + * Format: date-time + * @example 2011-01-26T19:01:12Z + */ + created_at: string; + /** + * Format: date-time + * @example 2011-01-26T19:01:12Z + */ + updated_at: string; + /** + * Format: date-time + * @example 2011-01-26T19:01:12Z + */ + closed_at: string | null; + /** + * Format: date-time + * @example 2011-01-26T19:01:12Z + */ + merged_at: string | null; + /** @example e5bd3914e2e596debea16f433f57875b5b90bcd6 */ + merge_commit_sha: string | null; + assignee: components["schemas"]["nullable-simple-user"]; + assignees?: components["schemas"]["simple-user"][] | null; + requested_reviewers?: components["schemas"]["simple-user"][] | null; + requested_teams?: components["schemas"]["team"][] | null; + head: { + label: string; + ref: string; + repo: components["schemas"]["repository"]; + sha: string; + user: components["schemas"]["nullable-simple-user"]; + }; + base: { + label: string; + ref: string; + repo: components["schemas"]["repository"]; + sha: string; + user: components["schemas"]["nullable-simple-user"]; + }; + _links: { + comments: components["schemas"]["link"]; + commits: 
components["schemas"]["link"]; + statuses: components["schemas"]["link"]; + html: components["schemas"]["link"]; + issue: components["schemas"]["link"]; + review_comments: components["schemas"]["link"]; + review_comment: components["schemas"]["link"]; + self: components["schemas"]["link"]; + }; + author_association: components["schemas"]["author-association"]; + auto_merge: components["schemas"]["auto-merge"]; + /** + * @description Indicates whether or not the pull request is a draft. + * @example false + */ + draft?: boolean; + }; + /** Simple Commit Status */ + "simple-commit-status": { + description: string | null; + id: number; + node_id: string; + state: string; + context: string; + /** Format: uri */ + target_url: string | null; + required?: boolean | null; + /** Format: uri */ + avatar_url: string | null; + /** Format: uri */ + url: string; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + }; + /** + * Combined Commit Status + * @description Combined Commit Status + */ + "combined-commit-status": { + state: string; + statuses: components["schemas"]["simple-commit-status"][]; + sha: string; + total_count: number; + repository: components["schemas"]["minimal-repository"]; + /** Format: uri */ + commit_url: string; + /** Format: uri */ + url: string; + }; + /** + * Status + * @description The status of a commit. + */ + status: { + url: string; + avatar_url: string | null; + id: number; + node_id: string; + state: string; + description: string | null; + target_url: string | null; + context: string; + created_at: string; + updated_at: string; + creator: components["schemas"]["nullable-simple-user"]; + }; + /** + * Code Of Conduct Simple + * @description Code of Conduct Simple + */ + "nullable-code-of-conduct-simple": { + /** + * Format: uri + * @example https://api.github.com/repos/github/docs/community/code_of_conduct + */ + url: string; + /** @example citizen_code_of_conduct */ + key: string; + /** @example Citizen Code of Conduct */ + name: string; + /** + * Format: uri + * @example https://github.com/github/docs/blob/main/CODE_OF_CONDUCT.md + */ + html_url: string | null; + } | null; + /** Community Health File */ + "nullable-community-health-file": { + /** Format: uri */ + url: string; + /** Format: uri */ + html_url: string; + } | null; + /** + * Community Profile + * @description Community Profile + */ + "community-profile": { + /** @example 100 */ + health_percentage: number; + /** @example My first repository on GitHub! 
*/ + description: string | null; + /** @example example.com */ + documentation: string | null; + files: { + code_of_conduct: components["schemas"]["nullable-code-of-conduct-simple"]; + code_of_conduct_file: components["schemas"]["nullable-community-health-file"]; + license: components["schemas"]["nullable-license-simple"]; + contributing: components["schemas"]["nullable-community-health-file"]; + readme: components["schemas"]["nullable-community-health-file"]; + issue_template: components["schemas"]["nullable-community-health-file"]; + pull_request_template: components["schemas"]["nullable-community-health-file"]; + }; + /** + * Format: date-time + * @example 2017-02-28T19:09:29Z + */ + updated_at: string | null; + /** @example true */ + content_reports_enabled?: boolean; + }; + /** + * Commit Comparison + * @description Commit Comparison + */ + "commit-comparison": { + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/compare/master...topic + */ + url: string; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/compare/master...topic + */ + html_url: string; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/compare/octocat:bbcd538c8e72b8c175046e27cc8f907076331401...octocat:0328041d1152db8ae77652d1618a02e57f745f17 + */ + permalink_url: string; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/compare/master...topic.diff + */ + diff_url: string; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/compare/master...topic.patch + */ + patch_url: string; + base_commit: components["schemas"]["commit"]; + merge_base_commit: components["schemas"]["commit"]; + /** + * @example ahead + * @enum {string} + */ + status: "diverged" | "ahead" | "behind" | "identical"; + /** @example 4 */ + ahead_by: number; + /** @example 5 */ + behind_by: number; + /** @example 6 */ + total_commits: number; + commits: components["schemas"]["commit"][]; + files?: components["schemas"]["diff-entry"][]; + }; + /** + * Content Tree + * @description Content Tree + */ + "content-tree": { + type: string; + size: number; + name: string; + path: string; + sha: string; + /** Format: uri */ + url: string; + /** Format: uri */ + git_url: string | null; + /** Format: uri */ + html_url: string | null; + /** Format: uri */ + download_url: string | null; + entries?: { + type: string; + size: number; + name: string; + path: string; + content?: string; + sha: string; + /** Format: uri */ + url: string; + /** Format: uri */ + git_url: string | null; + /** Format: uri */ + html_url: string | null; + /** Format: uri */ + download_url: string | null; + _links: { + /** Format: uri */ + git: string | null; + /** Format: uri */ + html: string | null; + /** Format: uri */ + self: string; + }; + }[]; + _links: { + /** Format: uri */ + git: string | null; + /** Format: uri */ + html: string | null; + /** Format: uri */ + self: string; + }; + } & { + content: unknown; + encoding: unknown; + }; + /** + * Content Directory + * @description A list of directory items + */ + "content-directory": { + /** @enum {string} */ + type: "dir" | "file" | "submodule" | "symlink"; + size: number; + name: string; + path: string; + content?: string; + sha: string; + /** Format: uri */ + url: string; + /** Format: uri */ + git_url: string | null; + /** Format: uri */ + html_url: string | null; + /** Format: uri */ + download_url: string | null; + _links: { + /** Format: uri */ + git: string | null; + /** Format: uri */ + html: string | null; + 
/** Format: uri */ + self: string; + }; + }[]; + /** + * Content File + * @description Content File + */ + "content-file": { + /** @enum {string} */ + type: "file"; + encoding: string; + size: number; + name: string; + path: string; + content: string; + sha: string; + /** Format: uri */ + url: string; + /** Format: uri */ + git_url: string | null; + /** Format: uri */ + html_url: string | null; + /** Format: uri */ + download_url: string | null; + _links: { + /** Format: uri */ + git: string | null; + /** Format: uri */ + html: string | null; + /** Format: uri */ + self: string; + }; + /** @example "actual/actual.md" */ + target?: string; + /** @example "git://example.com/defunkt/dotjs.git" */ + submodule_git_url?: string; + }; + /** + * Symlink Content + * @description An object describing a symlink + */ + "content-symlink": { + /** @enum {string} */ + type: "symlink"; + target: string; + size: number; + name: string; + path: string; + sha: string; + /** Format: uri */ + url: string; + /** Format: uri */ + git_url: string | null; + /** Format: uri */ + html_url: string | null; + /** Format: uri */ + download_url: string | null; + _links: { + /** Format: uri */ + git: string | null; + /** Format: uri */ + html: string | null; + /** Format: uri */ + self: string; + }; + }; + /** + * Submodule Content + * @description An object describing a submodule + */ + "content-submodule": { + /** @enum {string} */ + type: "submodule"; + /** Format: uri */ + submodule_git_url: string; + size: number; + name: string; + path: string; + sha: string; + /** Format: uri */ + url: string; + /** Format: uri */ + git_url: string | null; + /** Format: uri */ + html_url: string | null; + /** Format: uri */ + download_url: string | null; + _links: { + /** Format: uri */ + git: string | null; + /** Format: uri */ + html: string | null; + /** Format: uri */ + self: string; + }; + }; + /** + * File Commit + * @description File Commit + */ + "file-commit": { + content: { + name?: string; + path?: string; + sha?: string; + size?: number; + url?: string; + html_url?: string; + git_url?: string; + download_url?: string; + type?: string; + _links?: { + self?: string; + git?: string; + html?: string; + }; + } | null; + commit: { + sha?: string; + node_id?: string; + url?: string; + html_url?: string; + author?: { + date?: string; + name?: string; + email?: string; + }; + committer?: { + date?: string; + name?: string; + email?: string; + }; + message?: string; + tree?: { + url?: string; + sha?: string; + }; + parents?: { + url?: string; + html_url?: string; + sha?: string; + }[]; + verification?: { + verified?: boolean; + reason?: string; + signature?: string | null; + payload?: string | null; + }; + }; + }; + /** + * Contributor + * @description Contributor + */ + contributor: { + login?: string; + id?: number; + node_id?: string; + /** Format: uri */ + avatar_url?: string; + gravatar_id?: string | null; + /** Format: uri */ + url?: string; + /** Format: uri */ + html_url?: string; + /** Format: uri */ + followers_url?: string; + following_url?: string; + gists_url?: string; + starred_url?: string; + /** Format: uri */ + subscriptions_url?: string; + /** Format: uri */ + organizations_url?: string; + /** Format: uri */ + repos_url?: string; + events_url?: string; + /** Format: uri */ + received_events_url?: string; + type: string; + site_admin?: boolean; + contributions: number; + email?: string; + name?: string; + }; + /** + * @description The scope of the vulnerable dependency. 
+ * @enum {string|null} + */ + "dependabot-alert-scope": ("development" | "runtime") | null; + /** + * @description State of a Dependabot alert. + * @enum {string} + */ + "dependabot-alert-state": "dismissed" | "fixed" | "open"; + /** @description The details of the vulnerable package. */ + "dependabot-alert-package": { + ecosystem?: string; + name?: string; + }; + "dependabot-alert-dependency": { + package?: components["schemas"]["dependabot-alert-package"]; + /** @description The path to the manifest filename. */ + manifest_path?: string; + scope?: components["schemas"]["dependabot-alert-scope"]; + }; + /** @description The details of the security advisory, including summary, description, and severity. */ + "dependabot-alert-security-advisory": { + ghsa_id: string; + cve_id?: string | null; + summary: string; + description: string; + /** @enum {string} */ + severity: "low" | "medium" | "high" | "critical"; + identifiers: { + /** @enum {string} */ + type: "GHSA" | "CVE"; + value: string; + }[]; + references: { + /** Format: uri */ + url: string; + }[]; + cvss: { + score: number; + vector_string: string | null; + }; + cwes: { + cwe_id: string; + name: string; + }[]; + /** Format: date-time */ + published_at: string; + /** Format: date-time */ + updated_at: string; + vulnerabilities: { + first_patched_version: { + identifier: string; + } | null; + package: { + ecosystem: string; + name: string; + }; + severity: string; + vulnerable_version_range: string; + }[]; + /** Format: date-time */ + withdrawn_at: string | null; + }; + /** @description The details of the vulnerability of a security advisory. */ + "dependabot-alert-security-vulnerability": { + package?: components["schemas"]["dependabot-alert-package"]; + severity?: string; + vulnerable_version_range?: string; + first_patched_version?: { + identifier?: string; + }; + }; + /** + * Format: date-time + * @description The time that the alert was dismissed in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + "dependabot-alert-dismissed_at": string | null; + /** + * @description **Required when the `state` is `dismissed`.** The reason for dismissing the Dependabot alert. + * @enum {string|null} + */ + "dependabot-alert-dismissed-reason": + | ( + | "fix_started" + | "inaccurate" + | "no_bandwidth" + | "not_used" + | "tolerable_risk" + ) + | null; + /** @description An optional comment associated with the alert's dismissal. The maximum size is 280 characters. */ + "dependabot-alert-dismissed-comment": string | null; + /** + * Format: date-time + * @description The time that the alert was no longer detected and was considered fixed in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. 
+ */ + "dependabot-alert-fixed-at": string | null; + "dependabot-alert": { + number: components["schemas"]["alert-number"]; + state: components["schemas"]["dependabot-alert-state"]; + dependency: components["schemas"]["dependabot-alert-dependency"]; + security_advisory: components["schemas"]["dependabot-alert-security-advisory"]; + security_vulnerability: components["schemas"]["dependabot-alert-security-vulnerability"]; + created_at: components["schemas"]["alert-created-at"]; + updated_at: components["schemas"]["alert-updated-at"]; + url: components["schemas"]["alert-url"]; + html_url: components["schemas"]["alert-html-url"]; + dismissed_at: components["schemas"]["dependabot-alert-dismissed_at"]; + dismissed_by: components["schemas"]["nullable-simple-user"]; + dismissed_reason: components["schemas"]["dependabot-alert-dismissed-reason"]; + dismissed_comment: components["schemas"]["dependabot-alert-dismissed-comment"]; + fixed_at: components["schemas"]["dependabot-alert-fixed-at"]; + }; + /** + * @description Sets the status of the dependabot alert. You must provide `dismissed_reason` when you set the state to `dismissed`. + * @enum {string} + */ + "dependabot-alert-set-state": "dismissed" | "open"; + /** + * Dependabot Secret + * @description Set secrets for Dependabot. + */ + "dependabot-secret": { + /** + * @description The name of the secret. + * @example MY_ARTIFACTORY_PASSWORD + */ + name: string; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + }; + /** + * Dependency Graph Diff + * @description A diff of the dependencies between two commits. + */ + "dependency-graph-diff": { + /** @enum {string} */ + change_type: "added" | "removed"; + /** @example path/to/package-lock.json */ + manifest: string; + /** @example npm */ + ecosystem: string; + /** @example @actions/core */ + name: string; + /** @example 1.0.0 */ + version: string; + /** @example pkg:/npm/%40actions/core@1.1.0 */ + package_url: string | null; + /** @example MIT */ + license: string | null; + /** @example https://github.com/github/actions */ + source_repository_url: string | null; + vulnerabilities: { + /** @example critical */ + severity: string; + /** @example GHSA-rf4j-j272-fj86 */ + advisory_ghsa_id: string; + /** @example A summary of the advisory. */ + advisory_summary: string; + /** @example https://github.com/advisories/GHSA-rf4j-j272-fj86 */ + advisory_url: string; + }[]; + /** + * @description Where the dependency is utilized. `development` means that the dependency is only utilized in the development environment. `runtime` means that the dependency is utilized at runtime and in the development environment. + * @enum {string} + */ + scope: "unknown" | "runtime" | "development"; + }[]; + /** + * metadata + * @description User-defined metadata to store domain-specific information limited to 8 keys with scalar values. + */ + metadata: { + [key: string]: Partial & Partial & Partial; + }; + dependency: { + /** + * @description Package-url (PURL) of dependency. See https://github.com/package-url/purl-spec for more details. + * @example pkg:/npm/%40actions/http-client@1.0.11 + */ + package_url?: string; + metadata?: components["schemas"]["metadata"]; + /** + * @description A notation of whether a dependency is requested directly by this manifest or is a dependency of another dependency. 
+ * @example direct + * @enum {string} + */ + relationship?: "direct" | "indirect"; + /** + * @description A notation of whether the dependency is required for the primary build artifact (runtime) or is only used for development. Future versions of this specification may allow for more granular scopes. + * @example runtime + * @enum {string} + */ + scope?: "runtime" | "development"; + /** + * @description Array of package-url (PURLs) of direct child dependencies. + * @example @actions/http-client + */ + dependencies?: string[]; + }; + manifest: { + /** + * @description The name of the manifest. + * @example package-lock.json + */ + name: string; + file?: { + /** + * @description The path of the manifest file relative to the root of the Git repository. + * @example /src/build/package-lock.json + */ + source_location?: string; + }; + metadata?: components["schemas"]["metadata"]; + /** @description A collection of resolved package dependencies. */ + resolved?: { [key: string]: components["schemas"]["dependency"] }; + }; + /** + * snapshot + * @description Create a new snapshot of a repository's dependencies. + */ + snapshot: { + /** @description The version of the repository snapshot submission. */ + version: number; + job: { + /** + * @description The external ID of the job. + * @example 5622a2b0-63f6-4732-8c34-a1ab27e102a11 + */ + id: string; + /** + * @description Correlator provides a key that is used to group snapshots submitted over time. Only the "latest" submitted snapshot for a given combination of `job.correlator` and `detector.name` will be considered when calculating a repository's current dependencies. Correlator should be as unique as it takes to distinguish all detection runs for a given "wave" of CI workflow you run. If you're using GitHub Actions, a good default value for this could be the environment variables GITHUB_WORKFLOW and GITHUB_JOB concatenated together. If you're using a build matrix, then you'll also need to add additional key(s) to distinguish between each submission inside a matrix variation. + * @example yourworkflowname_yourjobname + */ + correlator: string; + /** + * @description The url for the job. + * @example http://example.com/build + */ + html_url?: string; + }; + /** + * @description The commit SHA associated with this dependency snapshot. + * @example ddc951f4b1293222421f2c8df679786153acf689 + */ + sha: string; + /** + * @description The repository branch that triggered this snapshot. + * @example refs/heads/main + */ + ref: string; + /** @description A description of the detector used. */ + detector: { + /** + * @description The name of the detector used. + * @example docker buildtime detector + */ + name: string; + /** + * @description The version of the detector used. + * @example 1.0.0 + */ + version: string; + /** + * @description The url of the detector used. + * @example http://example.com/docker-buildtimer-detector + */ + url: string; + }; + metadata?: components["schemas"]["metadata"]; + /** @description A collection of package manifests, which are a collection of related dependencies declared in a file or representing a logical group of dependencies. */ + manifests?: { [key: string]: components["schemas"]["manifest"] }; + /** + * Format: date-time + * @description The time at which the snapshot was scanned. + * @example 2020-06-13T14:52:50-05:00 + */ + scanned: string; + }; + /** + * Deployment Status + * @description The status of a deployment. 
+ */ + "deployment-status": { + /** + * Format: uri + * @example https://api.github.com/repos/octocat/example/deployments/42/statuses/1 + */ + url: string; + /** @example 1 */ + id: number; + /** @example MDE2OkRlcGxveW1lbnRTdGF0dXMx */ + node_id: string; + /** + * @description The state of the status. + * @example success + * @enum {string} + */ + state: + | "error" + | "failure" + | "inactive" + | "pending" + | "success" + | "queued" + | "in_progress"; + creator: components["schemas"]["nullable-simple-user"]; + /** + * @description A short description of the status. + * @default + * @example Deployment finished successfully. + */ + description: string; + /** + * @description The environment of the deployment that the status is for. + * @default + * @example production + */ + environment?: string; + /** + * Format: uri + * @description Deprecated: the URL to associate with this status. + * @default + * @example https://example.com/deployment/42/output + */ + target_url: string; + /** + * Format: date-time + * @example 2012-07-20T01:19:13Z + */ + created_at: string; + /** + * Format: date-time + * @example 2012-07-20T01:19:13Z + */ + updated_at: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/example/deployments/42 + */ + deployment_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/example + */ + repository_url: string; + /** + * Format: uri + * @description The URL for accessing your environment. + * @default + * @example https://staging.example.com/ + */ + environment_url?: string; + /** + * Format: uri + * @description The URL to associate with this status. + * @default + * @example https://example.com/deployment/42/output + */ + log_url?: string; + performed_via_github_app?: components["schemas"]["nullable-integration"]; + }; + /** + * @description The amount of time to delay a job after the job is initially triggered. The time (in minutes) must be an integer between 0 and 43,200 (30 days). + * @example 30 + */ + "wait-timer": number; + /** @description The type of deployment branch policy for this environment. To allow all branches to deploy, set to `null`. */ + "deployment-branch-policy-settings": { + /** @description Whether only branches with branch protection rules can deploy to this environment. If `protected_branches` is `true`, `custom_branch_policies` must be `false`; if `protected_branches` is `false`, `custom_branch_policies` must be `true`. */ + protected_branches: boolean; + /** @description Whether only branches that match the specified name patterns can deploy to this environment. If `custom_branch_policies` is `true`, `protected_branches` must be `false`; if `custom_branch_policies` is `false`, `protected_branches` must be `true`. */ + custom_branch_policies: boolean; + } | null; + /** + * Environment + * @description Details of a deployment environment + */ + environment: { + /** + * @description The id of the environment. + * @example 56780428 + */ + id: number; + /** @example MDExOkVudmlyb25tZW50NTY3ODA0Mjg= */ + node_id: string; + /** + * @description The name of the environment. + * @example staging + */ + name: string; + /** @example https://api.github.com/repos/github/hello-world/environments/staging */ + url: string; + /** @example https://github.com/github/hello-world/deployments/activity_log?environments_filter=staging */ + html_url: string; + /** + * Format: date-time + * @description The time that the environment was created, in ISO 8601 format. 
+ * @example 2020-11-23T22:00:40Z + */ + created_at: string; + /** + * Format: date-time + * @description The time that the environment was last updated, in ISO 8601 format. + * @example 2020-11-23T22:00:40Z + */ + updated_at: string; + protection_rules?: (Partial<{ + /** @example 3515 */ + id: number; + /** @example MDQ6R2F0ZTM1MTU= */ + node_id: string; + /** @example wait_timer */ + type: string; + wait_timer?: components["schemas"]["wait-timer"]; + }> & + Partial<{ + /** @example 3755 */ + id: number; + /** @example MDQ6R2F0ZTM3NTU= */ + node_id: string; + /** @example required_reviewers */ + type: string; + /** @description The people or teams that may approve jobs that reference the environment. You can list up to six users or teams as reviewers. The reviewers must have at least read access to the repository. Only one of the required reviewers needs to approve the job for it to proceed. */ + reviewers?: { + type?: components["schemas"]["deployment-reviewer-type"]; + reviewer?: Partial & + Partial; + }[]; + }> & + Partial<{ + /** @example 3515 */ + id: number; + /** @example MDQ6R2F0ZTM1MTU= */ + node_id: string; + /** @example branch_policy */ + type: string; + }>)[]; + deployment_branch_policy?: components["schemas"]["deployment-branch-policy-settings"]; + }; + /** + * Deployment branch policy + * @description Details of a deployment branch policy. + */ + "deployment-branch-policy": { + /** + * @description The unique identifier of the branch policy. + * @example 361471 + */ + id?: number; + /** @example MDE2OkdhdGVCcmFuY2hQb2xpY3kzNjE0NzE= */ + node_id?: string; + /** + * @description The name pattern that branches must match in order to deploy to the environment. + * @example release/* + */ + name?: string; + }; + /** Deployment branch policy name pattern */ + "deployment-branch-policy-name-pattern": { + /** + * @description The name pattern that branches must match in order to deploy to the environment. + * + * Wildcard characters will not match `/`. For example, to match branches that begin with `release/` and contain an additional single slash, use `release/*\/*`. + * For more information about pattern matching syntax, see the [Ruby File.fnmatch documentation](https://ruby-doc.org/core-2.5.1/File.html#method-c-fnmatch). 
+ * @example release/* + */ + name: string; + }; + /** + * Short Blob + * @description Short Blob + */ + "short-blob": { + url: string; + sha: string; + }; + /** + * Blob + * @description Blob + */ + blob: { + content: string; + encoding: string; + /** Format: uri */ + url: string; + sha: string; + size: number | null; + node_id: string; + highlighted_content?: string; + }; + /** + * Git Commit + * @description Low-level Git commit operations within a repository + */ + "git-commit": { + /** + * @description SHA for the commit + * @example 7638417db6d59f3c431d3e1f261cc637155684cd + */ + sha: string; + node_id: string; + /** Format: uri */ + url: string; + /** @description Identifying information for the git-user */ + author: { + /** + * Format: date-time + * @description Timestamp of the commit + * @example 2014-08-09T08:02:04+12:00 + */ + date: string; + /** + * @description Git email address of the user + * @example monalisa.octocat@example.com + */ + email: string; + /** + * @description Name of the git user + * @example Monalisa Octocat + */ + name: string; + }; + /** @description Identifying information for the git-user */ + committer: { + /** + * Format: date-time + * @description Timestamp of the commit + * @example 2014-08-09T08:02:04+12:00 + */ + date: string; + /** + * @description Git email address of the user + * @example monalisa.octocat@example.com + */ + email: string; + /** + * @description Name of the git user + * @example Monalisa Octocat + */ + name: string; + }; + /** + * @description Message describing the purpose of the commit + * @example Fix #42 + */ + message: string; + tree: { + /** + * @description SHA for the commit + * @example 7638417db6d59f3c431d3e1f261cc637155684cd + */ + sha: string; + /** Format: uri */ + url: string; + }; + parents: { + /** + * @description SHA for the commit + * @example 7638417db6d59f3c431d3e1f261cc637155684cd + */ + sha: string; + /** Format: uri */ + url: string; + /** Format: uri */ + html_url: string; + }[]; + verification: { + verified: boolean; + reason: string; + signature: string | null; + payload: string | null; + }; + /** Format: uri */ + html_url: string; + }; + /** + * Git Reference + * @description Git references within a repository + */ + "git-ref": { + ref: string; + node_id: string; + /** Format: uri */ + url: string; + object: { + type: string; + /** + * @description SHA for the reference + * @example 7638417db6d59f3c431d3e1f261cc637155684cd + */ + sha: string; + /** Format: uri */ + url: string; + }; + }; + /** + * Git Tag + * @description Metadata for a Git tag + */ + "git-tag": { + /** @example MDM6VGFnOTQwYmQzMzYyNDhlZmFlMGY5ZWU1YmM3YjJkNWM5ODU4ODdiMTZhYw== */ + node_id: string; + /** + * @description Name of the tag + * @example v0.0.1 + */ + tag: string; + /** @example 940bd336248efae0f9ee5bc7b2d5c985887b16ac */ + sha: string; + /** + * Format: uri + * @description URL for the tag + * @example https://api.github.com/repositories/42/git/tags/940bd336248efae0f9ee5bc7b2d5c985887b16ac + */ + url: string; + /** + * @description Message describing the purpose of the tag + * @example Initial public release + */ + message: string; + tagger: { + date: string; + email: string; + name: string; + }; + object: { + sha: string; + type: string; + /** Format: uri */ + url: string; + }; + verification?: components["schemas"]["verification"]; + }; + /** + * Git Tree + * @description The hierarchy between files in a Git repository. 
+ */ + "git-tree": { + sha: string; + /** Format: uri */ + url: string; + truncated: boolean; + /** + * @description Objects specifying a tree structure + * @example [ + * { + * "path": "file.rb", + * "mode": "100644", + * "type": "blob", + * "size": 30, + * "sha": "44b4fc6d56897b048c772eb4087f854f46256132", + * "url": "https://api.github.com/repos/octocat/Hello-World/git/blobs/44b4fc6d56897b048c772eb4087f854f46256132", + * "properties": { + * "path": { + * "type": "string" + * }, + * "mode": { + * "type": "string" + * }, + * "type": { + * "type": "string" + * }, + * "size": { + * "type": "integer" + * }, + * "sha": { + * "type": "string" + * }, + * "url": { + * "type": "string" + * } + * }, + * "required": [ + * "path", + * "mode", + * "type", + * "sha", + * "url", + * "size" + * ] + * } + * ] + */ + tree: { + /** @example test/file.rb */ + path?: string; + /** @example 040000 */ + mode?: string; + /** @example tree */ + type?: string; + /** @example 23f6827669e43831def8a7ad935069c8bd418261 */ + sha?: string; + /** @example 12 */ + size?: number; + /** @example https://api.github.com/repos/owner-482f3203ecf01f67e9deb18e/BBB_Private_Repo/git/blobs/23f6827669e43831def8a7ad935069c8bd418261 */ + url?: string; + }[]; + }; + /** Hook Response */ + "hook-response": { + code: number | null; + status: string | null; + message: string | null; + }; + /** + * Webhook + * @description Webhooks for repositories. + */ + hook: { + type: string; + /** + * @description Unique identifier of the webhook. + * @example 42 + */ + id: number; + /** + * @description The name of a valid service, use 'web' for a webhook. + * @example web + */ + name: string; + /** + * @description Determines whether the hook is actually triggered on pushes. + * @example true + */ + active: boolean; + /** + * @description Determines what events the hook is triggered for. Default: ['push']. + * @example [ + * "push", + * "pull_request" + * ] + */ + events: string[]; + config: { + /** @example "foo@bar.com" */ + email?: string; + /** @example "foo" */ + password?: string; + /** @example "roomer" */ + room?: string; + /** @example "foo" */ + subdomain?: string; + url?: components["schemas"]["webhook-config-url"]; + insecure_ssl?: components["schemas"]["webhook-config-insecure-ssl"]; + content_type?: components["schemas"]["webhook-config-content-type"]; + /** @example "sha256" */ + digest?: string; + secret?: components["schemas"]["webhook-config-secret"]; + /** @example "abc" */ + token?: string; + }; + /** + * Format: date-time + * @example 2011-09-06T20:39:23Z + */ + updated_at: string; + /** + * Format: date-time + * @example 2011-09-06T17:26:27Z + */ + created_at: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/hooks/1 + */ + url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/hooks/1/test + */ + test_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/hooks/1/pings + */ + ping_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/hooks/1/deliveries + */ + deliveries_url?: string; + last_response: components["schemas"]["hook-response"]; + }; + /** + * Import + * @description A repository import from an external source. + */ + import: { + vcs: string | null; + use_lfs?: boolean; + /** @description The URL of the originating repository. 
*/ + vcs_url: string; + svc_root?: string; + tfvc_project?: string; + /** @enum {string} */ + status: + | "auth" + | "error" + | "none" + | "detecting" + | "choose" + | "auth_failed" + | "importing" + | "mapping" + | "waiting_to_push" + | "pushing" + | "complete" + | "setup" + | "unknown" + | "detection_found_multiple" + | "detection_found_nothing" + | "detection_needs_auth"; + status_text?: string | null; + failed_step?: string | null; + error_message?: string | null; + import_percent?: number | null; + commit_count?: number | null; + push_percent?: number | null; + has_large_files?: boolean; + large_files_size?: number; + large_files_count?: number; + project_choices?: { + vcs?: string; + tfvc_project?: string; + human_name?: string; + }[]; + message?: string; + authors_count?: number | null; + /** Format: uri */ + url: string; + /** Format: uri */ + html_url: string; + /** Format: uri */ + authors_url: string; + /** Format: uri */ + repository_url: string; + svn_root?: string; + }; + /** + * Porter Author + * @description Porter Author + */ + "porter-author": { + id: number; + remote_id: string; + remote_name: string; + email: string; + name: string; + /** Format: uri */ + url: string; + /** Format: uri */ + import_url: string; + }; + /** + * Porter Large File + * @description Porter Large File + */ + "porter-large-file": { + ref_name: string; + path: string; + oid: string; + size: number; + }; + /** + * Issue + * @description Issues are a great way to keep track of tasks, enhancements, and bugs for your projects. + */ + "nullable-issue": { + id: number; + node_id: string; + /** + * Format: uri + * @description URL for the issue + * @example https://api.github.com/repositories/42/issues/1 + */ + url: string; + /** Format: uri */ + repository_url: string; + labels_url: string; + /** Format: uri */ + comments_url: string; + /** Format: uri */ + events_url: string; + /** Format: uri */ + html_url: string; + /** + * @description Number uniquely identifying the issue within its repository + * @example 42 + */ + number: number; + /** + * @description State of the issue; either 'open' or 'closed' + * @example open + */ + state: string; + /** + * @description The reason for the current state + * @example not_planned + * @enum {string|null} + */ + state_reason?: ("completed" | "reopened" | "not_planned") | null; + /** + * @description Title of the issue + * @example Widget creation fails in Safari on OS X 10.8 + */ + title: string; + /** + * @description Contents of the issue + * @example It looks like the new widget form is broken on Safari. When I try and create the widget, Safari crashes. This is reproducible on 10.8, but not 10.9. Maybe a browser bug? 
+ */ + body?: string | null; + user: components["schemas"]["nullable-simple-user"]; + /** + * @description Labels to associate with this issue; pass one or more label names to replace the set of labels on this issue; send an empty array to clear all labels from the issue; note that the labels are silently dropped for users without push access to the repository + * @example [ + * "bug", + * "registration" + * ] + */ + labels: ( + | string + | { + /** Format: int64 */ + id?: number; + node_id?: string; + /** Format: uri */ + url?: string; + name?: string; + description?: string | null; + color?: string | null; + default?: boolean; + } + )[]; + assignee: components["schemas"]["nullable-simple-user"]; + assignees?: components["schemas"]["simple-user"][] | null; + milestone: components["schemas"]["nullable-milestone"]; + locked: boolean; + active_lock_reason?: string | null; + comments: number; + pull_request?: { + /** Format: date-time */ + merged_at?: string | null; + /** Format: uri */ + diff_url: string | null; + /** Format: uri */ + html_url: string | null; + /** Format: uri */ + patch_url: string | null; + /** Format: uri */ + url: string | null; + }; + /** Format: date-time */ + closed_at: string | null; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + draft?: boolean; + closed_by?: components["schemas"]["nullable-simple-user"]; + body_html?: string; + body_text?: string; + /** Format: uri */ + timeline_url?: string; + repository?: components["schemas"]["repository"]; + performed_via_github_app?: components["schemas"]["nullable-integration"]; + author_association: components["schemas"]["author-association"]; + reactions?: components["schemas"]["reaction-rollup"]; + } | null; + /** + * Issue Event Label + * @description Issue Event Label + */ + "issue-event-label": { + name: string | null; + color: string | null; + }; + /** Issue Event Dismissed Review */ + "issue-event-dismissed-review": { + state: string; + review_id: number; + dismissal_message: string | null; + dismissal_commit_id?: string | null; + }; + /** + * Issue Event Milestone + * @description Issue Event Milestone + */ + "issue-event-milestone": { + title: string; + }; + /** + * Issue Event Project Card + * @description Issue Event Project Card + */ + "issue-event-project-card": { + /** Format: uri */ + url: string; + id: number; + /** Format: uri */ + project_url: string; + project_id: number; + column_name: string; + previous_column_name?: string; + }; + /** + * Issue Event Rename + * @description Issue Event Rename + */ + "issue-event-rename": { + from: string; + to: string; + }; + /** + * Issue Event + * @description Issue Event + */ + "issue-event": { + /** @example 1 */ + id: number; + /** @example MDEwOklzc3VlRXZlbnQx */ + node_id: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/issues/events/1 + */ + url: string; + actor: components["schemas"]["nullable-simple-user"]; + /** @example closed */ + event: string; + /** @example 6dcb09b5b57875f334f61aebed695e2e4193db5e */ + commit_id: string | null; + /** @example https://api.github.com/repos/octocat/Hello-World/commits/6dcb09b5b57875f334f61aebed695e2e4193db5e */ + commit_url: string | null; + /** + * Format: date-time + * @example 2011-04-14T16:00:49Z + */ + created_at: string; + issue?: components["schemas"]["nullable-issue"]; + label?: components["schemas"]["issue-event-label"]; + assignee?: components["schemas"]["nullable-simple-user"]; + assigner?: 
components["schemas"]["nullable-simple-user"]; + review_requester?: components["schemas"]["nullable-simple-user"]; + requested_reviewer?: components["schemas"]["nullable-simple-user"]; + requested_team?: components["schemas"]["team"]; + dismissed_review?: components["schemas"]["issue-event-dismissed-review"]; + milestone?: components["schemas"]["issue-event-milestone"]; + project_card?: components["schemas"]["issue-event-project-card"]; + rename?: components["schemas"]["issue-event-rename"]; + author_association?: components["schemas"]["author-association"]; + lock_reason?: string | null; + performed_via_github_app?: components["schemas"]["nullable-integration"]; + }; + /** + * Labeled Issue Event + * @description Labeled Issue Event + */ + "labeled-issue-event": { + id: number; + node_id: string; + url: string; + actor: components["schemas"]["simple-user"]; + event: string; + commit_id: string | null; + commit_url: string | null; + created_at: string; + performed_via_github_app: components["schemas"]["nullable-integration"]; + label: { + name: string; + color: string; + }; + }; + /** + * Unlabeled Issue Event + * @description Unlabeled Issue Event + */ + "unlabeled-issue-event": { + id: number; + node_id: string; + url: string; + actor: components["schemas"]["simple-user"]; + event: string; + commit_id: string | null; + commit_url: string | null; + created_at: string; + performed_via_github_app: components["schemas"]["nullable-integration"]; + label: { + name: string; + color: string; + }; + }; + /** + * Assigned Issue Event + * @description Assigned Issue Event + */ + "assigned-issue-event": { + id: number; + node_id: string; + url: string; + actor: components["schemas"]["simple-user"]; + event: string; + commit_id: string | null; + commit_url: string | null; + created_at: string; + performed_via_github_app: components["schemas"]["integration"]; + assignee: components["schemas"]["simple-user"]; + assigner: components["schemas"]["simple-user"]; + }; + /** + * Unassigned Issue Event + * @description Unassigned Issue Event + */ + "unassigned-issue-event": { + id: number; + node_id: string; + url: string; + actor: components["schemas"]["simple-user"]; + event: string; + commit_id: string | null; + commit_url: string | null; + created_at: string; + performed_via_github_app: components["schemas"]["nullable-integration"]; + assignee: components["schemas"]["simple-user"]; + assigner: components["schemas"]["simple-user"]; + }; + /** + * Milestoned Issue Event + * @description Milestoned Issue Event + */ + "milestoned-issue-event": { + id: number; + node_id: string; + url: string; + actor: components["schemas"]["simple-user"]; + event: string; + commit_id: string | null; + commit_url: string | null; + created_at: string; + performed_via_github_app: components["schemas"]["nullable-integration"]; + milestone: { + title: string; + }; + }; + /** + * Demilestoned Issue Event + * @description Demilestoned Issue Event + */ + "demilestoned-issue-event": { + id: number; + node_id: string; + url: string; + actor: components["schemas"]["simple-user"]; + event: string; + commit_id: string | null; + commit_url: string | null; + created_at: string; + performed_via_github_app: components["schemas"]["nullable-integration"]; + milestone: { + title: string; + }; + }; + /** + * Renamed Issue Event + * @description Renamed Issue Event + */ + "renamed-issue-event": { + id: number; + node_id: string; + url: string; + actor: components["schemas"]["simple-user"]; + event: string; + commit_id: string | null; + commit_url: 
string | null; + created_at: string; + performed_via_github_app: components["schemas"]["nullable-integration"]; + rename: { + from: string; + to: string; + }; + }; + /** + * Review Requested Issue Event + * @description Review Requested Issue Event + */ + "review-requested-issue-event": { + id: number; + node_id: string; + url: string; + actor: components["schemas"]["simple-user"]; + event: string; + commit_id: string | null; + commit_url: string | null; + created_at: string; + performed_via_github_app: components["schemas"]["nullable-integration"]; + review_requester: components["schemas"]["simple-user"]; + requested_team?: components["schemas"]["team"]; + requested_reviewer?: components["schemas"]["simple-user"]; + }; + /** + * Review Request Removed Issue Event + * @description Review Request Removed Issue Event + */ + "review-request-removed-issue-event": { + id: number; + node_id: string; + url: string; + actor: components["schemas"]["simple-user"]; + event: string; + commit_id: string | null; + commit_url: string | null; + created_at: string; + performed_via_github_app: components["schemas"]["nullable-integration"]; + review_requester: components["schemas"]["simple-user"]; + requested_team?: components["schemas"]["team"]; + requested_reviewer?: components["schemas"]["simple-user"]; + }; + /** + * Review Dismissed Issue Event + * @description Review Dismissed Issue Event + */ + "review-dismissed-issue-event": { + id: number; + node_id: string; + url: string; + actor: components["schemas"]["simple-user"]; + event: string; + commit_id: string | null; + commit_url: string | null; + created_at: string; + performed_via_github_app: components["schemas"]["nullable-integration"]; + dismissed_review: { + state: string; + review_id: number; + dismissal_message: string | null; + dismissal_commit_id?: string; + }; + }; + /** + * Locked Issue Event + * @description Locked Issue Event + */ + "locked-issue-event": { + id: number; + node_id: string; + url: string; + actor: components["schemas"]["simple-user"]; + event: string; + commit_id: string | null; + commit_url: string | null; + created_at: string; + performed_via_github_app: components["schemas"]["nullable-integration"]; + /** @example "off-topic" */ + lock_reason: string | null; + }; + /** + * Added to Project Issue Event + * @description Added to Project Issue Event + */ + "added-to-project-issue-event": { + id: number; + node_id: string; + url: string; + actor: components["schemas"]["simple-user"]; + event: string; + commit_id: string | null; + commit_url: string | null; + created_at: string; + performed_via_github_app: components["schemas"]["nullable-integration"]; + project_card?: { + id: number; + /** Format: uri */ + url: string; + project_id: number; + /** Format: uri */ + project_url: string; + column_name: string; + previous_column_name?: string; + }; + }; + /** + * Moved Column in Project Issue Event + * @description Moved Column in Project Issue Event + */ + "moved-column-in-project-issue-event": { + id: number; + node_id: string; + url: string; + actor: components["schemas"]["simple-user"]; + event: string; + commit_id: string | null; + commit_url: string | null; + created_at: string; + performed_via_github_app: components["schemas"]["nullable-integration"]; + project_card?: { + id: number; + /** Format: uri */ + url: string; + project_id: number; + /** Format: uri */ + project_url: string; + column_name: string; + previous_column_name?: string; + }; + }; + /** + * Removed from Project Issue Event + * @description Removed from 
Project Issue Event + */ + "removed-from-project-issue-event": { + id: number; + node_id: string; + url: string; + actor: components["schemas"]["simple-user"]; + event: string; + commit_id: string | null; + commit_url: string | null; + created_at: string; + performed_via_github_app: components["schemas"]["nullable-integration"]; + project_card?: { + id: number; + /** Format: uri */ + url: string; + project_id: number; + /** Format: uri */ + project_url: string; + column_name: string; + previous_column_name?: string; + }; + }; + /** + * Converted Note to Issue Issue Event + * @description Converted Note to Issue Issue Event + */ + "converted-note-to-issue-issue-event": { + id: number; + node_id: string; + url: string; + actor: components["schemas"]["simple-user"]; + event: string; + commit_id: string | null; + commit_url: string | null; + created_at: string; + performed_via_github_app: components["schemas"]["integration"]; + project_card?: { + id: number; + /** Format: uri */ + url: string; + project_id: number; + /** Format: uri */ + project_url: string; + column_name: string; + previous_column_name?: string; + }; + }; + /** + * Issue Event for Issue + * @description Issue Event for Issue + */ + "issue-event-for-issue": Partial< + components["schemas"]["labeled-issue-event"] + > & + Partial & + Partial & + Partial & + Partial & + Partial & + Partial & + Partial & + Partial & + Partial & + Partial & + Partial & + Partial & + Partial & + Partial; + /** + * Label + * @description Color-coded labels help you categorize and filter your issues (just like labels in Gmail). + */ + label: { + /** + * Format: int64 + * @example 208045946 + */ + id: number; + /** @example MDU6TGFiZWwyMDgwNDU5NDY= */ + node_id: string; + /** + * Format: uri + * @description URL for the label + * @example https://api.github.com/repositories/42/labels/bug + */ + url: string; + /** + * @description The name of the label. + * @example bug + */ + name: string; + /** @example Something isn't working */ + description: string | null; + /** + * @description 6-character hex code, without the leading #, identifying the color + * @example FFFFFF + */ + color: string; + /** @example true */ + default: boolean; + }; + /** + * Timeline Comment Event + * @description Timeline Comment Event + */ + "timeline-comment-event": { + event: string; + actor: components["schemas"]["simple-user"]; + /** + * @description Unique identifier of the issue comment + * @example 42 + */ + id: number; + node_id: string; + /** + * Format: uri + * @description URL for the issue comment + * @example https://api.github.com/repositories/42/issues/comments/1 + */ + url: string; + /** + * @description Contents of the issue comment + * @example What version of Safari were you using when you observed this bug? 
+ */ + body?: string; + body_text?: string; + body_html?: string; + /** Format: uri */ + html_url: string; + user: components["schemas"]["simple-user"]; + /** + * Format: date-time + * @example 2011-04-14T16:00:49Z + */ + created_at: string; + /** + * Format: date-time + * @example 2011-04-14T16:00:49Z + */ + updated_at: string; + /** Format: uri */ + issue_url: string; + author_association: components["schemas"]["author-association"]; + performed_via_github_app?: components["schemas"]["nullable-integration"]; + reactions?: components["schemas"]["reaction-rollup"]; + }; + /** + * Timeline Cross Referenced Event + * @description Timeline Cross Referenced Event + */ + "timeline-cross-referenced-event": { + event: string; + actor?: components["schemas"]["simple-user"]; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + source: { + type?: string; + issue?: components["schemas"]["issue"]; + }; + }; + /** + * Timeline Committed Event + * @description Timeline Committed Event + */ + "timeline-committed-event": { + event?: string; + /** + * @description SHA for the commit + * @example 7638417db6d59f3c431d3e1f261cc637155684cd + */ + sha: string; + node_id: string; + /** Format: uri */ + url: string; + /** @description Identifying information for the git-user */ + author: { + /** + * Format: date-time + * @description Timestamp of the commit + * @example 2014-08-09T08:02:04+12:00 + */ + date: string; + /** + * @description Git email address of the user + * @example monalisa.octocat@example.com + */ + email: string; + /** + * @description Name of the git user + * @example Monalisa Octocat + */ + name: string; + }; + /** @description Identifying information for the git-user */ + committer: { + /** + * Format: date-time + * @description Timestamp of the commit + * @example 2014-08-09T08:02:04+12:00 + */ + date: string; + /** + * @description Git email address of the user + * @example monalisa.octocat@example.com + */ + email: string; + /** + * @description Name of the git user + * @example Monalisa Octocat + */ + name: string; + }; + /** + * @description Message describing the purpose of the commit + * @example Fix #42 + */ + message: string; + tree: { + /** + * @description SHA for the commit + * @example 7638417db6d59f3c431d3e1f261cc637155684cd + */ + sha: string; + /** Format: uri */ + url: string; + }; + parents: { + /** + * @description SHA for the commit + * @example 7638417db6d59f3c431d3e1f261cc637155684cd + */ + sha: string; + /** Format: uri */ + url: string; + /** Format: uri */ + html_url: string; + }[]; + verification: { + verified: boolean; + reason: string; + signature: string | null; + payload: string | null; + }; + /** Format: uri */ + html_url: string; + }; + /** + * Timeline Reviewed Event + * @description Timeline Reviewed Event + */ + "timeline-reviewed-event": { + event: string; + /** + * @description Unique identifier of the review + * @example 42 + */ + id: number; + /** @example MDE3OlB1bGxSZXF1ZXN0UmV2aWV3ODA= */ + node_id: string; + user: components["schemas"]["simple-user"]; + /** + * @description The text of the review. + * @example This looks great. 
+ */ + body: string | null; + /** @example CHANGES_REQUESTED */ + state: string; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/pull/12#pullrequestreview-80 + */ + html_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/pulls/12 + */ + pull_request_url: string; + _links: { + html: { + href: string; + }; + pull_request: { + href: string; + }; + }; + /** Format: date-time */ + submitted_at?: string; + /** + * @description A commit SHA for the review. + * @example 54bb654c9e6025347f57900a4a5c2313a96b8035 + */ + commit_id: string; + body_html?: string; + body_text?: string; + author_association: components["schemas"]["author-association"]; + }; + /** + * Pull Request Review Comment + * @description Pull Request Review Comments are comments on a portion of the Pull Request's diff. + */ + "pull-request-review-comment": { + /** + * @description URL for the pull request review comment + * @example https://api.github.com/repos/octocat/Hello-World/pulls/comments/1 + */ + url: string; + /** + * @description The ID of the pull request review to which the comment belongs. + * @example 42 + */ + pull_request_review_id: number | null; + /** + * @description The ID of the pull request review comment. + * @example 1 + */ + id: number; + /** + * @description The node ID of the pull request review comment. + * @example MDI0OlB1bGxSZXF1ZXN0UmV2aWV3Q29tbWVudDEw + */ + node_id: string; + /** + * @description The diff of the line that the comment refers to. + * @example @@ -16,33 +16,40 @@ public class Connection : IConnection... + */ + diff_hunk: string; + /** + * @description The relative path of the file to which the comment applies. + * @example config/database.yaml + */ + path: string; + /** + * @description The line index in the diff to which the comment applies. This field is deprecated; use `line` instead. + * @example 1 + */ + position: number; + /** + * @description The index of the original line in the diff to which the comment applies. This field is deprecated; use `original_line` instead. + * @example 4 + */ + original_position: number; + /** + * @description The SHA of the commit to which the comment applies. + * @example 6dcb09b5b57875f334f61aebed695e2e4193db5e + */ + commit_id: string; + /** + * @description The SHA of the original commit to which the comment applies. + * @example 9c48853fa3dc5c1c3d6f1f1cd1f2743e72652840 + */ + original_commit_id: string; + /** + * @description The comment ID to reply to. + * @example 8 + */ + in_reply_to_id?: number; + user: components["schemas"]["simple-user"]; + /** + * @description The text of the comment. + * @example We should probably include a check for null values here. + */ + body: string; + /** + * Format: date-time + * @example 2011-04-14T16:00:49Z + */ + created_at: string; + /** + * Format: date-time + * @example 2011-04-14T16:00:49Z + */ + updated_at: string; + /** + * Format: uri + * @description HTML URL for the pull request review comment. + * @example https://github.com/octocat/Hello-World/pull/1#discussion-diff-1 + */ + html_url: string; + /** + * Format: uri + * @description URL for the pull request that the review comment belongs to. 
+ * @example https://api.github.com/repos/octocat/Hello-World/pulls/1 + */ + pull_request_url: string; + author_association: components["schemas"]["author-association"]; + _links: { + self: { + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/pulls/comments/1 + */ + href: string; + }; + html: { + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/pull/1#discussion-diff-1 + */ + href: string; + }; + pull_request: { + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/pulls/1 + */ + href: string; + }; + }; + /** + * @description The first line of the range for a multi-line comment. + * @example 2 + */ + start_line?: number | null; + /** + * @description The first line of the range for a multi-line comment. + * @example 2 + */ + original_start_line?: number | null; + /** + * @description The side of the first line of the range for a multi-line comment. + * @default RIGHT + * @enum {string|null} + */ + start_side?: ("LEFT" | "RIGHT") | null; + /** + * @description The line of the blob to which the comment applies. The last line of the range for a multi-line comment + * @example 2 + */ + line?: number; + /** + * @description The line of the blob to which the comment applies. The last line of the range for a multi-line comment + * @example 2 + */ + original_line?: number; + /** + * @description The side of the diff to which the comment applies. The side of the last line of the range for a multi-line comment + * @default RIGHT + * @enum {string} + */ + side?: "LEFT" | "RIGHT"; + reactions?: components["schemas"]["reaction-rollup"]; + /** @example "
comment body
" */ + body_html?: string; + /** @example "comment body" */ + body_text?: string; + }; + /** + * Timeline Line Commented Event + * @description Timeline Line Commented Event + */ + "timeline-line-commented-event": { + event?: string; + node_id?: string; + comments?: components["schemas"]["pull-request-review-comment"][]; + }; + /** + * Timeline Commit Commented Event + * @description Timeline Commit Commented Event + */ + "timeline-commit-commented-event": { + event?: string; + node_id?: string; + commit_id?: string; + comments?: components["schemas"]["commit-comment"][]; + }; + /** + * Timeline Assigned Issue Event + * @description Timeline Assigned Issue Event + */ + "timeline-assigned-issue-event": { + id: number; + node_id: string; + url: string; + actor: components["schemas"]["simple-user"]; + event: string; + commit_id: string | null; + commit_url: string | null; + created_at: string; + performed_via_github_app: components["schemas"]["nullable-integration"]; + assignee: components["schemas"]["simple-user"]; + }; + /** + * Timeline Unassigned Issue Event + * @description Timeline Unassigned Issue Event + */ + "timeline-unassigned-issue-event": { + id: number; + node_id: string; + url: string; + actor: components["schemas"]["simple-user"]; + event: string; + commit_id: string | null; + commit_url: string | null; + created_at: string; + performed_via_github_app: components["schemas"]["nullable-integration"]; + assignee: components["schemas"]["simple-user"]; + }; + /** + * State Change Issue Event + * @description State Change Issue Event + */ + "state-change-issue-event": { + id: number; + node_id: string; + url: string; + actor: components["schemas"]["simple-user"]; + event: string; + commit_id: string | null; + commit_url: string | null; + created_at: string; + performed_via_github_app: components["schemas"]["nullable-integration"]; + state_reason?: string | null; + }; + /** + * Timeline Event + * @description Timeline Event + */ + "timeline-issue-events": Partial< + components["schemas"]["labeled-issue-event"] + > & + Partial & + Partial & + Partial & + Partial & + Partial & + Partial & + Partial & + Partial & + Partial & + Partial & + Partial & + Partial & + Partial & + Partial & + Partial & + Partial & + Partial & + Partial & + Partial & + Partial & + Partial; + /** + * Deploy Key + * @description An SSH key granting access to a single repository. 
+ */ + "deploy-key": { + id: number; + key: string; + url: string; + title: string; + verified: boolean; + created_at: string; + read_only: boolean; + added_by?: string | null; + last_used?: string | null; + }; + /** + * Language + * @description Language + */ + language: { [key: string]: number }; + /** + * License Content + * @description License Content + */ + "license-content": { + name: string; + path: string; + sha: string; + size: number; + /** Format: uri */ + url: string; + /** Format: uri */ + html_url: string | null; + /** Format: uri */ + git_url: string | null; + /** Format: uri */ + download_url: string | null; + type: string; + content: string; + encoding: string; + _links: { + /** Format: uri */ + git: string | null; + /** Format: uri */ + html: string | null; + /** Format: uri */ + self: string; + }; + license: components["schemas"]["nullable-license-simple"]; + }; + /** + * Merged upstream + * @description Results of a successful merge upstream request + */ + "merged-upstream": { + message?: string; + /** @enum {string} */ + merge_type?: "merge" | "fast-forward" | "none"; + base_branch?: string; + }; + /** + * Milestone + * @description A collection of related issues and pull requests. + */ + milestone: { + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/milestones/1 + */ + url: string; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/milestones/v1.0 + */ + html_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/milestones/1/labels + */ + labels_url: string; + /** @example 1002604 */ + id: number; + /** @example MDk6TWlsZXN0b25lMTAwMjYwNA== */ + node_id: string; + /** + * @description The number of the milestone. + * @example 42 + */ + number: number; + /** + * @description The state of the milestone. + * @default open + * @example open + * @enum {string} + */ + state: "open" | "closed"; + /** + * @description The title of the milestone. + * @example v1.0 + */ + title: string; + /** @example Tracking milestone for version 1.0 */ + description: string | null; + creator: components["schemas"]["nullable-simple-user"]; + /** @example 4 */ + open_issues: number; + /** @example 8 */ + closed_issues: number; + /** + * Format: date-time + * @example 2011-04-10T20:09:31Z + */ + created_at: string; + /** + * Format: date-time + * @example 2014-03-03T18:58:10Z + */ + updated_at: string; + /** + * Format: date-time + * @example 2013-02-12T13:22:01Z + */ + closed_at: string | null; + /** + * Format: date-time + * @example 2012-10-09T23:39:01Z + */ + due_on: string | null; + }; + /** Pages Source Hash */ + "pages-source-hash": { + branch: string; + path: string; + }; + /** Pages Https Certificate */ + "pages-https-certificate": { + /** + * @example approved + * @enum {string} + */ + state: + | "new" + | "authorization_created" + | "authorization_pending" + | "authorized" + | "authorization_revoked" + | "issued" + | "uploaded" + | "approved" + | "errored" + | "bad_authz" + | "destroy_pending" + | "dns_changed"; + /** @example Certificate is approved */ + description: string; + /** + * @description Array of the domain set and its alternate name (if it is configured) + * @example [ + * "example.com", + * "www.example.com" + * ] + */ + domains: string[]; + /** Format: date */ + expires_at?: string; + }; + /** + * GitHub Pages + * @description The configuration for GitHub Pages for a repository. 
+ */ + page: { + /** + * Format: uri + * @description The API address for accessing this Page resource. + * @example https://api.github.com/repos/github/hello-world/pages + */ + url: string; + /** + * @description The status of the most recent build of the Page. + * @example built + * @enum {string|null} + */ + status: ("built" | "building" | "errored") | null; + /** + * @description The Pages site's custom domain + * @example example.com + */ + cname: string | null; + /** + * @description The state if the domain is verified + * @example pending + * @enum {string|null} + */ + protected_domain_state?: ("pending" | "verified" | "unverified") | null; + /** + * Format: date-time + * @description The timestamp when a pending domain becomes unverified. + */ + pending_domain_unverified_at?: string | null; + /** + * @description Whether the Page has a custom 404 page. + * @default false + * @example false + */ + custom_404: boolean; + /** + * Format: uri + * @description The web address the Page can be accessed from. + * @example https://example.com + */ + html_url?: string; + /** + * @description The process in which the Page will be built. + * @example legacy + * @enum {string|null} + */ + build_type?: ("legacy" | "workflow") | null; + source?: components["schemas"]["pages-source-hash"]; + /** + * @description Whether the GitHub Pages site is publicly visible. If set to `true`, the site is accessible to anyone on the internet. If set to `false`, the site will only be accessible to users who have at least `read` access to the repository that published the site. + * @example true + */ + public: boolean; + https_certificate?: components["schemas"]["pages-https-certificate"]; + /** + * @description Whether https is enabled on the domain + * @example true + */ + https_enforced?: boolean; + }; + /** + * Page Build + * @description Page Build + */ + "page-build": { + /** Format: uri */ + url: string; + status: string; + error: { + message: string | null; + }; + pusher: components["schemas"]["nullable-simple-user"]; + commit: string; + duration: number; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + }; + /** + * Page Build Status + * @description Page Build Status + */ + "page-build-status": { + /** + * Format: uri + * @example https://api.github.com/repos/github/hello-world/pages/builds/latest + */ + url: string; + /** @example queued */ + status: string; + }; + /** + * GitHub Pages + * @description The GitHub Pages deployment status. + */ + "page-deployment": { + /** + * Format: uri + * @description The URI to monitor GitHub Pages deployment status. + * @example https://api.github.com/repos/github/hello-world/pages/deployments/4fd754f7e594640989b406850d0bc8f06a121251/status + */ + status_url: string; + /** + * Format: uri + * @description The URI to the deployed GitHub Pages. + * @example hello-world.github.io + */ + page_url: string; + /** + * Format: uri + * @description The URI to the deployed GitHub Pages preview. 
+ * @example monalisa-1231a2312sa32-23sda74.drafts.github.io + */ + preview_url?: string; + }; + /** + * Pages Health Check Status + * @description Pages Health Check Status + */ + "pages-health-check": { + domain?: { + host?: string; + uri?: string; + nameservers?: string; + dns_resolves?: boolean; + is_proxied?: boolean | null; + is_cloudflare_ip?: boolean | null; + is_fastly_ip?: boolean | null; + is_old_ip_address?: boolean | null; + is_a_record?: boolean | null; + has_cname_record?: boolean | null; + has_mx_records_present?: boolean | null; + is_valid_domain?: boolean; + is_apex_domain?: boolean; + should_be_a_record?: boolean | null; + is_cname_to_github_user_domain?: boolean | null; + is_cname_to_pages_dot_github_dot_com?: boolean | null; + is_cname_to_fastly?: boolean | null; + is_pointed_to_github_pages_ip?: boolean | null; + is_non_github_pages_ip_present?: boolean | null; + is_pages_domain?: boolean; + is_served_by_pages?: boolean | null; + is_valid?: boolean; + reason?: string | null; + responds_to_https?: boolean; + enforces_https?: boolean; + https_error?: string | null; + is_https_eligible?: boolean | null; + caa_error?: string | null; + }; + alt_domain?: { + host?: string; + uri?: string; + nameservers?: string; + dns_resolves?: boolean; + is_proxied?: boolean | null; + is_cloudflare_ip?: boolean | null; + is_fastly_ip?: boolean | null; + is_old_ip_address?: boolean | null; + is_a_record?: boolean | null; + has_cname_record?: boolean | null; + has_mx_records_present?: boolean | null; + is_valid_domain?: boolean; + is_apex_domain?: boolean; + should_be_a_record?: boolean | null; + is_cname_to_github_user_domain?: boolean | null; + is_cname_to_pages_dot_github_dot_com?: boolean | null; + is_cname_to_fastly?: boolean | null; + is_pointed_to_github_pages_ip?: boolean | null; + is_non_github_pages_ip_present?: boolean | null; + is_pages_domain?: boolean; + is_served_by_pages?: boolean | null; + is_valid?: boolean; + reason?: string | null; + responds_to_https?: boolean; + enforces_https?: boolean; + https_error?: string | null; + is_https_eligible?: boolean | null; + caa_error?: string | null; + } | null; + }; + /** + * Pull Request + * @description Pull requests let you tell others about changes you've pushed to a repository on GitHub. Once a pull request is sent, interested parties can review the set of changes, discuss potential modifications, and even push follow-up commits if necessary. 
+ */ + "pull-request": { + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/pulls/1347 + */ + url: string; + /** @example 1 */ + id: number; + /** @example MDExOlB1bGxSZXF1ZXN0MQ== */ + node_id: string; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/pull/1347 + */ + html_url: string; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/pull/1347.diff + */ + diff_url: string; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/pull/1347.patch + */ + patch_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/issues/1347 + */ + issue_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/pulls/1347/commits + */ + commits_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/pulls/1347/comments + */ + review_comments_url: string; + /** @example https://api.github.com/repos/octocat/Hello-World/pulls/comments{/number} */ + review_comment_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/issues/1347/comments + */ + comments_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/statuses/6dcb09b5b57875f334f61aebed695e2e4193db5e + */ + statuses_url: string; + /** + * @description Number uniquely identifying the pull request within its repository. + * @example 42 + */ + number: number; + /** + * @description State of this Pull Request. Either `open` or `closed`. + * @example open + * @enum {string} + */ + state: "open" | "closed"; + /** @example true */ + locked: boolean; + /** + * @description The title of the pull request. + * @example Amazing new feature + */ + title: string; + user: components["schemas"]["nullable-simple-user"]; + /** @example Please pull these awesome changes */ + body: string | null; + labels: { + /** Format: int64 */ + id: number; + node_id: string; + url: string; + name: string; + description: string | null; + color: string; + default: boolean; + }[]; + milestone: components["schemas"]["nullable-milestone"]; + /** @example too heated */ + active_lock_reason?: string | null; + /** + * Format: date-time + * @example 2011-01-26T19:01:12Z + */ + created_at: string; + /** + * Format: date-time + * @example 2011-01-26T19:01:12Z + */ + updated_at: string; + /** + * Format: date-time + * @example 2011-01-26T19:01:12Z + */ + closed_at: string | null; + /** + * Format: date-time + * @example 2011-01-26T19:01:12Z + */ + merged_at: string | null; + /** @example e5bd3914e2e596debea16f433f57875b5b90bcd6 */ + merge_commit_sha: string | null; + assignee: components["schemas"]["nullable-simple-user"]; + assignees?: components["schemas"]["simple-user"][] | null; + requested_reviewers?: components["schemas"]["simple-user"][] | null; + requested_teams?: components["schemas"]["team-simple"][] | null; + head: { + label: string; + ref: string; + repo: { + archive_url: string; + assignees_url: string; + blobs_url: string; + branches_url: string; + collaborators_url: string; + comments_url: string; + commits_url: string; + compare_url: string; + contents_url: string; + /** Format: uri */ + contributors_url: string; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + /** Format: uri */ + forks_url: string; + full_name: string; + git_commits_url: string; 
+ git_refs_url: string; + git_tags_url: string; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + id: number; + node_id: string; + issue_comment_url: string; + issue_events_url: string; + issues_url: string; + keys_url: string; + labels_url: string; + /** Format: uri */ + languages_url: string; + /** Format: uri */ + merges_url: string; + milestones_url: string; + name: string; + notifications_url: string; + owner: { + /** Format: uri */ + avatar_url: string; + events_url: string; + /** Format: uri */ + followers_url: string; + following_url: string; + gists_url: string; + gravatar_id: string | null; + /** Format: uri */ + html_url: string; + id: number; + node_id: string; + login: string; + /** Format: uri */ + organizations_url: string; + /** Format: uri */ + received_events_url: string; + /** Format: uri */ + repos_url: string; + site_admin: boolean; + starred_url: string; + /** Format: uri */ + subscriptions_url: string; + type: string; + /** Format: uri */ + url: string; + }; + private: boolean; + pulls_url: string; + releases_url: string; + /** Format: uri */ + stargazers_url: string; + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + trees_url: string; + /** Format: uri */ + url: string; + clone_url: string; + default_branch: string; + forks: number; + forks_count: number; + git_url: string; + has_downloads: boolean; + has_issues: boolean; + has_projects: boolean; + has_wiki: boolean; + has_pages: boolean; + /** Format: uri */ + homepage: string | null; + language: string | null; + master_branch?: string; + archived: boolean; + disabled: boolean; + /** @description The repository visibility: public, private, or internal. 
*/ + visibility?: string; + /** Format: uri */ + mirror_url: string | null; + open_issues: number; + open_issues_count: number; + permissions?: { + admin: boolean; + maintain?: boolean; + push: boolean; + triage?: boolean; + pull: boolean; + }; + temp_clone_token?: string; + allow_merge_commit?: boolean; + allow_squash_merge?: boolean; + allow_rebase_merge?: boolean; + license: { + key: string; + name: string; + /** Format: uri */ + url: string | null; + spdx_id: string | null; + node_id: string; + } | null; + /** Format: date-time */ + pushed_at: string; + size: number; + ssh_url: string; + stargazers_count: number; + /** Format: uri */ + svn_url: string; + topics?: string[]; + watchers: number; + watchers_count: number; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + allow_forking?: boolean; + is_template?: boolean; + web_commit_signoff_required?: boolean; + } | null; + sha: string; + user: { + /** Format: uri */ + avatar_url: string; + events_url: string; + /** Format: uri */ + followers_url: string; + following_url: string; + gists_url: string; + gravatar_id: string | null; + /** Format: uri */ + html_url: string; + id: number; + node_id: string; + login: string; + /** Format: uri */ + organizations_url: string; + /** Format: uri */ + received_events_url: string; + /** Format: uri */ + repos_url: string; + site_admin: boolean; + starred_url: string; + /** Format: uri */ + subscriptions_url: string; + type: string; + /** Format: uri */ + url: string; + }; + }; + base: { + label: string; + ref: string; + repo: { + archive_url: string; + assignees_url: string; + blobs_url: string; + branches_url: string; + collaborators_url: string; + comments_url: string; + commits_url: string; + compare_url: string; + contents_url: string; + /** Format: uri */ + contributors_url: string; + /** Format: uri */ + deployments_url: string; + description: string | null; + /** Format: uri */ + downloads_url: string; + /** Format: uri */ + events_url: string; + fork: boolean; + /** Format: uri */ + forks_url: string; + full_name: string; + git_commits_url: string; + git_refs_url: string; + git_tags_url: string; + /** Format: uri */ + hooks_url: string; + /** Format: uri */ + html_url: string; + id: number; + is_template?: boolean; + node_id: string; + issue_comment_url: string; + issue_events_url: string; + issues_url: string; + keys_url: string; + labels_url: string; + /** Format: uri */ + languages_url: string; + /** Format: uri */ + merges_url: string; + milestones_url: string; + name: string; + notifications_url: string; + owner: { + /** Format: uri */ + avatar_url: string; + events_url: string; + /** Format: uri */ + followers_url: string; + following_url: string; + gists_url: string; + gravatar_id: string | null; + /** Format: uri */ + html_url: string; + id: number; + node_id: string; + login: string; + /** Format: uri */ + organizations_url: string; + /** Format: uri */ + received_events_url: string; + /** Format: uri */ + repos_url: string; + site_admin: boolean; + starred_url: string; + /** Format: uri */ + subscriptions_url: string; + type: string; + /** Format: uri */ + url: string; + }; + private: boolean; + pulls_url: string; + releases_url: string; + /** Format: uri */ + stargazers_url: string; + statuses_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + /** Format: uri */ + tags_url: string; + /** Format: uri */ + teams_url: string; + trees_url: string; + /** Format: uri */ + url: 
string; + clone_url: string; + default_branch: string; + forks: number; + forks_count: number; + git_url: string; + has_downloads: boolean; + has_issues: boolean; + has_projects: boolean; + has_wiki: boolean; + has_pages: boolean; + /** Format: uri */ + homepage: string | null; + language: string | null; + master_branch?: string; + archived: boolean; + disabled: boolean; + /** @description The repository visibility: public, private, or internal. */ + visibility?: string; + /** Format: uri */ + mirror_url: string | null; + open_issues: number; + open_issues_count: number; + permissions?: { + admin: boolean; + maintain?: boolean; + push: boolean; + triage?: boolean; + pull: boolean; + }; + temp_clone_token?: string; + allow_merge_commit?: boolean; + allow_squash_merge?: boolean; + allow_rebase_merge?: boolean; + license: components["schemas"]["nullable-license-simple"]; + /** Format: date-time */ + pushed_at: string; + size: number; + ssh_url: string; + stargazers_count: number; + /** Format: uri */ + svn_url: string; + topics?: string[]; + watchers: number; + watchers_count: number; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + allow_forking?: boolean; + web_commit_signoff_required?: boolean; + }; + sha: string; + user: { + /** Format: uri */ + avatar_url: string; + events_url: string; + /** Format: uri */ + followers_url: string; + following_url: string; + gists_url: string; + gravatar_id: string | null; + /** Format: uri */ + html_url: string; + id: number; + node_id: string; + login: string; + /** Format: uri */ + organizations_url: string; + /** Format: uri */ + received_events_url: string; + /** Format: uri */ + repos_url: string; + site_admin: boolean; + starred_url: string; + /** Format: uri */ + subscriptions_url: string; + type: string; + /** Format: uri */ + url: string; + }; + }; + _links: { + comments: components["schemas"]["link"]; + commits: components["schemas"]["link"]; + statuses: components["schemas"]["link"]; + html: components["schemas"]["link"]; + issue: components["schemas"]["link"]; + review_comments: components["schemas"]["link"]; + review_comment: components["schemas"]["link"]; + self: components["schemas"]["link"]; + }; + author_association: components["schemas"]["author-association"]; + auto_merge: components["schemas"]["auto-merge"]; + /** + * @description Indicates whether or not the pull request is a draft. + * @example false + */ + draft?: boolean; + merged: boolean; + /** @example true */ + mergeable: boolean | null; + /** @example true */ + rebaseable?: boolean | null; + /** @example clean */ + mergeable_state: string; + merged_by: components["schemas"]["nullable-simple-user"]; + /** @example 10 */ + comments: number; + /** @example 0 */ + review_comments: number; + /** + * @description Indicates whether maintainers can modify the pull request. 
+ * @example true + */ + maintainer_can_modify: boolean; + /** @example 3 */ + commits: number; + /** @example 100 */ + additions: number; + /** @example 3 */ + deletions: number; + /** @example 5 */ + changed_files: number; + }; + /** + * Pull Request Merge Result + * @description Pull Request Merge Result + */ + "pull-request-merge-result": { + sha: string; + merged: boolean; + message: string; + }; + /** + * Pull Request Review Request + * @description Pull Request Review Request + */ + "pull-request-review-request": { + users: components["schemas"]["simple-user"][]; + teams: components["schemas"]["team"][]; + }; + /** + * Pull Request Review + * @description Pull Request Reviews are reviews on pull requests. + */ + "pull-request-review": { + /** + * @description Unique identifier of the review + * @example 42 + */ + id: number; + /** @example MDE3OlB1bGxSZXF1ZXN0UmV2aWV3ODA= */ + node_id: string; + user: components["schemas"]["nullable-simple-user"]; + /** + * @description The text of the review. + * @example This looks great. + */ + body: string; + /** @example CHANGES_REQUESTED */ + state: string; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/pull/12#pullrequestreview-80 + */ + html_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/pulls/12 + */ + pull_request_url: string; + _links: { + html: { + href: string; + }; + pull_request: { + href: string; + }; + }; + /** Format: date-time */ + submitted_at?: string; + /** + * @description A commit SHA for the review. + * @example 54bb654c9e6025347f57900a4a5c2313a96b8035 + */ + commit_id: string; + body_html?: string; + body_text?: string; + author_association: components["schemas"]["author-association"]; + }; + /** + * Legacy Review Comment + * @description Legacy Review Comment + */ + "review-comment": { + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/pulls/comments/1 + */ + url: string; + /** @example 42 */ + pull_request_review_id: number | null; + /** @example 10 */ + id: number; + /** @example MDI0OlB1bGxSZXF1ZXN0UmV2aWV3Q29tbWVudDEw */ + node_id: string; + /** @example @@ -16,33 +16,40 @@ public class Connection : IConnection... */ + diff_hunk: string; + /** @example file1.txt */ + path: string; + /** @example 1 */ + position: number | null; + /** @example 4 */ + original_position: number; + /** @example 6dcb09b5b57875f334f61aebed695e2e4193db5e */ + commit_id: string; + /** @example 9c48853fa3dc5c1c3d6f1f1cd1f2743e72652840 */ + original_commit_id: string; + /** @example 8 */ + in_reply_to_id?: number; + user: components["schemas"]["nullable-simple-user"]; + /** @example Great stuff */ + body: string; + /** + * Format: date-time + * @example 2011-04-14T16:00:49Z + */ + created_at: string; + /** + * Format: date-time + * @example 2011-04-14T16:00:49Z + */ + updated_at: string; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/pull/1#discussion-diff-1 + */ + html_url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/Hello-World/pulls/1 + */ + pull_request_url: string; + author_association: components["schemas"]["author-association"]; + _links: { + self: components["schemas"]["link"]; + html: components["schemas"]["link"]; + pull_request: components["schemas"]["link"]; + }; + body_text?: string; + body_html?: string; + reactions?: components["schemas"]["reaction-rollup"]; + /** + * @description The side of the first line of the range for a multi-line comment. 
+ * @default RIGHT + * @enum {string} + */ + side?: "LEFT" | "RIGHT"; + /** + * @description The side of the first line of the range for a multi-line comment. + * @default RIGHT + * @enum {string|null} + */ + start_side?: ("LEFT" | "RIGHT") | null; + /** + * @description The line of the blob to which the comment applies. The last line of the range for a multi-line comment + * @example 2 + */ + line?: number; + /** + * @description The original line of the blob to which the comment applies. The last line of the range for a multi-line comment + * @example 2 + */ + original_line?: number; + /** + * @description The first line of the range for a multi-line comment. + * @example 2 + */ + start_line?: number | null; + /** + * @description The original first line of the range for a multi-line comment. + * @example 2 + */ + original_start_line?: number | null; + }; + /** + * Release Asset + * @description Data related to a release. + */ + "release-asset": { + /** Format: uri */ + url: string; + /** Format: uri */ + browser_download_url: string; + id: number; + node_id: string; + /** + * @description The file name of the asset. + * @example Team Environment + */ + name: string; + label: string | null; + /** + * @description State of the release asset. + * @enum {string} + */ + state: "uploaded" | "open"; + content_type: string; + size: number; + download_count: number; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + uploader: components["schemas"]["nullable-simple-user"]; + }; + /** + * Release + * @description A release. + */ + release: { + /** Format: uri */ + url: string; + /** Format: uri */ + html_url: string; + /** Format: uri */ + assets_url: string; + upload_url: string; + /** Format: uri */ + tarball_url: string | null; + /** Format: uri */ + zipball_url: string | null; + id: number; + node_id: string; + /** + * @description The name of the tag. + * @example v1.0.0 + */ + tag_name: string; + /** + * @description Specifies the commitish value that determines where the Git tag is created from. + * @example master + */ + target_commitish: string; + name: string | null; + body?: string | null; + /** + * @description true to create a draft (unpublished) release, false to create a published one. + * @example false + */ + draft: boolean; + /** + * @description Whether to identify the release as a prerelease or a full release. + * @example false + */ + prerelease: boolean; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + published_at: string | null; + author: components["schemas"]["simple-user"]; + assets: components["schemas"]["release-asset"][]; + body_html?: string; + body_text?: string; + mentions_count?: number; + /** + * Format: uri + * @description The URL of the release discussion. + */ + discussion_url?: string; + reactions?: components["schemas"]["reaction-rollup"]; + }; + /** + * Generated Release Notes Content + * @description Generated name and body describing a release + */ + "release-notes-content": { + /** + * @description The generated name of the release + * @example Release v1.0.0 is now available! 
+ */ + name: string; + /** @description The generated body describing the contents of the release supporting markdown formatting */ + body: string; + }; + "secret-scanning-alert": { + number?: components["schemas"]["alert-number"]; + created_at?: components["schemas"]["alert-created-at"]; + updated_at?: components["schemas"]["alert-updated-at"]; + url?: components["schemas"]["alert-url"]; + html_url?: components["schemas"]["alert-html-url"]; + /** + * Format: uri + * @description The REST API URL of the code locations for this alert. + */ + locations_url?: string; + state?: components["schemas"]["secret-scanning-alert-state"]; + resolution?: components["schemas"]["secret-scanning-alert-resolution"]; + /** + * Format: date-time + * @description The time that the alert was resolved in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + resolved_at?: string | null; + resolved_by?: components["schemas"]["nullable-simple-user"]; + /** @description The type of secret that secret scanning detected. */ + secret_type?: string; + /** + * @description User-friendly name for the detected secret, matching the `secret_type`. + * For a list of built-in patterns, see "[Secret scanning patterns](https://docs.github.com/code-security/secret-scanning/secret-scanning-patterns#supported-secrets-for-advanced-security)." + */ + secret_type_display_name?: string; + /** @description The secret that was detected. */ + secret?: string; + /** @description Whether push protection was bypassed for the detected secret. */ + push_protection_bypassed?: boolean | null; + push_protection_bypassed_by?: components["schemas"]["nullable-simple-user"]; + /** + * Format: date-time + * @description The time that push protection was bypassed in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + push_protection_bypassed_at?: string | null; + /** @description The comment that was optionally added when this alert was closed */ + resolution_comment?: string | null; + }; + /** @description Sets an optional comment when closing an alert. Must be null when changing `state` to `open`. */ + "secret-scanning-alert-resolution-comment": string | null; + /** @description Represents a 'commit' secret scanning location type. This location type shows that a secret was detected inside a commit to a repository. */ + "secret-scanning-location-commit": { + /** + * @description The file path in the repository + * @example /example/secrets.txt + */ + path: string; + /** @description Line number at which the secret starts in the file */ + start_line: number; + /** @description Line number at which the secret ends in the file */ + end_line: number; + /** @description The column at which the secret starts within the start line when the file is interpreted as 8BIT ASCII */ + start_column: number; + /** @description The column at which the secret ends within the end line when the file is interpreted as 8BIT ASCII */ + end_column: number; + /** + * @description SHA-1 hash ID of the associated blob + * @example af5626b4a114abcb82d63db7c8082c3c4756e51b + */ + blob_sha: string; + /** @description The API URL to get the associated blob resource */ + blob_url: string; + /** + * @description SHA-1 hash ID of the associated commit + * @example af5626b4a114abcb82d63db7c8082c3c4756e51b + */ + commit_sha: string; + /** @description The API URL to get the associated commit resource */ + commit_url: string; + }; + "secret-scanning-location": { + /** + * @description The location type. Because secrets may be found in different types of resources (ie. 
code, comments, issues), this field identifies the type of resource where the secret was found. + * @example commit + * @enum {string} + */ + type: "commit"; + details: components["schemas"]["secret-scanning-location-commit"]; + }; + /** + * Stargazer + * @description Stargazer + */ + stargazer: { + /** Format: date-time */ + starred_at: string; + user: components["schemas"]["nullable-simple-user"]; + }; + /** + * Code Frequency Stat + * @description Code Frequency Stat + */ + "code-frequency-stat": number[]; + /** + * Commit Activity + * @description Commit Activity + */ + "commit-activity": { + /** + * @example [ + * 0, + * 3, + * 26, + * 20, + * 39, + * 1, + * 0 + * ] + */ + days: number[]; + /** @example 89 */ + total: number; + /** @example 1336280400 */ + week: number; + }; + /** + * Contributor Activity + * @description Contributor Activity + */ + "contributor-activity": { + author: components["schemas"]["nullable-simple-user"]; + /** @example 135 */ + total: number; + /** + * @example [ + * { + * "w": "1367712000", + * "a": 6898, + * "d": 77, + * "c": 10 + * } + * ] + */ + weeks: { + w?: number; + a?: number; + d?: number; + c?: number; + }[]; + }; + /** Participation Stats */ + "participation-stats": { + all: number[]; + owner: number[]; + }; + /** + * Repository Invitation + * @description Repository invitations let you manage who you collaborate with. + */ + "repository-subscription": { + /** + * @description Determines if notifications should be received from this repository. + * @example true + */ + subscribed: boolean; + /** @description Determines if all notifications should be blocked from this repository. */ + ignored: boolean; + reason: string | null; + /** + * Format: date-time + * @example 2012-10-06T21:34:12Z + */ + created_at: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/example/subscription + */ + url: string; + /** + * Format: uri + * @example https://api.github.com/repos/octocat/example + */ + repository_url: string; + }; + /** + * Tag + * @description Tag + */ + tag: { + /** @example v0.1 */ + name: string; + commit: { + sha: string; + /** Format: uri */ + url: string; + }; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/zipball/v0.1 + */ + zipball_url: string; + /** + * Format: uri + * @example https://github.com/octocat/Hello-World/tarball/v0.1 + */ + tarball_url: string; + node_id: string; + }; + /** + * Tag protection + * @description Tag protection + */ + "tag-protection": { + /** @example 2 */ + id?: number; + /** @example 2011-01-26T19:01:12Z */ + created_at?: string; + /** @example 2011-01-26T19:01:12Z */ + updated_at?: string; + /** @example true */ + enabled?: boolean; + /** @example v1.* */ + pattern: string; + }; + /** + * Topic + * @description A topic aggregates entities that are related to a subject. + */ + topic: { + names: string[]; + }; + /** Traffic */ + traffic: { + /** Format: date-time */ + timestamp: string; + uniques: number; + count: number; + }; + /** + * Clone Traffic + * @description Clone Traffic + */ + "clone-traffic": { + /** @example 173 */ + count: number; + /** @example 128 */ + uniques: number; + clones: components["schemas"]["traffic"][]; + }; + /** + * Content Traffic + * @description Content Traffic + */ + "content-traffic": { + /** @example /github/hubot */ + path: string; + /** @example github/hubot: A customizable life embetterment robot. 
*/ + title: string; + /** @example 3542 */ + count: number; + /** @example 2225 */ + uniques: number; + }; + /** + * Referrer Traffic + * @description Referrer Traffic + */ + "referrer-traffic": { + /** @example Google */ + referrer: string; + /** @example 4 */ + count: number; + /** @example 3 */ + uniques: number; + }; + /** + * View Traffic + * @description View Traffic + */ + "view-traffic": { + /** @example 14850 */ + count: number; + /** @example 3782 */ + uniques: number; + views: components["schemas"]["traffic"][]; + }; + /** Search Result Text Matches */ + "search-result-text-matches": { + object_url?: string; + object_type?: string | null; + property?: string; + fragment?: string; + matches?: { + text?: string; + indices?: number[]; + }[]; + }[]; + /** + * Code Search Result Item + * @description Code Search Result Item + */ + "code-search-result-item": { + name: string; + path: string; + sha: string; + /** Format: uri */ + url: string; + /** Format: uri */ + git_url: string; + /** Format: uri */ + html_url: string; + repository: components["schemas"]["minimal-repository"]; + score: number; + file_size?: number; + language?: string | null; + /** Format: date-time */ + last_modified_at?: string; + /** + * @example [ + * "73..77", + * "77..78" + * ] + */ + line_numbers?: string[]; + text_matches?: components["schemas"]["search-result-text-matches"]; + }; + /** + * Commit Search Result Item + * @description Commit Search Result Item + */ + "commit-search-result-item": { + /** Format: uri */ + url: string; + sha: string; + /** Format: uri */ + html_url: string; + /** Format: uri */ + comments_url: string; + commit: { + author: { + name: string; + email: string; + /** Format: date-time */ + date: string; + }; + committer: components["schemas"]["nullable-git-user"]; + comment_count: number; + message: string; + tree: { + sha: string; + /** Format: uri */ + url: string; + }; + /** Format: uri */ + url: string; + verification?: components["schemas"]["verification"]; + }; + author: components["schemas"]["nullable-simple-user"]; + committer: components["schemas"]["nullable-git-user"]; + parents: { + url?: string; + html_url?: string; + sha?: string; + }[]; + repository: components["schemas"]["minimal-repository"]; + score: number; + node_id: string; + text_matches?: components["schemas"]["search-result-text-matches"]; + }; + /** + * Issue Search Result Item + * @description Issue Search Result Item + */ + "issue-search-result-item": { + /** Format: uri */ + url: string; + /** Format: uri */ + repository_url: string; + labels_url: string; + /** Format: uri */ + comments_url: string; + /** Format: uri */ + events_url: string; + /** Format: uri */ + html_url: string; + id: number; + node_id: string; + number: number; + title: string; + locked: boolean; + active_lock_reason?: string | null; + assignees?: components["schemas"]["simple-user"][] | null; + user: components["schemas"]["nullable-simple-user"]; + labels: { + /** Format: int64 */ + id?: number; + node_id?: string; + url?: string; + name?: string; + color?: string; + default?: boolean; + description?: string | null; + }[]; + state: string; + state_reason?: string | null; + assignee: components["schemas"]["nullable-simple-user"]; + milestone: components["schemas"]["nullable-milestone"]; + comments: number; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + /** Format: date-time */ + closed_at: string | null; + text_matches?: components["schemas"]["search-result-text-matches"]; + 
pull_request?: { + /** Format: date-time */ + merged_at?: string | null; + /** Format: uri */ + diff_url: string | null; + /** Format: uri */ + html_url: string | null; + /** Format: uri */ + patch_url: string | null; + /** Format: uri */ + url: string | null; + }; + body?: string; + score: number; + author_association: components["schemas"]["author-association"]; + draft?: boolean; + repository?: components["schemas"]["repository"]; + body_html?: string; + body_text?: string; + /** Format: uri */ + timeline_url?: string; + performed_via_github_app?: components["schemas"]["nullable-integration"]; + reactions?: components["schemas"]["reaction-rollup"]; + }; + /** + * Label Search Result Item + * @description Label Search Result Item + */ + "label-search-result-item": { + id: number; + node_id: string; + /** Format: uri */ + url: string; + name: string; + color: string; + default: boolean; + description: string | null; + score: number; + text_matches?: components["schemas"]["search-result-text-matches"]; + }; + /** + * Repo Search Result Item + * @description Repo Search Result Item + */ + "repo-search-result-item": { + id: number; + node_id: string; + name: string; + full_name: string; + owner: components["schemas"]["nullable-simple-user"]; + private: boolean; + /** Format: uri */ + html_url: string; + description: string | null; + fork: boolean; + /** Format: uri */ + url: string; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + /** Format: date-time */ + pushed_at: string; + /** Format: uri */ + homepage: string | null; + size: number; + stargazers_count: number; + watchers_count: number; + language: string | null; + forks_count: number; + open_issues_count: number; + master_branch?: string; + default_branch: string; + score: number; + /** Format: uri */ + forks_url: string; + keys_url: string; + collaborators_url: string; + /** Format: uri */ + teams_url: string; + /** Format: uri */ + hooks_url: string; + issue_events_url: string; + /** Format: uri */ + events_url: string; + assignees_url: string; + branches_url: string; + /** Format: uri */ + tags_url: string; + blobs_url: string; + git_tags_url: string; + git_refs_url: string; + trees_url: string; + statuses_url: string; + /** Format: uri */ + languages_url: string; + /** Format: uri */ + stargazers_url: string; + /** Format: uri */ + contributors_url: string; + /** Format: uri */ + subscribers_url: string; + /** Format: uri */ + subscription_url: string; + commits_url: string; + git_commits_url: string; + comments_url: string; + issue_comment_url: string; + contents_url: string; + compare_url: string; + /** Format: uri */ + merges_url: string; + archive_url: string; + /** Format: uri */ + downloads_url: string; + issues_url: string; + pulls_url: string; + milestones_url: string; + notifications_url: string; + labels_url: string; + releases_url: string; + /** Format: uri */ + deployments_url: string; + git_url: string; + ssh_url: string; + clone_url: string; + /** Format: uri */ + svn_url: string; + forks: number; + open_issues: number; + watchers: number; + topics?: string[]; + /** Format: uri */ + mirror_url: string | null; + has_issues: boolean; + has_projects: boolean; + has_pages: boolean; + has_wiki: boolean; + has_downloads: boolean; + archived: boolean; + /** @description Returns whether or not this repository disabled. */ + disabled: boolean; + /** @description The repository visibility: public, private, or internal. 
*/ + visibility?: string; + license: components["schemas"]["nullable-license-simple"]; + permissions?: { + admin: boolean; + maintain?: boolean; + push: boolean; + triage?: boolean; + pull: boolean; + }; + text_matches?: components["schemas"]["search-result-text-matches"]; + temp_clone_token?: string; + allow_merge_commit?: boolean; + allow_squash_merge?: boolean; + allow_rebase_merge?: boolean; + allow_auto_merge?: boolean; + delete_branch_on_merge?: boolean; + allow_forking?: boolean; + is_template?: boolean; + /** @example false */ + web_commit_signoff_required?: boolean; + }; + /** + * Topic Search Result Item + * @description Topic Search Result Item + */ + "topic-search-result-item": { + name: string; + display_name: string | null; + short_description: string | null; + description: string | null; + created_by: string | null; + released: string | null; + /** Format: date-time */ + created_at: string; + /** Format: date-time */ + updated_at: string; + featured: boolean; + curated: boolean; + score: number; + repository_count?: number | null; + /** Format: uri */ + logo_url?: string | null; + text_matches?: components["schemas"]["search-result-text-matches"]; + related?: + | { + topic_relation?: { + id?: number; + name?: string; + topic_id?: number; + relation_type?: string; + }; + }[] + | null; + aliases?: + | { + topic_relation?: { + id?: number; + name?: string; + topic_id?: number; + relation_type?: string; + }; + }[] + | null; + }; + /** + * User Search Result Item + * @description User Search Result Item + */ + "user-search-result-item": { + login: string; + id: number; + node_id: string; + /** Format: uri */ + avatar_url: string; + gravatar_id: string | null; + /** Format: uri */ + url: string; + /** Format: uri */ + html_url: string; + /** Format: uri */ + followers_url: string; + /** Format: uri */ + subscriptions_url: string; + /** Format: uri */ + organizations_url: string; + /** Format: uri */ + repos_url: string; + /** Format: uri */ + received_events_url: string; + type: string; + score: number; + following_url: string; + gists_url: string; + starred_url: string; + events_url: string; + public_repos?: number; + public_gists?: number; + followers?: number; + following?: number; + /** Format: date-time */ + created_at?: string; + /** Format: date-time */ + updated_at?: string; + name?: string | null; + bio?: string | null; + /** Format: email */ + email?: string | null; + location?: string | null; + site_admin: boolean; + hireable?: boolean | null; + text_matches?: components["schemas"]["search-result-text-matches"]; + blog?: string | null; + company?: string | null; + /** Format: date-time */ + suspended_at?: string | null; + }; + /** + * Private User + * @description Private User + */ + "private-user": { + /** @example octocat */ + login: string; + /** @example 1 */ + id: number; + /** @example MDQ6VXNlcjE= */ + node_id: string; + /** + * Format: uri + * @example https://github.com/images/error/octocat_happy.gif + */ + avatar_url: string; + /** @example 41d064eb2195891e12d0413f63227ea7 */ + gravatar_id: string | null; + /** + * Format: uri + * @example https://api.github.com/users/octocat + */ + url: string; + /** + * Format: uri + * @example https://github.com/octocat + */ + html_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/followers + */ + followers_url: string; + /** @example https://api.github.com/users/octocat/following{/other_user} */ + following_url: string; + /** @example https://api.github.com/users/octocat/gists{/gist_id} 
*/ + gists_url: string; + /** @example https://api.github.com/users/octocat/starred{/owner}{/repo} */ + starred_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/subscriptions + */ + subscriptions_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/orgs + */ + organizations_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/repos + */ + repos_url: string; + /** @example https://api.github.com/users/octocat/events{/privacy} */ + events_url: string; + /** + * Format: uri + * @example https://api.github.com/users/octocat/received_events + */ + received_events_url: string; + /** @example User */ + type: string; + site_admin: boolean; + /** @example monalisa octocat */ + name: string | null; + /** @example GitHub */ + company: string | null; + /** @example https://github.com/blog */ + blog: string | null; + /** @example San Francisco */ + location: string | null; + /** + * Format: email + * @example octocat@github.com + */ + email: string | null; + hireable: boolean | null; + /** @example There once was... */ + bio: string | null; + /** @example monalisa */ + twitter_username?: string | null; + /** @example 2 */ + public_repos: number; + /** @example 1 */ + public_gists: number; + /** @example 20 */ + followers: number; + /** @example 0 */ + following: number; + /** + * Format: date-time + * @example 2008-01-14T04:33:35Z + */ + created_at: string; + /** + * Format: date-time + * @example 2008-01-14T04:33:35Z + */ + updated_at: string; + /** @example 81 */ + private_gists: number; + /** @example 100 */ + total_private_repos: number; + /** @example 100 */ + owned_private_repos: number; + /** @example 10000 */ + disk_usage: number; + /** @example 8 */ + collaborators: number; + /** @example true */ + two_factor_authentication: boolean; + plan?: { + collaborators: number; + name: string; + space: number; + private_repos: number; + }; + /** Format: date-time */ + suspended_at?: string | null; + business_plus?: boolean; + ldap_dn?: string; + }; + /** + * Codespaces Secret + * @description Secrets for a GitHub Codespace. + */ + "codespaces-secret": { + /** + * @description The name of the secret + * @example SECRET_NAME + */ + name: string; + /** + * Format: date-time + * @description The date and time at which the secret was created, in ISO 8601 format':' YYYY-MM-DDTHH:MM:SSZ. + */ + created_at: string; + /** + * Format: date-time + * @description The date and time at which the secret was last updated, in ISO 8601 format':' YYYY-MM-DDTHH:MM:SSZ. + */ + updated_at: string; + /** + * @description The type of repositories in the organization that the secret is visible to + * @enum {string} + */ + visibility: "all" | "private" | "selected"; + /** + * Format: uri + * @description The API URL at which the list of repositories this secret is visible to can be retrieved + * @example https://api.github.com/user/secrets/SECRET_NAME/repositories + */ + selected_repositories_url: string; + }; + /** + * CodespacesUserPublicKey + * @description The public key used for setting user Codespaces' Secrets. + */ + "codespaces-user-public-key": { + /** + * @description The identifier for the key. + * @example 1234567 + */ + key_id: string; + /** + * @description The Base64 encoded public key. + * @example hBT5WZEj8ZoOv6TYJsfWq7MxTEQopZO5/IT3ZCVQPzs= + */ + key: string; + }; + /** + * Fetches information about an export of a codespace. + * @description An export of a codespace. 
Also, latest export details for a codespace can be fetched with id = latest + */ + "codespace-export-details": { + /** + * @description State of the latest export + * @example succeeded | failed | in_progress + */ + state?: string | null; + /** + * Format: date-time + * @description Completion time of the last export operation + * @example 2021-01-01T19:01:12Z + */ + completed_at?: string | null; + /** + * @description Name of the exported branch + * @example codespace-monalisa-octocat-hello-world-g4wpq6h95q + */ + branch?: string | null; + /** + * @description Git commit SHA of the exported branch + * @example fd95a81ca01e48ede9f39c799ecbcef817b8a3b2 + */ + sha?: string | null; + /** + * @description Id for the export details + * @example latest + */ + id?: string; + /** + * @description Url for fetching export details + * @example https://api.github.com/user/codespaces/:name/exports/latest + */ + export_url?: string; + /** + * @description Web url for the exported branch + * @example https://github.com/octocat/hello-world/tree/:branch + */ + html_url?: string | null; + }; + /** + * Email + * @description Email + */ + email: { + /** + * Format: email + * @example octocat@github.com + */ + email: string; + /** @example true */ + primary: boolean; + /** @example true */ + verified: boolean; + /** @example public */ + visibility: string | null; + }; + /** + * GPG Key + * @description A unique encryption key + */ + "gpg-key": { + /** @example 3 */ + id: number; + /** @example Octocat's GPG Key */ + name?: string | null; + primary_key_id: number | null; + /** @example 3262EFF25BA0D270 */ + key_id: string; + /** @example xsBNBFayYZ... */ + public_key: string; + /** + * @example [ + * { + * "email": "octocat@users.noreply.github.com", + * "verified": true + * } + * ] + */ + emails: { + email?: string; + verified?: boolean; + }[]; + /** + * @example [ + * { + * "id": 4, + * "primary_key_id": 3, + * "key_id": "4A595D4C72EE49C7", + * "public_key": "zsBNBFayYZ...", + * "emails": [], + * "subkeys": [], + * "can_sign": false, + * "can_encrypt_comms": true, + * "can_encrypt_storage": true, + * "can_certify": false, + * "created_at": "2016-03-24T11:31:04-06:00", + * "expires_at": null, + * "revoked": false + * } + * ] + */ + subkeys: { + id?: number; + primary_key_id?: number; + key_id?: string; + public_key?: string; + emails?: unknown[]; + subkeys?: unknown[]; + can_sign?: boolean; + can_encrypt_comms?: boolean; + can_encrypt_storage?: boolean; + can_certify?: boolean; + created_at?: string; + expires_at?: string | null; + raw_key?: string | null; + revoked?: boolean; + }[]; + /** @example true */ + can_sign: boolean; + can_encrypt_comms: boolean; + can_encrypt_storage: boolean; + /** @example true */ + can_certify: boolean; + /** + * Format: date-time + * @example 2016-03-24T11:31:04-06:00 + */ + created_at: string; + /** Format: date-time */ + expires_at: string | null; + /** @example true */ + revoked: boolean; + raw_key: string | null; + }; + /** + * Key + * @description Key + */ + key: { + key: string; + id: number; + url: string; + title: string; + /** Format: date-time */ + created_at: string; + verified: boolean; + read_only: boolean; + }; + /** Marketplace Account */ + "marketplace-account": { + /** Format: uri */ + url: string; + id: number; + type: string; + node_id?: string; + login: string; + /** Format: email */ + email?: string | null; + /** Format: email */ + organization_billing_email?: string | null; + }; + /** + * User Marketplace Purchase + * @description User Marketplace Purchase 
+ */ + "user-marketplace-purchase": { + /** @example monthly */ + billing_cycle: string; + /** + * Format: date-time + * @example 2017-11-11T00:00:00Z + */ + next_billing_date: string | null; + unit_count: number | null; + /** @example true */ + on_free_trial: boolean; + /** + * Format: date-time + * @example 2017-11-11T00:00:00Z + */ + free_trial_ends_on: string | null; + /** + * Format: date-time + * @example 2017-11-02T01:12:12Z + */ + updated_at: string | null; + account: components["schemas"]["marketplace-account"]; + plan: components["schemas"]["marketplace-listing-plan"]; + }; + /** + * SSH Signing Key + * @description A public SSH key used to sign Git commits + */ + "ssh-signing-key": { + key: string; + id: number; + title: string; + /** Format: date-time */ + created_at: string; + }; + /** + * Starred Repository + * @description Starred Repository + */ + "starred-repository": { + /** Format: date-time */ + starred_at: string; + repo: components["schemas"]["repository"]; + }; + /** + * Hovercard + * @description Hovercard + */ + hovercard: { + contexts: { + message: string; + octicon: string; + }[]; + }; + /** + * Key Simple + * @description Key Simple + */ + "key-simple": { + id: number; + key: string; + }; + /** + * Simple Installation + * @description Simple Installation + */ + "simple-installation": { + /** + * @description The ID of the installation. + * @example 1 + */ + id: number; + /** + * @description The global node ID of the installation. + * @example MDQ6VXNlcjU4MzIzMQ== + */ + node_id: string; + }; + "webhook-merge-group-checks-requested": { + action: string; + installation?: components["schemas"]["simple-installation"]; + merge_group: { + base_ref: string; + head_ref: string; + head_sha: string; + }; + organization?: components["schemas"]["organization-simple"]; + repository?: components["schemas"]["repository"]; + sender?: components["schemas"]["simple-user"]; + }; + }; + responses: { + /** Resource not found */ + not_found: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + /** Validation failed, or the endpoint has been spammed. */ + validation_failed_simple: { + content: { + "application/json": components["schemas"]["validation-error-simple"]; + }; + }; + /** Bad Request */ + bad_request: { + content: { + "application/json": components["schemas"]["basic-error"]; + "application/scim+json": components["schemas"]["scim-error"]; + }; + }; + /** Validation failed, or the endpoint has been spammed. 
*/ + validation_failed: { + content: { + "application/json": components["schemas"]["validation-error"]; + }; + }; + /** Accepted */ + accepted: { + content: { + "application/json": { [key: string]: unknown }; + }; + }; + /** Forbidden */ + forbidden: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + /** Requires authentication */ + requires_authentication: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + /** Not modified */ + not_modified: unknown; + /** Response */ + actions_runner_labels: { + content: { + "application/json": { + total_count: number; + labels: components["schemas"]["runner-label"][]; + }; + }; + }; + /** Response */ + actions_runner_labels_readonly: { + content: { + "application/json": { + total_count: number; + labels: components["schemas"]["runner-label"][]; + }; + }; + }; + /** Response if GitHub Advanced Security is not enabled for this repository */ + code_scanning_forbidden_read: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + /** Service unavailable */ + service_unavailable: { + content: { + "application/json": { + code?: string; + message?: string; + documentation_url?: string; + }; + }; + }; + /** Forbidden Gist */ + forbidden_gist: { + content: { + "application/json": { + block?: { + reason?: string; + created_at?: string; + html_url?: string | null; + }; + message?: string; + documentation_url?: string; + }; + }; + }; + /** Moved permanently */ + moved_permanently: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + /** Conflict */ + conflict: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + /** Internal Error */ + internal_error: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + /** Gone */ + gone: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + /** Temporary Redirect */ + temporary_redirect: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + /** Response if the repository is archived or if GitHub Advanced Security is not enabled for this repository */ + code_scanning_forbidden_write: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + /** Found */ + found: unknown; + /** A header with no content is returned. */ + no_content: unknown; + }; + parameters: { + /** @description The number of results per page (max 100). */ + "per-page": number; + /** @description Used for pagination: the starting delivery from which the page of deliveries is fetched. Refer to the `link` header for the next and previous page cursors. */ + cursor: string; + "delivery-id": number; + /** @description Page number of the results to fetch. */ + page: number; + /** @description Only show notifications updated after the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ + since: string; + /** @description The unique identifier of the installation. */ + "installation-id": number; + /** @description The client ID of the GitHub app. */ + "client-id": string; + "app-slug": string; + /** @description The slug version of the enterprise name or the login of an organization. */ + "enterprise-or-org": string; + /** @description The slug version of the enterprise name. You can also substitute this value with the enterprise id. 
*/ + enterprise: string; + /** @description The unique identifier of the organization. */ + "org-id": number; + /** @description Only return runner groups that are allowed to be used by this organization. */ + "visible-to-organization": string; + /** @description Unique identifier of the self-hosted runner group. */ + "runner-group-id": number; + /** @description Unique identifier of the self-hosted runner. */ + "runner-id": number; + /** @description The name of a self-hosted runner's custom label. */ + "runner-label-name": string; + /** @description The name of a code scanning tool. Only results by this tool will be listed. You can specify the tool by using either `tool_name` or `tool_guid`, but not both. */ + "tool-name": components["schemas"]["code-scanning-analysis-tool-name"]; + /** @description The GUID of a code scanning tool. Only results by this tool will be listed. Note that some code scanning tools may not include a GUID in their analysis data. You can specify the tool by using either `tool_guid` or `tool_name`, but not both. */ + "tool-guid": components["schemas"]["code-scanning-analysis-tool-guid"]; + /** @description A cursor, as given in the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header). If specified, the query only searches for events before this cursor. */ + "pagination-before": string; + /** @description A cursor, as given in the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header). If specified, the query only searches for events after this cursor. */ + "pagination-after": string; + /** @description The direction to sort the results by. */ + direction: "asc" | "desc"; + /** @description Set to `open` or `resolved` to only list secret scanning alerts in a specific state. */ + "secret-scanning-alert-state": "open" | "resolved"; + /** + * @description A comma-separated list of secret types to return. By default all secret types are returned. + * See "[Secret scanning patterns](https://docs.github.com/code-security/secret-scanning/secret-scanning-patterns#supported-secrets-for-advanced-security)" + * for a complete list of secret types. + */ + "secret-scanning-alert-secret-type": string; + /** @description A comma-separated list of resolutions. Only secret scanning alerts with one of these resolutions are listed. Valid resolutions are `false_positive`, `wont_fix`, `revoked`, `pattern_edited`, `pattern_deleted` or `used_in_tests`. */ + "secret-scanning-alert-resolution": string; + /** @description The property to sort the results by. `created` means when the alert was created. `updated` means when the alert was updated or resolved. */ + "secret-scanning-alert-sort": "created" | "updated"; + /** @description The unique identifier of the gist. */ + "gist-id": string; + /** @description The unique identifier of the comment. */ + "comment-id": number; + /** @description A list of comma separated label names. Example: `bug,ui,@high` */ + labels: string; + /** @description account_id parameter */ + "account-id": number; + /** @description The unique identifier of the plan. */ + "plan-id": number; + /** @description The property to sort the results by. `created` means when the repository was starred. `updated` means when the repository was last pushed to. */ + sort: "created" | "updated"; + /** @description The account owner of the repository. The name is not case sensitive. */ + owner: string; + /** @description The name of the repository. The name is not case sensitive. 
*/ + repo: string; + /** @description If `true`, show notifications marked as read. */ + all: boolean; + /** @description If `true`, only shows notifications in which the user is directly participating or mentioned. */ + participating: boolean; + /** @description Only show notifications updated before the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ + before: string; + /** @description The unique identifier of the pull request thread. */ + "thread-id": number; + /** @description An organization ID. Only return organizations with an ID greater than this ID. */ + "since-org": number; + /** @description The organization name. The name is not case sensitive. */ + org: string; + /** @description The name of the secret. */ + "secret-name": string; + /** @description The unique identifier of the repository. */ + "repository-id": number; + /** @description Only return runner groups that are allowed to be used by this repository. */ + "visible-to-repository": string; + /** @description The handle for the GitHub user account. */ + username: string; + /** @description The unique identifier of the role. */ + "role-id": number; + /** @description The unique identifier of the hook. */ + "hook-id": number; + /** @description The unique identifier of the invitation. */ + "invitation-id": number; + /** @description The name of the codespace. */ + "codespace-name": string; + /** @description The unique identifier of the migration. */ + "migration-id": number; + /** @description repo_name parameter */ + "repo-name": string; + /** @description The selected visibility of the packages. Only `container` package_types currently support `internal` visibility properly. For other ecosystems `internal` is synonymous with `private`. This parameter is optional and only filters an existing result set. */ + "package-visibility": "public" | "private" | "internal"; + /** @description The type of supported package. Packages in GitHub's Gradle registry have the type `maven`. Docker images pushed to GitHub's Container registry (`ghcr.io`) have the type `container`. You can use the type `docker` to find images that were pushed to GitHub's Docker registry (`docker.pkg.github.com`), even if these have now been migrated to the Container registry. */ + "package-type": + | "npm" + | "maven" + | "rubygems" + | "docker" + | "nuget" + | "container"; + /** @description The name of the package. */ + "package-name": string; + /** @description Unique identifier of the package version. */ + "package-version-id": number; + /** @description A cursor, as given in the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header). If specified, the query only searches for events before this cursor. To receive an initial cursor on your first request, include an empty "before" query string. */ + "secret-scanning-pagination-before-org-repo": string; + /** @description A cursor, as given in the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header). If specified, the query only searches for events after this cursor. To receive an initial cursor on your first request, include an empty "after" query string. */ + "secret-scanning-pagination-after-org-repo": string; + /** @description The slug of the team name. */ + "team-slug": string; + /** @description The number that identifies the discussion. */ + "discussion-number": number; + /** @description The number that identifies the comment. 
*/ + "comment-number": number; + /** @description The unique identifier of the reaction. */ + "reaction-id": number; + /** @description The unique identifier of the project. */ + "project-id": number; + /** @description The security feature to enable or disable. */ + "security-product": + | "dependency_graph" + | "dependabot_alerts" + | "dependabot_security_updates" + | "advanced_security" + | "secret_scanning" + | "secret_scanning_push_protection"; + /** + * @description The action to take. + * + * `enable_all` means to enable the specified security feature for all repositories in the organization. + * `disable_all` means to disable the specified security feature for all repositories in the organization. + */ + "org-security-product-enablement": "enable_all" | "disable_all"; + /** @description The unique identifier of the card. */ + "card-id": number; + /** @description The unique identifier of the column. */ + "column-id": number; + /** @description The unique identifier of the artifact. */ + "artifact-id": number; + /** @description The Git reference for the results you want to list. The `ref` for a branch can be formatted either as `refs/heads/` or simply ``. To reference a pull request use `refs/pull//merge`. */ + "git-ref": components["schemas"]["code-scanning-ref"]; + /** @description An explicit key or prefix for identifying the cache */ + "actions-cache-key": string; + /** @description The property to sort the results by. `created_at` means when the cache was created. `last_accessed_at` means when the cache was last accessed. `size_in_bytes` is the size of the cache in bytes. */ + "actions-cache-list-sort": + | "created_at" + | "last_accessed_at" + | "size_in_bytes"; + /** @description A key for identifying the cache. */ + "actions-cache-key-required": string; + /** @description The unique identifier of the GitHub Actions cache. */ + "cache-id": number; + /** @description The unique identifier of the job. */ + "job-id": number; + /** @description Returns someone's workflow runs. Use the login for the user who created the `push` associated with the check suite or workflow run. */ + actor: string; + /** @description Returns workflow runs associated with a branch. Use the name of the branch of the `push`. */ + "workflow-run-branch": string; + /** @description Returns workflow run triggered by the event you specify. For example, `push`, `pull_request` or `issue`. For more information, see "[Events that trigger workflows](https://docs.github.com/en/actions/automating-your-workflow-with-github-actions/events-that-trigger-workflows)." */ + event: string; + /** @description Returns workflow runs with the check run `status` or `conclusion` that you specify. For example, a conclusion can be `success` or a status can be `in_progress`. Only GitHub can set a status of `waiting` or `requested`. */ + "workflow-run-status": + | "completed" + | "action_required" + | "cancelled" + | "failure" + | "neutral" + | "skipped" + | "stale" + | "success" + | "timed_out" + | "in_progress" + | "queued" + | "requested" + | "waiting"; + /** @description Returns workflow runs created within the given date-time range. For more information on the syntax, see "[Understanding the search syntax](https://docs.github.com/search-github/getting-started-with-searching-on-github/understanding-the-search-syntax#query-for-dates)." */ + created: string; + /** @description If `true` pull requests are omitted from the response (empty array). 
*/ + "exclude-pull-requests": boolean; + /** @description Returns workflow runs with the `check_suite_id` that you specify. */ + "workflow-run-check-suite-id": number; + /** @description Only returns workflow runs that are associated with the specified `head_sha`. */ + "workflow-run-head-sha": string; + /** @description The unique identifier of the workflow run. */ + "run-id": number; + /** @description The attempt number of the workflow run. */ + "attempt-number": number; + /** @description The ID of the workflow. You can also pass the workflow file name as a string. */ + "workflow-id": number | string; + /** @description The unique identifier of the autolink. */ + "autolink-id": number; + /** @description The name of the branch. */ + branch: string; + /** @description The unique identifier of the check run. */ + "check-run-id": number; + /** @description The unique identifier of the check suite. */ + "check-suite-id": number; + /** @description Returns check runs with the specified `name`. */ + "check-name": string; + /** @description Returns check runs with the specified `status`. */ + status: "queued" | "in_progress" | "completed"; + /** @description The number that identifies an alert. You can find this at the end of the URL for a code scanning alert within GitHub, and in the `number` field in the response from the `GET /repos/{owner}/{repo}/code-scanning/alerts` operation. */ + "alert-number": components["schemas"]["alert-number"]; + /** @description The SHA of the commit. */ + "commit-sha": string; + /** + * @description A comma-separated list of states. If specified, only alerts with these states will be returned. + * + * Can be: `dismissed`, `fixed`, `open` + */ + "dependabot-alert-state": string; + /** + * @description A comma-separated list of severities. If specified, only alerts with these severities will be returned. + * + * Can be: `low`, `medium`, `high`, `critical` + */ + "dependabot-alert-severity": string; + /** + * @description A comma-separated list of ecosystems. If specified, only alerts for these ecosystems will be returned. + * + * Can be: `composer`, `go`, `maven`, `npm`, `nuget`, `pip`, `rubygems`, `rust` + */ + "dependabot-alert-ecosystem": string; + /** @description A comma-separated list of package names. If specified, only alerts for these packages will be returned. */ + "dependabot-alert-package": string; + /** @description A comma-separated list of full manifest paths. If specified, only alerts for these manifests will be returned. */ + "dependabot-alert-manifest": string; + /** @description Scope of the dependency on a Dependabot alert. */ + "dependabot-alert-scope": components["schemas"]["dependabot-alert-scope"]; + /** + * @description The property by which to sort the results. + * `created` means when the alert was created. + * `updated` means when the alert's state last changed. + */ + "dependabot-alert-sort": "created" | "updated"; + /** @description The number that identifies a Dependabot alert in its repository. You can find this at the end of the URL for a Dependabot alert within GitHub, or in `number` fields in the response from the `GET /repos/{owner}/{repo}/dependabot/alerts` operation. */ + "dependabot-alert-number": components["schemas"]["alert-number"]; + /** @description The full path, relative to the repository root, of the dependency manifest file. */ + "manifest-path": string; + /** @description deployment_id parameter */ + "deployment-id": number; + /** @description The name of the environment. 
*/ + "environment-name": string; + /** @description The unique identifier of the branch policy. */ + "branch-policy-id": number; + /** @description A user ID. Only return users with an ID greater than this ID. */ + "since-user": number; + /** @description The number that identifies the issue. */ + "issue-number": number; + /** @description The unique identifier of the key. */ + "key-id": number; + /** @description The number that identifies the milestone. */ + "milestone-number": number; + /** @description The number that identifies the pull request. */ + "pull-number": number; + /** @description The unique identifier of the review. */ + "review-id": number; + /** @description The unique identifier of the asset. */ + "asset-id": number; + /** @description The unique identifier of the release. */ + "release-id": number; + /** @description The unique identifier of the tag protection. */ + "tag-protection-id": number; + /** @description The time frame to display results for. */ + per: "" | "day" | "week"; + /** @description A repository ID. Only return repositories with an ID greater than this ID. */ + "since-repo": number; + /** @description Determines whether the first search result returned is the highest number of matches (`desc`) or lowest number of matches (`asc`). This parameter is ignored unless you provide `sort`. */ + order: "desc" | "asc"; + /** @description The unique identifier of the team. */ + "team-id": number; + /** @description ID of the Repository to filter on */ + "repository-id-in-query": number; + /** @description The ID of the export operation, or `latest`. Currently only `latest` is currently supported. */ + "export-id": string; + /** @description The unique identifier of the GPG key. */ + "gpg-key-id": number; + /** @description The unique identifier of the SSH signing key. */ + "ssh-signing-key-id": number; + }; + headers: { + link?: string; + "content-type"?: string; + "x-common-marker-version"?: string; + "x-rate-limit-limit"?: number; + "x-rate-limit-remaining"?: number; + "x-rate-limit-reset"?: number; + location?: string; + }; +} + +export interface operations { + /** Get Hypermedia links to resources accessible in GitHub's REST API */ + "meta/root": { + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["root"]; + }; + }; + }; + }; + /** + * Returns the GitHub App associated with the authentication credentials used. To see how many app installations are associated with this GitHub App, see the `installations_count` in the response. For more details about your app's installations, see the "[List installations for the authenticated app](https://docs.github.com/rest/reference/apps#list-installations-for-the-authenticated-app)" endpoint. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + "apps/get-authenticated": { + parameters: {}; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["integration"]; + }; + }; + }; + }; + /** Use this endpoint to complete the handshake necessary when implementing the [GitHub App Manifest flow](https://docs.github.com/apps/building-github-apps/creating-github-apps-from-a-manifest/). When you create a GitHub App with the manifest flow, you receive a temporary `code` used to retrieve the GitHub App's `id`, `pem` (private key), and `webhook_secret`. 
*/ + "apps/create-from-manifest": { + parameters: { + path: { + code: string; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["integration"] & + ({ + client_id: string; + client_secret: string; + webhook_secret: string | null; + pem: string; + } & { [key: string]: unknown }); + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed_simple"]; + }; + }; + /** + * Returns the webhook configuration for a GitHub App. For more information about configuring a webhook for your app, see "[Creating a GitHub App](/developers/apps/creating-a-github-app)." + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + "apps/get-webhook-config-for-app": { + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["webhook-config"]; + }; + }; + }; + }; + /** + * Updates the webhook configuration for a GitHub App. For more information about configuring a webhook for your app, see "[Creating a GitHub App](/developers/apps/creating-a-github-app)." + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + "apps/update-webhook-config-for-app": { + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["webhook-config"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + url?: components["schemas"]["webhook-config-url"]; + content_type?: components["schemas"]["webhook-config-content-type"]; + secret?: components["schemas"]["webhook-config-secret"]; + insecure_ssl?: components["schemas"]["webhook-config-insecure-ssl"]; + }; + }; + }; + }; + /** + * Returns a list of webhook deliveries for the webhook configured for a GitHub App. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + "apps/list-webhook-deliveries": { + parameters: { + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Used for pagination: the starting delivery from which the page of deliveries is fetched. Refer to the `link` header for the next and previous page cursors. */ + cursor?: components["parameters"]["cursor"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["hook-delivery-item"][]; + }; + }; + 400: components["responses"]["bad_request"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Returns a delivery for the webhook configured for a GitHub App. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + "apps/get-webhook-delivery": { + parameters: { + path: { + delivery_id: components["parameters"]["delivery-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["hook-delivery"]; + }; + }; + 400: components["responses"]["bad_request"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Redeliver a delivery for the webhook configured for a GitHub App. 
+ * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + "apps/redeliver-webhook-delivery": { + parameters: { + path: { + delivery_id: components["parameters"]["delivery-id"]; + }; + }; + responses: { + 202: components["responses"]["accepted"]; + 400: components["responses"]["bad_request"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + * + * The permissions the installation has are included under the `permissions` key. + */ + "apps/list-installations": { + parameters: { + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + /** Only show notifications updated after the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ + since?: components["parameters"]["since"]; + outdated?: string; + }; + }; + responses: { + /** The permissions the installation has are included under the `permissions` key. */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["installation"][]; + }; + }; + }; + }; + /** + * Enables an authenticated GitHub App to find an installation's information using the installation id. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + "apps/get-installation": { + parameters: { + path: { + /** The unique identifier of the installation. */ + installation_id: components["parameters"]["installation-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["installation"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Uninstalls a GitHub App on a user, organization, or business account. If you prefer to temporarily suspend an app's access to your account's resources, then we recommend the "[Suspend an app installation](https://docs.github.com/rest/reference/apps/#suspend-an-app-installation)" endpoint. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + "apps/delete-installation": { + parameters: { + path: { + /** The unique identifier of the installation. */ + installation_id: components["parameters"]["installation-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Creates an installation access token that enables a GitHub App to make authenticated API requests for the app's installation on an organization or individual account. Installation tokens expire one hour from the time you create them. Using an expired token produces a status code of `401 - Unauthorized`, and requires creating a new installation token. By default the installation token has access to all repositories that the installation can access. To restrict the access to specific repositories, you can provide the `repository_ids` when creating the token. When you omit `repository_ids`, the response does not contain the `repositories` key. 
+ * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + "apps/create-installation-access-token": { + parameters: { + path: { + /** The unique identifier of the installation. */ + installation_id: components["parameters"]["installation-id"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["installation-token"]; + }; + }; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description List of repository names that the token should have access to */ + repositories?: string[]; + /** + * @description List of repository IDs that the token should have access to + * @example [ + * 1 + * ] + */ + repository_ids?: number[]; + permissions?: components["schemas"]["app-permissions"]; + }; + }; + }; + }; + /** + * Suspends a GitHub App on a user, organization, or business account, which blocks the app from accessing the account's resources. When a GitHub App is suspended, the app's access to the GitHub API or webhook events is blocked for that account. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + "apps/suspend-installation": { + parameters: { + path: { + /** The unique identifier of the installation. */ + installation_id: components["parameters"]["installation-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Removes a GitHub App installation suspension. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + "apps/unsuspend-installation": { + parameters: { + path: { + /** The unique identifier of the installation. */ + installation_id: components["parameters"]["installation-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 404: components["responses"]["not_found"]; + }; + }; + /** + * OAuth application owners can revoke a grant for their OAuth application and a specific user. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) when accessing this endpoint, using the OAuth application's `client_id` and `client_secret` as the username and password. You must also provide a valid OAuth `access_token` as an input parameter and the grant for the token's owner will be deleted. + * Deleting an OAuth application's grant will also delete all OAuth tokens associated with the application for the user. Once deleted, the application will have no access to the user's account and will no longer be listed on [the application authorizations settings screen within GitHub](https://github.com/settings/applications#authorized). + */ + "apps/delete-authorization": { + parameters: { + path: { + /** The client ID of the GitHub app. 
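/*
 * Illustrative sketch (assumed, not part of the generated file): creating an
 * installation access token as described above, optionally restricted to
 * specific repository IDs. The route string follows the public REST docs;
 * the app JWT is assumed to be provided by the caller, and the import path
 * is hypothetical.
 */
import type { operations } from "./github-openapi-types";

type CreateInstallationTokenBody =
  operations["apps/create-installation-access-token"]["requestBody"]["content"]["application/json"];
type InstallationToken =
  operations["apps/create-installation-access-token"]["responses"][201]["content"]["application/json"];

async function createInstallationToken(
  appJwt: string,
  installationId: number,
  body: CreateInstallationTokenBody = {}
): Promise<InstallationToken> {
  const res = await fetch(
    `https://api.github.com/app/installations/${installationId}/access_tokens`,
    {
      method: "POST",
      headers: {
        accept: "application/vnd.github+json",
        authorization: `Bearer ${appJwt}`,
        "content-type": "application/json",
      },
      body: JSON.stringify(body),
    }
  );
  if (res.status !== 201) throw new Error(`Token creation failed: ${res.status}`);
  return (await res.json()) as InstallationToken;
}

// e.g. createInstallationToken(jwt, 123, { repository_ids: [1] })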
*/ + client_id: components["parameters"]["client-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The OAuth access token used to authenticate to the GitHub API. */ + access_token: string; + }; + }; + }; + }; + /** OAuth applications can use a special API method for checking OAuth token validity without exceeding the normal rate limits for failed login attempts. Authentication works differently with this particular endpoint. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) to use this endpoint, where the username is the OAuth application `client_id` and the password is its `client_secret`. Invalid tokens will return `404 NOT FOUND`. */ + "apps/check-token": { + parameters: { + path: { + /** The client ID of the GitHub app. */ + client_id: components["parameters"]["client-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["authorization"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The access_token of the OAuth application. */ + access_token: string; + }; + }; + }; + }; + /** OAuth application owners can revoke a single token for an OAuth application. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) when accessing this endpoint, using the OAuth application's `client_id` and `client_secret` as the username and password. */ + "apps/delete-token": { + parameters: { + path: { + /** The client ID of the GitHub app. */ + client_id: components["parameters"]["client-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The OAuth access token used to authenticate to the GitHub API. */ + access_token: string; + }; + }; + }; + }; + /** OAuth applications can use this API method to reset a valid OAuth token without end-user involvement. Applications must save the "token" property in the response because changes take effect immediately. You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) when accessing this endpoint, using the OAuth application's `client_id` and `client_secret` as the username and password. Invalid tokens will return `404 NOT FOUND`. */ + "apps/reset-token": { + parameters: { + path: { + /** The client ID of the GitHub app. */ + client_id: components["parameters"]["client-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["authorization"]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The access_token of the OAuth application. */ + access_token: string; + }; + }; + }; + }; + /** Use a non-scoped user-to-server OAuth access token to create a repository scoped and/or permission scoped user-to-server OAuth access token. You can specify which repositories the token can access and which permissions are granted to the token. 
You must use [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) when accessing this endpoint, using the OAuth application's `client_id` and `client_secret` as the username and password. Invalid tokens will return `404 NOT FOUND`. */ + "apps/scope-token": { + parameters: { + path: { + /** The client ID of the GitHub app. */ + client_id: components["parameters"]["client-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["authorization"]; + }; + }; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The OAuth access token used to authenticate to the GitHub API. + * @example e72e16c7e42f292c6912e7710c838347ae178b4a + */ + access_token: string; + /** + * @description The name of the user or organization to scope the user-to-server access token to. **Required** unless `target_id` is specified. + * @example octocat + */ + target?: string; + /** + * @description The ID of the user or organization to scope the user-to-server access token to. **Required** unless `target` is specified. + * @example 1 + */ + target_id?: number; + /** @description The list of repository names to scope the user-to-server access token to. `repositories` may not be specified if `repository_ids` is specified. */ + repositories?: string[]; + /** + * @description The list of repository IDs to scope the user-to-server access token to. `repository_ids` may not be specified if `repositories` is specified. + * @example [ + * 1 + * ] + */ + repository_ids?: number[]; + permissions?: components["schemas"]["app-permissions"]; + }; + }; + }; + }; + /** + * **Note**: The `:app_slug` is just the URL-friendly name of your GitHub App. You can find this on the settings page for your GitHub App (e.g., `https://github.com/settings/apps/:app_slug`). + * + * If the GitHub App you specify is public, you can access this endpoint without authenticating. If the GitHub App you specify is private, you must authenticate with a [personal access token](https://docs.github.com/articles/creating-a-personal-access-token-for-the-command-line/) or an [installation access token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-an-installation) to access this endpoint. + */ + "apps/get-by-slug": { + parameters: { + path: { + app_slug: components["parameters"]["app-slug"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["integration"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + "codes-of-conduct/get-all-codes-of-conduct": { + parameters: {}; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["code-of-conduct"][]; + }; + }; + 304: components["responses"]["not_modified"]; + }; + }; + "codes-of-conduct/get-conduct-code": { + parameters: { + path: { + key: string; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["code-of-conduct"]; + }; + }; + 304: components["responses"]["not_modified"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Lists all the emojis available to use on GitHub. 
*/ + "emojis/get": { + parameters: {}; + responses: { + /** Response */ + 200: { + content: { + "application/json": { [key: string]: string }; + }; + }; + 304: components["responses"]["not_modified"]; + }; + }; + /** + * Returns aggregate usage metrics for your GitHub Enterprise Server 3.5+ instance for a specified time period up to 365 days. + * + * To use this endpoint, your GitHub Enterprise Server instance must be connected to GitHub Enterprise Cloud using GitHub Connect. You must enable Server Statistics, and for the API request provide your enterprise account name or organization name connected to the GitHub Enterprise Server. For more information, see "[Enabling Server Statistics for your enterprise](/admin/configuration/configuring-github-connect/enabling-server-statistics-for-your-enterprise)" in the GitHub Enterprise Server documentation. + * + * You'll need to use a personal access token: + * - If you connected your GitHub Enterprise Server to an enterprise account and enabled Server Statistics, you'll need a personal access token with the `read:enterprise` permission. + * - If you connected your GitHub Enterprise Server to an organization account and enabled Server Statistics, you'll need a personal access token with the `read:org` permission. + * + * For more information on creating a personal access token, see "[Creating a personal access token](/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token)." + */ + "enterprise-admin/get-server-statistics": { + parameters: { + path: { + /** The slug version of the enterprise name or the login of an organization. */ + enterprise_or_org: components["parameters"]["enterprise-or-org"]; + }; + query: { + /** A cursor, as given in the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header). If specified, the query only searches for events after this cursor. */ + date_start?: string; + /** A cursor, as given in the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header). If specified, the query only searches for events before this cursor. */ + date_end?: string; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["server-statistics"]; + }; + }; + }; + }; + /** + * Gets the total GitHub Actions cache usage for an enterprise. + * The data fetched using this API is refreshed approximately every 5 minutes, so values returned from this endpoint may take at least 5 minutes to get updated. + * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. + */ + "actions/get-actions-cache-usage-for-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["actions-cache-usage-org-enterprise"]; + }; + }; + }; + }; + /** + * Gets the GitHub Actions permissions policy for organizations and allowed actions and reusable workflows in an enterprise. + * + * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. + */ + "enterprise-admin/get-github-actions-permissions-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. 
*/ + enterprise: components["parameters"]["enterprise"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["actions-enterprise-permissions"]; + }; + }; + }; + }; + /** + * Sets the GitHub Actions permissions policy for organizations and allowed actions and reusable workflows in an enterprise. + * + * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. + */ + "enterprise-admin/set-github-actions-permissions-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + requestBody: { + content: { + "application/json": { + enabled_organizations: components["schemas"]["enabled-organizations"]; + allowed_actions?: components["schemas"]["allowed-actions"]; + }; + }; + }; + }; + /** + * Lists the organizations that are selected to have GitHub Actions enabled in an enterprise. To use this endpoint, the enterprise permission policy for `enabled_organizations` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an enterprise](#set-github-actions-permissions-for-an-enterprise)." + * + * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. + */ + "enterprise-admin/list-selected-organizations-enabled-github-actions-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": { + total_count: number; + organizations: components["schemas"]["organization-simple"][]; + }; + }; + }; + }; + }; + /** + * Replaces the list of selected organizations that are enabled for GitHub Actions in an enterprise. To use this endpoint, the enterprise permission policy for `enabled_organizations` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an enterprise](#set-github-actions-permissions-for-an-enterprise)." + * + * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. + */ + "enterprise-admin/set-selected-organizations-enabled-github-actions-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + requestBody: { + content: { + "application/json": { + /** @description List of organization IDs to enable for GitHub Actions. */ + selected_organization_ids: number[]; + }; + }; + }; + }; + /** + * Adds an organization to the list of selected organizations that are enabled for GitHub Actions in an enterprise. To use this endpoint, the enterprise permission policy for `enabled_organizations` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an enterprise](#set-github-actions-permissions-for-an-enterprise)." 
+ * + * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. + */ + "enterprise-admin/enable-selected-organization-github-actions-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + /** The unique identifier of the organization. */ + org_id: components["parameters"]["org-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * Removes an organization from the list of selected organizations that are enabled for GitHub Actions in an enterprise. To use this endpoint, the enterprise permission policy for `enabled_organizations` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an enterprise](#set-github-actions-permissions-for-an-enterprise)." + * + * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. + */ + "enterprise-admin/disable-selected-organization-github-actions-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + /** The unique identifier of the organization. */ + org_id: components["parameters"]["org-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * Gets the selected actions and reusable workflows that are allowed in an enterprise. To use this endpoint, the enterprise permission policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an enterprise](#set-github-actions-permissions-for-an-enterprise)." + * + * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. + */ + "enterprise-admin/get-allowed-actions-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["selected-actions"]; + }; + }; + }; + }; + /** + * Sets the actions and reusable workflows that are allowed in an enterprise. To use this endpoint, the enterprise permission policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an enterprise](#set-github-actions-permissions-for-an-enterprise)." + * + * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. + */ + "enterprise-admin/set-allowed-actions-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + requestBody: { + content: { + "application/json": components["schemas"]["selected-actions"]; + }; + }; + }; + /** + * Gets the default workflow permissions granted to the `GITHUB_TOKEN` when running workflows in an enterprise, + * as well as whether GitHub Actions can submit approving pull request reviews. 
For more information, see + * "[Enforcing a policy for workflow permissions in your enterprise](https://docs.github.com/enterprise-cloud@latest/admin/policies/enforcing-policies-for-your-enterprise/enforcing-policies-for-github-actions-in-your-enterprise#enforcing-a-policy-for-workflow-permissions-in-your-enterprise)." + * + * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. + * GitHub Apps must have the `enterprise_administration:write` permission to use this endpoint. + */ + "actions/get-github-actions-default-workflow-permissions-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + }; + }; + responses: { + /** Success response */ + 200: { + content: { + "application/json": components["schemas"]["actions-get-default-workflow-permissions"]; + }; + }; + }; + }; + /** + * Sets the default workflow permissions granted to the `GITHUB_TOKEN` when running workflows in an enterprise, and sets + * whether GitHub Actions can submit approving pull request reviews. For more information, see + * "[Enforcing a policy for workflow permissions in your enterprise](https://docs.github.com/enterprise-cloud@latest/admin/policies/enforcing-policies-for-your-enterprise/enforcing-policies-for-github-actions-in-your-enterprise#enforcing-a-policy-for-workflow-permissions-in-your-enterprise)." + * + * You must authenticate using an access token with the `admin:enterprise` scope to use this endpoint. + * GitHub Apps must have the `enterprise_administration:write` permission to use this endpoint. + */ + "actions/set-github-actions-default-workflow-permissions-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + }; + }; + responses: { + /** Success response */ + 204: never; + }; + requestBody: { + content: { + "application/json": components["schemas"]["actions-set-default-workflow-permissions"]; + }; + }; + }; + /** + * Lists all self-hosted runner groups for an enterprise. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + "enterprise-admin/list-self-hosted-runner-groups-for-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + /** Only return runner groups that are allowed to be used by this organization. */ + visible_to_organization?: components["parameters"]["visible-to-organization"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": { + total_count: number; + runner_groups: components["schemas"]["runner-groups-enterprise"][]; + }; + }; + }; + }; + }; + /** + * Creates a new self-hosted runner group for an enterprise. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + "enterprise-admin/create-self-hosted-runner-group-for-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. 
You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["runner-groups-enterprise"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description Name of the runner group. */ + name: string; + /** + * @description Visibility of a runner group. You can select all organizations or select individual organization. + * @enum {string} + */ + visibility?: "selected" | "all"; + /** @description List of organization IDs that can access the runner group. */ + selected_organization_ids?: number[]; + /** @description List of runner IDs to add to the runner group. */ + runners?: number[]; + /** + * @description Whether the runner group can be used by `public` repositories. + * @default false + */ + allows_public_repositories?: boolean; + /** + * @description If `true`, the runner group will be restricted to running only the workflows specified in the `selected_workflows` array. + * @default false + */ + restricted_to_workflows?: boolean; + /** @description List of workflows the runner group should be allowed to run. This setting will be ignored unless `restricted_to_workflows` is set to `true`. */ + selected_workflows?: string[]; + }; + }; + }; + }; + /** + * Gets a specific self-hosted runner group for an enterprise. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + "enterprise-admin/get-self-hosted-runner-group-for-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + /** Unique identifier of the self-hosted runner group. */ + runner_group_id: components["parameters"]["runner-group-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["runner-groups-enterprise"]; + }; + }; + }; + }; + /** + * Deletes a self-hosted runner group for an enterprise. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + "enterprise-admin/delete-self-hosted-runner-group-from-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + /** Unique identifier of the self-hosted runner group. */ + runner_group_id: components["parameters"]["runner-group-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * Updates the `name` and `visibility` of a self-hosted runner group in an enterprise. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + "enterprise-admin/update-self-hosted-runner-group-for-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + /** Unique identifier of the self-hosted runner group. 
*/ + runner_group_id: components["parameters"]["runner-group-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["runner-groups-enterprise"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description Name of the runner group. */ + name?: string; + /** + * @description Visibility of a runner group. You can select all organizations or select individual organizations. + * @default all + * @enum {string} + */ + visibility?: "selected" | "all"; + /** + * @description Whether the runner group can be used by `public` repositories. + * @default false + */ + allows_public_repositories?: boolean; + /** + * @description If `true`, the runner group will be restricted to running only the workflows specified in the `selected_workflows` array. + * @default false + */ + restricted_to_workflows?: boolean; + /** @description List of workflows the runner group should be allowed to run. This setting will be ignored unless `restricted_to_workflows` is set to `true`. */ + selected_workflows?: string[]; + }; + }; + }; + }; + /** + * Lists the organizations with access to a self-hosted runner group. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + "enterprise-admin/list-org-access-to-self-hosted-runner-group-in-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + /** Unique identifier of the self-hosted runner group. */ + runner_group_id: components["parameters"]["runner-group-id"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": { + total_count: number; + organizations: components["schemas"]["organization-simple"][]; + }; + }; + }; + }; + }; + /** + * Replaces the list of organizations that have access to a self-hosted runner configured in an enterprise. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + "enterprise-admin/set-org-access-to-self-hosted-runner-group-in-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + /** Unique identifier of the self-hosted runner group. */ + runner_group_id: components["parameters"]["runner-group-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + requestBody: { + content: { + "application/json": { + /** @description List of organization IDs that can access the runner group. */ + selected_organization_ids: number[]; + }; + }; + }; + }; + /** + * Adds an organization to the list of selected organizations that can access a self-hosted runner group. The runner group must have `visibility` set to `selected`. For more information, see "[Create a self-hosted runner group for an enterprise](#create-a-self-hosted-runner-group-for-an-enterprise)." + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. 
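/*
 * Illustrative sketch (assumed): building the PATCH body for the runner-group
 * update operation declared above. `selected_workflows` is ignored unless
 * `restricted_to_workflows` is true, so the helper sets both together. The
 * import path and helper name are assumptions.
 */
import type { operations } from "./github-openapi-types";

type UpdateRunnerGroupBody =
  operations["enterprise-admin/update-self-hosted-runner-group-for-enterprise"]["requestBody"]["content"]["application/json"];

function restrictGroupToWorkflows(workflows: string[]): UpdateRunnerGroupBody {
  return {
    restricted_to_workflows: true,
    selected_workflows: workflows,
  };
}

// e.g. PATCH /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}
//      with JSON.stringify(restrictGroupToWorkflows(["octo-org/octo-repo/.github/workflows/deploy.yml@main"]))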
+ */ + "enterprise-admin/add-org-access-to-self-hosted-runner-group-in-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + /** Unique identifier of the self-hosted runner group. */ + runner_group_id: components["parameters"]["runner-group-id"]; + /** The unique identifier of the organization. */ + org_id: components["parameters"]["org-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * Removes an organization from the list of selected organizations that can access a self-hosted runner group. The runner group must have `visibility` set to `selected`. For more information, see "[Create a self-hosted runner group for an enterprise](#create-a-self-hosted-runner-group-for-an-enterprise)." + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + "enterprise-admin/remove-org-access-to-self-hosted-runner-group-in-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + /** Unique identifier of the self-hosted runner group. */ + runner_group_id: components["parameters"]["runner-group-id"]; + /** The unique identifier of the organization. */ + org_id: components["parameters"]["org-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * Lists the self-hosted runners that are in a specific enterprise group. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + "enterprise-admin/list-self-hosted-runners-in-group-for-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + /** Unique identifier of the self-hosted runner group. */ + runner_group_id: components["parameters"]["runner-group-id"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": { + total_count: number; + runners: components["schemas"]["runner"][]; + }; + }; + }; + }; + }; + /** + * Replaces the list of self-hosted runners that are part of an enterprise runner group. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + "enterprise-admin/set-self-hosted-runners-in-group-for-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + /** Unique identifier of the self-hosted runner group. */ + runner_group_id: components["parameters"]["runner-group-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + requestBody: { + content: { + "application/json": { + /** @description List of runner IDs to add to the runner group. */ + runners: number[]; + }; + }; + }; + }; + /** + * Adds a self-hosted runner to a runner group configured in an enterprise. 
+ * + * You must authenticate using an access token with the `manage_runners:enterprise` + * scope to use this endpoint. + */ + "enterprise-admin/add-self-hosted-runner-to-group-for-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + /** Unique identifier of the self-hosted runner group. */ + runner_group_id: components["parameters"]["runner-group-id"]; + /** Unique identifier of the self-hosted runner. */ + runner_id: components["parameters"]["runner-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * Removes a self-hosted runner from a group configured in an enterprise. The runner is then returned to the default group. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + "enterprise-admin/remove-self-hosted-runner-from-group-for-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + /** Unique identifier of the self-hosted runner group. */ + runner_group_id: components["parameters"]["runner-group-id"]; + /** Unique identifier of the self-hosted runner. */ + runner_id: components["parameters"]["runner-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * Lists all self-hosted runners configured for an enterprise. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + "enterprise-admin/list-self-hosted-runners-for-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": { + total_count?: number; + runners?: components["schemas"]["runner"][]; + }; + }; + }; + }; + }; + /** + * Lists binaries for the runner application that you can download and run. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + "enterprise-admin/list-runner-applications-for-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["runner-application"][]; + }; + }; + }; + }; + /** + * Returns a token that you can pass to the `config` script. The token expires after one hour. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + * + * #### Example using registration token + * + * Configure your self-hosted runner, replacing `TOKEN` with the registration token provided by this endpoint. 
+ * + * ``` + * ./config.sh --url https://github.com/enterprises/octo-enterprise --token TOKEN + * ``` + */ + "enterprise-admin/create-registration-token-for-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["authentication-token"]; + }; + }; + }; + }; + /** + * Returns a token that you can pass to the `config` script to remove a self-hosted runner from an enterprise. The token expires after one hour. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + * + * #### Example using remove token + * + * To remove your self-hosted runner from an enterprise, replace `TOKEN` with the remove token provided by this + * endpoint. + * + * ``` + * ./config.sh remove --token TOKEN + * ``` + */ + "enterprise-admin/create-remove-token-for-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["authentication-token"]; + }; + }; + }; + }; + /** + * Gets a specific self-hosted runner configured in an enterprise. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + "enterprise-admin/get-self-hosted-runner-for-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + /** Unique identifier of the self-hosted runner. */ + runner_id: components["parameters"]["runner-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["runner"]; + }; + }; + }; + }; + /** + * Forces the removal of a self-hosted runner from an enterprise. You can use this endpoint to completely remove the runner when the machine you were using no longer exists. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + "enterprise-admin/delete-self-hosted-runner-from-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + /** Unique identifier of the self-hosted runner. */ + runner_id: components["parameters"]["runner-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * Lists all labels for a self-hosted runner configured in an enterprise. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + "enterprise-admin/list-labels-for-self-hosted-runner-for-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + /** Unique identifier of the self-hosted runner. 
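The registration-token flow above translates into a short sketch like this; the enterprise slug is a placeholder, and the token fields come from the `authentication-token` schema:

```typescript
// Sketch: request a one-hour registration token, then pass it to config.sh.
const res = await fetch(
  "https://api.github.com/enterprises/octo-enterprise/actions/runners/registration-token",
  {
    method: "POST",
    headers: {
      accept: "application/vnd.github+json",
      authorization: `Bearer ${process.env.GITHUB_TOKEN}`, // manage_runners:enterprise scope
    },
  },
);
const { token, expires_at } = await res.json();
console.log(`registration token expires at ${expires_at}`);
// On the runner machine:
//   ./config.sh --url https://github.com/enterprises/octo-enterprise --token <token>
```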
*/ + runner_id: components["parameters"]["runner-id"]; + }; + }; + responses: { + 200: components["responses"]["actions_runner_labels"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Remove all previous custom labels and set the new custom labels for a specific + * self-hosted runner configured in an enterprise. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + "enterprise-admin/set-custom-labels-for-self-hosted-runner-for-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + /** Unique identifier of the self-hosted runner. */ + runner_id: components["parameters"]["runner-id"]; + }; + }; + responses: { + 200: components["responses"]["actions_runner_labels"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed_simple"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The names of the custom labels to set for the runner. You can pass an empty array to remove all custom labels. */ + labels: string[]; + }; + }; + }; + }; + /** + * Add custom labels to a self-hosted runner configured in an enterprise. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + "enterprise-admin/add-custom-labels-to-self-hosted-runner-for-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + /** Unique identifier of the self-hosted runner. */ + runner_id: components["parameters"]["runner-id"]; + }; + }; + responses: { + 200: components["responses"]["actions_runner_labels"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed_simple"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The names of the custom labels to add to the runner. */ + labels: string[]; + }; + }; + }; + }; + /** + * Remove all custom labels from a self-hosted runner configured in an + * enterprise. Returns the remaining read-only labels from the runner. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. + */ + "enterprise-admin/remove-all-custom-labels-from-self-hosted-runner-for-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + /** Unique identifier of the self-hosted runner. */ + runner_id: components["parameters"]["runner-id"]; + }; + }; + responses: { + 200: components["responses"]["actions_runner_labels_readonly"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed_simple"]; + }; + }; + /** + * Remove a custom label from a self-hosted runner configured + * in an enterprise. Returns the remaining labels from the runner. + * + * This endpoint returns a `404 Not Found` status if the custom label is not + * present on the runner. + * + * You must authenticate using an access token with the `manage_runners:enterprise` scope to use this endpoint. 
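Putting the label endpoints above together, a hedged sketch; the runner id and label names are placeholders, and the paths are inferred from the operation names:

```typescript
// Sketch: replace, then extend, the custom labels on an enterprise runner.
const headers = {
  accept: "application/vnd.github+json",
  authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
};
const labelsUrl =
  "https://api.github.com/enterprises/octo-enterprise/actions/runners/7/labels";

// PUT replaces every custom label; an empty array would clear them all.
await fetch(labelsUrl, {
  method: "PUT",
  headers,
  body: JSON.stringify({ labels: ["gpu", "ubuntu-22.04"] }),
});

// POST appends labels without touching the existing ones.
await fetch(labelsUrl, {
  method: "POST",
  headers,
  body: JSON.stringify({ labels: ["large-disk"] }),
});
```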
+ */ + "enterprise-admin/remove-custom-label-from-self-hosted-runner-for-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + /** Unique identifier of the self-hosted runner. */ + runner_id: components["parameters"]["runner-id"]; + /** The name of a self-hosted runner's custom label. */ + name: components["parameters"]["runner-label-name"]; + }; + }; + responses: { + 200: components["responses"]["actions_runner_labels"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed_simple"]; + }; + }; + /** + * Lists code scanning alerts for the default branch for all eligible repositories in an enterprise. Eligible repositories are repositories that are owned by organizations that you own or for which you are a security manager. For more information, see "[Managing security managers in your organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization)." + * + * To use this endpoint, you must be a member of the enterprise, + * and you must use an access token with the `repo` scope or `security_events` scope. + */ + "code-scanning/list-alerts-for-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + }; + query: { + /** The name of a code scanning tool. Only results by this tool will be listed. You can specify the tool by using either `tool_name` or `tool_guid`, but not both. */ + tool_name?: components["parameters"]["tool-name"]; + /** The GUID of a code scanning tool. Only results by this tool will be listed. Note that some code scanning tools may not include a GUID in their analysis data. You can specify the tool by using either `tool_guid` or `tool_name`, but not both. */ + tool_guid?: components["parameters"]["tool-guid"]; + /** A cursor, as given in the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header). If specified, the query only searches for events before this cursor. */ + before?: components["parameters"]["pagination-before"]; + /** A cursor, as given in the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header). If specified, the query only searches for events after this cursor. */ + after?: components["parameters"]["pagination-after"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** The direction to sort the results by. */ + direction?: components["parameters"]["direction"]; + /** If specified, only code scanning alerts with this state will be returned. */ + state?: components["schemas"]["code-scanning-alert-state"]; + /** The property by which to sort the results. 
*/ + sort?: "created" | "updated"; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["code-scanning-organization-alert-items"][]; + }; + }; + 403: components["responses"]["code_scanning_forbidden_read"]; + 404: components["responses"]["not_found"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * Lists secret scanning alerts for eligible repositories in an enterprise, from newest to oldest. + * To use this endpoint, you must be a member of the enterprise, and you must use an access token with the `repo` scope or `security_events` scope. Alerts are only returned for organizations in the enterprise for which you are an organization owner or a [security manager](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization). + */ + "secret-scanning/list-alerts-for-enterprise": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + }; + query: { + /** Set to `open` or `resolved` to only list secret scanning alerts in a specific state. */ + state?: components["parameters"]["secret-scanning-alert-state"]; + /** + * A comma-separated list of secret types to return. By default all secret types are returned. + * See "[Secret scanning patterns](https://docs.github.com/code-security/secret-scanning/secret-scanning-patterns#supported-secrets-for-advanced-security)" + * for a complete list of secret types. + */ + secret_type?: components["parameters"]["secret-scanning-alert-secret-type"]; + /** A comma-separated list of resolutions. Only secret scanning alerts with one of these resolutions are listed. Valid resolutions are `false_positive`, `wont_fix`, `revoked`, `pattern_edited`, `pattern_deleted` or `used_in_tests`. */ + resolution?: components["parameters"]["secret-scanning-alert-resolution"]; + /** The property to sort the results by. `created` means when the alert was created. `updated` means when the alert was updated or resolved. */ + sort?: components["parameters"]["secret-scanning-alert-sort"]; + /** The direction to sort the results by. */ + direction?: components["parameters"]["direction"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** A cursor, as given in the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header). If specified, the query only searches for events before this cursor. */ + before?: components["parameters"]["pagination-before"]; + /** A cursor, as given in the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header). If specified, the query only searches for events after this cursor. */ + after?: components["parameters"]["pagination-after"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["organization-secret-scanning-alert"][]; + }; + }; + 404: components["responses"]["not_found"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * Gets the GitHub Advanced Security active committers for an enterprise per repository. + * + * Each distinct user login across all repositories is counted as a single Advanced Security seat, so the `total_advanced_security_committers` is not the sum of active_users for each repository. 
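As a usage sketch for the alert-listing endpoints above (enterprise slug is a placeholder; the query parameter names are the ones defined in the schema):

```typescript
// Sketch: list open code scanning alerts across an enterprise, newest first.
const params = new URLSearchParams({
  state: "open",
  sort: "created",
  direction: "desc",
  per_page: "100",
});
const res = await fetch(
  `https://api.github.com/enterprises/octo-enterprise/code-scanning/alerts?${params}`,
  {
    headers: {
      accept: "application/vnd.github+json",
      authorization: `Bearer ${process.env.GITHUB_TOKEN}`, // repo or security_events scope
    },
  },
);
const alerts = await res.json(); // code-scanning-organization-alert-items[]
console.log(`open alerts on default branches: ${alerts.length}`);
```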
+ * + * The total number of repositories with committer information is tracked by the `total_count` field. + */ + "billing/get-github-advanced-security-billing-ghe": { + parameters: { + path: { + /** The slug version of the enterprise name. You can also substitute this value with the enterprise id. */ + enterprise: components["parameters"]["enterprise"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Success */ + 200: { + content: { + "application/json": components["schemas"]["advanced-security-active-committers"]; + }; + }; + 403: components["responses"]["code_scanning_forbidden_read"]; + }; + }; + /** We delay the public events feed by five minutes, which means the most recent event returned by the public events API actually occurred at least five minutes ago. */ + "activity/list-public-events": { + parameters: { + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["event"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * GitHub provides several timeline resources in [Atom](http://en.wikipedia.org/wiki/Atom_(standard)) format. The Feeds API lists all the feeds available to the authenticated user: + * + * * **Timeline**: The GitHub global public timeline + * * **User**: The public timeline for any user, using [URI template](https://docs.github.com/rest/overview/resources-in-the-rest-api#hypermedia) + * * **Current user public**: The public timeline for the authenticated user + * * **Current user**: The private timeline for the authenticated user + * * **Current user actor**: The private timeline for activity created by the authenticated user + * * **Current user organizations**: The private timeline for the organizations the authenticated user is a member of. + * * **Security advisories**: A collection of public announcements that provide information about security-related vulnerabilities in software on GitHub. + * + * **Note**: Private feeds are only returned when [authenticating via Basic Auth](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) since current feed URIs use the older, non revocable auth tokens. + */ + "activity/get-feeds": { + parameters: {}; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["feed"]; + }; + }; + }; + }; + /** Lists the authenticated user's gists or if called anonymously, this endpoint returns all public gists: */ + "gists/list": { + parameters: { + query: { + /** Only show notifications updated after the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ + since?: components["parameters"]["since"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. 
*/ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["base-gist"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + }; + }; + /** + * Allows you to add a new gist with one or more files. + * + * **Note:** Don't name your files "gistfile" with a numerical suffix. This is the format of the automatic naming scheme that Gist uses internally. + */ + "gists/create": { + parameters: {}; + responses: { + /** Response */ + 201: { + headers: { + Location?: string; + }; + content: { + "application/json": components["schemas"]["gist-simple"]; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description Description of the gist + * @example Example Ruby script + */ + description?: string; + /** + * @description Names and content for the files that make up the gist + * @example { + * "hello.rb": { + * "content": "puts \"Hello, World!\"" + * } + * } + */ + files: { + [key: string]: { + /** @description Content of the file */ + content: string; + }; + }; + public?: boolean | ("true" | "false"); + }; + }; + }; + }; + /** + * List public gists sorted by most recently updated to least recently updated. + * + * Note: With [pagination](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination), you can fetch up to 3000 gists. For example, you can fetch 100 pages with 30 gists per page or 30 pages with 100 gists per page. + */ + "gists/list-public": { + parameters: { + query: { + /** Only show notifications updated after the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ + since?: components["parameters"]["since"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["base-gist"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** List the authenticated user's starred gists: */ + "gists/list-starred": { + parameters: { + query: { + /** Only show notifications updated after the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ + since?: components["parameters"]["since"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["base-gist"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + "gists/get": { + parameters: { + path: { + /** The unique identifier of the gist. 
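The `gists/create` request body above maps directly onto a small sketch like this:

```typescript
// Sketch: create a secret gist with a single file, mirroring the schema above.
const res = await fetch("https://api.github.com/gists", {
  method: "POST",
  headers: {
    accept: "application/vnd.github+json",
    authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
  },
  body: JSON.stringify({
    description: "Example Ruby script",
    files: { "hello.rb": { content: 'puts "Hello, World!"' } },
    public: false,
  }),
});
const gist = await res.json(); // gist-simple
console.log(res.status, gist.html_url); // 201 plus the new gist's URL
```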
*/ + gist_id: components["parameters"]["gist-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["gist-simple"]; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden_gist"]; + 404: components["responses"]["not_found"]; + }; + }; + "gists/delete": { + parameters: { + path: { + /** The unique identifier of the gist. */ + gist_id: components["parameters"]["gist-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Allows you to update or delete a gist file and rename gist files. Files from the previous version of the gist that aren't explicitly changed during an edit are unchanged. */ + "gists/update": { + parameters: { + path: { + /** The unique identifier of the gist. */ + gist_id: components["parameters"]["gist-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["gist-simple"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description Description of the gist + * @example Example Ruby script + */ + description?: string; + /** + * @description Names of files to be updated + * @example { + * "hello.rb": { + * "content": "blah", + * "filename": "goodbye.rb" + * } + * } + */ + files?: { [key: string]: Partial<{ [key: string]: unknown }> }; + } | null; + }; + }; + }; + "gists/list-comments": { + parameters: { + path: { + /** The unique identifier of the gist. */ + gist_id: components["parameters"]["gist-id"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["gist-comment"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + "gists/create-comment": { + parameters: { + path: { + /** The unique identifier of the gist. */ + gist_id: components["parameters"]["gist-id"]; + }; + }; + responses: { + /** Response */ + 201: { + headers: { + Location?: string; + }; + content: { + "application/json": components["schemas"]["gist-comment"]; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The comment text. + * @example Body of the attachment + */ + body: string; + }; + }; + }; + }; + "gists/get-comment": { + parameters: { + path: { + /** The unique identifier of the gist. */ + gist_id: components["parameters"]["gist-id"]; + /** The unique identifier of the comment. 
*/ + comment_id: components["parameters"]["comment-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["gist-comment"]; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden_gist"]; + 404: components["responses"]["not_found"]; + }; + }; + "gists/delete-comment": { + parameters: { + path: { + /** The unique identifier of the gist. */ + gist_id: components["parameters"]["gist-id"]; + /** The unique identifier of the comment. */ + comment_id: components["parameters"]["comment-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + "gists/update-comment": { + parameters: { + path: { + /** The unique identifier of the gist. */ + gist_id: components["parameters"]["gist-id"]; + /** The unique identifier of the comment. */ + comment_id: components["parameters"]["comment-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["gist-comment"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The comment text. + * @example Body of the attachment + */ + body: string; + }; + }; + }; + }; + "gists/list-commits": { + parameters: { + path: { + /** The unique identifier of the gist. */ + gist_id: components["parameters"]["gist-id"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: { + Link?: string; + }; + content: { + "application/json": components["schemas"]["gist-commit"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + "gists/list-forks": { + parameters: { + path: { + /** The unique identifier of the gist. */ + gist_id: components["parameters"]["gist-id"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["gist-simple"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + "gists/fork": { + parameters: { + path: { + /** The unique identifier of the gist. */ + gist_id: components["parameters"]["gist-id"]; + }; + }; + responses: { + /** Response */ + 201: { + headers: { + Location?: string; + }; + content: { + "application/json": components["schemas"]["base-gist"]; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + "gists/check-is-starred": { + parameters: { + path: { + /** The unique identifier of the gist. 
*/ + gist_id: components["parameters"]["gist-id"]; + }; + }; + responses: { + /** Response if gist is starred */ + 204: never; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + /** Not Found if gist is not starred */ + 404: { + content: { + "application/json": { [key: string]: unknown }; + }; + }; + }; + }; + /** Note that you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." */ + "gists/star": { + parameters: { + path: { + /** The unique identifier of the gist. */ + gist_id: components["parameters"]["gist-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + "gists/unstar": { + parameters: { + path: { + /** The unique identifier of the gist. */ + gist_id: components["parameters"]["gist-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + "gists/get-revision": { + parameters: { + path: { + /** The unique identifier of the gist. */ + gist_id: components["parameters"]["gist-id"]; + sha: string; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["gist-simple"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** List all templates available to pass as an option when [creating a repository](https://docs.github.com/rest/reference/repos#create-a-repository-for-the-authenticated-user). */ + "gitignore/get-all-templates": { + parameters: {}; + responses: { + /** Response */ + 200: { + content: { + "application/json": string[]; + }; + }; + 304: components["responses"]["not_modified"]; + }; + }; + /** + * The API also allows fetching the source of a single template. + * Use the raw [media type](https://docs.github.com/rest/overview/media-types/) to get the raw contents. + */ + "gitignore/get-template": { + parameters: { + path: { + name: string; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["gitignore-template"]; + }; + }; + 304: components["responses"]["not_modified"]; + }; + }; + /** + * List repositories that an app installation can access. + * + * You must use an [installation access token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-an-installation) to access this endpoint. + */ + "apps/list-repos-accessible-to-installation": { + parameters: { + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. 
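A small sketch of the star endpoints above; the gist id is a placeholder, and the check relies on the documented 204/404 responses:

```typescript
// Sketch: star a gist only if it is not starred yet.
const headers = {
  accept: "application/vnd.github+json",
  authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
};
const starUrl = "https://api.github.com/gists/aa5a315d61ae9438b18d/star";

const check = await fetch(starUrl, { headers }); // 204 if starred, 404 if not
if (check.status === 404) {
  // Empty body; most HTTP clients send the zero Content-Length mentioned above automatically.
  const star = await fetch(starUrl, { method: "PUT", headers });
  console.log(star.status); // 204 once starred
}
```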
*/ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": { + total_count: number; + repositories: components["schemas"]["repository"][]; + /** @example selected */ + repository_selection?: string; + }; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + /** + * Revokes the installation token you're using to authenticate as an installation and access this endpoint. + * + * Once an installation token is revoked, the token is invalidated and cannot be used. Other endpoints that require the revoked installation token must have a new installation token to work. You can create a new token using the "[Create an installation access token for an app](https://docs.github.com/rest/reference/apps#create-an-installation-access-token-for-an-app)" endpoint. + * + * You must use an [installation access token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-an-installation) to access this endpoint. + */ + "apps/revoke-installation-access-token": { + parameters: {}; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * List issues assigned to the authenticated user across all visible repositories including owned repositories, member + * repositories, and organization repositories. You can use the `filter` query parameter to fetch issues that are not + * necessarily assigned to you. + * + * + * **Note**: GitHub's REST API v3 considers every pull request an issue, but not every issue is a pull request. For this + * reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by + * the `pull_request` key. Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull + * request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint. + */ + "issues/list": { + parameters: { + query: { + /** Indicates which sorts of issues to return. `assigned` means issues assigned to you. `created` means issues created by you. `mentioned` means issues mentioning you. `subscribed` means issues you're subscribed to updates for. `all` or `repos` means all issues you can see, regardless of participation or creation. */ + filter?: + | "assigned" + | "created" + | "mentioned" + | "subscribed" + | "repos" + | "all"; + /** Indicates the state of the issues to return. */ + state?: "open" | "closed" | "all"; + /** A list of comma separated label names. Example: `bug,ui,@high` */ + labels?: components["parameters"]["labels"]; + /** What to sort results by. */ + sort?: "created" | "updated" | "comments"; + /** The direction to sort the results by. */ + direction?: components["parameters"]["direction"]; + /** Only show notifications updated after the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ + since?: components["parameters"]["since"]; + collab?: boolean; + orgs?: boolean; + owned?: boolean; + pulls?: boolean; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. 
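The `issues/list` filters above can be exercised with a sketch like this (values are examples only):

```typescript
// Sketch: list open issues assigned to the authenticated user, newest first.
const query = new URLSearchParams({
  filter: "assigned",
  state: "open",
  labels: "bug,ui,@high",
  sort: "created",
  direction: "desc",
  per_page: "50",
});
const res = await fetch(`https://api.github.com/issues?${query}`, {
  headers: {
    accept: "application/vnd.github+json",
    authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
  },
});
const issues = await res.json(); // issue[]; pull requests show up here too (pull_request key)
console.log(issues.length);
```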
*/ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["issue"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + "licenses/get-all-commonly-used": { + parameters: { + query: { + featured?: boolean; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["license-simple"][]; + }; + }; + 304: components["responses"]["not_modified"]; + }; + }; + "licenses/get": { + parameters: { + path: { + license: string; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["license"]; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + "markdown/render": { + parameters: {}; + responses: { + /** Response */ + 200: { + headers: { + "Content-Length"?: string; + }; + content: { + "text/html": string; + }; + }; + 304: components["responses"]["not_modified"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The Markdown text to render in HTML. */ + text: string; + /** + * @description The rendering mode. + * @default markdown + * @example markdown + * @enum {string} + */ + mode?: "markdown" | "gfm"; + /** @description The repository context to use when creating references in `gfm` mode. For example, setting `context` to `octo-org/octo-repo` will change the text `#42` into an HTML link to issue 42 in the `octo-org/octo-repo` repository. */ + context?: string; + }; + }; + }; + }; + /** You must send Markdown as plain text (using a `Content-Type` header of `text/plain` or `text/x-markdown`) to this endpoint, rather than using JSON format. In raw mode, [GitHub Flavored Markdown](https://github.github.com/gfm/) is not supported and Markdown will be rendered in plain format like a README.md file. Markdown content must be 400 KB or less. */ + "markdown/render-raw": { + parameters: {}; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "text/html": string; + }; + }; + 304: components["responses"]["not_modified"]; + }; + requestBody: { + content: { + "text/plain": string; + "text/x-markdown": string; + }; + }; + }; + /** + * Shows whether the user or organization account actively subscribes to a plan listed by the authenticated GitHub App. When someone submits a plan change that won't be processed until the end of their billing cycle, you will also see the upcoming pending change. + * + * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. 
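A sketch of `markdown/render` in `gfm` mode with a repository context (the repository name is a placeholder):

```typescript
// Sketch: render GitHub Flavored Markdown so "#42" links into octo-org/octo-repo.
const res = await fetch("https://api.github.com/markdown", {
  method: "POST",
  headers: {
    accept: "application/vnd.github+json",
    authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
  },
  body: JSON.stringify({
    text: "Fixes #42 :rocket:",
    mode: "gfm",
    context: "octo-org/octo-repo",
  }),
});
const html = await res.text(); // the response is text/html, not JSON
console.log(html);
```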
+ */ + "apps/get-subscription-plan-for-account": { + parameters: { + path: { + /** account_id parameter */ + account_id: components["parameters"]["account-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["marketplace-purchase"]; + }; + }; + 401: components["responses"]["requires_authentication"]; + /** Not Found when the account has not purchased the listing */ + 404: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + }; + }; + /** + * Lists all plans that are part of your GitHub Marketplace listing. + * + * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. + */ + "apps/list-plans": { + parameters: { + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["marketplace-listing-plan"][]; + }; + }; + 401: components["responses"]["requires_authentication"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Returns user and organization accounts associated with the specified plan, including free plans. For per-seat pricing, you see the list of accounts that have purchased the plan, including the number of seats purchased. When someone submits a plan change that won't be processed until the end of their billing cycle, you will also see the upcoming pending change. + * + * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. + */ + "apps/list-accounts-for-plan": { + parameters: { + path: { + /** The unique identifier of the plan. */ + plan_id: components["parameters"]["plan-id"]; + }; + query: { + /** The property to sort the results by. `created` means when the repository was starred. `updated` means when the repository was last pushed to. */ + sort?: components["parameters"]["sort"]; + /** To return the oldest accounts first, set to `asc`. Ignored without the `sort` parameter. */ + direction?: "asc" | "desc"; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["marketplace-purchase"][]; + }; + }; + 401: components["responses"]["requires_authentication"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Shows whether the user or organization account actively subscribes to a plan listed by the authenticated GitHub App. When someone submits a plan change that won't be processed until the end of their billing cycle, you will also see the upcoming pending change. 
+ * + * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. + */ + "apps/get-subscription-plan-for-account-stubbed": { + parameters: { + path: { + /** account_id parameter */ + account_id: components["parameters"]["account-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["marketplace-purchase"]; + }; + }; + 401: components["responses"]["requires_authentication"]; + /** Not Found when the account has not purchased the listing */ + 404: unknown; + }; + }; + /** + * Lists all plans that are part of your GitHub Marketplace listing. + * + * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. + */ + "apps/list-plans-stubbed": { + parameters: { + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["marketplace-listing-plan"][]; + }; + }; + 401: components["responses"]["requires_authentication"]; + }; + }; + /** + * Returns repository and organization accounts associated with the specified plan, including free plans. For per-seat pricing, you see the list of accounts that have purchased the plan, including the number of seats purchased. When someone submits a plan change that won't be processed until the end of their billing cycle, you will also see the upcoming pending change. + * + * GitHub Apps must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. OAuth Apps must use [basic authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication) with their client ID and client secret to access this endpoint. + */ + "apps/list-accounts-for-plan-stubbed": { + parameters: { + path: { + /** The unique identifier of the plan. */ + plan_id: components["parameters"]["plan-id"]; + }; + query: { + /** The property to sort the results by. `created` means when the repository was starred. `updated` means when the repository was last pushed to. */ + sort?: components["parameters"]["sort"]; + /** To return the oldest accounts first, set to `asc`. Ignored without the `sort` parameter. */ + direction?: "asc" | "desc"; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["marketplace-purchase"][]; + }; + }; + 401: components["responses"]["requires_authentication"]; + }; + }; + /** + * Returns meta information about GitHub, including a list of GitHub's IP addresses. 
For more information, see "[About GitHub's IP addresses](https://docs.github.com/articles/about-github-s-ip-addresses/)." + * + * **Note:** The IP addresses shown in the documentation's response are only example values. You must always query the API directly to get the latest list of IP addresses. + */ + "meta/get": { + parameters: {}; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["api-overview"]; + }; + }; + 304: components["responses"]["not_modified"]; + }; + }; + "activity/list-public-events-for-repo-network": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["event"][]; + }; + }; + 301: components["responses"]["moved_permanently"]; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** List all notifications for the current user, sorted by most recently updated. */ + "activity/list-notifications-for-authenticated-user": { + parameters: { + query: { + /** If `true`, show notifications marked as read. */ + all?: components["parameters"]["all"]; + /** If `true`, only shows notifications in which the user is directly participating or mentioned. */ + participating?: components["parameters"]["participating"]; + /** Only show notifications updated after the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ + since?: components["parameters"]["since"]; + /** Only show notifications updated before the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ + before?: components["parameters"]["before"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + /** The number of results per page (max 50). */ + per_page?: number; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["thread"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** Marks all notifications as "read" removes it from the [default view on GitHub](https://github.com/notifications). If the number of notifications is too large to complete in one request, you will receive a `202 Accepted` status and GitHub will run an asynchronous process to mark notifications as "read." To check whether any "unread" notifications remain, you can use the [List notifications for the authenticated user](https://docs.github.com/rest/reference/activity#list-notifications-for-the-authenticated-user) endpoint and pass the query parameter `all=false`. 
*/ + "activity/mark-notifications-as-read": { + parameters: {}; + responses: { + /** Response */ + 202: { + content: { + "application/json": { + message?: string; + }; + }; + }; + /** Reset Content */ + 205: unknown; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + requestBody: { + content: { + "application/json": { + /** + * Format: date-time + * @description Describes the last point that notifications were checked. Anything updated since this time will not be marked as read. If you omit this parameter, all notifications are marked as read. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. Default: The current timestamp. + */ + last_read_at?: string; + /** @description Whether the notification has been read. */ + read?: boolean; + }; + }; + }; + }; + "activity/get-thread": { + parameters: { + path: { + /** The unique identifier of the pull request thread. */ + thread_id: components["parameters"]["thread-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["thread"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + "activity/mark-thread-as-read": { + parameters: { + path: { + /** The unique identifier of the pull request thread. */ + thread_id: components["parameters"]["thread-id"]; + }; + }; + responses: { + /** Reset Content */ + 205: unknown; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + }; + }; + /** + * This checks to see if the current user is subscribed to a thread. You can also [get a repository subscription](https://docs.github.com/rest/reference/activity#get-a-repository-subscription). + * + * Note that subscriptions are only generated if a user is participating in a conversation--for example, they've replied to the thread, were **@mentioned**, or manually subscribe to a thread. + */ + "activity/get-thread-subscription-for-authenticated-user": { + parameters: { + path: { + /** The unique identifier of the pull request thread. */ + thread_id: components["parameters"]["thread-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["thread-subscription"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + /** + * If you are watching a repository, you receive notifications for all threads by default. Use this endpoint to ignore future notifications for threads until you comment on the thread or get an **@mention**. + * + * You can also use this endpoint to subscribe to threads that you are currently not receiving notifications for or to subscribed to threads that you have previously ignored. + * + * Unsubscribing from a conversation in a repository that you are not watching is functionally equivalent to the [Delete a thread subscription](https://docs.github.com/rest/reference/activity#delete-a-thread-subscription) endpoint. + */ + "activity/set-thread-subscription": { + parameters: { + path: { + /** The unique identifier of the pull request thread. 
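The mark-as-read body above (`last_read_at`, `read`) in a minimal sketch:

```typescript
// Sketch: mark every notification up to "now" as read. A 202 means GitHub is
// finishing the update asynchronously; a 205 means it completed immediately.
const res = await fetch("https://api.github.com/notifications", {
  method: "PUT",
  headers: {
    accept: "application/vnd.github+json",
    authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
  },
  body: JSON.stringify({
    last_read_at: new Date().toISOString(), // ISO 8601, as described above
    read: true,
  }),
});
console.log(res.status); // 202 or 205
```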
*/ + thread_id: components["parameters"]["thread-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["thread-subscription"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description Whether to block all notifications from a thread. + * @default false + */ + ignored?: boolean; + }; + }; + }; + }; + /** Mutes all future notifications for a conversation until you comment on the thread or get an **@mention**. If you are watching the repository of the thread, you will still receive notifications. To ignore future notifications for a repository you are watching, use the [Set a thread subscription](https://docs.github.com/rest/reference/activity#set-a-thread-subscription) endpoint and set `ignore` to `true`. */ + "activity/delete-thread-subscription": { + parameters: { + path: { + /** The unique identifier of the pull request thread. */ + thread_id: components["parameters"]["thread-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + /** Get the octocat as ASCII art */ + "meta/get-octocat": { + parameters: { + query: { + /** The words to show in Octocat's speech bubble */ + s?: string; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/octocat-stream": string; + }; + }; + }; + }; + /** + * Lists all organizations, in the order that they were created on GitHub. + * + * **Note:** Pagination is powered exclusively by the `since` parameter. Use the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header) to get the URL for the next page of organizations. + */ + "orgs/list": { + parameters: { + query: { + /** An organization ID. Only return organizations with an ID greater than this ID. */ + since?: components["parameters"]["since-org"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: { + Link?: string; + }; + content: { + "application/json": components["schemas"]["organization-simple"][]; + }; + }; + 304: components["responses"]["not_modified"]; + }; + }; + /** + * List the custom repository roles available in this organization. In order to see custom + * repository roles in an organization, the authenticated user must be an organization owner. + * + * To use this endpoint the authenticated user must be an administrator for the organization or of a repository of the organization and must use an access token with `admin:org repo` scope. + * GitHub Apps must have the `organization_custom_roles:read` organization permission to use this endpoint. + * + * For more information on custom repository roles, see "[Managing custom repository roles for an organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-custom-repository-roles-for-an-organization)". + */ + "orgs/list-custom-roles": { + parameters: { + path: { + /** The unique identifier of the organization.
*/ + organization_id: string; + }; + }; + responses: { + /** Response - list of custom role names */ + 200: { + content: { + "application/json": { + /** + * @description The number of custom roles in this organization + * @example 3 + */ + total_count?: number; + custom_roles?: components["schemas"]["organization-custom-repository-role"][]; + }; + }; + }; + }; + }; + /** + * Lists all Codespaces secrets available at the organization-level without revealing their encrypted values. + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "codespaces/list-org-secrets": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": { + total_count: number; + secrets: components["schemas"]["codespaces-org-secret"][]; + }; + }; + }; + }; + }; + /** Gets a public key for an organization, which is required in order to encrypt secrets. You need to encrypt the value of a secret before you can create or update secrets. You must authenticate using an access token with the `admin:org` scope to use this endpoint. */ + "codespaces/get-org-public-key": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["codespaces-public-key"]; + }; + }; + }; + }; + /** + * Gets an organization secret without revealing its encrypted value. + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "codespaces/get-org-secret": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["codespaces-org-secret"]; + }; + }; + }; + }; + /** + * Creates or updates an organization secret with an encrypted value. Encrypt your secret using + * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access + * token with the `admin:org` scope to use this endpoint. + * + * #### Example encrypting a secret using Node.js + * + * Encrypt your secret using the [libsodium-wrappers](https://www.npmjs.com/package/libsodium-wrappers) library. + * + * ``` + * // Written with ❤️ by PSJ and free to use under The Unlicense. + * const sodium=require('libsodium-wrappers') + * const secret = 'plain-text-secret' // replace with secret before running the script. + * const key = 'base64-encoded-public-key' // replace with the Base64 encoded public key. + * + * //Check if libsodium is ready and then proceed. + * + * sodium.ready.then( ()=>{ + * + * // Convert Secret & Base64 key to Uint8Array. 
+ * let binkey= sodium.from_base64(key, sodium.base64_variants.ORIGINAL) //Equivalent of Buffer.from(key, 'base64') + * let binsec= sodium.from_string(secret) // Equivalent of Buffer.from(secret) + * + * //Encrypt the secret using LibSodium + * let encBytes= sodium.crypto_box_seal(binsec,binkey) // Similar to tweetsodium.seal(binsec,binkey) + * + * // Convert encrypted Uint8Array to Base64 + * let output=sodium.to_base64(encBytes, sodium.base64_variants.ORIGINAL) //Equivalent of Buffer.from(encBytes).toString('base64') + * + * console.log(output) + * }); + * ``` + * + * #### Example encrypting a secret using Python + * + * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/latest/public/#nacl-public-sealedbox) with Python 3. + * + * ``` + * from base64 import b64encode + * from nacl import encoding, public + * + * def encrypt(public_key: str, secret_value: str) -> str: + * """Encrypt a Unicode string using the public key.""" + * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) + * sealed_box = public.SealedBox(public_key) + * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) + * return b64encode(encrypted).decode("utf-8") + * ``` + * + * #### Example encrypting a secret using C# + * + * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. + * + * ``` + * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); + * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); + * + * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); + * + * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); + * ``` + * + * #### Example encrypting a secret using Ruby + * + * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. + * + * ```ruby + * require "rbnacl" + * require "base64" + * + * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") + * public_key = RbNaCl::PublicKey.new(key) + * + * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) + * encrypted_secret = box.encrypt("my_secret") + * + * # Print the base64 encoded secret + * puts Base64.strict_encode64(encrypted_secret) + * ``` + */ + "codespaces/create-or-update-org-secret": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** Response when creating a secret */ + 201: { + content: { + "application/json": components["schemas"]["empty-object"]; + }; + }; + /** Response when updating a secret */ + 204: never; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The value for your secret, encrypted with [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages) using the public key retrieved from the [Get an organization public key](https://docs.github.com/rest/reference/codespaces#get-an-organization-public-key) endpoint. */ + encrypted_value?: string; + /** @description The ID of the key you used to encrypt the secret. */ + key_id?: string; + /** + * @description Which type of organization repositories have access to the organization secret. `selected` means only the repositories specified by `selected_repository_ids` can access the secret. 
+ * @enum {string} + */ + visibility: "all" | "private" | "selected"; + /** @description An array of repository IDs that can access the organization secret. You can only provide a list of repository IDs when the `visibility` is set to `selected`. You can manage the list of selected repositories using the [List selected repositories for an organization secret](https://docs.github.com/rest/reference/codespaces#list-selected-repositories-for-an-organization-secret), [Set selected repositories for an organization secret](https://docs.github.com/rest/reference/codespaces#set-selected-repositories-for-an-organization-secret), and [Remove selected repository from an organization secret](https://docs.github.com/rest/reference/codespaces#remove-selected-repository-from-an-organization-secret) endpoints. */ + selected_repository_ids?: number[]; + }; + }; + }; + }; + /** Deletes an organization secret using the secret name. You must authenticate using an access token with the `admin:org` scope to use this endpoint. */ + "codespaces/delete-org-secret": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** Response */ + 204: never; + 404: components["responses"]["not_found"]; + }; + }; + /** Lists all repositories that have been selected when the `visibility` for repository access to a secret is set to `selected`. You must authenticate using an access token with the `admin:org` scope to use this endpoint. */ + "codespaces/list-selected-repos-for-org-secret": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + }; + query: { + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": { + total_count: number; + repositories: components["schemas"]["minimal-repository"][]; + }; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** Replaces all repositories for an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/codespaces#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. */ + "codespaces/set-selected-repos-for-org-secret": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** Response */ + 204: never; + 404: components["responses"]["not_found"]; + /** Conflict when visibility type not set to selected */ + 409: unknown; + }; + requestBody: { + content: { + "application/json": { + /** @description An array of repository ids that can access the organization secret. You can only provide a list of repository ids when the `visibility` is set to `selected`. 
You can add and remove individual repositories using the [Set selected repositories for an organization secret](https://docs.github.com/rest/reference/codespaces#set-selected-repositories-for-an-organization-secret) and [Remove selected repository from an organization secret](https://docs.github.com/rest/reference/codespaces#remove-selected-repository-from-an-organization-secret) endpoints. */ + selected_repository_ids: number[]; + }; + }; + }; + }; + /** Adds a repository to an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/codespaces#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. */ + "codespaces/add-selected-repo-to-org-secret": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + repository_id: number; + }; + }; + responses: { + /** No Content when repository was added to the selected list */ + 204: never; + 404: components["responses"]["not_found"]; + /** Conflict when visibility type is not set to selected */ + 409: unknown; + 422: components["responses"]["validation_failed"]; + }; + }; + /** Removes a repository from an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/codespaces#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. */ + "codespaces/remove-selected-repo-from-org-secret": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + repository_id: number; + }; + }; + responses: { + /** Response when repository was removed from the selected list */ + 204: never; + 404: components["responses"]["not_found"]; + /** Conflict when visibility type not set to selected */ + 409: unknown; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * To see many of the organization response values, you need to be an authenticated organization owner with the `admin:org` scope. When the value of `two_factor_requirement_enabled` is `true`, the organization requires all members, billing managers, and outside collaborators to enable [two-factor authentication](https://docs.github.com/articles/securing-your-account-with-two-factor-authentication-2fa/). + * + * GitHub Apps with the `Organization plan` permission can use this endpoint to retrieve information about an organization's GitHub plan. See "[Authenticating with GitHub Apps](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/)" for details. For an example response, see 'Response with GitHub plan information' below." + */ + "orgs/get": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. 
*/ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["organization-full"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * **Parameter Deprecation Notice:** GitHub will replace and discontinue `members_allowed_repository_creation_type` in favor of more granular permissions. The new input parameters are `members_can_create_public_repositories`, `members_can_create_private_repositories` for all organizations and `members_can_create_internal_repositories` for organizations associated with an enterprise account using GitHub Enterprise Cloud or GitHub Enterprise Server 2.20+. For more information, see the [blog post](https://developer.github.com/changes/2019-12-03-internal-visibility-changes). + * + * Enables an authenticated organization owner with the `admin:org` scope to update the organization's profile and member privileges. + */ + "orgs/update": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["organization-full"]; + }; + }; + 409: components["responses"]["conflict"]; + /** Validation failed */ + 422: { + content: { + "application/json": + | components["schemas"]["validation-error"] + | components["schemas"]["validation-error-simple"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description Billing email address. This address is not publicized. */ + billing_email?: string; + /** @description The company name. */ + company?: string; + /** @description The publicly visible email address. */ + email?: string; + /** @description The Twitter username of the company. */ + twitter_username?: string; + /** @description The location. */ + location?: string; + /** @description The shorthand name of the company. */ + name?: string; + /** @description The description of the company. */ + description?: string; + /** @description Whether an organization can use organization projects. */ + has_organization_projects?: boolean; + /** @description Whether repositories that belong to the organization can use repository projects. */ + has_repository_projects?: boolean; + /** + * @description Default permission level members have for organization repositories. + * @default read + * @enum {string} + */ + default_repository_permission?: "read" | "write" | "admin" | "none"; + /** + * @description Whether of non-admin organization members can create repositories. **Note:** A parameter can override this parameter. See `members_allowed_repository_creation_type` in this table for details. + * @default true + */ + members_can_create_repositories?: boolean; + /** @description Whether organization members can create internal repositories, which are visible to all enterprise members. You can only allow members to create internal repositories if your organization is associated with an enterprise account using GitHub Enterprise Cloud or GitHub Enterprise Server 2.20+. For more information, see "[Restricting repository creation in your organization](https://docs.github.com/github/setting-up-and-managing-organizations-and-teams/restricting-repository-creation-in-your-organization)" in the GitHub Help documentation. 
*/ + members_can_create_internal_repositories?: boolean; + /** @description Whether organization members can create private repositories, which are visible to organization members with permission. For more information, see "[Restricting repository creation in your organization](https://docs.github.com/github/setting-up-and-managing-organizations-and-teams/restricting-repository-creation-in-your-organization)" in the GitHub Help documentation. */ + members_can_create_private_repositories?: boolean; + /** @description Whether organization members can create public repositories, which are visible to anyone. For more information, see "[Restricting repository creation in your organization](https://docs.github.com/github/setting-up-and-managing-organizations-and-teams/restricting-repository-creation-in-your-organization)" in the GitHub Help documentation. */ + members_can_create_public_repositories?: boolean; + /** + * @description Specifies which types of repositories non-admin organization members can create. `private` is only available to repositories that are part of an organization on GitHub Enterprise Cloud. + * **Note:** This parameter is deprecated and will be removed in the future. Its return value ignores internal repositories. Using this parameter overrides values set in `members_can_create_repositories`. See the parameter deprecation notice in the operation description for details. + * @enum {string} + */ + members_allowed_repository_creation_type?: "all" | "private" | "none"; + /** + * @description Whether organization members can create GitHub Pages sites. Existing published sites will not be impacted. + * @default true + */ + members_can_create_pages?: boolean; + /** + * @description Whether organization members can create public GitHub Pages sites. Existing published sites will not be impacted. + * @default true + */ + members_can_create_public_pages?: boolean; + /** + * @description Whether organization members can create private GitHub Pages sites. Existing published sites will not be impacted. + * @default true + */ + members_can_create_private_pages?: boolean; + /** + * @description Whether organization members can fork private organization repositories. + * @default false + */ + members_can_fork_private_repositories?: boolean; + /** + * @description Whether contributors to organization repositories are required to sign off on commits they make through GitHub's web interface. + * @default false + */ + web_commit_signoff_required?: boolean; + /** @example "http://github.blog" */ + blog?: string; + /** + * @description Whether GitHub Advanced Security is automatically enabled for new repositories. + * + * To use this parameter, you must have admin permissions for the repository or be an owner or security manager for the organization that owns the repository. For more information, see "[Managing security managers in your organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization)." + * + * You can check which security and analysis features are currently enabled by using a `GET /orgs/{org}` request. + */ + advanced_security_enabled_for_new_repositories?: boolean; + /** + * @description Whether Dependabot alerts is automatically enabled for new repositories. + * + * To use this parameter, you must have admin permissions for the repository or be an owner or security manager for the organization that owns the repository. 
For more information, see "[Managing security managers in your organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization)." + * + * You can check which security and analysis features are currently enabled by using a `GET /orgs/{org}` request. + */ + dependabot_alerts_enabled_for_new_repositories?: boolean; + /** + * @description Whether Dependabot security updates is automatically enabled for new repositories. + * + * To use this parameter, you must have admin permissions for the repository or be an owner or security manager for the organization that owns the repository. For more information, see "[Managing security managers in your organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization)." + * + * You can check which security and analysis features are currently enabled by using a `GET /orgs/{org}` request. + */ + dependabot_security_updates_enabled_for_new_repositories?: boolean; + /** + * @description Whether dependency graph is automatically enabled for new repositories. + * + * To use this parameter, you must have admin permissions for the repository or be an owner or security manager for the organization that owns the repository. For more information, see "[Managing security managers in your organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization)." + * + * You can check which security and analysis features are currently enabled by using a `GET /orgs/{org}` request. + */ + dependency_graph_enabled_for_new_repositories?: boolean; + /** + * @description Whether secret scanning is automatically enabled for new repositories. + * + * To use this parameter, you must have admin permissions for the repository or be an owner or security manager for the organization that owns the repository. For more information, see "[Managing security managers in your organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization)." + * + * You can check which security and analysis features are currently enabled by using a `GET /orgs/{org}` request. + */ + secret_scanning_enabled_for_new_repositories?: boolean; + /** + * @description Whether secret scanning push protection is automatically enabled for new repositories. + * + * To use this parameter, you must have admin permissions for the repository or be an owner or security manager for the organization that owns the repository. For more information, see "[Managing security managers in your organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization)." + * + * You can check which security and analysis features are currently enabled by using a `GET /orgs/{org}` request. + */ + secret_scanning_push_protection_enabled_for_new_repositories?: boolean; + }; + }; + }; + }; + /** + * Gets the total GitHub Actions cache usage for an organization. + * The data fetched using this API is refreshed approximately every 5 minutes, so values returned from this endpoint may take at least 5 minutes to get updated. + * You must authenticate using an access token with the `read:org` scope to use this endpoint. GitHub Apps must have the `organization_admistration:read` permission to use this endpoint. 
+ */ + "actions/get-actions-cache-usage-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["actions-cache-usage-org-enterprise"]; + }; + }; + }; + }; + /** + * Lists repositories and their GitHub Actions cache usage for an organization. + * The data fetched using this API is refreshed approximately every 5 minutes, so values returned from this endpoint may take at least 5 minutes to get updated. + * You must authenticate using an access token with the `read:org` scope to use this endpoint. GitHub Apps must have the `organization_admistration:read` permission to use this endpoint. + */ + "actions/get-actions-cache-usage-by-repo-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": { + total_count: number; + repository_cache_usages: components["schemas"]["actions-cache-usage-by-repository"][]; + }; + }; + }; + }; + }; + /** + * Gets the GitHub Actions permissions policy for repositories and allowed actions and reusable workflows in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + "actions/get-github-actions-permissions-organization": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["actions-organization-permissions"]; + }; + }; + }; + }; + /** + * Sets the GitHub Actions permissions policy for repositories and allowed actions and reusable workflows in an organization. + * + * If the organization belongs to an enterprise that has set restrictive permissions at the enterprise level, such as `allowed_actions` to `selected` actions and reusable workflows, then you cannot override them for the organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + "actions/set-github-actions-permissions-organization": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + requestBody: { + content: { + "application/json": { + enabled_repositories: components["schemas"]["enabled-repositories"]; + allowed_actions?: components["schemas"]["allowed-actions"]; + }; + }; + }; + }; + /** + * Lists the selected repositories that are enabled for GitHub Actions in an organization. To use this endpoint, the organization permission policy for `enabled_repositories` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)." 
+ * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + "actions/list-selected-repositories-enabled-github-actions-organization": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": { + total_count: number; + repositories: components["schemas"]["repository"][]; + }; + }; + }; + }; + }; + /** + * Replaces the list of selected repositories that are enabled for GitHub Actions in an organization. To use this endpoint, the organization permission policy for `enabled_repositories` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)." + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + "actions/set-selected-repositories-enabled-github-actions-organization": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + requestBody: { + content: { + "application/json": { + /** @description List of repository IDs to enable for GitHub Actions. */ + selected_repository_ids: number[]; + }; + }; + }; + }; + /** + * Adds a repository to the list of selected repositories that are enabled for GitHub Actions in an organization. To use this endpoint, the organization permission policy for `enabled_repositories` must be must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)." + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + "actions/enable-selected-repository-github-actions-organization": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The unique identifier of the repository. */ + repository_id: components["parameters"]["repository-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * Removes a repository from the list of selected repositories that are enabled for GitHub Actions in an organization. To use this endpoint, the organization permission policy for `enabled_repositories` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)." + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + "actions/disable-selected-repository-github-actions-organization": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. 
*/ + org: components["parameters"]["org"]; + /** The unique identifier of the repository. */ + repository_id: components["parameters"]["repository-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * Gets the selected actions and reusable workflows that are allowed in an organization. To use this endpoint, the organization permission policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)."" + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + "actions/get-allowed-actions-organization": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["selected-actions"]; + }; + }; + }; + }; + /** + * Sets the actions and reusable workflows that are allowed in an organization. To use this endpoint, the organization permission policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for an organization](#set-github-actions-permissions-for-an-organization)." + * + * If the organization belongs to an enterprise that has `selected` actions and reusable workflows set at the enterprise level, then you cannot override any of the enterprise's allowed actions and reusable workflows settings. + * + * To use the `patterns_allowed` setting for private repositories, the organization must belong to an enterprise. If the organization does not belong to an enterprise, then the `patterns_allowed` setting only applies to public repositories in the organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + "actions/set-allowed-actions-organization": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + requestBody: { + content: { + "application/json": components["schemas"]["selected-actions"]; + }; + }; + }; + /** + * Gets the default workflow permissions granted to the `GITHUB_TOKEN` when running workflows in an organization, + * as well as whether GitHub Actions can submit approving pull request reviews. For more information, see + * "[Setting the permissions of the GITHUB_TOKEN for your organization](https://docs.github.com/organizations/managing-organization-settings/disabling-or-limiting-github-actions-for-your-organization#setting-the-permissions-of-the-github_token-for-your-organization)." + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + "actions/get-github-actions-default-workflow-permissions-organization": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. 
*/ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["actions-get-default-workflow-permissions"]; + }; + }; + }; + }; + /** + * Sets the default workflow permissions granted to the `GITHUB_TOKEN` when running workflows in an organization, and sets if GitHub Actions + * can submit approving pull request reviews. For more information, see + * "[Setting the permissions of the GITHUB_TOKEN for your organization](https://docs.github.com/organizations/managing-organization-settings/disabling-or-limiting-github-actions-for-your-organization#setting-the-permissions-of-the-github_token-for-your-organization)." + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `administration` organization permission to use this API. + */ + "actions/set-github-actions-default-workflow-permissions-organization": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Success response */ + 204: never; + /** Conflict response when changing a setting is prevented by the owning enterprise */ + 409: unknown; + }; + requestBody: { + content: { + "application/json": components["schemas"]["actions-set-default-workflow-permissions"]; + }; + }; + }; + /** + * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." + * + * Lists all self-hosted runner groups configured in an organization and inherited from an enterprise. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "actions/list-self-hosted-runner-groups-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + /** Only return runner groups that are allowed to be used by this repository. */ + visible_to_repository?: components["parameters"]["visible-to-repository"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": { + total_count: number; + runner_groups: components["schemas"]["runner-groups-org"][]; + }; + }; + }; + }; + }; + /** + * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud and GitHub Enterprise Server. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." + * + * Creates a new self-hosted runner group for an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "actions/create-self-hosted-runner-group-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["runner-groups-org"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description Name of the runner group. */ + name: string; + /** + * @description Visibility of a runner group. 
You can select all repositories, select individual repositories, or limit access to private repositories. + * @default all + * @enum {string} + */ + visibility?: "selected" | "all" | "private"; + /** @description List of repository IDs that can access the runner group. */ + selected_repository_ids?: number[]; + /** @description List of runner IDs to add to the runner group. */ + runners?: number[]; + /** + * @description Whether the runner group can be used by `public` repositories. + * @default false + */ + allows_public_repositories?: boolean; + /** + * @description If `true`, the runner group will be restricted to running only the workflows specified in the `selected_workflows` array. + * @default false + */ + restricted_to_workflows?: boolean; + /** @description List of workflows the runner group should be allowed to run. This setting will be ignored unless `restricted_to_workflows` is set to `true`. */ + selected_workflows?: string[]; + }; + }; + }; + }; + /** + * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." + * + * Gets a specific self-hosted runner group for an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "actions/get-self-hosted-runner-group-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** Unique identifier of the self-hosted runner group. */ + runner_group_id: components["parameters"]["runner-group-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["runner-groups-org"]; + }; + }; + }; + }; + /** + * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." + * + * Deletes a self-hosted runner group for an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "actions/delete-self-hosted-runner-group-from-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** Unique identifier of the self-hosted runner group. */ + runner_group_id: components["parameters"]["runner-group-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." + * + * Updates the `name` and `visibility` of a self-hosted runner group in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "actions/update-self-hosted-runner-group-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** Unique identifier of the self-hosted runner group. 
*/ + runner_group_id: components["parameters"]["runner-group-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["runner-groups-org"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description Name of the runner group. */ + name: string; + /** + * @description Visibility of a runner group. You can select all repositories, select individual repositories, or all private repositories. + * @enum {string} + */ + visibility?: "selected" | "all" | "private"; + /** + * @description Whether the runner group can be used by `public` repositories. + * @default false + */ + allows_public_repositories?: boolean; + /** + * @description If `true`, the runner group will be restricted to running only the workflows specified in the `selected_workflows` array. + * @default false + */ + restricted_to_workflows?: boolean; + /** @description List of workflows the runner group should be allowed to run. This setting will be ignored unless `restricted_to_workflows` is set to `true`. */ + selected_workflows?: string[]; + }; + }; + }; + }; + /** + * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud and GitHub Enterprise Server. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." + * + * Lists the repositories with access to a self-hosted runner group configured in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "actions/list-repo-access-to-self-hosted-runner-group-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** Unique identifier of the self-hosted runner group. */ + runner_group_id: components["parameters"]["runner-group-id"]; + }; + query: { + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": { + total_count: number; + repositories: components["schemas"]["minimal-repository"][]; + }; + }; + }; + }; + }; + /** + * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." + * + * Replaces the list of repositories that have access to a self-hosted runner group configured in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "actions/set-repo-access-to-self-hosted-runner-group-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** Unique identifier of the self-hosted runner group. */ + runner_group_id: components["parameters"]["runner-group-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + requestBody: { + content: { + "application/json": { + /** @description List of repository IDs that can access the runner group. */ + selected_repository_ids: number[]; + }; + }; + }; + }; + /** + * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." 
+ * + * + * Removes a repository from the list of selected repositories that can access a self-hosted runner group. The runner group must have `visibility` set to `selected`. For more information, see "[Create a self-hosted runner group for an organization](#create-a-self-hosted-runner-group-for-an-organization)." + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "actions/remove-repo-access-to-self-hosted-runner-group-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** Unique identifier of the self-hosted runner group. */ + runner_group_id: components["parameters"]["runner-group-id"]; + /** The unique identifier of the repository. */ + repository_id: components["parameters"]["repository-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." + * + * Lists self-hosted runners that are in a specific organization group. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "actions/list-self-hosted-runners-in-group-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** Unique identifier of the self-hosted runner group. */ + runner_group_id: components["parameters"]["runner-group-id"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": { + total_count: number; + runners: components["schemas"]["runner"][]; + }; + }; + }; + }; + }; + /** + * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." + * + * Replaces the list of self-hosted runners that are part of an organization runner group. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "actions/set-self-hosted-runners-in-group-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** Unique identifier of the self-hosted runner group. */ + runner_group_id: components["parameters"]["runner-group-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + requestBody: { + content: { + "application/json": { + /** @description List of runner IDs to add to the runner group. */ + runners: number[]; + }; + }; + }; + }; + /** + * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." + * + * + * Adds a self-hosted runner to a runner group configured in an organization. + * + * You must authenticate using an access token with the `admin:org` + * scope to use this endpoint. + */ + "actions/add-self-hosted-runner-to-group-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. 
*/ + org: components["parameters"]["org"]; + /** Unique identifier of the self-hosted runner group. */ + runner_group_id: components["parameters"]["runner-group-id"]; + /** Unique identifier of the self-hosted runner. */ + runner_id: components["parameters"]["runner-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * The self-hosted runner groups REST API is available with GitHub Enterprise Cloud. For more information, see "[GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products)." + * + * + * Removes a self-hosted runner from a group configured in an organization. The runner is then returned to the default group. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "actions/remove-self-hosted-runner-from-group-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** Unique identifier of the self-hosted runner group. */ + runner_group_id: components["parameters"]["runner-group-id"]; + /** Unique identifier of the self-hosted runner. */ + runner_id: components["parameters"]["runner-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * Lists all self-hosted runners configured in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "actions/list-self-hosted-runners-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": { + total_count: number; + runners: components["schemas"]["runner"][]; + }; + }; + }; + }; + }; + /** + * Lists binaries for the runner application that you can download and run. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "actions/list-runner-applications-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["runner-application"][]; + }; + }; + }; + }; + /** + * Returns a token that you can pass to the `config` script. The token expires after one hour. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + * + * #### Example using registration token + * + * Configure your self-hosted runner, replacing `TOKEN` with the registration token provided by this endpoint. + * + * ``` + * ./config.sh --url https://github.com/octo-org --token TOKEN + * ``` + */ + "actions/create-registration-token-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["authentication-token"]; + }; + }; + }; + }; + /** + * Returns a token that you can pass to the `config` script to remove a self-hosted runner from an organization. The token expires after one hour. 
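+ *
+ * A minimal sketch of requesting the remove token through the API before calling the `config` script (illustrative only, not part of the generated spec). It assumes the `@octokit/core` package, a token with the `admin:org` scope in `GITHUB_TOKEN`, the placeholder org `octo-org`, and the documented REST path `POST /orgs/{org}/actions/runners/remove-token`:
+ *
+ * ```
+ * const { Octokit } = require("@octokit/core");
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * octokit
+ *   .request("POST /orgs/{org}/actions/runners/remove-token", { org: "octo-org" })
+ *   .then(({ data }) => {
+ *     // data.token is the TOKEN passed to `./config.sh remove --token TOKEN`; it expires after one hour.
+ *     console.log(data.token, data.expires_at);
+ *   })
+ *   .catch(console.error);
+ * ```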
+ * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + * + * #### Example using remove token + * + * To remove your self-hosted runner from an organization, replace `TOKEN` with the remove token provided by this + * endpoint. + * + * ``` + * ./config.sh remove --token TOKEN + * ``` + */ + "actions/create-remove-token-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["authentication-token"]; + }; + }; + }; + }; + /** + * Gets a specific self-hosted runner configured in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "actions/get-self-hosted-runner-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** Unique identifier of the self-hosted runner. */ + runner_id: components["parameters"]["runner-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["runner"]; + }; + }; + }; + }; + /** + * Forces the removal of a self-hosted runner from an organization. You can use this endpoint to completely remove the runner when the machine you were using no longer exists. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "actions/delete-self-hosted-runner-from-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** Unique identifier of the self-hosted runner. */ + runner_id: components["parameters"]["runner-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * Lists all labels for a self-hosted runner configured in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "actions/list-labels-for-self-hosted-runner-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** Unique identifier of the self-hosted runner. */ + runner_id: components["parameters"]["runner-id"]; + }; + }; + responses: { + 200: components["responses"]["actions_runner_labels"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Remove all previous custom labels and set the new custom labels for a specific + * self-hosted runner configured in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "actions/set-custom-labels-for-self-hosted-runner-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** Unique identifier of the self-hosted runner. */ + runner_id: components["parameters"]["runner-id"]; + }; + }; + responses: { + 200: components["responses"]["actions_runner_labels"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed_simple"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The names of the custom labels to set for the runner. You can pass an empty array to remove all custom labels. 
*/ + labels: string[]; + }; + }; + }; + }; + /** + * Add custom labels to a self-hosted runner configured in an organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "actions/add-custom-labels-to-self-hosted-runner-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** Unique identifier of the self-hosted runner. */ + runner_id: components["parameters"]["runner-id"]; + }; + }; + responses: { + 200: components["responses"]["actions_runner_labels"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed_simple"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The names of the custom labels to add to the runner. */ + labels: string[]; + }; + }; + }; + }; + /** + * Remove all custom labels from a self-hosted runner configured in an + * organization. Returns the remaining read-only labels from the runner. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "actions/remove-all-custom-labels-from-self-hosted-runner-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** Unique identifier of the self-hosted runner. */ + runner_id: components["parameters"]["runner-id"]; + }; + }; + responses: { + 200: components["responses"]["actions_runner_labels_readonly"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Remove a custom label from a self-hosted runner configured + * in an organization. Returns the remaining labels from the runner. + * + * This endpoint returns a `404 Not Found` status if the custom label is not + * present on the runner. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "actions/remove-custom-label-from-self-hosted-runner-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** Unique identifier of the self-hosted runner. */ + runner_id: components["parameters"]["runner-id"]; + /** The name of a self-hosted runner's custom label. */ + name: components["parameters"]["runner-label-name"]; + }; + }; + responses: { + 200: components["responses"]["actions_runner_labels"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed_simple"]; + }; + }; + /** Lists all secrets available in an organization without revealing their encrypted values. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. */ + "actions/list-org-secrets": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": { + total_count: number; + secrets: components["schemas"]["organization-actions-secret"][]; + }; + }; + }; + }; + }; + /** Gets your public key, which you need to encrypt secrets. 
You need to encrypt a secret before you can create or update secrets. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. */ + "actions/get-org-public-key": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["actions-public-key"]; + }; + }; + }; + }; + /** Gets a single organization secret without revealing its encrypted value. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. */ + "actions/get-org-secret": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["organization-actions-secret"]; + }; + }; + }; + }; + /** + * Creates or updates an organization secret with an encrypted value. Encrypt your secret using + * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access + * token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to + * use this endpoint. + * + * #### Example encrypting a secret using Node.js + * + * Encrypt your secret using the [tweetsodium](https://github.com/github/tweetsodium) library. + * + * ``` + * const sodium = require('tweetsodium'); + * + * const key = "base64-encoded-public-key"; + * const value = "plain-text-secret"; + * + * // Convert the message and key to Uint8Array's (Buffer implements that interface) + * const messageBytes = Buffer.from(value); + * const keyBytes = Buffer.from(key, 'base64'); + * + * // Encrypt using LibSodium. + * const encryptedBytes = sodium.seal(messageBytes, keyBytes); + * + * // Base64 the encrypted secret + * const encrypted = Buffer.from(encryptedBytes).toString('base64'); + * + * console.log(encrypted); + * ``` + * + * + * #### Example encrypting a secret using Python + * + * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/latest/public/#nacl-public-sealedbox) with Python 3. + * + * ``` + * from base64 import b64encode + * from nacl import encoding, public + * + * def encrypt(public_key: str, secret_value: str) -> str: + * """Encrypt a Unicode string using the public key.""" + * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) + * sealed_box = public.SealedBox(public_key) + * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) + * return b64encode(encrypted).decode("utf-8") + * ``` + * + * #### Example encrypting a secret using C# + * + * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. 
+ * + * ``` + * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); + * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); + * + * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); + * + * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); + * ``` + * + * #### Example encrypting a secret using Ruby + * + * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. + * + * ```ruby + * require "rbnacl" + * require "base64" + * + * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") + * public_key = RbNaCl::PublicKey.new(key) + * + * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) + * encrypted_secret = box.encrypt("my_secret") + * + * # Print the base64 encoded secret + * puts Base64.strict_encode64(encrypted_secret) + * ``` + */ + "actions/create-or-update-org-secret": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** Response when creating a secret */ + 201: { + content: { + "application/json": components["schemas"]["empty-object"]; + }; + }; + /** Response when updating a secret */ + 204: never; + }; + requestBody: { + content: { + "application/json": { + /** @description Value for your secret, encrypted with [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages) using the public key retrieved from the [Get an organization public key](https://docs.github.com/rest/reference/actions#get-an-organization-public-key) endpoint. */ + encrypted_value?: string; + /** @description ID of the key you used to encrypt the secret. */ + key_id?: string; + /** + * @description Which type of organization repositories have access to the organization secret. `selected` means only the repositories specified by `selected_repository_ids` can access the secret. + * @enum {string} + */ + visibility: "all" | "private" | "selected"; + /** @description An array of repository ids that can access the organization secret. You can only provide a list of repository ids when the `visibility` is set to `selected`. You can manage the list of selected repositories using the [List selected repositories for an organization secret](https://docs.github.com/rest/reference/actions#list-selected-repositories-for-an-organization-secret), [Set selected repositories for an organization secret](https://docs.github.com/rest/reference/actions#set-selected-repositories-for-an-organization-secret), and [Remove selected repository from an organization secret](https://docs.github.com/rest/reference/actions#remove-selected-repository-from-an-organization-secret) endpoints. */ + selected_repository_ids?: (Partial & Partial)[]; + }; + }; + }; + }; + /** Deletes a secret in an organization using the secret name. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. */ + "actions/delete-org-secret": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The name of the secret. 
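Taken together, `actions/get-org-public-key` and `actions/create-or-update-org-secret` above form one flow: fetch the organization's public key, seal the plaintext with LibSodium, then upload the ciphertext together with the `key_id`. A minimal TypeScript sketch of that flow (illustrative only, not part of the patched file), assuming an `@octokit/core` client and the `tweetsodium` call shown in the embedded examples; the token, org, and secret names are placeholders:

```ts
import { Octokit } from "@octokit/core";

// tweetsodium (used in the embedded examples above) ships without type
// definitions, so the small surface used here is typed by hand.
const sodium: { seal(message: Uint8Array, publicKey: Uint8Array): Uint8Array } =
  require("tweetsodium");

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
const org = "my-org";           // placeholder
const secretName = "NPM_TOKEN"; // placeholder

async function setOrgSecret(plaintext: string): Promise<void> {
  // 1. Fetch the org public key (actions/get-org-public-key).
  const { data: key } = await octokit.request(
    "GET /orgs/{org}/actions/secrets/public-key",
    { org }
  );

  // 2. Seal the secret with LibSodium and base64-encode the result.
  const encryptedBytes = sodium.seal(
    Buffer.from(plaintext),
    Buffer.from(key.key, "base64")
  );
  const encrypted_value = Buffer.from(encryptedBytes).toString("base64");

  // 3. Create or update the secret (actions/create-or-update-org-secret).
  await octokit.request("PUT /orgs/{org}/actions/secrets/{secret_name}", {
    org,
    secret_name: secretName,
    encrypted_value,
    key_id: key.key_id,
    visibility: "all",
  });
}
```

Per the response map above, the final call answers 201 when the secret is newly created and 204 when an existing one is updated.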
*/ + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** Lists all repositories that have been selected when the `visibility` for repository access to a secret is set to `selected`. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. */ + "actions/list-selected-repos-for-org-secret": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + }; + query: { + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": { + total_count: number; + repositories: components["schemas"]["minimal-repository"][]; + }; + }; + }; + }; + }; + /** Replaces all repositories for an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/actions#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. */ + "actions/set-selected-repos-for-org-secret": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + requestBody: { + content: { + "application/json": { + /** @description An array of repository ids that can access the organization secret. You can only provide a list of repository ids when the `visibility` is set to `selected`. You can add and remove individual repositories using the [Set selected repositories for an organization secret](https://docs.github.com/rest/reference/actions#set-selected-repositories-for-an-organization-secret) and [Remove selected repository from an organization secret](https://docs.github.com/rest/reference/actions#remove-selected-repository-from-an-organization-secret) endpoints. */ + selected_repository_ids: number[]; + }; + }; + }; + }; + /** Adds a repository to an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/actions#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. */ + "actions/add-selected-repo-to-org-secret": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The name of the secret. 
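When a secret's `visibility` is `selected`, the `actions/*-selected-repo*` operations defined above manage which repositories can read it: one call replaces the whole access list, the others add or remove a single repository by ID (and answer 409 if visibility is not `selected`). A short sketch under the same `@octokit/core` assumption, with placeholder repository IDs:

```ts
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
const org = "my-org";            // placeholder
const secret_name = "NPM_TOKEN"; // placeholder

async function grantAccess(): Promise<void> {
  // Replace the full access list (actions/set-selected-repos-for-org-secret).
  await octokit.request(
    "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories",
    { org, secret_name, selected_repository_ids: [1296269, 1296270] } // placeholders
  );

  // Grant one more repository access (actions/add-selected-repo-to-org-secret).
  await octokit.request(
    "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}",
    { org, secret_name, repository_id: 1296271 } // placeholder
  );
}
```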
*/ + secret_name: components["parameters"]["secret-name"]; + repository_id: number; + }; + }; + responses: { + /** No Content when repository was added to the selected list */ + 204: never; + /** Conflict when visibility type is not set to selected */ + 409: unknown; + }; + }; + /** Removes a repository from an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/actions#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `secrets` organization permission to use this endpoint. */ + "actions/remove-selected-repo-from-org-secret": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + repository_id: number; + }; + }; + responses: { + /** Response when repository was removed from the selected list */ + 204: never; + /** Conflict when visibility type not set to selected */ + 409: unknown; + }; + }; + /** List the users blocked by an organization. */ + "orgs/list-blocked-users": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["simple-user"][]; + }; + }; + }; + }; + "orgs/check-blocked-user": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** If the user is blocked */ + 204: never; + /** If the user is not blocked */ + 404: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + }; + }; + "orgs/block-user": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 204: never; + 422: components["responses"]["validation_failed"]; + }; + }; + "orgs/unblock-user": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * Lists code scanning alerts for the default branch for all eligible repositories in an organization. Eligible repositories are repositories that are owned by organizations that you own or for which you are a security manager. For more information, see "[Managing security managers in your organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization)." + * + * To use this endpoint, you must be an owner or security manager for the organization, and you must use an access token with the `repo` scope or `security_events` scope. + * + * For public repositories, you may instead use the `public_repo` scope. + * + * GitHub Apps must have the `security_events` read permission to use this endpoint. 
+ */ + "code-scanning/list-alerts-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + query: { + /** The name of a code scanning tool. Only results by this tool will be listed. You can specify the tool by using either `tool_name` or `tool_guid`, but not both. */ + tool_name?: components["parameters"]["tool-name"]; + /** The GUID of a code scanning tool. Only results by this tool will be listed. Note that some code scanning tools may not include a GUID in their analysis data. You can specify the tool by using either `tool_guid` or `tool_name`, but not both. */ + tool_guid?: components["parameters"]["tool-guid"]; + /** A cursor, as given in the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header). If specified, the query only searches for events before this cursor. */ + before?: components["parameters"]["pagination-before"]; + /** A cursor, as given in the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header). If specified, the query only searches for events after this cursor. */ + after?: components["parameters"]["pagination-after"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** The direction to sort the results by. */ + direction?: components["parameters"]["direction"]; + /** If specified, only code scanning alerts with this state will be returned. */ + state?: components["schemas"]["code-scanning-alert-state"]; + /** The property by which to sort the results. */ + sort?: "created" | "updated"; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["code-scanning-organization-alert-items"][]; + }; + }; + 403: components["responses"]["code_scanning_forbidden_read"]; + 404: components["responses"]["not_found"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * Lists the codespaces associated to a specified organization. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "codespaces/list-in-organization": { + parameters: { + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": { + total_count: number; + codespaces: components["schemas"]["codespace"][]; + }; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * **Note**: This operation is in beta and is subject to change. + * + * Creates a custom repository role that can be used by all repositories owned by the organization. + * + * To use this endpoint the authenticated user must be an administrator for the organization and must use an access token with `admin:org` scope. + * GitHub Apps must have the `organization_custom_roles:write` organization permission to use this endpoint. 
+ * + * For more information on custom repository roles, see "[Managing custom repository roles for an organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-custom-repository-roles-for-an-organization)." + */ + "orgs/create-custom-role": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["organization-custom-repository-role"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The name of the custom role. */ + name: string; + /** @description A short description about the intended usage of this role or what permissions it grants. */ + description?: string; + /** + * @description The system role from which this role inherits permissions. + * @enum {string} + */ + base_role: "read" | "triage" | "write" | "maintain"; + /** @description A list of additional permissions included in this role. */ + permissions: string[]; + }; + }; + }; + }; + /** + * **Note**: This operation is in beta and is subject to change. + * + * Deletes a custom role from an organization. Once the custom role has been deleted, any + * user, team, or invitation with the deleted custom role will be reassigned the inherited role. + * + * To use this endpoint the authenticated user must be an administrator for the organization and must use an access token with `admin:org` scope. + * GitHub Apps must have the `organization_custom_roles:write` organization permission to use this endpoint. + * + * For more information about custom repository roles, see "[Managing custom repository roles for an organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-custom-repository-roles-for-an-organization)." + */ + "orgs/delete-custom-role": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The unique identifier of the role. */ + role_id: components["parameters"]["role-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * **Note**: This operation is in beta and subject to change. + * + * Updates a custom repository role that can be used by all repositories owned by the organization. + * + * To use this endpoint the authenticated user must be an administrator for the organization and must use an access token with `admin:org` scope. + * GitHub Apps must have the `organization_custom_roles:write` organization permission to use this endpoint. + * + * For more information about custom repository roles, see "[Managing custom repository roles for an organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-custom-repository-roles-for-an-organization)." + */ + "orgs/update-custom-role": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The unique identifier of the role. 
*/ + role_id: components["parameters"]["role-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["organization-custom-repository-role"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The name of the custom role. */ + name?: string; + /** @description A short description about who this role is for or what permissions it grants. */ + description?: string; + /** + * @description The system role from which this role inherits permissions. + * @enum {string} + */ + base_role?: "read" | "triage" | "write" | "maintain"; + /** @description A list of additional permissions included in this role. If specified, these permissions will replace any currently set on the role. */ + permissions?: string[]; + }; + }; + }; + }; + /** Lists all secrets available in an organization without revealing their encrypted values. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. */ + "dependabot/list-org-secrets": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": { + total_count: number; + secrets: components["schemas"]["organization-dependabot-secret"][]; + }; + }; + }; + }; + }; + /** Gets your public key, which you need to encrypt secrets. You need to encrypt a secret before you can create or update secrets. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. */ + "dependabot/get-org-public-key": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["dependabot-public-key"]; + }; + }; + }; + }; + /** Gets a single organization secret without revealing its encrypted value. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. */ + "dependabot/get-org-secret": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["organization-dependabot-secret"]; + }; + }; + }; + }; + /** + * Creates or updates an organization secret with an encrypted value. Encrypt your secret using + * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access + * token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization + * permission to use this endpoint. 
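`orgs/create-custom-role` and `orgs/update-custom-role` take the same shape: a system `base_role` plus a list of fine-grained permission names (see `orgs/list-fine-grained-permissions` further down). The sketch below is an assumption-heavy illustration of creating one: the REST path for this beta API is assumed to be `POST /orgs/{org}/custom_roles` (the beta route documented at the time), and the permission name is a placeholder:

```ts
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

// Create a custom repository role on top of "write" (orgs/create-custom-role).
// The path is assumed from the beta docs; the permission name is a placeholder.
async function createReleaseManagerRole(org: string): Promise<void> {
  const { data: role } = await octokit.request("POST /orgs/{org}/custom_roles", {
    org,
    name: "release-manager",
    description: "Write access plus the ability to cut releases",
    base_role: "write",
    permissions: ["manage_deployments"],
  });
  console.log("created role", role.id);
}
```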
+ * + * #### Example encrypting a secret using Node.js + * + * Encrypt your secret using the [tweetsodium](https://github.com/github/tweetsodium) library. + * + * ``` + * const sodium = require('tweetsodium'); + * + * const key = "base64-encoded-public-key"; + * const value = "plain-text-secret"; + * + * // Convert the message and key to Uint8Array's (Buffer implements that interface) + * const messageBytes = Buffer.from(value); + * const keyBytes = Buffer.from(key, 'base64'); + * + * // Encrypt using LibSodium. + * const encryptedBytes = sodium.seal(messageBytes, keyBytes); + * + * // Base64 the encrypted secret + * const encrypted = Buffer.from(encryptedBytes).toString('base64'); + * + * console.log(encrypted); + * ``` + * + * + * #### Example encrypting a secret using Python + * + * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/latest/public/#nacl-public-sealedbox) with Python 3. + * + * ``` + * from base64 import b64encode + * from nacl import encoding, public + * + * def encrypt(public_key: str, secret_value: str) -> str: + * """Encrypt a Unicode string using the public key.""" + * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) + * sealed_box = public.SealedBox(public_key) + * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) + * return b64encode(encrypted).decode("utf-8") + * ``` + * + * #### Example encrypting a secret using C# + * + * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. + * + * ``` + * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); + * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); + * + * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); + * + * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); + * ``` + * + * #### Example encrypting a secret using Ruby + * + * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. + * + * ```ruby + * require "rbnacl" + * require "base64" + * + * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") + * public_key = RbNaCl::PublicKey.new(key) + * + * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) + * encrypted_secret = box.encrypt("my_secret") + * + * # Print the base64 encoded secret + * puts Base64.strict_encode64(encrypted_secret) + * ``` + */ + "dependabot/create-or-update-org-secret": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** Response when creating a secret */ + 201: { + content: { + "application/json": components["schemas"]["empty-object"]; + }; + }; + /** Response when updating a secret */ + 204: never; + }; + requestBody: { + content: { + "application/json": { + /** @description Value for your secret, encrypted with [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages) using the public key retrieved from the [Get an organization public key](https://docs.github.com/rest/reference/dependabot#get-an-organization-public-key) endpoint. */ + encrypted_value?: string; + /** @description ID of the key you used to encrypt the secret. */ + key_id?: string; + /** + * @description Which type of organization repositories have access to the organization secret. 
`selected` means only the repositories specified by `selected_repository_ids` can access the secret. + * @enum {string} + */ + visibility: "all" | "private" | "selected"; + /** @description An array of repository ids that can access the organization secret. You can only provide a list of repository ids when the `visibility` is set to `selected`. You can manage the list of selected repositories using the [List selected repositories for an organization secret](https://docs.github.com/rest/reference/dependabot#list-selected-repositories-for-an-organization-secret), [Set selected repositories for an organization secret](https://docs.github.com/rest/reference/dependabot#set-selected-repositories-for-an-organization-secret), and [Remove selected repository from an organization secret](https://docs.github.com/rest/reference/dependabot#remove-selected-repository-from-an-organization-secret) endpoints. */ + selected_repository_ids?: (Partial & Partial)[]; + }; + }; + }; + }; + /** Deletes a secret in an organization using the secret name. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. */ + "dependabot/delete-org-secret": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** Lists all repositories that have been selected when the `visibility` for repository access to a secret is set to `selected`. You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. */ + "dependabot/list-selected-repos-for-org-secret": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + }; + query: { + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": { + total_count: number; + repositories: components["schemas"]["minimal-repository"][]; + }; + }; + }; + }; + }; + /** Replaces all repositories for an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/dependabot#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. */ + "dependabot/set-selected-repos-for-org-secret": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + requestBody: { + content: { + "application/json": { + /** @description An array of repository ids that can access the organization secret. 
You can only provide a list of repository ids when the `visibility` is set to `selected`. You can add and remove individual repositories using the [Set selected repositories for an organization secret](https://docs.github.com/rest/reference/dependabot#set-selected-repositories-for-an-organization-secret) and [Remove selected repository from an organization secret](https://docs.github.com/rest/reference/dependabot#remove-selected-repository-from-an-organization-secret) endpoints. */ + selected_repository_ids: number[]; + }; + }; + }; + }; + /** Adds a repository to an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/dependabot#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. */ + "dependabot/add-selected-repo-to-org-secret": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + repository_id: number; + }; + }; + responses: { + /** No Content when repository was added to the selected list */ + 204: never; + /** Conflict when visibility type is not set to selected */ + 409: unknown; + }; + }; + /** Removes a repository from an organization secret when the `visibility` for repository access is set to `selected`. The visibility is set when you [Create or update an organization secret](https://docs.github.com/rest/reference/dependabot#create-or-update-an-organization-secret). You must authenticate using an access token with the `admin:org` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` organization permission to use this endpoint. */ + "dependabot/remove-selected-repo-from-org-secret": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + repository_id: number; + }; + }; + responses: { + /** Response when repository was removed from the selected list */ + 204: never; + /** Conflict when visibility type not set to selected */ + 409: unknown; + }; + }; + "activity/list-public-org-events": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["event"][]; + }; + }; + }; + }; + /** The return hash contains `failed_at` and `failed_reason` fields which represent the time at which the invitation failed and the reason for the failure. */ + "orgs/list-failed-invitations": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. 
*/ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["organization-invitation"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * **Note**: This operation is in beta and subject to change. + * + * Lists the fine-grained permissions available for an organization. + * + * To use this endpoint the authenticated user must be an administrator for the organization or of an repository of the organizaiton and must use an access token with `admin:org repo` scope. + * GitHub Apps must have the `organization_custom_roles:read` organization permission to use this endpoint. + */ + "orgs/list-fine-grained-permissions": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["organization-fine-grained-permission"][]; + }; + }; + }; + }; + "orgs/list-webhooks": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["org-hook"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** Here's how you can create a hook that posts payloads in JSON format: */ + "orgs/create-webhook": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 201: { + headers: { + Location?: string; + }; + content: { + "application/json": components["schemas"]["org-hook"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description Must be passed as "web". */ + name: string; + /** @description Key/value pairs to provide settings for this webhook. [These are defined below](https://docs.github.com/rest/reference/orgs#create-hook-config-params). */ + config: { + url: components["schemas"]["webhook-config-url"]; + content_type?: components["schemas"]["webhook-config-content-type"]; + secret?: components["schemas"]["webhook-config-secret"]; + insecure_ssl?: components["schemas"]["webhook-config-insecure-ssl"]; + /** @example "kdaigle" */ + username?: string; + /** @example "password" */ + password?: string; + }; + /** + * @description Determines what [events](https://docs.github.com/webhooks/event-payloads) the hook is triggered for. + * @default [ + * "push" + * ] + */ + events?: string[]; + /** + * @description Determines if notifications are sent when the webhook is triggered. Set to `true` to send notifications. + * @default true + */ + active?: boolean; + }; + }; + }; + }; + /** Returns a webhook configured in an organization. To get only the webhook `config` properties, see "[Get a webhook configuration for an organization](/rest/reference/orgs#get-a-webhook-configuration-for-an-organization)." */ + "orgs/get-webhook": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. 
*/ + org: components["parameters"]["org"]; + /** The unique identifier of the hook. */ + hook_id: components["parameters"]["hook-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["org-hook"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + "orgs/delete-webhook": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The unique identifier of the hook. */ + hook_id: components["parameters"]["hook-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 404: components["responses"]["not_found"]; + }; + }; + /** Updates a webhook configured in an organization. When you update a webhook, the `secret` will be overwritten. If you previously had a `secret` set, you must provide the same `secret` or set a new `secret` or the secret will be removed. If you are only updating individual webhook `config` properties, use "[Update a webhook configuration for an organization](/rest/reference/orgs#update-a-webhook-configuration-for-an-organization)." */ + "orgs/update-webhook": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The unique identifier of the hook. */ + hook_id: components["parameters"]["hook-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["org-hook"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description Key/value pairs to provide settings for this webhook. [These are defined below](https://docs.github.com/rest/reference/orgs#update-hook-config-params). */ + config?: { + url: components["schemas"]["webhook-config-url"]; + content_type?: components["schemas"]["webhook-config-content-type"]; + secret?: components["schemas"]["webhook-config-secret"]; + insecure_ssl?: components["schemas"]["webhook-config-insecure-ssl"]; + }; + /** + * @description Determines what [events](https://docs.github.com/webhooks/event-payloads) the hook is triggered for. + * @default [ + * "push" + * ] + */ + events?: string[]; + /** + * @description Determines if notifications are sent when the webhook is triggered. Set to `true` to send notifications. + * @default true + */ + active?: boolean; + /** @example "web" */ + name?: string; + }; + }; + }; + }; + /** + * Returns the webhook configuration for an organization. To get more information about the webhook, including the `active` state and `events`, use "[Get an organization webhook ](/rest/reference/orgs#get-an-organization-webhook)." + * + * Access tokens must have the `admin:org_hook` scope, and GitHub Apps must have the `organization_hooks:read` permission. + */ + "orgs/get-webhook-config-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The unique identifier of the hook. */ + hook_id: components["parameters"]["hook-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["webhook-config"]; + }; + }; + }; + }; + /** + * Updates the webhook configuration for an organization. To update more information about the webhook, including the `active` state and `events`, use "[Update an organization webhook ](/rest/reference/orgs#update-an-organization-webhook)." 
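The `orgs/create-webhook` description above promises an example of a hook that posts payloads in JSON format; a minimal sketch of that request, with a placeholder URL and secret and the same `@octokit/core` assumption:

```ts
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

// Create a hook that posts JSON payloads for push and pull_request events
// (orgs/create-webhook). `name` must be "web", per the schema above.
async function createOrgHook(org: string): Promise<void> {
  const { data: hook } = await octokit.request("POST /orgs/{org}/hooks", {
    org,
    name: "web",
    config: {
      url: "https://example.com/github/events", // placeholder
      content_type: "json",
      secret: process.env.WEBHOOK_SECRET,       // placeholder
    },
    events: ["push", "pull_request"],
    active: true,
  });
  console.log("created hook", hook.id);
}
```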
+ * + * Access tokens must have the `admin:org_hook` scope, and GitHub Apps must have the `organization_hooks:write` permission. + */ + "orgs/update-webhook-config-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The unique identifier of the hook. */ + hook_id: components["parameters"]["hook-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["webhook-config"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + url?: components["schemas"]["webhook-config-url"]; + content_type?: components["schemas"]["webhook-config-content-type"]; + secret?: components["schemas"]["webhook-config-secret"]; + insecure_ssl?: components["schemas"]["webhook-config-insecure-ssl"]; + }; + }; + }; + }; + /** Returns a list of webhook deliveries for a webhook configured in an organization. */ + "orgs/list-webhook-deliveries": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The unique identifier of the hook. */ + hook_id: components["parameters"]["hook-id"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Used for pagination: the starting delivery from which the page of deliveries is fetched. Refer to the `link` header for the next and previous page cursors. */ + cursor?: components["parameters"]["cursor"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["hook-delivery-item"][]; + }; + }; + 400: components["responses"]["bad_request"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** Returns a delivery for a webhook configured in an organization. */ + "orgs/get-webhook-delivery": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The unique identifier of the hook. */ + hook_id: components["parameters"]["hook-id"]; + delivery_id: components["parameters"]["delivery-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["hook-delivery"]; + }; + }; + 400: components["responses"]["bad_request"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** Redeliver a delivery for a webhook configured in an organization. */ + "orgs/redeliver-webhook-delivery": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The unique identifier of the hook. */ + hook_id: components["parameters"]["hook-id"]; + delivery_id: components["parameters"]["delivery-id"]; + }; + }; + responses: { + 202: components["responses"]["accepted"]; + 400: components["responses"]["bad_request"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** This will trigger a [ping event](https://docs.github.com/webhooks/#ping-event) to be sent to the hook. */ + "orgs/ping-webhook": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The unique identifier of the hook. 
*/ + hook_id: components["parameters"]["hook-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Enables an authenticated GitHub App to find the organization's installation information. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + "apps/get-org-installation": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["installation"]; + }; + }; + }; + }; + /** Lists all GitHub Apps in an organization. The installation count includes all GitHub Apps installed on repositories in the organization. You must be an organization owner with `admin:read` scope to use this endpoint. */ + "orgs/list-app-installations": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": { + total_count: number; + installations: components["schemas"]["installation"][]; + }; + }; + }; + }; + }; + /** Shows which type of GitHub user can interact with this organization and when the restriction expires. If there is no restrictions, you will see an empty response. */ + "interactions/get-restrictions-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": Partial< + components["schemas"]["interaction-limit-response"] + > & + Partial<{ [key: string]: unknown }>; + }; + }; + }; + }; + /** Temporarily restricts interactions to a certain type of GitHub user in any public repository in the given organization. You must be an organization owner to set these restrictions. Setting the interaction limit at the organization level will overwrite any interaction limits that are set for individual repositories owned by the organization. */ + "interactions/set-restrictions-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["interaction-limit-response"]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": components["schemas"]["interaction-limit"]; + }; + }; + }; + /** Removes all interaction restrictions from public repositories in the given organization. You must be an organization owner to remove restrictions. */ + "interactions/remove-restrictions-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. 
*/ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** The return hash contains a `role` field which refers to the Organization Invitation role and will be one of the following values: `direct_member`, `admin`, `billing_manager`, `hiring_manager`, or `reinstate`. If the invitee is not a GitHub member, the `login` field in the return hash will be `null`. */ + "orgs/list-pending-invitations": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["organization-invitation"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Invite people to an organization by using their GitHub user ID or their email address. In order to create invitations in an organization, the authenticated user must be an organization owner. + * + * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + "orgs/create-invitation": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["organization-invitation"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description **Required unless you provide `email`**. GitHub user ID for the person you are inviting. */ + invitee_id?: number; + /** @description **Required unless you provide `invitee_id`**. Email address of the person you are inviting, which can be an existing GitHub user. */ + email?: string; + /** + * @description The role for the new member. + * \* `admin` - Organization owners with full administrative rights to the organization and complete access to all repositories and teams. + * \* `direct_member` - Non-owner organization members with ability to see other members and join teams by invitation. + * \* `billing_manager` - Non-owner organization members with ability to manage the billing settings of your organization. + * @default direct_member + * @enum {string} + */ + role?: "admin" | "direct_member" | "billing_manager"; + /** @description Specify IDs for the teams you want to invite new members to. */ + team_ids?: number[]; + }; + }; + }; + }; + /** + * Cancel an organization invitation. In order to cancel an organization invitation, the authenticated user must be an organization owner. + * + * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). 
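`orgs/create-invitation` accepts either a GitHub user ID (`invitee_id`) or an `email`, plus an optional role and team list. A small sketch of inviting by email as a regular member (placeholder email and team IDs, same `@octokit/core` assumption):

```ts
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

// Invite someone by email as a direct member of two teams (orgs/create-invitation).
async function invite(org: string): Promise<void> {
  const { data: invitation } = await octokit.request(
    "POST /orgs/{org}/invitations",
    {
      org,
      email: "octocat@example.com", // placeholder
      role: "direct_member",
      team_ids: [12, 34],           // placeholder team IDs
    }
  );
  console.log("invited", invitation.email, "as", invitation.role);
}
```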
+ */ + "orgs/cancel-invitation": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The unique identifier of the invitation. */ + invitation_id: components["parameters"]["invitation-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** List all teams associated with an invitation. In order to see invitations in an organization, the authenticated user must be an organization owner. */ + "orgs/list-invitation-teams": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The unique identifier of the invitation. */ + invitation_id: components["parameters"]["invitation-id"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["team"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * List issues in an organization assigned to the authenticated user. + * + * **Note**: GitHub's REST API v3 considers every pull request an issue, but not every issue is a pull request. For this + * reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by + * the `pull_request` key. Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull + * request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint. + */ + "issues/list-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + query: { + /** Indicates which sorts of issues to return. `assigned` means issues assigned to you. `created` means issues created by you. `mentioned` means issues mentioning you. `subscribed` means issues you're subscribed to updates for. `all` or `repos` means all issues you can see, regardless of participation or creation. */ + filter?: + | "assigned" + | "created" + | "mentioned" + | "subscribed" + | "repos" + | "all"; + /** Indicates the state of the issues to return. */ + state?: "open" | "closed" | "all"; + /** A list of comma separated label names. Example: `bug,ui,@high` */ + labels?: components["parameters"]["labels"]; + /** What to sort results by. */ + sort?: "created" | "updated" | "comments"; + /** The direction to sort the results by. */ + direction?: components["parameters"]["direction"]; + /** Only show notifications updated after the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ + since?: components["parameters"]["since"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["issue"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** List all users who are members of an organization. 
If the authenticated user is also a member of this organization then both concealed and public members will be returned. */ + "orgs/list-members": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + query: { + /** Filter members returned in the list. `2fa_disabled` means that only members without [two-factor authentication](https://github.com/blog/1614-two-factor-authentication) enabled will be returned. This options is only available for organization owners. */ + filter?: "2fa_disabled" | "all"; + /** Filter members returned by their role. */ + role?: "all" | "admin" | "member"; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["simple-user"][]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + }; + /** Check if a user is, publicly or privately, a member of the organization. */ + "orgs/check-membership-for-user": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response if requester is an organization member and user is a member */ + 204: never; + /** Response if requester is not an organization member */ + 302: never; + /** Not Found if requester is an organization member and user is not a member */ + 404: unknown; + }; + }; + /** Removing a user from this list will remove them from all teams and they will no longer have any access to the organization's repositories. */ + "orgs/remove-member": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 204: never; + 403: components["responses"]["forbidden"]; + }; + }; + /** + * Deletes a user's codespace. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "codespaces/delete-from-organization": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + /** The name of the codespace. */ + codespace_name: components["parameters"]["codespace-name"]; + }; + }; + responses: { + 202: components["responses"]["accepted"]; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Stops a user's codespace. + * + * You must authenticate using an access token with the `admin:org` scope to use this endpoint. + */ + "codespaces/stop-in-organization": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + /** The name of the codespace. 
*/ + codespace_name: components["parameters"]["codespace-name"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["codespace"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** In order to get a user's membership with an organization, the authenticated user must be an organization member. The `state` parameter in the response can be used to identify the user's membership status. */ + "orgs/get-membership-for-user": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["org-membership"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Only authenticated organization owners can add a member to the organization or update the member's role. + * + * * If the authenticated user is _adding_ a member to the organization, the invited user will receive an email inviting them to the organization. The user's [membership status](https://docs.github.com/rest/reference/orgs#get-organization-membership-for-a-user) will be `pending` until they accept the invitation. + * + * * Authenticated users can _update_ a user's membership by passing the `role` parameter. If the authenticated user changes a member's role to `admin`, the affected user will receive an email notifying them that they've been made an organization owner. If the authenticated user changes an owner's role to `member`, no email will be sent. + * + * **Rate limits** + * + * To prevent abuse, the authenticated user is limited to 50 organization invitations per 24 hour period. If the organization is more than one month old or on a paid plan, the limit is 500 invitations per 24 hour period. + */ + "orgs/set-membership-for-user": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["org-membership"]; + }; + }; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The role to give the user in the organization. Can be one of: + * \* `admin` - The user will become an owner of the organization. + * \* `member` - The user will become a non-owner member of the organization. + * @default member + * @enum {string} + */ + role?: "admin" | "member"; + }; + }; + }; + }; + /** + * In order to remove a user's membership with an organization, the authenticated user must be an organization owner. + * + * If the specified user is an active member of the organization, this will remove them from the organization. If the specified user has been invited to the organization, this will cancel their invitation. The specified user will receive an email notification in both cases. 
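`orgs/set-membership-for-user` is the same call whether you are adding a new member (their `state` stays `pending` until they accept the emailed invitation) or changing an existing member's role. A short sketch of promoting a user to owner (placeholder org and username, same `@octokit/core` assumption):

```ts
import { Octokit } from "@octokit/core";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

// Add a user to the org or promote an existing member to owner
// (orgs/set-membership-for-user).
async function makeOwner(org: string, username: string): Promise<void> {
  const { data: membership } = await octokit.request(
    "PUT /orgs/{org}/memberships/{username}",
    { org, username, role: "admin" }
  );
  console.log(membership.user?.login, membership.state, membership.role);
}
```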
+ */ + "orgs/remove-membership-for-user": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 204: never; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Lists the most recent migrations. */ + "migrations/list-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + /** Exclude attributes from the API response to improve performance */ + exclude?: "repositories"[]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["migration"][]; + }; + }; + }; + }; + /** Initiates the generation of a migration archive. */ + "migrations/start-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["migration"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description A list of arrays indicating which repositories should be migrated. */ + repositories: string[]; + /** + * @description Indicates whether repositories should be locked (to prevent manipulation) while migrating data. + * @default false + * @example true + */ + lock_repositories?: boolean; + /** + * @description Indicates whether metadata should be excluded and only git source should be included for the migration. + * @default false + */ + exclude_metadata?: boolean; + /** + * @description Indicates whether the repository git data should be excluded from the migration. + * @default false + */ + exclude_git_data?: boolean; + /** + * @description Indicates whether attachments should be excluded from the migration (to reduce migration archive file size). + * @default false + * @example true + */ + exclude_attachments?: boolean; + /** + * @description Indicates whether releases should be excluded from the migration (to reduce migration archive file size). + * @default false + * @example true + */ + exclude_releases?: boolean; + /** + * @description Indicates whether projects owned by the organization or users should be excluded. from the migration. + * @default false + * @example true + */ + exclude_owner_projects?: boolean; + /** + * @description Indicates whether this should only include organization metadata (repositories array should be empty and will ignore other flags). + * @default false + * @example true + */ + org_metadata_only?: boolean; + /** @description Exclude related items from being returned in the response in order to improve performance of the request. The array can include any of: `"repositories"`. */ + exclude?: "repositories"[]; + }; + }; + }; + }; + /** + * Fetches the status of a migration. + * + * The `state` of a migration can be one of the following values: + * + * * `pending`, which means the migration hasn't started yet. 
+ * * `exporting`, which means the migration is in progress. + * * `exported`, which means the migration finished successfully. + * * `failed`, which means the migration failed. + */ + "migrations/get-status-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The unique identifier of the migration. */ + migration_id: components["parameters"]["migration-id"]; + }; + query: { + /** Exclude attributes from the API response to improve performance */ + exclude?: "repositories"[]; + }; + }; + responses: { + /** + * * `pending`, which means the migration hasn't started yet. + * * `exporting`, which means the migration is in progress. + * * `exported`, which means the migration finished successfully. + * * `failed`, which means the migration failed. + */ + 200: { + content: { + "application/json": components["schemas"]["migration"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** Fetches the URL to a migration archive. */ + "migrations/download-archive-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The unique identifier of the migration. */ + migration_id: components["parameters"]["migration-id"]; + }; + }; + responses: { + /** Response */ + 302: never; + 404: components["responses"]["not_found"]; + }; + }; + /** Deletes a previous migration archive. Migration archives are automatically deleted after seven days. */ + "migrations/delete-archive-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The unique identifier of the migration. */ + migration_id: components["parameters"]["migration-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 404: components["responses"]["not_found"]; + }; + }; + /** Unlocks a repository that was locked for migration. You should unlock each migrated repository and [delete them](https://docs.github.com/rest/reference/repos#delete-a-repository) when the migration is complete and you no longer need the source data. */ + "migrations/unlock-repo-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The unique identifier of the migration. */ + migration_id: components["parameters"]["migration-id"]; + /** repo_name parameter */ + repo_name: components["parameters"]["repo-name"]; + }; + }; + responses: { + /** Response */ + 204: never; + 404: components["responses"]["not_found"]; + }; + }; + /** List all the repositories for this organization migration. */ + "migrations/list-repos-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The unique identifier of the migration. */ + migration_id: components["parameters"]["migration-id"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["minimal-repository"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** List all users who are outside collaborators of an organization. 
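+ *
+ * (Editorial addition, not part of the generated types file: a minimal usage sketch, assuming @octokit/core and a token in GITHUB_TOKEN; the org name is a placeholder.)
+ *
+ * @example
+ * import { Octokit } from "@octokit/core";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ * // List outside collaborators, filtering to accounts without two-factor authentication enabled.
+ * const { data } = await octokit.request("GET /orgs/{org}/outside_collaborators", {
+ *   org: "example-org",
+ *   filter: "2fa_disabled",
+ *   per_page: 100,
+ * });
+ * console.log(data.map((user) => user.login));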
*/ + "orgs/list-outside-collaborators": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + query: { + /** Filter the list of outside collaborators. `2fa_disabled` means that only outside collaborators without [two-factor authentication](https://github.com/blog/1614-two-factor-authentication) enabled will be returned. */ + filter?: "2fa_disabled" | "all"; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["simple-user"][]; + }; + }; + }; + }; + /** When an organization member is converted to an outside collaborator, they'll only have access to the repositories that their current team membership allows. The user will no longer be a member of the organization. For more information, see "[Converting an organization member to an outside collaborator](https://docs.github.com/articles/converting-an-organization-member-to-an-outside-collaborator/)". Converting an organization member to an outside collaborator may be restricted by enterprise administrators. For more information, see "[Enforcing repository management policies in your enterprise](https://docs.github.com/enterprise-cloud@latest/admin/policies/enforcing-policies-for-your-enterprise/enforcing-repository-management-policies-in-your-enterprise#enforcing-a-policy-for-inviting-outside-collaborators-to-repositories)." */ + "orgs/convert-member-to-outside-collaborator": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** User is getting converted asynchronously */ + 202: { + content: { + "application/json": { [key: string]: unknown }; + }; + }; + /** User was converted */ + 204: never; + /** Forbidden if user is the last owner of the organization, not a member of the organization, or if the enterprise enforces a policy for inviting outside collaborators. For more information, see "[Enforcing repository management policies in your enterprise](https://docs.github.com/en/enterprise-cloud@latest/admin/policies/enforcing-policies-for-your-enterprise/enforcing-repository-management-policies-in-your-enterprise#enforcing-a-policy-for-inviting-outside-collaborators-to-repositories)." */ + 403: unknown; + 404: components["responses"]["not_found"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description When set to `true`, the request will be performed asynchronously. Returns a 202 status code when the job is successfully queued. + * @default false + */ + async?: boolean; + }; + }; + }; + }; + /** Removing a user from this list will remove them from all the organization's repositories. */ + "orgs/remove-outside-collaborator": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The handle for the GitHub user account. 
*/ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 204: never; + /** Unprocessable Entity if user is a member of the organization */ + 422: { + content: { + "application/json": { + message?: string; + documentation_url?: string; + }; + }; + }; + }; + }; + /** + * Lists all packages in an organization readable by the user. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + "packages/list-packages-for-organization": { + parameters: { + query: { + /** The type of supported package. Packages in GitHub's Gradle registry have the type `maven`. Docker images pushed to GitHub's Container registry (`ghcr.io`) have the type `container`. You can use the type `docker` to find images that were pushed to GitHub's Docker registry (`docker.pkg.github.com`), even if these have now been migrated to the Container registry. */ + package_type: + | "npm" + | "maven" + | "rubygems" + | "docker" + | "nuget" + | "container"; + /** The selected visibility of the packages. Only `container` package_types currently support `internal` visibility properly. For other ecosystems `internal` is synonymous with `private`. This parameter is optional and only filters an existing result set. */ + visibility?: components["parameters"]["package-visibility"]; + }; + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["package"][]; + }; + }; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + /** + * Gets a specific package in an organization. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + "packages/get-package-for-organization": { + parameters: { + path: { + /** The type of supported package. Packages in GitHub's Gradle registry have the type `maven`. Docker images pushed to GitHub's Container registry (`ghcr.io`) have the type `container`. You can use the type `docker` to find images that were pushed to GitHub's Docker registry (`docker.pkg.github.com`), even if these have now been migrated to the Container registry. */ + package_type: components["parameters"]["package-type"]; + /** The name of the package. */ + package_name: components["parameters"]["package-name"]; + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["package"]; + }; + }; + }; + }; + /** + * Deletes an entire package in an organization. You cannot delete a public package if any version of the package has more than 5,000 downloads. In this scenario, contact GitHub support for further assistance. + * + * To use this endpoint, you must have admin permissions in the organization and authenticate using an access token with the `packages:read` and `packages:delete` scopes. In addition: + * - If `package_type` is not `container`, your token must also include the `repo` scope. + * - If `package_type` is `container`, you must also have admin permissions to the container you want to delete. 
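+ *
+ * (Editorial addition, not part of the generated types file: a minimal usage sketch, assuming @octokit/core and a token carrying the scopes described above in GITHUB_TOKEN; the org and package names are placeholders.)
+ *
+ * @example
+ * import { Octokit } from "@octokit/core";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ * // Delete an entire container package owned by the organization; expects a 204 on success.
+ * await octokit.request("DELETE /orgs/{org}/packages/{package_type}/{package_name}", {
+ *   org: "example-org",
+ *   package_type: "container",
+ *   package_name: "example-image",
+ * });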
+ */ + "packages/delete-package-for-org": { + parameters: { + path: { + /** The type of supported package. Packages in GitHub's Gradle registry have the type `maven`. Docker images pushed to GitHub's Container registry (`ghcr.io`) have the type `container`. You can use the type `docker` to find images that were pushed to GitHub's Docker registry (`docker.pkg.github.com`), even if these have now been migrated to the Container registry. */ + package_type: components["parameters"]["package-type"]; + /** The name of the package. */ + package_name: components["parameters"]["package-name"]; + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 204: never; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Restores an entire package in an organization. + * + * You can restore a deleted package under the following conditions: + * - The package was deleted within the last 30 days. + * - The same package namespace and version is still available and not reused for a new package. If the same package namespace is not available, you will not be able to restore your package. In this scenario, to restore the deleted package, you must delete the new package that uses the deleted package's namespace first. + * + * To use this endpoint, you must have admin permissions in the organization and authenticate using an access token with the `packages:read` and `packages:write` scopes. In addition: + * - If `package_type` is not `container`, your token must also include the `repo` scope. + * - If `package_type` is `container`, you must also have admin permissions to the container that you want to restore. + */ + "packages/restore-package-for-org": { + parameters: { + path: { + /** The type of supported package. Packages in GitHub's Gradle registry have the type `maven`. Docker images pushed to GitHub's Container registry (`ghcr.io`) have the type `container`. You can use the type `docker` to find images that were pushed to GitHub's Docker registry (`docker.pkg.github.com`), even if these have now been migrated to the Container registry. */ + package_type: components["parameters"]["package-type"]; + /** The name of the package. */ + package_name: components["parameters"]["package-name"]; + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + query: { + /** package token */ + token?: string; + }; + }; + responses: { + /** Response */ + 204: never; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Lists package versions for a package owned by an organization. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + "packages/get-all-package-versions-for-package-owned-by-org": { + parameters: { + path: { + /** The type of supported package. Packages in GitHub's Gradle registry have the type `maven`. Docker images pushed to GitHub's Container registry (`ghcr.io`) have the type `container`. You can use the type `docker` to find images that were pushed to GitHub's Docker registry (`docker.pkg.github.com`), even if these have now been migrated to the Container registry. 
*/ + package_type: components["parameters"]["package-type"]; + /** The name of the package. */ + package_name: components["parameters"]["package-name"]; + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + query: { + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** The state of the package, either active or deleted. */ + state?: "active" | "deleted"; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["package-version"][]; + }; + }; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Gets a specific package version in an organization. + * + * You must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + "packages/get-package-version-for-organization": { + parameters: { + path: { + /** The type of supported package. Packages in GitHub's Gradle registry have the type `maven`. Docker images pushed to GitHub's Container registry (`ghcr.io`) have the type `container`. You can use the type `docker` to find images that were pushed to GitHub's Docker registry (`docker.pkg.github.com`), even if these have now been migrated to the Container registry. */ + package_type: components["parameters"]["package-type"]; + /** The name of the package. */ + package_name: components["parameters"]["package-name"]; + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** Unique identifier of the package version. */ + package_version_id: components["parameters"]["package-version-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["package-version"]; + }; + }; + }; + }; + /** + * Deletes a specific package version in an organization. If the package is public and the package version has more than 5,000 downloads, you cannot delete the package version. In this scenario, contact GitHub support for further assistance. + * + * To use this endpoint, you must have admin permissions in the organization and authenticate using an access token with the `packages:read` and `packages:delete` scopes. In addition: + * - If `package_type` is not `container`, your token must also include the `repo` scope. + * - If `package_type` is `container`, you must also have admin permissions to the container you want to delete. + */ + "packages/delete-package-version-for-org": { + parameters: { + path: { + /** The type of supported package. Packages in GitHub's Gradle registry have the type `maven`. Docker images pushed to GitHub's Container registry (`ghcr.io`) have the type `container`. You can use the type `docker` to find images that were pushed to GitHub's Docker registry (`docker.pkg.github.com`), even if these have now been migrated to the Container registry. */ + package_type: components["parameters"]["package-type"]; + /** The name of the package. */ + package_name: components["parameters"]["package-name"]; + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** Unique identifier of the package version. 
*/ + package_version_id: components["parameters"]["package-version-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Restores a specific package version in an organization. + * + * You can restore a deleted package under the following conditions: + * - The package was deleted within the last 30 days. + * - The same package namespace and version is still available and not reused for a new package. If the same package namespace is not available, you will not be able to restore your package. In this scenario, to restore the deleted package, you must delete the new package that uses the deleted package's namespace first. + * + * To use this endpoint, you must have admin permissions in the organization and authenticate using an access token with the `packages:read` and `packages:write` scopes. In addition: + * - If `package_type` is not `container`, your token must also include the `repo` scope. + * - If `package_type` is `container`, you must also have admin permissions to the container that you want to restore. + */ + "packages/restore-package-version-for-org": { + parameters: { + path: { + /** The type of supported package. Packages in GitHub's Gradle registry have the type `maven`. Docker images pushed to GitHub's Container registry (`ghcr.io`) have the type `container`. You can use the type `docker` to find images that were pushed to GitHub's Docker registry (`docker.pkg.github.com`), even if these have now been migrated to the Container registry. */ + package_type: components["parameters"]["package-type"]; + /** The name of the package. */ + package_name: components["parameters"]["package-name"]; + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** Unique identifier of the package version. */ + package_version_id: components["parameters"]["package-version-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Lists the projects in an organization. Returns a `404 Not Found` status if projects are disabled in the organization. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. */ + "projects/list-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + query: { + /** Indicates the state of the projects to return. */ + state?: "open" | "closed" | "all"; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["project"][]; + }; + }; + 422: components["responses"]["validation_failed_simple"]; + }; + }; + /** Creates an organization project board. Returns a `410 Gone` status if projects are disabled in the organization or if the organization does not have existing classic projects. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. 
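+ *
+ * (Editorial addition, not part of the generated types file: a minimal usage sketch, assuming @octokit/core and a token in GITHUB_TOKEN; the org and project names are placeholders. The logged fields come from the `project` schema referenced below.)
+ *
+ * @example
+ * import { Octokit } from "@octokit/core";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ * // Create a classic organization project board; the 201 response body matches the `project` schema.
+ * const { data: project } = await octokit.request("POST /orgs/{org}/projects", {
+ *   org: "example-org",
+ *   name: "Example roadmap",
+ *   body: "Tracking work for the example-org roadmap.",
+ * });
+ * console.log(project.id, project.html_url);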
*/ + "projects/create-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["project"]; + }; + }; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 410: components["responses"]["gone"]; + 422: components["responses"]["validation_failed_simple"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The name of the project. */ + name: string; + /** @description The description of the project. */ + body?: string; + }; + }; + }; + }; + /** Members of an organization can choose to have their membership publicized or not. */ + "orgs/list-public-members": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["simple-user"][]; + }; + }; + }; + }; + "orgs/check-public-membership-for-user": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response if user is a public member */ + 204: never; + /** Not Found if user is not a public member */ + 404: unknown; + }; + }; + /** + * The user can publicize their own membership. (A user cannot publicize the membership for another user.) + * + * Note that you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." + */ + "orgs/set-public-membership-for-authenticated-user": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 204: never; + 403: components["responses"]["forbidden"]; + }; + }; + "orgs/remove-public-membership-for-authenticated-user": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** Lists repositories for the specified organization. */ + "repos/list-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + query: { + /** Specifies the types of repositories you want returned. If your organization is associated with an enterprise account using GitHub Enterprise Cloud or GitHub Enterprise Server 2.20+, `type` can also be `internal`. However, the `internal` value is not yet supported when a GitHub App calls this API with an installation access token. 
*/ + type?: + | "all" + | "public" + | "private" + | "forks" + | "sources" + | "member" + | "internal"; + /** The property to sort the results by. */ + sort?: "created" | "updated" | "pushed" | "full_name"; + /** The order to sort by. Default: `asc` when using `full_name`, otherwise `desc`. */ + direction?: "asc" | "desc"; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["minimal-repository"][]; + }; + }; + }; + }; + /** + * Creates a new repository in the specified organization. The authenticated user must be a member of the organization. + * + * **OAuth scope requirements** + * + * When using [OAuth](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/), authorizations must include: + * + * * `public_repo` scope or `repo` scope to create a public repository. Note: For GitHub AE, use `repo` scope to create an internal repository. + * * `repo` scope to create a private repository + */ + "repos/create-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 201: { + headers: { + Location?: string; + }; + content: { + "application/json": components["schemas"]["repository"]; + }; + }; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The name of the repository. */ + name: string; + /** @description A short description of the repository. */ + description?: string; + /** @description A URL with more information about the repository. */ + homepage?: string; + /** + * @description Whether the repository is private. + * @default false + */ + private?: boolean; + /** + * @description Can be `public` or `private`. If your organization is associated with an enterprise account using GitHub Enterprise Cloud or GitHub Enterprise Server 2.20+, `visibility` can also be `internal`. Note: For GitHub Enterprise Server and GitHub AE, this endpoint will only list repositories available to all users on the enterprise. For more information, see "[Creating an internal repository](https://docs.github.com/en/github/creating-cloning-and-archiving-repositories/about-repository-visibility#about-internal-repositories)" in the GitHub Help documentation. + * @enum {string} + */ + visibility?: "public" | "private" | "internal"; + /** + * @description Either `true` to enable issues for this repository or `false` to disable them. + * @default true + */ + has_issues?: boolean; + /** + * @description Either `true` to enable projects for this repository or `false` to disable them. **Note:** If you're creating a repository in an organization that has disabled repository projects, the default is `false`, and if you pass `true`, the API returns an error. + * @default true + */ + has_projects?: boolean; + /** + * @description Either `true` to enable the wiki for this repository or `false` to disable it. + * @default true + */ + has_wiki?: boolean; + /** + * @description Either `true` to make this repo available as a template repository or `false` to prevent it. 
+ * @default false + */ + is_template?: boolean; + /** @description The id of the team that will be granted access to this repository. This is only valid when creating a repository in an organization. */ + team_id?: number; + /** + * @description Pass `true` to create an initial commit with empty README. + * @default false + */ + auto_init?: boolean; + /** @description Desired language or platform [.gitignore template](https://github.com/github/gitignore) to apply. Use the name of the template without the extension. For example, "Haskell". */ + gitignore_template?: string; + /** @description Choose an [open source license template](https://choosealicense.com/) that best suits your needs, and then use the [license keyword](https://docs.github.com/articles/licensing-a-repository/#searching-github-by-license-type) as the `license_template` string. For example, "mit" or "mpl-2.0". */ + license_template?: string; + /** + * @description Either `true` to allow squash-merging pull requests, or `false` to prevent squash-merging. + * @default true + */ + allow_squash_merge?: boolean; + /** + * @description Either `true` to allow merging pull requests with a merge commit, or `false` to prevent merging pull requests with merge commits. + * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Either `true` to allow rebase-merging pull requests, or `false` to prevent rebase-merging. + * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Either `true` to allow auto-merge on pull requests, or `false` to disallow auto-merge. + * @default false + */ + allow_auto_merge?: boolean; + /** + * @description Either `true` to allow automatically deleting head branches when pull requests are merged, or `false` to prevent automatic deletion. + * @default false + */ + delete_branch_on_merge?: boolean; + /** + * @deprecated + * @description Either `true` to allow squash-merge commits to use pull request title, or `false` to use commit message. **This property has been deprecated. Please use `squash_merge_commit_title` instead. + * @default false + */ + use_squash_pr_title_as_default?: boolean; + /** + * @description The default value for a squash merge commit title: + * + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + /** + * @description The default value for a squash merge commit message: + * + * - `PR_BODY` - default to the pull request's body. + * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: "PR_BODY" | "COMMIT_MESSAGES" | "BLANK"; + /** + * @description The default value for a merge commit title. + * + * - `PR_TITLE` - default to the pull request's title. + * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name). + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** + * @description The default value for a merge commit message. + * + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. 
+ * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + }; + }; + }; + }; + /** + * Lists secret scanning alerts for eligible repositories in an organization, from newest to oldest. + * To use this endpoint, you must be an administrator or security manager for the organization, and you must use an access token with the `repo` scope or `security_events` scope. + * For public repositories, you may instead use the `public_repo` scope. + * + * GitHub Apps must have the `secret_scanning_alerts` read permission to use this endpoint. + */ + "secret-scanning/list-alerts-for-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + query: { + /** Set to `open` or `resolved` to only list secret scanning alerts in a specific state. */ + state?: components["parameters"]["secret-scanning-alert-state"]; + /** + * A comma-separated list of secret types to return. By default all secret types are returned. + * See "[Secret scanning patterns](https://docs.github.com/code-security/secret-scanning/secret-scanning-patterns#supported-secrets-for-advanced-security)" + * for a complete list of secret types. + */ + secret_type?: components["parameters"]["secret-scanning-alert-secret-type"]; + /** A comma-separated list of resolutions. Only secret scanning alerts with one of these resolutions are listed. Valid resolutions are `false_positive`, `wont_fix`, `revoked`, `pattern_edited`, `pattern_deleted` or `used_in_tests`. */ + resolution?: components["parameters"]["secret-scanning-alert-resolution"]; + /** The property to sort the results by. `created` means when the alert was created. `updated` means when the alert was updated or resolved. */ + sort?: components["parameters"]["secret-scanning-alert-sort"]; + /** The direction to sort the results by. */ + direction?: components["parameters"]["direction"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** A cursor, as given in the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header). If specified, the query only searches for events before this cursor. To receive an initial cursor on your first request, include an empty "before" query string. */ + before?: components["parameters"]["secret-scanning-pagination-before-org-repo"]; + /** A cursor, as given in the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header). If specified, the query only searches for events after this cursor. To receive an initial cursor on your first request, include an empty "after" query string. */ + after?: components["parameters"]["secret-scanning-pagination-after-org-repo"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["organization-secret-scanning-alert"][]; + }; + }; + 404: components["responses"]["not_found"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * Lists teams that are security managers for an organization. For more information, see "[Managing security managers in your organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization)." 
+ * + * To use this endpoint, you must be an administrator or security manager for the organization, and you must use an access token with the `read:org` scope. + * + * GitHub Apps must have the `administration` organization read permission to use this endpoint. + */ + "orgs/list-security-manager-teams": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["team-simple"][]; + }; + }; + }; + }; + /** + * Adds a team as a security manager for an organization. For more information, see "[Managing security for an organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization) for an organization." + * + * To use this endpoint, you must be an administrator for the organization, and you must use an access token with the `write:org` scope. + * + * GitHub Apps must have the `administration` organization read-write permission to use this endpoint. + */ + "orgs/add-security-manager-team": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + }; + }; + responses: { + /** Response */ + 204: never; + /** The organization has reached the maximum number of security manager teams. */ + 409: unknown; + }; + }; + /** + * Removes the security manager role from a team for an organization. For more information, see "[Managing security managers in your organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization) team from an organization." + * + * To use this endpoint, you must be an administrator for the organization, and you must use an access token with the `admin:org` scope. + * + * GitHub Apps must have the `administration` organization read-write permission to use this endpoint. + */ + "orgs/remove-security-manager-team": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * Gets the summary of the free and paid GitHub Actions minutes used. + * + * Paid minutes only apply to workflows in private repositories that use GitHub-hosted runners. Minutes used is listed for each GitHub-hosted runner operating system. Any job re-runs are also included in the usage. The usage returned includes any minute multipliers for macOS and Windows runners, and is rounded up to the nearest whole minute. For more information, see "[Managing billing for GitHub Actions](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-actions)". + * + * Access tokens must have the `repo` or `admin:org` scope. + */ + "billing/get-github-actions-billing-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. 
*/ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["actions-billing-usage"]; + }; + }; + }; + }; + /** + * Gets the GitHub Advanced Security active committers for an organization per repository. + * + * Each distinct user login across all repositories is counted as a single Advanced Security seat, so the `total_advanced_security_committers` is not the sum of advanced_security_committers for each repository. + * + * If this organization defers to an enterprise for billing, the `total_advanced_security_committers` returned from the organization API may include some users that are in more than one organization, so they will only consume a single Advanced Security seat at the enterprise level. + * + * The total number of repositories with committer information is tracked by the `total_count` field. + */ + "billing/get-github-advanced-security-billing-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Success */ + 200: { + content: { + "application/json": components["schemas"]["advanced-security-active-committers"]; + }; + }; + 403: components["responses"]["code_scanning_forbidden_read"]; + }; + }; + /** + * Gets the free and paid storage used for GitHub Packages in gigabytes. + * + * Paid minutes only apply to packages stored for private repositories. For more information, see "[Managing billing for GitHub Packages](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-packages)." + * + * Access tokens must have the `repo` or `admin:org` scope. + */ + "billing/get-github-packages-billing-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["packages-billing-usage"]; + }; + }; + }; + }; + /** + * Gets the estimated paid and estimated total storage used for GitHub Actions and GitHub Packages. + * + * Paid minutes only apply to packages stored for private repositories. For more information, see "[Managing billing for GitHub Packages](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-packages)." + * + * Access tokens must have the `repo` or `admin:org` scope. + */ + "billing/get-shared-storage-billing-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["combined-billing-usage"]; + }; + }; + }; + }; + /** Lists all teams in an organization that are visible to the authenticated user. */ + "teams/list": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. 
*/ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["team"][]; + }; + }; + 403: components["responses"]["forbidden"]; + }; + }; + /** + * To create a team, the authenticated user must be a member or owner of `{org}`. By default, organization members can create teams. Organization owners can limit team creation to organization owners. For more information, see "[Setting team creation permissions](https://docs.github.com/en/articles/setting-team-creation-permissions-in-your-organization)." + * + * When you create a new team, you automatically become a team maintainer without explicitly adding yourself to the optional array of `maintainers`. For more information, see "[About teams](https://docs.github.com/en/github/setting-up-and-managing-organizations-and-teams/about-teams)". + */ + "teams/create": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["team-full"]; + }; + }; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The name of the team. */ + name: string; + /** @description The description of the team. */ + description?: string; + /** @description List GitHub IDs for organization members who will become team maintainers. */ + maintainers?: string[]; + /** @description The full name (e.g., "organization-name/repository-name") of repositories to add the team to. */ + repo_names?: string[]; + /** + * @description The level of privacy this team should have. The options are: + * **For a non-nested team:** + * \* `secret` - only visible to organization owners and members of this team. + * \* `closed` - visible to all members of this organization. + * Default: `secret` + * **For a parent or child team:** + * \* `closed` - visible to all members of this organization. + * Default for child team: `closed` + * @enum {string} + */ + privacy?: "secret" | "closed"; + /** + * @description **Deprecated**. The permission that new repositories will be added to the team with when none is specified. + * @default pull + * @enum {string} + */ + permission?: "pull" | "push"; + /** @description The ID of a team to set as the parent team. */ + parent_team_id?: number; + }; + }; + }; + }; + /** + * Gets a team using the team's `slug`. GitHub generates the `slug` from the team `name`. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}`. + */ + "teams/get-by-name": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["team-full"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * To delete a team, the authenticated user must be an organization owner or team maintainer. + * + * If you are an organization owner, deleting a parent team will delete all of its child teams as well. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}`. 
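+ *
+ * (Editorial addition, not part of the generated types file: a minimal usage sketch, assuming @octokit/core and a token in GITHUB_TOKEN; the org and team slugs are placeholders.)
+ *
+ * @example
+ * import { Octokit } from "@octokit/core";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ * // Delete a team by its slug; deleting a parent team also deletes its child teams.
+ * await octokit.request("DELETE /orgs/{org}/teams/{team_slug}", {
+ *   org: "example-org",
+ *   team_slug: "example-team",
+ * });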
+ */ + "teams/delete-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * To edit a team, the authenticated user must either be an organization owner or a team maintainer. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PATCH /organizations/{org_id}/team/{team_id}`. + */ + "teams/update-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + }; + }; + responses: { + /** Response when the updated information already exists */ + 200: { + content: { + "application/json": components["schemas"]["team-full"]; + }; + }; + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["team-full"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The name of the team. */ + name?: string; + /** @description The description of the team. */ + description?: string; + /** + * @description The level of privacy this team should have. Editing teams without specifying this parameter leaves `privacy` intact. When a team is nested, the `privacy` for parent teams cannot be `secret`. The options are: + * **For a non-nested team:** + * \* `secret` - only visible to organization owners and members of this team. + * \* `closed` - visible to all members of this organization. + * **For a parent or child team:** + * \* `closed` - visible to all members of this organization. + * @enum {string} + */ + privacy?: "secret" | "closed"; + /** + * @description **Deprecated**. The permission that new repositories will be added to the team with when none is specified. + * @default pull + * @enum {string} + */ + permission?: "pull" | "push" | "admin"; + /** @description The ID of a team to set as the parent team. */ + parent_team_id?: number | null; + }; + }; + }; + }; + /** + * List all discussions on a team's page. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/discussions`. + */ + "teams/list-discussions-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + }; + query: { + /** The direction to sort the results by. */ + direction?: components["parameters"]["direction"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + /** Pinned discussions only filter */ + pinned?: string; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["team-discussion"][]; + }; + }; + }; + }; + /** + * Creates a new discussion post on a team's page. 
OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `POST /organizations/{org_id}/team/{team_id}/discussions`. + */ + "teams/create-discussion-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["team-discussion"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The discussion post's title. */ + title: string; + /** @description The discussion post's body text. */ + body: string; + /** + * @description Private posts are only visible to team members, organization owners, and team maintainers. Public posts are visible to all members of the organization. Set to `true` to create a private post. + * @default false + */ + private?: boolean; + }; + }; + }; + }; + /** + * Get a specific discussion on a team's page. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}`. + */ + "teams/get-discussion-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + /** The number that identifies the discussion. */ + discussion_number: components["parameters"]["discussion-number"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["team-discussion"]; + }; + }; + }; + }; + /** + * Delete a discussion from a team's page. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}`. + */ + "teams/delete-discussion-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + /** The number that identifies the discussion. */ + discussion_number: components["parameters"]["discussion-number"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * Edits the title and body text of a discussion post. Only the parameters you provide are updated. 
OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PATCH /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}`. + */ + "teams/update-discussion-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + /** The number that identifies the discussion. */ + discussion_number: components["parameters"]["discussion-number"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["team-discussion"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The discussion post's title. */ + title?: string; + /** @description The discussion post's body text. */ + body?: string; + }; + }; + }; + }; + /** + * List all comments on a team discussion. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}/comments`. + */ + "teams/list-discussion-comments-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + /** The number that identifies the discussion. */ + discussion_number: components["parameters"]["discussion-number"]; + }; + query: { + /** The direction to sort the results by. */ + direction?: components["parameters"]["direction"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["team-discussion-comment"][]; + }; + }; + }; + }; + /** + * Creates a new comment on a team discussion. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `POST /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}/comments`. + */ + "teams/create-discussion-comment-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + /** The number that identifies the discussion. 
*/ + discussion_number: components["parameters"]["discussion-number"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["team-discussion-comment"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The discussion comment's body text. */ + body: string; + }; + }; + }; + }; + /** + * Get a specific comment on a team discussion. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}/comments/{comment_number}`. + */ + "teams/get-discussion-comment-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + /** The number that identifies the discussion. */ + discussion_number: components["parameters"]["discussion-number"]; + /** The number that identifies the comment. */ + comment_number: components["parameters"]["comment-number"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["team-discussion-comment"]; + }; + }; + }; + }; + /** + * Deletes a comment on a team discussion. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}/comments/{comment_number}`. + */ + "teams/delete-discussion-comment-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + /** The number that identifies the discussion. */ + discussion_number: components["parameters"]["discussion-number"]; + /** The number that identifies the comment. */ + comment_number: components["parameters"]["comment-number"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * Edits the body text of a discussion comment. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PATCH /organizations/{org_id}/team/{team_id}/discussions/{discussion_number}/comments/{comment_number}`. + */ + "teams/update-discussion-comment-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + /** The number that identifies the discussion. */ + discussion_number: components["parameters"]["discussion-number"]; + /** The number that identifies the comment. */ + comment_number: components["parameters"]["comment-number"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["team-discussion-comment"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The discussion comment's body text. 
*/ + body: string; + }; + }; + }; + }; + /** + * List the reactions to a [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments/). OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/:org_id/team/:team_id/discussions/:discussion_number/comments/:comment_number/reactions`. + */ + "reactions/list-for-team-discussion-comment-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + /** The number that identifies the discussion. */ + discussion_number: components["parameters"]["discussion-number"]; + /** The number that identifies the comment. */ + comment_number: components["parameters"]["comment-number"]; + }; + query: { + /** Returns a single [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types). Omit this parameter to list all reactions to a team discussion comment. */ + content?: + | "+1" + | "-1" + | "laugh" + | "confused" + | "heart" + | "hooray" + | "rocket" + | "eyes"; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["reaction"][]; + }; + }; + }; + }; + /** + * Create a reaction to a [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). A response with an HTTP `200` status means that you already added the reaction type to this team discussion comment. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `POST /organizations/:org_id/team/:team_id/discussions/:discussion_number/comments/:comment_number/reactions`. + */ + "reactions/create-for-team-discussion-comment-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + /** The number that identifies the discussion. */ + discussion_number: components["parameters"]["discussion-number"]; + /** The number that identifies the comment. */ + comment_number: components["parameters"]["comment-number"]; + }; + }; + responses: { + /** Response when the reaction type has already been added to this team discussion comment */ + 200: { + content: { + "application/json": components["schemas"]["reaction"]; + }; + }; + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["reaction"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types) to add to the team discussion comment. 
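// --- Illustrative usage sketch (not part of the generated definitions) ---
// Extracts the reaction `content` filter used when listing reactions on a team discussion
// comment; `NonNullable` drops the `undefined` added by the optional marker. `operations`
// and "./types" are assumed export names for this module.
import type { operations } from "./types";

type ReactionContent = NonNullable<
  operations["reactions/list-for-team-discussion-comment-in-org"]["parameters"]["query"]["content"]
>;

// Narrows to the eight literals listed above: "+1", "-1", "laugh", "confused", "heart",
// "hooray", "rocket", "eyes".
const onlyHooray: ReactionContent = "hooray";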
+ * @enum {string} + */ + content: + | "+1" + | "-1" + | "laugh" + | "confused" + | "heart" + | "hooray" + | "rocket" + | "eyes"; + }; + }; + }; + }; + /** + * **Note:** You can also specify a team or organization with `team_id` and `org_id` using the route `DELETE /organizations/:org_id/team/:team_id/discussions/:discussion_number/comments/:comment_number/reactions/:reaction_id`. + * + * Delete a reaction to a [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + "reactions/delete-for-team-discussion-comment": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + /** The number that identifies the discussion. */ + discussion_number: components["parameters"]["discussion-number"]; + /** The number that identifies the comment. */ + comment_number: components["parameters"]["comment-number"]; + /** The unique identifier of the reaction. */ + reaction_id: components["parameters"]["reaction-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * List the reactions to a [team discussion](https://docs.github.com/rest/reference/teams#discussions). OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/:org_id/team/:team_id/discussions/:discussion_number/reactions`. + */ + "reactions/list-for-team-discussion-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + /** The number that identifies the discussion. */ + discussion_number: components["parameters"]["discussion-number"]; + }; + query: { + /** Returns a single [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types). Omit this parameter to list all reactions to a team discussion. */ + content?: + | "+1" + | "-1" + | "laugh" + | "confused" + | "heart" + | "hooray" + | "rocket" + | "eyes"; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["reaction"][]; + }; + }; + }; + }; + /** + * Create a reaction to a [team discussion](https://docs.github.com/rest/reference/teams#discussions). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). A response with an HTTP `200` status means that you already added the reaction type to this team discussion. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `POST /organizations/:org_id/team/:team_id/discussions/:discussion_number/reactions`. + */ + "reactions/create-for-team-discussion-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. 
*/ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + /** The number that identifies the discussion. */ + discussion_number: components["parameters"]["discussion-number"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["reaction"]; + }; + }; + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["reaction"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types) to add to the team discussion. + * @enum {string} + */ + content: + | "+1" + | "-1" + | "laugh" + | "confused" + | "heart" + | "hooray" + | "rocket" + | "eyes"; + }; + }; + }; + }; + /** + * **Note:** You can also specify a team or organization with `team_id` and `org_id` using the route `DELETE /organizations/:org_id/team/:team_id/discussions/:discussion_number/reactions/:reaction_id`. + * + * Delete a reaction to a [team discussion](https://docs.github.com/rest/reference/teams#discussions). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + "reactions/delete-for-team-discussion": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + /** The number that identifies the discussion. */ + discussion_number: components["parameters"]["discussion-number"]; + /** The unique identifier of the reaction. */ + reaction_id: components["parameters"]["reaction-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * The return hash contains a `role` field which refers to the Organization Invitation role and will be one of the following values: `direct_member`, `admin`, `billing_manager`, `hiring_manager`, or `reinstate`. If the invitee is not a GitHub member, the `login` field in the return hash will be `null`. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/invitations`. + */ + "teams/list-pending-invitations-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["organization-invitation"][]; + }; + }; + }; + }; + /** + * Team members will include the members of child teams. + * + * To list members in a team, the team must be visible to the authenticated user. + */ + "teams/list-members-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + }; + query: { + /** Filters members returned by their role in the team. */ + role?: "member" | "maintainer" | "all"; + /** The number of results per page (max 100). 
*/ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["simple-user"][]; + }; + }; + }; + }; + /** + * Team members will include the members of child teams. + * + * To get a user's membership with a team, the team must be visible to the authenticated user. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/memberships/{username}`. + * + * **Note:** + * The response contains the `state` of the membership and the member's `role`. + * + * The `role` for organization owners is set to `maintainer`. For more information about `maintainer` roles, see [Create a team](https://docs.github.com/rest/reference/teams#create-a-team). + */ + "teams/get-membership-for-user-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["team-membership"]; + }; + }; + /** if user has no team membership */ + 404: unknown; + }; + }; + /** + * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Adds an organization member to a team. An authenticated organization owner or team maintainer can add organization members to a team. + * + * **Note:** When you have team synchronization set up for a team with your organization's identity provider (IdP), you will see an error if you attempt to use the API for making changes to the team's membership. If you have access to manage group membership in your IdP, you can manage GitHub team membership through your identity provider, which automatically adds and removes team members in an organization. For more information, see "[Synchronizing teams between your identity provider and GitHub](https://docs.github.com/articles/synchronizing-teams-between-your-identity-provider-and-github/)." + * + * An organization owner can add someone who is not part of the team's organization to a team. When an organization owner adds someone to a team who is not an organization member, this endpoint will send an invitation to the person via email. This newly-created membership will be in the "pending" state until the person accepts the invitation, at which point the membership will transition to the "active" state and the user will be added as a member of the team. + * + * If the user is already a member of the team, this endpoint will update the team member's role. To update the membership of a team member, the authenticated user must be an organization owner or a team maintainer. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PUT /organizations/{org_id}/team/{team_id}/memberships/{username}`. + */ + "teams/add-or-update-membership-for-user-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive.
*/ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["team-membership"]; + }; + }; + /** Forbidden if team synchronization is set up */ + 403: unknown; + /** Unprocessable Entity if you attempt to add an organization to a team */ + 422: unknown; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The role that this user should have in the team. + * @default member + * @enum {string} + */ + role?: "member" | "maintainer"; + }; + }; + }; + }; + /** + * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * To remove a membership between a user and a team, the authenticated user must have 'admin' permissions to the team or be an owner of the organization that the team is associated with. Removing team membership does not delete the user, it just removes their membership from the team. + * + * **Note:** When you have team synchronization set up for a team with your organization's identity provider (IdP), you will see an error if you attempt to use the API for making changes to the team's membership. If you have access to manage group membership in your IdP, you can manage GitHub team membership through your identity provider, which automatically adds and removes team members in an organization. For more information, see "[Synchronizing teams between your identity provider and GitHub](https://docs.github.com/articles/synchronizing-teams-between-your-identity-provider-and-github/)." + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}/memberships/{username}`. + */ + "teams/remove-membership-for-user-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 204: never; + /** Forbidden if team synchronization is set up */ + 403: unknown; + }; + }; + /** + * Lists the organization projects for a team. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/projects`. + */ + "teams/list-projects-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["team-project"][]; + }; + }; + }; + }; + /** + * Checks whether a team has `read`, `write`, or `admin` permissions for an organization project. 
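// --- Illustrative usage sketch (not part of the generated definitions) ---
// The `role` accepted by "teams/add-or-update-membership-for-user-in-org" above; omitting
// it defaults to "member". `operations` and "./types" are assumed export names.
import type { operations } from "./types";

type TeamMembershipRole =
  operations["teams/add-or-update-membership-for-user-in-org"]["requestBody"]["content"]["application/json"]["role"];

// Promote an existing organization member to team maintainer.
const promoteTo: TeamMembershipRole = "maintainer";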
The response includes projects inherited from a parent team. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/projects/{project_id}`. + */ + "teams/check-permissions-for-project-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + /** The unique identifier of the project. */ + project_id: components["parameters"]["project-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["team-project"]; + }; + }; + /** Not Found if project is not managed by this team */ + 404: unknown; + }; + }; + /** + * Adds an organization project to a team. To add a project to a team or update the team's permission on a project, the authenticated user must have `admin` permissions for the project. The project and team must be part of the same organization. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PUT /organizations/{org_id}/team/{team_id}/projects/{project_id}`. + */ + "teams/add-or-update-project-permissions-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + /** The unique identifier of the project. */ + project_id: components["parameters"]["project-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + /** Forbidden if the project is not owned by the organization */ + 403: { + content: { + "application/json": { + message?: string; + documentation_url?: string; + }; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The permission to grant to the team for this project. Default: the team's `permission` attribute will be used to determine what permission to grant the team on this project. Note that, if you choose not to pass any parameters, you'll need to set `Content-Length` to zero when calling this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." + * @enum {string} + */ + permission?: "read" | "write" | "admin"; + } | null; + }; + }; + }; + /** + * Removes an organization project from a team. An organization owner or a team maintainer can remove any project from the team. To remove a project from a team as an organization member, the authenticated user must have `read` access to both the team and project, or `admin` access to the team or project. This endpoint removes the project from the team, but does not delete the project. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}/projects/{project_id}`. + */ + "teams/remove-project-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + /** The unique identifier of the project. */ + project_id: components["parameters"]["project-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * Lists a team's repositories visible to the authenticated user. 
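// --- Illustrative usage sketch (not part of the generated definitions) ---
// Calls the documented alternate route PUT /organizations/{org_id}/team/{team_id}/projects/{project_id}
// with no `permission` payload. As noted above, an empty body must still advertise
// Content-Length: 0; sending an empty string achieves that with fetch. The base URL,
// Bearer-token header, and a Node 18+ style fetch are assumptions; IDs and token are placeholders.
async function grantDefaultProjectPermission(
  orgId: number,
  teamId: number,
  projectId: number,
  token: string
): Promise<void> {
  const res = await fetch(
    `https://api.github.com/organizations/${orgId}/team/${teamId}/projects/${projectId}`,
    {
      method: "PUT",
      headers: {
        Accept: "application/vnd.github+json",
        Authorization: `Bearer ${token}`,
      },
      body: "", // empty payload -> Content-Length: 0, so the team's own `permission` attribute applies
    }
  );
  if (res.status !== 204) {
    // 403 is returned when the project is not owned by the organization.
    throw new Error(`Unexpected status ${res.status}`);
  }
}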
+ * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/repos`. + */ + "teams/list-repos-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["minimal-repository"][]; + }; + }; + }; + }; + /** + * Checks whether a team has `admin`, `push`, `maintain`, `triage`, or `pull` permission for a repository. Repositories inherited through a parent team will also be checked. + * + * You can also get information about the specified repository, including what permissions the team grants on it, by passing the following custom [media type](https://docs.github.com/rest/overview/media-types/) via the `application/vnd.github.v3.repository+json` accept header. + * + * If a team doesn't have permission for the repository, you will receive a `404 Not Found` response status. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/repos/{owner}/{repo}`. + */ + "teams/check-permissions-for-repo-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Alternative response with repository permissions */ + 200: { + content: { + "application/json": components["schemas"]["team-repository"]; + }; + }; + /** Response if team has permission for the repository. This is the response when the repository media type hasn't been provided in the Accept header. */ + 204: never; + /** Not Found if team does not have permission for the repository */ + 404: unknown; + }; + }; + /** + * To add a repository to a team or update the team's permission on a repository, the authenticated user must have admin access to the repository, and must be able to see the team. The repository must be owned by the organization, or a direct fork of a repository owned by the organization. You will get a `422 Unprocessable Entity` status if you attempt to add a repository to a team that is not owned by the organization. Note that, if you choose not to pass any parameters, you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `PUT /organizations/{org_id}/team/{team_id}/repos/{owner}/{repo}`.
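// --- Illustrative usage sketch (not part of the generated definitions) ---
// Distinguishes the 200 / 204 / 404 outcomes of "teams/check-permissions-for-repo-in-org"
// by sending the custom media type described above. The documented org_id/team_id route is
// used; the base URL, auth scheme, and a Node 18+ style fetch are assumptions.
async function teamRepoPermission(
  orgId: number,
  teamId: number,
  owner: string,
  repo: string,
  token: string
): Promise<unknown> {
  const res = await fetch(
    `https://api.github.com/organizations/${orgId}/team/${teamId}/repos/${owner}/${repo}`,
    {
      headers: {
        Accept: "application/vnd.github.v3.repository+json", // requests the 200 "team-repository" body
        Authorization: `Bearer ${token}`,
      },
    }
  );
  if (res.status === 404) return null; // team has no permission for the repository
  if (res.status === 204) return true; // permission exists but no repository body was requested
  return res.json(); // components["schemas"]["team-repository"]
}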
+ * + * For more information about the permission levels, see "[Repository permission levels for an organization](https://docs.github.com/en/github/setting-up-and-managing-organizations-and-teams/repository-permission-levels-for-an-organization#permission-levels-for-repositories-owned-by-an-organization)". + */ + "teams/add-or-update-repo-permissions-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The permission to grant the team on this repository. We accept the following permissions to be set: `pull`, `triage`, `push`, `maintain`, `admin` and you can also specify a custom repository role name, if the owning organization has defined any. If no permission is specified, the team's `permission` attribute will be used to determine what permission to grant the team on this repository. + * @default push + */ + permission?: string; + }; + }; + }; + }; + /** + * If the authenticated user is an organization owner or a team maintainer, they can remove any repositories from the team. To remove a repository from a team as an organization member, the authenticated user must have admin access to the repository and must be able to see the team. This does not delete the repository, it just removes it from the team. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `DELETE /organizations/{org_id}/team/{team_id}/repos/{owner}/{repo}`. + */ + "teams/remove-repo-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * Lists the child teams of the team specified by `{team_slug}`. + * + * **Note:** You can also specify a team by `org_id` and `team_id` using the route `GET /organizations/{org_id}/team/{team_id}/teams`. + */ + "teams/list-child-in-org": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The slug of the team name. */ + team_slug: components["parameters"]["team-slug"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** if child teams exist */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["team"][]; + }; + }; + }; + }; + /** + * Enables or disables the specified security feature for all repositories in an organization. + * + * To use this endpoint, you must be an organization owner or be a member of a team with the security manager role.
+ * A token with the 'write:org' scope is also required. + * + * GitHub Apps must have the `organization_administration:write` permission to use this endpoint. + * + * For more information, see "[Managing security managers in your organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization)." + */ + "orgs/enable-or-disable-security-product-on-all-org-repos": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + /** The security feature to enable or disable. */ + security_product: components["parameters"]["security-product"]; + /** + * The action to take. + * + * `enable_all` means to enable the specified security feature for all repositories in the organization. + * `disable_all` means to disable the specified security feature for all repositories in the organization. + */ + enablement: components["parameters"]["org-security-product-enablement"]; + }; + }; + responses: { + /** Action started */ + 204: never; + /** The action could not be taken due to an in progress enablement, or a policy is preventing enablement */ + 422: unknown; + }; + }; + "projects/get-card": { + parameters: { + path: { + /** The unique identifier of the card. */ + card_id: components["parameters"]["card-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["project-card"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + "projects/delete-card": { + parameters: { + path: { + /** The unique identifier of the card. */ + card_id: components["parameters"]["card-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + /** Forbidden */ + 403: { + content: { + "application/json": { + message?: string; + documentation_url?: string; + errors?: string[]; + }; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + "projects/update-card": { + parameters: { + path: { + /** The unique identifier of the card. */ + card_id: components["parameters"]["card-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["project-card"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed_simple"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The project card's note + * @example Update all gems + */ + note?: string | null; + /** + * @description Whether or not the card is archived + * @example false + */ + archived?: boolean; + }; + }; + }; + }; + "projects/move-card": { + parameters: { + path: { + /** The unique identifier of the card. 
*/ + card_id: components["parameters"]["card-id"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": { [key: string]: unknown }; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + /** Forbidden */ + 403: { + content: { + "application/json": { + message?: string; + documentation_url?: string; + errors?: { + code?: string; + message?: string; + resource?: string; + field?: string; + }[]; + }; + }; + }; + 422: components["responses"]["validation_failed"]; + /** Response */ + 503: { + content: { + "application/json": { + code?: string; + message?: string; + documentation_url?: string; + errors?: { + code?: string; + message?: string; + }[]; + }; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The position of the card in a column. Can be one of: `top`, `bottom`, or `after:` to place after the specified card. + * @example bottom + */ + position: string; + /** + * @description The unique identifier of the column the card should be moved to + * @example 42 + */ + column_id?: number; + }; + }; + }; + }; + "projects/get-column": { + parameters: { + path: { + /** The unique identifier of the column. */ + column_id: components["parameters"]["column-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["project-column"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + "projects/delete-column": { + parameters: { + path: { + /** The unique identifier of the column. */ + column_id: components["parameters"]["column-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + "projects/update-column": { + parameters: { + path: { + /** The unique identifier of the column. */ + column_id: components["parameters"]["column-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["project-column"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description Name of the project column + * @example Remaining tasks + */ + name: string; + }; + }; + }; + }; + "projects/list-cards": { + parameters: { + path: { + /** The unique identifier of the column. */ + column_id: components["parameters"]["column-id"]; + }; + query: { + /** Filters the project cards that are returned by the card's state. */ + archived_state?: "all" | "archived" | "not_archived"; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["project-card"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + "projects/create-card": { + parameters: { + path: { + /** The unique identifier of the column. 
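// --- Illustrative usage sketch (not part of the generated definitions) ---
// The "projects/move-card" body defined above: `position` is a plain string accepting
// "top", "bottom", or "after:<card_id>", and `column_id` optionally targets another column.
// `operations` and "./types" are assumed export names.
import type { operations } from "./types";

type MoveCardBody =
  operations["projects/move-card"]["requestBody"]["content"]["application/json"];

// Place the card directly after card 1543 in column 42 (both IDs are placeholders).
const moveCard: MoveCardBody = { position: "after:1543", column_id: 42 };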
*/ + column_id: components["parameters"]["column-id"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["project-card"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + /** Validation failed */ + 422: { + content: { + "application/json": + | components["schemas"]["validation-error"] + | components["schemas"]["validation-error-simple"]; + }; + }; + /** Response */ + 503: { + content: { + "application/json": { + code?: string; + message?: string; + documentation_url?: string; + errors?: { + code?: string; + message?: string; + }[]; + }; + }; + }; + }; + requestBody: { + content: { + "application/json": + | { + /** + * @description The project card's note + * @example Update all gems + */ + note: string | null; + } + | { + /** + * @description The unique identifier of the content associated with the card + * @example 42 + */ + content_id: number; + /** + * @description The piece of content associated with the card + * @example PullRequest + */ + content_type: string; + }; + }; + }; + }; + "projects/move-column": { + parameters: { + path: { + /** The unique identifier of the column. */ + column_id: components["parameters"]["column-id"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": { [key: string]: unknown }; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed_simple"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The position of the column in a project. Can be one of: `first`, `last`, or `after:` to place after the specified column. + * @example last + */ + position: string; + }; + }; + }; + }; + /** Gets a project by its `id`. Returns a `404 Not Found` status if projects are disabled. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. */ + "projects/get": { + parameters: { + path: { + /** The unique identifier of the project. */ + project_id: components["parameters"]["project-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["project"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + /** Deletes a project board. Returns a `404 Not Found` status if projects are disabled. */ + "projects/delete": { + parameters: { + path: { + /** The unique identifier of the project. */ + project_id: components["parameters"]["project-id"]; + }; + }; + responses: { + /** Delete Success */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + /** Forbidden */ + 403: { + content: { + "application/json": { + message?: string; + documentation_url?: string; + errors?: string[]; + }; + }; + }; + 404: components["responses"]["not_found"]; + 410: components["responses"]["gone"]; + }; + }; + /** Updates a project board's information. Returns a `404 Not Found` status if projects are disabled. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. 
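// --- Illustrative usage sketch (not part of the generated definitions) ---
// "projects/create-card" accepts either a free-form note or a reference to existing
// content, which the union above encodes. `operations` and "./types" are assumed names.
import type { operations } from "./types";

type CreateCardBody =
  operations["projects/create-card"]["requestBody"]["content"]["application/json"];

const noteCard: CreateCardBody = { note: "Update all gems" };
const linkedCard: CreateCardBody = { content_id: 42, content_type: "PullRequest" };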
*/ + "projects/update": { + parameters: { + path: { + /** The unique identifier of the project. */ + project_id: components["parameters"]["project-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["project"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + /** Forbidden */ + 403: { + content: { + "application/json": { + message?: string; + documentation_url?: string; + errors?: string[]; + }; + }; + }; + /** Not Found if the authenticated user does not have access to the project */ + 404: unknown; + 410: components["responses"]["gone"]; + 422: components["responses"]["validation_failed_simple"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description Name of the project + * @example Week One Sprint + */ + name?: string; + /** + * @description Body of the project + * @example This project represents the sprint of the first week in January + */ + body?: string | null; + /** + * @description State of the project; either 'open' or 'closed' + * @example open + */ + state?: string; + /** + * @description The baseline permission that all organization members have on this project + * @enum {string} + */ + organization_permission?: "read" | "write" | "admin" | "none"; + /** @description Whether or not this project can be seen by everyone. */ + private?: boolean; + }; + }; + }; + }; + /** Lists the collaborators for an organization project. For a project, the list of collaborators includes outside collaborators, organization members that are direct collaborators, organization members with access through team memberships, organization members with access through default organization permissions, and organization owners. You must be an organization owner or a project `admin` to list collaborators. */ + "projects/list-collaborators": { + parameters: { + path: { + /** The unique identifier of the project. */ + project_id: components["parameters"]["project-id"]; + }; + query: { + /** Filters the collaborators by their affiliation. `outside` means outside collaborators of a project that are not a member of the project's organization. `direct` means collaborators with permissions to a project, regardless of organization membership status. `all` means all collaborators the authenticated user can see. */ + affiliation?: "outside" | "direct" | "all"; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["simple-user"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** Adds a collaborator to an organization project and sets their permission level. You must be an organization owner or a project `admin` to add a collaborator. */ + "projects/add-collaborator": { + parameters: { + path: { + /** The unique identifier of the project. */ + project_id: components["parameters"]["project-id"]; + /** The handle for the GitHub user account. 
*/ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The permission to grant the collaborator. + * @default write + * @example write + * @enum {string} + */ + permission?: "read" | "write" | "admin"; + } | null; + }; + }; + }; + /** Removes a collaborator from an organization project. You must be an organization owner or a project `admin` to remove a collaborator. */ + "projects/remove-collaborator": { + parameters: { + path: { + /** The unique identifier of the project. */ + project_id: components["parameters"]["project-id"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** Returns the collaborator's permission level for an organization project. Possible values for the `permission` key: `admin`, `write`, `read`, `none`. You must be an organization owner or a project `admin` to review a user's permission level. */ + "projects/get-permission-for-user": { + parameters: { + path: { + /** The unique identifier of the project. */ + project_id: components["parameters"]["project-id"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["project-collaborator-permission"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + "projects/list-columns": { + parameters: { + path: { + /** The unique identifier of the project. */ + project_id: components["parameters"]["project-id"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["project-column"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + "projects/create-column": { + parameters: { + path: { + /** The unique identifier of the project. 
*/ + project_id: components["parameters"]["project-id"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["project-column"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed_simple"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description Name of the project column + * @example Remaining tasks + */ + name: string; + }; + }; + }; + }; + /** + * **Note:** Accessing this endpoint does not count against your REST API rate limit. + * + * **Note:** The `rate` object is deprecated. If you're writing new API client code or updating existing code, you should use the `core` object instead of the `rate` object. The `core` object contains the same information that is present in the `rate` object. + */ + "rate-limit/get": { + parameters: {}; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["rate-limit-overview"]; + }; + }; + 304: components["responses"]["not_modified"]; + 404: components["responses"]["not_found"]; + }; + }; + /** The `parent` and `source` objects are present when the repository is a fork. `parent` is the repository this repository was forked from, `source` is the ultimate source for the network. */ + "repos/get": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["full-repository"]; + }; + }; + 301: components["responses"]["moved_permanently"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Deleting a repository requires admin access. If OAuth is used, the `delete_repo` scope is required. + * + * If an organization owner has configured the organization to prevent members from deleting organization-owned + * repositories, you will get a `403 Forbidden` response. + */ + "repos/delete": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 204: never; + 307: components["responses"]["temporary_redirect"]; + /** If an organization owner has configured the organization to prevent members from deleting organization-owned repositories, a member will get this response: */ + 403: { + content: { + "application/json": { + message?: string; + documentation_url?: string; + }; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** **Note**: To edit a repository's topics, use the [Replace all repository topics](https://docs.github.com/rest/reference/repos#replace-all-repository-topics) endpoint. */ + "repos/update": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. 
*/ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["full-repository"]; + }; + }; + 307: components["responses"]["temporary_redirect"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The name of the repository. */ + name?: string; + /** @description A short description of the repository. */ + description?: string; + /** @description A URL with more information about the repository. */ + homepage?: string; + /** + * @description Either `true` to make the repository private or `false` to make it public. Default: `false`. + * **Note**: You will get a `422` error if the organization restricts [changing repository visibility](https://docs.github.com/articles/repository-permission-levels-for-an-organization#changing-the-visibility-of-repositories) to organization owners and a non-owner tries to change the value of private. + * @default false + */ + private?: boolean; + /** + * @description Can be `public` or `private`. If your organization is associated with an enterprise account using GitHub Enterprise Cloud or GitHub Enterprise Server 2.20+, `visibility` can also be `internal`." + * @enum {string} + */ + visibility?: "public" | "private" | "internal"; + /** + * @description Specify which security and analysis features to enable or disable for the repository. + * + * To use this parameter, you must have admin permissions for the repository or be an owner or security manager for the organization that owns the repository. For more information, see "[Managing security managers in your organization](https://docs.github.com/organizations/managing-peoples-access-to-your-organization-with-roles/managing-security-managers-in-your-organization)." + * + * For example, to enable GitHub Advanced Security, use this data in the body of the `PATCH` request: + * `{ "security_and_analysis": {"advanced_security": { "status": "enabled" } } }`. + * + * You can check which security and analysis features are currently enabled by using a `GET /repos/{owner}/{repo}` request. + */ + security_and_analysis?: { + /** @description Use the `status` property to enable or disable GitHub Advanced Security for this repository. For more information, see "[About GitHub Advanced Security](/github/getting-started-with-github/learning-about-github/about-github-advanced-security)." */ + advanced_security?: { + /** @description Can be `enabled` or `disabled`. */ + status?: string; + }; + /** @description Use the `status` property to enable or disable secret scanning for this repository. For more information, see "[About secret scanning](/code-security/secret-security/about-secret-scanning)." */ + secret_scanning?: { + /** @description Can be `enabled` or `disabled`. */ + status?: string; + }; + /** @description Use the `status` property to enable or disable secret scanning push protection for this repository. For more information, see "[Protecting pushes with secret scanning](/code-security/secret-scanning/protecting-pushes-with-secret-scanning)." */ + secret_scanning_push_protection?: { + /** @description Can be `enabled` or `disabled`. */ + status?: string; + }; + } | null; + /** + * @description Either `true` to enable issues for this repository or `false` to disable them. 
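// --- Illustrative usage sketch (not part of the generated definitions) ---
// The `security_and_analysis` fragment of the "repos/update" PATCH body, echoing the
// inline example above that enables GitHub Advanced Security. `operations` and "./types"
// are assumed export names.
import type { operations } from "./types";

type UpdateRepoBody =
  operations["repos/update"]["requestBody"]["content"]["application/json"];

const enableAdvancedSecurity: UpdateRepoBody = {
  security_and_analysis: {
    advanced_security: { status: "enabled" }, // "enabled" or "disabled"
  },
};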
+ * @default true + */ + has_issues?: boolean; + /** + * @description Either `true` to enable projects for this repository or `false` to disable them. **Note:** If you're creating a repository in an organization that has disabled repository projects, the default is `false`, and if you pass `true`, the API returns an error. + * @default true + */ + has_projects?: boolean; + /** + * @description Either `true` to enable the wiki for this repository or `false` to disable it. + * @default true + */ + has_wiki?: boolean; + /** + * @description Either `true` to make this repo available as a template repository or `false` to prevent it. + * @default false + */ + is_template?: boolean; + /** @description Updates the default branch for this repository. */ + default_branch?: string; + /** + * @description Either `true` to allow squash-merging pull requests, or `false` to prevent squash-merging. + * @default true + */ + allow_squash_merge?: boolean; + /** + * @description Either `true` to allow merging pull requests with a merge commit, or `false` to prevent merging pull requests with merge commits. + * @default true + */ + allow_merge_commit?: boolean; + /** + * @description Either `true` to allow rebase-merging pull requests, or `false` to prevent rebase-merging. + * @default true + */ + allow_rebase_merge?: boolean; + /** + * @description Either `true` to allow auto-merge on pull requests, or `false` to disallow auto-merge. + * @default false + */ + allow_auto_merge?: boolean; + /** + * @description Either `true` to allow automatically deleting head branches when pull requests are merged, or `false` to prevent automatic deletion. + * @default false + */ + delete_branch_on_merge?: boolean; + /** + * @description Either `true` to always allow a pull request head branch that is behind its base branch to be updated even if it is not required to be up to date before merging, or false otherwise. + * @default false + */ + allow_update_branch?: boolean; + /** + * @deprecated + * @description Either `true` to allow squash-merge commits to use pull request title, or `false` to use commit message. **This property has been deprecated. Please use `squash_merge_commit_title` instead. + * @default false + */ + use_squash_pr_title_as_default?: boolean; + /** + * @description The default value for a squash merge commit title: + * + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + /** + * @description The default value for a squash merge commit message: + * + * - `PR_BODY` - default to the pull request's body. + * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: "PR_BODY" | "COMMIT_MESSAGES" | "BLANK"; + /** + * @description The default value for a merge commit title. + * + * - `PR_TITLE` - default to the pull request's title. + * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name). + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** + * @description The default value for a merge commit message. + * + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. 
+ * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + /** + * @description `true` to archive this repository. **Note**: You cannot unarchive repositories through the API. + * @default false + */ + archived?: boolean; + /** + * @description Either `true` to allow private forks, or `false` to prevent private forks. + * @default false + */ + allow_forking?: boolean; + /** + * @description Either `true` to require contributors to sign off on web-based commits, or `false` to not require contributors to sign off on web-based commits. + * @default false + */ + web_commit_signoff_required?: boolean; + }; + }; + }; + }; + /** Lists all artifacts for a repository. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. */ + "actions/list-artifacts-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": { + total_count: number; + artifacts: components["schemas"]["artifact"][]; + }; + }; + }; + }; + }; + /** Gets a specific artifact for a workflow run. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. */ + "actions/get-artifact": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the artifact. */ + artifact_id: components["parameters"]["artifact-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["artifact"]; + }; + }; + }; + }; + /** Deletes an artifact for a workflow run. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. */ + "actions/delete-artifact": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the artifact. */ + artifact_id: components["parameters"]["artifact-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * Gets a redirect URL to download an archive for a repository. This URL expires after 1 minute. Look for `Location:` in + * the response header to find the URL for the download. The `:archive_format` must be `zip`. Anyone with read access to + * the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. + * GitHub Apps must have the `actions:read` permission to use this endpoint. 
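// --- Hedged usage sketch (not part of the generated types above) -------------
// The request body typed above belongs to the "update a repository" operation
// (PATCH /repos/{owner}/{repo}). The snippet below shows how one *might* send a
// subset of those optional fields. It assumes @octokit/core, which is NOT a
// dependency of this repository, and uses placeholder owner/repo names.
import { Octokit } from "@octokit/core";

async function hardenRepoSettings() {
  const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
  // Only the fields we want to change need to be sent; everything else is optional.
  const { data } = await octokit.request("PATCH /repos/{owner}/{repo}", {
    owner: "octo-org",
    repo: "octo-repo",
    delete_branch_on_merge: true,
    allow_squash_merge: true,
    squash_merge_commit_title: "PR_TITLE",
    squash_merge_commit_message: "PR_BODY",
  });
  // The 200 response is the "full-repository" schema referenced above.
  console.log(`Updated ${data.full_name}`);
}

hardenRepoSettings().catch(console.error);
// -----------------------------------------------------------------------------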
+ */ + "actions/download-artifact": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the artifact. */ + artifact_id: components["parameters"]["artifact-id"]; + archive_format: string; + }; + }; + responses: { + /** Response */ + 302: never; + 410: components["responses"]["gone"]; + }; + }; + /** + * Gets GitHub Actions cache usage for a repository. + * The data fetched using this API is refreshed approximately every 5 minutes, so values returned from this endpoint may take at least 5 minutes to get updated. + * Anyone with read access to the repository can use this endpoint. If the repository is private, you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + "actions/get-actions-cache-usage": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["actions-cache-usage-by-repository"]; + }; + }; + }; + }; + /** + * Lists the GitHub Actions caches for a repository. + * You must authenticate using an access token with the `repo` scope to use this endpoint. + * GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + "actions/get-actions-cache-list": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + /** The Git reference for the results you want to list. The `ref` for a branch can be formatted either as `refs/heads/` or simply ``. To reference a pull request use `refs/pull//merge`. */ + ref?: components["parameters"]["git-ref"]; + /** An explicit key or prefix for identifying the cache */ + key?: components["parameters"]["actions-cache-key"]; + /** The property to sort the results by. `created_at` means when the cache was created. `last_accessed_at` means when the cache was last accessed. `size_in_bytes` is the size of the cache in bytes. */ + sort?: components["parameters"]["actions-cache-list-sort"]; + /** The direction to sort the results by. */ + direction?: components["parameters"]["direction"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["actions-cache-list"]; + }; + }; + }; + }; + /** + * Deletes one or more GitHub Actions caches for a repository, using a complete cache key. By default, all caches that match the provided key are deleted, but you can optionally provide a Git ref to restrict deletions to caches that match both the provided key and the Git ref. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. + * + * GitHub Apps must have the `actions:write` permission to use this endpoint. 
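// --- Hedged usage sketch (not part of the generated types above) -------------
// Illustrates the artifact listing endpoint typed above
// (GET /repos/{owner}/{repo}/actions/artifacts): list a repository's artifacts
// and print their names and sizes. @octokit/core and the placeholder
// owner/repo values are assumptions, not part of this repository.
import { Octokit } from "@octokit/core";

async function listArtifacts() {
  const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
  const { data } = await octokit.request(
    "GET /repos/{owner}/{repo}/actions/artifacts",
    { owner: "octo-org", repo: "octo-repo", per_page: 100 }
  );
  console.log(`total: ${data.total_count}`);
  for (const artifact of data.artifacts) {
    // Expired artifacts can no longer be downloaded via the zip redirect endpoint.
    console.log(`${artifact.name} (${artifact.size_in_bytes} bytes, expired: ${artifact.expired})`);
  }
}

listArtifacts().catch(console.error);
// -----------------------------------------------------------------------------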
+ */ + "actions/delete-actions-cache-by-key": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** A key for identifying the cache. */ + key: components["parameters"]["actions-cache-key-required"]; + /** The Git reference for the results you want to list. The `ref` for a branch can be formatted either as `refs/heads/` or simply ``. To reference a pull request use `refs/pull//merge`. */ + ref?: components["parameters"]["git-ref"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["actions-cache-list"]; + }; + }; + }; + }; + /** + * Deletes a GitHub Actions cache for a repository, using a cache ID. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. + * + * GitHub Apps must have the `actions:write` permission to use this endpoint. + */ + "actions/delete-actions-cache-by-id": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the GitHub Actions cache. */ + cache_id: components["parameters"]["cache-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** Gets a specific job in a workflow run. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. */ + "actions/get-job-for-workflow-run": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the job. */ + job_id: components["parameters"]["job-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["job"]; + }; + }; + }; + }; + /** + * Gets a redirect URL to download a plain text file of logs for a workflow job. This link expires after 1 minute. Look + * for `Location:` in the response header to find the URL for the download. Anyone with read access to the repository can + * use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must + * have the `actions:read` permission to use this endpoint. + */ + "actions/download-job-logs-for-workflow-run": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the job. */ + job_id: components["parameters"]["job-id"]; + }; + }; + responses: { + /** Response */ + 302: never; + }; + }; + /** Re-run a job and its dependent jobs in a workflow run. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. 
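// --- Hedged usage sketch (not part of the generated types above) -------------
// Illustrates the Actions cache endpoints typed above: list caches for one ref,
// then delete each by its complete key (DELETE matches every cache with that
// key, optionally narrowed by ref). Assumes @octokit/core; the owner, repo, and
// the `refs/heads/flutterflow` ref are placeholders borrowed from this patch
// series, not values defined by these types.
import { Octokit } from "@octokit/core";

async function pruneBranchCaches() {
  const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
  const base = { owner: "octo-org", repo: "octo-repo" };

  const { data } = await octokit.request("GET /repos/{owner}/{repo}/actions/caches", {
    ...base,
    ref: "refs/heads/flutterflow",
    sort: "last_accessed_at",
    direction: "asc",
  });

  for (const cache of data.actions_caches) {
    await octokit.request("DELETE /repos/{owner}/{repo}/actions/caches", {
      ...base,
      key: cache.key!,
      ref: cache.ref,
    });
  }
}

pruneBranchCaches().catch(console.error);
// -----------------------------------------------------------------------------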
*/ + "actions/re-run-job-for-workflow-run": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the job. */ + job_id: components["parameters"]["job-id"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["empty-object"]; + }; + }; + 403: components["responses"]["forbidden"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description Whether to enable debug logging for the re-run. + * @default false + */ + enable_debug_logging?: boolean; + } | null; + }; + }; + }; + /** + * Gets the GitHub Actions permissions policy for a repository, including whether GitHub Actions is enabled and the actions and reusable workflows allowed to run in the repository. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration` repository permission to use this API. + */ + "actions/get-github-actions-permissions-repository": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["actions-repository-permissions"]; + }; + }; + }; + }; + /** + * Sets the GitHub Actions permissions policy for enabling GitHub Actions and allowed actions and reusable workflows in the repository. + * + * If the repository belongs to an organization or enterprise that has set restrictive permissions at the organization or enterprise levels, such as `allowed_actions` to `selected` actions and reusable workflows, then you cannot override them for the repository. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration` repository permission to use this API. + */ + "actions/set-github-actions-permissions-repository": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + requestBody: { + content: { + "application/json": { + enabled: components["schemas"]["actions-enabled"]; + allowed_actions?: components["schemas"]["allowed-actions"]; + }; + }; + }; + }; + /** + * Gets the level of access that workflows outside of the repository have to actions and reusable workflows in the repository. + * This endpoint only applies to internal repositories. For more information, see "[Managing GitHub Actions settings for a repository](https://docs.github.com/repositories/managing-your-repositorys-settings-and-features/enabling-features-for-your-repository/managing-github-actions-settings-for-a-repository#allowing-access-to-components-in-an-internal-repository)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the + * repository `administration` permission to use this endpoint. 
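// --- Hedged usage sketch (not part of the generated types above) -------------
// Illustrates the Actions permissions endpoints typed above: restrict a
// repository to "selected" actions and reusable workflows via
// PUT /repos/{owner}/{repo}/actions/permissions (responds 204). Assumes
// @octokit/core; owner/repo are placeholders.
import { Octokit } from "@octokit/core";

async function restrictActions() {
  const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
  await octokit.request("PUT /repos/{owner}/{repo}/actions/permissions", {
    owner: "octo-org",
    repo: "octo-repo",
    enabled: true,            // schemas["actions-enabled"]
    allowed_actions: "selected", // schemas["allowed-actions"]
  });
}

restrictActions().catch(console.error);
// -----------------------------------------------------------------------------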
+ */ + "actions/get-workflow-access-to-repository": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["actions-workflow-access-to-repository"]; + }; + }; + }; + }; + /** + * Sets the level of access that workflows outside of the repository have to actions and reusable workflows in the repository. + * This endpoint only applies to internal repositories. For more information, see "[Managing GitHub Actions settings for a repository](https://docs.github.com/repositories/managing-your-repositorys-settings-and-features/enabling-features-for-your-repository/managing-github-actions-settings-for-a-repository#allowing-access-to-components-in-an-internal-repository)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the + * repository `administration` permission to use this endpoint. + */ + "actions/set-workflow-access-to-repository": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + requestBody: { + content: { + "application/json": components["schemas"]["actions-workflow-access-to-repository"]; + }; + }; + }; + /** + * Gets the settings for selected actions and reusable workflows that are allowed in a repository. To use this endpoint, the repository policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for a repository](#set-github-actions-permissions-for-a-repository)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration` repository permission to use this API. + */ + "actions/get-allowed-actions-repository": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["selected-actions"]; + }; + }; + }; + }; + /** + * Sets the actions and reusable workflows that are allowed in a repository. To use this endpoint, the repository permission policy for `allowed_actions` must be configured to `selected`. For more information, see "[Set GitHub Actions permissions for a repository](#set-github-actions-permissions-for-a-repository)." + * + * If the repository belongs to an organization or enterprise that has `selected` actions and reusable workflows set at the organization or enterprise levels, then you cannot override any of the allowed actions and reusable workflows settings. + * + * To use the `patterns_allowed` setting for private repositories, the repository must belong to an enterprise. If the repository does not belong to an enterprise, then the `patterns_allowed` setting only applies to public repositories. 
+ * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration` repository permission to use this API. + */ + "actions/set-allowed-actions-repository": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + requestBody: { + content: { + "application/json": components["schemas"]["selected-actions"]; + }; + }; + }; + /** + * Gets the default workflow permissions granted to the `GITHUB_TOKEN` when running workflows in a repository, + * as well as if GitHub Actions can submit approving pull request reviews. + * For more information, see "[Setting the permissions of the GITHUB_TOKEN for your repository](https://docs.github.com/repositories/managing-your-repositorys-settings-and-features/enabling-features-for-your-repository/managing-github-actions-settings-for-a-repository#setting-the-permissions-of-the-github_token-for-your-repository)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the repository `administration` permission to use this API. + */ + "actions/get-github-actions-default-workflow-permissions-repository": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["actions-get-default-workflow-permissions"]; + }; + }; + }; + }; + /** + * Sets the default workflow permissions granted to the `GITHUB_TOKEN` when running workflows in a repository, and sets if GitHub Actions + * can submit approving pull request reviews. + * For more information, see "[Setting the permissions of the GITHUB_TOKEN for your repository](https://docs.github.com/repositories/managing-your-repositorys-settings-and-features/enabling-features-for-your-repository/managing-github-actions-settings-for-a-repository#setting-the-permissions-of-the-github_token-for-your-repository)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the repository `administration` permission to use this API. + */ + "actions/set-github-actions-default-workflow-permissions-repository": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Success response */ + 204: never; + /** Conflict response when changing a setting is prevented by the owning organization or enterprise */ + 409: unknown; + }; + requestBody: { + content: { + "application/json": components["schemas"]["actions-set-default-workflow-permissions"]; + }; + }; + }; + /** Lists all self-hosted runners configured in a repository. You must authenticate using an access token with the `repo` scope to use this endpoint. */ + "actions/list-self-hosted-runners-for-repo": { + parameters: { + path: { + /** The account owner of the repository. 
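// --- Hedged usage sketch (not part of the generated types above) -------------
// Illustrates the default GITHUB_TOKEN workflow-permissions endpoints typed
// above (PUT /repos/{owner}/{repo}/actions/permissions/workflow, 204 on success,
// 409 when the owning organization or enterprise prevents the change). Assumes
// @octokit/core; owner/repo are placeholders.
import { Octokit } from "@octokit/core";

async function lockDownWorkflowToken() {
  const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
  await octokit.request("PUT /repos/{owner}/{repo}/actions/permissions/workflow", {
    owner: "octo-org",
    repo: "octo-repo",
    default_workflow_permissions: "read",
    can_approve_pull_request_reviews: false,
  });
}

lockDownWorkflowToken().catch(console.error);
// -----------------------------------------------------------------------------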
The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": { + total_count: number; + runners: components["schemas"]["runner"][]; + }; + }; + }; + }; + }; + /** + * Lists binaries for the runner application that you can download and run. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. + */ + "actions/list-runner-applications-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["runner-application"][]; + }; + }; + }; + }; + /** + * Returns a token that you can pass to the `config` script. The token expires after one hour. You must authenticate + * using an access token with the `repo` scope to use this endpoint. + * + * #### Example using registration token + * + * Configure your self-hosted runner, replacing `TOKEN` with the registration token provided by this endpoint. + * + * ``` + * ./config.sh --url https://github.com/octo-org/octo-repo-artifacts --token TOKEN + * ``` + */ + "actions/create-registration-token-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["authentication-token"]; + }; + }; + }; + }; + /** + * Returns a token that you can pass to remove a self-hosted runner from a repository. The token expires after one hour. + * You must authenticate using an access token with the `repo` scope to use this endpoint. + * + * #### Example using remove token + * + * To remove your self-hosted runner from a repository, replace TOKEN with the remove token provided by this endpoint. + * + * ``` + * ./config.sh remove --token TOKEN + * ``` + */ + "actions/create-remove-token-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["authentication-token"]; + }; + }; + }; + }; + /** + * Gets a specific self-hosted runner configured in a repository. + * + * You must authenticate using an access token with the `repo` scope to use this + * endpoint. + */ + "actions/get-self-hosted-runner-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. 
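// --- Hedged usage sketch (not part of the generated types above) -------------
// Illustrates the runner registration-token endpoint documented above: fetch a
// one-hour token and print the ./config.sh invocation shown in the docs block.
// Assumes @octokit/core; owner/repo mirror the docs' placeholders.
import { Octokit } from "@octokit/core";

async function printRunnerConfigCommand() {
  const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
  const { data } = await octokit.request(
    "POST /repos/{owner}/{repo}/actions/runners/registration-token",
    { owner: "octo-org", repo: "octo-repo-artifacts" }
  );
  // `data` is the authentication-token schema: { token, expires_at }.
  console.log(
    `./config.sh --url https://github.com/octo-org/octo-repo-artifacts --token ${data.token}`
  );
}

printRunnerConfigCommand().catch(console.error);
// -----------------------------------------------------------------------------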
*/ + repo: components["parameters"]["repo"]; + /** Unique identifier of the self-hosted runner. */ + runner_id: components["parameters"]["runner-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["runner"]; + }; + }; + }; + }; + /** + * Forces the removal of a self-hosted runner from a repository. You can use this endpoint to completely remove the runner when the machine you were using no longer exists. + * + * You must authenticate using an access token with the `repo` + * scope to use this endpoint. + */ + "actions/delete-self-hosted-runner-from-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** Unique identifier of the self-hosted runner. */ + runner_id: components["parameters"]["runner-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * Lists all labels for a self-hosted runner configured in a repository. + * + * You must authenticate using an access token with the `repo` scope to use this + * endpoint. + */ + "actions/list-labels-for-self-hosted-runner-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** Unique identifier of the self-hosted runner. */ + runner_id: components["parameters"]["runner-id"]; + }; + }; + responses: { + 200: components["responses"]["actions_runner_labels"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Remove all previous custom labels and set the new custom labels for a specific + * self-hosted runner configured in a repository. + * + * You must authenticate using an access token with the `repo` scope to use this + * endpoint. + */ + "actions/set-custom-labels-for-self-hosted-runner-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** Unique identifier of the self-hosted runner. */ + runner_id: components["parameters"]["runner-id"]; + }; + }; + responses: { + 200: components["responses"]["actions_runner_labels"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed_simple"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The names of the custom labels to set for the runner. You can pass an empty array to remove all custom labels. */ + labels: string[]; + }; + }; + }; + }; + /** + * Add custom labels to a self-hosted runner configured in a repository. + * + * You must authenticate using an access token with the `repo` scope to use this + * endpoint. + */ + "actions/add-custom-labels-to-self-hosted-runner-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** Unique identifier of the self-hosted runner. 
*/ + runner_id: components["parameters"]["runner-id"]; + }; + }; + responses: { + 200: components["responses"]["actions_runner_labels"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed_simple"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The names of the custom labels to add to the runner. */ + labels: string[]; + }; + }; + }; + }; + /** + * Remove all custom labels from a self-hosted runner configured in a + * repository. Returns the remaining read-only labels from the runner. + * + * You must authenticate using an access token with the `repo` scope to use this + * endpoint. + */ + "actions/remove-all-custom-labels-from-self-hosted-runner-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** Unique identifier of the self-hosted runner. */ + runner_id: components["parameters"]["runner-id"]; + }; + }; + responses: { + 200: components["responses"]["actions_runner_labels_readonly"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Remove a custom label from a self-hosted runner configured + * in a repository. Returns the remaining labels from the runner. + * + * This endpoint returns a `404 Not Found` status if the custom label is not + * present on the runner. + * + * You must authenticate using an access token with the `repo` scope to use this + * endpoint. + */ + "actions/remove-custom-label-from-self-hosted-runner-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** Unique identifier of the self-hosted runner. */ + runner_id: components["parameters"]["runner-id"]; + /** The name of a self-hosted runner's custom label. */ + name: components["parameters"]["runner-label-name"]; + }; + }; + responses: { + 200: components["responses"]["actions_runner_labels"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed_simple"]; + }; + }; + /** + * Lists all workflow runs for a repository. You can use parameters to narrow the list of results. For more information about using parameters, see [Parameters](https://docs.github.com/rest/overview/resources-in-the-rest-api#parameters). + * + * Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + "actions/list-workflow-runs-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** Returns someone's workflow runs. Use the login for the user who created the `push` associated with the check suite or workflow run. */ + actor?: components["parameters"]["actor"]; + /** Returns workflow runs associated with a branch. Use the name of the branch of the `push`. */ + branch?: components["parameters"]["workflow-run-branch"]; + /** Returns workflow run triggered by the event you specify. 
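// --- Hedged usage sketch (not part of the generated types above) -------------
// Illustrates the runner custom-label endpoints typed above: PUT replaces all
// custom labels on a self-hosted runner (an empty array clears them). Assumes
// @octokit/core; the runner id and label names are placeholders.
import { Octokit } from "@octokit/core";

async function relabelRunner(runnerId: number) {
  const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
  const { data } = await octokit.request(
    "PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels",
    {
      owner: "octo-org",
      repo: "octo-repo",
      runner_id: runnerId,
      labels: ["flutter", "macos-arm"],
    }
  );
  // Response is the actions_runner_labels shape: { total_count, labels }.
  console.log(data.labels.map((label) => label.name));
}
// -----------------------------------------------------------------------------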
For example, `push`, `pull_request` or `issue`. For more information, see "[Events that trigger workflows](https://docs.github.com/en/actions/automating-your-workflow-with-github-actions/events-that-trigger-workflows)." */ + event?: components["parameters"]["event"]; + /** Returns workflow runs with the check run `status` or `conclusion` that you specify. For example, a conclusion can be `success` or a status can be `in_progress`. Only GitHub can set a status of `waiting` or `requested`. */ + status?: components["parameters"]["workflow-run-status"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + /** Returns workflow runs created within the given date-time range. For more information on the syntax, see "[Understanding the search syntax](https://docs.github.com/search-github/getting-started-with-searching-on-github/understanding-the-search-syntax#query-for-dates)." */ + created?: components["parameters"]["created"]; + /** If `true` pull requests are omitted from the response (empty array). */ + exclude_pull_requests?: components["parameters"]["exclude-pull-requests"]; + /** Returns workflow runs with the `check_suite_id` that you specify. */ + check_suite_id?: components["parameters"]["workflow-run-check-suite-id"]; + /** Only returns workflow runs that are associated with the specified `head_sha`. */ + head_sha?: components["parameters"]["workflow-run-head-sha"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": { + total_count: number; + workflow_runs: components["schemas"]["workflow-run"][]; + }; + }; + }; + }; + }; + /** Gets a specific workflow run. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. */ + "actions/get-workflow-run": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the workflow run. */ + run_id: components["parameters"]["run-id"]; + }; + query: { + /** If `true` pull requests are omitted from the response (empty array). */ + exclude_pull_requests?: components["parameters"]["exclude-pull-requests"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["workflow-run"]; + }; + }; + }; + }; + /** + * Delete a specific workflow run. Anyone with write access to the repository can use this endpoint. If the repository is + * private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:write` permission to use + * this endpoint. + */ + "actions/delete-workflow-run": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the workflow run. */ + run_id: components["parameters"]["run-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** Anyone with read access to the repository can use this endpoint. 
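// --- Hedged usage sketch (not part of the generated types above) -------------
// Illustrates the repository workflow-run listing endpoint typed above
// (GET /repos/{owner}/{repo}/actions/runs) with the branch and status query
// filters it documents. Assumes @octokit/core; the `flutterflow` branch name
// echoes the branch used elsewhere in this patch series.
import { Octokit } from "@octokit/core";

async function listRecentRuns() {
  const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
  const { data } = await octokit.request("GET /repos/{owner}/{repo}/actions/runs", {
    owner: "octo-org",
    repo: "octo-repo",
    branch: "flutterflow",
    status: "completed",
    per_page: 10,
  });
  for (const run of data.workflow_runs) {
    console.log(`${run.id} ${run.name ?? run.head_branch} -> ${run.conclusion}`);
  }
}

listRecentRuns().catch(console.error);
// -----------------------------------------------------------------------------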
If the repository is private, you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. */ + "actions/get-reviews-for-run": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the workflow run. */ + run_id: components["parameters"]["run-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["environment-approvals"][]; + }; + }; + }; + }; + /** + * Approves a workflow run for a pull request from a public fork of a first time contributor. For more information, see ["Approving workflow runs from public forks](https://docs.github.com/actions/managing-workflow-runs/approving-workflow-runs-from-public-forks)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. + */ + "actions/approve-workflow-run": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the workflow run. */ + run_id: components["parameters"]["run-id"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["empty-object"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Lists artifacts for a workflow run. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. */ + "actions/list-workflow-run-artifacts": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the workflow run. */ + run_id: components["parameters"]["run-id"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": { + total_count: number; + artifacts: components["schemas"]["artifact"][]; + }; + }; + }; + }; + }; + /** + * Gets a specific workflow run attempt. Anyone with read access to the repository + * can use this endpoint. If the repository is private you must use an access token + * with the `repo` scope. GitHub Apps must have the `actions:read` permission to + * use this endpoint. + */ + "actions/get-workflow-run-attempt": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the workflow run. 
*/ + run_id: components["parameters"]["run-id"]; + /** The attempt number of the workflow run. */ + attempt_number: components["parameters"]["attempt-number"]; + }; + query: { + /** If `true` pull requests are omitted from the response (empty array). */ + exclude_pull_requests?: components["parameters"]["exclude-pull-requests"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["workflow-run"]; + }; + }; + }; + }; + /** Lists jobs for a specific workflow run attempt. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. You can use parameters to narrow the list of results. For more information about using parameters, see [Parameters](https://docs.github.com/rest/overview/resources-in-the-rest-api#parameters). */ + "actions/list-jobs-for-workflow-run-attempt": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the workflow run. */ + run_id: components["parameters"]["run-id"]; + /** The attempt number of the workflow run. */ + attempt_number: components["parameters"]["attempt-number"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": { + total_count: number; + jobs: components["schemas"]["job"][]; + }; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Gets a redirect URL to download an archive of log files for a specific workflow run attempt. This link expires after + * 1 minute. Look for `Location:` in the response header to find the URL for the download. Anyone with read access to + * the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. + * GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + "actions/download-workflow-run-attempt-logs": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the workflow run. */ + run_id: components["parameters"]["run-id"]; + /** The attempt number of the workflow run. */ + attempt_number: components["parameters"]["attempt-number"]; + }; + }; + responses: { + /** Response */ + 302: never; + }; + }; + /** Cancels a workflow run using its `id`. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. */ + "actions/cancel-workflow-run": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the workflow run. 
*/ + run_id: components["parameters"]["run-id"]; + }; + }; + responses: { + /** Response */ + 202: { + content: { + "application/json": components["schemas"]["empty-object"]; + }; + }; + 409: components["responses"]["conflict"]; + }; + }; + /** Lists jobs for a workflow run. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. You can use parameters to narrow the list of results. For more information about using parameters, see [Parameters](https://docs.github.com/rest/overview/resources-in-the-rest-api#parameters). */ + "actions/list-jobs-for-workflow-run": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the workflow run. */ + run_id: components["parameters"]["run-id"]; + }; + query: { + /** Filters jobs by their `completed_at` timestamp. `latest` returns jobs from the most recent execution of the workflow run. `all` returns all jobs for a workflow run, including from old executions of the workflow run. */ + filter?: "latest" | "all"; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": { + total_count: number; + jobs: components["schemas"]["job"][]; + }; + }; + }; + }; + }; + /** + * Gets a redirect URL to download an archive of log files for a workflow run. This link expires after 1 minute. Look for + * `Location:` in the response header to find the URL for the download. Anyone with read access to the repository can use + * this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have + * the `actions:read` permission to use this endpoint. + */ + "actions/download-workflow-run-logs": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the workflow run. */ + run_id: components["parameters"]["run-id"]; + }; + }; + responses: { + /** Response */ + 302: never; + }; + }; + /** Deletes all logs for a workflow run. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. */ + "actions/delete-workflow-run-logs": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the workflow run. */ + run_id: components["parameters"]["run-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 403: components["responses"]["forbidden"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Get all deployment environments for a workflow run that are waiting for protection rules to pass. 
+ * + * Anyone with read access to the repository can use this endpoint. If the repository is private, you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + "actions/get-pending-deployments-for-run": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the workflow run. */ + run_id: components["parameters"]["run-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["pending-deployment"][]; + }; + }; + }; + }; + /** + * Approve or reject pending deployments that are waiting on approval by a required reviewer. + * + * Required reviewers with read access to the repository contents and deployments can use this endpoint. Required reviewers must authenticate using an access token with the `repo` scope to use this endpoint. + */ + "actions/review-pending-deployments-for-run": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the workflow run. */ + run_id: components["parameters"]["run-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["deployment"][]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The list of environment ids to approve or reject + * @example [ + * 161171787, + * 161171795 + * ] + */ + environment_ids: number[]; + /** + * @description Whether to approve or reject deployment to the specified environments. + * @example approved + * @enum {string} + */ + state: "approved" | "rejected"; + /** + * @description A comment to accompany the deployment review + * @example Ship it! + */ + comment: string; + }; + }; + }; + }; + /** Re-runs your workflow run using its `id`. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. */ + "actions/re-run-workflow": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the workflow run. */ + run_id: components["parameters"]["run-id"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["empty-object"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** + * @description Whether to enable debug logging for the re-run. + * @default false + */ + enable_debug_logging?: boolean; + } | null; + }; + }; + }; + /** Re-run all of the failed jobs and their dependent jobs in a workflow run using the `id` of the workflow run. You must authenticate using an access token with the `repo` scope to use this endpoint. */ + "actions/re-run-workflow-failed-jobs": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. 
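// --- Hedged usage sketch (not part of the generated types above) -------------
// Illustrates the pending-deployment review endpoint typed above, sending the
// environment_ids / state / comment body exactly as documented (for example,
// state "approved" with the comment "Ship it!"). Assumes @octokit/core; the run
// and environment ids are placeholders.
import { Octokit } from "@octokit/core";

async function approveDeployments(runId: number, environmentIds: number[]) {
  const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
  await octokit.request(
    "POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments",
    {
      owner: "octo-org",
      repo: "octo-repo",
      run_id: runId,
      environment_ids: environmentIds,
      state: "approved",
      comment: "Ship it!",
    }
  );
}
// -----------------------------------------------------------------------------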
*/ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the workflow run. */ + run_id: components["parameters"]["run-id"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["empty-object"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** + * @description Whether to enable debug logging for the re-run. + * @default false + */ + enable_debug_logging?: boolean; + } | null; + }; + }; + }; + /** + * Gets the number of billable minutes and total run time for a specific workflow run. Billable minutes only apply to workflows in private repositories that use GitHub-hosted runners. Usage is listed for each GitHub-hosted runner operating system in milliseconds. Any job re-runs are also included in the usage. The usage does not include the multiplier for macOS and Windows runners and is not rounded up to the nearest whole minute. For more information, see "[Managing billing for GitHub Actions](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-actions)". + * + * Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + "actions/get-workflow-run-usage": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the workflow run. */ + run_id: components["parameters"]["run-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["workflow-run-usage"]; + }; + }; + }; + }; + /** Lists all secrets available in a repository without revealing their encrypted values. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. */ + "actions/list-repo-secrets": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": { + total_count: number; + secrets: components["schemas"]["actions-secret"][]; + }; + }; + }; + }; + }; + /** Gets your public key, which you need to encrypt secrets. You need to encrypt a secret before you can create or update secrets. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `secrets` repository permission to use this endpoint. */ + "actions/get-repo-public-key": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. 
*/ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["actions-public-key"]; + }; + }; + }; + }; + /** Gets a single repository secret without revealing its encrypted value. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. */ + "actions/get-repo-secret": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["actions-secret"]; + }; + }; + }; + }; + /** + * Creates or updates a repository secret with an encrypted value. Encrypt your secret using + * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access + * token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use + * this endpoint. + * + * #### Example encrypting a secret using Node.js + * + * Encrypt your secret using the [tweetsodium](https://github.com/github/tweetsodium) library. + * + * ``` + * const sodium = require('tweetsodium'); + * + * const key = "base64-encoded-public-key"; + * const value = "plain-text-secret"; + * + * // Convert the message and key to Uint8Array's (Buffer implements that interface) + * const messageBytes = Buffer.from(value); + * const keyBytes = Buffer.from(key, 'base64'); + * + * // Encrypt using LibSodium. + * const encryptedBytes = sodium.seal(messageBytes, keyBytes); + * + * // Base64 the encrypted secret + * const encrypted = Buffer.from(encryptedBytes).toString('base64'); + * + * console.log(encrypted); + * ``` + * + * + * #### Example encrypting a secret using Python + * + * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/latest/public/#nacl-public-sealedbox) with Python 3. + * + * ``` + * from base64 import b64encode + * from nacl import encoding, public + * + * def encrypt(public_key: str, secret_value: str) -> str: + * """Encrypt a Unicode string using the public key.""" + * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) + * sealed_box = public.SealedBox(public_key) + * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) + * return b64encode(encrypted).decode("utf-8") + * ``` + * + * #### Example encrypting a secret using C# + * + * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. + * + * ``` + * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); + * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); + * + * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); + * + * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); + * ``` + * + * #### Example encrypting a secret using Ruby + * + * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. 
+ * + * ```ruby + * require "rbnacl" + * require "base64" + * + * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") + * public_key = RbNaCl::PublicKey.new(key) + * + * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) + * encrypted_secret = box.encrypt("my_secret") + * + * # Print the base64 encoded secret + * puts Base64.strict_encode64(encrypted_secret) + * ``` + */ + "actions/create-or-update-repo-secret": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** Response when creating a secret */ + 201: { + content: { + "application/json": components["schemas"]["empty-object"]; + }; + }; + /** Response when updating a secret */ + 204: never; + }; + requestBody: { + content: { + "application/json": { + /** @description Value for your secret, encrypted with [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages) using the public key retrieved from the [Get a repository public key](https://docs.github.com/rest/reference/actions#get-a-repository-public-key) endpoint. */ + encrypted_value?: string; + /** @description ID of the key you used to encrypt the secret. */ + key_id?: string; + }; + }; + }; + }; + /** Deletes a secret in a repository using the secret name. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. */ + "actions/delete-repo-secret": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** Lists the workflows in a repository. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. */ + "actions/list-repo-workflows": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": { + total_count: number; + workflows: components["schemas"]["workflow"][]; + }; + }; + }; + }; + }; + /** Gets a specific workflow. You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. 
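// --- Hedged usage sketch (not part of the generated types above) -------------
// End-to-end version of the secret flow documented above: fetch the repository
// public key, seal the value with tweetsodium (the library used in the Node.js
// example in the docs block), and PUT the encrypted secret. 201 means created,
// 204 means an existing secret was updated. Assumes @octokit/core and
// tweetsodium are installed; owner/repo are placeholders.
import { Octokit } from "@octokit/core";
import sodium from "tweetsodium";

async function putRepoSecret(name: string, value: string) {
  const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
  const base = { owner: "octo-org", repo: "octo-repo" };

  const { data: key } = await octokit.request(
    "GET /repos/{owner}/{repo}/actions/secrets/public-key",
    base
  );

  // Seal with LibSodium as in the example above, then base64-encode the result.
  const encryptedBytes = sodium.seal(Buffer.from(value), Buffer.from(key.key, "base64"));
  const encrypted_value = Buffer.from(encryptedBytes).toString("base64");

  await octokit.request("PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}", {
    ...base,
    secret_name: name,
    encrypted_value,
    key_id: key.key_id,
  });
}
// -----------------------------------------------------------------------------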
*/ + "actions/get-workflow": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The ID of the workflow. You can also pass the workflow file name as a string. */ + workflow_id: components["parameters"]["workflow-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["workflow"]; + }; + }; + }; + }; + /** + * Disables a workflow and sets the `state` of the workflow to `disabled_manually`. You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. + */ + "actions/disable-workflow": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The ID of the workflow. You can also pass the workflow file name as a string. */ + workflow_id: components["parameters"]["workflow-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * You can use this endpoint to manually trigger a GitHub Actions workflow run. You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. + * + * You must configure your GitHub Actions workflow to run when the [`workflow_dispatch` webhook](/developers/webhooks-and-events/webhook-events-and-payloads#workflow_dispatch) event occurs. The `inputs` are configured in the workflow file. For more information about how to configure the `workflow_dispatch` event in the workflow file, see "[Events that trigger workflows](/actions/reference/events-that-trigger-workflows#workflow_dispatch)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. For more information, see "[Creating a personal access token for the command line](https://docs.github.com/articles/creating-a-personal-access-token-for-the-command-line)." + */ + "actions/create-workflow-dispatch": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The ID of the workflow. You can also pass the workflow file name as a string. */ + workflow_id: components["parameters"]["workflow-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + requestBody: { + content: { + "application/json": { + /** @description The git reference for the workflow. The reference can be a branch or tag name. */ + ref: string; + /** @description Input keys and values configured in the workflow file. The maximum number of properties is 10. Any default properties configured in the workflow file will be used when `inputs` are omitted. */ + inputs?: { [key: string]: string }; + }; + }; + }; + }; + /** + * Enables a workflow and sets the `state` of the workflow to `active`. You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. 
+ * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `actions:write` permission to use this endpoint. + */ + "actions/enable-workflow": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The ID of the workflow. You can also pass the workflow file name as a string. */ + workflow_id: components["parameters"]["workflow-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * List all workflow runs for a workflow. You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. You can use parameters to narrow the list of results. For more information about using parameters, see [Parameters](https://docs.github.com/rest/overview/resources-in-the-rest-api#parameters). + * + * Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. + */ + "actions/list-workflow-runs": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The ID of the workflow. You can also pass the workflow file name as a string. */ + workflow_id: components["parameters"]["workflow-id"]; + }; + query: { + /** Returns someone's workflow runs. Use the login for the user who created the `push` associated with the check suite or workflow run. */ + actor?: components["parameters"]["actor"]; + /** Returns workflow runs associated with a branch. Use the name of the branch of the `push`. */ + branch?: components["parameters"]["workflow-run-branch"]; + /** Returns workflow run triggered by the event you specify. For example, `push`, `pull_request` or `issue`. For more information, see "[Events that trigger workflows](https://docs.github.com/en/actions/automating-your-workflow-with-github-actions/events-that-trigger-workflows)." */ + event?: components["parameters"]["event"]; + /** Returns workflow runs with the check run `status` or `conclusion` that you specify. For example, a conclusion can be `success` or a status can be `in_progress`. Only GitHub can set a status of `waiting` or `requested`. */ + status?: components["parameters"]["workflow-run-status"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + /** Returns workflow runs created within the given date-time range. For more information on the syntax, see "[Understanding the search syntax](https://docs.github.com/search-github/getting-started-with-searching-on-github/understanding-the-search-syntax#query-for-dates)." */ + created?: components["parameters"]["created"]; + /** If `true` pull requests are omitted from the response (empty array). */ + exclude_pull_requests?: components["parameters"]["exclude-pull-requests"]; + /** Returns workflow runs with the `check_suite_id` that you specify. */ + check_suite_id?: components["parameters"]["workflow-run-check-suite-id"]; + /** Only returns workflow runs that are associated with the specified `head_sha`. 
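+ *
+ * For example, an illustrative `@octokit/core` call (not part of the upstream description;
+ * the SHA below is a placeholder) that lists only the runs triggered by one commit:
+ *
+ * ```
+ * const { Octokit } = require("@octokit/core");
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * octokit
+ *   .request("GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs", {
+ *     owner: "octocat",
+ *     repo: "hello-world",
+ *     workflow_id: "main.yaml",
+ *     head_sha: "0000000000000000000000000000000000000000",
+ *   })
+ *   .then(({ data }) => console.log(data.total_count))
+ *   .catch(console.error);
+ * ```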
*/ + head_sha?: components["parameters"]["workflow-run-head-sha"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": { + total_count: number; + workflow_runs: components["schemas"]["workflow-run"][]; + }; + }; + }; + }; + }; + /** + * Gets the number of billable minutes used by a specific workflow during the current billing cycle. Billable minutes only apply to workflows in private repositories that use GitHub-hosted runners. Usage is listed for each GitHub-hosted runner operating system in milliseconds. Any job re-runs are also included in the usage. The usage does not include the multiplier for macOS and Windows runners and is not rounded up to the nearest whole minute. For more information, see "[Managing billing for GitHub Actions](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-actions)". + * + * You can replace `workflow_id` with the workflow file name. For example, you could use `main.yaml`. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + "actions/get-workflow-usage": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The ID of the workflow. You can also pass the workflow file name as a string. */ + workflow_id: components["parameters"]["workflow-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["workflow-usage"]; + }; + }; + }; + }; + /** Lists the [available assignees](https://docs.github.com/articles/assigning-issues-and-pull-requests-to-other-github-users/) for issues in a repository. */ + "issues/list-assignees": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["simple-user"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Checks if a user has permission to be assigned to an issue in this repository. + * + * If the `assignee` can be assigned to issues in the repository, a `204` header with no content is returned. + * + * Otherwise a `404` status code is returned. + */ + "issues/check-user-can-be-assigned": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + assignee: string; + }; + }; + responses: { + /** If the `assignee` can be assigned to issues in the repository, a `204` header with no content is returned. */ + 204: never; + /** Otherwise a `404` status code is returned. 
*/ + 404: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + }; + }; + /** + * This returns a list of autolinks configured for the given repository. + * + * Information about autolinks are only available to repository administrators. + */ + "repos/list-autolinks": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["autolink"][]; + }; + }; + }; + }; + /** Users with admin access to the repository can create an autolink. */ + "repos/create-autolink": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** response */ + 201: { + headers: { + Location?: string; + }; + content: { + "application/json": components["schemas"]["autolink"]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description This prefix appended by certain characters will generate a link any time it is found in an issue, pull request, or commit. */ + key_prefix: string; + /** @description The URL must contain `<num>` for the reference number. `<num>` matches different characters depending on the value of `is_alphanumeric`. */ + url_template: string; + /** + * @description Whether this autolink reference matches alphanumeric characters. If true, the `<num>` parameter of the `url_template` matches alphanumeric characters `A-Z` (case insensitive), `0-9`, and `-`. If false, this autolink reference only matches numeric characters. + * @default true + */ + is_alphanumeric?: boolean; + }; + }; + }; + }; + /** + * This returns a single autolink reference by ID that was configured for the given repository. + * + * Information about autolinks are only available to repository administrators. + */ + "repos/get-autolink": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the autolink. */ + autolink_id: components["parameters"]["autolink-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["autolink"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * This deletes a single autolink reference by ID that was configured for the given repository. + * + * Information about autolinks are only available to repository administrators. + */ + "repos/delete-autolink": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the autolink.
*/ + autolink_id: components["parameters"]["autolink-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 404: components["responses"]["not_found"]; + }; + }; + /** Enables automated security fixes for a repository. The authenticated user must have admin access to the repository. For more information, see "[Configuring automated security fixes](https://docs.github.com/en/articles/configuring-automated-security-fixes)". */ + "repos/enable-automated-security-fixes": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** Disables automated security fixes for a repository. The authenticated user must have admin access to the repository. For more information, see "[Configuring automated security fixes](https://docs.github.com/en/articles/configuring-automated-security-fixes)". */ + "repos/disable-automated-security-fixes": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + "repos/list-branches": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** Setting to `true` returns only protected branches. When set to `false`, only unprotected branches are returned. Omitting this parameter returns all branches. */ + protected?: boolean; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["short-branch"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + "repos/get-branch": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. */ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["branch-with-protection"]; + }; + }; + 301: components["responses"]["moved_permanently"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ + "repos/get-branch-protection": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. 
*/ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. */ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["branch-protection"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Protecting a branch requires admin or owner permissions to the repository. + * + * **Note**: Passing new arrays of `users` and `teams` replaces their previous values. + * + * **Note**: The list of users, apps, and teams in total is limited to 100 items. + */ + "repos/update-branch-protection": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. */ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["protected-branch"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed_simple"]; + }; + requestBody: { + content: { + "application/json": { + /** @description Require status checks to pass before merging. Set to `null` to disable. */ + required_status_checks: { + /** @description Require branches to be up to date before merging. */ + strict: boolean; + /** + * @deprecated + * @description **Deprecated**: The list of status checks to require in order to merge into this branch. If any of these checks have recently been set by a particular GitHub App, they will be required to come from that app in future for the branch to merge. Use `checks` instead of `contexts` for more fine-grained control. + */ + contexts: string[]; + /** @description The list of status checks to require in order to merge into this branch. */ + checks?: { + /** @description The name of the required check */ + context: string; + /** @description The ID of the GitHub App that must provide this check. Omit this field to automatically select the GitHub App that has recently provided this check, or any app if it was not set by a GitHub App. Pass -1 to explicitly allow any app to set the status. */ + app_id?: number; + }[]; + } | null; + /** @description Enforce all configured restrictions for administrators. Set to `true` to enforce required status checks for repository administrators. Set to `null` to disable. */ + enforce_admins: boolean | null; + /** @description Require at least one approving review on a pull request, before merging. Set to `null` to disable. */ + required_pull_request_reviews: { + /** @description Specify which users, teams, and apps can dismiss pull request reviews. Pass an empty `dismissal_restrictions` object to disable. User and team `dismissal_restrictions` are only available for organization-owned repositories. 
Omit this parameter for personal repositories. */ + dismissal_restrictions?: { + /** @description The list of user `login`s with dismissal access */ + users?: string[]; + /** @description The list of team `slug`s with dismissal access */ + teams?: string[]; + /** @description The list of app `slug`s with dismissal access */ + apps?: string[]; + }; + /** @description Set to `true` if you want to automatically dismiss approving reviews when someone pushes a new commit. */ + dismiss_stale_reviews?: boolean; + /** @description Blocks merging pull requests until [code owners](https://docs.github.com/articles/about-code-owners/) review them. */ + require_code_owner_reviews?: boolean; + /** @description Specify the number of reviewers required to approve pull requests. Use a number between 1 and 6 or 0 to not require reviewers. */ + required_approving_review_count?: number; + /** @description Allow specific users, teams, or apps to bypass pull request requirements. */ + bypass_pull_request_allowances?: { + /** @description The list of user `login`s allowed to bypass pull request requirements. */ + users?: string[]; + /** @description The list of team `slug`s allowed to bypass pull request requirements. */ + teams?: string[]; + /** @description The list of app `slug`s allowed to bypass pull request requirements. */ + apps?: string[]; + }; + } | null; + /** @description Restrict who can push to the protected branch. User, app, and team `restrictions` are only available for organization-owned repositories. Set to `null` to disable. */ + restrictions: { + /** @description The list of user `login`s with push access */ + users: string[]; + /** @description The list of team `slug`s with push access */ + teams: string[]; + /** @description The list of app `slug`s with push access */ + apps?: string[]; + } | null; + /** @description Enforces a linear commit Git history, which prevents anyone from pushing merge commits to a branch. Set to `true` to enforce a linear commit history. Set to `false` to disable a linear commit Git history. Your repository must allow squash merging or rebase merging before you can enable a linear commit history. Default: `false`. For more information, see "[Requiring a linear commit history](https://docs.github.com/github/administering-a-repository/requiring-a-linear-commit-history)" in the GitHub Help documentation. */ + required_linear_history?: boolean; + /** @description Permits force pushes to the protected branch by anyone with write access to the repository. Set to `true` to allow force pushes. Set to `false` or `null` to block force pushes. Default: `false`. For more information, see "[Enabling force pushes to a protected branch](https://docs.github.com/en/github/administering-a-repository/enabling-force-pushes-to-a-protected-branch)" in the GitHub Help documentation." */ + allow_force_pushes?: boolean | null; + /** @description Allows deletion of the protected branch by anyone with write access to the repository. Set to `false` to prevent deletion of the protected branch. Default: `false`. For more information, see "[Enabling force pushes to a protected branch](https://docs.github.com/en/github/administering-a-repository/enabling-force-pushes-to-a-protected-branch)" in the GitHub Help documentation. */ + allow_deletions?: boolean; + /** @description If set to `true`, the `restrictions` branch protection settings which limits who can push will also block pushes which create new branches, unless the push is initiated by a user, team, or app which has the ability to push. 
Set to `true` to restrict new branch creation. Default: `false`. */ + block_creations?: boolean; + /** @description Requires all conversations on code to be resolved before a pull request can be merged into a branch that matches this rule. Set to `false` to disable. Default: `false`. */ + required_conversation_resolution?: boolean; + }; + }; + }; + }; + /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ + "repos/delete-branch-protection": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. */ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 204: never; + 403: components["responses"]["forbidden"]; + }; + }; + /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ + "repos/get-admin-branch-protection": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. */ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["protected-branch-admin-enforced"]; + }; + }; + }; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Adding admin enforcement requires admin or owner permissions to the repository and branch protection to be enabled. + */ + "repos/set-admin-branch-protection": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. */ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["protected-branch-admin-enforced"]; + }; + }; + }; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. 
For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Removing admin enforcement requires admin or owner permissions to the repository and branch protection to be enabled. + */ + "repos/delete-admin-branch-protection": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. */ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 204: never; + 404: components["responses"]["not_found"]; + }; + }; + /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ + "repos/get-pull-request-review-protection": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. */ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["protected-branch-pull-request-review"]; + }; + }; + }; + }; + /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ + "repos/delete-pull-request-review-protection": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. */ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 204: never; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Updating pull request review enforcement requires admin or owner permissions to the repository and branch protection to be enabled. + * + * **Note**: Passing new arrays of `users` and `teams` replaces their previous values. + */ + "repos/update-pull-request-review-protection": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. 
*/ + repo: components["parameters"]["repo"]; + /** The name of the branch. */ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["protected-branch-pull-request-review"]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description Specify which users, teams, and apps can dismiss pull request reviews. Pass an empty `dismissal_restrictions` object to disable. User and team `dismissal_restrictions` are only available for organization-owned repositories. Omit this parameter for personal repositories. */ + dismissal_restrictions?: { + /** @description The list of user `login`s with dismissal access */ + users?: string[]; + /** @description The list of team `slug`s with dismissal access */ + teams?: string[]; + /** @description The list of app `slug`s with dismissal access */ + apps?: string[]; + }; + /** @description Set to `true` if you want to automatically dismiss approving reviews when someone pushes a new commit. */ + dismiss_stale_reviews?: boolean; + /** @description Blocks merging pull requests until [code owners](https://docs.github.com/articles/about-code-owners/) have reviewed. */ + require_code_owner_reviews?: boolean; + /** @description Specifies the number of reviewers required to approve pull requests. Use a number between 1 and 6 or 0 to not require reviewers. */ + required_approving_review_count?: number; + /** @description Allow specific users, teams, or apps to bypass pull request requirements. */ + bypass_pull_request_allowances?: { + /** @description The list of user `login`s allowed to bypass pull request requirements. */ + users?: string[]; + /** @description The list of team `slug`s allowed to bypass pull request requirements. */ + teams?: string[]; + /** @description The list of app `slug`s allowed to bypass pull request requirements. */ + apps?: string[]; + }; + }; + }; + }; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * When authenticated with admin or owner permissions to the repository, you can use this endpoint to check whether a branch requires signed commits. An enabled status of `true` indicates you must sign commits on this branch. For more information, see [Signing commits with GPG](https://docs.github.com/articles/signing-commits-with-gpg) in GitHub Help. + * + * **Note**: You must enable branch protection to require signed commits. + */ + "repos/get-commit-signature-protection": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. 
*/ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["protected-branch-admin-enforced"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * When authenticated with admin or owner permissions to the repository, you can use this endpoint to require signed commits on a branch. You must enable branch protection to require signed commits. + */ + "repos/create-commit-signature-protection": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. */ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["protected-branch-admin-enforced"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * When authenticated with admin or owner permissions to the repository, you can use this endpoint to disable required signed commits on a branch. You must enable branch protection to require signed commits. + */ + "repos/delete-commit-signature-protection": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. */ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 204: never; + 404: components["responses"]["not_found"]; + }; + }; + /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ + "repos/get-status-checks-protection": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. 
*/ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["status-check-policy"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ + "repos/remove-status-check-protection": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. */ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Updating required status checks requires admin or owner permissions to the repository and branch protection to be enabled. + */ + "repos/update-status-check-protection": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. */ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["status-check-policy"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description Require branches to be up to date before merging. */ + strict?: boolean; + /** + * @deprecated + * @description **Deprecated**: The list of status checks to require in order to merge into this branch. If any of these checks have recently been set by a particular GitHub App, they will be required to come from that app in future for the branch to merge. Use `checks` instead of `contexts` for more fine-grained control. + */ + contexts?: string[]; + /** @description The list of status checks to require in order to merge into this branch. */ + checks?: { + /** @description The name of the required check */ + context: string; + /** @description The ID of the GitHub App that must provide this check. Omit this field to automatically select the GitHub App that has recently provided this check, or any app if it was not set by a GitHub App. Pass -1 to explicitly allow any app to set the status. */ + app_id?: number; + }[]; + }; + }; + }; + }; + /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. 
For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ + "repos/get-all-status-check-contexts": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. */ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": string[]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ + "repos/set-status-check-contexts": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. */ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": string[]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The name of the status checks */ + contexts: string[]; + }; + }; + }; + }; + /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ + "repos/add-status-check-contexts": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. */ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": string[]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The name of the status checks */ + contexts: string[]; + }; + }; + }; + }; + /** Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ + "repos/remove-status-check-contexts": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. 
*/ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. */ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": string[]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The name of the status checks */ + contexts: string[]; + }; + }; + }; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Lists who has access to this protected branch. + * + * **Note**: Users, apps, and teams `restrictions` are only available for organization-owned repositories. + */ + "repos/get-access-restrictions": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. */ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["branch-restriction-policy"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Disables the ability to restrict who can push to this branch. + */ + "repos/delete-access-restrictions": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. */ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Lists the GitHub Apps that have push access to this branch. Only installed GitHub Apps with `write` access to the `contents` permission can be added as authorized actors on a protected branch. + */ + "repos/get-apps-with-access-to-protected-branch": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. 
*/ + repo: components["parameters"]["repo"]; + /** The name of the branch. */ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["integration"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Replaces the list of apps that have push access to this branch. This removes all apps that previously had push access and grants push access to the new list of apps. Only installed GitHub Apps with `write` access to the `contents` permission can be added as authorized actors on a protected branch. + * + * | Type | Description | + * | ------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------- | + * | `array` | The GitHub Apps that have push access to this branch. Use the app's `slug`. **Note**: The list of users, apps, and teams in total is limited to 100 items. | + */ + "repos/set-app-access-restrictions": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. */ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["integration"][]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description apps parameter */ + apps: string[]; + }; + }; + }; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Grants the specified apps push access for this branch. Only installed GitHub Apps with `write` access to the `contents` permission can be added as authorized actors on a protected branch. + * + * | Type | Description | + * | ------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------- | + * | `array` | The GitHub Apps that have push access to this branch. Use the app's `slug`. **Note**: The list of users, apps, and teams in total is limited to 100 items. | + */ + "repos/add-app-access-restrictions": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. 
*/ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["integration"][]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description apps parameter */ + apps: string[]; + }; + }; + }; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Removes the ability of an app to push to this branch. Only installed GitHub Apps with `write` access to the `contents` permission can be added as authorized actors on a protected branch. + * + * | Type | Description | + * | ------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------- | + * | `array` | The GitHub Apps that have push access to this branch. Use the app's `slug`. **Note**: The list of users, apps, and teams in total is limited to 100 items. | + */ + "repos/remove-app-access-restrictions": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. */ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["integration"][]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description apps parameter */ + apps: string[]; + }; + }; + }; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Lists the teams who have push access to this branch. The list includes child teams. + */ + "repos/get-teams-with-access-to-protected-branch": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. */ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["team"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. 
+ * + * Replaces the list of teams that have push access to this branch. This removes all teams that previously had push access and grants push access to the new list of teams. Team restrictions include child teams. + * + * | Type | Description | + * | ------- | ------------------------------------------------------------------------------------------------------------------------------------------ | + * | `array` | The teams that can have push access. Use the team's `slug`. **Note**: The list of users, apps, and teams in total is limited to 100 items. | + */ + "repos/set-team-access-restrictions": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. */ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["team"][]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The slug values for teams */ + teams: string[]; + }; + }; + }; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Grants the specified teams push access for this branch. You can also give push access to child teams. + * + * | Type | Description | + * | ------- | ------------------------------------------------------------------------------------------------------------------------------------------ | + * | `array` | The teams that can have push access. Use the team's `slug`. **Note**: The list of users, apps, and teams in total is limited to 100 items. | + */ + "repos/add-team-access-restrictions": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. */ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["team"][]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The slug values for teams */ + teams: string[]; + }; + }; + }; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Removes the ability of a team to push to this branch. You can also remove push access for child teams. 
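+ *
+ * #### Example request (illustrative only)
+ *
+ * A minimal sketch using `@octokit/core`, not part of the upstream description; the owner,
+ * repository, branch, and team slug are placeholders.
+ *
+ * ```
+ * const { Octokit } = require("@octokit/core");
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * octokit
+ *   .request(
+ *     "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams",
+ *     { owner: "octocat", repo: "hello-world", branch: "main", teams: ["justice-league"] }
+ *   )
+ *   .then(({ data }) => console.log(data)) // updated list of teams with push access
+ *   .catch(console.error);
+ * ```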
+ * + * | Type | Description | + * | ------- | --------------------------------------------------------------------------------------------------------------------------------------------------- | + * | `array` | Teams that should no longer have push access. Use the team's `slug`. **Note**: The list of users, apps, and teams in total is limited to 100 items. | + */ + "repos/remove-team-access-restrictions": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. */ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["team"][]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The slug values for teams */ + teams: string[]; + }; + }; + }; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Lists the people who have push access to this branch. + */ + "repos/get-users-with-access-to-protected-branch": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. */ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["simple-user"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Replaces the list of people that have push access to this branch. This removes all people that previously had push access and grants push access to the new list of people. + * + * | Type | Description | + * | ------- | ----------------------------------------------------------------------------------------------------------------------------- | + * | `array` | Usernames for people who can have push access. **Note**: The list of users, apps, and teams in total is limited to 100 items. | + */ + "repos/set-user-access-restrictions": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. 
*/ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["simple-user"][]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The username for users */ + users: string[]; + }; + }; + }; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Grants the specified people push access for this branch. + * + * | Type | Description | + * | ------- | ----------------------------------------------------------------------------------------------------------------------------- | + * | `array` | Usernames for people who can have push access. **Note**: The list of users, apps, and teams in total is limited to 100 items. | + */ + "repos/add-user-access-restrictions": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. */ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["simple-user"][]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The username for users */ + users: string[]; + }; + }; + }; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * Removes the ability of a user to push to this branch. + * + * | Type | Description | + * | ------- | --------------------------------------------------------------------------------------------------------------------------------------------- | + * | `array` | Usernames of the people who should no longer have push access. **Note**: The list of users, apps, and teams in total is limited to 100 items. | + */ + "repos/remove-user-access-restrictions": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. */ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["simple-user"][]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The username for users */ + users: string[]; + }; + }; + }; + }; + /** + * Renames a branch in a repository. 
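+ *
+ * A hypothetical sketch (assuming `@octokit/core` and that the operation is exposed at the
+ * standard `/branches/{branch}/rename` route; the names below are illustrative):
+ *
+ * ```ts
+ * import { Octokit } from "@octokit/core";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * // Rename a non-default branch; the 201 response is the renamed branch with its protection data.
+ * const { data: renamed } = await octokit.request(
+ *   "POST /repos/{owner}/{repo}/branches/{branch}/rename",
+ *   { owner: "octocat", repo: "hello-world", branch: "old-name", new_name: "renamed-branch" }
+ * );
+ * console.log(renamed.name);
+ * ```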
+ * + * **Note:** Although the API responds immediately, the branch rename process might take some extra time to complete in the background. You won't be able to push to the old branch name while the rename process is in progress. For more information, see "[Renaming a branch](https://docs.github.com/github/administering-a-repository/renaming-a-branch)". + * + * The permissions required to use this endpoint depends on whether you are renaming the default branch. + * + * To rename a non-default branch: + * + * * Users must have push access. + * * GitHub Apps must have the `contents:write` repository permission. + * + * To rename the default branch: + * + * * Users must have admin or owner permissions. + * * GitHub Apps must have the `administration:write` repository permission. + */ + "repos/rename-branch": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the branch. */ + branch: components["parameters"]["branch"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["branch-with-protection"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The new name of the branch. */ + new_name: string; + }; + }; + }; + }; + /** + * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array. + * + * Creates a new check run for a specific commit in a repository. Your GitHub App must have the `checks:write` permission to create check runs. + * + * In a check suite, GitHub limits the number of check runs with the same name to 1000. Once these check runs exceed 1000, GitHub will start to automatically delete older check runs. + */ + "checks/create": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["check-run"]; + }; + }; + }; + requestBody: { + content: { + "application/json": ( + | ({ + /** @enum {undefined} */ + status: "completed"; + } & { + conclusion: unknown; + } & { [key: string]: unknown }) + | ({ + /** @enum {undefined} */ + status?: "queued" | "in_progress"; + } & { [key: string]: unknown }) + ) & { + /** @description The name of the check. For example, "code-coverage". */ + name: string; + /** @description The SHA of the commit. */ + head_sha: string; + /** @description The URL of the integrator's site that has the full details of the check. If the integrator does not provide this, then the homepage of the GitHub app is used. */ + details_url?: string; + /** @description A reference for the run on the integrator's system. */ + external_id?: string; + /** + * @description The current status. + * @default queued + * @enum {string} + */ + status?: "queued" | "in_progress" | "completed"; + /** + * Format: date-time + * @description The time that the check run began. 
This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + started_at?: string; + /** + * @description **Required if you provide `completed_at` or a `status` of `completed`**. The final conclusion of the check. + * **Note:** Providing `conclusion` will automatically set the `status` parameter to `completed`. You cannot change a check run conclusion to `stale`, only GitHub can set this. + * @enum {string} + */ + conclusion?: + | "action_required" + | "cancelled" + | "failure" + | "neutral" + | "success" + | "skipped" + | "stale" + | "timed_out"; + /** + * Format: date-time + * @description The time the check completed. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + completed_at?: string; + /** @description Check runs can accept a variety of data in the `output` object, including a `title` and `summary` and can optionally provide descriptive details about the run. See the [`output` object](https://docs.github.com/rest/reference/checks#output-object) description. */ + output?: { + /** @description The title of the check run. */ + title: string; + /** @description The summary of the check run. This parameter supports Markdown. */ + summary: string; + /** @description The details of the check run. This parameter supports Markdown. */ + text?: string; + /** @description Adds information from your analysis to specific lines of code. Annotations are visible on GitHub in the **Checks** and **Files changed** tab of the pull request. The Checks API limits the number of annotations to a maximum of 50 per API request. To create more than 50 annotations, you have to make multiple requests to the [Update a check run](https://docs.github.com/rest/reference/checks#update-a-check-run) endpoint. Each time you update the check run, annotations are appended to the list of annotations that already exist for the check run. For details about how you can view annotations on GitHub, see "[About status checks](https://docs.github.com/articles/about-status-checks#checks)". See the [`annotations` object](https://docs.github.com/rest/reference/checks#annotations-object) description for details about how to use this parameter. */ + annotations?: { + /** @description The path of the file to add an annotation to. For example, `assets/css/main.css`. */ + path: string; + /** @description The start line of the annotation. */ + start_line: number; + /** @description The end line of the annotation. */ + end_line: number; + /** @description The start column of the annotation. Annotations only support `start_column` and `end_column` on the same line. Omit this parameter if `start_line` and `end_line` have different values. */ + start_column?: number; + /** @description The end column of the annotation. Annotations only support `start_column` and `end_column` on the same line. Omit this parameter if `start_line` and `end_line` have different values. */ + end_column?: number; + /** + * @description The level of the annotation. + * @enum {string} + */ + annotation_level: "notice" | "warning" | "failure"; + /** @description A short description of the feedback for these lines of code. The maximum size is 64 KB. */ + message: string; + /** @description The title that represents the annotation. The maximum size is 255 characters. */ + title?: string; + /** @description Details about this annotation. The maximum size is 64 KB. 
*/ + raw_details?: string; + }[]; + /** @description Adds images to the output displayed in the GitHub pull request UI. See the [`images` object](https://docs.github.com/rest/reference/checks#images-object) description for details. */ + images?: { + /** @description The alternative text for the image. */ + alt: string; + /** @description The full URL of the image. */ + image_url: string; + /** @description A short image description. */ + caption?: string; + }[]; + }; + /** @description Displays a button on GitHub that can be clicked to alert your app to do additional tasks. For example, a code linting app can display a button that automatically fixes detected errors. The button created in this object is displayed after the check run completes. When a user clicks the button, GitHub sends the [`check_run.requested_action` webhook](https://docs.github.com/webhooks/event-payloads/#check_run) to your app. Each action includes a `label`, `identifier` and `description`. A maximum of three actions are accepted. See the [`actions` object](https://docs.github.com/rest/reference/checks#actions-object) description. To learn more about check runs and requested actions, see "[Check runs and requested actions](https://docs.github.com/rest/reference/checks#check-runs-and-requested-actions)." */ + actions?: { + /** @description The text to be displayed on a button in the web UI. The maximum size is 20 characters. */ + label: string; + /** @description A short explanation of what this action would do. The maximum size is 40 characters. */ + description: string; + /** @description A reference for the action on the integrator's system. The maximum size is 20 characters. */ + identifier: string; + }[]; + }; + }; + }; + }; + /** + * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array. + * + * Gets a single check run using its `id`. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to get check runs. OAuth Apps and authenticated users must have the `repo` scope to get check runs in a private repository. + */ + "checks/get": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the check run. */ + check_run_id: components["parameters"]["check-run-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["check-run"]; + }; + }; + }; + }; + /** + * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array. + * + * Updates a check run for a specific commit in a repository. Your GitHub App must have the `checks:write` permission to edit check runs. + */ + "checks/update": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the check run. 
*/ + check_run_id: components["parameters"]["check-run-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["check-run"]; + }; + }; + }; + requestBody: { + content: { + "application/json": (Partial< + { + /** @enum {undefined} */ + status?: "completed"; + } & { + conclusion: unknown; + } & { [key: string]: unknown } + > & + Partial< + { + /** @enum {undefined} */ + status?: "queued" | "in_progress"; + } & { [key: string]: unknown } + >) & { + /** @description The name of the check. For example, "code-coverage". */ + name?: string; + /** @description The URL of the integrator's site that has the full details of the check. */ + details_url?: string; + /** @description A reference for the run on the integrator's system. */ + external_id?: string; + /** + * Format: date-time + * @description This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + started_at?: string; + /** + * @description The current status. + * @enum {string} + */ + status?: "queued" | "in_progress" | "completed"; + /** + * @description **Required if you provide `completed_at` or a `status` of `completed`**. The final conclusion of the check. + * **Note:** Providing `conclusion` will automatically set the `status` parameter to `completed`. You cannot change a check run conclusion to `stale`, only GitHub can set this. + * @enum {string} + */ + conclusion?: + | "action_required" + | "cancelled" + | "failure" + | "neutral" + | "success" + | "skipped" + | "stale" + | "timed_out"; + /** + * Format: date-time + * @description The time the check completed. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + completed_at?: string; + /** @description Check runs can accept a variety of data in the `output` object, including a `title` and `summary` and can optionally provide descriptive details about the run. See the [`output` object](https://docs.github.com/rest/reference/checks#output-object-1) description. */ + output?: { + /** @description **Required**. */ + title?: string; + /** @description Can contain Markdown. */ + summary: string; + /** @description Can contain Markdown. */ + text?: string; + /** @description Adds information from your analysis to specific lines of code. Annotations are visible in GitHub's pull request UI. Annotations are visible in GitHub's pull request UI. The Checks API limits the number of annotations to a maximum of 50 per API request. To create more than 50 annotations, you have to make multiple requests to the [Update a check run](https://docs.github.com/rest/reference/checks#update-a-check-run) endpoint. Each time you update the check run, annotations are appended to the list of annotations that already exist for the check run. For details about annotations in the UI, see "[About status checks](https://docs.github.com/articles/about-status-checks#checks)". See the [`annotations` object](https://docs.github.com/rest/reference/checks#annotations-object-1) description for details. */ + annotations?: { + /** @description The path of the file to add an annotation to. For example, `assets/css/main.css`. */ + path: string; + /** @description The start line of the annotation. */ + start_line: number; + /** @description The end line of the annotation. */ + end_line: number; + /** @description The start column of the annotation. Annotations only support `start_column` and `end_column` on the same line. 
Omit this parameter if `start_line` and `end_line` have different values. */ + start_column?: number; + /** @description The end column of the annotation. Annotations only support `start_column` and `end_column` on the same line. Omit this parameter if `start_line` and `end_line` have different values. */ + end_column?: number; + /** + * @description The level of the annotation. + * @enum {string} + */ + annotation_level: "notice" | "warning" | "failure"; + /** @description A short description of the feedback for these lines of code. The maximum size is 64 KB. */ + message: string; + /** @description The title that represents the annotation. The maximum size is 255 characters. */ + title?: string; + /** @description Details about this annotation. The maximum size is 64 KB. */ + raw_details?: string; + }[]; + /** @description Adds images to the output displayed in the GitHub pull request UI. See the [`images` object](https://docs.github.com/rest/reference/checks#annotations-object-1) description for details. */ + images?: { + /** @description The alternative text for the image. */ + alt: string; + /** @description The full URL of the image. */ + image_url: string; + /** @description A short image description. */ + caption?: string; + }[]; + }; + /** @description Possible further actions the integrator can perform, which a user may trigger. Each action includes a `label`, `identifier` and `description`. A maximum of three actions are accepted. See the [`actions` object](https://docs.github.com/rest/reference/checks#actions-object) description. To learn more about check runs and requested actions, see "[Check runs and requested actions](https://docs.github.com/rest/reference/checks#check-runs-and-requested-actions)." */ + actions?: { + /** @description The text to be displayed on a button in the web UI. The maximum size is 20 characters. */ + label: string; + /** @description A short explanation of what this action would do. The maximum size is 40 characters. */ + description: string; + /** @description A reference for the action on the integrator's system. The maximum size is 20 characters. */ + identifier: string; + }[]; + }; + }; + }; + }; + /** Lists annotations for a check run using the annotation `id`. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to get annotations for a check run. OAuth Apps and authenticated users must have the `repo` scope to get annotations for a check run in a private repository. */ + "checks/list-annotations": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the check run. */ + check_run_id: components["parameters"]["check-run-id"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["check-annotation"][]; + }; + }; + }; + }; + /** + * Triggers GitHub to rerequest an existing check run, without pushing new code to a repository. This endpoint will trigger the [`check_run` webhook](https://docs.github.com/webhooks/event-payloads/#check_run) event with the action `rerequested`. 
When a check run is `rerequested`, its `status` is reset to `queued` and the `conclusion` is cleared. + * + * To rerequest a check run, your GitHub App must have the `checks:read` permission on a private repository or pull access to a public repository. + */ + "checks/rerequest-run": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the check run. */ + check_run_id: components["parameters"]["check-run-id"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": { [key: string]: unknown }; + }; + }; + /** Forbidden if the check run is not rerequestable or doesn't belong to the authenticated GitHub App */ + 403: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + 404: components["responses"]["not_found"]; + /** Validation error if the check run is not rerequestable */ + 422: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + }; + }; + /** + * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array and a `null` value for `head_branch`. + * + * By default, check suites are automatically created when you create a [check run](https://docs.github.com/rest/reference/checks#check-runs). You only need to use this endpoint for manually creating check suites when you've disabled automatic creation using "[Update repository preferences for check suites](https://docs.github.com/rest/reference/checks#update-repository-preferences-for-check-suites)". Your GitHub App must have the `checks:write` permission to create check suites. + */ + "checks/create-suite": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response when the suite already exists */ + 200: { + content: { + "application/json": components["schemas"]["check-suite"]; + }; + }; + /** Response when the suite was created */ + 201: { + content: { + "application/json": components["schemas"]["check-suite"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The sha of the head commit. */ + head_sha: string; + }; + }; + }; + }; + /** Changes the default automatic flow when creating check suites. By default, a check suite is automatically created each time code is pushed to a repository. When you disable the automatic creation of check suites, you can manually [Create a check suite](https://docs.github.com/rest/reference/checks#create-a-check-suite). You must have admin permissions in the repository to set preferences for check suites. */ + "checks/set-suites-preferences": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. 
*/ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["check-suite-preference"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description Enables or disables automatic creation of CheckSuite events upon pushes to the repository. Enabled by default. See the [`auto_trigger_checks` object](https://docs.github.com/rest/reference/checks#auto_trigger_checks-object) description for details. */ + auto_trigger_checks?: { + /** @description The `id` of the GitHub App. */ + app_id: number; + /** + * @description Set to `true` to enable automatic creation of CheckSuite events upon pushes to the repository, or `false` to disable them. + * @default true + */ + setting: boolean; + }[]; + }; + }; + }; + }; + /** + * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array and a `null` value for `head_branch`. + * + * Gets a single check suite using its `id`. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to get check suites. OAuth Apps and authenticated users must have the `repo` scope to get check suites in a private repository. + */ + "checks/get-suite": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the check suite. */ + check_suite_id: components["parameters"]["check-suite-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["check-suite"]; + }; + }; + }; + }; + /** + * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array. + * + * Lists check runs for a check suite using its `id`. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to get check runs. OAuth Apps and authenticated users must have the `repo` scope to get check runs in a private repository. + */ + "checks/list-for-suite": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the check suite. */ + check_suite_id: components["parameters"]["check-suite-id"]; + }; + query: { + /** Returns check runs with the specified `name`. */ + check_name?: components["parameters"]["check-name"]; + /** Returns check runs with the specified `status`. */ + status?: components["parameters"]["status"]; + /** Filters check runs by their `completed_at` timestamp. `latest` returns the most recent check runs. */ + filter?: "latest" | "all"; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. 
*/ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": { + total_count: number; + check_runs: components["schemas"]["check-run"][]; + }; + }; + }; + }; + }; + /** + * Triggers GitHub to rerequest an existing check suite, without pushing new code to a repository. This endpoint will trigger the [`check_suite` webhook](https://docs.github.com/webhooks/event-payloads/#check_suite) event with the action `rerequested`. When a check suite is `rerequested`, its `status` is reset to `queued` and the `conclusion` is cleared. + * + * To rerequest a check suite, your GitHub App must have the `checks:read` permission on a private repository or pull access to a public repository. + */ + "checks/rerequest-suite": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the check suite. */ + check_suite_id: components["parameters"]["check-suite-id"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": { [key: string]: unknown }; + }; + }; + }; + }; + /** + * Lists all open code scanning alerts for the default branch (usually `main` + * or `master`). You must use an access token with the `security_events` scope to use + * this endpoint with private repos, the `public_repo` scope also grants permission to read + * security events on public repos only. GitHub Apps must have the `security_events` read + * permission to use this endpoint. + * + * The response includes a `most_recent_instance` object. + * This provides details of the most recent instance of this alert + * for the default branch or for the specified Git reference + * (if you used `ref` in the request). + */ + "code-scanning/list-alerts-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The name of a code scanning tool. Only results by this tool will be listed. You can specify the tool by using either `tool_name` or `tool_guid`, but not both. */ + tool_name?: components["parameters"]["tool-name"]; + /** The GUID of a code scanning tool. Only results by this tool will be listed. Note that some code scanning tools may not include a GUID in their analysis data. You can specify the tool by using either `tool_guid` or `tool_name`, but not both. */ + tool_guid?: components["parameters"]["tool-guid"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** The Git reference for the results you want to list. The `ref` for a branch can be formatted either as `refs/heads/` or simply ``. To reference a pull request use `refs/pull//merge`. */ + ref?: components["parameters"]["git-ref"]; + /** The direction to sort the results by. */ + direction?: components["parameters"]["direction"]; + /** The property by which to sort the results. . `number` is deprecated - we recommend that you use `created` instead. 
*/ + sort?: "created" | "number" | "updated"; + /** Set to `open`, `closed, `fixed`, or `dismissed` to list code scanning alerts in a specific state. */ + state?: components["schemas"]["code-scanning-alert-state"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["code-scanning-alert-items"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["code_scanning_forbidden_read"]; + 404: components["responses"]["not_found"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * Gets a single code scanning alert. You must use an access token with the `security_events` scope to use this endpoint with private repos, the `public_repo` scope also grants permission to read security events on public repos only. GitHub Apps must have the `security_events` read permission to use this endpoint. + * + * **Deprecation notice**: + * The instances field is deprecated and will, in future, not be included in the response for this endpoint. The example response reflects this change. The same information can now be retrieved via a GET request to the URL specified by `instances_url`. + */ + "code-scanning/get-alert": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies an alert. You can find this at the end of the URL for a code scanning alert within GitHub, and in the `number` field in the response from the `GET /repos/{owner}/{repo}/code-scanning/alerts` operation. */ + alert_number: components["parameters"]["alert-number"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["code-scanning-alert"]; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["code_scanning_forbidden_read"]; + 404: components["responses"]["not_found"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** Updates the status of a single code scanning alert. You must use an access token with the `security_events` scope to use this endpoint with private repositories. You can also use tokens with the `public_repo` scope for public repositories only. GitHub Apps must have the `security_events` write permission to use this endpoint. */ + "code-scanning/update-alert": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies an alert. You can find this at the end of the URL for a code scanning alert within GitHub, and in the `number` field in the response from the `GET /repos/{owner}/{repo}/code-scanning/alerts` operation. 
*/ + alert_number: components["parameters"]["alert-number"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["code-scanning-alert"]; + }; + }; + 403: components["responses"]["code_scanning_forbidden_write"]; + 404: components["responses"]["not_found"]; + 503: components["responses"]["service_unavailable"]; + }; + requestBody: { + content: { + "application/json": { + state: components["schemas"]["code-scanning-alert-set-state"]; + dismissed_reason?: components["schemas"]["code-scanning-alert-dismissed-reason"]; + dismissed_comment?: components["schemas"]["code-scanning-alert-dismissed-comment"]; + }; + }; + }; + }; + /** + * Lists all instances of the specified code scanning alert. + * You must use an access token with the `security_events` scope to use this endpoint with private repos, + * the `public_repo` scope also grants permission to read security events on public repos only. + * GitHub Apps must have the `security_events` read permission to use this endpoint. + */ + "code-scanning/list-alert-instances": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies an alert. You can find this at the end of the URL for a code scanning alert within GitHub, and in the `number` field in the response from the `GET /repos/{owner}/{repo}/code-scanning/alerts` operation. */ + alert_number: components["parameters"]["alert-number"]; + }; + query: { + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** The Git reference for the results you want to list. The `ref` for a branch can be formatted either as `refs/heads/` or simply ``. To reference a pull request use `refs/pull//merge`. */ + ref?: components["parameters"]["git-ref"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["code-scanning-alert-instance"][]; + }; + }; + 403: components["responses"]["code_scanning_forbidden_read"]; + 404: components["responses"]["not_found"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * Lists the details of all code scanning analyses for a repository, + * starting with the most recent. + * The response is paginated and you can use the `page` and `per_page` parameters + * to list the analyses you're interested in. + * By default 30 analyses are listed per page. + * + * The `rules_count` field in the response give the number of rules + * that were run in the analysis. + * For very old analyses this data is not available, + * and `0` is returned in this field. + * + * You must use an access token with the `security_events` scope to use this endpoint with private repos, + * the `public_repo` scope also grants permission to read security events on public repos only. + * GitHub Apps must have the `security_events` read permission to use this endpoint. + * + * **Deprecation notice**: + * The `tool_name` field is deprecated and will, in future, not be included in the response for this endpoint. The example response reflects this change. The tool name can now be found inside the `tool` field. 
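+ *
+ * A hypothetical usage sketch (assuming `@octokit/core`; the owner, repo, `tool_name`, and
+ * paging values are illustrative, and the printed fields follow the `code-scanning-analysis`
+ * schema):
+ *
+ * ```ts
+ * import { Octokit } from "@octokit/core";
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * // List the most recent CodeQL analyses, 30 per page.
+ * const { data: analyses } = await octokit.request(
+ *   "GET /repos/{owner}/{repo}/code-scanning/analyses",
+ *   { owner: "octocat", repo: "hello-world", tool_name: "CodeQL", per_page: 30, page: 1 }
+ * );
+ *
+ * for (const analysis of analyses) {
+ *   console.log(analysis.id, analysis.created_at, analysis.deletable);
+ * }
+ * ```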
+ */ + "code-scanning/list-recent-analyses": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The name of a code scanning tool. Only results by this tool will be listed. You can specify the tool by using either `tool_name` or `tool_guid`, but not both. */ + tool_name?: components["parameters"]["tool-name"]; + /** The GUID of a code scanning tool. Only results by this tool will be listed. Note that some code scanning tools may not include a GUID in their analysis data. You can specify the tool by using either `tool_guid` or `tool_name`, but not both. */ + tool_guid?: components["parameters"]["tool-guid"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** The Git reference for the analyses you want to list. The `ref` for a branch can be formatted either as `refs/heads/` or simply ``. To reference a pull request use `refs/pull//merge`. */ + ref?: components["schemas"]["code-scanning-ref"]; + /** Filter analyses belonging to the same SARIF upload. */ + sarif_id?: components["schemas"]["code-scanning-analysis-sarif-id"]; + /** The direction to sort the results by. */ + direction?: components["parameters"]["direction"]; + /** The property by which to sort the results. */ + sort?: "created"; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["code-scanning-analysis"][]; + }; + }; + 403: components["responses"]["code_scanning_forbidden_read"]; + 404: components["responses"]["not_found"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * Gets a specified code scanning analysis for a repository. + * You must use an access token with the `security_events` scope to use this endpoint with private repos, + * the `public_repo` scope also grants permission to read security events on public repos only. + * GitHub Apps must have the `security_events` read permission to use this endpoint. + * + * The default JSON response contains fields that describe the analysis. + * This includes the Git reference and commit SHA to which the analysis relates, + * the datetime of the analysis, the name of the code scanning tool, + * and the number of alerts. + * + * The `rules_count` field in the default response give the number of rules + * that were run in the analysis. + * For very old analyses this data is not available, + * and `0` is returned in this field. + * + * If you use the Accept header `application/sarif+json`, + * the response contains the analysis data that was uploaded. + * This is formatted as + * [SARIF version 2.1.0](https://docs.oasis-open.org/sarif/sarif/v2.1.0/cs01/sarif-v2.1.0-cs01.html). + */ + "code-scanning/get-analysis": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The ID of the analysis, as returned from the `GET /repos/{owner}/{repo}/code-scanning/analyses` operation. 
*/ + analysis_id: number; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["code-scanning-analysis"]; + "application/json+sarif": { [key: string]: unknown }; + }; + }; + 403: components["responses"]["code_scanning_forbidden_read"]; + 404: components["responses"]["not_found"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * Deletes a specified code scanning analysis from a repository. For + * private repositories, you must use an access token with the `repo` scope. For public repositories, + * you must use an access token with `public_repo` scope. + * GitHub Apps must have the `security_events` write permission to use this endpoint. + * + * You can delete one analysis at a time. + * To delete a series of analyses, start with the most recent analysis and work backwards. + * Conceptually, the process is similar to the undo function in a text editor. + * + * When you list the analyses for a repository, + * one or more will be identified as deletable in the response: + * + * ``` + * "deletable": true + * ``` + * + * An analysis is deletable when it's the most recent in a set of analyses. + * Typically, a repository will have multiple sets of analyses + * for each enabled code scanning tool, + * where a set is determined by a unique combination of analysis values: + * + * * `ref` + * * `tool` + * * `category` + * + * If you attempt to delete an analysis that is not the most recent in a set, + * you'll get a 400 response with the message: + * + * ``` + * Analysis specified is not deletable. + * ``` + * + * The response from a successful `DELETE` operation provides you with + * two alternative URLs for deleting the next analysis in the set: + * `next_analysis_url` and `confirm_delete_url`. + * Use the `next_analysis_url` URL if you want to avoid accidentally deleting the final analysis + * in a set. This is a useful option if you want to preserve at least one analysis + * for the specified tool in your repository. + * Use the `confirm_delete_url` URL if you are content to remove all analyses for a tool. + * When you delete the last analysis in a set, the value of `next_analysis_url` and `confirm_delete_url` + * in the 200 response is `null`. + * + * As an example of the deletion process, + * let's imagine that you added a workflow that configured a particular code scanning tool + * to analyze the code in a repository. This tool has added 15 analyses: + * 10 on the default branch, and another 5 on a topic branch. + * You therefore have two separate sets of analyses for this tool. + * You've now decided that you want to remove all of the analyses for the tool. + * To do this you must make 15 separate deletion requests. + * To start, you must find an analysis that's identified as deletable. + * Each set of analyses always has one that's identified as deletable. + * Having found the deletable analysis for one of the two sets, + * delete this analysis and then continue deleting the next analysis in the set until they're all deleted. + * Then repeat the process for the second set. + * The procedure therefore consists of a nested loop: + * + * **Outer loop**: + * * List the analyses for the repository, filtered by tool. + * * Parse this list to find a deletable analysis. If found: + * + * **Inner loop**: + * * Delete the identified analysis. + * * Parse the response for the value of `confirm_delete_url` and, if found, use this in the next iteration. 
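+ *
+ * A sketch of that nested loop (illustrative only; it uses the Fetch API directly so the
+ * `confirm_delete_url` returned by each deletion can be followed verbatim, and it assumes a
+ * token in `GITHUB_TOKEN` plus an illustrative owner, repo, and tool name):
+ *
+ * ```ts
+ * const api = "https://api.github.com";
+ * const headers = {
+ *   accept: "application/vnd.github+json",
+ *   authorization: `Bearer ${process.env.GITHUB_TOKEN}`,
+ * };
+ * const analysesPath = "/repos/octocat/hello-world/code-scanning/analyses";
+ *
+ * for (;;) {
+ *   // Outer loop: list this tool's analyses and look for one marked deletable.
+ *   const list = await fetch(`${api}${analysesPath}?tool_name=CodeQL`, { headers });
+ *   const analyses: Array<{ id: number; deletable: boolean }> = await list.json();
+ *   const target = analyses.find((a) => a.deletable);
+ *   if (!target) break; // nothing deletable left for this tool
+ *
+ *   // Inner loop: delete it, then follow confirm_delete_url until it comes back null.
+ *   let next: string | null = `${api}${analysesPath}/${target.id}?confirm_delete=true`;
+ *   while (next) {
+ *     const res = await fetch(next, { method: "DELETE", headers });
+ *     const body: { confirm_delete_url: string | null } = await res.json();
+ *     next = body.confirm_delete_url;
+ *   }
+ * }
+ * ```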
+ * + * The above process assumes that you want to remove all trace of the tool's analyses from the GitHub user interface, for the specified repository, and it therefore uses the `confirm_delete_url` value. Alternatively, you could use the `next_analysis_url` value, which would leave the last analysis in each set undeleted to avoid removing a tool's analysis entirely. + */ + "code-scanning/delete-analysis": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The ID of the analysis, as returned from the `GET /repos/{owner}/{repo}/code-scanning/analyses` operation. */ + analysis_id: number; + }; + query: { + /** Allow deletion if the specified analysis is the last in a set. If you attempt to delete the final analysis in a set without setting this parameter to `true`, you'll get a 400 response with the message: `Analysis is last of its type and deletion may result in the loss of historical alert data. Please specify confirm_delete.` */ + confirm_delete?: string | null; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["code-scanning-analysis-deletion"]; + }; + }; + 400: components["responses"]["bad_request"]; + 403: components["responses"]["code_scanning_forbidden_write"]; + 404: components["responses"]["not_found"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * Lists the CodeQL databases that are available in a repository. + * + * For private repositories, you must use an access token with the `security_events` scope. + * For public repositories, you can use tokens with the `security_events` or `public_repo` scope. + * GitHub Apps must have the `contents` read permission to use this endpoint. + */ + "code-scanning/list-codeql-databases": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["code-scanning-codeql-database"][]; + }; + }; + 403: components["responses"]["code_scanning_forbidden_read"]; + 404: components["responses"]["not_found"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * Gets a CodeQL database for a language in a repository. + * + * By default this endpoint returns JSON metadata about the CodeQL database. To + * download the CodeQL database binary content, set the `Accept` header of the request + * to [`application/zip`](https://docs.github.com/rest/overview/media-types), and make sure + * your HTTP client is configured to follow redirects or use the `Location` header + * to make a second request to get the redirect URL. + * + * For private repositories, you must use an access token with the `security_events` scope. + * For public repositories, you can use tokens with the `security_events` or `public_repo` scope. + * GitHub Apps must have the `contents` read permission to use this endpoint. + */ + "code-scanning/get-codeql-database": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. 
The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The language of the CodeQL database. */ + language: string; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["code-scanning-codeql-database"]; + }; + }; + 302: components["responses"]["found"]; + 403: components["responses"]["code_scanning_forbidden_read"]; + 404: components["responses"]["not_found"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * Uploads SARIF data containing the results of a code scanning analysis to make the results available in a repository. You must use an access token with the `security_events` scope to use this endpoint for private repositories. You can also use tokens with the `public_repo` scope for public repositories only. GitHub Apps must have the `security_events` write permission to use this endpoint. + * + * There are two places where you can upload code scanning results. + * - If you upload to a pull request, for example `--ref refs/pull/42/merge` or `--ref refs/pull/42/head`, then the results appear as alerts in a pull request check. For more information, see "[Triaging code scanning alerts in pull requests](/code-security/secure-coding/triaging-code-scanning-alerts-in-pull-requests)." + * - If you upload to a branch, for example `--ref refs/heads/my-branch`, then the results appear in the **Security** tab for your repository. For more information, see "[Managing code scanning alerts for your repository](/code-security/secure-coding/managing-code-scanning-alerts-for-your-repository#viewing-the-alerts-for-a-repository)." + * + * You must compress the SARIF-formatted analysis data that you want to upload, using `gzip`, and then encode it as a Base64 format string. For example: + * + * ``` + * gzip -c analysis-data.sarif | base64 -w0 + * ``` + * + * SARIF upload supports a maximum of 5000 results per analysis run. Any results over this limit are ignored and any SARIF uploads with more than 25,000 results are rejected. Typically, but not necessarily, a SARIF file contains a single run of a single tool. If a code scanning tool generates too many results, you should update the analysis configuration to run only the most important rules or queries. + * + * The `202 Accepted`, response includes an `id` value. + * You can use this ID to check the status of the upload by using this for the `/sarifs/{sarif_id}` endpoint. + * For more information, see "[Get information about a SARIF upload](/rest/reference/code-scanning#get-information-about-a-sarif-upload)." + */ + "code-scanning/upload-sarif": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. 
*/ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 202: { + content: { + "application/json": components["schemas"]["code-scanning-sarifs-receipt"]; + }; + }; + /** Bad Request if the sarif field is invalid */ + 400: unknown; + 403: components["responses"]["code_scanning_forbidden_write"]; + 404: components["responses"]["not_found"]; + /** Payload Too Large if the sarif field is too large */ + 413: unknown; + 503: components["responses"]["service_unavailable"]; + }; + requestBody: { + content: { + "application/json": { + commit_sha: components["schemas"]["code-scanning-analysis-commit-sha"]; + ref: components["schemas"]["code-scanning-ref"]; + sarif: components["schemas"]["code-scanning-analysis-sarif-file"]; + /** + * Format: uri + * @description The base directory used in the analysis, as it appears in the SARIF file. + * This property is used to convert file paths from absolute to relative, so that alerts can be mapped to their correct location in the repository. + * @example file:///github/workspace/ + */ + checkout_uri?: string; + /** + * Format: date-time + * @description The time that the analysis run began. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + started_at?: string; + /** @description The name of the tool used to generate the code scanning analysis. If this parameter is not used, the tool name defaults to "API". If the uploaded SARIF contains a tool GUID, this will be available for filtering using the `tool_guid` parameter of operations such as `GET /repos/{owner}/{repo}/code-scanning/alerts`. */ + tool_name?: string; + }; + }; + }; + }; + /** Gets information about a SARIF upload, including the status and the URL of the analysis that was uploaded so that you can retrieve details of the analysis. For more information, see "[Get a code scanning analysis for a repository](/rest/reference/code-scanning#get-a-code-scanning-analysis-for-a-repository)." You must use an access token with the `security_events` scope to use this endpoint with private repos, the `public_repo` scope also grants permission to read security events on public repos only. GitHub Apps must have the `security_events` read permission to use this endpoint. */ + "code-scanning/get-sarif": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The SARIF ID obtained after uploading. */ + sarif_id: string; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["code-scanning-sarifs-status"]; + }; + }; + 403: components["responses"]["code_scanning_forbidden_read"]; + /** Not Found if the sarif id does not match any upload */ + 404: unknown; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * List any syntax errors that are detected in the CODEOWNERS + * file. + * + * For more information about the correct CODEOWNERS syntax, + * see "[About code owners](https://docs.github.com/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-code-owners)." + */ + "repos/codeowners-errors": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. 
*/ + repo: components["parameters"]["repo"]; + }; + query: { + /** A branch, tag or commit name used to determine which version of the CODEOWNERS file to use. Default: the repository's default branch (e.g. `main`) */ + ref?: string; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["codeowners-errors"]; + }; + }; + /** Resource not found */ + 404: unknown; + }; + }; + /** + * Lists the codespaces associated to a specified repository and the authenticated user. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces` repository permission to use this endpoint. + */ + "codespaces/list-in-repository-for-authenticated-user": { + parameters: { + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": { + total_count: number; + codespaces: components["schemas"]["codespace"][]; + }; + }; + }; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Creates a codespace owned by the authenticated user in the specified repository. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces` repository permission to use this endpoint. + */ + "codespaces/create-with-repo-for-authenticated-user": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response when the codespace was successfully created */ + 201: { + content: { + "application/json": components["schemas"]["codespace"]; + }; + }; + /** Response when the codespace creation partially failed but is being retried in the background */ + 202: { + content: { + "application/json": components["schemas"]["codespace"]; + }; + }; + 400: components["responses"]["bad_request"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + requestBody: { + content: { + "application/json": { + /** @description Git ref (typically a branch name) for this codespace */ + ref?: string; + /** @description Location for this codespace. 
Assigned by IP if not provided */ + location?: string; + /** @description IP for location auto-detection when proxying a request */ + client_ip?: string; + /** @description Machine type to use for this codespace */ + machine?: string; + /** @description Path to devcontainer.json config to use for this codespace */ + devcontainer_path?: string; + /** @description Whether to authorize requested permissions from devcontainer.json */ + multi_repo_permissions_opt_out?: boolean; + /** @description Working directory for this codespace */ + working_directory?: string; + /** @description Time in minutes before codespace stops from inactivity */ + idle_timeout_minutes?: number; + /** @description Display name for this codespace */ + display_name?: string; + /** @description Duration in minutes after codespace has gone idle in which it will be deleted. Must be integer minutes between 0 and 43200 (30 days). */ + retention_period_minutes?: number; + } | null; + }; + }; + }; + /** + * Lists the devcontainer.json files associated with a specified repository and the authenticated user. These files + * specify launchpoint configurations for codespaces created within the repository. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces_metadata` repository permission to use this endpoint. + */ + "codespaces/list-devcontainers-in-repository-for-authenticated-user": { + parameters: { + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": { + total_count: number; + devcontainers: { + path: string; + name?: string; + }[]; + }; + }; + }; + 400: components["responses"]["bad_request"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * List the machine types available for a given repository based on its configuration. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces_metadata` repository permission to use this endpoint. + */ + "codespaces/repo-machines-for-authenticated-user": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The location to check for available machines. Assigned by IP if not provided. 
*/ + location?: string; + /** IP for location auto-detection when proxying a request */ + client_ip?: string; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": { + total_count: number; + machines: components["schemas"]["codespace-machine"][]; + }; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Gets the default attributes for codespaces created by the user with the repository. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces` repository permission to use this endpoint. + */ + "codespaces/pre-flight-with-repo-for-authenticated-user": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The branch or commit to check for a default devcontainer path. If not specified, the default branch will be checked. */ + ref?: string; + /** An alternative IP for default location auto-detection, such as when proxying a request. */ + client_ip?: string; + }; + }; + responses: { + /** Response when a user is able to create codespaces from the repository. */ + 200: { + content: { + "application/json": { + billable_owner?: components["schemas"]["simple-user"]; + defaults?: { + location: string; + devcontainer_path: string | null; + }; + }; + }; + }; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Lists all secrets available in a repository without revealing their encrypted values. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `codespaces_secrets` repository permission to use this endpoint. */ + "codespaces/list-repo-secrets": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": { + total_count: number; + secrets: components["schemas"]["repo-codespaces-secret"][]; + }; + }; + }; + }; + }; + /** Gets your public key, which you need to encrypt secrets. You need to encrypt a secret before you can create or update secrets. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `codespaces_secrets` repository permission to use this endpoint. */ + "codespaces/get-repo-public-key": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. 
*/ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["codespaces-public-key"]; + }; + }; + }; + }; + /** Gets a single repository secret without revealing its encrypted value. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `codespaces_secrets` repository permission to use this endpoint. */ + "codespaces/get-repo-secret": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["repo-codespaces-secret"]; + }; + }; + }; + }; + /** + * Creates or updates a repository secret with an encrypted value. Encrypt your secret using + * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access + * token with the `repo` scope to use this endpoint. GitHub Apps must have the `codespaces_secrets` repository + * permission to use this endpoint. + * + * #### Example of encrypting a secret using Node.js + * + * Encrypt your secret using the [tweetsodium](https://github.com/github/tweetsodium) library. + * + * ``` + * const sodium = require('tweetsodium'); + * + * const key = "base64-encoded-public-key"; + * const value = "plain-text-secret"; + * + * // Convert the message and key to Uint8Array's (Buffer implements that interface) + * const messageBytes = Buffer.from(value); + * const keyBytes = Buffer.from(key, 'base64'); + * + * // Encrypt using LibSodium. + * const encryptedBytes = sodium.seal(messageBytes, keyBytes); + * + * // Base64 the encrypted secret + * const encrypted = Buffer.from(encryptedBytes).toString('base64'); + * + * console.log(encrypted); + * ``` + * + * + * #### Example of encrypting a secret using Python + * + * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/latest/public/#nacl-public-sealedbox) with Python 3. + * + * ``` + * from base64 import b64encode + * from nacl import encoding, public + * + * def encrypt(public_key: str, secret_value: str) -> str: + * """Encrypt a Unicode string using the public key.""" + * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) + * sealed_box = public.SealedBox(public_key) + * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) + * return b64encode(encrypted).decode("utf-8") + * ``` + * + * #### Example of encrypting a secret using C# + * + * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. + * + * ``` + * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); + * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); + * + * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); + * + * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); + * ``` + * + * #### Example of encrypting a secret using Ruby + * + * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. 
+ * + * ```ruby + * require "rbnacl" + * require "base64" + * + * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") + * public_key = RbNaCl::PublicKey.new(key) + * + * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) + * encrypted_secret = box.encrypt("my_secret") + * + * # Print the base64 encoded secret + * puts Base64.strict_encode64(encrypted_secret) + * ``` + */ + "codespaces/create-or-update-repo-secret": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** Response when creating a secret */ + 201: { + content: { + "application/json": { [key: string]: unknown }; + }; + }; + /** Response when updating a secret */ + 204: never; + }; + requestBody: { + content: { + "application/json": { + /** @description Value for your secret, encrypted with [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages) using the public key retrieved from the [Get a repository public key](https://docs.github.com/rest/reference/codespaces#get-a-repository-public-key) endpoint. */ + encrypted_value?: string; + /** @description ID of the key you used to encrypt the secret. */ + key_id?: string; + }; + }; + }; + }; + /** Deletes a secret in a repository using the secret name. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `codespaces_secrets` repository permission to use this endpoint. */ + "codespaces/delete-repo-secret": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * For organization-owned repositories, the list of collaborators includes outside collaborators, organization members that are direct collaborators, organization members with access through team memberships, organization members with access through default organization permissions, and organization owners. + * Organization members with write, maintain, or admin privileges on the organization-owned repository can use this endpoint. + * + * Team members will include the members of child teams. + * + * You must authenticate using an access token with the `read:org` and `repo` scopes with push access to use this + * endpoint. GitHub Apps must have the `members` organization permission and `metadata` repository permission to use this + * endpoint. + */ + "repos/list-collaborators": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** Filter collaborators returned by their affiliation. `outside` means all outside collaborators of an organization-owned repository. `direct` means all collaborators with permissions to an organization-owned repository, regardless of organization membership status. 
`all` means all collaborators the authenticated user can see. */ + affiliation?: "outside" | "direct" | "all"; + /** Filter collaborators by the permissions they have on the repository. If not specified, all collaborators will be returned. */ + permission?: "pull" | "triage" | "push" | "maintain" | "admin"; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["collaborator"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * For organization-owned repositories, the list of collaborators includes outside collaborators, organization members that are direct collaborators, organization members with access through team memberships, organization members with access through default organization permissions, and organization owners. + * + * Team members will include the members of child teams. + * + * You must authenticate using an access token with the `read:org` and `repo` scopes with push access to use this + * endpoint. GitHub Apps must have the `members` organization permission and `metadata` repository permission to use this + * endpoint. + */ + "repos/check-collaborator": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response if user is a collaborator */ + 204: never; + /** Not Found if user is not a collaborator */ + 404: unknown; + }; + }; + /** + * This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + * + * Adding an outside collaborator may be restricted by enterprise administrators. For more information, see "[Enforcing repository management policies in your enterprise](https://docs.github.com/enterprise-cloud@latest/admin/policies/enforcing-policies-for-your-enterprise/enforcing-repository-management-policies-in-your-enterprise#enforcing-a-policy-for-inviting-outside-collaborators-to-repositories)." + * + * For more information on permission levels, see "[Repository permission levels for an organization](https://docs.github.com/github/setting-up-and-managing-organizations-and-teams/repository-permission-levels-for-an-organization#permission-levels-for-repositories-owned-by-an-organization)". There are restrictions on which permissions can be granted to organization members when an organization base role is in place. In this case, the permission being given must be equal to or higher than the org base permission. 
Otherwise, the request will fail with: + * + * ``` + * Cannot assign {member} permission of {role name} + * ``` + * + * Note that, if you choose not to pass any parameters, you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." + * + * The invitee will receive a notification that they have been invited to the repository, which they must accept or decline. They may do this via the notifications page, the email they receive, or by using the [repository invitations API endpoints](https://docs.github.com/rest/reference/repos#invitations). + * + * **Updating an existing collaborator's permission level** + * + * The endpoint can also be used to change the permissions of an existing collaborator without first removing and re-adding the collaborator. To change the permissions, use the same endpoint and pass a different `permission` parameter. The response will be a `204`, with no other indication that the permission level changed. + * + * **Rate limits** + * + * You are limited to sending 50 invitations to a repository per 24 hour period. Note there is no limit if you are inviting organization members to an organization repository. + */ + "repos/add-collaborator": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response when a new invitation is created */ + 201: { + content: { + "application/json": components["schemas"]["repository-invitation"]; + }; + }; + /** + * Response when: + * - an existing collaborator is added as a collaborator + * - an organization member is added as an individual collaborator + * - an existing team member (whose team is also a repository collaborator) is added as an individual collaborator + */ + 204: never; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The permission to grant the collaborator. **Only valid on organization-owned repositories.** We accept the following permissions to be set: `pull`, `triage`, `push`, `maintain`, `admin` and you can also specify a custom repository role name, if the owning organization has defined any. + * @default push + */ + permission?: string; + }; + }; + }; + }; + "repos/remove-collaborator": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** Checks the repository permission of a collaborator. The possible repository permissions are `admin`, `write`, `read`, and `none`. */ + "repos/get-collaborator-permission-level": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. 
*/ + repo: components["parameters"]["repo"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** if user has admin permissions */ + 200: { + content: { + "application/json": components["schemas"]["repository-collaborator-permission"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Commit Comments use [these custom media types](https://docs.github.com/rest/reference/repos#custom-media-types). You can read more about the use of media types in the API [here](https://docs.github.com/rest/overview/media-types/). + * + * Comments are ordered by ascending ID. + */ + "repos/list-commit-comments-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["commit-comment"][]; + }; + }; + }; + }; + "repos/get-commit-comment": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the comment. */ + comment_id: components["parameters"]["comment-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["commit-comment"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + "repos/delete-commit-comment": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the comment. */ + comment_id: components["parameters"]["comment-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 404: components["responses"]["not_found"]; + }; + }; + "repos/update-commit-comment": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the comment. */ + comment_id: components["parameters"]["comment-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["commit-comment"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The contents of the comment */ + body: string; + }; + }; + }; + }; + /** List the reactions to a [commit comment](https://docs.github.com/rest/reference/repos#comments). */ + "reactions/list-for-commit-comment": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. 
*/ + repo: components["parameters"]["repo"]; + /** The unique identifier of the comment. */ + comment_id: components["parameters"]["comment-id"]; + }; + query: { + /** Returns a single [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types). Omit this parameter to list all reactions to a commit comment. */ + content?: + | "+1" + | "-1" + | "laugh" + | "confused" + | "heart" + | "hooray" + | "rocket" + | "eyes"; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["reaction"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** Create a reaction to a [commit comment](https://docs.github.com/rest/reference/repos#comments). A response with an HTTP `200` status means that you already added the reaction type to this commit comment. */ + "reactions/create-for-commit-comment": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the comment. */ + comment_id: components["parameters"]["comment-id"]; + }; + }; + responses: { + /** Reaction exists */ + 200: { + content: { + "application/json": components["schemas"]["reaction"]; + }; + }; + /** Reaction created */ + 201: { + content: { + "application/json": components["schemas"]["reaction"]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types) to add to the commit comment. + * @enum {string} + */ + content: + | "+1" + | "-1" + | "laugh" + | "confused" + | "heart" + | "hooray" + | "rocket" + | "eyes"; + }; + }; + }; + }; + /** + * **Note:** You can also specify a repository by `repository_id` using the route `DELETE /repositories/:repository_id/comments/:comment_id/reactions/:reaction_id`. + * + * Delete a reaction to a [commit comment](https://docs.github.com/rest/reference/repos#comments). + */ + "reactions/delete-for-commit-comment": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the comment. */ + comment_id: components["parameters"]["comment-id"]; + /** The unique identifier of the reaction. */ + reaction_id: components["parameters"]["reaction-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. 
| + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. | + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. | + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. | + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. | + */ + "repos/list-commits": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** SHA or branch to start listing commits from. Default: the repository’s default branch (usually `master`). */ + sha?: string; + /** Only commits containing this file path will be returned. */ + path?: string; + /** GitHub login or email address by which to filter by commit author. */ + author?: string; + /** Only show notifications updated after the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ + since?: components["parameters"]["since"]; + /** Only commits before this date will be returned. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ + until?: string; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["commit"][]; + }; + }; + 400: components["responses"]["bad_request"]; + 404: components["responses"]["not_found"]; + 409: components["responses"]["conflict"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Protected branches are available in public repositories with GitHub Free and GitHub Free for organizations, and in public and private repositories with GitHub Pro, GitHub Team, GitHub Enterprise Cloud, and GitHub Enterprise Server. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. 
+ * + * Returns all branches where the given commit SHA is the HEAD, or latest commit for the branch. + */ + "repos/list-branches-for-head-commit": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The SHA of the commit. */ + commit_sha: components["parameters"]["commit-sha"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["branch-short"][]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + }; + /** Use the `:commit_sha` to specify the commit that will have its comments listed. */ + "repos/list-comments-for-commit": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The SHA of the commit. */ + commit_sha: components["parameters"]["commit-sha"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["commit-comment"][]; + }; + }; + }; + }; + /** + * Create a comment for a commit using its `:commit_sha`. + * + * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + "repos/create-commit-comment": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The SHA of the commit. */ + commit_sha: components["parameters"]["commit-sha"]; + }; + }; + responses: { + /** Response */ + 201: { + headers: { + Location?: string; + }; + content: { + "application/json": components["schemas"]["commit-comment"]; + }; + }; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The contents of the comment. */ + body: string; + /** @description Relative path of the file to comment on. */ + path?: string; + /** @description Line index in the diff to comment on. */ + position?: number; + /** @description **Deprecated**. Use **position** parameter instead. Line number in the file to comment on. */ + line?: number; + }; + }; + }; + }; + /** Lists the merged pull request that introduced the commit to the repository. If the commit is not present in the default branch, additionally returns open pull requests associated with the commit. The results may include open and closed pull requests. 
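+ *
+ * An illustrative call (not part of the upstream OpenAPI description): a minimal sketch
+ * using `@octokit/core`'s `request` method, assuming `GITHUB_TOKEN` holds a suitable
+ * access token; `OWNER`, `REPO`, and `COMMIT_SHA` are placeholders.
+ *
+ * ```
+ * const { Octokit } = require("@octokit/core");
+ *
+ * const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
+ *
+ * // GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls
+ * octokit
+ *   .request("GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", {
+ *     owner: "OWNER",
+ *     repo: "REPO",
+ *     commit_sha: "COMMIT_SHA",
+ *     per_page: 30,
+ *   })
+ *   .then(({ data: pulls }) => {
+ *     // Each element matches components["schemas"]["pull-request-simple"]
+ *     pulls.forEach((pr) => console.log(pr.number, pr.state, pr.title));
+ *   })
+ *   .catch(console.error);
+ * ```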
*/ + "repos/list-pull-requests-associated-with-commit": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The SHA of the commit. */ + commit_sha: components["parameters"]["commit-sha"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["pull-request-simple"][]; + }; + }; + }; + }; + /** + * Returns the contents of a single commit reference. You must have `read` access for the repository to use this endpoint. + * + * **Note:** If there are more than 300 files in the commit diff, the response will include pagination link headers for the remaining files, up to a limit of 3000 files. Each page contains the static commit information, and the only changes are to the file listing. + * + * You can pass the appropriate [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) to fetch `diff` and `patch` formats. Diffs with binary data will have no `patch` property. + * + * To return only the SHA-1 hash of the commit reference, you can provide the `sha` custom [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) in the `Accept` header. You can use this endpoint to check if a remote reference's SHA-1 hash is the same as your local reference's SHA-1 hash by providing the local SHA-1 reference as the ETag. + * + * **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. | + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. | + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. 
| + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. | + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. | + */ + "repos/get-commit": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** ref parameter */ + ref: string; + }; + query: { + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["commit"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + 500: components["responses"]["internal_error"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array. + * + * Lists check runs for a commit ref. The `ref` can be a SHA, branch name, or a tag name. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to get check runs. OAuth Apps and authenticated users must have the `repo` scope to get check runs in a private repository. + */ + "checks/list-for-ref": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** ref parameter */ + ref: string; + }; + query: { + /** Returns check runs with the specified `name`. */ + check_name?: components["parameters"]["check-name"]; + /** Returns check runs with the specified `status`. */ + status?: components["parameters"]["status"]; + /** Filters check runs by their `completed_at` timestamp. `latest` returns the most recent check runs. */ + filter?: "latest" | "all"; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + app_id?: number; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": { + total_count: number; + check_runs: components["schemas"]["check-run"][]; + }; + }; + }; + }; + }; + /** + * **Note:** The Checks API only looks for pushes in the repository where the check suite or check run were created. Pushes to a branch in a forked repository are not detected and return an empty `pull_requests` array and a `null` value for `head_branch`. + * + * Lists check suites for a commit `ref`. The `ref` can be a SHA, branch name, or a tag name. GitHub Apps must have the `checks:read` permission on a private repository or pull access to a public repository to list check suites. 
OAuth Apps and authenticated users must have the `repo` scope to get check suites in a private repository. + */ + "checks/list-suites-for-ref": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** ref parameter */ + ref: string; + }; + query: { + /** Filters check suites by GitHub App `id`. */ + app_id?: number; + /** Returns check runs with the specified `name`. */ + check_name?: components["parameters"]["check-name"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": { + total_count: number; + check_suites: components["schemas"]["check-suite"][]; + }; + }; + }; + }; + }; + /** + * Users with pull access in a repository can access a combined view of commit statuses for a given ref. The ref can be a SHA, a branch name, or a tag name. + * + * + * Additionally, a combined `state` is returned. The `state` is one of: + * + * * **failure** if any of the contexts report as `error` or `failure` + * * **pending** if there are no statuses or a context is `pending` + * * **success** if the latest status for all contexts is `success` + */ + "repos/get-combined-status-for-ref": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** ref parameter */ + ref: string; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["combined-commit-status"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Users with pull access in a repository can view commit statuses for a given ref. The ref can be a SHA, a branch name, or a tag name. Statuses are returned in reverse chronological order. The first status in the list will be the latest one. + * + * This resource is also available via a legacy route: `GET /repos/:owner/:repo/statuses/:ref`. + */ + "repos/list-commit-statuses-for-ref": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** ref parameter */ + ref: string; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["status"][]; + }; + }; + 301: components["responses"]["moved_permanently"]; + }; + }; + /** + * Returns all community profile metrics for a repository. The repository must be public, and cannot be a fork. 
+ * + * The returned metrics include an overall health score, the repository description, the presence of documentation, the + * detected code of conduct, the detected license, and the presence of ISSUE\_TEMPLATE, PULL\_REQUEST\_TEMPLATE, + * README, and CONTRIBUTING files. + * + * The `health_percentage` score is defined as a percentage of how many of + * these four documents are present: README, CONTRIBUTING, LICENSE, and + * CODE_OF_CONDUCT. For example, if all four documents are present, then + * the `health_percentage` is `100`. If only one is present, then the + * `health_percentage` is `25`. + * + * `content_reports_enabled` is only returned for organization-owned repositories. + */ + "repos/get-community-profile-metrics": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["community-profile"]; + }; + }; + }; + }; + /** + * The `basehead` param is comprised of two parts: `base` and `head`. Both must be branch names in `repo`. To compare branches across other repositories in the same network as `repo`, use the format `:branch`. + * + * The response from the API is equivalent to running the `git log base..head` command; however, commits are returned in chronological order. Pass the appropriate [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) to fetch diff and patch formats. + * + * The response also includes details on the files that were changed between the two commits. This includes the status of the change (for example, if a file was added, removed, modified, or renamed), and details of the change itself. For example, files with a `renamed` status have a `previous_filename` field showing the previous filename of the file, and files with a `modified` status have a `patch` field showing the changes made to the file. + * + * **Working with large comparisons** + * + * To process a response with a large number of commits, you can use (`per_page` or `page`) to paginate the results. When using paging, the list of changed files is only returned with page 1, but includes all changed files for the entire comparison. For more information on working with pagination, see "[Traversing with pagination](/rest/guides/traversing-with-pagination)." + * + * When calling this API without any paging parameters (`per_page` or `page`), the returned list is limited to 250 commits and the last commit in the list is the most recent of the entire comparison. When a paging parameter is specified, the first commit in the returned list of each page is the earliest. + * + * **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. 
| + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. | + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. | + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. | + */ + "repos/compare-commits-with-basehead": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The base branch and head branch to compare. This parameter expects the format `{base}...{head}`. */ + basehead: string; + }; + query: { + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["commit-comparison"]; + }; + }; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * Gets the contents of a file or directory in a repository. Specify the file path or directory in `:path`. If you omit + * `:path`, you will receive the contents of the repository's root directory. See the description below regarding what the API response includes for directories. + * + * Files and symlinks support [a custom media type](https://docs.github.com/rest/reference/repos#custom-media-types) for + * retrieving the raw content or rendered HTML (when supported). All content types support [a custom media + * type](https://docs.github.com/rest/reference/repos#custom-media-types) to ensure the content is returned in a consistent + * object format. + * + * **Notes**: + * * To get a repository's contents recursively, you can [recursively get the tree](https://docs.github.com/rest/reference/git#trees). + * * This API has an upper limit of 1,000 files for a directory. If you need to retrieve more files, use the [Git Trees + * API](https://docs.github.com/rest/reference/git#get-a-tree). + * * Download URLs expire and are meant to be used just once. 
To ensure the download URL does not expire, please use the contents API to obtain a fresh download URL for each download. + * #### Size limits + * If the requested file's size is: + * * 1 MB or smaller: All features of this endpoint are supported. + * * Between 1-100 MB: Only the `raw` or `object` [custom media types](https://docs.github.com/rest/repos/contents#custom-media-types-for-repository-contents) are supported. Both will work as normal, except that when using the `object` media type, the `content` field will be an empty string and the `encoding` field will be `"none"`. To get the contents of these larger files, use the `raw` media type. + * * Greater than 100 MB: This endpoint is not supported. + * + * #### If the content is a directory + * The response will be an array of objects, one object for each item in the directory. + * When listing the contents of a directory, submodules have their "type" specified as "file". Logically, the value + * _should_ be "submodule". This behavior exists in API v3 [for backwards compatibility purposes](https://git.io/v1YCW). + * In the next major version of the API, the type will be returned as "submodule". + * + * #### If the content is a symlink + * If the requested `:path` points to a symlink, and the symlink's target is a normal file in the repository, then the + * API responds with the content of the file (in the format shown in the example). Otherwise, the API responds with an object + * describing the symlink itself. + * + * #### If the content is a submodule + * The `submodule_git_url` identifies the location of the submodule repository, and the `sha` identifies a specific + * commit within the submodule repository. Git uses the given URL when cloning the submodule repository, and checks out + * the submodule at that specific commit. + * + * If the submodule repository is not hosted on github.com, the Git URLs (`git_url` and `_links["git"]`) and the + * github.com URLs (`html_url` and `_links["html"]`) will have null values. + */ + "repos/get-content": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** path parameter */ + path: string; + }; + query: { + /** The name of the commit/branch/tag. Default: the repository’s default branch (usually `master`) */ + ref?: string; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/vnd.github.object": components["schemas"]["content-tree"]; + "application/json": + | components["schemas"]["content-directory"] + | components["schemas"]["content-file"] + | components["schemas"]["content-symlink"] + | components["schemas"]["content-submodule"]; + }; + }; + 302: components["responses"]["found"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Creates a new file or replaces an existing file in a repository. You must authenticate using an access token with the `workflow` scope to use this endpoint. + * + * **Note:** If you use this endpoint and the "[Delete a file](https://docs.github.com/rest/reference/repos/#delete-file)" endpoint in parallel, the concurrent requests will conflict and you will receive errors. You must use these endpoints serially instead. + */ + "repos/create-or-update-file-contents": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive.
*/ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** path parameter */ + path: string; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["file-commit"]; + }; + }; + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["file-commit"]; + }; + }; + 404: components["responses"]["not_found"]; + 409: components["responses"]["conflict"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The commit message. */ + message: string; + /** @description The new file content, using Base64 encoding. */ + content: string; + /** @description **Required if you are updating a file**. The blob SHA of the file being replaced. */ + sha?: string; + /** @description The branch name. Default: the repository’s default branch (usually `master`) */ + branch?: string; + /** @description The person that committed the file. Default: the authenticated user. */ + committer?: { + /** @description The name of the author or committer of the commit. You'll receive a `422` status code if `name` is omitted. */ + name: string; + /** @description The email of the author or committer of the commit. You'll receive a `422` status code if `email` is omitted. */ + email: string; + /** @example "2013-01-05T13:13:22+05:00" */ + date?: string; + }; + /** @description The author of the file. Default: The `committer` or the authenticated user if you omit `committer`. */ + author?: { + /** @description The name of the author or committer of the commit. You'll receive a `422` status code if `name` is omitted. */ + name: string; + /** @description The email of the author or committer of the commit. You'll receive a `422` status code if `email` is omitted. */ + email: string; + /** @example "2013-01-15T17:13:22+05:00" */ + date?: string; + }; + }; + }; + }; + }; + /** + * Deletes a file in a repository. + * + * You can provide an additional `committer` parameter, which is an object containing information about the committer. Or, you can provide an `author` parameter, which is an object containing information about the author. + * + * The `author` section is optional and is filled in with the `committer` information if omitted. If the `committer` information is omitted, the authenticated user's information is used. + * + * You must provide values for both `name` and `email`, whether you choose to use `author` or `committer`. Otherwise, you'll receive a `422` status code. + * + * **Note:** If you use this endpoint and the "[Create or update file contents](https://docs.github.com/rest/reference/repos/#create-or-update-file-contents)" endpoint in parallel, the concurrent requests will conflict and you will receive errors. You must use these endpoints serially instead. + */ + "repos/delete-file": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. 
*/ + repo: components["parameters"]["repo"]; + /** path parameter */ + path: string; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["file-commit"]; + }; + }; + 404: components["responses"]["not_found"]; + 409: components["responses"]["conflict"]; + 422: components["responses"]["validation_failed"]; + 503: components["responses"]["service_unavailable"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The commit message. */ + message: string; + /** @description The blob SHA of the file being deleted. */ + sha: string; + /** @description The branch name. Default: the repository’s default branch (usually `master`) */ + branch?: string; + /** @description object containing information about the committer. */ + committer?: { + /** @description The name of the author (or committer) of the commit */ + name?: string; + /** @description The email of the author (or committer) of the commit */ + email?: string; + }; + /** @description object containing information about the author. */ + author?: { + /** @description The name of the author (or committer) of the commit */ + name?: string; + /** @description The email of the author (or committer) of the commit */ + email?: string; + }; + }; + }; + }; + }; + /** + * Lists contributors to the specified repository and sorts them by the number of commits per contributor in descending order. This endpoint may return information that is a few hours old because the GitHub REST API v3 caches contributor data to improve performance. + * + * GitHub identifies contributors by author email address. This endpoint groups contribution counts by GitHub user, which includes all associated email addresses. To improve performance, only the first 500 author email addresses in the repository link to GitHub users. The rest will appear as anonymous contributors without associated GitHub user information. + */ + "repos/list-contributors": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** Set to `1` or `true` to include anonymous contributors in results. */ + anon?: string; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** if repository contains content */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["contributor"][]; + }; + }; + /** Response if repository is empty */ + 204: never; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * You must use an access token with the `security_events` scope to use this endpoint with private repositories. + * You can also use tokens with the `public_repo` scope for public repositories only. + * GitHub Apps must have **Dependabot alerts** read permission to use this endpoint. + */ + "dependabot/list-alerts-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** + * A comma-separated list of states. 
If specified, only alerts with these states will be returned. + * + * Can be: `dismissed`, `fixed`, `open` + */ + state?: components["parameters"]["dependabot-alert-state"]; + /** + * A comma-separated list of severities. If specified, only alerts with these severities will be returned. + * + * Can be: `low`, `medium`, `high`, `critical` + */ + severity?: components["parameters"]["dependabot-alert-severity"]; + /** + * A comma-separated list of ecosystems. If specified, only alerts for these ecosystems will be returned. + * + * Can be: `composer`, `go`, `maven`, `npm`, `nuget`, `pip`, `rubygems`, `rust` + */ + ecosystem?: components["parameters"]["dependabot-alert-ecosystem"]; + /** A comma-separated list of package names. If specified, only alerts for these packages will be returned. */ + package?: components["parameters"]["dependabot-alert-package"]; + /** A comma-separated list of full manifest paths. If specified, only alerts for these manifests will be returned. */ + manifest?: components["parameters"]["dependabot-alert-manifest"]; + /** Scope of the dependency on a Dependabot alert. */ + scope?: components["parameters"]["dependabot-alert-scope"]; + /** + * The property by which to sort the results. + * `created` means when the alert was created. + * `updated` means when the alert's state last changed. + */ + sort?: components["parameters"]["dependabot-alert-sort"]; + /** The direction to sort the results by. */ + direction?: components["parameters"]["direction"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["dependabot-alert"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed_simple"]; + }; + }; + /** + * You must use an access token with the `security_events` scope to use this endpoint with private repositories. + * You can also use tokens with the `public_repo` scope for public repositories only. + * GitHub Apps must have **Dependabot alerts** read permission to use this endpoint. + */ + "dependabot/get-alert": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies a Dependabot alert in its repository. You can find this at the end of the URL for a Dependabot alert within GitHub, or in `number` fields in the response from the `GET /repos/{owner}/{repo}/dependabot/alerts` operation. */ + alert_number: components["parameters"]["dependabot-alert-number"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["dependabot-alert"]; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * You must use an access token with the `security_events` scope to use this endpoint with private repositories. + * You can also use tokens with the `public_repo` scope for public repositories only. + * GitHub Apps must have **Dependabot alerts** write permission to use this endpoint. 
+ */ + "dependabot/update-alert": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies a Dependabot alert in its repository. You can find this at the end of the URL for a Dependabot alert within GitHub, or in `number` fields in the response from the `GET /repos/{owner}/{repo}/dependabot/alerts` operation. */ + alert_number: components["parameters"]["dependabot-alert-number"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["dependabot-alert"]; + }; + }; + 304: components["responses"]["not_modified"]; + 400: components["responses"]["bad_request"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 409: components["responses"]["conflict"]; + 422: components["responses"]["validation_failed_simple"]; + }; + requestBody: { + content: { + "application/json": { + state: components["schemas"]["dependabot-alert-set-state"]; + dismissed_reason?: components["schemas"]["dependabot-alert-dismissed-reason"]; + dismissed_comment?: components["schemas"]["dependabot-alert-dismissed-comment"]; + }; + }; + }; + }; + /** Lists all secrets available in a repository without revealing their encrypted values. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` repository permission to use this endpoint. */ + "dependabot/list-repo-secrets": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": { + total_count: number; + secrets: components["schemas"]["dependabot-secret"][]; + }; + }; + }; + }; + }; + /** Gets your public key, which you need to encrypt secrets. You need to encrypt a secret before you can create or update secrets. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `dependabot_secrets` repository permission to use this endpoint. */ + "dependabot/get-repo-public-key": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["dependabot-public-key"]; + }; + }; + }; + }; + /** Gets a single repository secret without revealing its encrypted value. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` repository permission to use this endpoint. */ + "dependabot/get-repo-secret": { + parameters: { + path: { + /** The account owner of the repository. 
The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["dependabot-secret"]; + }; + }; + }; + }; + /** + * Creates or updates a repository secret with an encrypted value. Encrypt your secret using + * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access + * token with the `repo` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` repository + * permission to use this endpoint. + * + * #### Example encrypting a secret using Node.js + * + * Encrypt your secret using the [tweetsodium](https://github.com/github/tweetsodium) library. + * + * ``` + * const sodium = require('tweetsodium'); + * + * const key = "base64-encoded-public-key"; + * const value = "plain-text-secret"; + * + * // Convert the message and key to Uint8Array's (Buffer implements that interface) + * const messageBytes = Buffer.from(value); + * const keyBytes = Buffer.from(key, 'base64'); + * + * // Encrypt using LibSodium. + * const encryptedBytes = sodium.seal(messageBytes, keyBytes); + * + * // Base64 the encrypted secret + * const encrypted = Buffer.from(encryptedBytes).toString('base64'); + * + * console.log(encrypted); + * ``` + * + * + * #### Example encrypting a secret using Python + * + * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/latest/public/#nacl-public-sealedbox) with Python 3. + * + * ``` + * from base64 import b64encode + * from nacl import encoding, public + * + * def encrypt(public_key: str, secret_value: str) -> str: + * """Encrypt a Unicode string using the public key.""" + * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) + * sealed_box = public.SealedBox(public_key) + * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) + * return b64encode(encrypted).decode("utf-8") + * ``` + * + * #### Example encrypting a secret using C# + * + * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. + * + * ``` + * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); + * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); + * + * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); + * + * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); + * ``` + * + * #### Example encrypting a secret using Ruby + * + * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. + * + * ```ruby + * require "rbnacl" + * require "base64" + * + * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") + * public_key = RbNaCl::PublicKey.new(key) + * + * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) + * encrypted_secret = box.encrypt("my_secret") + * + * # Print the base64 encoded secret + * puts Base64.strict_encode64(encrypted_secret) + * ``` + */ + "dependabot/create-or-update-repo-secret": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the secret. 
*/ + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** Response when creating a secret */ + 201: { + content: { + "application/json": components["schemas"]["empty-object"]; + }; + }; + /** Response when updating a secret */ + 204: never; + }; + requestBody: { + content: { + "application/json": { + /** @description Value for your secret, encrypted with [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages) using the public key retrieved from the [Get a repository public key](https://docs.github.com/rest/reference/dependabot#get-a-repository-public-key) endpoint. */ + encrypted_value?: string; + /** @description ID of the key you used to encrypt the secret. */ + key_id?: string; + }; + }; + }; + }; + /** Deletes a secret in a repository using the secret name. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `dependabot_secrets` repository permission to use this endpoint. */ + "dependabot/delete-repo-secret": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** Gets the diff of the dependency changes between two commits of a repository, based on the changes to the dependency manifests made in those commits. */ + "dependency-graph/diff-range": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The base and head Git revisions to compare. The Git revisions will be resolved to commit SHAs. Named revisions will be resolved to their corresponding HEAD commits, and an appropriate merge base will be determined. This parameter expects the format `{base}...{head}`. */ + basehead: string; + }; + query: { + /** The full path, relative to the repository root, of the dependency manifest file. */ + name?: components["parameters"]["manifest-path"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["dependency-graph-diff"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Create a new snapshot of a repository's dependencies. You must authenticate using an access token with the `repo` scope to use this endpoint for a repository that the requesting user has access to. */ + "dependency-graph/create-repository-snapshot": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": { + /** @description ID of the created snapshot. */ + id: number; + /** @description The time at which the snapshot was created. */ + created_at: string; + /** @description Either "SUCCESS", "ACCEPTED", or "INVALID". 
"SUCCESS" indicates that the snapshot was successfully created and the repository's dependencies were updated. "ACCEPTED" indicates that the snapshot was successfully created, but the repository's dependencies were not updated. "INVALID" indicates that the snapshot was malformed. */ + result: string; + /** @description A message providing further details about the result, such as why the dependencies were not updated. */ + message: string; + }; + }; + }; + }; + requestBody: { + content: { + "application/json": components["schemas"]["snapshot"]; + }; + }; + }; + /** Simple filtering of deployments is available via query parameters: */ + "repos/list-deployments": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The SHA recorded at creation time. */ + sha?: string; + /** The name of the ref. This can be a branch, tag, or SHA. */ + ref?: string; + /** The name of the task for the deployment (e.g., `deploy` or `deploy:migrations`). */ + task?: string; + /** The name of the environment that was deployed to (e.g., `staging` or `production`). */ + environment?: string | null; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["deployment"][]; + }; + }; + }; + }; + /** + * Deployments offer a few configurable parameters with certain defaults. + * + * The `ref` parameter can be any named branch, tag, or SHA. At GitHub we often deploy branches and verify them + * before we merge a pull request. + * + * The `environment` parameter allows deployments to be issued to different runtime environments. Teams often have + * multiple environments for verifying their applications, such as `production`, `staging`, and `qa`. This parameter + * makes it easier to track which environments have requested deployments. The default environment is `production`. + * + * The `auto_merge` parameter is used to ensure that the requested ref is not behind the repository's default branch. If + * the ref _is_ behind the default branch for the repository, we will attempt to merge it for you. If the merge succeeds, + * the API will return a successful merge commit. If merge conflicts prevent the merge from succeeding, the API will + * return a failure response. + * + * By default, [commit statuses](https://docs.github.com/rest/commits/statuses) for every submitted context must be in a `success` + * state. The `required_contexts` parameter allows you to specify a subset of contexts that must be `success`, or to + * specify contexts that have not yet been submitted. You are not required to use commit statuses to deploy. If you do + * not require any contexts or create any commit statuses, the deployment will always succeed. + * + * The `payload` parameter is available for any extra information that a deployment system might need. It is a JSON text + * field that will be passed on when a deployment event is dispatched. + * + * The `task` parameter is used by the deployment system to allow different execution paths. In the web world this might + * be `deploy:migrations` to run schema changes on the system. 
In the compiled world this could be a flag to compile an + * application with debugging enabled. + * + * Users with `repo` or `repo_deployment` scopes can create a deployment for a given ref. + * + * #### Merged branch response + * You will see this response when GitHub automatically merges the base branch into the topic branch instead of creating + * a deployment. This auto-merge happens when: + * * Auto-merge option is enabled in the repository + * * Topic branch does not include the latest changes on the base branch, which is `master` in the response example + * * There are no merge conflicts + * + * If there are no new commits in the base branch, a new request to create a deployment should give a successful + * response. + * + * #### Merge conflict response + * This error happens when the `auto_merge` option is enabled and when the default branch (in this case `master`), can't + * be merged into the branch that's being deployed (in this case `topic-branch`), due to merge conflicts. + * + * #### Failed commit status checks + * This error happens when the `required_contexts` parameter indicates that one or more contexts need to have a `success` + * status for the commit to be deployed, but one or more of the required contexts do not have a state of `success`. + */ + "repos/create-deployment": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["deployment"]; + }; + }; + /** Merged branch response */ + 202: { + content: { + "application/json": { + message?: string; + }; + }; + }; + /** Conflict when there is a merge conflict or the commit's status checks failed */ + 409: unknown; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The ref to deploy. This can be a branch, tag, or SHA. */ + ref: string; + /** + * @description Specifies a task to execute (e.g., `deploy` or `deploy:migrations`). + * @default deploy + */ + task?: string; + /** + * @description Attempts to automatically merge the default branch into the requested ref, if it's behind the default branch. + * @default true + */ + auto_merge?: boolean; + /** @description The [status](https://docs.github.com/rest/commits/statuses) contexts to verify against commit status checks. If you omit this parameter, GitHub verifies all unique contexts before creating a deployment. To bypass checking entirely, pass an empty array. Defaults to all unique contexts. */ + required_contexts?: string[]; + payload?: { [key: string]: unknown } | string; + /** + * @description Name for the target deployment environment (e.g., `production`, `staging`, `qa`). + * @default production + */ + environment?: string; + /** + * @description Short description of the deployment. + * @default + */ + description?: string | null; + /** + * @description Specifies if the given environment is specific to the deployment and will no longer exist at some point in the future. Default: `false` + * @default false + */ + transient_environment?: boolean; + /** @description Specifies if the given environment is one that end-users directly interact with. Default: `true` when `environment` is `production` and `false` otherwise. 
*/ + production_environment?: boolean; + }; + }; + }; + }; + "repos/get-deployment": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** deployment_id parameter */ + deployment_id: components["parameters"]["deployment-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["deployment"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * If the repository only has one deployment, you can delete the deployment regardless of its status. If the repository has more than one deployment, you can only delete inactive deployments. This ensures that repositories with multiple deployments will always have an active deployment. Anyone with `repo` or `repo_deployment` scopes can delete a deployment. + * + * To set a deployment as inactive, you must: + * + * * Create a new deployment that is active so that the system has a record of the current state, then delete the previously active deployment. + * * Mark the active deployment as inactive by adding any non-successful deployment status. + * + * For more information, see "[Create a deployment](https://docs.github.com/rest/reference/repos/#create-a-deployment)" and "[Create a deployment status](https://docs.github.com/rest/reference/repos#create-a-deployment-status)." + */ + "repos/delete-deployment": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** deployment_id parameter */ + deployment_id: components["parameters"]["deployment-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed_simple"]; + }; + }; + /** Users with pull access can view deployment statuses for a deployment: */ + "repos/list-deployment-statuses": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** deployment_id parameter */ + deployment_id: components["parameters"]["deployment-id"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["deployment-status"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Users with `push` access can create deployment statuses for a given deployment. + * + * GitHub Apps require `read & write` access to "Deployments" and `read-only` access to "Repo contents" (for private repos). OAuth Apps require the `repo_deployment` scope. + */ + "repos/create-deployment-status": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. 
*/ + repo: components["parameters"]["repo"]; + /** deployment_id parameter */ + deployment_id: components["parameters"]["deployment-id"]; + }; + }; + responses: { + /** Response */ + 201: { + headers: { + Location?: string; + }; + content: { + "application/json": components["schemas"]["deployment-status"]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The state of the status. When you set a transient deployment to `inactive`, the deployment will be shown as `destroyed` in GitHub. + * @enum {string} + */ + state: + | "error" + | "failure" + | "inactive" + | "in_progress" + | "queued" + | "pending" + | "success"; + /** + * @description The target URL to associate with this status. This URL should contain output to keep the user updated while the task is running or serve as historical information for what happened in the deployment. **Note:** It's recommended to use the `log_url` parameter, which replaces `target_url`. + * @default + */ + target_url?: string; + /** + * @description The full URL of the deployment's output. This parameter replaces `target_url`. We will continue to accept `target_url` to support legacy uses, but we recommend replacing `target_url` with `log_url`. Setting `log_url` will automatically set `target_url` to the same value. Default: `""` + * @default + */ + log_url?: string; + /** + * @description A short description of the status. The maximum description length is 140 characters. + * @default + */ + description?: string; + /** + * @description Name for the target deployment environment, which can be changed when setting a deploy status. For example, `production`, `staging`, or `qa`. + * @enum {string} + */ + environment?: "production" | "staging" | "qa"; + /** + * @description Sets the URL for accessing your environment. Default: `""` + * @default + */ + environment_url?: string; + /** @description Adds a new `inactive` status to all prior non-transient, non-production environment deployments with the same repository and `environment` name as the created status's deployment. An `inactive` status is only added to deployments that had a `success` state. Default: `true` */ + auto_inactive?: boolean; + }; + }; + }; + }; + /** Users with pull access can view a deployment status for a deployment: */ + "repos/get-deployment-status": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** deployment_id parameter */ + deployment_id: components["parameters"]["deployment-id"]; + status_id: number; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["deployment-status"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * You can use this endpoint to trigger a webhook event called `repository_dispatch` when you want activity that happens outside of GitHub to trigger a GitHub Actions workflow or GitHub App webhook. You must configure your GitHub Actions workflow or GitHub App to run when the `repository_dispatch` event occurs. For an example `repository_dispatch` webhook payload, see "[RepositoryDispatchEvent](https://docs.github.com/webhooks/event-payloads/#repository_dispatch)." + * + * The `client_payload` parameter is available for any extra information that your workflow might need. 
This parameter is a JSON payload that will be passed on when the webhook event is dispatched. For example, the `client_payload` can include a message that a user would like to send using a GitHub Actions workflow. Or the `client_payload` can be used as a test to debug your workflow. + * + * This endpoint requires write access to the repository by providing either: + * + * - Personal access tokens with `repo` scope. For more information, see "[Creating a personal access token for the command line](https://docs.github.com/articles/creating-a-personal-access-token-for-the-command-line)" in the GitHub Help documentation. + * - GitHub Apps with both `metadata:read` and `contents:read&write` permissions. + * + * This input example shows how you can use the `client_payload` as a test to debug your workflow. + */ + "repos/create-dispatch-event": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 204: never; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description A custom webhook event name. Must be 100 characters or fewer. */ + event_type: string; + /** @description JSON payload with extra information about the webhook event that your action or workflow may use. */ + client_payload?: { [key: string]: unknown }; + }; + }; + }; + }; + /** + * Lists the environments for a repository. + * + * Anyone with read access to the repository can use this endpoint. If the repository is private, you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + "repos/get-all-environments": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": { + /** + * @description The number of environments in this repository + * @example 5 + */ + total_count?: number; + environments?: components["schemas"]["environment"][]; + }; + }; + }; + }; + }; + /** + * **Note:** To get information about name patterns that branches must match in order to deploy to this environment, see "[Get a deployment branch policy](/rest/deployments/branch-policies#get-a-deployment-branch-policy)." + * + * Anyone with read access to the repository can use this endpoint. If the + * repository is private, you must use an access token with the `repo` scope. GitHub + * Apps must have the `actions:read` permission to use this endpoint. + */ + "repos/get-environment": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the environment. 
*/ + environment_name: components["parameters"]["environment-name"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["environment"]; + }; + }; + }; + }; + /** + * Create or update an environment with protection rules, such as required reviewers. For more information about environment protection rules, see "[Environments](/actions/reference/environments#environment-protection-rules)." + * + * **Note:** To create or update name patterns that branches must match in order to deploy to this environment, see "[Deployment branch policies](/rest/deployments/branch-policies)." + * + * **Note:** To create or update secrets for an environment, see "[Secrets](/rest/reference/actions#secrets)." + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration:write` permission for the repository to use this endpoint. + */ + "repos/create-or-update-environment": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the environment. */ + environment_name: components["parameters"]["environment-name"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["environment"]; + }; + }; + /** Validation error when the environment name is invalid or when `protected_branches` and `custom_branch_policies` in `deployment_branch_policy` are set to the same value */ + 422: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + wait_timer?: components["schemas"]["wait-timer"]; + /** @description The people or teams that may review jobs that reference the environment. You can list up to six users or teams as reviewers. The reviewers must have at least read access to the repository. Only one of the required reviewers needs to approve the job for it to proceed. */ + reviewers?: + | { + type?: components["schemas"]["deployment-reviewer-type"]; + /** + * @description The id of the user or team who can review the deployment + * @example 4532992 + */ + id?: number; + }[] + | null; + deployment_branch_policy?: components["schemas"]["deployment-branch-policy-settings"]; + } | null; + }; + }; + }; + /** You must authenticate using an access token with the repo scope to use this endpoint. */ + "repos/delete-an-environment": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the environment. */ + environment_name: components["parameters"]["environment-name"]; + }; + }; + responses: { + /** Default response */ + 204: never; + }; + }; + /** + * Lists the deployment branch policies for an environment. + * + * Anyone with read access to the repository can use this endpoint. If the repository is private, you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + "repos/list-deployment-branch-policies": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. 
*/ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the environment. */ + environment_name: components["parameters"]["environment-name"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": { + /** + * @description The number of deployment branch policies for the environment. + * @example 2 + */ + total_count: number; + branch_policies: components["schemas"]["deployment-branch-policy"][]; + }; + }; + }; + }; + }; + /** + * Creates a deployment branch policy for an environment. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration:write` permission for the repository to use this endpoint. + */ + "repos/create-deployment-branch-policy": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the environment. */ + environment_name: components["parameters"]["environment-name"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["deployment-branch-policy"]; + }; + }; + /** Response if the same branch name pattern already exists */ + 303: never; + /** Not Found or `deployment_branch_policy.custom_branch_policies` property for the environment is set to false */ + 404: unknown; + }; + requestBody: { + content: { + "application/json": components["schemas"]["deployment-branch-policy-name-pattern"]; + }; + }; + }; + /** + * Gets a deployment branch policy for an environment. + * + * Anyone with read access to the repository can use this endpoint. If the repository is private, you must use an access token with the `repo` scope. GitHub Apps must have the `actions:read` permission to use this endpoint. + */ + "repos/get-deployment-branch-policy": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the environment. */ + environment_name: components["parameters"]["environment-name"]; + /** The unique identifier of the branch policy. */ + branch_policy_id: components["parameters"]["branch-policy-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["deployment-branch-policy"]; + }; + }; + }; + }; + /** + * Updates a deployment branch policy for an environment. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration:write` permission for the repository to use this endpoint. + */ + "repos/update-deployment-branch-policy": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the environment. 
*/ + environment_name: components["parameters"]["environment-name"]; + /** The unique identifier of the branch policy. */ + branch_policy_id: components["parameters"]["branch-policy-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["deployment-branch-policy"]; + }; + }; + }; + requestBody: { + content: { + "application/json": components["schemas"]["deployment-branch-policy-name-pattern"]; + }; + }; + }; + /** + * Deletes a deployment branch policy for an environment. + * + * You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `administration:write` permission for the repository to use this endpoint. + */ + "repos/delete-deployment-branch-policy": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the environment. */ + environment_name: components["parameters"]["environment-name"]; + /** The unique identifier of the branch policy. */ + branch_policy_id: components["parameters"]["branch-policy-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + "activity/list-repo-events": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["event"][]; + }; + }; + }; + }; + "repos/list-forks": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The sort order. `stargazers` will sort by star count. */ + sort?: "newest" | "oldest" | "stargazers" | "watchers"; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["minimal-repository"][]; + }; + }; + 400: components["responses"]["bad_request"]; + }; + }; + /** + * Create a fork for the authenticated user. + * + * **Note**: Forking a Repository happens asynchronously. You may have to wait a short period of time before you can access the git objects. If this takes longer than 5 minutes, be sure to contact [GitHub Support](https://support.github.com/contact?tags=dotcom-rest-api). + */ + "repos/create-fork": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. 
*/ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 202: { + content: { + "application/json": components["schemas"]["full-repository"]; + }; + }; + 400: components["responses"]["bad_request"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description Optional parameter to specify the organization name if forking into an organization. */ + organization?: string; + /** @description When forking from an existing repository, a new name for the fork. */ + name?: string; + /** @description When forking from an existing repository, fork with only the default branch. */ + default_branch_only?: boolean; + } | null; + }; + }; + }; + "git/create-blob": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 201: { + headers: { + Location?: string; + }; + content: { + "application/json": components["schemas"]["short-blob"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 409: components["responses"]["conflict"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The new blob's content. */ + content: string; + /** + * @description The encoding used for `content`. Currently, `"utf-8"` and `"base64"` are supported. + * @default utf-8 + */ + encoding?: string; + }; + }; + }; + }; + /** + * The `content` in the response will always be Base64 encoded. + * + * _Note_: This API supports blobs up to 100 megabytes in size. + */ + "git/get-blob": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + file_sha: string; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["blob"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Creates a new Git [commit object](https://git-scm.com/book/en/v1/Git-Internals-Git-Objects#Commit-Objects). + * + * **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in the table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. | + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. 
| + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. | + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. | + */ + "git/create-commit": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 201: { + headers: { + Location?: string; + }; + content: { + "application/json": components["schemas"]["git-commit"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The commit message */ + message: string; + /** @description The SHA of the tree object this commit points to */ + tree: string; + /** @description The SHAs of the commits that were the parents of this commit. If omitted or empty, the commit will be written as a root commit. For a single parent, an array of one SHA should be provided; for a merge commit, an array of more than one should be provided. */ + parents?: string[]; + /** @description Information about the author of the commit. By default, the `author` will be the authenticated user and the current date. See the `author` and `committer` object below for details. */ + author?: { + /** @description The name of the author (or committer) of the commit */ + name: string; + /** @description The email of the author (or committer) of the commit */ + email: string; + /** + * Format: date-time + * @description Indicates when this commit was authored (or committed). This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + date?: string; + }; + /** @description Information about the person who is making the commit. By default, `committer` will use the information set in `author`. See the `author` and `committer` object below for details. */ + committer?: { + /** @description The name of the author (or committer) of the commit */ + name?: string; + /** @description The email of the author (or committer) of the commit */ + email?: string; + /** + * Format: date-time + * @description Indicates when this commit was authored (or committed). 
This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + date?: string; + }; + /** @description The [PGP signature](https://en.wikipedia.org/wiki/Pretty_Good_Privacy) of the commit. GitHub adds the signature to the `gpgsig` header of the created commit. For a commit signature to be verifiable by Git or GitHub, it must be an ASCII-armored detached PGP signature over the string commit as it would be written to the object database. To pass a `signature` parameter, you need to first manually create a valid PGP signature, which can be complicated. You may find it easier to [use the command line](https://git-scm.com/book/id/v2/Git-Tools-Signing-Your-Work) to create signed commits. */ + signature?: string; + }; + }; + }; + }; + /** + * Gets a Git [commit object](https://git-scm.com/book/en/v1/Git-Internals-Git-Objects#Commit-Objects). + * + * **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in the table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. | + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. | + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. | + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. | + */ + "git/get-commit": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The SHA of the commit. 
*/ + commit_sha: components["parameters"]["commit-sha"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["git-commit"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Returns an array of references from your Git database that match the supplied name. The `:ref` in the URL must be formatted as `heads/` for branches and `tags/` for tags. If the `:ref` doesn't exist in the repository, but existing refs start with `:ref`, they will be returned as an array. + * + * When you use this endpoint without providing a `:ref`, it will return an array of all the references from your Git database, including notes and stashes if they exist on the server. Anything in the namespace is returned, not just `heads` and `tags`. + * + * **Note:** You need to explicitly [request a pull request](https://docs.github.com/rest/reference/pulls#get-a-pull-request) to trigger a test merge commit, which checks the mergeability of pull requests. For more information, see "[Checking mergeability of pull requests](https://docs.github.com/rest/guides/getting-started-with-the-git-database-api#checking-mergeability-of-pull-requests)". + * + * If you request matching references for a branch named `feature` but the branch `feature` doesn't exist, the response can still include other matching head refs that start with the word `feature`, such as `featureA` and `featureB`. + */ + "git/list-matching-refs": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** ref parameter */ + ref: string; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["git-ref"][]; + }; + }; + }; + }; + /** + * Returns a single reference from your Git database. The `:ref` in the URL must be formatted as `heads/` for branches and `tags/` for tags. If the `:ref` doesn't match an existing ref, a `404` is returned. + * + * **Note:** You need to explicitly [request a pull request](https://docs.github.com/rest/reference/pulls#get-a-pull-request) to trigger a test merge commit, which checks the mergeability of pull requests. For more information, see "[Checking mergeability of pull requests](https://docs.github.com/rest/guides/getting-started-with-the-git-database-api#checking-mergeability-of-pull-requests)". + */ + "git/get-ref": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** ref parameter */ + ref: string; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["git-ref"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** Creates a reference for your repository. You are unable to create new references for empty repositories, even if the commit SHA-1 hash used exists. Empty repositories are repositories without branches. */ + "git/create-ref": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. 
*/ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 201: { + headers: { + Location?: string; + }; + content: { + "application/json": components["schemas"]["git-ref"]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The name of the fully qualified reference (ie: `refs/heads/master`). If it doesn't start with 'refs' and have at least two slashes, it will be rejected. */ + ref: string; + /** @description The SHA1 value for this reference. */ + sha: string; + /** @example "refs/heads/newbranch" */ + key?: string; + }; + }; + }; + }; + "git/delete-ref": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** ref parameter */ + ref: string; + }; + }; + responses: { + /** Response */ + 204: never; + 422: components["responses"]["validation_failed"]; + }; + }; + "git/update-ref": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The name of the fully qualified reference to update. For example, `refs/heads/master`. If the value doesn't start with `refs` and have at least two slashes, it will be rejected. */ + ref: string; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["git-ref"]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The SHA1 value to set this reference to */ + sha: string; + /** + * @description Indicates whether to force the update or to make sure the update is a fast-forward update. Leaving this out or setting it to `false` will make sure you're not overwriting work. + * @default false + */ + force?: boolean; + }; + }; + }; + }; + /** + * Note that creating a tag object does not create the reference that makes a tag in Git. If you want to create an annotated tag in Git, you have to do this call to create the tag object, and then [create](https://docs.github.com/rest/reference/git#create-a-reference) the `refs/tags/[tag]` reference. If you want to create a lightweight tag, you only have to [create](https://docs.github.com/rest/reference/git#create-a-reference) the tag reference - this call would be unnecessary. + * + * **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. | + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. 
| + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. | + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. | + */ + "git/create-tag": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 201: { + headers: { + Location?: string; + }; + content: { + "application/json": components["schemas"]["git-tag"]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The tag's name. This is typically a version (e.g., "v0.0.1"). */ + tag: string; + /** @description The tag message. */ + message: string; + /** @description The SHA of the git object this is tagging. */ + object: string; + /** + * @description The type of the object we're tagging. Normally this is a `commit` but it can also be a `tree` or a `blob`. + * @enum {string} + */ + type: "commit" | "tree" | "blob"; + /** @description An object with information about the individual creating the tag. */ + tagger?: { + /** @description The name of the author of the tag */ + name: string; + /** @description The email of the author of the tag */ + email: string; + /** + * Format: date-time + * @description When this object was tagged. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + date?: string; + }; + }; + }; + }; + }; + /** + * **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. The following fields are included in the `verification` object: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. 
| + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. | + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. | + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. | + */ + "git/get-tag": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + tag_sha: string; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["git-tag"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * The tree creation API accepts nested entries. If you specify both a tree and a nested path modifying that tree, this endpoint will overwrite the contents of the tree with the new path contents, and create a new tree structure. + * + * If you use this endpoint to add, delete, or modify the file contents in a tree, you will need to commit the tree and then update a branch to point to the commit. For more information see "[Create a commit](https://docs.github.com/rest/reference/git#create-a-commit)" and "[Update a reference](https://docs.github.com/rest/reference/git#update-a-reference)." + * + * Returns an error if you try to delete a file that does not exist. + */ + "git/create-tree": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 201: { + headers: { + Location?: string; + }; + content: { + "application/json": components["schemas"]["git-tree"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description Objects (of `path`, `mode`, `type`, and `sha`) specifying a tree structure. */ + tree: { + /** @description The file referenced in the tree. 
*/ + path?: string; + /** + * @description The file mode; one of `100644` for file (blob), `100755` for executable (blob), `040000` for subdirectory (tree), `160000` for submodule (commit), or `120000` for a blob that specifies the path of a symlink. + * @enum {string} + */ + mode?: "100644" | "100755" | "040000" | "160000" | "120000"; + /** + * @description Either `blob`, `tree`, or `commit`. + * @enum {string} + */ + type?: "blob" | "tree" | "commit"; + /** + * @description The SHA1 checksum ID of the object in the tree. Also called `tree.sha`. If the value is `null` then the file will be deleted. + * + * **Note:** Use either `tree.sha` or `content` to specify the contents of the entry. Using both `tree.sha` and `content` will return an error. + */ + sha?: string | null; + /** + * @description The content you want this file to have. GitHub will write this blob out and use that SHA for this entry. Use either this, or `tree.sha`. + * + * **Note:** Use either `tree.sha` or `content` to specify the contents of the entry. Using both `tree.sha` and `content` will return an error. + */ + content?: string; + }[]; + /** + * @description The SHA1 of an existing Git tree object which will be used as the base for the new tree. If provided, a new Git tree object will be created from entries in the Git tree object pointed to by `base_tree` and entries defined in the `tree` parameter. Entries defined in the `tree` parameter will overwrite items from `base_tree` with the same `path`. If you're creating new changes on a branch, then normally you'd set `base_tree` to the SHA1 of the Git tree object of the current latest commit on the branch you're working on. + * If not provided, GitHub will create a new Git tree object from only the entries defined in the `tree` parameter. If you create a new commit pointing to such a tree, then all files which were a part of the parent commit's tree and were not defined in the `tree` parameter will be listed as deleted by the new commit. + */ + base_tree?: string; + }; + }; + }; + }; + /** + * Returns a single tree using the SHA1 value for that tree. + * + * If `truncated` is `true` in the response then the number of items in the `tree` array exceeded our maximum limit. If you need to fetch more items, use the non-recursive method of fetching trees, and fetch one sub-tree at a time. + * + * + * **Note**: The limit for the `tree` array is 100,000 entries with a maximum size of 7 MB when using the `recursive` parameter. + */ + "git/get-tree": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + tree_sha: string; + }; + query: { + /** Setting this parameter to any value returns the objects or subtrees referenced by the tree specified in `:tree_sha`. For example, setting `recursive` to any of the following will enable returning objects or subtrees: `0`, `1`, `"true"`, and `"false"`. Omit this parameter to prevent recursively returning objects or subtrees. */ + recursive?: string; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["git-tree"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** Lists webhooks for a repository. `last response` may return null if there have not been any deliveries within 30 days. 
*/ + "repos/list-webhooks": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["hook"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Repositories can have multiple webhooks installed. Each webhook should have a unique `config`. Multiple webhooks can + * share the same `config` as long as those webhooks do not have any `events` that overlap. + */ + "repos/create-webhook": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 201: { + headers: { + Location?: string; + }; + content: { + "application/json": components["schemas"]["hook"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description Use `web` to create a webhook. Default: `web`. This parameter only accepts the value `web`. */ + name?: string; + /** @description Key/value pairs to provide settings for this webhook. [These are defined below](https://docs.github.com/rest/reference/repos#create-hook-config-params). */ + config?: { + url?: components["schemas"]["webhook-config-url"]; + content_type?: components["schemas"]["webhook-config-content-type"]; + secret?: components["schemas"]["webhook-config-secret"]; + insecure_ssl?: components["schemas"]["webhook-config-insecure-ssl"]; + /** @example "abc" */ + token?: string; + /** @example "sha256" */ + digest?: string; + }; + /** + * @description Determines what [events](https://docs.github.com/webhooks/event-payloads) the hook is triggered for. + * @default [ + * "push" + * ] + */ + events?: string[]; + /** + * @description Determines if notifications are sent when the webhook is triggered. Set to `true` to send notifications. + * @default true + */ + active?: boolean; + } | null; + }; + }; + }; + /** Returns a webhook configured in a repository. To get only the webhook `config` properties, see "[Get a webhook configuration for a repository](/rest/reference/repos#get-a-webhook-configuration-for-a-repository)." */ + "repos/get-webhook": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the hook. */ + hook_id: components["parameters"]["hook-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["hook"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + "repos/delete-webhook": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. 
*/ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the hook. */ + hook_id: components["parameters"]["hook-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 404: components["responses"]["not_found"]; + }; + }; + /** Updates a webhook configured in a repository. If you previously had a `secret` set, you must provide the same `secret` or set a new `secret` or the secret will be removed. If you are only updating individual webhook `config` properties, use "[Update a webhook configuration for a repository](/rest/reference/repos#update-a-webhook-configuration-for-a-repository)." */ + "repos/update-webhook": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the hook. */ + hook_id: components["parameters"]["hook-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["hook"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description Key/value pairs to provide settings for this webhook. [These are defined below](https://docs.github.com/rest/reference/repos#create-hook-config-params). */ + config?: { + url: components["schemas"]["webhook-config-url"]; + content_type?: components["schemas"]["webhook-config-content-type"]; + secret?: components["schemas"]["webhook-config-secret"]; + insecure_ssl?: components["schemas"]["webhook-config-insecure-ssl"]; + /** @example "bar@example.com" */ + address?: string; + /** @example "The Serious Room" */ + room?: string; + }; + /** + * @description Determines what [events](https://docs.github.com/webhooks/event-payloads) the hook is triggered for. This replaces the entire array of events. + * @default [ + * "push" + * ] + */ + events?: string[]; + /** @description Determines a list of events to be added to the list of events that the Hook triggers for. */ + add_events?: string[]; + /** @description Determines a list of events to be removed from the list of events that the Hook triggers for. */ + remove_events?: string[]; + /** + * @description Determines if notifications are sent when the webhook is triggered. Set to `true` to send notifications. + * @default true + */ + active?: boolean; + }; + }; + }; + }; + /** + * Returns the webhook configuration for a repository. To get more information about the webhook, including the `active` state and `events`, use "[Get a repository webhook](/rest/reference/orgs#get-a-repository-webhook)." + * + * Access tokens must have the `read:repo_hook` or `repo` scope, and GitHub Apps must have the `repository_hooks:read` permission. + */ + "repos/get-webhook-config-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the hook. 
*/ + hook_id: components["parameters"]["hook-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["webhook-config"]; + }; + }; + }; + }; + /** + * Updates the webhook configuration for a repository. To update more information about the webhook, including the `active` state and `events`, use "[Update a repository webhook](/rest/reference/orgs#update-a-repository-webhook)." + * + * Access tokens must have the `write:repo_hook` or `repo` scope, and GitHub Apps must have the `repository_hooks:write` permission. + */ + "repos/update-webhook-config-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the hook. */ + hook_id: components["parameters"]["hook-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["webhook-config"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + url?: components["schemas"]["webhook-config-url"]; + content_type?: components["schemas"]["webhook-config-content-type"]; + secret?: components["schemas"]["webhook-config-secret"]; + insecure_ssl?: components["schemas"]["webhook-config-insecure-ssl"]; + }; + }; + }; + }; + /** Returns a list of webhook deliveries for a webhook configured in a repository. */ + "repos/list-webhook-deliveries": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the hook. */ + hook_id: components["parameters"]["hook-id"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Used for pagination: the starting delivery from which the page of deliveries is fetched. Refer to the `link` header for the next and previous page cursors. */ + cursor?: components["parameters"]["cursor"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["hook-delivery-item"][]; + }; + }; + 400: components["responses"]["bad_request"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** Returns a delivery for a webhook configured in a repository. */ + "repos/get-webhook-delivery": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the hook. */ + hook_id: components["parameters"]["hook-id"]; + delivery_id: components["parameters"]["delivery-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["hook-delivery"]; + }; + }; + 400: components["responses"]["bad_request"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** Redeliver a webhook delivery for a webhook configured in a repository. */ + "repos/redeliver-webhook-delivery": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. 
*/ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the hook. */ + hook_id: components["parameters"]["hook-id"]; + delivery_id: components["parameters"]["delivery-id"]; + }; + }; + responses: { + 202: components["responses"]["accepted"]; + 400: components["responses"]["bad_request"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** This will trigger a [ping event](https://docs.github.com/webhooks/#ping-event) to be sent to the hook. */ + "repos/ping-webhook": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the hook. */ + hook_id: components["parameters"]["hook-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 404: components["responses"]["not_found"]; + }; + }; + /** + * This will trigger the hook with the latest push to the current repository if the hook is subscribed to `push` events. If the hook is not subscribed to `push` events, the server will respond with 204 but no test POST will be generated. + * + * **Note**: Previously `/repos/:owner/:repo/hooks/:hook_id/test` + */ + "repos/test-push-webhook": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the hook. */ + hook_id: components["parameters"]["hook-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 404: components["responses"]["not_found"]; + }; + }; + /** + * View the progress of an import. + * + * **Import status** + * + * This section includes details about the possible values of the `status` field of the Import Progress response. + * + * An import that does not have errors will progress through these steps: + * + * * `detecting` - the "detection" step of the import is in progress because the request did not include a `vcs` parameter. The import is identifying the type of source control present at the URL. + * * `importing` - the "raw" step of the import is in progress. This is where commit data is fetched from the original repository. The import progress response will include `commit_count` (the total number of raw commits that will be imported) and `percent` (0 - 100, the current progress through the import). + * * `mapping` - the "rewrite" step of the import is in progress. This is where SVN branches are converted to Git branches, and where author updates are applied. The import progress response does not include progress information. + * * `pushing` - the "push" step of the import is in progress. This is where the importer updates the repository on GitHub. The import progress response will include `push_percent`, which is the percent value reported by `git push` when it is "Writing objects". + * * `complete` - the import is complete, and the repository is ready on GitHub. + * + * If there are problems, you will see one of these in the `status` field: + * + * * `auth_failed` - the import requires authentication in order to connect to the original repository. 
To update authentication for the import, please see the [Update an import](https://docs.github.com/rest/reference/migrations#update-an-import) section. + * * `error` - the import encountered an error. The import progress response will include the `failed_step` and an error message. Contact [GitHub Support](https://support.github.com/contact?tags=dotcom-rest-api) for more information. + * * `detection_needs_auth` - the importer requires authentication for the originating repository to continue detection. To update authentication for the import, please see the [Update an import](https://docs.github.com/rest/reference/migrations#update-an-import) section. + * * `detection_found_nothing` - the importer didn't recognize any source control at the URL. To resolve, [Cancel the import](https://docs.github.com/rest/reference/migrations#cancel-an-import) and [retry](https://docs.github.com/rest/reference/migrations#start-an-import) with the correct URL. + * * `detection_found_multiple` - the importer found several projects or repositories at the provided URL. When this is the case, the Import Progress response will also include a `project_choices` field with the possible project choices as values. To update project choice, please see the [Update an import](https://docs.github.com/rest/reference/migrations#update-an-import) section. + * + * **The project_choices field** + * + * When multiple projects are found at the provided URL, the response hash will include a `project_choices` field, the value of which is an array of hashes each representing a project choice. The exact key/value pairs of the project hashes will differ depending on the version control type. + * + * **Git LFS related fields** + * + * This section includes details about Git LFS related fields that may be present in the Import Progress response. + * + * * `use_lfs` - describes whether the import has been opted in or out of using Git LFS. The value can be `opt_in`, `opt_out`, or `undecided` if no action has been taken. + * * `has_large_files` - the boolean value describing whether files larger than 100MB were found during the `importing` step. + * * `large_files_size` - the total size in gigabytes of files larger than 100MB found in the originating repository. + * * `large_files_count` - the total number of files larger than 100MB found in the originating repository. To see a list of these files, make a "Get Large Files" request. + */ + "migrations/get-import-status": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["import"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** Start a source import to a GitHub repository using GitHub Importer. */ + "migrations/start-import": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. 
*/ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 201: { + headers: { + Location?: string; + }; + content: { + "application/json": components["schemas"]["import"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The URL of the originating repository. */ + vcs_url: string; + /** + * @description The originating VCS type. Without this parameter, the import job will take additional time to detect the VCS type before beginning the import. This detection step will be reflected in the response. + * @enum {string} + */ + vcs?: "subversion" | "git" | "mercurial" | "tfvc"; + /** @description If authentication is required, the username to provide to `vcs_url`. */ + vcs_username?: string; + /** @description If authentication is required, the password to provide to `vcs_url`. */ + vcs_password?: string; + /** @description For a tfvc import, the name of the project that is being imported. */ + tfvc_project?: string; + }; + }; + }; + }; + /** Stop an import for a repository. */ + "migrations/cancel-import": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * An import can be updated with credentials or a project choice by passing in the appropriate parameters in this API + * request. If no parameters are provided, the import will be restarted. + * + * Some servers (e.g. TFS servers) can have several projects at a single URL. In those cases the import progress will + * have the status `detection_found_multiple` and the Import Progress response will include a `project_choices` array. + * You can select the project to import by providing one of the objects in the `project_choices` array in the update request. + */ + "migrations/update-import": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["import"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The username to provide to the originating repository. */ + vcs_username?: string; + /** @description The password to provide to the originating repository. */ + vcs_password?: string; + /** + * @description The type of version control system you are migrating from. + * @example "git" + * @enum {string} + */ + vcs?: "subversion" | "tfvc" | "git" | "mercurial"; + /** + * @description For a tfvc import, the name of the project that is being imported. + * @example "project1" + */ + tfvc_project?: string; + } | null; + }; + }; + }; + /** + * Each type of source control system represents authors in a different way. For example, a Git commit author has a display name and an email address, but a Subversion commit author just has a username. The GitHub Importer will make the author information valid, but the author might not be correct. For example, it will change the bare Subversion username `hubot` into something like `hubot `. 
+ * + * This endpoint and the [Map a commit author](https://docs.github.com/rest/reference/migrations#map-a-commit-author) endpoint allow you to provide correct Git author information. + */ + "migrations/get-commit-authors": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** A user ID. Only return users with an ID greater than this ID. */ + since?: components["parameters"]["since-user"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["porter-author"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** Update an author's identity for the import. Your application can continue updating authors any time before you push new commits to the repository. */ + "migrations/map-commit-author": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + author_id: number; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["porter-author"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The new Git author email. */ + email?: string; + /** @description The new Git author name. */ + name?: string; + }; + }; + }; + }; + /** List files larger than 100MB found during the import */ + "migrations/get-large-files": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["porter-large-file"][]; + }; + }; + }; + }; + /** You can import repositories from Subversion, Mercurial, and TFS that include files larger than 100MB. This ability is powered by [Git LFS](https://git-lfs.github.com). You can learn more about our LFS feature and working with large files [on our help site](https://docs.github.com/articles/versioning-large-files/). */ + "migrations/set-lfs-preference": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["import"]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description Whether to store large files during the import. `opt_in` means large files will be stored using Git LFS. `opt_out` means large files will be removed during the import. + * @enum {string} + */ + use_lfs: "opt_in" | "opt_out"; + }; + }; + }; + }; + /** + * Enables an authenticated GitHub App to find the repository's installation information. 
The installation's account type will be either an organization or a user account, depending which account the repository belongs to. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + "apps/get-repo-installation": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["installation"]; + }; + }; + 301: components["responses"]["moved_permanently"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Shows which type of GitHub user can interact with this repository and when the restriction expires. If there are no restrictions, you will see an empty response. */ + "interactions/get-restrictions-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": Partial< + components["schemas"]["interaction-limit-response"] + > & + Partial<{ [key: string]: unknown }>; + }; + }; + }; + }; + /** Temporarily restricts interactions to a certain type of GitHub user within the given repository. You must have owner or admin access to set these restrictions. If an interaction limit is set for the user or organization that owns this repository, you will receive a `409 Conflict` response and will not be able to use this endpoint to change the interaction limit for a single repository. */ + "interactions/set-restrictions-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["interaction-limit-response"]; + }; + }; + /** Response */ + 409: unknown; + }; + requestBody: { + content: { + "application/json": components["schemas"]["interaction-limit"]; + }; + }; + }; + /** Removes all interaction restrictions from the given repository. You must have owner or admin access to remove restrictions. If the interaction limit is set for the user or organization that owns this repository, you will receive a `409 Conflict` response and will not be able to use this endpoint to change the interaction limit for a single repository. */ + "interactions/remove-restrictions-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 204: never; + /** Response */ + 409: unknown; + }; + }; + /** When authenticating as a user with admin rights to a repository, this endpoint will list all currently open repository invitations. */ + "repos/list-invitations": { + parameters: { + path: { + /** The account owner of the repository. 
The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["repository-invitation"][]; + }; + }; + }; + }; + "repos/delete-invitation": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the invitation. */ + invitation_id: components["parameters"]["invitation-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + "repos/update-invitation": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the invitation. */ + invitation_id: components["parameters"]["invitation-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["repository-invitation"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The permissions that the associated user will have on the repository. Valid values are `read`, `write`, `maintain`, `triage`, and `admin`. + * @enum {string} + */ + permissions?: "read" | "write" | "maintain" | "triage" | "admin"; + }; + }; + }; + }; + /** + * List issues in a repository. + * + * **Note**: GitHub's REST API v3 considers every pull request an issue, but not every issue is a pull request. For this + * reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by + * the `pull_request` key. Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull + * request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint. + */ + "issues/list-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** If an `integer` is passed, it should refer to a milestone by its `number` field. If the string `*` is passed, issues with any milestone are accepted. If the string `none` is passed, issues without milestones are returned. */ + milestone?: string; + /** Indicates the state of the issues to return. */ + state?: "open" | "closed" | "all"; + /** Can be the name of a user. Pass in `none` for issues with no assigned user, and `*` for issues assigned to any user. */ + assignee?: string; + /** The user that created the issue. */ + creator?: string; + /** A user that's mentioned in the issue. */ + mentioned?: string; + /** A list of comma separated label names. Example: `bug,ui,@high` */ + labels?: components["parameters"]["labels"]; + /** What to sort results by. 
*/ + sort?: "created" | "updated" | "comments"; + /** The direction to sort the results by. */ + direction?: components["parameters"]["direction"]; + /** Only show notifications updated after the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ + since?: components["parameters"]["since"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["issue"][]; + }; + }; + 301: components["responses"]["moved_permanently"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Any user with pull access to a repository can create an issue. If [issues are disabled in the repository](https://docs.github.com/articles/disabling-issues/), the API returns a `410 Gone` status. + * + * This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + "issues/create": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 201: { + headers: { + Location?: string; + }; + content: { + "application/json": components["schemas"]["issue"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 410: components["responses"]["gone"]; + 422: components["responses"]["validation_failed"]; + 503: components["responses"]["service_unavailable"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The title of the issue. */ + title: string | number; + /** @description The contents of the issue. */ + body?: string; + /** @description Login for the user that this issue should be assigned to. _NOTE: Only users with push access can set the assignee for new issues. The assignee is silently dropped otherwise. **This field is deprecated.**_ */ + assignee?: string | null; + milestone?: (string | number) | null; + /** @description Labels to associate with this issue. _NOTE: Only users with push access can set labels for new issues. Labels are silently dropped otherwise._ */ + labels?: ( + | string + | { + id?: number; + name?: string; + description?: string | null; + color?: string | null; + } + )[]; + /** @description Logins for Users to assign to this issue. _NOTE: Only users with push access can set assignees for new issues. Assignees are silently dropped otherwise._ */ + assignees?: string[]; + }; + }; + }; + }; + /** By default, Issue Comments are ordered by ascending ID. */ + "issues/list-comments-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. 
*/ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The property to sort the results by. `created` means when the repository was starred. `updated` means when the repository was last pushed to. */ + sort?: components["parameters"]["sort"]; + /** Either `asc` or `desc`. Ignored without the `sort` parameter. */ + direction?: "asc" | "desc"; + /** Only show notifications updated after the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ + since?: components["parameters"]["since"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["issue-comment"][]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + "issues/get-comment": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the comment. */ + comment_id: components["parameters"]["comment-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["issue-comment"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + "issues/delete-comment": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the comment. */ + comment_id: components["parameters"]["comment-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + "issues/update-comment": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the comment. */ + comment_id: components["parameters"]["comment-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["issue-comment"]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The contents of the comment. */ + body: string; + }; + }; + }; + }; + /** List the reactions to an [issue comment](https://docs.github.com/rest/reference/issues#comments). */ + "reactions/list-for-issue-comment": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the comment. */ + comment_id: components["parameters"]["comment-id"]; + }; + query: { + /** Returns a single [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types). 
Omit this parameter to list all reactions to an issue comment. */ + content?: + | "+1" + | "-1" + | "laugh" + | "confused" + | "heart" + | "hooray" + | "rocket" + | "eyes"; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["reaction"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** Create a reaction to an [issue comment](https://docs.github.com/rest/reference/issues#comments). A response with an HTTP `200` status means that you already added the reaction type to this issue comment. */ + "reactions/create-for-issue-comment": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the comment. */ + comment_id: components["parameters"]["comment-id"]; + }; + }; + responses: { + /** Reaction exists */ + 200: { + content: { + "application/json": components["schemas"]["reaction"]; + }; + }; + /** Reaction created */ + 201: { + content: { + "application/json": components["schemas"]["reaction"]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types) to add to the issue comment. + * @enum {string} + */ + content: + | "+1" + | "-1" + | "laugh" + | "confused" + | "heart" + | "hooray" + | "rocket" + | "eyes"; + }; + }; + }; + }; + /** + * **Note:** You can also specify a repository by `repository_id` using the route `DELETE delete /repositories/:repository_id/issues/comments/:comment_id/reactions/:reaction_id`. + * + * Delete a reaction to an [issue comment](https://docs.github.com/rest/reference/issues#comments). + */ + "reactions/delete-for-issue-comment": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the comment. */ + comment_id: components["parameters"]["comment-id"]; + /** The unique identifier of the reaction. */ + reaction_id: components["parameters"]["reaction-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + "issues/list-events-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["issue-event"][]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + }; + "issues/get-event": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. 
*/ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + event_id: number; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["issue-event"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 410: components["responses"]["gone"]; + }; + }; + /** + * The API returns a [`301 Moved Permanently` status](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-redirects-redirects) if the issue was + * [transferred](https://docs.github.com/articles/transferring-an-issue-to-another-repository/) to another repository. If + * the issue was transferred to or deleted from a repository where the authenticated user lacks read access, the API + * returns a `404 Not Found` status. If the issue was deleted from a repository where the authenticated user has read + * access, the API returns a `410 Gone` status. To receive webhook events for transferred and deleted issues, subscribe + * to the [`issues`](https://docs.github.com/webhooks/event-payloads/#issues) webhook. + * + * **Note**: GitHub's REST API v3 considers every pull request an issue, but not every issue is a pull request. For this + * reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by + * the `pull_request` key. Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull + * request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint. + */ + "issues/get": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the issue. */ + issue_number: components["parameters"]["issue-number"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["issue"]; + }; + }; + 301: components["responses"]["moved_permanently"]; + 304: components["responses"]["not_modified"]; + 404: components["responses"]["not_found"]; + 410: components["responses"]["gone"]; + }; + }; + /** Issue owners and users with push access can edit an issue. */ + "issues/update": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the issue. */ + issue_number: components["parameters"]["issue-number"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["issue"]; + }; + }; + 301: components["responses"]["moved_permanently"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 410: components["responses"]["gone"]; + 422: components["responses"]["validation_failed"]; + 503: components["responses"]["service_unavailable"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The title of the issue. */ + title?: (string | number) | null; + /** @description The contents of the issue. 
*/ + body?: string | null; + /** @description Login for the user that this issue should be assigned to. **This field is deprecated.** */ + assignee?: string | null; + /** + * @description State of the issue. Either `open` or `closed`. + * @enum {string} + */ + state?: "open" | "closed"; + /** + * @description The reason for the current state + * @example not_planned + * @enum {string|null} + */ + state_reason?: ("completed" | "not_planned" | "reopened") | null; + milestone?: (string | number) | null; + /** @description Labels to associate with this issue. Pass one or more Labels to _replace_ the set of Labels on this Issue. Send an empty array (`[]`) to clear all Labels from the Issue. _NOTE: Only users with push access can set labels for issues. Labels are silently dropped otherwise._ */ + labels?: ( + | string + | { + id?: number; + name?: string; + description?: string | null; + color?: string | null; + } + )[]; + /** @description Logins for Users to assign to this issue. Pass one or more user logins to _replace_ the set of assignees on this Issue. Send an empty array (`[]`) to clear all assignees from the Issue. _NOTE: Only users with push access can set assignees for new issues. Assignees are silently dropped otherwise._ */ + assignees?: string[]; + }; + }; + }; + }; + /** Adds up to 10 assignees to an issue. Users already assigned to an issue are not replaced. */ + "issues/add-assignees": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the issue. */ + issue_number: components["parameters"]["issue-number"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["issue"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description Usernames of people to assign this issue to. _NOTE: Only users with push access can add assignees to an issue. Assignees are silently ignored otherwise._ */ + assignees?: string[]; + }; + }; + }; + }; + /** Removes one or more assignees from an issue. */ + "issues/remove-assignees": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the issue. */ + issue_number: components["parameters"]["issue-number"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["issue"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description Usernames of assignees to remove from an issue. _NOTE: Only users with push access can remove assignees from an issue. Assignees are silently ignored otherwise._ */ + assignees: string[]; + }; + }; + }; + }; + /** Issue Comments are ordered by ascending ID. */ + "issues/list-comments": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the issue. 
*/ + issue_number: components["parameters"]["issue-number"]; + }; + query: { + /** Only show notifications updated after the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ + since?: components["parameters"]["since"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["issue-comment"][]; + }; + }; + 404: components["responses"]["not_found"]; + 410: components["responses"]["gone"]; + }; + }; + /** This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. */ + "issues/create-comment": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the issue. */ + issue_number: components["parameters"]["issue-number"]; + }; + }; + responses: { + /** Response */ + 201: { + headers: { + Location?: string; + }; + content: { + "application/json": components["schemas"]["issue-comment"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 410: components["responses"]["gone"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The contents of the comment. */ + body: string; + }; + }; + }; + }; + "issues/list-events": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the issue. */ + issue_number: components["parameters"]["issue-number"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["issue-event-for-issue"][]; + }; + }; + 410: components["responses"]["gone"]; + }; + }; + "issues/list-labels-on-issue": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the issue. */ + issue_number: components["parameters"]["issue-number"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. 
*/ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["label"][]; + }; + }; + 301: components["responses"]["moved_permanently"]; + 404: components["responses"]["not_found"]; + 410: components["responses"]["gone"]; + }; + }; + /** Removes any previous labels and sets the new labels for an issue. */ + "issues/set-labels": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the issue. */ + issue_number: components["parameters"]["issue-number"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["label"][]; + }; + }; + 301: components["responses"]["moved_permanently"]; + 404: components["responses"]["not_found"]; + 410: components["responses"]["gone"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": + | { + /** @description The names of the labels to set for the issue. The labels you set replace any existing labels. You can pass an empty array to remove all labels. Alternatively, you can pass a single label as a `string` or an `array` of labels directly, but GitHub recommends passing an object with the `labels` key. You can also add labels to the existing labels for an issue. For more information, see "[Add labels to an issue](https://docs.github.com/rest/reference/issues#add-labels-to-an-issue)." */ + labels?: string[]; + } + | { + labels?: { + name: string; + }[]; + }; + }; + }; + }; + "issues/add-labels": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the issue. */ + issue_number: components["parameters"]["issue-number"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["label"][]; + }; + }; + 301: components["responses"]["moved_permanently"]; + 404: components["responses"]["not_found"]; + 410: components["responses"]["gone"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": + | { + /** @description The names of the labels to add to the issue's existing labels. You can pass an empty array to remove all labels. Alternatively, you can pass a single label as a `string` or an `array` of labels directly, but GitHub recommends passing an object with the `labels` key. You can also replace all of the labels for an issue. For more information, see "[Set labels for an issue](https://docs.github.com/rest/reference/issues#set-labels-for-an-issue)." */ + labels?: string[]; + } + | { + labels?: { + name: string; + }[]; + }; + }; + }; + }; + "issues/remove-all-labels": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the issue. 
*/ + issue_number: components["parameters"]["issue-number"]; + }; + }; + responses: { + /** Response */ + 204: never; + 301: components["responses"]["moved_permanently"]; + 404: components["responses"]["not_found"]; + 410: components["responses"]["gone"]; + }; + }; + /** Removes the specified label from the issue, and returns the remaining labels on the issue. This endpoint returns a `404 Not Found` status if the label does not exist. */ + "issues/remove-label": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the issue. */ + issue_number: components["parameters"]["issue-number"]; + name: string; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["label"][]; + }; + }; + 301: components["responses"]["moved_permanently"]; + 404: components["responses"]["not_found"]; + 410: components["responses"]["gone"]; + }; + }; + /** + * Users with push access can lock an issue or pull request's conversation. + * + * Note that, if you choose not to pass any parameters, you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." + */ + "issues/lock": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the issue. */ + issue_number: components["parameters"]["issue-number"]; + }; + }; + responses: { + /** Response */ + 204: never; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 410: components["responses"]["gone"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The reason for locking the issue or pull request conversation. Lock will fail if you don't use one of these reasons: + * \* `off-topic` + * \* `too heated` + * \* `resolved` + * \* `spam` + * @enum {string} + */ + lock_reason?: "off-topic" | "too heated" | "resolved" | "spam"; + } | null; + }; + }; + }; + /** Users with push access can unlock an issue's conversation. */ + "issues/unlock": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the issue. */ + issue_number: components["parameters"]["issue-number"]; + }; + }; + responses: { + /** Response */ + 204: never; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** List the reactions to an [issue](https://docs.github.com/rest/reference/issues). */ + "reactions/list-for-issue": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the issue. 
*/ + issue_number: components["parameters"]["issue-number"]; + }; + query: { + /** Returns a single [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types). Omit this parameter to list all reactions to an issue. */ + content?: + | "+1" + | "-1" + | "laugh" + | "confused" + | "heart" + | "hooray" + | "rocket" + | "eyes"; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["reaction"][]; + }; + }; + 404: components["responses"]["not_found"]; + 410: components["responses"]["gone"]; + }; + }; + /** Create a reaction to an [issue](https://docs.github.com/rest/reference/issues/). A response with an HTTP `200` status means that you already added the reaction type to this issue. */ + "reactions/create-for-issue": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the issue. */ + issue_number: components["parameters"]["issue-number"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["reaction"]; + }; + }; + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["reaction"]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types) to add to the issue. + * @enum {string} + */ + content: + | "+1" + | "-1" + | "laugh" + | "confused" + | "heart" + | "hooray" + | "rocket" + | "eyes"; + }; + }; + }; + }; + /** + * **Note:** You can also specify a repository by `repository_id` using the route `DELETE /repositories/:repository_id/issues/:issue_number/reactions/:reaction_id`. + * + * Delete a reaction to an [issue](https://docs.github.com/rest/reference/issues/). + */ + "reactions/delete-for-issue": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the issue. */ + issue_number: components["parameters"]["issue-number"]; + /** The unique identifier of the reaction. */ + reaction_id: components["parameters"]["reaction-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + "issues/list-events-for-timeline": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the issue. */ + issue_number: components["parameters"]["issue-number"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. 
*/ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["timeline-issue-events"][]; + }; + }; + 404: components["responses"]["not_found"]; + 410: components["responses"]["gone"]; + }; + }; + "repos/list-deploy-keys": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["deploy-key"][]; + }; + }; + }; + }; + /** You can create a read-only deploy key. */ + "repos/create-deploy-key": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 201: { + headers: { + Location?: string; + }; + content: { + "application/json": components["schemas"]["deploy-key"]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description A name for the key. */ + title?: string; + /** @description The contents of the key. */ + key: string; + /** + * @description If `true`, the key will only be able to read repository contents. Otherwise, the key will be able to read and write. + * + * Deploy keys with write access can perform the same actions as an organization member with admin access, or a collaborator on a personal repository. For more information, see "[Repository permission levels for an organization](https://docs.github.com/articles/repository-permission-levels-for-an-organization/)" and "[Permission levels for a user account repository](https://docs.github.com/articles/permission-levels-for-a-user-account-repository/)." + */ + read_only?: boolean; + }; + }; + }; + }; + "repos/get-deploy-key": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the key. */ + key_id: components["parameters"]["key-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["deploy-key"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** Deploy keys are immutable. If you need to update a key, remove the key and create a new one instead. */ + "repos/delete-deploy-key": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the key. 
*/ + key_id: components["parameters"]["key-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + "issues/list-labels-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["label"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + "issues/create-label": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 201: { + headers: { + Location?: string; + }; + content: { + "application/json": components["schemas"]["label"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The name of the label. Emoji can be added to label names, using either native emoji or colon-style markup. For example, typing `:strawberry:` will render the emoji ![:strawberry:](https://github.githubassets.com/images/icons/emoji/unicode/1f353.png ":strawberry:"). For a full list of available emoji and codes, see "[Emoji cheat sheet](https://github.com/ikatyang/emoji-cheat-sheet)." */ + name: string; + /** @description The [hexadecimal color code](http://www.color-hex.com/) for the label, without the leading `#`. */ + color?: string; + /** @description A short description of the label. Must be 100 characters or fewer. */ + description?: string; + }; + }; + }; + }; + "issues/get-label": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + name: string; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["label"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + "issues/delete-label": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + name: string; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + "issues/update-label": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + name: string; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["label"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The new name of the label. 
Emoji can be added to label names, using either native emoji or colon-style markup. For example, typing `:strawberry:` will render the emoji ![:strawberry:](https://github.githubassets.com/images/icons/emoji/unicode/1f353.png ":strawberry:"). For a full list of available emoji and codes, see "[Emoji cheat sheet](https://github.com/ikatyang/emoji-cheat-sheet)." */ + new_name?: string; + /** @description The [hexadecimal color code](http://www.color-hex.com/) for the label, without the leading `#`. */ + color?: string; + /** @description A short description of the label. Must be 100 characters or fewer. */ + description?: string; + }; + }; + }; + }; + /** Lists languages for the specified repository. The value shown for each language is the number of bytes of code written in that language. */ + "repos/list-languages": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["language"]; + }; + }; + }; + }; + "repos/enable-lfs-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + 202: components["responses"]["accepted"]; + /** + * We will return a 403 with one of the following messages: + * + * - Git LFS support not enabled because Git LFS is globally disabled. + * - Git LFS support not enabled because Git LFS is disabled for the root repository in the network. + * - Git LFS support not enabled because Git LFS is disabled for . + */ + 403: unknown; + }; + }; + "repos/disable-lfs-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * This method returns the contents of the repository's license file, if one is detected. + * + * Similar to [Get repository content](https://docs.github.com/rest/reference/repos#get-repository-content), this method also supports [custom media types](https://docs.github.com/rest/overview/media-types) for retrieving the raw license content or rendered license HTML. + */ + "licenses/get-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["license-content"]; + }; + }; + }; + }; + /** Sync a branch of a forked repository to keep it up-to-date with the upstream repository. */ + "repos/merge-upstream": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. 
*/ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** The branch has been successfully synced with the upstream repository */ + 200: { + content: { + "application/json": components["schemas"]["merged-upstream"]; + }; + }; + /** The branch could not be synced because of a merge conflict */ + 409: unknown; + /** The branch could not be synced for some other reason */ + 422: unknown; + }; + requestBody: { + content: { + "application/json": { + /** @description The name of the branch which should be updated to match upstream. */ + branch: string; + }; + }; + }; + }; + "repos/merge": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Successful Response (The resulting merge commit) */ + 201: { + content: { + "application/json": components["schemas"]["commit"]; + }; + }; + /** Response when already merged */ + 204: never; + 403: components["responses"]["forbidden"]; + /** Not Found when the base or head does not exist */ + 404: unknown; + /** Conflict when there is a merge conflict */ + 409: unknown; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The name of the base branch that the head will be merged into. */ + base: string; + /** @description The head to merge. This can be a branch name or a commit SHA1. */ + head: string; + /** @description Commit message to use for the merge commit. If omitted, a default message will be used. */ + commit_message?: string; + }; + }; + }; + }; + "issues/list-milestones": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The state of the milestone. Either `open`, `closed`, or `all`. */ + state?: "open" | "closed" | "all"; + /** What to sort results by. Either `due_on` or `completeness`. */ + sort?: "due_on" | "completeness"; + /** The direction of the sort. Either `asc` or `desc`. */ + direction?: "asc" | "desc"; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["milestone"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + "issues/create-milestone": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 201: { + headers: { + Location?: string; + }; + content: { + "application/json": components["schemas"]["milestone"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The title of the milestone. */ + title: string; + /** + * @description The state of the milestone. Either `open` or `closed`. 
+ * @default open + * @enum {string} + */ + state?: "open" | "closed"; + /** @description A description of the milestone. */ + description?: string; + /** + * Format: date-time + * @description The milestone due date. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + due_on?: string; + }; + }; + }; + }; + "issues/get-milestone": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the milestone. */ + milestone_number: components["parameters"]["milestone-number"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["milestone"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + "issues/delete-milestone": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the milestone. */ + milestone_number: components["parameters"]["milestone-number"]; + }; + }; + responses: { + /** Response */ + 204: never; + 404: components["responses"]["not_found"]; + }; + }; + "issues/update-milestone": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the milestone. */ + milestone_number: components["parameters"]["milestone-number"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["milestone"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The title of the milestone. */ + title?: string; + /** + * @description The state of the milestone. Either `open` or `closed`. + * @default open + * @enum {string} + */ + state?: "open" | "closed"; + /** @description A description of the milestone. */ + description?: string; + /** + * Format: date-time + * @description The milestone due date. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + due_on?: string; + }; + }; + }; + }; + "issues/list-labels-for-milestone": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the milestone. */ + milestone_number: components["parameters"]["milestone-number"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["label"][]; + }; + }; + }; + }; + /** List all notifications for the current user. 
*/ + "activity/list-repo-notifications-for-authenticated-user": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** If `true`, show notifications marked as read. */ + all?: components["parameters"]["all"]; + /** If `true`, only shows notifications in which the user is directly participating or mentioned. */ + participating?: components["parameters"]["participating"]; + /** Only show notifications updated after the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ + since?: components["parameters"]["since"]; + /** Only show notifications updated before the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ + before?: components["parameters"]["before"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["thread"][]; + }; + }; + }; + }; + /** Marks all notifications in a repository as "read" removes them from the [default view on GitHub](https://github.com/notifications). If the number of notifications is too large to complete in one request, you will receive a `202 Accepted` status and GitHub will run an asynchronous process to mark notifications as "read." To check whether any "unread" notifications remain, you can use the [List repository notifications for the authenticated user](https://docs.github.com/rest/reference/activity#list-repository-notifications-for-the-authenticated-user) endpoint and pass the query parameter `all=false`. */ + "activity/mark-repo-notifications-as-read": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 202: { + content: { + "application/json": { + message?: string; + url?: string; + }; + }; + }; + /** Reset Content */ + 205: unknown; + }; + requestBody: { + content: { + "application/json": { + /** + * Format: date-time + * @description Describes the last point that notifications were checked. Anything updated since this time will not be marked as read. If you omit this parameter, all notifications are marked as read. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. Default: The current timestamp. + */ + last_read_at?: string; + }; + }; + }; + }; + "repos/get-pages": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["page"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** Updates information for a GitHub Pages site. 
For more information, see "[About GitHub Pages](/github/working-with-github-pages/about-github-pages). */ + "repos/update-information-about-pages-site": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 204: never; + 400: components["responses"]["bad_request"]; + 409: components["responses"]["conflict"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description Specify a custom domain for the repository. Sending a `null` value will remove the custom domain. For more about custom domains, see "[Using a custom domain with GitHub Pages](https://docs.github.com/articles/using-a-custom-domain-with-github-pages/)." */ + cname?: string | null; + /** @description Specify whether HTTPS should be enforced for the repository. */ + https_enforced?: boolean; + /** @description Configures access controls for the GitHub Pages site. If public is set to `true`, the site is accessible to anyone on the internet. If set to `false`, the site will only be accessible to users who have at least `read` access to the repository that published the site. This includes anyone in your Enterprise if the repository is set to `internal` visibility. This feature is only available to repositories in an organization on an Enterprise plan. */ + public?: boolean; + /** + * @description The process by which the GitHub Pages site will be built. `workflow` means that the site is built by a custom GitHub Actions workflow. `legacy` means that the site is built by GitHub when changes are pushed to a specific branch. + * @enum {string} + */ + build_type?: "legacy" | "workflow"; + source?: Partial<"gh-pages" | "master" | "master /docs"> & + Partial<{ + /** @description The repository branch used to publish your site's source files. */ + branch: string; + /** + * @description The repository directory that includes the source files for the Pages site. Allowed paths are `/` or `/docs`. + * @enum {string} + */ + path: "/" | "/docs"; + }>; + }; + }; + }; + }; + /** Configures a GitHub Pages site. For more information, see "[About GitHub Pages](/github/working-with-github-pages/about-github-pages)." */ + "repos/create-pages-site": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["page"]; + }; + }; + 409: components["responses"]["conflict"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The process in which the Page will be built. Possible values are `"legacy"` and `"workflow"`. + * @enum {string} + */ + build_type?: "legacy" | "workflow"; + /** @description The source branch and directory used to publish your Pages site. */ + source?: { + /** @description The repository branch used to publish your site's source files. */ + branch: string; + /** + * @description The repository directory that includes the source files for the Pages site. Allowed paths are `/` or `/docs`. 
Default: `/` + * @default / + * @enum {string} + */ + path?: "/" | "/docs"; + }; + } | null; + }; + }; + }; + "repos/delete-pages-site": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 204: never; + 404: components["responses"]["not_found"]; + 409: components["responses"]["conflict"]; + 422: components["responses"]["validation_failed"]; + }; + }; + "repos/list-pages-builds": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["page-build"][]; + }; + }; + }; + }; + /** + * You can request that your site be built from the latest revision on the default branch. This has the same effect as pushing a commit to your default branch, but does not require an additional commit. Manually triggering page builds can be helpful when diagnosing build warnings and failures. + * + * Build requests are limited to one concurrent build per repository and one concurrent build per requester. If you request a build while another is still in progress, the second request will be queued until the first completes. + */ + "repos/request-pages-build": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["page-build-status"]; + }; + }; + }; + }; + "repos/get-latest-pages-build": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["page-build"]; + }; + }; + }; + }; + "repos/get-pages-build": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + build_id: number; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["page-build"]; + }; + }; + }; + }; + /** + * Create a GitHub Pages deployment for a repository. + * + * Users must have write permissions. GitHub Apps must have the `pages:write` permission to use this endpoint. + */ + "repos/create-pages-deployment": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. 
The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["page-deployment"]; + }; + }; + 400: components["responses"]["bad_request"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The URL of an artifact that contains the .zip or .tar of static assets to deploy. The artifact belongs to the repository. */ + artifact_url: string; + /** + * @description The target environment for this GitHub Pages deployment. + * @default github-pages + */ + environment?: string; + /** + * @description A unique string that represents the version of the build for this deployment. + * @default GITHUB_SHA + */ + pages_build_version: string; + /** @description The OIDC token issued by GitHub Actions certifying the origin of the deployment. */ + oidc_token: string; + }; + }; + }; + }; + /** + * Gets a health check of the DNS settings for the `CNAME` record configured for a repository's GitHub Pages. + * + * The first request to this endpoint returns a `202 Accepted` status and starts an asynchronous background task to get the results for the domain. After the background task completes, subsequent requests to this endpoint return a `200 OK` status with the health check results in the response. + * + * Users must have admin or owner permissions. GitHub Apps must have the `pages:write` and `administration:write` permission to use this endpoint. + */ + "repos/get-pages-health-check": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["pages-health-check"]; + }; + }; + /** Empty response */ + 202: { + content: { + "application/json": components["schemas"]["empty-object"]; + }; + }; + /** Custom domains are not available for GitHub Pages */ + 400: unknown; + 404: components["responses"]["not_found"]; + /** There isn't a CNAME for this page */ + 422: unknown; + }; + }; + /** Lists the projects in a repository. Returns a `404 Not Found` status if projects are disabled in the repository. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. */ + "projects/list-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** Indicates the state of the projects to return. */ + state?: "open" | "closed" | "all"; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. 
*/ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["project"][]; + }; + }; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 410: components["responses"]["gone"]; + 422: components["responses"]["validation_failed_simple"]; + }; + }; + /** Creates a repository project board. Returns a `410 Gone` status if projects are disabled in the repository or if the repository does not have existing classic projects. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. */ + "projects/create-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["project"]; + }; + }; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 410: components["responses"]["gone"]; + 422: components["responses"]["validation_failed_simple"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The name of the project. */ + name: string; + /** @description The description of the project. */ + body?: string; + }; + }; + }; + }; + /** Draft pull requests are available in public repositories with GitHub Free and GitHub Free for organizations, GitHub Pro, and legacy per-repository billing plans, and in public and private repositories with GitHub Team and GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. */ + "pulls/list": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** Either `open`, `closed`, or `all` to filter by state. */ + state?: "open" | "closed" | "all"; + /** Filter pulls by head user or head organization and branch name in the format of `user:ref-name` or `organization:ref-name`. For example: `github:new-script-format` or `octocat:test-branch`. */ + head?: string; + /** Filter pulls by base branch name. Example: `gh-pages`. */ + base?: string; + /** What to sort results by. `popularity` will sort by the number of comments. `long-running` will sort by date created and will limit the results to pull requests that have been open for more than a month and have had activity within the past month. */ + sort?: "created" | "updated" | "popularity" | "long-running"; + /** The direction of the sort. Default: `desc` when sort is `created` or sort is not specified, otherwise `asc`. */ + direction?: "asc" | "desc"; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. 
*/ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["pull-request-simple"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Draft pull requests are available in public repositories with GitHub Free and GitHub Free for organizations, GitHub Pro, and legacy per-repository billing plans, and in public and private repositories with GitHub Team and GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * To open or update a pull request in a public repository, you must have write access to the head or the source branch. For organization-owned repositories, you must be a member of the organization that owns the repository to open or update a pull request. + * + * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-rate-limits)" for details. + */ + "pulls/create": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 201: { + headers: { + Location?: string; + }; + content: { + "application/json": components["schemas"]["pull-request"]; + }; + }; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The title of the new pull request. Required unless `issue` is specified. */ + title?: string; + /** @description The name of the branch where your changes are implemented. For cross-repository pull requests in the same network, namespace `head` with a user like this: `username:branch`. */ + head: string; + /** @description The name of the branch you want the changes pulled into. This should be an existing branch on the current repository. You cannot submit a pull request to one repository that requests a merge to a base of another repository. */ + base: string; + /** @description The contents of the pull request. */ + body?: string; + /** @description Indicates whether [maintainers can modify](https://docs.github.com/articles/allowing-changes-to-a-pull-request-branch-created-from-a-fork/) the pull request. */ + maintainer_can_modify?: boolean; + /** @description Indicates whether the pull request is a draft. See "[Draft Pull Requests](https://docs.github.com/en/articles/about-pull-requests#draft-pull-requests)" in the GitHub Help documentation to learn more. */ + draft?: boolean; + /** + * @description An issue in the repository to convert to a pull request. The issue title, body, and comments will become the title, body, and comments on the new pull request. Required unless `title` is specified. 
+ * @example 1 + */ + issue?: number; + }; + }; + }; + }; + /** Lists review comments for all pull requests in a repository. By default, review comments are in ascending order by ID. */ + "pulls/list-review-comments-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + sort?: "created" | "updated" | "created_at"; + /** The direction to sort results. Ignored without `sort` parameter. */ + direction?: "asc" | "desc"; + /** Only show notifications updated after the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ + since?: components["parameters"]["since"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["pull-request-review-comment"][]; + }; + }; + }; + }; + /** Provides details for a review comment. */ + "pulls/get-review-comment": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the comment. */ + comment_id: components["parameters"]["comment-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["pull-request-review-comment"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** Deletes a review comment. */ + "pulls/delete-review-comment": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the comment. */ + comment_id: components["parameters"]["comment-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 404: components["responses"]["not_found"]; + }; + }; + /** Enables you to edit a review comment. */ + "pulls/update-review-comment": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the comment. */ + comment_id: components["parameters"]["comment-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["pull-request-review-comment"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The text of the reply to the review comment. */ + body: string; + }; + }; + }; + }; + /** List the reactions to a [pull request review comment](https://docs.github.com/rest/reference/pulls#review-comments). */ + "reactions/list-for-pull-request-review-comment": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. 
The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the comment. */ + comment_id: components["parameters"]["comment-id"]; + }; + query: { + /** Returns a single [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types). Omit this parameter to list all reactions to a pull request review comment. */ + content?: + | "+1" + | "-1" + | "laugh" + | "confused" + | "heart" + | "hooray" + | "rocket" + | "eyes"; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["reaction"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** Create a reaction to a [pull request review comment](https://docs.github.com/rest/reference/pulls#comments). A response with an HTTP `200` status means that you already added the reaction type to this pull request review comment. */ + "reactions/create-for-pull-request-review-comment": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the comment. */ + comment_id: components["parameters"]["comment-id"]; + }; + }; + responses: { + /** Reaction exists */ + 200: { + content: { + "application/json": components["schemas"]["reaction"]; + }; + }; + /** Reaction created */ + 201: { + content: { + "application/json": components["schemas"]["reaction"]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types) to add to the pull request review comment. + * @enum {string} + */ + content: + | "+1" + | "-1" + | "laugh" + | "confused" + | "heart" + | "hooray" + | "rocket" + | "eyes"; + }; + }; + }; + }; + /** + * **Note:** You can also specify a repository by `repository_id` using the route `DELETE /repositories/:repository_id/pulls/comments/:comment_id/reactions/:reaction_id.` + * + * Delete a reaction to a [pull request review comment](https://docs.github.com/rest/reference/pulls#review-comments). + */ + "reactions/delete-for-pull-request-comment": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the comment. */ + comment_id: components["parameters"]["comment-id"]; + /** The unique identifier of the reaction. */ + reaction_id: components["parameters"]["reaction-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * Draft pull requests are available in public repositories with GitHub Free and GitHub Free for organizations, GitHub Pro, and legacy per-repository billing plans, and in public and private repositories with GitHub Team and GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. 
+ * + * Lists details of a pull request by providing its number. + * + * When you get, [create](https://docs.github.com/rest/reference/pulls/#create-a-pull-request), or [edit](https://docs.github.com/rest/reference/pulls#update-a-pull-request) a pull request, GitHub creates a merge commit to test whether the pull request can be automatically merged into the base branch. This test commit is not added to the base branch or the head branch. You can review the status of the test commit using the `mergeable` key. For more information, see "[Checking mergeability of pull requests](https://docs.github.com/rest/guides/getting-started-with-the-git-database-api#checking-mergeability-of-pull-requests)". + * + * The value of the `mergeable` attribute can be `true`, `false`, or `null`. If the value is `null`, then GitHub has started a background job to compute the mergeability. After giving the job time to complete, resubmit the request. When the job finishes, you will see a non-`null` value for the `mergeable` attribute in the response. If `mergeable` is `true`, then `merge_commit_sha` will be the SHA of the _test_ merge commit. + * + * The value of the `merge_commit_sha` attribute changes depending on the state of the pull request. Before merging a pull request, the `merge_commit_sha` attribute holds the SHA of the _test_ merge commit. After merging a pull request, the `merge_commit_sha` attribute changes depending on how you merged the pull request: + * + * * If merged as a [merge commit](https://docs.github.com/articles/about-merge-methods-on-github/), `merge_commit_sha` represents the SHA of the merge commit. + * * If merged via a [squash](https://docs.github.com/articles/about-merge-methods-on-github/#squashing-your-merge-commits), `merge_commit_sha` represents the SHA of the squashed commit on the base branch. + * * If [rebased](https://docs.github.com/articles/about-merge-methods-on-github/#rebasing-and-merging-your-commits), `merge_commit_sha` represents the commit that the base branch was updated to. + * + * Pass the appropriate [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) to fetch diff and patch formats. + */ + "pulls/get": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the pull request. */ + pull_number: components["parameters"]["pull-number"]; + }; + }; + responses: { + /** Pass the appropriate [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) to fetch diff and patch formats. */ + 200: { + content: { + "application/json": components["schemas"]["pull-request"]; + }; + }; + 304: components["responses"]["not_modified"]; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * Draft pull requests are available in public repositories with GitHub Free and GitHub Free for organizations, GitHub Pro, and legacy per-repository billing plans, and in public and private repositories with GitHub Team and GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. 
+ * + * To open or update a pull request in a public repository, you must have write access to the head or the source branch. For organization-owned repositories, you must be a member of the organization that owns the repository to open or update a pull request. + */ + "pulls/update": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the pull request. */ + pull_number: components["parameters"]["pull-number"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["pull-request"]; + }; + }; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The title of the pull request. */ + title?: string; + /** @description The contents of the pull request. */ + body?: string; + /** + * @description State of this Pull Request. Either `open` or `closed`. + * @enum {string} + */ + state?: "open" | "closed"; + /** @description The name of the branch you want your changes pulled into. This should be an existing branch on the current repository. You cannot update the base branch on a pull request to point to another repository. */ + base?: string; + /** @description Indicates whether [maintainers can modify](https://docs.github.com/articles/allowing-changes-to-a-pull-request-branch-created-from-a-fork/) the pull request. */ + maintainer_can_modify?: boolean; + }; + }; + }; + }; + /** + * Creates a codespace owned by the authenticated user for the specified pull request. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces` repository permission to use this endpoint. + */ + "codespaces/create-with-pr-for-authenticated-user": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the pull request. */ + pull_number: components["parameters"]["pull-number"]; + }; + }; + responses: { + /** Response when the codespace was successfully created */ + 201: { + content: { + "application/json": components["schemas"]["codespace"]; + }; + }; + /** Response when the codespace creation partially failed but is being retried in the background */ + 202: { + content: { + "application/json": components["schemas"]["codespace"]; + }; + }; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + requestBody: { + content: { + "application/json": { + /** @description Location for this codespace. 
Assigned by IP if not provided */ + location?: string; + /** @description IP for location auto-detection when proxying a request */ + client_ip?: string; + /** @description Machine type to use for this codespace */ + machine?: string; + /** @description Path to devcontainer.json config to use for this codespace */ + devcontainer_path?: string; + /** @description Whether to authorize requested permissions from devcontainer.json */ + multi_repo_permissions_opt_out?: boolean; + /** @description Working directory for this codespace */ + working_directory?: string; + /** @description Time in minutes before codespace stops from inactivity */ + idle_timeout_minutes?: number; + /** @description Display name for this codespace */ + display_name?: string; + /** @description Duration in minutes after codespace has gone idle in which it will be deleted. Must be integer minutes between 0 and 43200 (30 days). */ + retention_period_minutes?: number; + } | null; + }; + }; + }; + /** Lists all review comments for a pull request. By default, review comments are in ascending order by ID. */ + "pulls/list-review-comments": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the pull request. */ + pull_number: components["parameters"]["pull-number"]; + }; + query: { + /** The property to sort the results by. `created` means when the repository was starred. `updated` means when the repository was last pushed to. */ + sort?: components["parameters"]["sort"]; + /** The direction to sort results. Ignored without `sort` parameter. */ + direction?: "asc" | "desc"; + /** Only show notifications updated after the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ + since?: components["parameters"]["since"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["pull-request-review-comment"][]; + }; + }; + }; + }; + /** + * Creates a review comment in the pull request diff. To add a regular comment to a pull request timeline, see "[Create an issue comment](https://docs.github.com/rest/reference/issues#create-an-issue-comment)." We recommend creating a review comment using `line`, `side`, and optionally `start_line` and `start_side` if your comment applies to more than one line in the pull request diff. + * + * The `position` parameter is deprecated. If you use `position`, the `line`, `side`, `start_line`, and `start_side` parameters are not required. + * + * **Note:** The position value equals the number of lines down from the first "@@" hunk header in the file you want to add a comment. The line just below the "@@" line is position 1, the next line is position 2, and so on. The position in the diff continues to increase through lines of whitespace and additional hunks until the beginning of a new file. + * + * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. 
See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + "pulls/create-review-comment": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the pull request. */ + pull_number: components["parameters"]["pull-number"]; + }; + }; + responses: { + /** Response */ + 201: { + headers: { + Location?: string; + }; + content: { + "application/json": components["schemas"]["pull-request-review-comment"]; + }; + }; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The text of the review comment. */ + body: string; + /** @description The SHA of the commit needing a comment. Not using the latest commit SHA may render your comment outdated if a subsequent commit modifies the line you specify as the `position`. */ + commit_id: string; + /** @description The relative path to the file that necessitates a comment. */ + path: string; + /** + * @deprecated + * @description **This parameter is deprecated. Use `line` instead**. The position in the diff where you want to add a review comment. Note this value is not the same as the line number in the file. For help finding the position value, read the note above. + */ + position?: number; + /** + * @description In a split diff view, the side of the diff that the pull request's changes appear on. Can be `LEFT` or `RIGHT`. Use `LEFT` for deletions that appear in red. Use `RIGHT` for additions that appear in green or unchanged lines that appear in white and are shown for context. For a multi-line comment, side represents whether the last line of the comment range is a deletion or addition. For more information, see "[Diff view options](https://docs.github.com/en/articles/about-comparing-branches-in-pull-requests#diff-view-options)" in the GitHub Help documentation. + * @enum {string} + */ + side?: "LEFT" | "RIGHT"; + /** @description The line of the blob in the pull request diff that the comment applies to. For a multi-line comment, the last line of the range that your comment applies to. */ + line: number; + /** @description **Required when using multi-line comments unless using `in_reply_to`**. The `start_line` is the first line in the pull request diff that your multi-line comment applies to. To learn more about multi-line comments, see "[Commenting on a pull request](https://docs.github.com/en/articles/commenting-on-a-pull-request#adding-line-comments-to-a-pull-request)" in the GitHub Help documentation. */ + start_line?: number; + /** + * @description **Required when using multi-line comments unless using `in_reply_to`**. The `start_side` is the starting side of the diff that the comment applies to. Can be `LEFT` or `RIGHT`. To learn more about multi-line comments, see "[Commenting on a pull request](https://docs.github.com/en/articles/commenting-on-a-pull-request#adding-line-comments-to-a-pull-request)" in the GitHub Help documentation. See `side` in this table for additional context. 
+ * @enum {string} + */ + start_side?: "LEFT" | "RIGHT" | "side"; + /** + * @description The ID of the review comment to reply to. To find the ID of a review comment with ["List review comments on a pull request"](#list-review-comments-on-a-pull-request). When specified, all parameters other than `body` in the request body are ignored. + * @example 2 + */ + in_reply_to?: number; + }; + }; + }; + }; + /** + * Creates a reply to a review comment for a pull request. For the `comment_id`, provide the ID of the review comment you are replying to. This must be the ID of a _top-level review comment_, not a reply to that comment. Replies to replies are not supported. + * + * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + "pulls/create-reply-for-review-comment": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the pull request. */ + pull_number: components["parameters"]["pull-number"]; + /** The unique identifier of the comment. */ + comment_id: components["parameters"]["comment-id"]; + }; + }; + responses: { + /** Response */ + 201: { + headers: { + Location?: string; + }; + content: { + "application/json": components["schemas"]["pull-request-review-comment"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The text of the review comment. */ + body: string; + }; + }; + }; + }; + /** Lists a maximum of 250 commits for a pull request. To receive a complete commit list for pull requests with more than 250 commits, use the [List commits](https://docs.github.com/rest/reference/repos#list-commits) endpoint. */ + "pulls/list-commits": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the pull request. */ + pull_number: components["parameters"]["pull-number"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["commit"][]; + }; + }; + }; + }; + /** **Note:** Responses include a maximum of 3000 files. The paginated response returns 30 files per page by default. */ + "pulls/list-files": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the pull request. 
*/ + pull_number: components["parameters"]["pull-number"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["diff-entry"][]; + }; + }; + 422: components["responses"]["validation_failed"]; + 500: components["responses"]["internal_error"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + "pulls/check-if-merged": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the pull request. */ + pull_number: components["parameters"]["pull-number"]; + }; + }; + responses: { + /** Response if pull request has been merged */ + 204: never; + /** Not Found if pull request has not been merged */ + 404: unknown; + }; + }; + /** This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. */ + "pulls/merge": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the pull request. */ + pull_number: components["parameters"]["pull-number"]; + }; + }; + responses: { + /** if merge was successful */ + 200: { + content: { + "application/json": components["schemas"]["pull-request-merge-result"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + /** Method Not Allowed if merge cannot be performed */ + 405: { + content: { + "application/json": { + message?: string; + documentation_url?: string; + }; + }; + }; + /** Conflict if sha was provided and pull request head did not match */ + 409: { + content: { + "application/json": { + message?: string; + documentation_url?: string; + }; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description Title for the automatic commit message. */ + commit_title?: string; + /** @description Extra detail to append to automatic commit message. */ + commit_message?: string; + /** @description SHA that pull request head must match to allow merge. */ + sha?: string; + /** + * @description Merge method to use. Possible values are `merge`, `squash` or `rebase`. Default is `merge`. + * @enum {string} + */ + merge_method?: "merge" | "squash" | "rebase"; + } | null; + }; + }; + }; + /** Gets the users or teams whose review is requested for a pull request. Once a requested reviewer submits a review, they are no longer considered a requested reviewer. 
Their review will instead be returned by the [List reviews for a pull request](https://docs.github.com/rest/pulls/reviews#list-reviews-for-a-pull-request) operation. */ + "pulls/list-requested-reviewers": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the pull request. */ + pull_number: components["parameters"]["pull-number"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["pull-request-review-request"]; + }; + }; + }; + }; + /** This endpoint triggers [notifications](https://docs.github.com/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. */ + "pulls/request-reviewers": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the pull request. */ + pull_number: components["parameters"]["pull-number"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["pull-request-simple"]; + }; + }; + 403: components["responses"]["forbidden"]; + /** Unprocessable Entity if user is not a collaborator */ + 422: unknown; + }; + requestBody: { + content: { + "application/json": { + /** @description An array of user `login`s that will be requested. */ + reviewers?: string[]; + /** @description An array of team `slug`s that will be requested. */ + team_reviewers?: string[]; + }; + }; + }; + }; + "pulls/remove-requested-reviewers": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the pull request. */ + pull_number: components["parameters"]["pull-number"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["pull-request-simple"]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description An array of user `login`s that will be removed. */ + reviewers: string[]; + /** @description An array of team `slug`s that will be removed. */ + team_reviewers?: string[]; + }; + }; + }; + }; + /** The list of reviews returns in chronological order. */ + "pulls/list-reviews": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the pull request. 
*/ + pull_number: components["parameters"]["pull-number"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** The list of reviews returns in chronological order. */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["pull-request-review"][]; + }; + }; + }; + }; + /** + * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + * + * Pull request reviews created in the `PENDING` state are not submitted and therefore do not include the `submitted_at` property in the response. To create a pending review for a pull request, leave the `event` parameter blank. For more information about submitting a `PENDING` review, see "[Submit a review for a pull request](https://docs.github.com/rest/pulls#submit-a-review-for-a-pull-request)." + * + * **Note:** To comment on a specific line in a file, you need to first determine the _position_ of that line in the diff. The GitHub REST API v3 offers the `application/vnd.github.v3.diff` [media type](https://docs.github.com/rest/overview/media-types#commits-commit-comparison-and-pull-requests). To see a pull request diff, add this media type to the `Accept` header of a call to the [single pull request](https://docs.github.com/rest/reference/pulls#get-a-pull-request) endpoint. + * + * The `position` value equals the number of lines down from the first "@@" hunk header in the file you want to add a comment. The line just below the "@@" line is position 1, the next line is position 2, and so on. The position in the diff continues to increase through lines of whitespace and additional hunks until the beginning of a new file. + */ + "pulls/create-review": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the pull request. */ + pull_number: components["parameters"]["pull-number"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["pull-request-review"]; + }; + }; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed_simple"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The SHA of the commit that needs a review. Not using the latest commit SHA may render your review comment outdated if a subsequent commit modifies the line you specify as the `position`. Defaults to the most recent commit in the pull request when you do not specify a value. */ + commit_id?: string; + /** @description **Required** when using `REQUEST_CHANGES` or `COMMENT` for the `event` parameter. The body text of the pull request review. */ + body?: string; + /** + * @description The review action you want to perform. 
The review actions include: `APPROVE`, `REQUEST_CHANGES`, or `COMMENT`. By leaving this blank, you set the review action state to `PENDING`, which means you will need to [submit the pull request review](https://docs.github.com/rest/pulls#submit-a-review-for-a-pull-request) when you are ready. + * @enum {string} + */ + event?: "APPROVE" | "REQUEST_CHANGES" | "COMMENT"; + /** @description Use the following table to specify the location, destination, and contents of the draft review comment. */ + comments?: { + /** @description The relative path to the file that necessitates a review comment. */ + path: string; + /** @description The position in the diff where you want to add a review comment. Note this value is not the same as the line number in the file. For help finding the position value, read the note below. */ + position?: number; + /** @description Text of the review comment. */ + body: string; + /** @example 28 */ + line?: number; + /** @example RIGHT */ + side?: string; + /** @example 26 */ + start_line?: number; + /** @example LEFT */ + start_side?: string; + }[]; + }; + }; + }; + }; + "pulls/get-review": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the pull request. */ + pull_number: components["parameters"]["pull-number"]; + /** The unique identifier of the review. */ + review_id: components["parameters"]["review-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["pull-request-review"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** Update the review summary comment with new text. */ + "pulls/update-review": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the pull request. */ + pull_number: components["parameters"]["pull-number"]; + /** The unique identifier of the review. */ + review_id: components["parameters"]["review-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["pull-request-review"]; + }; + }; + 422: components["responses"]["validation_failed_simple"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The body text of the pull request review. */ + body: string; + }; + }; + }; + }; + "pulls/delete-pending-review": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the pull request. */ + pull_number: components["parameters"]["pull-number"]; + /** The unique identifier of the review. */ + review_id: components["parameters"]["review-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["pull-request-review"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed_simple"]; + }; + }; + /** List comments for a specific pull request review. 
*/ + "pulls/list-comments-for-review": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the pull request. */ + pull_number: components["parameters"]["pull-number"]; + /** The unique identifier of the review. */ + review_id: components["parameters"]["review-id"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["review-comment"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** **Note:** To dismiss a pull request review on a [protected branch](https://docs.github.com/rest/reference/repos#branches), you must be a repository administrator or be included in the list of people or teams who can dismiss pull request reviews. */ + "pulls/dismiss-review": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the pull request. */ + pull_number: components["parameters"]["pull-number"]; + /** The unique identifier of the review. */ + review_id: components["parameters"]["review-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["pull-request-review"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed_simple"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The message for the pull request review dismissal */ + message: string; + /** + * @example "DISMISS" + * @enum {string} + */ + event?: "DISMISS"; + }; + }; + }; + }; + /** Submits a pending review for a pull request. For more information about creating a pending review for a pull request, see "[Create a review for a pull request](https://docs.github.com/rest/pulls#create-a-review-for-a-pull-request)." */ + "pulls/submit-review": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the pull request. */ + pull_number: components["parameters"]["pull-number"]; + /** The unique identifier of the review. */ + review_id: components["parameters"]["review-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["pull-request-review"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed_simple"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The body text of the pull request review */ + body?: string; + /** + * @description The review action you want to perform. The review actions include: `APPROVE`, `REQUEST_CHANGES`, or `COMMENT`. 
When you leave this blank, the API returns _HTTP 422 (Unrecognizable entity)_ and sets the review action state to `PENDING`, which means you will need to re-submit the pull request review using a review action. + * @enum {string} + */ + event: "APPROVE" | "REQUEST_CHANGES" | "COMMENT"; + }; + }; + }; + }; + /** Updates the pull request branch with the latest upstream changes by merging HEAD from the base branch into the pull request branch. */ + "pulls/update-branch": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies the pull request. */ + pull_number: components["parameters"]["pull-number"]; + }; + }; + responses: { + /** Response */ + 202: { + content: { + "application/json": { + message?: string; + url?: string; + }; + }; + }; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The expected SHA of the pull request's HEAD ref. This is the most recent commit on the pull request's branch. If the expected SHA does not match the pull request's HEAD, you will receive a `422 Unprocessable Entity` status. You can use the "[List commits](https://docs.github.com/rest/reference/repos#list-commits)" endpoint to find the most recent commit SHA. Default: SHA of the pull request's current HEAD ref. */ + expected_head_sha?: string; + } | null; + }; + }; + }; + /** + * Gets the preferred README for a repository. + * + * READMEs support [custom media types](https://docs.github.com/rest/reference/repos#custom-media-types) for retrieving the raw content or rendered HTML. + */ + "repos/get-readme": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The name of the commit/branch/tag. Default: the repository’s default branch (usually `master`) */ + ref?: string; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["content-file"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Gets the README from a repository directory. + * + * READMEs support [custom media types](https://docs.github.com/rest/reference/repos#custom-media-types) for retrieving the raw content or rendered HTML. + */ + "repos/get-readme-in-directory": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The alternate path to look for a README file */ + dir: string; + }; + query: { + /** The name of the commit/branch/tag. 
Default: the repository’s default branch (usually `master`) */ + ref?: string; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["content-file"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * This returns a list of releases, which does not include regular Git tags that have not been associated with a release. To get a list of Git tags, use the [Repository Tags API](https://docs.github.com/rest/reference/repos#list-repository-tags). + * + * Information about published releases are available to everyone. Only users with push access will receive listings for draft releases. + */ + "repos/list-releases": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["release"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Users with push access to the repository can create a release. + * + * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + "repos/create-release": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 201: { + headers: { + Location?: string; + }; + content: { + "application/json": components["schemas"]["release"]; + }; + }; + /** Not Found if the discussion category name is invalid */ + 404: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The name of the tag. */ + tag_name: string; + /** @description Specifies the commitish value that determines where the Git tag is created from. Can be any branch or commit SHA. Unused if the Git tag already exists. Default: the repository's default branch (usually `master`). */ + target_commitish?: string; + /** @description The name of the release. */ + name?: string; + /** @description Text describing the contents of the tag. */ + body?: string; + /** + * @description `true` to create a draft (unpublished) release, `false` to create a published one. + * @default false + */ + draft?: boolean; + /** + * @description `true` to identify the release as a prerelease. `false` to identify the release as a full release. 
+ * @default false + */ + prerelease?: boolean; + /** @description If specified, a discussion of the specified category is created and linked to the release. The value must be a category that already exists in the repository. For more information, see "[Managing categories for discussions in your repository](https://docs.github.com/discussions/managing-discussions-for-your-community/managing-categories-for-discussions-in-your-repository)." */ + discussion_category_name?: string; + /** + * @description Whether to automatically generate the name and body for this release. If `name` is specified, the specified name will be used; otherwise, a name will be automatically generated. If `body` is specified, the body will be pre-pended to the automatically generated notes. + * @default false + */ + generate_release_notes?: boolean; + }; + }; + }; + }; + /** To download the asset's binary content, set the `Accept` header of the request to [`application/octet-stream`](https://docs.github.com/rest/overview/media-types). The API will either redirect the client to the location, or stream it directly if possible. API clients should handle both a `200` or `302` response. */ + "repos/get-release-asset": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the asset. */ + asset_id: components["parameters"]["asset-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["release-asset"]; + }; + }; + 302: components["responses"]["found"]; + 404: components["responses"]["not_found"]; + }; + }; + "repos/delete-release-asset": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the asset. */ + asset_id: components["parameters"]["asset-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** Users with push access to the repository can edit a release asset. */ + "repos/update-release-asset": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the asset. */ + asset_id: components["parameters"]["asset-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["release-asset"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The file name of the asset. */ + name?: string; + /** @description An alternate short description of the asset. Used in place of the filename. */ + label?: string; + /** @example "uploaded" */ + state?: string; + }; + }; + }; + }; + /** Generate a name and body describing a [release](https://docs.github.com/rest/reference/repos#releases). The body content will be markdown formatted and contain information like the changes since last release and users who contributed. The generated release notes are not saved anywhere. They are intended to be generated and used when creating a new release. 
*/ + "repos/generate-release-notes": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Name and body of generated release notes */ + 200: { + content: { + "application/json": components["schemas"]["release-notes-content"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The tag name for the release. This can be an existing tag or a new one. */ + tag_name: string; + /** @description Specifies the commitish value that will be the target for the release's tag. Required if the supplied tag_name does not reference an existing tag. Ignored if the tag_name already exists. */ + target_commitish?: string; + /** @description The name of the previous tag to use as the starting point for the release notes. Use to manually specify the range for the set of changes considered as part this release. */ + previous_tag_name?: string; + /** @description Specifies a path to a file in the repository containing configuration settings used for generating the release notes. If unspecified, the configuration file located in the repository at '.github/release.yml' or '.github/release.yaml' will be used. If that is not present, the default configuration will be used. */ + configuration_file_path?: string; + }; + }; + }; + }; + /** + * View the latest published full release for the repository. + * + * The latest release is the most recent non-prerelease, non-draft release, sorted by the `created_at` attribute. The `created_at` attribute is the date of the commit used for the release, and not the date when the release was drafted or published. + */ + "repos/get-latest-release": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["release"]; + }; + }; + }; + }; + /** Get a published release with the specified tag. */ + "repos/get-release-by-tag": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** tag parameter */ + tag: string; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["release"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** **Note:** This returns an `upload_url` key corresponding to the endpoint for uploading release assets. This key is a [hypermedia resource](https://docs.github.com/rest/overview/resources-in-the-rest-api#hypermedia). */ + "repos/get-release": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the release. 
*/ + release_id: components["parameters"]["release-id"]; + }; + }; + responses: { + /** **Note:** This returns an `upload_url` key corresponding to the endpoint for uploading release assets. This key is a [hypermedia resource](https://docs.github.com/rest/overview/resources-in-the-rest-api#hypermedia). */ + 200: { + content: { + "application/json": components["schemas"]["release"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** Users with push access to the repository can delete a release. */ + "repos/delete-release": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the release. */ + release_id: components["parameters"]["release-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** Users with push access to the repository can edit a release. */ + "repos/update-release": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the release. */ + release_id: components["parameters"]["release-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["release"]; + }; + }; + /** Not Found if the discussion category name is invalid */ + 404: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The name of the tag. */ + tag_name?: string; + /** @description Specifies the commitish value that determines where the Git tag is created from. Can be any branch or commit SHA. Unused if the Git tag already exists. Default: the repository's default branch (usually `master`). */ + target_commitish?: string; + /** @description The name of the release. */ + name?: string; + /** @description Text describing the contents of the tag. */ + body?: string; + /** @description `true` makes the release a draft, and `false` publishes the release. */ + draft?: boolean; + /** @description `true` to identify the release as a prerelease, `false` to identify the release as a full release. */ + prerelease?: boolean; + /** @description If specified, a discussion of the specified category is created and linked to the release. The value must be a category that already exists in the repository. If there is already a discussion linked to the release, this parameter is ignored. For more information, see "[Managing categories for discussions in your repository](https://docs.github.com/discussions/managing-discussions-for-your-community/managing-categories-for-discussions-in-your-repository)." */ + discussion_category_name?: string; + }; + }; + }; + }; + "repos/list-release-assets": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the release. */ + release_id: components["parameters"]["release-id"]; + }; + query: { + /** The number of results per page (max 100). 
*/ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["release-asset"][]; + }; + }; + }; + }; + /** + * This endpoint makes use of [a Hypermedia relation](https://docs.github.com/rest/overview/resources-in-the-rest-api#hypermedia) to determine which URL to access. The endpoint you call to upload release assets is specific to your release. Use the `upload_url` returned in + * the response of the [Create a release endpoint](https://docs.github.com/rest/reference/repos#create-a-release) to upload a release asset. + * + * You need to use an HTTP client which supports [SNI](http://en.wikipedia.org/wiki/Server_Name_Indication) to make calls to this endpoint. + * + * Most libraries will set the required `Content-Length` header automatically. Use the required `Content-Type` header to provide the media type of the asset. For a list of media types, see [Media Types](https://www.iana.org/assignments/media-types/media-types.xhtml). For example: + * + * `application/zip` + * + * GitHub expects the asset data in its raw binary form, rather than JSON. You will send the raw binary content of the asset as the request body. Everything else about the endpoint is the same as the rest of the API. For example, + * you'll still need to pass your authentication to be able to upload an asset. + * + * When an upstream failure occurs, you will receive a `502 Bad Gateway` status. This may leave an empty asset with a state of `starter`. It can be safely deleted. + * + * **Notes:** + * * GitHub renames asset filenames that have special characters, non-alphanumeric characters, and leading or trailing periods. The "[List assets for a release](https://docs.github.com/rest/reference/repos#list-assets-for-a-release)" + * endpoint lists the renamed filenames. For more information and help, contact [GitHub Support](https://support.github.com/contact?tags=dotcom-rest-api). + * * If you upload an asset with the same filename as another uploaded asset, you'll receive an error and must delete the old file before you can re-upload the new asset. + */ + "repos/upload-release-asset": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the release. */ + release_id: components["parameters"]["release-id"]; + }; + query: { + name: string; + label?: string; + }; + }; + responses: { + /** Response for successful upload */ + 201: { + content: { + "application/json": components["schemas"]["release-asset"]; + }; + }; + /** Response if you upload an asset with the same filename as another uploaded asset */ + 422: unknown; + }; + requestBody: { + content: { + "*/*": string; + }; + }; + }; + /** List the reactions to a [release](https://docs.github.com/rest/reference/repos#releases). */ + "reactions/list-for-release": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the release. 
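To illustrate the asset-upload notes above, here is a rough TypeScript sketch that follows the `upload_url` hypermedia link from a release object; the file path, asset name, and GITHUB_TOKEN are placeholders:

```ts
// Minimal sketch: upload one asset using the `upload_url` hypermedia link from a release object.
// The file path, asset name, and GITHUB_TOKEN are placeholders.
import { readFile } from "node:fs/promises";

const token = process.env.GITHUB_TOKEN;

async function uploadAsset(uploadUrlTemplate: string, filePath: string, name: string) {
  // upload_url ends in "{?name,label}"; drop the template part and pass `name` as a query parameter.
  const base = uploadUrlTemplate.replace(/\{\?name,label\}$/, "");
  const data = await readFile(filePath); // raw binary body, not JSON

  const res = await fetch(`${base}?name=${encodeURIComponent(name)}`, {
    method: "POST",
    headers: {
      Accept: "application/vnd.github+json",
      Authorization: `Bearer ${token}`,
      // Content-Type describes the asset; Content-Length is set by the HTTP client.
      "Content-Type": "application/zip",
    },
    body: data,
  });
  if (res.status !== 201) throw new Error(`Upload failed with status ${res.status}`);
  return res.json(); // the created release-asset
}
```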
*/ + release_id: components["parameters"]["release-id"]; + }; + query: { + /** Returns a single [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types). Omit this parameter to list all reactions to a release. */ + content?: "+1" | "laugh" | "heart" | "hooray" | "rocket" | "eyes"; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["reaction"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** Create a reaction to a [release](https://docs.github.com/rest/reference/repos#releases). A response with a `Status: 200 OK` means that you already added the reaction type to this release. */ + "reactions/create-for-release": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the release. */ + release_id: components["parameters"]["release-id"]; + }; + }; + responses: { + /** Reaction exists */ + 200: { + content: { + "application/json": components["schemas"]["reaction"]; + }; + }; + /** Reaction created */ + 201: { + content: { + "application/json": components["schemas"]["reaction"]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types) to add to the release. + * @enum {string} + */ + content: "+1" | "laugh" | "heart" | "hooray" | "rocket" | "eyes"; + }; + }; + }; + }; + /** + * **Note:** You can also specify a repository by `repository_id` using the route `DELETE delete /repositories/:repository_id/releases/:release_id/reactions/:reaction_id`. + * + * Delete a reaction to a [release](https://docs.github.com/rest/reference/repos#releases). + */ + "reactions/delete-for-release": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the release. */ + release_id: components["parameters"]["release-id"]; + /** The unique identifier of the reaction. */ + reaction_id: components["parameters"]["reaction-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * Lists secret scanning alerts for an eligible repository, from newest to oldest. + * To use this endpoint, you must be an administrator for the repository or for the organization that owns the repository, and you must use a personal access token with the `repo` scope or `security_events` scope. + * For public repositories, you may instead use the `public_repo` scope. + * + * GitHub Apps must have the `secret_scanning_alerts` read permission to use this endpoint. + */ + "secret-scanning/list-alerts-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. 
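A small hedged sketch of the "reactions/create-for-release" operation above; owner, repo, and the release id are placeholder values:

```ts
// Minimal sketch: react to a release; 201 means the reaction was created, 200 means it already existed.
// owner, repo, and the release id are placeholders.
const token = process.env.GITHUB_TOKEN;

async function reactToRelease(owner: string, repo: string, releaseId: number) {
  const res = await fetch(
    `https://api.github.com/repos/${owner}/${repo}/releases/${releaseId}/reactions`,
    {
      method: "POST",
      headers: {
        Accept: "application/vnd.github+json",
        Authorization: `Bearer ${token}`,
      },
      body: JSON.stringify({ content: "rocket" }),
    }
  );
  console.log(res.status === 201 ? "Reaction created" : "Reaction already existed");
  return res.json(); // the reaction object
}
```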
*/ + repo: components["parameters"]["repo"]; + }; + query: { + /** Set to `open` or `resolved` to only list secret scanning alerts in a specific state. */ + state?: components["parameters"]["secret-scanning-alert-state"]; + /** + * A comma-separated list of secret types to return. By default all secret types are returned. + * See "[Secret scanning patterns](https://docs.github.com/code-security/secret-scanning/secret-scanning-patterns#supported-secrets-for-advanced-security)" + * for a complete list of secret types. + */ + secret_type?: components["parameters"]["secret-scanning-alert-secret-type"]; + /** A comma-separated list of resolutions. Only secret scanning alerts with one of these resolutions are listed. Valid resolutions are `false_positive`, `wont_fix`, `revoked`, `pattern_edited`, `pattern_deleted` or `used_in_tests`. */ + resolution?: components["parameters"]["secret-scanning-alert-resolution"]; + /** The property to sort the results by. `created` means when the alert was created. `updated` means when the alert was updated or resolved. */ + sort?: components["parameters"]["secret-scanning-alert-sort"]; + /** The direction to sort the results by. */ + direction?: components["parameters"]["direction"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** A cursor, as given in the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header). If specified, the query only searches for events before this cursor. To receive an initial cursor on your first request, include an empty "before" query string. */ + before?: components["parameters"]["secret-scanning-pagination-before-org-repo"]; + /** A cursor, as given in the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header). If specified, the query only searches for events after this cursor. To receive an initial cursor on your first request, include an empty "after" query string. */ + after?: components["parameters"]["secret-scanning-pagination-after-org-repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["secret-scanning-alert"][]; + }; + }; + /** Repository is public or secret scanning is disabled for the repository */ + 404: unknown; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * Gets a single secret scanning alert detected in an eligible repository. + * To use this endpoint, you must be an administrator for the repository or for the organization that owns the repository, and you must use a personal access token with the `repo` scope or `security_events` scope. + * For public repositories, you may instead use the `public_repo` scope. + * + * GitHub Apps must have the `secret_scanning_alerts` read permission to use this endpoint. + */ + "secret-scanning/get-alert": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies an alert. You can find this at the end of the URL for a code scanning alert within GitHub, and in the `number` field in the response from the `GET /repos/{owner}/{repo}/code-scanning/alerts` operation. 
*/ + alert_number: components["parameters"]["alert-number"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["secret-scanning-alert"]; + }; + }; + 304: components["responses"]["not_modified"]; + /** Repository is public, or secret scanning is disabled for the repository, or the resource is not found */ + 404: unknown; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * Updates the status of a secret scanning alert in an eligible repository. + * To use this endpoint, you must be an administrator for the repository or for the organization that owns the repository, and you must use a personal access token with the `repo` scope or `security_events` scope. + * For public repositories, you may instead use the `public_repo` scope. + * + * GitHub Apps must have the `secret_scanning_alerts` write permission to use this endpoint. + */ + "secret-scanning/update-alert": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies an alert. You can find this at the end of the URL for a code scanning alert within GitHub, and in the `number` field in the response from the `GET /repos/{owner}/{repo}/code-scanning/alerts` operation. */ + alert_number: components["parameters"]["alert-number"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["secret-scanning-alert"]; + }; + }; + /** Bad request, resolution comment is invalid or the resolution was not changed. */ + 400: unknown; + /** Repository is public, or secret scanning is disabled for the repository, or the resource is not found */ + 404: unknown; + /** State does not match the resolution or resolution comment */ + 422: unknown; + 503: components["responses"]["service_unavailable"]; + }; + requestBody: { + content: { + "application/json": { + state: components["schemas"]["secret-scanning-alert-state"]; + resolution?: components["schemas"]["secret-scanning-alert-resolution"]; + resolution_comment?: components["schemas"]["secret-scanning-alert-resolution-comment"]; + }; + }; + }; + }; + /** + * Lists all locations for a given secret scanning alert for an eligible repository. + * To use this endpoint, you must be an administrator for the repository or for the organization that owns the repository, and you must use a personal access token with the `repo` scope or `security_events` scope. + * For public repositories, you may instead use the `public_repo` scope. + * + * GitHub Apps must have the `secret_scanning_alerts` read permission to use this endpoint. + */ + "secret-scanning/list-locations-for-alert": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The number that identifies an alert. You can find this at the end of the URL for a code scanning alert within GitHub, and in the `number` field in the response from the `GET /repos/{owner}/{repo}/code-scanning/alerts` operation. */ + alert_number: components["parameters"]["alert-number"]; + }; + query: { + /** Page number of the results to fetch. 
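As a sketch of the "secret-scanning/update-alert" operation above, the snippet below marks an alert resolved as revoked; owner, repo, the alert number, and the comment text are placeholders, and the token needs the `repo` or `security_events` scope as noted in the description:

```ts
// Minimal sketch: resolve a secret scanning alert as revoked, with an optional resolution comment.
// owner, repo, and the alert number are placeholders.
const token = process.env.GITHUB_TOKEN;

async function resolveAlert(owner: string, repo: string, alertNumber: number) {
  const res = await fetch(
    `https://api.github.com/repos/${owner}/${repo}/secret-scanning/alerts/${alertNumber}`,
    {
      method: "PATCH",
      headers: {
        Accept: "application/vnd.github+json",
        Authorization: `Bearer ${token}`,
      },
      body: JSON.stringify({
        state: "resolved",
        resolution: "revoked",
        resolution_comment: "Token rotated and revoked.",
      }),
    }
  );
  if (!res.ok) throw new Error(`Updating the alert failed with status ${res.status}`);
  return res.json(); // the updated secret-scanning-alert
}
```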
*/ + page?: components["parameters"]["page"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["secret-scanning-location"][]; + }; + }; + /** Repository is public, or secret scanning is disabled for the repository, or the resource is not found */ + 404: unknown; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * Lists the people that have starred the repository. + * + * You can also find out _when_ stars were created by passing the following custom [media type](https://docs.github.com/rest/overview/media-types/) via the `Accept` header: `application/vnd.github.star+json`. + */ + "activity/list-stargazers-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": Partial & + Partial; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + }; + /** Returns a weekly aggregate of the number of additions and deletions pushed to a repository. */ + "repos/get-code-frequency-stats": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Returns a weekly aggregate of the number of additions and deletions pushed to a repository. */ + 200: { + content: { + "application/json": components["schemas"]["code-frequency-stat"][]; + }; + }; + 202: components["responses"]["accepted"]; + 204: components["responses"]["no_content"]; + }; + }; + /** Returns the last year of commit activity grouped by week. The `days` array is a group of commits per day, starting on `Sunday`. */ + "repos/get-commit-activity-stats": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["commit-activity"][]; + }; + }; + 202: components["responses"]["accepted"]; + 204: components["responses"]["no_content"]; + }; + }; + /** + * Returns the `total` number of commits authored by the contributor. In addition, the response includes a Weekly Hash (`weeks` array) with the following information: + * + * * `w` - Start of the week, given as a [Unix timestamp](http://en.wikipedia.org/wiki/Unix_time). + * * `a` - Number of additions + * * `d` - Number of deletions + * * `c` - Number of commits + */ + "repos/get-contributors-stats": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. 
*/ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["contributor-activity"][]; + }; + }; + 202: components["responses"]["accepted"]; + 204: components["responses"]["no_content"]; + }; + }; + /** + * Returns the total commit counts for the `owner` and total commit counts in `all`. `all` is everyone combined, including the `owner` in the last 52 weeks. If you'd like to get the commit counts for non-owners, you can subtract `owner` from `all`. + * + * The array order is oldest week (index 0) to most recent week. + */ + "repos/get-participation-stats": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** The array order is oldest week (index 0) to most recent week. */ + 200: { + content: { + "application/json": components["schemas"]["participation-stats"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Each array contains the day number, hour number, and number of commits: + * + * * `0-6`: Sunday - Saturday + * * `0-23`: Hour of day + * * Number of commits + * + * For example, `[2, 14, 25]` indicates that there were 25 total commits, during the 2:00pm hour on Tuesdays. All times are based on the time zone of individual commits. + */ + "repos/get-punch-card-stats": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** For example, `[2, 14, 25]` indicates that there were 25 total commits, during the 2:00pm hour on Tuesdays. All times are based on the time zone of individual commits. */ + 200: { + content: { + "application/json": components["schemas"]["code-frequency-stat"][]; + }; + }; + 204: components["responses"]["no_content"]; + }; + }; + /** + * Users with push access in a repository can create commit statuses for a given SHA. + * + * Note: there is a limit of 1000 statuses per `sha` and `context` within a repository. Attempts to create more than 1000 statuses will result in a validation error. + */ + "repos/create-commit-status": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + sha: string; + }; + }; + responses: { + /** Response */ + 201: { + headers: { + Location?: string; + }; + content: { + "application/json": components["schemas"]["status"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The state of the status. + * @enum {string} + */ + state: "error" | "failure" | "pending" | "success"; + /** + * @description The target URL to associate with this status. This URL will be linked from the GitHub UI to allow users to easily see the source of the status. 
+ * For example, if your continuous integration system is posting build status, you would want to provide the deep link for the build output for this specific SHA: + * `http://ci.example.com/user/repo/build/sha` + */ + target_url?: string | null; + /** @description A short description of the status. */ + description?: string | null; + /** + * @description A string label to differentiate this status from the status of other systems. This field is case-insensitive. + * @default default + */ + context?: string; + }; + }; + }; + }; + /** Lists the people watching the specified repository. */ + "activity/list-watchers-for-repo": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["simple-user"][]; + }; + }; + }; + }; + "activity/get-repo-subscription": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** if you subscribe to the repository */ + 200: { + content: { + "application/json": components["schemas"]["repository-subscription"]; + }; + }; + 403: components["responses"]["forbidden"]; + /** Not Found if you don't subscribe to the repository */ + 404: unknown; + }; + }; + /** If you would like to watch a repository, set `subscribed` to `true`. If you would like to ignore notifications made within a repository, set `ignored` to `true`. If you would like to stop watching a repository, [delete the repository's subscription](https://docs.github.com/rest/reference/activity#delete-a-repository-subscription) completely. */ + "activity/set-repo-subscription": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["repository-subscription"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description Determines if notifications should be received from this repository. */ + subscribed?: boolean; + /** @description Determines if all notifications should be blocked from this repository. */ + ignored?: boolean; + }; + }; + }; + }; + /** This endpoint should only be used to stop watching a repository. To control whether or not you wish to receive notifications from a repository, [set the repository's subscription manually](https://docs.github.com/rest/reference/activity#set-a-repository-subscription). */ + "activity/delete-repo-subscription": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. 
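A minimal sketch of the "repos/create-commit-status" operation above, reusing the deep-link pattern from the description; owner, repo, the commit SHA, and the context string are placeholders:

```ts
// Minimal sketch: publish a build result as a commit status.
// owner, repo, the commit SHA, and the context string are placeholders.
const token = process.env.GITHUB_TOKEN;

async function setCommitStatus(owner: string, repo: string, sha: string, ok: boolean) {
  const res = await fetch(`https://api.github.com/repos/${owner}/${repo}/statuses/${sha}`, {
    method: "POST",
    headers: {
      Accept: "application/vnd.github+json",
      Authorization: `Bearer ${token}`,
    },
    body: JSON.stringify({
      state: ok ? "success" : "failure",
      target_url: `http://ci.example.com/user/repo/build/${sha}`,
      description: ok ? "Build succeeded" : "Build failed",
      context: "continuous-integration/example",
    }),
  });
  if (res.status !== 201) throw new Error(`Unexpected status ${res.status}`);
  return res.json(); // the created status
}
```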
*/ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + "repos/list-tags": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["tag"][]; + }; + }; + }; + }; + /** + * This returns the tag protection states of a repository. + * + * This information is only available to repository administrators. + */ + "repos/list-tag-protection": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["tag-protection"][]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * This creates a tag protection state for a repository. + * This endpoint is only available to repository administrators. + */ + "repos/create-tag-protection": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["tag-protection"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + requestBody: { + content: { + "application/json": { + /** @description An optional glob pattern to match against when enforcing tag protection. */ + pattern: string; + }; + }; + }; + }; + /** + * This deletes a tag protection state for a repository. + * This endpoint is only available to repository administrators. + */ + "repos/delete-tag-protection": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + /** The unique identifier of the tag protection. */ + tag_protection_id: components["parameters"]["tag-protection-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Gets a redirect URL to download a tar archive for a repository. If you omit `:ref`, the repository’s default branch (usually + * `master`) will be used. Please make sure your HTTP framework is configured to follow redirects or you will need to use + * the `Location` header to make a second `GET` request. + * **Note**: For private repositories, these links are temporary and expire after five minutes. + */ + "repos/download-tarball-archive": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. 
*/ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + ref: string; + }; + }; + responses: { + /** Response */ + 302: never; + }; + }; + "repos/list-teams": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["team"][]; + }; + }; + }; + }; + "repos/get-all-topics": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["topic"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + "repos/replace-all-topics": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["topic"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed_simple"]; + }; + requestBody: { + content: { + "application/json": { + /** @description An array of topics to add to the repository. Pass one or more topics to _replace_ the set of existing topics. Send an empty array (`[]`) to clear all topics from the repository. **Note:** Topic `names` cannot contain uppercase letters. */ + names: string[]; + }; + }; + }; + }; + /** Get the total number of clones and breakdown per day or week for the last 14 days. Timestamps are aligned to UTC midnight of the beginning of the day or week. Week begins on Monday. */ + "repos/get-clones": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The time frame to display results for. */ + per?: components["parameters"]["per"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["clone-traffic"]; + }; + }; + 403: components["responses"]["forbidden"]; + }; + }; + /** Get the top 10 popular contents over the last 14 days. */ + "repos/get-top-paths": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. 
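A short sketch of the "repos/replace-all-topics" operation above; owner, repo, and the topic names are placeholders, and topic names must be lowercase as the description notes:

```ts
// Minimal sketch: replace the repository's entire topic list.
// owner and repo are placeholders; topic names must be lowercase.
const token = process.env.GITHUB_TOKEN;

async function replaceTopics(owner: string, repo: string, names: string[]) {
  const res = await fetch(`https://api.github.com/repos/${owner}/${repo}/topics`, {
    method: "PUT",
    headers: {
      Accept: "application/vnd.github+json",
      Authorization: `Bearer ${token}`,
    },
    body: JSON.stringify({ names }),
  });
  if (!res.ok) throw new Error(`Replacing topics failed with status ${res.status}`);
  return res.json(); // { names: [...] }
}

// Example: replaceTopics("octocat", "hello-world", ["flutter", "flutterflow"]);
```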
*/ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["content-traffic"][]; + }; + }; + 403: components["responses"]["forbidden"]; + }; + }; + /** Get the top 10 referrers over the last 14 days. */ + "repos/get-top-referrers": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["referrer-traffic"][]; + }; + }; + 403: components["responses"]["forbidden"]; + }; + }; + /** Get the total number of views and breakdown per day or week for the last 14 days. Timestamps are aligned to UTC midnight of the beginning of the day or week. Week begins on Monday. */ + "repos/get-views": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + query: { + /** The time frame to display results for. */ + per?: components["parameters"]["per"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["view-traffic"]; + }; + }; + 403: components["responses"]["forbidden"]; + }; + }; + /** A transfer request will need to be accepted by the new owner when transferring a personal repository to another user. The response will contain the original `owner`, and the transfer will continue asynchronously. For more details on the requirements to transfer personal and organization-owned repositories, see [about repository transfers](https://docs.github.com/articles/about-repository-transfers/). */ + "repos/transfer": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 202: { + content: { + "application/json": components["schemas"]["minimal-repository"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The username or organization name the repository will be transferred to. */ + new_owner: string; + /** @description ID of the team or teams to add to the repository. Teams can only be added to organization-owned repositories. */ + team_ids?: number[]; + }; + }; + }; + }; + /** Shows whether dependency alerts are enabled or disabled for a repository. The authenticated user must have admin read access to the repository. For more information, see "[About security alerts for vulnerable dependencies](https://docs.github.com/en/articles/about-security-alerts-for-vulnerable-dependencies)". */ + "repos/check-vulnerability-alerts": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. 
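A hedged sketch combining the "repos/get-views" and "repos/get-clones" traffic operations above; owner and repo are placeholders, and reading traffic data is assumed to require push access:

```ts
// Minimal sketch: summarize the last 14 days of views and clones, aggregated per week.
// owner and repo are placeholders.
const token = process.env.GITHUB_TOKEN;
const headers = {
  Accept: "application/vnd.github+json",
  Authorization: `Bearer ${token}`,
};

async function trafficSummary(owner: string, repo: string) {
  const base = `https://api.github.com/repos/${owner}/${repo}/traffic`;
  const [views, clones] = await Promise.all([
    fetch(`${base}/views?per=week`, { headers }).then((r) => r.json()),
    fetch(`${base}/clones?per=week`, { headers }).then((r) => r.json()),
  ]);
  console.log(`Views:  ${views.count} total, ${views.uniques} unique`);
  console.log(`Clones: ${clones.count} total, ${clones.uniques} unique`);
}
```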
*/ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response if repository is enabled with vulnerability alerts */ + 204: never; + /** Not Found if repository is not enabled with vulnerability alerts */ + 404: unknown; + }; + }; + /** Enables dependency alerts and the dependency graph for a repository. The authenticated user must have admin access to the repository. For more information, see "[About security alerts for vulnerable dependencies](https://docs.github.com/en/articles/about-security-alerts-for-vulnerable-dependencies)". */ + "repos/enable-vulnerability-alerts": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** Disables dependency alerts and the dependency graph for a repository. The authenticated user must have admin access to the repository. For more information, see "[About security alerts for vulnerable dependencies](https://docs.github.com/en/articles/about-security-alerts-for-vulnerable-dependencies)". */ + "repos/disable-vulnerability-alerts": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * Gets a redirect URL to download a zip archive for a repository. If you omit `:ref`, the repository’s default branch (usually + * `master`) will be used. Please make sure your HTTP framework is configured to follow redirects or you will need to use + * the `Location` header to make a second `GET` request. + * + * **Note**: For private repositories, these links are temporary and expire after five minutes. If the repository is empty, you will receive a 404 when you follow the redirect. + */ + "repos/download-zipball-archive": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + ref: string; + }; + }; + responses: { + /** Response */ + 302: never; + }; + }; + /** + * Creates a new repository using a repository template. Use the `template_owner` and `template_repo` route parameters to specify the repository to use as the template. If the repository is not public, the authenticated user must own or be a member of an organization that owns the repository. To check if a repository is available to use as a template, get the repository's information using the [Get a repository](https://docs.github.com/rest/reference/repos#get-a-repository) endpoint and check that the `is_template` key is `true`. + * + * **OAuth scope requirements** + * + * When using [OAuth](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/), authorizations must include: + * + * * `public_repo` scope or `repo` scope to create a public repository. Note: For GitHub AE, use `repo` scope to create an internal repository. 
+ * * `repo` scope to create a private repository + */ + "repos/create-using-template": { + parameters: { + path: { + template_owner: string; + template_repo: string; + }; + }; + responses: { + /** Response */ + 201: { + headers: { + Location?: string; + }; + content: { + "application/json": components["schemas"]["repository"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The organization or person who will own the new repository. To create a new repository in an organization, the authenticated user must be a member of the specified organization. */ + owner?: string; + /** @description The name of the new repository. */ + name: string; + /** @description A short description of the new repository. */ + description?: string; + /** + * @description Set to `true` to include the directory structure and files from all branches in the template repository, and not just the default branch. Default: `false`. + * @default false + */ + include_all_branches?: boolean; + /** + * @description Either `true` to create a new private repository or `false` to create a new public one. + * @default false + */ + private?: boolean; + }; + }; + }; + }; + /** + * Lists all public repositories in the order that they were created. + * + * Note: + * - For GitHub Enterprise Server, this endpoint will only list repositories available to all users on the enterprise. + * - Pagination is powered exclusively by the `since` parameter. Use the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header) to get the URL for the next page of repositories. + */ + "repos/list-public": { + parameters: { + query: { + /** A repository ID. Only return repositories with an ID greater than this ID. */ + since?: components["parameters"]["since-repo"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: { + Link?: string; + }; + content: { + "application/json": components["schemas"]["minimal-repository"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** Lists all secrets available in an environment without revealing their encrypted values. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. */ + "actions/list-environment-secrets": { + parameters: { + path: { + /** The unique identifier of the repository. */ + repository_id: components["parameters"]["repository-id"]; + /** The name of the environment. */ + environment_name: components["parameters"]["environment-name"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": { + total_count: number; + secrets: components["schemas"]["actions-secret"][]; + }; + }; + }; + }; + }; + /** Get the public key for an environment, which you need to encrypt environment secrets. You need to encrypt a secret before you can create or update secrets. Anyone with read access to the repository can use this endpoint. If the repository is private you must use an access token with the `repo` scope. GitHub Apps must have the `secrets` repository permission to use this endpoint. 
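A minimal sketch of the "repos/create-using-template" operation above; the template owner/repo and the new repository name are placeholders:

```ts
// Minimal sketch: create a new private repository from a template repository.
// The template owner/repo and the new repository name are placeholders.
const token = process.env.GITHUB_TOKEN;

async function createFromTemplate(templateOwner: string, templateRepo: string, name: string) {
  const res = await fetch(
    `https://api.github.com/repos/${templateOwner}/${templateRepo}/generate`,
    {
      method: "POST",
      headers: {
        Accept: "application/vnd.github+json",
        Authorization: `Bearer ${token}`,
      },
      body: JSON.stringify({
        name,
        description: "Created from a template",
        private: true,
        include_all_branches: false,
      }),
    }
  );
  if (res.status !== 201) {
    throw new Error(`Creating the repository failed with status ${res.status}`);
  }
  return res.json(); // the new repository
}
```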
*/ + "actions/get-environment-public-key": { + parameters: { + path: { + /** The unique identifier of the repository. */ + repository_id: components["parameters"]["repository-id"]; + /** The name of the environment. */ + environment_name: components["parameters"]["environment-name"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["actions-public-key"]; + }; + }; + }; + }; + /** Gets a single environment secret without revealing its encrypted value. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. */ + "actions/get-environment-secret": { + parameters: { + path: { + /** The unique identifier of the repository. */ + repository_id: components["parameters"]["repository-id"]; + /** The name of the environment. */ + environment_name: components["parameters"]["environment-name"]; + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["actions-secret"]; + }; + }; + }; + }; + /** + * Creates or updates an environment secret with an encrypted value. Encrypt your secret using + * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). You must authenticate using an access + * token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use + * this endpoint. + * + * #### Example encrypting a secret using Node.js + * + * Encrypt your secret using the [tweetsodium](https://github.com/github/tweetsodium) library. + * + * ``` + * const sodium = require('tweetsodium'); + * + * const key = "base64-encoded-public-key"; + * const value = "plain-text-secret"; + * + * // Convert the message and key to Uint8Array's (Buffer implements that interface) + * const messageBytes = Buffer.from(value); + * const keyBytes = Buffer.from(key, 'base64'); + * + * // Encrypt using LibSodium. + * const encryptedBytes = sodium.seal(messageBytes, keyBytes); + * + * // Base64 the encrypted secret + * const encrypted = Buffer.from(encryptedBytes).toString('base64'); + * + * console.log(encrypted); + * ``` + * + * + * #### Example encrypting a secret using Python + * + * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/latest/public/#nacl-public-sealedbox) with Python 3. + * + * ``` + * from base64 import b64encode + * from nacl import encoding, public + * + * def encrypt(public_key: str, secret_value: str) -> str: + * """Encrypt a Unicode string using the public key.""" + * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) + * sealed_box = public.SealedBox(public_key) + * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) + * return b64encode(encrypted).decode("utf-8") + * ``` + * + * #### Example encrypting a secret using C# + * + * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. 
+ * + * ``` + * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); + * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); + * + * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); + * + * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); + * ``` + * + * #### Example encrypting a secret using Ruby + * + * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. + * + * ```ruby + * require "rbnacl" + * require "base64" + * + * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") + * public_key = RbNaCl::PublicKey.new(key) + * + * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) + * encrypted_secret = box.encrypt("my_secret") + * + * # Print the base64 encoded secret + * puts Base64.strict_encode64(encrypted_secret) + * ``` + */ + "actions/create-or-update-environment-secret": { + parameters: { + path: { + /** The unique identifier of the repository. */ + repository_id: components["parameters"]["repository-id"]; + /** The name of the environment. */ + environment_name: components["parameters"]["environment-name"]; + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** Response when creating a secret */ + 201: { + content: { + "application/json": components["schemas"]["empty-object"]; + }; + }; + /** Response when updating a secret */ + 204: never; + }; + requestBody: { + content: { + "application/json": { + /** @description Value for your secret, encrypted with [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages) using the public key retrieved from the [Get an environment public key](https://docs.github.com/rest/reference/actions#get-an-environment-public-key) endpoint. */ + encrypted_value: string; + /** @description ID of the key you used to encrypt the secret. */ + key_id: string; + }; + }; + }; + }; + /** Deletes a secret in an environment using the secret name. You must authenticate using an access token with the `repo` scope to use this endpoint. GitHub Apps must have the `secrets` repository permission to use this endpoint. */ + "actions/delete-environment-secret": { + parameters: { + path: { + /** The unique identifier of the repository. */ + repository_id: components["parameters"]["repository-id"]; + /** The name of the environment. */ + environment_name: components["parameters"]["environment-name"]; + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** Default response */ + 204: never; + }; + }; + /** + * Searches for query terms inside of a file. This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). + * + * When searching for code, you can get text match metadata for the file **content** and file **path** fields when you pass the `text-match` media type. For more details about how to receive highlighted search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). + * + * For example, if you want to find the definition of the `addClass` function inside [jQuery](https://github.com/jquery/jquery) repository, your query would look something like this: + * + * `q=addClass+in:file+language:js+repo:jquery/jquery` + * + * This query searches for the keyword `addClass` within a file's contents. 
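Pulling the pieces of "actions/create-or-update-environment-secret" together, the sketch below fetches the environment public key, seals the value with tweetsodium exactly as in the Node.js example above, and then creates or updates the secret; the repository id, environment name, secret name, and GITHUB_TOKEN are placeholders:

```ts
// Minimal sketch: encrypt a value with the environment public key and upsert the secret.
// repositoryId, environment, and secret names are placeholders.
const sodium: { seal(message: Uint8Array, publicKey: Uint8Array): Uint8Array } =
  require("tweetsodium"); // no bundled type definitions, hence the inline type

const token = process.env.GITHUB_TOKEN;
const headers = {
  Accept: "application/vnd.github+json",
  Authorization: `Bearer ${token}`,
};

async function putEnvironmentSecret(
  repositoryId: number,
  environment: string,
  secretName: string,
  value: string
) {
  const base = `https://api.github.com/repositories/${repositoryId}/environments/${environment}/secrets`;

  // 1. Get the environment public key used for encryption.
  const { key, key_id } = await fetch(`${base}/public-key`, { headers }).then((r) => r.json());

  // 2. Seal the plain-text value with LibSodium.
  const encryptedBytes = sodium.seal(Buffer.from(value), Buffer.from(key, "base64"));
  const encrypted_value = Buffer.from(encryptedBytes).toString("base64");

  // 3. Create or update the secret: 201 when created, 204 when updated.
  const res = await fetch(`${base}/${secretName}`, {
    method: "PUT",
    headers,
    body: JSON.stringify({ encrypted_value, key_id }),
  });
  console.log(res.status === 201 ? "Secret created" : "Secret updated");
}
```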
The query limits the search to files where the language is JavaScript in the `jquery/jquery` repository. + * + * #### Considerations for code search + * + * Due to the complexity of searching code, there are a few restrictions on how searches are performed: + * + * * Only the _default branch_ is considered. In most cases, this will be the `master` branch. + * * Only files smaller than 384 KB are searchable. + * * You must always include at least one search term when searching source code. For example, searching for [`language:go`](https://github.com/search?utf8=%E2%9C%93&q=language%3Ago&type=Code) is not valid, while [`amazing + * language:go`](https://github.com/search?utf8=%E2%9C%93&q=amazing+language%3Ago&type=Code) is. + */ + "search/code": { + parameters: { + query: { + /** The query contains one or more search keywords and qualifiers. Qualifiers allow you to limit your search to specific areas of GitHub. The REST API supports the same qualifiers as the web interface for GitHub. To learn more about the format of the query, see [Constructing a search query](https://docs.github.com/rest/reference/search#constructing-a-search-query). See "[Searching code](https://docs.github.com/search-github/searching-on-github/searching-code)" for a detailed list of qualifiers. */ + q: string; + /** Sorts the results of your query. Can only be `indexed`, which indicates how recently a file has been indexed by the GitHub search infrastructure. Default: [best match](https://docs.github.com/rest/reference/search#ranking-search-results) */ + sort?: "indexed"; + /** Determines whether the first search result returned is the highest number of matches (`desc`) or lowest number of matches (`asc`). This parameter is ignored unless you provide `sort`. */ + order?: components["parameters"]["order"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": { + total_count: number; + incomplete_results: boolean; + items: components["schemas"]["code-search-result-item"][]; + }; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * Find commits via various criteria on the default branch (usually `master`). This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). + * + * When searching for commits, you can get text match metadata for the **message** field when you provide the `text-match` media type. For more details about how to receive highlighted search results, see [Text match + * metadata](https://docs.github.com/rest/reference/search#text-match-metadata). + * + * For example, if you want to find commits related to CSS in the [octocat/Spoon-Knife](https://github.com/octocat/Spoon-Knife) repository. Your query would look something like this: + * + * `q=repo:octocat/Spoon-Knife+css` + */ + "search/commits": { + parameters: { + query: { + /** The query contains one or more search keywords and qualifiers. Qualifiers allow you to limit your search to specific areas of GitHub. The REST API supports the same qualifiers as the web interface for GitHub. 
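A quick sketch of the "search/code" operation above, running the `addClass` query from the description; GITHUB_TOKEN is a placeholder and code search is assumed to require authentication:

```ts
// Minimal sketch: run the `addClass` example query and list the matching file paths.
const token = process.env.GITHUB_TOKEN;

async function searchCode() {
  const q = encodeURIComponent("addClass in:file language:js repo:jquery/jquery");
  const res = await fetch(`https://api.github.com/search/code?q=${q}&per_page=10`, {
    headers: {
      Accept: "application/vnd.github+json",
      Authorization: `Bearer ${token}`,
    },
  });
  const data = await res.json();
  console.log(`Total matches: ${data.total_count}`);
  for (const item of data.items) {
    console.log(`${item.repository.full_name}: ${item.path}`);
  }
}

searchCode().catch(console.error);
```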
To learn more about the format of the query, see [Constructing a search query](https://docs.github.com/rest/reference/search#constructing-a-search-query). See "[Searching commits](https://docs.github.com/search-github/searching-on-github/searching-commits)" for a detailed list of qualifiers. */ + q: string; + /** Sorts the results of your query by `author-date` or `committer-date`. Default: [best match](https://docs.github.com/rest/reference/search#ranking-search-results) */ + sort?: "author-date" | "committer-date"; + /** Determines whether the first search result returned is the highest number of matches (`desc`) or lowest number of matches (`asc`). This parameter is ignored unless you provide `sort`. */ + order?: components["parameters"]["order"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": { + total_count: number; + incomplete_results: boolean; + items: components["schemas"]["commit-search-result-item"][]; + }; + }; + }; + 304: components["responses"]["not_modified"]; + }; + }; + /** + * Find issues by state and keyword. This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). + * + * When searching for issues, you can get text match metadata for the issue **title**, issue **body**, and issue **comment body** fields when you pass the `text-match` media type. For more details about how to receive highlighted + * search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). + * + * For example, if you want to find the oldest unresolved Python bugs on Windows. Your query might look something like this. + * + * `q=windows+label:bug+language:python+state:open&sort=created&order=asc` + * + * This query searches for the keyword `windows`, within any open issue that is labeled as `bug`. The search runs across repositories whose primary language is Python. The results are sorted by creation date in ascending order, which means the oldest issues appear first in the search results. + * + * **Note:** For [user-to-server](https://docs.github.com/developers/apps/identifying-and-authorizing-users-for-github-apps#user-to-server-requests) GitHub App requests, you can't retrieve a combination of issues and pull requests in a single query. Requests that don't include the `is:issue` or `is:pull-request` qualifier will receive an HTTP `422 Unprocessable Entity` response. To get results for both issues and pull requests, you must send separate queries for issues and pull requests. For more information about the `is` qualifier, see "[Searching only issues or pull requests](https://docs.github.com/github/searching-for-information-on-github/searching-issues-and-pull-requests#search-only-issues-or-pull-requests)." + */ + "search/issues-and-pull-requests": { + parameters: { + query: { + /** The query contains one or more search keywords and qualifiers. Qualifiers allow you to limit your search to specific areas of GitHub. The REST API supports the same qualifiers as the web interface for GitHub. To learn more about the format of the query, see [Constructing a search query](https://docs.github.com/rest/reference/search#constructing-a-search-query). 
See "[Searching issues and pull requests](https://docs.github.com/search-github/searching-on-github/searching-issues-and-pull-requests)" for a detailed list of qualifiers. */ + q: string; + /** Sorts the results of your query by the number of `comments`, `reactions`, `reactions-+1`, `reactions--1`, `reactions-smile`, `reactions-thinking_face`, `reactions-heart`, `reactions-tada`, or `interactions`. You can also sort results by how recently the items were `created` or `updated`, Default: [best match](https://docs.github.com/rest/reference/search#ranking-search-results) */ + sort?: + | "comments" + | "reactions" + | "reactions-+1" + | "reactions--1" + | "reactions-smile" + | "reactions-thinking_face" + | "reactions-heart" + | "reactions-tada" + | "interactions" + | "created" + | "updated"; + /** Determines whether the first search result returned is the highest number of matches (`desc`) or lowest number of matches (`asc`). This parameter is ignored unless you provide `sort`. */ + order?: components["parameters"]["order"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": { + total_count: number; + incomplete_results: boolean; + items: components["schemas"]["issue-search-result-item"][]; + }; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * Find labels in a repository with names or descriptions that match search keywords. Returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). + * + * When searching for labels, you can get text match metadata for the label **name** and **description** fields when you pass the `text-match` media type. For more details about how to receive highlighted search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). + * + * For example, if you want to find labels in the `linguist` repository that match `bug`, `defect`, or `enhancement`. Your query might look like this: + * + * `q=bug+defect+enhancement&repository_id=64778136` + * + * The labels that best match the query appear first in the search results. + */ + "search/labels": { + parameters: { + query: { + /** The id of the repository. */ + repository_id: number; + /** The search keywords. This endpoint does not accept qualifiers in the query. To learn more about the format of the query, see [Constructing a search query](https://docs.github.com/rest/reference/search#constructing-a-search-query). */ + q: string; + /** Sorts the results of your query by when the label was `created` or `updated`. Default: [best match](https://docs.github.com/rest/reference/search#ranking-search-results) */ + sort?: "created" | "updated"; + /** Determines whether the first search result returned is the highest number of matches (`desc`) or lowest number of matches (`asc`). This parameter is ignored unless you provide `sort`. */ + order?: components["parameters"]["order"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. 
*/ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": { + total_count: number; + incomplete_results: boolean; + items: components["schemas"]["label-search-result-item"][]; + }; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Find repositories via various criteria. This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). + * + * When searching for repositories, you can get text match metadata for the **name** and **description** fields when you pass the `text-match` media type. For more details about how to receive highlighted search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). + * + * For example, if you want to search for popular Tetris repositories written in assembly code, your query might look like this: + * + * `q=tetris+language:assembly&sort=stars&order=desc` + * + * This query searches for repositories with the word `tetris` in the name, the description, or the README. The results are limited to repositories where the primary language is assembly. The results are sorted by stars in descending order, so that the most popular repositories appear first in the search results. + */ + "search/repos": { + parameters: { + query: { + /** The query contains one or more search keywords and qualifiers. Qualifiers allow you to limit your search to specific areas of GitHub. The REST API supports the same qualifiers as the web interface for GitHub. To learn more about the format of the query, see [Constructing a search query](https://docs.github.com/rest/reference/search#constructing-a-search-query). See "[Searching for repositories](https://docs.github.com/articles/searching-for-repositories/)" for a detailed list of qualifiers. */ + q: string; + /** Sorts the results of your query by number of `stars`, `forks`, or `help-wanted-issues` or how recently the items were `updated`. Default: [best match](https://docs.github.com/rest/reference/search#ranking-search-results) */ + sort?: "stars" | "forks" | "help-wanted-issues" | "updated"; + /** Determines whether the first search result returned is the highest number of matches (`desc`) or lowest number of matches (`asc`). This parameter is ignored unless you provide `sort`. */ + order?: components["parameters"]["order"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": { + total_count: number; + incomplete_results: boolean; + items: components["schemas"]["repo-search-result-item"][]; + }; + }; + }; + 304: components["responses"]["not_modified"]; + 422: components["responses"]["validation_failed"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** + * Find topics via various criteria. Results are sorted by best match. This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). See "[Searching topics](https://docs.github.com/articles/searching-topics/)" for a detailed list of qualifiers. 
+ * + * When searching for topics, you can get text match metadata for the topic's **short\_description**, **description**, **name**, or **display\_name** field when you pass the `text-match` media type. For more details about how to receive highlighted search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). + * + * For example, if you want to search for topics related to Ruby that are featured on https://github.com/topics. Your query might look like this: + * + * `q=ruby+is:featured` + * + * This query searches for topics with the keyword `ruby` and limits the results to find only topics that are featured. The topics that are the best match for the query appear first in the search results. + */ + "search/topics": { + parameters: { + query: { + /** The query contains one or more search keywords and qualifiers. Qualifiers allow you to limit your search to specific areas of GitHub. The REST API supports the same qualifiers as the web interface for GitHub. To learn more about the format of the query, see [Constructing a search query](https://docs.github.com/rest/reference/search#constructing-a-search-query). */ + q: string; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": { + total_count: number; + incomplete_results: boolean; + items: components["schemas"]["topic-search-result-item"][]; + }; + }; + }; + 304: components["responses"]["not_modified"]; + }; + }; + /** + * Find users via various criteria. This method returns up to 100 results [per page](https://docs.github.com/rest/overview/resources-in-the-rest-api#pagination). + * + * When searching for users, you can get text match metadata for the issue **login**, public **email**, and **name** fields when you pass the `text-match` media type. For more details about highlighting search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). For more details about how to receive highlighted search results, see [Text match metadata](https://docs.github.com/rest/reference/search#text-match-metadata). + * + * For example, if you're looking for a list of popular users, you might try this query: + * + * `q=tom+repos:%3E42+followers:%3E1000` + * + * This query searches for users with the name `tom`. The results are restricted to users with more than 42 repositories and over 1,000 followers. + */ + "search/users": { + parameters: { + query: { + /** The query contains one or more search keywords and qualifiers. Qualifiers allow you to limit your search to specific areas of GitHub. The REST API supports the same qualifiers as the web interface for GitHub. To learn more about the format of the query, see [Constructing a search query](https://docs.github.com/rest/reference/search#constructing-a-search-query). See "[Searching users](https://docs.github.com/search-github/searching-on-github/searching-users)" for a detailed list of qualifiers. */ + q: string; + /** Sorts the results of your query by number of `followers` or `repositories`, or when the person `joined` GitHub. 
Default: [best match](https://docs.github.com/rest/reference/search#ranking-search-results) */ + sort?: "followers" | "repositories" | "joined"; + /** Determines whether the first search result returned is the highest number of matches (`desc`) or lowest number of matches (`asc`). This parameter is ignored unless you provide `sort`. */ + order?: components["parameters"]["order"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": { + total_count: number; + incomplete_results: boolean; + items: components["schemas"]["user-search-result-item"][]; + }; + }; + }; + 304: components["responses"]["not_modified"]; + 422: components["responses"]["validation_failed"]; + 503: components["responses"]["service_unavailable"]; + }; + }; + /** **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the [Get a team by name](https://docs.github.com/rest/reference/teams#get-a-team-by-name) endpoint. */ + "teams/get-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["team-full"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Delete a team](https://docs.github.com/rest/reference/teams#delete-a-team) endpoint. + * + * To delete a team, the authenticated user must be an organization owner or team maintainer. + * + * If you are an organization owner, deleting a parent team will delete all of its child teams as well. + */ + "teams/delete-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Update a team](https://docs.github.com/rest/reference/teams#update-a-team) endpoint. + * + * To edit a team, the authenticated user must either be an organization owner or a team maintainer. + * + * **Note:** With nested teams, the `privacy` for parent teams cannot be `secret`. + */ + "teams/update-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + }; + }; + responses: { + /** Response when the updated information already exists */ + 200: { + content: { + "application/json": components["schemas"]["team-full"]; + }; + }; + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["team-full"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description The name of the team. */ + name: string; + /** @description The description of the team. 
*/ + description?: string; + /** + * @description The level of privacy this team should have. Editing teams without specifying this parameter leaves `privacy` intact. The options are: + * **For a non-nested team:** + * \* `secret` - only visible to organization owners and members of this team. + * \* `closed` - visible to all members of this organization. + * **For a parent or child team:** + * \* `closed` - visible to all members of this organization. + * @enum {string} + */ + privacy?: "secret" | "closed"; + /** + * @description **Deprecated**. The permission that new repositories will be added to the team with when none is specified. + * @default pull + * @enum {string} + */ + permission?: "pull" | "push" | "admin"; + /** @description The ID of a team to set as the parent team. */ + parent_team_id?: number | null; + }; + }; + }; + }; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List discussions`](https://docs.github.com/rest/reference/teams#list-discussions) endpoint. + * + * List all discussions on a team's page. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + "teams/list-discussions-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + }; + query: { + /** The direction to sort the results by. */ + direction?: components["parameters"]["direction"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["team-discussion"][]; + }; + }; + }; + }; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`Create a discussion`](https://docs.github.com/rest/reference/teams#create-a-discussion) endpoint. + * + * Creates a new discussion post on a team's page. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + "teams/create-discussion-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["team-discussion"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The discussion post's title. */ + title: string; + /** @description The discussion post's body text. */ + body: string; + /** + * @description Private posts are only visible to team members, organization owners, and team maintainers. 
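A sketch of reusing the generated request-body type for the deprecated legacy route, assuming it maps to PATCH /teams/{team_id}; the deprecation notice above recommends the by-name endpoint instead, so this is purely illustrative.

import type { operations } from "./types"; // assumed export name and module path

type UpdateTeamLegacy = operations["teams/update-legacy"];
type UpdateTeamLegacyBody =
  UpdateTeamLegacy["requestBody"]["content"]["application/json"];
type TeamFull = UpdateTeamLegacy["responses"][200]["content"]["application/json"];

async function updateTeamLegacy(teamId: number, token: string): Promise<TeamFull> {
  // `name` is required; `privacy` may only be "secret" or "closed".
  const body: UpdateTeamLegacyBody = {
    name: "platform",
    description: "Owns shared infrastructure",
    privacy: "closed",
  };
  const res = await fetch(`https://api.github.com/teams/${teamId}`, {
    method: "PATCH",
    headers: {
      Accept: "application/vnd.github+json",
      Authorization: `Bearer ${token}`,
      "Content-Type": "application/json",
    },
    body: JSON.stringify(body),
  });
  if (!res.ok) throw new Error(`PATCH /teams/${teamId} failed: ${res.status}`);
  return (await res.json()) as TeamFull;
}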
Public posts are visible to all members of the organization. Set to `true` to create a private post. + * @default false + */ + private?: boolean; + }; + }; + }; + }; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Get a discussion](https://docs.github.com/rest/reference/teams#get-a-discussion) endpoint. + * + * Get a specific discussion on a team's page. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + "teams/get-discussion-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + /** The number that identifies the discussion. */ + discussion_number: components["parameters"]["discussion-number"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["team-discussion"]; + }; + }; + }; + }; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`Delete a discussion`](https://docs.github.com/rest/reference/teams#delete-a-discussion) endpoint. + * + * Delete a discussion from a team's page. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + "teams/delete-discussion-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + /** The number that identifies the discussion. */ + discussion_number: components["parameters"]["discussion-number"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Update a discussion](https://docs.github.com/rest/reference/teams#update-a-discussion) endpoint. + * + * Edits the title and body text of a discussion post. Only the parameters you provide are updated. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + "teams/update-discussion-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + /** The number that identifies the discussion. */ + discussion_number: components["parameters"]["discussion-number"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["team-discussion"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The discussion post's title. */ + title?: string; + /** @description The discussion post's body text. */ + body?: string; + }; + }; + }; + }; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [List discussion comments](https://docs.github.com/rest/reference/teams#list-discussion-comments) endpoint. + * + * List all comments on a team discussion. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). 
+ */ + "teams/list-discussion-comments-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + /** The number that identifies the discussion. */ + discussion_number: components["parameters"]["discussion-number"]; + }; + query: { + /** The direction to sort the results by. */ + direction?: components["parameters"]["direction"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["team-discussion-comment"][]; + }; + }; + }; + }; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Create a discussion comment](https://docs.github.com/rest/reference/teams#create-a-discussion-comment) endpoint. + * + * Creates a new comment on a team discussion. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + * + * This endpoint triggers [notifications](https://docs.github.com/en/github/managing-subscriptions-and-notifications-on-github/about-notifications). Creating content too quickly using this endpoint may result in secondary rate limiting. See "[Secondary rate limits](https://docs.github.com/rest/overview/resources-in-the-rest-api#secondary-rate-limits)" and "[Dealing with secondary rate limits](https://docs.github.com/rest/guides/best-practices-for-integrators#dealing-with-secondary-rate-limits)" for details. + */ + "teams/create-discussion-comment-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + /** The number that identifies the discussion. */ + discussion_number: components["parameters"]["discussion-number"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["team-discussion-comment"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The discussion comment's body text. */ + body: string; + }; + }; + }; + }; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Get a discussion comment](https://docs.github.com/rest/reference/teams#get-a-discussion-comment) endpoint. + * + * Get a specific comment on a team discussion. OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + "teams/get-discussion-comment-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + /** The number that identifies the discussion. */ + discussion_number: components["parameters"]["discussion-number"]; + /** The number that identifies the comment. */ + comment_number: components["parameters"]["comment-number"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["team-discussion-comment"]; + }; + }; + }; + }; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. 
We recommend migrating your existing code to use the new [Delete a discussion comment](https://docs.github.com/rest/reference/teams#delete-a-discussion-comment) endpoint. + * + * Deletes a comment on a team discussion. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + "teams/delete-discussion-comment-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + /** The number that identifies the discussion. */ + discussion_number: components["parameters"]["discussion-number"]; + /** The number that identifies the comment. */ + comment_number: components["parameters"]["comment-number"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Update a discussion comment](https://docs.github.com/rest/reference/teams#update-a-discussion-comment) endpoint. + * + * Edits the body text of a discussion comment. OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + "teams/update-discussion-comment-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + /** The number that identifies the discussion. */ + discussion_number: components["parameters"]["discussion-number"]; + /** The number that identifies the comment. */ + comment_number: components["parameters"]["comment-number"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["team-discussion-comment"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** @description The discussion comment's body text. */ + body: string; + }; + }; + }; + }; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List reactions for a team discussion comment`](https://docs.github.com/rest/reference/reactions#list-reactions-for-a-team-discussion-comment) endpoint. + * + * List the reactions to a [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments). OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + "reactions/list-for-team-discussion-comment-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + /** The number that identifies the discussion. */ + discussion_number: components["parameters"]["discussion-number"]; + /** The number that identifies the comment. */ + comment_number: components["parameters"]["comment-number"]; + }; + query: { + /** Returns a single [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types). Omit this parameter to list all reactions to a team discussion comment. */ + content?: + | "+1" + | "-1" + | "laugh" + | "confused" + | "heart" + | "hooray" + | "rocket" + | "eyes"; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. 
*/ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["reaction"][]; + }; + }; + }; + }; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new "[Create reaction for a team discussion comment](https://docs.github.com/rest/reference/reactions#create-reaction-for-a-team-discussion-comment)" endpoint. + * + * Create a reaction to a [team discussion comment](https://docs.github.com/rest/reference/teams#discussion-comments). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). A response with an HTTP `200` status means that you already added the reaction type to this team discussion comment. + */ + "reactions/create-for-team-discussion-comment-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + /** The number that identifies the discussion. */ + discussion_number: components["parameters"]["discussion-number"]; + /** The number that identifies the comment. */ + comment_number: components["parameters"]["comment-number"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["reaction"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types) to add to the team discussion comment. + * @enum {string} + */ + content: + | "+1" + | "-1" + | "laugh" + | "confused" + | "heart" + | "hooray" + | "rocket" + | "eyes"; + }; + }; + }; + }; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List reactions for a team discussion`](https://docs.github.com/rest/reference/reactions#list-reactions-for-a-team-discussion) endpoint. + * + * List the reactions to a [team discussion](https://docs.github.com/rest/reference/teams#discussions). OAuth access tokens require the `read:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). + */ + "reactions/list-for-team-discussion-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + /** The number that identifies the discussion. */ + discussion_number: components["parameters"]["discussion-number"]; + }; + query: { + /** Returns a single [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types). Omit this parameter to list all reactions to a team discussion. */ + content?: + | "+1" + | "-1" + | "laugh" + | "confused" + | "heart" + | "hooray" + | "rocket" + | "eyes"; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["reaction"][]; + }; + }; + }; + }; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. 
We recommend migrating your existing code to use the new [`Create reaction for a team discussion`](https://docs.github.com/rest/reference/reactions#create-reaction-for-a-team-discussion) endpoint. + * + * Create a reaction to a [team discussion](https://docs.github.com/rest/reference/teams#discussions). OAuth access tokens require the `write:discussion` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). A response with an HTTP `200` status means that you already added the reaction type to this team discussion. + */ + "reactions/create-for-team-discussion-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + /** The number that identifies the discussion. */ + discussion_number: components["parameters"]["discussion-number"]; + }; + }; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["reaction"]; + }; + }; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The [reaction type](https://docs.github.com/rest/reference/reactions#reaction-types) to add to the team discussion. + * @enum {string} + */ + content: + | "+1" + | "-1" + | "laugh" + | "confused" + | "heart" + | "hooray" + | "rocket" + | "eyes"; + }; + }; + }; + }; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List pending team invitations`](https://docs.github.com/rest/reference/teams#list-pending-team-invitations) endpoint. + * + * The return hash contains a `role` field which refers to the Organization Invitation role and will be one of the following values: `direct_member`, `admin`, `billing_manager`, `hiring_manager`, or `reinstate`. If the invitee is not a GitHub member, the `login` field in the return hash will be `null`. + */ + "teams/list-pending-invitations-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["organization-invitation"][]; + }; + }; + }; + }; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List team members`](https://docs.github.com/rest/reference/teams#list-team-members) endpoint. + * + * Team members will include the members of child teams. + */ + "teams/list-members-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + }; + query: { + /** Filters members returned by their role in the team. */ + role?: "member" | "maintainer" | "all"; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["simple-user"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * The "Get team member" endpoint (described below) is deprecated. 
+ * + * We recommend using the [Get team membership for a user](https://docs.github.com/rest/reference/teams#get-team-membership-for-a-user) endpoint instead. It allows you to get both active and pending memberships. + * + * To list members in a team, the team must be visible to the authenticated user. + */ + "teams/get-member-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** if user is a member */ + 204: never; + /** if user is not a member */ + 404: unknown; + }; + }; + /** + * The "Add team member" endpoint (described below) is deprecated. + * + * We recommend using the [Add or update team membership for a user](https://docs.github.com/rest/reference/teams#add-or-update-team-membership-for-a-user) endpoint instead. It allows you to invite new organization members to your teams. + * + * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * To add someone to a team, the authenticated user must be an organization owner or a team maintainer in the team they're changing. The person being added to the team must be a member of the team's organization. + * + * **Note:** When you have team synchronization set up for a team with your organization's identity provider (IdP), you will see an error if you attempt to use the API for making changes to the team's membership. If you have access to manage group membership in your IdP, you can manage GitHub team membership through your identity provider, which automatically adds and removes team members in an organization. For more information, see "[Synchronizing teams between your identity provider and GitHub](https://docs.github.com/articles/synchronizing-teams-between-your-identity-provider-and-github/)." + * + * Note that you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." + */ + "teams/add-member-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 204: never; + 403: components["responses"]["forbidden"]; + /** Not Found if team synchronization is set up */ + 404: unknown; + /** Unprocessable Entity if you attempt to add an organization to a team or you attempt to add a user to a team when they are not a member of at least one other team in the same organization */ + 422: unknown; + }; + }; + /** + * The "Remove team member" endpoint (described below) is deprecated. + * + * We recommend using the [Remove team membership for a user](https://docs.github.com/rest/reference/teams#remove-team-membership-for-a-user) endpoint instead. It allows you to remove both active and pending memberships. + * + * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. 
+ * + * To remove a team member, the authenticated user must have 'admin' permissions to the team or be an owner of the org that the team is associated with. Removing a team member does not delete the user, it just removes them from the team. + * + * **Note:** When you have team synchronization set up for a team with your organization's identity provider (IdP), you will see an error if you attempt to use the API for making changes to the team's membership. If you have access to manage group membership in your IdP, you can manage GitHub team membership through your identity provider, which automatically adds and removes team members in an organization. For more information, see "[Synchronizing teams between your identity provider and GitHub](https://docs.github.com/articles/synchronizing-teams-between-your-identity-provider-and-github/)." + */ + "teams/remove-member-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 204: never; + /** Not Found if team synchronization is setup */ + 404: unknown; + }; + }; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Get team membership for a user](https://docs.github.com/rest/reference/teams#get-team-membership-for-a-user) endpoint. + * + * Team members will include the members of child teams. + * + * To get a user's membership with a team, the team must be visible to the authenticated user. + * + * **Note:** + * The response contains the `state` of the membership and the member's `role`. + * + * The `role` for organization owners is set to `maintainer`. For more information about `maintainer` roles, see [Create a team](https://docs.github.com/rest/reference/teams#create-a-team). + */ + "teams/get-membership-for-user-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["team-membership"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Add or update team membership for a user](https://docs.github.com/rest/reference/teams#add-or-update-team-membership-for-a-user) endpoint. + * + * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * If the user is already a member of the team's organization, this endpoint will add the user to the team. To add a membership between an organization member and a team, the authenticated user must be an organization owner or a team maintainer. + * + * **Note:** When you have team synchronization set up for a team with your organization's identity provider (IdP), you will see an error if you attempt to use the API for making changes to the team's membership. 
If you have access to manage group membership in your IdP, you can manage GitHub team membership through your identity provider, which automatically adds and removes team members in an organization. For more information, see "[Synchronizing teams between your identity provider and GitHub](https://docs.github.com/articles/synchronizing-teams-between-your-identity-provider-and-github/)." + * + * If the user is unaffiliated with the team's organization, this endpoint will send an invitation to the user via email. This newly-created membership will be in the "pending" state until the user accepts the invitation, at which point the membership will transition to the "active" state and the user will be added as a member of the team. To add a membership between an unaffiliated user and a team, the authenticated user must be an organization owner. + * + * If the user is already a member of the team, this endpoint will update the role of the team member's role. To update the membership of a team member, the authenticated user must be an organization owner or a team maintainer. + */ + "teams/add-or-update-membership-for-user-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["team-membership"]; + }; + }; + /** Forbidden if team synchronization is set up */ + 403: unknown; + 404: components["responses"]["not_found"]; + /** Unprocessable Entity if you attempt to add an organization to a team */ + 422: unknown; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The role that this user should have in the team. + * @default member + * @enum {string} + */ + role?: "member" | "maintainer"; + }; + }; + }; + }; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Remove team membership for a user](https://docs.github.com/rest/reference/teams#remove-team-membership-for-a-user) endpoint. + * + * Team synchronization is available for organizations using GitHub Enterprise Cloud. For more information, see [GitHub's products](https://docs.github.com/github/getting-started-with-github/githubs-products) in the GitHub Help documentation. + * + * To remove a membership between a user and a team, the authenticated user must have 'admin' permissions to the team or be an owner of the organization that the team is associated with. Removing team membership does not delete the user, it just removes their membership from the team. + * + * **Note:** When you have team synchronization set up for a team with your organization's identity provider (IdP), you will see an error if you attempt to use the API for making changes to the team's membership. If you have access to manage group membership in your IdP, you can manage GitHub team membership through your identity provider, which automatically adds and removes team members in an organization. For more information, see "[Synchronizing teams between your identity provider and GitHub](https://docs.github.com/articles/synchronizing-teams-between-your-identity-provider-and-github/)." + */ + "teams/remove-membership-for-user-legacy": { + parameters: { + path: { + /** The unique identifier of the team. 
*/ + team_id: components["parameters"]["team-id"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 204: never; + /** if team synchronization is set up */ + 403: unknown; + }; + }; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [`List team projects`](https://docs.github.com/rest/reference/teams#list-team-projects) endpoint. + * + * Lists the organization projects for a team. + */ + "teams/list-projects-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["team-project"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Check team permissions for a project](https://docs.github.com/rest/reference/teams#check-team-permissions-for-a-project) endpoint. + * + * Checks whether a team has `read`, `write`, or `admin` permissions for an organization project. The response includes projects inherited from a parent team. + */ + "teams/check-permissions-for-project-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + /** The unique identifier of the project. */ + project_id: components["parameters"]["project-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["team-project"]; + }; + }; + /** Not Found if project is not managed by this team */ + 404: unknown; + }; + }; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Add or update team project permissions](https://docs.github.com/rest/reference/teams#add-or-update-team-project-permissions) endpoint. + * + * Adds an organization project to a team. To add a project to a team or update the team's permission on a project, the authenticated user must have `admin` permissions for the project. The project and team must be part of the same organization. + */ + "teams/add-or-update-project-permissions-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + /** The unique identifier of the project. */ + project_id: components["parameters"]["project-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + /** Forbidden if the project is not owned by the organization */ + 403: { + content: { + "application/json": { + message?: string; + documentation_url?: string; + }; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The permission to grant to the team for this project. Default: the team's `permission` attribute will be used to determine what permission to grant the team on this project. 
Note that, if you choose not to pass any parameters, you'll need to set `Content-Length` to zero when calling this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." + * @enum {string} + */ + permission?: "read" | "write" | "admin"; + }; + }; + }; + }; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Remove a project from a team](https://docs.github.com/rest/reference/teams#remove-a-project-from-a-team) endpoint. + * + * Removes an organization project from a team. An organization owner or a team maintainer can remove any project from the team. To remove a project from a team as an organization member, the authenticated user must have `read` access to both the team and project, or `admin` access to the team or project. **Note:** This endpoint removes the project from the team, but does not delete it. + */ + "teams/remove-project-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + /** The unique identifier of the project. */ + project_id: components["parameters"]["project-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [List team repositories](https://docs.github.com/rest/reference/teams#list-team-repositories) endpoint. */ + "teams/list-repos-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["minimal-repository"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * **Note**: Repositories inherited through a parent team will also be checked. + * + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Check team permissions for a repository](https://docs.github.com/rest/reference/teams#check-team-permissions-for-a-repository) endpoint. + * + * You can also get information about the specified repository, including what permissions the team grants on it, by passing the following custom [media type](https://docs.github.com/rest/overview/media-types/) via the `Accept` header: + */ + "teams/check-permissions-for-repo-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. 
*/ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Alternative response with extra repository information */ + 200: { + content: { + "application/json": components["schemas"]["team-repository"]; + }; + }; + /** Response if repository is managed by this team */ + 204: never; + /** Not Found if repository is not managed by this team */ + 404: unknown; + }; + }; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new "[Add or update team repository permissions](https://docs.github.com/rest/reference/teams#add-or-update-team-repository-permissions)" endpoint. + * + * To add a repository to a team or update the team's permission on a repository, the authenticated user must have admin access to the repository, and must be able to see the team. The repository must be owned by the organization, or a direct fork of a repository owned by the organization. You will get a `422 Unprocessable Entity` status if you attempt to add a repository to a team that is not owned by the organization. + * + * Note that, if you choose not to pass any parameters, you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." + */ + "teams/add-or-update-repo-permissions-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 204: never; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The permission to grant the team on this repository. If no permission is specified, the team's `permission` attribute will be used to determine what permission to grant the team on this repository. + * @enum {string} + */ + permission?: "pull" | "push" | "admin"; + }; + }; + }; + }; + /** + * **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. We recommend migrating your existing code to use the new [Remove a repository from a team](https://docs.github.com/rest/reference/teams#remove-a-repository-from-a-team) endpoint. + * + * If the authenticated user is an organization owner or a team maintainer, they can remove any repositories from the team. To remove a repository from a team as an organization member, the authenticated user must have admin access to the repository and must be able to see the team. NOTE: This does not delete the repository, it just removes it from the team. + */ + "teams/remove-repo-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** **Deprecation Notice:** This endpoint route is deprecated and will be removed from the Teams API. 
We recommend migrating your existing code to use the new [`List child teams`](https://docs.github.com/rest/reference/teams#list-child-teams) endpoint. */ + "teams/list-child-legacy": { + parameters: { + path: { + /** The unique identifier of the team. */ + team_id: components["parameters"]["team-id"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** if child teams exist */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["team"][]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * If the authenticated user is authenticated through basic authentication or OAuth with the `user` scope, then the response lists public and private profile information. + * + * If the authenticated user is authenticated through OAuth without the `user` scope, then the response lists only public profile information. + */ + "users/get-authenticated": { + parameters: {}; + responses: { + /** Response */ + 200: { + content: { + "application/json": + | components["schemas"]["private-user"] + | components["schemas"]["public-user"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + /** **Note:** If your email is set to private and you send an `email` parameter as part of this request to update your profile, your privacy settings are still enforced: the email address will not be displayed on your public profile or via the API. */ + "users/update-authenticated": { + parameters: {}; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["private-user"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The new name of the user. + * @example Omar Jahandar + */ + name?: string; + /** + * @description The publicly visible email address of the user. + * @example omar@example.com + */ + email?: string; + /** + * @description The new blog URL of the user. + * @example blog.example.com + */ + blog?: string; + /** + * @description The new Twitter username of the user. + * @example therealomarj + */ + twitter_username?: string | null; + /** + * @description The new company of the user. + * @example Acme corporation + */ + company?: string; + /** + * @description The new location of the user. + * @example Berlin, Germany + */ + location?: string; + /** @description The new hiring availability of the user. */ + hireable?: boolean; + /** @description The new short biography of the user. */ + bio?: string; + }; + }; + }; + }; + /** List the users you've blocked on your personal account. 
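A sketch of updating the authenticated user's profile with the generated body type, assuming the operation maps to PATCH /user; field values are placeholders.

import type { operations } from "./types"; // assumed export name and module path

type UpdateProfile = operations["users/update-authenticated"];
type UpdateProfileBody = UpdateProfile["requestBody"]["content"]["application/json"];
type PrivateUser = UpdateProfile["responses"][200]["content"]["application/json"];

async function updateProfile(token: string): Promise<PrivateUser> {
  // All fields are optional; only the ones provided are changed.
  const body: UpdateProfileBody = {
    location: "Berlin, Germany",
    hireable: false,
    bio: "Building developer tooling",
  };
  const res = await fetch("https://api.github.com/user", {
    method: "PATCH",
    headers: {
      Accept: "application/vnd.github+json",
      Authorization: `Bearer ${token}`,
      "Content-Type": "application/json",
    },
    body: JSON.stringify(body),
  });
  if (!res.ok) throw new Error(`PATCH /user failed: ${res.status}`);
  return (await res.json()) as PrivateUser;
}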
*/ + "users/list-blocked-by-authenticated-user": { + parameters: {}; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["simple-user"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + "users/check-blocked": { + parameters: { + path: { + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** If the user is blocked */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + /** If the user is not blocked */ + 404: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + }; + }; + "users/block": { + parameters: { + path: { + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + "users/unblock": { + parameters: { + path: { + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Lists the authenticated user's codespaces. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces` repository permission to use this endpoint. + */ + "codespaces/list-for-authenticated-user": { + parameters: { + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + /** ID of the Repository to filter on */ + repository_id?: components["parameters"]["repository-id-in-query"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": { + total_count: number; + codespaces: components["schemas"]["codespace"][]; + }; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Creates a new codespace, owned by the authenticated user. + * + * This endpoint requires either a `repository_id` OR a `pull_request` but not both. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces` repository permission to use this endpoint. 
+ */ + "codespaces/create-for-authenticated-user": { + responses: { + /** Response when the codespace was successfully created */ + 201: { + content: { + "application/json": components["schemas"]["codespace"]; + }; + }; + /** Response when the codespace creation partially failed but is being retried in the background */ + 202: { + content: { + "application/json": components["schemas"]["codespace"]; + }; + }; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + requestBody: { + content: { + "application/json": + | { + /** @description Repository id for this codespace */ + repository_id: number; + /** @description Git ref (typically a branch name) for this codespace */ + ref?: string; + /** @description Location for this codespace. Assigned by IP if not provided */ + location?: string; + /** @description IP for location auto-detection when proxying a request */ + client_ip?: string; + /** @description Machine type to use for this codespace */ + machine?: string; + /** @description Path to devcontainer.json config to use for this codespace */ + devcontainer_path?: string; + /** @description Whether to authorize requested permissions from devcontainer.json */ + multi_repo_permissions_opt_out?: boolean; + /** @description Working directory for this codespace */ + working_directory?: string; + /** @description Time in minutes before codespace stops from inactivity */ + idle_timeout_minutes?: number; + /** @description Display name for this codespace */ + display_name?: string; + /** @description Duration in minutes after codespace has gone idle in which it will be deleted. Must be integer minutes between 0 and 43200 (30 days). */ + retention_period_minutes?: number; + } + | { + /** @description Pull request number for this codespace */ + pull_request: { + /** @description Pull request number */ + pull_request_number: number; + /** @description Repository id for this codespace */ + repository_id: number; + }; + /** @description Location for this codespace. Assigned by IP if not provided */ + location?: string; + /** @description Machine type to use for this codespace */ + machine?: string; + /** @description Path to devcontainer.json config to use for this codespace */ + devcontainer_path?: string; + /** @description Working directory for this codespace */ + working_directory?: string; + /** @description Time in minutes before codespace stops from inactivity */ + idle_timeout_minutes?: number; + }; + }; + }; + }; + /** + * Lists all secrets available for a user's Codespaces without revealing their + * encrypted values. + * + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces_user_secrets` user permission to use this endpoint. + */ + "codespaces/list-secrets-for-authenticated-user": { + parameters: { + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": { + total_count: number; + secrets: components["schemas"]["codespaces-secret"][]; + }; + }; + }; + }; + }; + /** + * Gets your public key, which you need to encrypt secrets. 
You need to encrypt a secret before you can create or update secrets. + * + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces_user_secrets` user permission to use this endpoint. + */ + "codespaces/get-public-key-for-authenticated-user": { + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["codespaces-user-public-key"]; + }; + }; + }; + }; + /** + * Gets a secret available to a user's codespaces without revealing its encrypted value. + * + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces_user_secrets` user permission to use this endpoint. + */ + "codespaces/get-secret-for-authenticated-user": { + parameters: { + path: { + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["codespaces-secret"]; + }; + }; + }; + }; + /** + * Creates or updates a secret for a user's codespace with an encrypted value. Encrypt your secret using + * [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages). + * + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must also have Codespaces access to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces_user_secrets` user permission and `codespaces_secrets` repository permission on all referenced repositories to use this endpoint. + * + * #### Example encrypting a secret using Node.js + * + * Encrypt your secret using the [tweetsodium](https://github.com/github/tweetsodium) library. + * + * ``` + * const sodium = require('tweetsodium'); + * + * const key = "base64-encoded-public-key"; + * const value = "plain-text-secret"; + * + * // Convert the message and key to Uint8Array's (Buffer implements that interface) + * const messageBytes = Buffer.from(value); + * const keyBytes = Buffer.from(key, 'base64'); + * + * // Encrypt using LibSodium. + * const encryptedBytes = sodium.seal(messageBytes, keyBytes); + * + * // Base64 the encrypted secret + * const encrypted = Buffer.from(encryptedBytes).toString('base64'); + * + * console.log(encrypted); + * ``` + * + * + * #### Example encrypting a secret using Python + * + * Encrypt your secret using [pynacl](https://pynacl.readthedocs.io/en/latest/public/#nacl-public-sealedbox) with Python 3. + * + * ``` + * from base64 import b64encode + * from nacl import encoding, public + * + * def encrypt(public_key: str, secret_value: str) -> str: + * """Encrypt a Unicode string using the public key.""" + * public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder()) + * sealed_box = public.SealedBox(public_key) + * encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) + * return b64encode(encrypted).decode("utf-8") + * ``` + * + * #### Example encrypting a secret using C# + * + * Encrypt your secret using the [Sodium.Core](https://www.nuget.org/packages/Sodium.Core/) package. 
+ * + * ``` + * var secretValue = System.Text.Encoding.UTF8.GetBytes("mySecret"); + * var publicKey = Convert.FromBase64String("2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvvcCU="); + * + * var sealedPublicKeyBox = Sodium.SealedPublicKeyBox.Create(secretValue, publicKey); + * + * Console.WriteLine(Convert.ToBase64String(sealedPublicKeyBox)); + * ``` + * + * #### Example encrypting a secret using Ruby + * + * Encrypt your secret using the [rbnacl](https://github.com/RubyCrypto/rbnacl) gem. + * + * ```ruby + * require "rbnacl" + * require "base64" + * + * key = Base64.decode64("+ZYvJDZMHUfBkJdyq5Zm9SKqeuBQ4sj+6sfjlH4CgG0=") + * public_key = RbNaCl::PublicKey.new(key) + * + * box = RbNaCl::Boxes::Sealed.from_public_key(public_key) + * encrypted_secret = box.encrypt("my_secret") + * + * # Print the base64 encoded secret + * puts Base64.strict_encode64(encrypted_secret) + * ``` + */ + "codespaces/create-or-update-secret-for-authenticated-user": { + parameters: { + path: { + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** Response after successfully creaing a secret */ + 201: { + content: { + "application/json": { [key: string]: unknown }; + }; + }; + /** Response after successfully updating a secret */ + 204: never; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description Value for your secret, encrypted with [LibSodium](https://libsodium.gitbook.io/doc/bindings_for_other_languages) using the public key retrieved from the [Get the public key for the authenticated user](https://docs.github.com/rest/reference/codespaces#get-the-public-key-for-the-authenticated-user) endpoint. */ + encrypted_value?: string; + /** @description ID of the key you used to encrypt the secret. */ + key_id: string; + /** @description An array of repository ids that can access the user secret. You can manage the list of selected repositories using the [List selected repositories for a user secret](https://docs.github.com/rest/reference/codespaces#list-selected-repositories-for-a-user-secret), [Set selected repositories for a user secret](https://docs.github.com/rest/reference/codespaces#set-selected-repositories-for-a-user-secret), and [Remove a selected repository from a user secret](https://docs.github.com/rest/reference/codespaces#remove-a-selected-repository-from-a-user-secret) endpoints. */ + selected_repository_ids?: string[]; + }; + }; + }; + }; + /** + * Deletes a secret from a user's codespaces using the secret name. Deleting the secret will remove access from all codespaces that were allowed to access the secret. + * + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces_user_secrets` user permission to use this endpoint. + */ + "codespaces/delete-secret-for-authenticated-user": { + parameters: { + path: { + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** Response */ + 204: never; + }; + }; + /** + * List the repositories that have been granted the ability to use a user's codespace secret. + * + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. 
+ * + * GitHub Apps must have read access to the `codespaces_user_secrets` user permission and write access to the `codespaces_secrets` repository permission on all referenced repositories to use this endpoint. + */ + "codespaces/list-repositories-for-secret-for-authenticated-user": { + parameters: { + path: { + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": { + total_count: number; + repositories: components["schemas"]["minimal-repository"][]; + }; + }; + }; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Select the repositories that will use a user's codespace secret. + * + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces_user_secrets` user permission and write access to the `codespaces_secrets` repository permission on all referenced repositories to use this endpoint. + */ + "codespaces/set-repositories-for-secret-for-authenticated-user": { + parameters: { + path: { + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + }; + }; + responses: { + /** No Content when repositories were added to the selected list */ + 204: never; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + requestBody: { + content: { + "application/json": { + /** @description An array of repository ids for which a codespace can access the secret. You can manage the list of selected repositories using the [List selected repositories for a user secret](https://docs.github.com/rest/reference/codespaces#list-selected-repositories-for-a-user-secret), [Add a selected repository to a user secret](https://docs.github.com/rest/reference/codespaces#add-a-selected-repository-to-a-user-secret), and [Remove a selected repository from a user secret](https://docs.github.com/rest/reference/codespaces#remove-a-selected-repository-from-a-user-secret) endpoints. */ + selected_repository_ids: number[]; + }; + }; + }; + }; + /** + * Adds a repository to the selected repositories for a user's codespace secret. + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. + * GitHub Apps must have write access to the `codespaces_user_secrets` user permission and write access to the `codespaces_secrets` repository permission on the referenced repository to use this endpoint. + */ + "codespaces/add-repository-for-secret-for-authenticated-user": { + parameters: { + path: { + /** The name of the secret. 
*/ + secret_name: components["parameters"]["secret-name"]; + repository_id: number; + }; + }; + responses: { + /** No Content when repository was added to the selected list */ + 204: never; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Removes a repository from the selected repositories for a user's codespace secret. + * You must authenticate using an access token with the `codespace` or `codespace:secrets` scope to use this endpoint. User must have Codespaces access to use this endpoint. + * GitHub Apps must have write access to the `codespaces_user_secrets` user permission to use this endpoint. + */ + "codespaces/remove-repository-for-secret-for-authenticated-user": { + parameters: { + path: { + /** The name of the secret. */ + secret_name: components["parameters"]["secret-name"]; + repository_id: number; + }; + }; + responses: { + /** No Content when repository was removed from the selected list */ + 204: never; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Gets information about a user's codespace. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces` repository permission to use this endpoint. + */ + "codespaces/get-for-authenticated-user": { + parameters: { + path: { + /** The name of the codespace. */ + codespace_name: components["parameters"]["codespace-name"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["codespace"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Deletes a user's codespace. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces` repository permission to use this endpoint. + */ + "codespaces/delete-for-authenticated-user": { + parameters: { + path: { + /** The name of the codespace. */ + codespace_name: components["parameters"]["codespace-name"]; + }; + }; + responses: { + 202: components["responses"]["accepted"]; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Updates a codespace owned by the authenticated user. Currently only the codespace's machine type and recent folders can be modified using this endpoint. + * + * If you specify a new machine type it will be applied the next time your codespace is started. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces` repository permission to use this endpoint. + */ + "codespaces/update-for-authenticated-user": { + parameters: { + path: { + /** The name of the codespace. 
*/ + codespace_name: components["parameters"]["codespace-name"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["codespace"]; + }; + }; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + requestBody: { + content: { + "application/json": { + /** @description A valid machine to transition this codespace to. */ + machine?: string; + /** @description Display name for this codespace */ + display_name?: string; + /** @description Recently opened folders inside the codespace. It is currently used by the clients to determine the folder path to load the codespace in. */ + recent_folders?: string[]; + }; + }; + }; + }; + /** + * Triggers an export of the specified codespace and returns a URL and ID where the status of the export can be monitored. + * + * You must authenticate using a personal access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces_lifecycle_admin` repository permission to use this endpoint. + */ + "codespaces/export-for-authenticated-user": { + parameters: { + path: { + /** The name of the codespace. */ + codespace_name: components["parameters"]["codespace-name"]; + }; + }; + responses: { + /** Response */ + 202: { + content: { + "application/json": components["schemas"]["codespace-export-details"]; + }; + }; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Gets information about an export of a codespace. + * + * You must authenticate using a personal access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces_lifecycle_admin` repository permission to use this endpoint. + */ + "codespaces/get-export-details-for-authenticated-user": { + parameters: { + path: { + /** The name of the codespace. */ + codespace_name: components["parameters"]["codespace-name"]; + /** The ID of the export operation, or `latest`. Currently only `latest` is currently supported. */ + export_id: components["parameters"]["export-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["codespace-export-details"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * List the machine types a codespace can transition to use. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have read access to the `codespaces_metadata` repository permission to use this endpoint. + */ + "codespaces/codespace-machines-for-authenticated-user": { + parameters: { + path: { + /** The name of the codespace. */ + codespace_name: components["parameters"]["codespace-name"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": { + total_count: number; + machines: components["schemas"]["codespace-machine"][]; + }; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Starts a user's codespace. 
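+ *
+ * For example (an illustrative sketch; the codespace name and token below are
+ * placeholders, not values defined by this spec):
+ *
+ * ```
+ * const { Octokit } = require("@octokit/core");
+ * const octokit = new Octokit({ auth: "<token-with-codespace-scope>" });
+ *
+ * octokit
+ *   .request("POST /user/codespaces/{codespace_name}/start", {
+ *     codespace_name: "my-codespace-name",
+ *   })
+ *   .then(({ data }) => console.log(data.state));
+ * ```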
+ * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces_lifecycle_admin` repository permission to use this endpoint. + */ + "codespaces/start-for-authenticated-user": { + parameters: { + path: { + /** The name of the codespace. */ + codespace_name: components["parameters"]["codespace-name"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["codespace"]; + }; + }; + 304: components["responses"]["not_modified"]; + 400: components["responses"]["bad_request"]; + 401: components["responses"]["requires_authentication"]; + /** Payment required */ + 402: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 409: components["responses"]["conflict"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** + * Stops a user's codespace. + * + * You must authenticate using an access token with the `codespace` scope to use this endpoint. + * + * GitHub Apps must have write access to the `codespaces_lifecycle_admin` repository permission to use this endpoint. + */ + "codespaces/stop-for-authenticated-user": { + parameters: { + path: { + /** The name of the codespace. */ + codespace_name: components["parameters"]["codespace-name"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["codespace"]; + }; + }; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; + /** Sets the visibility for your primary email addresses. */ + "users/set-primary-email-visibility-for-authenticated-user": { + parameters: {}; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["email"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description Denotes whether an email is publicly visible. + * @enum {string} + */ + visibility: "public" | "private"; + }; + }; + }; + }; + /** Lists all of your email addresses, and specifies which one is visible to the public. This endpoint is accessible with the `user:email` scope. */ + "users/list-emails-for-authenticated-user": { + parameters: { + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["email"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** This endpoint is accessible with the `user` scope. 
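+ *
+ * A minimal illustration with Octokit.js; the address and token are placeholders:
+ *
+ * ```
+ * const { Octokit } = require("@octokit/core");
+ * const octokit = new Octokit({ auth: "<token-with-user-scope>" });
+ *
+ * octokit
+ *   .request("POST /user/emails", { emails: ["octocat@example.com"] })
+ *   .then(({ data }) => console.log(data)); // 201 response: the added email objects
+ * ```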
*/ + "users/add-email-for-authenticated-user": { + parameters: {}; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["email"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description Adds one or more email addresses to your GitHub account. Must contain at least one email address. **Note:** Alternatively, you can pass a single email address or an `array` of emails addresses directly, but we recommend that you pass an object using the `emails` key. + * @example [] + */ + emails: string[]; + }; + }; + }; + }; + /** This endpoint is accessible with the `user` scope. */ + "users/delete-email-for-authenticated-user": { + parameters: {}; + responses: { + /** Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description Email addresses associated with the GitHub user account. */ + emails: string[]; + }; + }; + }; + }; + /** Lists the people following the authenticated user. */ + "users/list-followers-for-authenticated-user": { + parameters: { + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["simple-user"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + /** Lists the people who the authenticated user follows. */ + "users/list-followed-by-authenticated-user": { + parameters: { + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["simple-user"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + "users/check-person-is-followed-by-authenticated": { + parameters: { + path: { + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** if the person is followed by the authenticated user */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + /** if the person is not followed by the authenticated user */ + 404: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + }; + }; + /** + * Note that you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." 
+ * + * Following a user requires the user to be logged in and authenticated with basic auth or OAuth with the `user:follow` scope. + */ + "users/follow": { + parameters: { + path: { + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Unfollowing a user requires the user to be logged in and authenticated with basic auth or OAuth with the `user:follow` scope. */ + "users/unfollow": { + parameters: { + path: { + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Lists the current user's GPG keys. Requires that you are authenticated via Basic Auth or via OAuth with at least `read:gpg_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). */ + "users/list-gpg-keys-for-authenticated-user": { + parameters: { + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["gpg-key"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Adds a GPG key to the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth, or OAuth with at least `write:gpg_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). */ + "users/create-gpg-key-for-authenticated-user": { + parameters: {}; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["gpg-key"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** @description A descriptive name for the new key. */ + name?: string; + /** @description A GPG key in ASCII-armored format. */ + armored_public_key: string; + }; + }; + }; + }; + /** View extended details for a single GPG key. Requires that you are authenticated via Basic Auth or via OAuth with at least `read:gpg_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). */ + "users/get-gpg-key-for-authenticated-user": { + parameters: { + path: { + /** The unique identifier of the GPG key. 
*/ + gpg_key_id: components["parameters"]["gpg-key-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["gpg-key"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Removes a GPG key from the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth or via OAuth with at least `admin:gpg_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). */ + "users/delete-gpg-key-for-authenticated-user": { + parameters: { + path: { + /** The unique identifier of the GPG key. */ + gpg_key_id: components["parameters"]["gpg-key-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Lists installations of your GitHub App that the authenticated user has explicit permission (`:read`, `:write`, or `:admin`) to access. + * + * You must use a [user-to-server OAuth access token](https://docs.github.com/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps/#identifying-users-on-your-site), created for a user who has authorized your GitHub App, to access this endpoint. + * + * The authenticated user has explicit permission to access repositories they own, repositories where they are a collaborator, and repositories that they can access through an organization membership. + * + * You can find the permissions for the installation under the `permissions` key. + */ + "apps/list-installations-for-authenticated-user": { + parameters: { + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** You can find the permissions for the installation under the `permissions` key. */ + 200: { + headers: {}; + content: { + "application/json": { + total_count: number; + installations: components["schemas"]["installation"][]; + }; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + /** + * List repositories that the authenticated user has explicit permission (`:read`, `:write`, or `:admin`) to access for an installation. + * + * The authenticated user has explicit permission to access repositories they own, repositories where they are a collaborator, and repositories that they can access through an organization membership. + * + * You must use a [user-to-server OAuth access token](https://docs.github.com/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps/#identifying-users-on-your-site), created for a user who has authorized your GitHub App, to access this endpoint. + * + * The access the user has to each repository is included in the hash under the `permissions` key. + */ + "apps/list-installation-repos-for-authenticated-user": { + parameters: { + path: { + /** The unique identifier of the installation. 
*/ + installation_id: components["parameters"]["installation-id"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** The access the user has to each repository is included in the hash under the `permissions` key. */ + 200: { + headers: {}; + content: { + "application/json": { + total_count: number; + repository_selection?: string; + repositories: components["schemas"]["repository"][]; + }; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Add a single repository to an installation. The authenticated user must have admin access to the repository. + * + * You must use a personal access token (which you can create via the [command line](https://docs.github.com/github/authenticating-to-github/creating-a-personal-access-token) or [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication)) to access this endpoint. + */ + "apps/add-repo-to-installation-for-authenticated-user": { + parameters: { + path: { + /** The unique identifier of the installation. */ + installation_id: components["parameters"]["installation-id"]; + /** The unique identifier of the repository. */ + repository_id: components["parameters"]["repository-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Remove a single repository from an installation. The authenticated user must have admin access to the repository. + * + * You must use a personal access token (which you can create via the [command line](https://docs.github.com/github/authenticating-to-github/creating-a-personal-access-token) or [Basic Authentication](https://docs.github.com/rest/overview/other-authentication-methods#basic-authentication)) to access this endpoint. + */ + "apps/remove-repo-from-installation-for-authenticated-user": { + parameters: { + path: { + /** The unique identifier of the installation. */ + installation_id: components["parameters"]["installation-id"]; + /** The unique identifier of the repository. */ + repository_id: components["parameters"]["repository-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Shows which type of GitHub user can interact with your public repositories and when the restriction expires. */ + "interactions/get-restrictions-for-authenticated-user": { + responses: { + /** Default response */ + 200: { + content: { + "application/json": Partial< + components["schemas"]["interaction-limit-response"] + > & + Partial<{ [key: string]: unknown }>; + }; + }; + /** Response when there are no restrictions */ + 204: never; + }; + }; + /** Temporarily restricts which type of GitHub user can interact with your public repositories. Setting the interaction limit at the user level will overwrite any interaction limits that are set for individual repositories owned by the user. 
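+ *
+ * An illustrative call with Octokit.js; the token is a placeholder and the `limit`
+ * and `expiry` values are examples only (see the `interaction-limit` schema for the
+ * accepted values):
+ *
+ * ```
+ * const { Octokit } = require("@octokit/core");
+ * const octokit = new Octokit({ auth: "<personal-access-token>" });
+ *
+ * octokit
+ *   .request("PUT /user/interaction-limits", {
+ *     limit: "contributors_only",  // who may interact with your public repos
+ *     expiry: "one_week",          // how long the restriction lasts
+ *   })
+ *   .then(({ data }) => console.log(data));
+ * ```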
*/ + "interactions/set-restrictions-for-authenticated-user": { + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["interaction-limit-response"]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": components["schemas"]["interaction-limit"]; + }; + }; + }; + /** Removes any interaction restrictions from your public repositories. */ + "interactions/remove-restrictions-for-authenticated-user": { + responses: { + /** Response */ + 204: never; + }; + }; + /** + * List issues across owned and member repositories assigned to the authenticated user. + * + * **Note**: GitHub's REST API v3 considers every pull request an issue, but not every issue is a pull request. For this + * reason, "Issues" endpoints may return both issues and pull requests in the response. You can identify pull requests by + * the `pull_request` key. Be aware that the `id` of a pull request returned from "Issues" endpoints will be an _issue id_. To find out the pull + * request id, use the "[List pull requests](https://docs.github.com/rest/reference/pulls#list-pull-requests)" endpoint. + */ + "issues/list-for-authenticated-user": { + parameters: { + query: { + /** Indicates which sorts of issues to return. `assigned` means issues assigned to you. `created` means issues created by you. `mentioned` means issues mentioning you. `subscribed` means issues you're subscribed to updates for. `all` or `repos` means all issues you can see, regardless of participation or creation. */ + filter?: + | "assigned" + | "created" + | "mentioned" + | "subscribed" + | "repos" + | "all"; + /** Indicates the state of the issues to return. */ + state?: "open" | "closed" | "all"; + /** A list of comma separated label names. Example: `bug,ui,@high` */ + labels?: components["parameters"]["labels"]; + /** What to sort results by. */ + sort?: "created" | "updated" | "comments"; + /** The direction to sort the results by. */ + direction?: components["parameters"]["direction"]; + /** Only show notifications updated after the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ + since?: components["parameters"]["since"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["issue"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Lists the public SSH keys for the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth or via OAuth with at least `read:public_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). */ + "users/list-public-ssh-keys-for-authenticated-user": { + parameters: { + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. 
*/ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["key"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Adds a public SSH key to the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth, or OAuth with at least `write:public_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). */ + "users/create-public-ssh-key-for-authenticated-user": { + parameters: {}; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["key"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description A descriptive name for the new key. + * @example Personal MacBook Air + */ + title?: string; + /** @description The public SSH key to add to your GitHub account. */ + key: string; + }; + }; + }; + }; + /** View extended details for a single public SSH key. Requires that you are authenticated via Basic Auth or via OAuth with at least `read:public_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). */ + "users/get-public-ssh-key-for-authenticated-user": { + parameters: { + path: { + /** The unique identifier of the key. */ + key_id: components["parameters"]["key-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["key"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Removes a public SSH key from the authenticated user's GitHub account. Requires that you are authenticated via Basic Auth or via OAuth with at least `admin:public_key` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/). */ + "users/delete-public-ssh-key-for-authenticated-user": { + parameters: { + path: { + /** The unique identifier of the key. */ + key_id: components["parameters"]["key-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Lists the active subscriptions for the authenticated user. You must use a [user-to-server OAuth access token](https://docs.github.com/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps/#identifying-users-on-your-site), created for a user who has authorized your GitHub App, to access this endpoint. . OAuth Apps must authenticate using an [OAuth token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/). */ + "apps/list-subscriptions-for-authenticated-user": { + parameters: { + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. 
*/ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["user-marketplace-purchase"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Lists the active subscriptions for the authenticated user. You must use a [user-to-server OAuth access token](https://docs.github.com/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps/#identifying-users-on-your-site), created for a user who has authorized your GitHub App, to access this endpoint. . OAuth Apps must authenticate using an [OAuth token](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/). */ + "apps/list-subscriptions-for-authenticated-user-stubbed": { + parameters: { + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["user-marketplace-purchase"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + }; + }; + "orgs/list-memberships-for-authenticated-user": { + parameters: { + query: { + /** Indicates the state of the memberships to return. If not specified, the API returns both active and pending memberships. */ + state?: "active" | "pending"; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["org-membership"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed"]; + }; + }; + "orgs/get-membership-for-authenticated-user": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["org-membership"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + "orgs/update-membership-for-authenticated-user": { + parameters: { + path: { + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["org-membership"]; + }; + }; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The state that the membership should be in. Only `"active"` will be accepted. + * @enum {string} + */ + state: "active"; + }; + }; + }; + }; + /** Lists all migrations a user has started. */ + "migrations/list-for-authenticated-user": { + parameters: { + query: { + /** The number of results per page (max 100). 
*/ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["migration"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + /** Initiates the generation of a user migration archive. */ + "migrations/start-for-authenticated-user": { + parameters: {}; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["migration"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description Lock the repositories being migrated at the start of the migration + * @example true + */ + lock_repositories?: boolean; + /** + * @description Indicates whether metadata should be excluded and only git source should be included for the migration. + * @example true + */ + exclude_metadata?: boolean; + /** + * @description Indicates whether the repository git data should be excluded from the migration. + * @example true + */ + exclude_git_data?: boolean; + /** + * @description Do not include attachments in the migration + * @example true + */ + exclude_attachments?: boolean; + /** + * @description Do not include releases in the migration + * @example true + */ + exclude_releases?: boolean; + /** + * @description Indicates whether projects owned by the organization or users should be excluded. + * @example true + */ + exclude_owner_projects?: boolean; + /** + * @description Indicates whether this should only include organization metadata (repositories array should be empty and will ignore other flags). + * @default false + * @example true + */ + org_metadata_only?: boolean; + /** + * @description Exclude attributes from the API response to improve performance + * @example [ + * "repositories" + * ] + */ + exclude?: "repositories"[]; + repositories: string[]; + }; + }; + }; + }; + /** + * Fetches a single user migration. The response includes the `state` of the migration, which can be one of the following values: + * + * * `pending` - the migration hasn't started yet. + * * `exporting` - the migration is in progress. + * * `exported` - the migration finished successfully. + * * `failed` - the migration failed. + * + * Once the migration has been `exported` you can [download the migration archive](https://docs.github.com/rest/reference/migrations#download-a-user-migration-archive). + */ + "migrations/get-status-for-authenticated-user": { + parameters: { + path: { + /** The unique identifier of the migration. */ + migration_id: components["parameters"]["migration-id"]; + }; + query: { + exclude?: string[]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["migration"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Fetches the URL to download the migration archive as a `tar.gz` file. 
Depending on the resources your repository uses, the migration archive can contain JSON files with data for these objects: + * + * * attachments + * * bases + * * commit\_comments + * * issue\_comments + * * issue\_events + * * issues + * * milestones + * * organizations + * * projects + * * protected\_branches + * * pull\_request\_reviews + * * pull\_requests + * * releases + * * repositories + * * review\_comments + * * schema + * * users + * + * The archive will also contain an `attachments` directory that includes all attachment files uploaded to GitHub.com and a `repositories` directory that contains the repository's Git data. + */ + "migrations/get-archive-for-authenticated-user": { + parameters: { + path: { + /** The unique identifier of the migration. */ + migration_id: components["parameters"]["migration-id"]; + }; + }; + responses: { + /** Response */ + 302: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + /** Deletes a previous migration archive. Downloadable migration archives are automatically deleted after seven days. Migration metadata, which is returned in the [List user migrations](https://docs.github.com/rest/reference/migrations#list-user-migrations) and [Get a user migration status](https://docs.github.com/rest/reference/migrations#get-a-user-migration-status) endpoints, will continue to be available even after an archive is deleted. */ + "migrations/delete-archive-for-authenticated-user": { + parameters: { + path: { + /** The unique identifier of the migration. */ + migration_id: components["parameters"]["migration-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Unlocks a repository. You can lock repositories when you [start a user migration](https://docs.github.com/rest/reference/migrations#start-a-user-migration). Once the migration is complete you can unlock each repository to begin using it again or [delete the repository](https://docs.github.com/rest/reference/repos#delete-a-repository) if you no longer need the source data. Returns a status of `404 Not Found` if the repository is not locked. */ + "migrations/unlock-repo-for-authenticated-user": { + parameters: { + path: { + /** The unique identifier of the migration. */ + migration_id: components["parameters"]["migration-id"]; + /** repo_name parameter */ + repo_name: components["parameters"]["repo-name"]; + }; + }; + responses: { + /** Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Lists all the repositories for this user migration. */ + "migrations/list-repos-for-authenticated-user": { + parameters: { + path: { + /** The unique identifier of the migration. */ + migration_id: components["parameters"]["migration-id"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. 
*/ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["minimal-repository"][]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** + * List organizations for the authenticated user. + * + * **OAuth scope requirements** + * + * This only lists organizations that your authorization allows you to operate on in some way (e.g., you can list teams with `read:org` scope, you can publicize your organization membership with `user` scope, etc.). Therefore, this API requires at least `user` or `read:org` scope. OAuth requests with insufficient scope receive a `403 Forbidden` response. + */ + "orgs/list-for-authenticated-user": { + parameters: { + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["organization-simple"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + /** + * Lists packages owned by the authenticated user within the user's namespace. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + "packages/list-packages-for-authenticated-user": { + parameters: { + query: { + /** The type of supported package. Packages in GitHub's Gradle registry have the type `maven`. Docker images pushed to GitHub's Container registry (`ghcr.io`) have the type `container`. You can use the type `docker` to find images that were pushed to GitHub's Docker registry (`docker.pkg.github.com`), even if these have now been migrated to the Container registry. */ + package_type: + | "npm" + | "maven" + | "rubygems" + | "docker" + | "nuget" + | "container"; + /** The selected visibility of the packages. Only `container` package_types currently support `internal` visibility properly. For other ecosystems `internal` is synonymous with `private`. This parameter is optional and only filters an existing result set. */ + visibility?: components["parameters"]["package-visibility"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["package"][]; + }; + }; + }; + }; + /** + * Gets a specific package for a package owned by the authenticated user. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + "packages/get-package-for-authenticated-user": { + parameters: { + path: { + /** The type of supported package. Packages in GitHub's Gradle registry have the type `maven`. Docker images pushed to GitHub's Container registry (`ghcr.io`) have the type `container`. You can use the type `docker` to find images that were pushed to GitHub's Docker registry (`docker.pkg.github.com`), even if these have now been migrated to the Container registry. */ + package_type: components["parameters"]["package-type"]; + /** The name of the package. 
*/ + package_name: components["parameters"]["package-name"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["package"]; + }; + }; + }; + }; + /** + * Deletes a package owned by the authenticated user. You cannot delete a public package if any version of the package has more than 5,000 downloads. In this scenario, contact GitHub support for further assistance. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` and `packages:delete` scopes. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + "packages/delete-package-for-authenticated-user": { + parameters: { + path: { + /** The type of supported package. Packages in GitHub's Gradle registry have the type `maven`. Docker images pushed to GitHub's Container registry (`ghcr.io`) have the type `container`. You can use the type `docker` to find images that were pushed to GitHub's Docker registry (`docker.pkg.github.com`), even if these have now been migrated to the Container registry. */ + package_type: components["parameters"]["package-type"]; + /** The name of the package. */ + package_name: components["parameters"]["package-name"]; + }; + }; + responses: { + /** Response */ + 204: never; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Restores a package owned by the authenticated user. + * + * You can restore a deleted package under the following conditions: + * - The package was deleted within the last 30 days. + * - The same package namespace and version is still available and not reused for a new package. If the same package namespace is not available, you will not be able to restore your package. In this scenario, to restore the deleted package, you must delete the new package that uses the deleted package's namespace first. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` and `packages:write` scopes. If `package_type` is not `container`, your token must also include the `repo` scope. + */ + "packages/restore-package-for-authenticated-user": { + parameters: { + path: { + /** The type of supported package. Packages in GitHub's Gradle registry have the type `maven`. Docker images pushed to GitHub's Container registry (`ghcr.io`) have the type `container`. You can use the type `docker` to find images that were pushed to GitHub's Docker registry (`docker.pkg.github.com`), even if these have now been migrated to the Container registry. */ + package_type: components["parameters"]["package-type"]; + /** The name of the package. */ + package_name: components["parameters"]["package-name"]; + }; + query: { + /** package token */ + token?: string; + }; + }; + responses: { + /** Response */ + 204: never; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Lists package versions for a package owned by the authenticated user. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + "packages/get-all-package-versions-for-package-owned-by-authenticated-user": { + parameters: { + path: { + /** The type of supported package. 
Packages in GitHub's Gradle registry have the type `maven`. Docker images pushed to GitHub's Container registry (`ghcr.io`) have the type `container`. You can use the type `docker` to find images that were pushed to GitHub's Docker registry (`docker.pkg.github.com`), even if these have now been migrated to the Container registry. */ + package_type: components["parameters"]["package-type"]; + /** The name of the package. */ + package_name: components["parameters"]["package-name"]; + }; + query: { + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** The state of the package, either active or deleted. */ + state?: "active" | "deleted"; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["package-version"][]; + }; + }; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Gets a specific package version for a package owned by the authenticated user. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + "packages/get-package-version-for-authenticated-user": { + parameters: { + path: { + /** The type of supported package. Packages in GitHub's Gradle registry have the type `maven`. Docker images pushed to GitHub's Container registry (`ghcr.io`) have the type `container`. You can use the type `docker` to find images that were pushed to GitHub's Docker registry (`docker.pkg.github.com`), even if these have now been migrated to the Container registry. */ + package_type: components["parameters"]["package-type"]; + /** The name of the package. */ + package_name: components["parameters"]["package-name"]; + /** Unique identifier of the package version. */ + package_version_id: components["parameters"]["package-version-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["package-version"]; + }; + }; + }; + }; + /** + * Deletes a specific package version for a package owned by the authenticated user. If the package is public and the package version has more than 5,000 downloads, you cannot delete the package version. In this scenario, contact GitHub support for further assistance. + * + * To use this endpoint, you must have admin permissions in the organization and authenticate using an access token with the `packages:read` and `packages:delete` scopes. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + "packages/delete-package-version-for-authenticated-user": { + parameters: { + path: { + /** The type of supported package. Packages in GitHub's Gradle registry have the type `maven`. Docker images pushed to GitHub's Container registry (`ghcr.io`) have the type `container`. You can use the type `docker` to find images that were pushed to GitHub's Docker registry (`docker.pkg.github.com`), even if these have now been migrated to the Container registry. */ + package_type: components["parameters"]["package-type"]; + /** The name of the package. */ + package_name: components["parameters"]["package-name"]; + /** Unique identifier of the package version. 
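// Editorial sketch, not part of the generated file: listing versions of one of
// the authenticated user's npm packages, matching the
// packages/get-all-package-versions-for-package-owned-by-authenticated-user
// operation above and its `state` / `per_page` query parameters. The package
// name and token are placeholders; global fetch assumes Node 18+.
async function listMyPackageVersions(packageName: string, token: string) {
  const url = new URL(`https://api.github.com/user/packages/npm/${packageName}/versions`);
  url.searchParams.set("state", "active"); // the query parameter documented above
  url.searchParams.set("per_page", "100");
  const res = await fetch(url, {
    headers: {
      Accept: "application/vnd.github+json",
      Authorization: `Bearer ${token}`,
    },
  });
  if (!res.ok) throw new Error(`GitHub API returned ${res.status}`);
  return (await res.json()) as Array<{ id: number; name: string }>; // subset of the package-version schema
}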
*/ + package_version_id: components["parameters"]["package-version-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Restores a package version owned by the authenticated user. + * + * You can restore a deleted package version under the following conditions: + * - The package was deleted within the last 30 days. + * - The same package namespace and version is still available and not reused for a new package. If the same package namespace is not available, you will not be able to restore your package. In this scenario, to restore the deleted package, you must delete the new package that uses the deleted package's namespace first. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` and `packages:write` scopes. If `package_type` is not `container`, your token must also include the `repo` scope. + */ + "packages/restore-package-version-for-authenticated-user": { + parameters: { + path: { + /** The type of supported package. Packages in GitHub's Gradle registry have the type `maven`. Docker images pushed to GitHub's Container registry (`ghcr.io`) have the type `container`. You can use the type `docker` to find images that were pushed to GitHub's Docker registry (`docker.pkg.github.com`), even if these have now been migrated to the Container registry. */ + package_type: components["parameters"]["package-type"]; + /** The name of the package. */ + package_name: components["parameters"]["package-name"]; + /** Unique identifier of the package version. */ + package_version_id: components["parameters"]["package-version-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Creates a user project board. Returns a `410 Gone` status if the user does not have existing classic projects. If you do not have sufficient privileges to perform this action, a `401 Unauthorized` or `410 Gone` status is returned. */ + "projects/create-for-authenticated-user": { + parameters: {}; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["project"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed_simple"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description Name of the project + * @example Week One Sprint + */ + name: string; + /** + * @description Body of the project + * @example This project represents the sprint of the first week in January + */ + body?: string | null; + }; + }; + }; + }; + /** Lists your publicly visible email address, which you can set with the [Set primary email visibility for the authenticated user](https://docs.github.com/rest/reference/users#set-primary-email-visibility-for-the-authenticated-user) endpoint. This endpoint is accessible with the `user:email` scope. */ + "users/list-public-emails-for-authenticated-user": { + parameters: { + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. 
*/ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["email"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Lists repositories that the authenticated user has explicit permission (`:read`, `:write`, or `:admin`) to access. + * + * The authenticated user has explicit permission to access repositories they own, repositories where they are a collaborator, and repositories that they can access through an organization membership. + */ + "repos/list-for-authenticated-user": { + parameters: { + query: { + /** Limit results to repositories with the specified visibility. */ + visibility?: "all" | "public" | "private"; + /** + * Comma-separated list of values. Can include: + * \* `owner`: Repositories that are owned by the authenticated user. + * \* `collaborator`: Repositories that the user has been added to as a collaborator. + * \* `organization_member`: Repositories that the user has access to through being a member of an organization. This includes every repository on every team that the user is on. + */ + affiliation?: string; + /** Limit results to repositories of the specified type. Will cause a `422` error if used in the same request as **visibility** or **affiliation**. */ + type?: "all" | "owner" | "public" | "private" | "member"; + /** The property to sort the results by. */ + sort?: "created" | "updated" | "pushed" | "full_name"; + /** The order to sort by. Default: `asc` when using `full_name`, otherwise `desc`. */ + direction?: "asc" | "desc"; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + /** Only show notifications updated after the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ + since?: components["parameters"]["since"]; + /** Only show notifications updated before the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ + before?: components["parameters"]["before"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["repository"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Creates a new repository for the authenticated user. + * + * **OAuth scope requirements** + * + * When using [OAuth](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/), authorizations must include: + * + * * `public_repo` scope or `repo` scope to create a public repository. Note: For GitHub AE, use `repo` scope to create an internal repository. + * * `repo` scope to create a private repository. 
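// Editorial sketch, not part of the generated file: a minimal call against the
// repos/create-for-authenticated-user operation documented here, using
// request-body fields from the schema that follows (name, private, auto_init).
// The token and repository name are placeholders; global fetch assumes Node 18+.
async function createRepoForAuthenticatedUser(token: string) {
  const res = await fetch("https://api.github.com/user/repos", {
    method: "POST",
    headers: {
      Accept: "application/vnd.github+json",
      Authorization: `Bearer ${token}`,
      "Content-Type": "application/json",
    },
    // Only `name` is required; the other fields mirror defaults shown in the schema below.
    body: JSON.stringify({ name: "team-environment", private: true, auto_init: true }),
  });
  if (res.status !== 201) throw new Error(`Repository was not created: HTTP ${res.status}`);
  return res.json(); // matches the `repository` schema referenced in the 201 response
}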
+ */ + "repos/create-for-authenticated-user": { + parameters: {}; + responses: { + /** Response */ + 201: { + headers: { + Location?: string; + }; + content: { + "application/json": components["schemas"]["repository"]; + }; + }; + 304: components["responses"]["not_modified"]; + 400: components["responses"]["bad_request"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description The name of the repository. + * @example Team Environment + */ + name: string; + /** @description A short description of the repository. */ + description?: string; + /** @description A URL with more information about the repository. */ + homepage?: string; + /** + * @description Whether the repository is private. + * @default false + */ + private?: boolean; + /** + * @description Whether issues are enabled. + * @default true + * @example true + */ + has_issues?: boolean; + /** + * @description Whether projects are enabled. + * @default true + * @example true + */ + has_projects?: boolean; + /** + * @description Whether the wiki is enabled. + * @default true + * @example true + */ + has_wiki?: boolean; + /** @description The id of the team that will be granted access to this repository. This is only valid when creating a repository in an organization. */ + team_id?: number; + /** + * @description Whether the repository is initialized with a minimal README. + * @default false + */ + auto_init?: boolean; + /** + * @description The desired language or platform to apply to the .gitignore. + * @example Haskell + */ + gitignore_template?: string; + /** + * @description The license keyword of the open source license for this repository. + * @example mit + */ + license_template?: string; + /** + * @description Whether to allow squash merges for pull requests. + * @default true + * @example true + */ + allow_squash_merge?: boolean; + /** + * @description Whether to allow merge commits for pull requests. + * @default true + * @example true + */ + allow_merge_commit?: boolean; + /** + * @description Whether to allow rebase merges for pull requests. + * @default true + * @example true + */ + allow_rebase_merge?: boolean; + /** + * @description Whether to allow Auto-merge to be used on pull requests. + * @default false + * @example false + */ + allow_auto_merge?: boolean; + /** + * @description Whether to delete head branches when pull requests are merged + * @default false + * @example false + */ + delete_branch_on_merge?: boolean; + /** + * @description The default value for a squash merge commit title: + * + * - `PR_TITLE` - default to the pull request's title. + * - `COMMIT_OR_PR_TITLE` - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). + * @enum {string} + */ + squash_merge_commit_title?: "PR_TITLE" | "COMMIT_OR_PR_TITLE"; + /** + * @description The default value for a squash merge commit message: + * + * - `PR_BODY` - default to the pull request's body. + * - `COMMIT_MESSAGES` - default to the branch's commit messages. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + squash_merge_commit_message?: "PR_BODY" | "COMMIT_MESSAGES" | "BLANK"; + /** + * @description The default value for a merge commit title. + * + * - `PR_TITLE` - default to the pull request's title. 
+ * - `MERGE_MESSAGE` - default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name). + * @enum {string} + */ + merge_commit_title?: "PR_TITLE" | "MERGE_MESSAGE"; + /** + * @description The default value for a merge commit message. + * + * - `PR_TITLE` - default to the pull request's title. + * - `PR_BODY` - default to the pull request's body. + * - `BLANK` - default to a blank commit message. + * @enum {string} + */ + merge_commit_message?: "PR_BODY" | "PR_TITLE" | "BLANK"; + /** + * @description Whether downloads are enabled. + * @default true + * @example true + */ + has_downloads?: boolean; + /** + * @description Whether this repository acts as a template that can be used to generate new repositories. + * @default false + * @example true + */ + is_template?: boolean; + }; + }; + }; + }; + /** When authenticating as a user, this endpoint will list all currently open repository invitations for that user. */ + "repos/list-invitations-for-authenticated-user": { + parameters: { + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["repository-invitation"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + "repos/decline-invitation-for-authenticated-user": { + parameters: { + path: { + /** The unique identifier of the invitation. */ + invitation_id: components["parameters"]["invitation-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 409: components["responses"]["conflict"]; + }; + }; + "repos/accept-invitation-for-authenticated-user": { + parameters: { + path: { + /** The unique identifier of the invitation. */ + invitation_id: components["parameters"]["invitation-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 409: components["responses"]["conflict"]; + }; + }; + /** Lists the SSH signing keys for the authenticated user's GitHub account. You must authenticate with Basic Authentication, or you must authenticate with OAuth with at least `read:ssh_signing_key` scope. For more information, see "[Understanding scopes for OAuth apps](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/)." */ + "users/list-ssh-signing-keys-for-authenticated-user": { + parameters: { + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. 
*/ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["ssh-signing-key"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Creates an SSH signing key for the authenticated user's GitHub account. You must authenticate with Basic Authentication, or you must authenticate with OAuth with at least `write:ssh_signing_key` scope. For more information, see "[Understanding scopes for OAuth apps](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/)." */ + "users/create-ssh-signing-key-for-authenticated-user": { + parameters: {}; + responses: { + /** Response */ + 201: { + content: { + "application/json": components["schemas"]["ssh-signing-key"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + requestBody: { + content: { + "application/json": { + /** + * @description A descriptive name for the new key. + * @example Personal MacBook Air + */ + title?: string; + /** @description The public SSH key to add to your GitHub account. For more information, see "[Checking for existing SSH keys](https://docs.github.com/authentication/connecting-to-github-with-ssh/checking-for-existing-ssh-keys)." */ + key: string; + }; + }; + }; + }; + /** Gets extended details for an SSH signing key. You must authenticate with Basic Authentication, or you must authenticate with OAuth with at least `read:ssh_signing_key` scope. For more information, see "[Understanding scopes for OAuth apps](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/)." */ + "users/get-ssh-signing-key-for-authenticated-user": { + parameters: { + path: { + /** The unique identifier of the SSH signing key. */ + ssh_signing_key_id: components["parameters"]["ssh-signing-key-id"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["ssh-signing-key"]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Deletes an SSH signing key from the authenticated user's GitHub account. You must authenticate with Basic Authentication, or you must authenticate with OAuth with at least `admin:ssh_signing_key` scope. For more information, see "[Understanding scopes for OAuth apps](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/)." */ + "users/delete-ssh-signing-key-for-authenticated-user": { + parameters: { + path: { + /** The unique identifier of the SSH signing key. */ + ssh_signing_key_id: components["parameters"]["ssh-signing-key-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Lists repositories the authenticated user has starred. 
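// Editorial sketch, not part of the generated file: listing the authenticated
// user's starred repositories with the `star+json` media type noted below, so
// each entry also carries `starred_at`. The token is a placeholder; global
// fetch assumes Node 18+.
async function listStarredWithTimestamps(token: string) {
  const res = await fetch("https://api.github.com/user/starred?per_page=50&sort=created", {
    headers: {
      // This media type switches the response to the starred-repository schema.
      Accept: "application/vnd.github.star+json",
      Authorization: `Bearer ${token}`,
    },
  });
  if (!res.ok) throw new Error(`GitHub API returned ${res.status}`);
  return (await res.json()) as Array<{ starred_at: string; repo: { full_name: string } }>;
}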
+ * + * You can also find out _when_ stars were created by passing the following custom [media type](https://docs.github.com/rest/overview/media-types/) via the `Accept` header: `application/vnd.github.star+json`. + */ + "activity/list-repos-starred-by-authenticated-user": { + parameters: { + query: { + /** The property to sort the results by. `created` means when the repository was starred. `updated` means when the repository was last pushed to. */ + sort?: components["parameters"]["sort"]; + /** The direction to sort the results by. */ + direction?: components["parameters"]["direction"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["repository"][]; + "application/vnd.github.v3.star+json": components["schemas"]["starred-repository"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + "activity/check-repo-is-starred-by-authenticated-user": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response if this repository is starred by you */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + /** Not Found if this repository is not starred by you */ + 404: { + content: { + "application/json": components["schemas"]["basic-error"]; + }; + }; + }; + }; + /** Note that you'll need to set `Content-Length` to zero when calling out to this endpoint. For more information, see "[HTTP verbs](https://docs.github.com/rest/overview/resources-in-the-rest-api#http-verbs)." */ + "activity/star-repo-for-authenticated-user": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + "activity/unstar-repo-for-authenticated-user": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + }; + }; + responses: { + /** Response */ + 204: never; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** Lists repositories the authenticated user is watching. */ + "activity/list-watched-repos-for-authenticated-user": { + parameters: { + query: { + /** The number of results per page (max 100). 
*/ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["minimal-repository"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + /** List all of the teams across all of the organizations to which the authenticated user belongs. This method requires `user`, `repo`, or `read:org` [scope](https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/) when authenticating via [OAuth](https://docs.github.com/apps/building-oauth-apps/). */ + "teams/list-for-authenticated-user": { + parameters: { + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["team-full"][]; + }; + }; + 304: components["responses"]["not_modified"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Lists all users, in the order that they signed up on GitHub. This list includes personal user accounts and organization accounts. + * + * Note: Pagination is powered exclusively by the `since` parameter. Use the [Link header](https://docs.github.com/rest/overview/resources-in-the-rest-api#link-header) to get the URL for the next page of users. + */ + "users/list": { + parameters: { + query: { + /** A user ID. Only return users with an ID greater than this ID. */ + since?: components["parameters"]["since-user"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: { + Link?: string; + }; + content: { + "application/json": components["schemas"]["simple-user"][]; + }; + }; + 304: components["responses"]["not_modified"]; + }; + }; + /** + * Provides publicly available information about someone with a GitHub account. + * + * GitHub Apps with the `Plan` user permission can use this endpoint to retrieve information about a user's GitHub plan. The GitHub App must be authenticated as a user. See "[Identifying and authorizing users for GitHub Apps](https://docs.github.com/apps/building-github-apps/identifying-and-authorizing-users-for-github-apps/)" for details about authentication. For an example response, see 'Response with GitHub plan information' below" + * + * The `email` key in the following response is the publicly visible email address from your GitHub [profile page](https://github.com/settings/profile). When setting up your profile, you can select a primary email address to be “public” which provides an email entry for this endpoint. If you do not set a public email address for `email`, then it will have a value of `null`. You only see publicly visible email addresses when authenticated with GitHub. For more information, see [Authentication](https://docs.github.com/rest/overview/resources-in-the-rest-api#authentication). + * + * The Emails API enables you to list all of your email addresses, and toggle a primary email to be visible publicly. For more information, see "[Emails API](https://docs.github.com/rest/reference/users#emails)". 
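// Editorial sketch, not part of the generated file: a plain call to
// users/get-by-username. As the description above notes, `email` is only the
// publicly visible address and may be null. The username is a placeholder;
// global fetch assumes Node 18+ and no authentication (public data only).
async function getPublicProfile(username: string) {
  const res = await fetch(`https://api.github.com/users/${username}`, {
    headers: { Accept: "application/vnd.github+json" },
  });
  if (res.status === 404) return null; // matches the not_found response in the types
  return (await res.json()) as { login: string; email: string | null; plan?: { name: string } };
}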
+ */ + "users/get-by-username": { + parameters: { + path: { + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": + | components["schemas"]["private-user"] + | components["schemas"]["public-user"]; + }; + }; + 404: components["responses"]["not_found"]; + }; + }; + /** If you are authenticated as the given user, you will see your private events. Otherwise, you'll only see public events. */ + "activity/list-events-for-authenticated-user": { + parameters: { + path: { + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["event"][]; + }; + }; + }; + }; + /** This is the user's organization dashboard. You must be authenticated as the user to view this. */ + "activity/list-org-events-for-authenticated-user": { + parameters: { + path: { + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + /** The organization name. The name is not case sensitive. */ + org: components["parameters"]["org"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["event"][]; + }; + }; + }; + }; + "activity/list-public-events-for-user": { + parameters: { + path: { + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["event"][]; + }; + }; + }; + }; + /** Lists the people following the specified user. */ + "users/list-followers-for-user": { + parameters: { + path: { + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["simple-user"][]; + }; + }; + }; + }; + /** Lists the people who the specified user follows. */ + "users/list-following-for-user": { + parameters: { + path: { + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. 
*/ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["simple-user"][]; + }; + }; + }; + }; + "users/check-following-for-user": { + parameters: { + path: { + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + target_user: string; + }; + }; + responses: { + /** if the user follows the target user */ + 204: never; + /** if the user does not follow the target user */ + 404: unknown; + }; + }; + /** Lists public gists for the specified user: */ + "gists/list-for-user": { + parameters: { + path: { + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + query: { + /** Only show notifications updated after the given time. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. */ + since?: components["parameters"]["since"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["base-gist"][]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + }; + /** Lists the GPG keys for a user. This information is accessible by anyone. */ + "users/list-gpg-keys-for-user": { + parameters: { + path: { + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["gpg-key"][]; + }; + }; + }; + }; + /** + * Provides hovercard information when authenticated through basic auth or OAuth with the `repo` scope. You can find out more about someone in relation to their pull requests, issues, repositories, and organizations. + * + * The `subject_type` and `subject_id` parameters provide context for the person's hovercard, which returns more information than without the parameters. For example, if you wanted to find out more about `octocat` who owns the `Spoon-Knife` repository via cURL, it would look like this: + * + * ```shell + * curl -u username:token + * https://api.github.com/users/octocat/hovercard?subject_type=repository&subject_id=1300192 + * ``` + */ + "users/get-context-for-user": { + parameters: { + path: { + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + query: { + /** Identifies which additional information you'd like to receive about the person's hovercard. Can be `organization`, `repository`, `issue`, `pull_request`. **Required** when using `subject_id`. */ + subject_type?: "organization" | "repository" | "issue" | "pull_request"; + /** Uses the ID for the `subject_type` you specified. **Required** when using `subject_type`. 
*/ + subject_id?: string; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["hovercard"]; + }; + }; + 404: components["responses"]["not_found"]; + 422: components["responses"]["validation_failed"]; + }; + }; + /** + * Enables an authenticated GitHub App to find the user’s installation information. + * + * You must use a [JWT](https://docs.github.com/apps/building-github-apps/authenticating-with-github-apps/#authenticating-as-a-github-app) to access this endpoint. + */ + "apps/get-user-installation": { + parameters: { + path: { + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["installation"]; + }; + }; + }; + }; + /** Lists the _verified_ public SSH keys for a user. This is accessible by anyone. */ + "users/list-public-keys-for-user": { + parameters: { + path: { + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["key-simple"][]; + }; + }; + }; + }; + /** + * List [public organization memberships](https://docs.github.com/articles/publicizing-or-concealing-organization-membership) for the specified user. + * + * This method only lists _public_ memberships, regardless of authentication. If you need to fetch all of the organization memberships (public and private) for the authenticated user, use the [List organizations for the authenticated user](https://docs.github.com/rest/reference/orgs#list-organizations-for-the-authenticated-user) API instead. + */ + "orgs/list-for-user": { + parameters: { + path: { + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["organization-simple"][]; + }; + }; + }; + }; + /** + * Lists all packages in a user's namespace for which the requesting user has access. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + "packages/list-packages-for-user": { + parameters: { + query: { + /** The type of supported package. Packages in GitHub's Gradle registry have the type `maven`. Docker images pushed to GitHub's Container registry (`ghcr.io`) have the type `container`. You can use the type `docker` to find images that were pushed to GitHub's Docker registry (`docker.pkg.github.com`), even if these have now been migrated to the Container registry. */ + package_type: + | "npm" + | "maven" + | "rubygems" + | "docker" + | "nuget" + | "container"; + /** The selected visibility of the packages. Only `container` package_types currently support `internal` visibility properly. For other ecosystems `internal` is synonymous with `private`. 
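// Editorial sketch, not part of the generated file: walking the paginated
// orgs/list-for-user operation above with the shared per_page/page parameters,
// using the Link response header to detect the last page. The username is a
// placeholder; global fetch assumes Node 18+.
async function listAllPublicOrgs(username: string) {
  const orgs: Array<{ login: string }> = [];
  for (let page = 1; ; page++) {
    const res = await fetch(
      `https://api.github.com/users/${username}/orgs?per_page=100&page=${page}`,
      { headers: { Accept: "application/vnd.github+json" } }
    );
    if (!res.ok) throw new Error(`GitHub API returned ${res.status}`);
    orgs.push(...((await res.json()) as Array<{ login: string }>));
    // Stop when there is no rel="next" link, i.e. this was the last page.
    const link = res.headers.get("link");
    if (!link || !link.includes('rel="next"')) break;
  }
  return orgs;
}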
This parameter is optional and only filters an existing result set. */ + visibility?: components["parameters"]["package-visibility"]; + }; + path: { + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["package"][]; + }; + }; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + }; + }; + /** + * Gets a specific package metadata for a public package owned by a user. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + "packages/get-package-for-user": { + parameters: { + path: { + /** The type of supported package. Packages in GitHub's Gradle registry have the type `maven`. Docker images pushed to GitHub's Container registry (`ghcr.io`) have the type `container`. You can use the type `docker` to find images that were pushed to GitHub's Docker registry (`docker.pkg.github.com`), even if these have now been migrated to the Container registry. */ + package_type: components["parameters"]["package-type"]; + /** The name of the package. */ + package_name: components["parameters"]["package-name"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["package"]; + }; + }; + }; + }; + /** + * Deletes an entire package for a user. You cannot delete a public package if any version of the package has more than 5,000 downloads. In this scenario, contact GitHub support for further assistance. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` and `packages:delete` scopes. In addition: + * - If `package_type` is not `container`, your token must also include the `repo` scope. + * - If `package_type` is `container`, you must also have admin permissions to the container you want to delete. + */ + "packages/delete-package-for-user": { + parameters: { + path: { + /** The type of supported package. Packages in GitHub's Gradle registry have the type `maven`. Docker images pushed to GitHub's Container registry (`ghcr.io`) have the type `container`. You can use the type `docker` to find images that were pushed to GitHub's Docker registry (`docker.pkg.github.com`), even if these have now been migrated to the Container registry. */ + package_type: components["parameters"]["package-type"]; + /** The name of the package. */ + package_name: components["parameters"]["package-name"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 204: never; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Restores an entire package for a user. + * + * You can restore a deleted package under the following conditions: + * - The package was deleted within the last 30 days. + * - The same package namespace and version is still available and not reused for a new package. If the same package namespace is not available, you will not be able to restore your package. 
In this scenario, to restore the deleted package, you must delete the new package that uses the deleted package's namespace first. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` and `packages:write` scopes. In addition: + * - If `package_type` is not `container`, your token must also include the `repo` scope. + * - If `package_type` is `container`, you must also have admin permissions to the container that you want to restore. + */ + "packages/restore-package-for-user": { + parameters: { + path: { + /** The type of supported package. Packages in GitHub's Gradle registry have the type `maven`. Docker images pushed to GitHub's Container registry (`ghcr.io`) have the type `container`. You can use the type `docker` to find images that were pushed to GitHub's Docker registry (`docker.pkg.github.com`), even if these have now been migrated to the Container registry. */ + package_type: components["parameters"]["package-type"]; + /** The name of the package. */ + package_name: components["parameters"]["package-name"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + query: { + /** package token */ + token?: string; + }; + }; + responses: { + /** Response */ + 204: never; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Lists package versions for a public package owned by a specified user. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + "packages/get-all-package-versions-for-package-owned-by-user": { + parameters: { + path: { + /** The type of supported package. Packages in GitHub's Gradle registry have the type `maven`. Docker images pushed to GitHub's Container registry (`ghcr.io`) have the type `container`. You can use the type `docker` to find images that were pushed to GitHub's Docker registry (`docker.pkg.github.com`), even if these have now been migrated to the Container registry. */ + package_type: components["parameters"]["package-type"]; + /** The name of the package. */ + package_name: components["parameters"]["package-name"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["package-version"][]; + }; + }; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Gets a specific package version for a public package owned by a specified user. + * + * At this time, to use this endpoint, you must authenticate using an access token with the `packages:read` scope. + * If `package_type` is not `container`, your token must also include the `repo` scope. + */ + "packages/get-package-version-for-user": { + parameters: { + path: { + /** The type of supported package. Packages in GitHub's Gradle registry have the type `maven`. Docker images pushed to GitHub's Container registry (`ghcr.io`) have the type `container`. You can use the type `docker` to find images that were pushed to GitHub's Docker registry (`docker.pkg.github.com`), even if these have now been migrated to the Container registry. 
*/ + package_type: components["parameters"]["package-type"]; + /** The name of the package. */ + package_name: components["parameters"]["package-name"]; + /** Unique identifier of the package version. */ + package_version_id: components["parameters"]["package-version-id"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["package-version"]; + }; + }; + }; + }; + /** + * Deletes a specific package version for a user. If the package is public and the package version has more than 5,000 downloads, you cannot delete the package version. In this scenario, contact GitHub support for further assistance. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` and `packages:delete` scopes. In addition: + * - If `package_type` is not `container`, your token must also include the `repo` scope. + * - If `package_type` is `container`, you must also have admin permissions to the container you want to delete. + */ + "packages/delete-package-version-for-user": { + parameters: { + path: { + /** The type of supported package. Packages in GitHub's Gradle registry have the type `maven`. Docker images pushed to GitHub's Container registry (`ghcr.io`) have the type `container`. You can use the type `docker` to find images that were pushed to GitHub's Docker registry (`docker.pkg.github.com`), even if these have now been migrated to the Container registry. */ + package_type: components["parameters"]["package-type"]; + /** The name of the package. */ + package_name: components["parameters"]["package-name"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + /** Unique identifier of the package version. */ + package_version_id: components["parameters"]["package-version-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + /** + * Restores a specific package version for a user. + * + * You can restore a deleted package under the following conditions: + * - The package was deleted within the last 30 days. + * - The same package namespace and version is still available and not reused for a new package. If the same package namespace is not available, you will not be able to restore your package. In this scenario, to restore the deleted package, you must delete the new package that uses the deleted package's namespace first. + * + * To use this endpoint, you must authenticate using an access token with the `packages:read` and `packages:write` scopes. In addition: + * - If `package_type` is not `container`, your token must also include the `repo` scope. + * - If `package_type` is `container`, you must also have admin permissions to the container that you want to restore. + */ + "packages/restore-package-version-for-user": { + parameters: { + path: { + /** The type of supported package. Packages in GitHub's Gradle registry have the type `maven`. Docker images pushed to GitHub's Container registry (`ghcr.io`) have the type `container`. You can use the type `docker` to find images that were pushed to GitHub's Docker registry (`docker.pkg.github.com`), even if these have now been migrated to the Container registry. */ + package_type: components["parameters"]["package-type"]; + /** The name of the package. 
*/ + package_name: components["parameters"]["package-name"]; + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + /** Unique identifier of the package version. */ + package_version_id: components["parameters"]["package-version-id"]; + }; + }; + responses: { + /** Response */ + 204: never; + 401: components["responses"]["requires_authentication"]; + 403: components["responses"]["forbidden"]; + 404: components["responses"]["not_found"]; + }; + }; + "projects/list-for-user": { + parameters: { + path: { + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + query: { + /** Indicates the state of the projects to return. */ + state?: "open" | "closed" | "all"; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["project"][]; + }; + }; + 422: components["responses"]["validation_failed"]; + }; + }; + /** These are events that you've received by watching repos and following users. If you are authenticated as the given user, you will see private events. Otherwise, you'll only see public events. */ + "activity/list-received-events-for-user": { + parameters: { + path: { + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["event"][]; + }; + }; + }; + }; + "activity/list-received-public-events-for-user": { + parameters: { + path: { + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["event"][]; + }; + }; + }; + }; + /** Lists public repositories for the specified user. Note: For GitHub AE, this endpoint will list internal repositories for the specified user. */ + "repos/list-for-user": { + parameters: { + path: { + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + query: { + /** Limit results to repositories of the specified type. */ + type?: "all" | "owner" | "member"; + /** The property to sort the results by. */ + sort?: "created" | "updated" | "pushed" | "full_name"; + /** The order to sort by. Default: `asc` when using `full_name`, otherwise `desc`. */ + direction?: "asc" | "desc"; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["minimal-repository"][]; + }; + }; + }; + }; + /** + * Gets the summary of the free and paid GitHub Actions minutes used. 
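// Editorial sketch, not part of the generated file: reading the GitHub Actions
// billing summary described here for a user. The surrounding description notes
// the token needs the `user` scope; the username and token are placeholders and
// global fetch assumes Node 18+.
async function getActionsMinutesUsed(username: string, token: string) {
  const res = await fetch(`https://api.github.com/users/${username}/settings/billing/actions`, {
    headers: {
      Accept: "application/vnd.github+json",
      Authorization: `Bearer ${token}`,
    },
  });
  if (!res.ok) throw new Error(`GitHub API returned ${res.status}`);
  // Fields below are a subset of the actions-billing-usage schema.
  return (await res.json()) as { total_minutes_used: number; included_minutes: number };
}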
+ * + * Paid minutes only apply to workflows in private repositories that use GitHub-hosted runners. Minutes used is listed for each GitHub-hosted runner operating system. Any job re-runs are also included in the usage. The usage returned includes any minute multipliers for macOS and Windows runners, and is rounded up to the nearest whole minute. For more information, see "[Managing billing for GitHub Actions](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-actions)". + * + * Access tokens must have the `user` scope. + */ + "billing/get-github-actions-billing-user": { + parameters: { + path: { + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["actions-billing-usage"]; + }; + }; + }; + }; + /** + * Gets the free and paid storage used for GitHub Packages in gigabytes. + * + * Paid minutes only apply to packages stored for private repositories. For more information, see "[Managing billing for GitHub Packages](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-packages)." + * + * Access tokens must have the `user` scope. + */ + "billing/get-github-packages-billing-user": { + parameters: { + path: { + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["packages-billing-usage"]; + }; + }; + }; + }; + /** + * Gets the estimated paid and estimated total storage used for GitHub Actions and GitHub Packages. + * + * Paid minutes only apply to packages stored for private repositories. For more information, see "[Managing billing for GitHub Packages](https://docs.github.com/github/setting-up-and-managing-billing-and-payments-on-github/managing-billing-for-github-packages)." + * + * Access tokens must have the `user` scope. + */ + "billing/get-shared-storage-billing-user": { + parameters: { + path: { + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["combined-billing-usage"]; + }; + }; + }; + }; + /** Lists the SSH signing keys for a user. This operation is accessible by anyone. */ + "users/list-ssh-signing-keys-for-user": { + parameters: { + path: { + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["ssh-signing-key"][]; + }; + }; + }; + }; + /** + * Lists repositories a user has starred. + * + * You can also find out _when_ stars were created by passing the following custom [media type](https://docs.github.com/rest/overview/media-types/) via the `Accept` header: `application/vnd.github.star+json`. + */ + "activity/list-repos-starred-by-user": { + parameters: { + path: { + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + query: { + /** The property to sort the results by. 
`created` means when the repository was starred. `updated` means when the repository was last pushed to. */ + sort?: components["parameters"]["sort"]; + /** The direction to sort the results by. */ + direction?: components["parameters"]["direction"]; + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": Partial< + components["schemas"]["starred-repository"][] + > & + Partial; + }; + }; + }; + }; + /** Lists repositories a user is watching. */ + "activity/list-repos-watched-by-user": { + parameters: { + path: { + /** The handle for the GitHub user account. */ + username: components["parameters"]["username"]; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. */ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + headers: {}; + content: { + "application/json": components["schemas"]["minimal-repository"][]; + }; + }; + }; + }; + /** Get a random sentence from the Zen of GitHub */ + "meta/get-zen": { + responses: { + /** Response */ + 200: { + content: { + "text/plain": string; + }; + }; + }; + }; + /** + * **Deprecated**: Use `repos.compareCommitsWithBasehead()` (`GET /repos/{owner}/{repo}/compare/{basehead}`) instead. Both `:base` and `:head` must be branch names in `:repo`. To compare branches across other repositories in the same network as `:repo`, use the format `:branch`. + * + * The response from the API is equivalent to running the `git log base..head` command; however, commits are returned in chronological order. Pass the appropriate [media type](https://docs.github.com/rest/overview/media-types/#commits-commit-comparison-and-pull-requests) to fetch diff and patch formats. + * + * The response also includes details on the files that were changed between the two commits. This includes the status of the change (for example, if a file was added, removed, modified, or renamed), and details of the change itself. For example, files with a `renamed` status have a `previous_filename` field showing the previous filename of the file, and files with a `modified` status have a `patch` field showing the changes made to the file. + * + * **Working with large comparisons** + * + * To process a response with a large number of commits, you can use (`per_page` or `page`) to paginate the results. When using paging, the list of changed files is only returned with page 1, but includes all changed files for the entire comparison. For more information on working with pagination, see "[Traversing with pagination](/rest/guides/traversing-with-pagination)." + * + * When calling this API without any paging parameters (`per_page` or `page`), the returned list is limited to 250 commits and the last commit in the list is the most recent of the entire comparison. When a paging parameter is specified, the first commit in the returned list of each page is the earliest. + * + * **Signature verification object** + * + * The response will include a `verification` object that describes the result of verifying the commit's signature. 
The following fields are included in the `verification` object: + * + * | Name | Type | Description | + * | ---- | ---- | ----------- | + * | `verified` | `boolean` | Indicates whether GitHub considers the signature in this commit to be verified. | + * | `reason` | `string` | The reason for verified value. Possible values and their meanings are enumerated in table below. | + * | `signature` | `string` | The signature that was extracted from the commit. | + * | `payload` | `string` | The value that was signed. | + * + * These are the possible values for `reason` in the `verification` object: + * + * | Value | Description | + * | ----- | ----------- | + * | `expired_key` | The key that made the signature is expired. | + * | `not_signing_key` | The "signing" flag is not among the usage flags in the GPG key that made the signature. | + * | `gpgverify_error` | There was an error communicating with the signature verification service. | + * | `gpgverify_unavailable` | The signature verification service is currently unavailable. | + * | `unsigned` | The object does not include a signature. | + * | `unknown_signature_type` | A non-PGP signature was found in the commit. | + * | `no_user` | No user was associated with the `committer` email address in the commit. | + * | `unverified_email` | The `committer` email address in the commit was associated with a user, but the email address is not verified on her/his account. | + * | `bad_email` | The `committer` email address in the commit is not included in the identities of the PGP key that made the signature. | + * | `unknown_key` | The key that made the signature has not been registered with any user's account. | + * | `malformed_signature` | There was an error parsing the signature. | + * | `invalid` | The signature could not be cryptographically verified using the key whose key-id was found in the signature. | + * | `valid` | None of the above errors applied, so the signature is considered to be verified. | + */ + "repos/compare-commits": { + parameters: { + path: { + /** The account owner of the repository. The name is not case sensitive. */ + owner: components["parameters"]["owner"]; + /** The name of the repository. The name is not case sensitive. */ + repo: components["parameters"]["repo"]; + base: string; + head: string; + }; + query: { + /** The number of results per page (max 100). */ + per_page?: components["parameters"]["per-page"]; + /** Page number of the results to fetch. 
*/ + page?: components["parameters"]["page"]; + }; + }; + responses: { + /** Response */ + 200: { + content: { + "application/json": components["schemas"]["commit-comparison"]; + }; + }; + 404: components["responses"]["not_found"]; + 500: components["responses"]["internal_error"]; + }; + }; +} + +export interface external {} diff --git a/node_modules/@octokit/request-error/LICENSE b/node_modules/@octokit/request-error/LICENSE new file mode 100644 index 0000000..ef2c18e --- /dev/null +++ b/node_modules/@octokit/request-error/LICENSE @@ -0,0 +1,21 @@ +The MIT License + +Copyright (c) 2019 Octokit contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/@octokit/request-error/README.md b/node_modules/@octokit/request-error/README.md new file mode 100644 index 0000000..343c456 --- /dev/null +++ b/node_modules/@octokit/request-error/README.md @@ -0,0 +1,71 @@ +# http-error.js + +> Error class for Octokit request errors + +[![@latest](https://img.shields.io/npm/v/@octokit/request-error.svg)](https://www.npmjs.com/package/@octokit/request-error) +[![Build Status](https://github.com/octokit/request-error.js/workflows/Test/badge.svg)](https://github.com/octokit/request-error.js/actions?query=workflow%3ATest) + +## Usage + + + + + + +
+Browsers + +Load @octokit/request-error directly from cdn.skypack.dev + +```html + +``` + +
+Node + + +Install with npm install @octokit/request-error + +```js +const { RequestError } = require("@octokit/request-error"); +// or: import { RequestError } from "@octokit/request-error"; +``` + +
+ +```js +const error = new RequestError("Oops", 500, { + request: { + method: "POST", + url: "https://api.github.com/foo", + body: { + bar: "baz", + }, + headers: { + authorization: "token secret123", + }, + }, + response: { + status: 500, + url: "https://api.github.com/foo", + headers: { + "x-github-request-id": "1:2:3:4", + }, + data: { + foo: "bar", + }, + }, +}); + +error.message; // Oops +error.status; // 500 +error.request; // { method, url, headers, body } +error.response; // { url, status, headers, data } +``` + +## LICENSE + +[MIT](LICENSE) diff --git a/node_modules/@octokit/request-error/dist-node/index.js b/node_modules/@octokit/request-error/dist-node/index.js new file mode 100644 index 0000000..619f462 --- /dev/null +++ b/node_modules/@octokit/request-error/dist-node/index.js @@ -0,0 +1,74 @@ +'use strict'; + +Object.defineProperty(exports, '__esModule', { value: true }); + +function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } + +var deprecation = require('deprecation'); +var once = _interopDefault(require('once')); + +const logOnceCode = once(deprecation => console.warn(deprecation)); +const logOnceHeaders = once(deprecation => console.warn(deprecation)); +/** + * Error with extra properties to help with debugging + */ + +class RequestError extends Error { + constructor(message, statusCode, options) { + super(message); // Maintains proper stack trace (only available on V8) + + /* istanbul ignore next */ + + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + + this.name = "HttpError"; + this.status = statusCode; + let headers; + + if ("headers" in options && typeof options.headers !== "undefined") { + headers = options.headers; + } + + if ("response" in options) { + this.response = options.response; + headers = options.response.headers; + } // redact request credentials without mutating original request options + + + const requestCopy = Object.assign({}, options.request); + + if (options.request.headers.authorization) { + requestCopy.headers = Object.assign({}, options.request.headers, { + authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]") + }); + } + + requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit + // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications + .replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]") // OAuth tokens can be passed as URL query parameters, although it is not recommended + // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header + .replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); + this.request = requestCopy; // deprecations + + Object.defineProperty(this, "code", { + get() { + logOnceCode(new deprecation.Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`.")); + return statusCode; + } + + }); + Object.defineProperty(this, "headers", { + get() { + logOnceHeaders(new deprecation.Deprecation("[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`.")); + return headers || {}; + } + + }); + } + +} + +exports.RequestError = RequestError; +//# sourceMappingURL=index.js.map diff --git a/node_modules/@octokit/request-error/dist-node/index.js.map b/node_modules/@octokit/request-error/dist-node/index.js.map new file mode 100644 index 0000000..d349eda --- /dev/null +++ 
b/node_modules/@octokit/request-error/dist-node/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sources":["../dist-src/index.js"],"sourcesContent":["import { Deprecation } from \"deprecation\";\nimport once from \"once\";\nconst logOnceCode = once((deprecation) => console.warn(deprecation));\nconst logOnceHeaders = once((deprecation) => console.warn(deprecation));\n/**\n * Error with extra properties to help with debugging\n */\nexport class RequestError extends Error {\n constructor(message, statusCode, options) {\n super(message);\n // Maintains proper stack trace (only available on V8)\n /* istanbul ignore next */\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n this.name = \"HttpError\";\n this.status = statusCode;\n let headers;\n if (\"headers\" in options && typeof options.headers !== \"undefined\") {\n headers = options.headers;\n }\n if (\"response\" in options) {\n this.response = options.response;\n headers = options.response.headers;\n }\n // redact request credentials without mutating original request options\n const requestCopy = Object.assign({}, options.request);\n if (options.request.headers.authorization) {\n requestCopy.headers = Object.assign({}, options.request.headers, {\n authorization: options.request.headers.authorization.replace(/ .*$/, \" [REDACTED]\"),\n });\n }\n requestCopy.url = requestCopy.url\n // client_id & client_secret can be passed as URL query parameters to increase rate limit\n // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications\n .replace(/\\bclient_secret=\\w+/g, \"client_secret=[REDACTED]\")\n // OAuth tokens can be passed as URL query parameters, although it is not recommended\n // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header\n .replace(/\\baccess_token=\\w+/g, \"access_token=[REDACTED]\");\n this.request = requestCopy;\n // deprecations\n Object.defineProperty(this, \"code\", {\n get() {\n logOnceCode(new Deprecation(\"[@octokit/request-error] `error.code` is deprecated, use `error.status`.\"));\n return statusCode;\n },\n });\n Object.defineProperty(this, \"headers\", {\n get() {\n logOnceHeaders(new Deprecation(\"[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`.\"));\n return headers || {};\n },\n });\n 
}\n}\n"],"names":["logOnceCode","once","deprecation","console","warn","logOnceHeaders","RequestError","Error","constructor","message","statusCode","options","captureStackTrace","name","status","headers","response","requestCopy","Object","assign","request","authorization","replace","url","defineProperty","get","Deprecation"],"mappings":";;;;;;;;;AAEA,MAAMA,WAAW,GAAGC,IAAI,CAAEC,WAAD,IAAiBC,OAAO,CAACC,IAAR,CAAaF,WAAb,CAAlB,CAAxB;AACA,MAAMG,cAAc,GAAGJ,IAAI,CAAEC,WAAD,IAAiBC,OAAO,CAACC,IAAR,CAAaF,WAAb,CAAlB,CAA3B;AACA;AACA;AACA;;AACO,MAAMI,YAAN,SAA2BC,KAA3B,CAAiC;EACpCC,WAAW,CAACC,OAAD,EAAUC,UAAV,EAAsBC,OAAtB,EAA+B;IACtC,MAAMF,OAAN,EADsC;;;;IAItC,IAAIF,KAAK,CAACK,iBAAV,EAA6B;MACzBL,KAAK,CAACK,iBAAN,CAAwB,IAAxB,EAA8B,KAAKJ,WAAnC;;;IAEJ,KAAKK,IAAL,GAAY,WAAZ;IACA,KAAKC,MAAL,GAAcJ,UAAd;IACA,IAAIK,OAAJ;;IACA,IAAI,aAAaJ,OAAb,IAAwB,OAAOA,OAAO,CAACI,OAAf,KAA2B,WAAvD,EAAoE;MAChEA,OAAO,GAAGJ,OAAO,CAACI,OAAlB;;;IAEJ,IAAI,cAAcJ,OAAlB,EAA2B;MACvB,KAAKK,QAAL,GAAgBL,OAAO,CAACK,QAAxB;MACAD,OAAO,GAAGJ,OAAO,CAACK,QAAR,CAAiBD,OAA3B;KAfkC;;;IAkBtC,MAAME,WAAW,GAAGC,MAAM,CAACC,MAAP,CAAc,EAAd,EAAkBR,OAAO,CAACS,OAA1B,CAApB;;IACA,IAAIT,OAAO,CAACS,OAAR,CAAgBL,OAAhB,CAAwBM,aAA5B,EAA2C;MACvCJ,WAAW,CAACF,OAAZ,GAAsBG,MAAM,CAACC,MAAP,CAAc,EAAd,EAAkBR,OAAO,CAACS,OAAR,CAAgBL,OAAlC,EAA2C;QAC7DM,aAAa,EAAEV,OAAO,CAACS,OAAR,CAAgBL,OAAhB,CAAwBM,aAAxB,CAAsCC,OAAtC,CAA8C,MAA9C,EAAsD,aAAtD;OADG,CAAtB;;;IAIJL,WAAW,CAACM,GAAZ,GAAkBN,WAAW,CAACM,GAAZ;;KAGbD,OAHa,CAGL,sBAHK,EAGmB,0BAHnB;;KAMbA,OANa,CAML,qBANK,EAMkB,yBANlB,CAAlB;IAOA,KAAKF,OAAL,GAAeH,WAAf,CA/BsC;;IAiCtCC,MAAM,CAACM,cAAP,CAAsB,IAAtB,EAA4B,MAA5B,EAAoC;MAChCC,GAAG,GAAG;QACFzB,WAAW,CAAC,IAAI0B,uBAAJ,CAAgB,0EAAhB,CAAD,CAAX;QACA,OAAOhB,UAAP;;;KAHR;IAMAQ,MAAM,CAACM,cAAP,CAAsB,IAAtB,EAA4B,SAA5B,EAAuC;MACnCC,GAAG,GAAG;QACFpB,cAAc,CAAC,IAAIqB,uBAAJ,CAAgB,uFAAhB,CAAD,CAAd;QACA,OAAOX,OAAO,IAAI,EAAlB;;;KAHR;;;AAxCgC;;;;"} \ No newline at end of file diff --git a/node_modules/@octokit/request-error/dist-src/index.js b/node_modules/@octokit/request-error/dist-src/index.js new file mode 100644 index 0000000..5eb1927 --- /dev/null +++ b/node_modules/@octokit/request-error/dist-src/index.js @@ -0,0 +1,55 @@ +import { Deprecation } from "deprecation"; +import once from "once"; +const logOnceCode = once((deprecation) => console.warn(deprecation)); +const logOnceHeaders = once((deprecation) => console.warn(deprecation)); +/** + * Error with extra properties to help with debugging + */ +export class RequestError extends Error { + constructor(message, statusCode, options) { + super(message); + // Maintains proper stack trace (only available on V8) + /* istanbul ignore next */ + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + this.name = "HttpError"; + this.status = statusCode; + let headers; + if ("headers" in options && typeof options.headers !== "undefined") { + headers = options.headers; + } + if ("response" in options) { + this.response = options.response; + headers = options.response.headers; + } + // redact request credentials without mutating original request options + const requestCopy = Object.assign({}, options.request); + if (options.request.headers.authorization) { + requestCopy.headers = Object.assign({}, options.request.headers, { + authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]"), + }); + } + requestCopy.url = requestCopy.url + // client_id & client_secret can be passed as URL query parameters to increase rate limit + // see 
https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications + .replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]") + // OAuth tokens can be passed as URL query parameters, although it is not recommended + // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header + .replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); + this.request = requestCopy; + // deprecations + Object.defineProperty(this, "code", { + get() { + logOnceCode(new Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`.")); + return statusCode; + }, + }); + Object.defineProperty(this, "headers", { + get() { + logOnceHeaders(new Deprecation("[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`.")); + return headers || {}; + }, + }); + } +} diff --git a/node_modules/@octokit/request-error/dist-src/types.js b/node_modules/@octokit/request-error/dist-src/types.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/node_modules/@octokit/request-error/dist-src/types.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@octokit/request-error/dist-types/index.d.ts b/node_modules/@octokit/request-error/dist-types/index.d.ts new file mode 100644 index 0000000..d6e089c --- /dev/null +++ b/node_modules/@octokit/request-error/dist-types/index.d.ts @@ -0,0 +1,33 @@ +import { RequestOptions, ResponseHeaders, OctokitResponse } from "@octokit/types"; +import { RequestErrorOptions } from "./types"; +/** + * Error with extra properties to help with debugging + */ +export declare class RequestError extends Error { + name: "HttpError"; + /** + * http status code + */ + status: number; + /** + * http status code + * + * @deprecated `error.code` is deprecated in favor of `error.status` + */ + code: number; + /** + * Request options that lead to the error. 
+ */ + request: RequestOptions; + /** + * error response headers + * + * @deprecated `error.headers` is deprecated in favor of `error.response.headers` + */ + headers: ResponseHeaders; + /** + * Response object if a response was received + */ + response?: OctokitResponse; + constructor(message: string, statusCode: number, options: RequestErrorOptions); +} diff --git a/node_modules/@octokit/request-error/dist-types/types.d.ts b/node_modules/@octokit/request-error/dist-types/types.d.ts new file mode 100644 index 0000000..7785231 --- /dev/null +++ b/node_modules/@octokit/request-error/dist-types/types.d.ts @@ -0,0 +1,9 @@ +import { RequestOptions, ResponseHeaders, OctokitResponse } from "@octokit/types"; +export declare type RequestErrorOptions = { + /** @deprecated set `response` instead */ + headers?: ResponseHeaders; + request: RequestOptions; +} | { + response: OctokitResponse; + request: RequestOptions; +}; diff --git a/node_modules/@octokit/request-error/dist-web/index.js b/node_modules/@octokit/request-error/dist-web/index.js new file mode 100644 index 0000000..0fb64be --- /dev/null +++ b/node_modules/@octokit/request-error/dist-web/index.js @@ -0,0 +1,59 @@ +import { Deprecation } from 'deprecation'; +import once from 'once'; + +const logOnceCode = once((deprecation) => console.warn(deprecation)); +const logOnceHeaders = once((deprecation) => console.warn(deprecation)); +/** + * Error with extra properties to help with debugging + */ +class RequestError extends Error { + constructor(message, statusCode, options) { + super(message); + // Maintains proper stack trace (only available on V8) + /* istanbul ignore next */ + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + this.name = "HttpError"; + this.status = statusCode; + let headers; + if ("headers" in options && typeof options.headers !== "undefined") { + headers = options.headers; + } + if ("response" in options) { + this.response = options.response; + headers = options.response.headers; + } + // redact request credentials without mutating original request options + const requestCopy = Object.assign({}, options.request); + if (options.request.headers.authorization) { + requestCopy.headers = Object.assign({}, options.request.headers, { + authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]"), + }); + } + requestCopy.url = requestCopy.url + // client_id & client_secret can be passed as URL query parameters to increase rate limit + // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications + .replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]") + // OAuth tokens can be passed as URL query parameters, although it is not recommended + // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header + .replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); + this.request = requestCopy; + // deprecations + Object.defineProperty(this, "code", { + get() { + logOnceCode(new Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`.")); + return statusCode; + }, + }); + Object.defineProperty(this, "headers", { + get() { + logOnceHeaders(new Deprecation("[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`.")); + return headers || {}; + }, + }); + } +} + +export { RequestError }; +//# sourceMappingURL=index.js.map diff --git a/node_modules/@octokit/request-error/dist-web/index.js.map b/node_modules/@octokit/request-error/dist-web/index.js.map new file mode 
100644 index 0000000..78f677f --- /dev/null +++ b/node_modules/@octokit/request-error/dist-web/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sources":["../dist-src/index.js"],"sourcesContent":["import { Deprecation } from \"deprecation\";\nimport once from \"once\";\nconst logOnceCode = once((deprecation) => console.warn(deprecation));\nconst logOnceHeaders = once((deprecation) => console.warn(deprecation));\n/**\n * Error with extra properties to help with debugging\n */\nexport class RequestError extends Error {\n constructor(message, statusCode, options) {\n super(message);\n // Maintains proper stack trace (only available on V8)\n /* istanbul ignore next */\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n this.name = \"HttpError\";\n this.status = statusCode;\n let headers;\n if (\"headers\" in options && typeof options.headers !== \"undefined\") {\n headers = options.headers;\n }\n if (\"response\" in options) {\n this.response = options.response;\n headers = options.response.headers;\n }\n // redact request credentials without mutating original request options\n const requestCopy = Object.assign({}, options.request);\n if (options.request.headers.authorization) {\n requestCopy.headers = Object.assign({}, options.request.headers, {\n authorization: options.request.headers.authorization.replace(/ .*$/, \" [REDACTED]\"),\n });\n }\n requestCopy.url = requestCopy.url\n // client_id & client_secret can be passed as URL query parameters to increase rate limit\n // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications\n .replace(/\\bclient_secret=\\w+/g, \"client_secret=[REDACTED]\")\n // OAuth tokens can be passed as URL query parameters, although it is not recommended\n // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header\n .replace(/\\baccess_token=\\w+/g, \"access_token=[REDACTED]\");\n this.request = requestCopy;\n // deprecations\n Object.defineProperty(this, \"code\", {\n get() {\n logOnceCode(new Deprecation(\"[@octokit/request-error] `error.code` is deprecated, use `error.status`.\"));\n return statusCode;\n },\n });\n Object.defineProperty(this, \"headers\", {\n get() {\n logOnceHeaders(new Deprecation(\"[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`.\"));\n return headers || {};\n },\n });\n 
}\n}\n"],"names":[],"mappings":";;;AAEA,MAAM,WAAW,GAAG,IAAI,CAAC,CAAC,WAAW,KAAK,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC;AACrE,MAAM,cAAc,GAAG,IAAI,CAAC,CAAC,WAAW,KAAK,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC;AACxE;AACA;AACA;AACO,MAAM,YAAY,SAAS,KAAK,CAAC;AACxC,IAAI,WAAW,CAAC,OAAO,EAAE,UAAU,EAAE,OAAO,EAAE;AAC9C,QAAQ,KAAK,CAAC,OAAO,CAAC,CAAC;AACvB;AACA;AACA,QAAQ,IAAI,KAAK,CAAC,iBAAiB,EAAE;AACrC,YAAY,KAAK,CAAC,iBAAiB,CAAC,IAAI,EAAE,IAAI,CAAC,WAAW,CAAC,CAAC;AAC5D,SAAS;AACT,QAAQ,IAAI,CAAC,IAAI,GAAG,WAAW,CAAC;AAChC,QAAQ,IAAI,CAAC,MAAM,GAAG,UAAU,CAAC;AACjC,QAAQ,IAAI,OAAO,CAAC;AACpB,QAAQ,IAAI,SAAS,IAAI,OAAO,IAAI,OAAO,OAAO,CAAC,OAAO,KAAK,WAAW,EAAE;AAC5E,YAAY,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;AACtC,SAAS;AACT,QAAQ,IAAI,UAAU,IAAI,OAAO,EAAE;AACnC,YAAY,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC,QAAQ,CAAC;AAC7C,YAAY,OAAO,GAAG,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAC;AAC/C,SAAS;AACT;AACA,QAAQ,MAAM,WAAW,GAAG,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,OAAO,CAAC,OAAO,CAAC,CAAC;AAC/D,QAAQ,IAAI,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC,aAAa,EAAE;AACnD,YAAY,WAAW,CAAC,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,OAAO,CAAC,OAAO,CAAC,OAAO,EAAE;AAC7E,gBAAgB,aAAa,EAAE,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC,aAAa,CAAC,OAAO,CAAC,MAAM,EAAE,aAAa,CAAC;AACnG,aAAa,CAAC,CAAC;AACf,SAAS;AACT,QAAQ,WAAW,CAAC,GAAG,GAAG,WAAW,CAAC,GAAG;AACzC;AACA;AACA,aAAa,OAAO,CAAC,sBAAsB,EAAE,0BAA0B,CAAC;AACxE;AACA;AACA,aAAa,OAAO,CAAC,qBAAqB,EAAE,yBAAyB,CAAC,CAAC;AACvE,QAAQ,IAAI,CAAC,OAAO,GAAG,WAAW,CAAC;AACnC;AACA,QAAQ,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,MAAM,EAAE;AAC5C,YAAY,GAAG,GAAG;AAClB,gBAAgB,WAAW,CAAC,IAAI,WAAW,CAAC,0EAA0E,CAAC,CAAC,CAAC;AACzH,gBAAgB,OAAO,UAAU,CAAC;AAClC,aAAa;AACb,SAAS,CAAC,CAAC;AACX,QAAQ,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,SAAS,EAAE;AAC/C,YAAY,GAAG,GAAG;AAClB,gBAAgB,cAAc,CAAC,IAAI,WAAW,CAAC,uFAAuF,CAAC,CAAC,CAAC;AACzI,gBAAgB,OAAO,OAAO,IAAI,EAAE,CAAC;AACrC,aAAa;AACb,SAAS,CAAC,CAAC;AACX,KAAK;AACL;;;;"} \ No newline at end of file diff --git a/node_modules/@octokit/request-error/package.json b/node_modules/@octokit/request-error/package.json new file mode 100644 index 0000000..772c95a --- /dev/null +++ b/node_modules/@octokit/request-error/package.json @@ -0,0 +1,49 @@ +{ + "name": "@octokit/request-error", + "description": "Error class for Octokit request errors", + "version": "3.0.2", + "license": "MIT", + "files": [ + "dist-*/", + "bin/" + ], + "source": "dist-src/index.js", + "types": "dist-types/index.d.ts", + "main": "dist-node/index.js", + "module": "dist-web/index.js", + "pika": true, + "sideEffects": false, + "keywords": [ + "octokit", + "github", + "api", + "error" + ], + "repository": "github:octokit/request-error.js", + "dependencies": { + "@octokit/types": "^8.0.0", + "deprecation": "^2.0.0", + "once": "^1.4.0" + }, + "devDependencies": { + "@pika/pack": "^0.3.7", + "@pika/plugin-build-node": "^0.9.0", + "@pika/plugin-build-web": "^0.9.0", + "@pika/plugin-bundle-web": "^0.9.0", + "@pika/plugin-ts-standard-pkg": "^0.9.0", + "@types/jest": "^29.0.0", + "@types/node": "^16.0.0", + "@types/once": "^1.4.0", + "jest": "^29.0.0", + "pika-plugin-unpkg-field": "^1.1.0", + "prettier": "2.7.1", + "ts-jest": "^29.0.0", + "typescript": "^4.0.0" + }, + "engines": { + "node": ">= 14" + }, + "publishConfig": { + "access": "public" + } +} diff --git a/node_modules/@octokit/types/LICENSE b/node_modules/@octokit/types/LICENSE new file mode 100644 index 0000000..57bee5f --- /dev/null +++ b/node_modules/@octokit/types/LICENSE @@ -0,0 +1,7 @@ +MIT License Copyright (c) 2019 Octokit contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated 
documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice (including the next paragraph) shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/@octokit/types/README.md b/node_modules/@octokit/types/README.md new file mode 100644 index 0000000..c48ce42 --- /dev/null +++ b/node_modules/@octokit/types/README.md @@ -0,0 +1,65 @@ +# types.ts + +> Shared TypeScript definitions for Octokit projects + +[![@latest](https://img.shields.io/npm/v/@octokit/types.svg)](https://www.npmjs.com/package/@octokit/types) +[![Build Status](https://github.com/octokit/types.ts/workflows/Test/badge.svg)](https://github.com/octokit/types.ts/actions?workflow=Test) + + + +- [Usage](#usage) +- [Examples](#examples) + - [Get parameter and response data types for a REST API endpoint](#get-parameter-and-response-data-types-for-a-rest-api-endpoint) + - [Get response types from endpoint methods](#get-response-types-from-endpoint-methods) +- [Contributing](#contributing) +- [License](#license) + + + +## Usage + +See all exported types at https://octokit.github.io/types.ts + +## Examples + +### Get parameter and response data types for a REST API endpoint + +```ts +import { Endpoints } from "@octokit/types"; + +type listUserReposParameters = + Endpoints["GET /repos/{owner}/{repo}"]["parameters"]; +type listUserReposResponse = Endpoints["GET /repos/{owner}/{repo}"]["response"]; + +async function listRepos( + options: listUserReposParameters +): listUserReposResponse["data"] { + // ... 
+} +``` + +### Get response types from endpoint methods + +```ts +import { + GetResponseTypeFromEndpointMethod, + GetResponseDataTypeFromEndpointMethod, +} from "@octokit/types"; +import { Octokit } from "@octokit/rest"; + +const octokit = new Octokit(); +type CreateLabelResponseType = GetResponseTypeFromEndpointMethod< + typeof octokit.issues.createLabel +>; +type CreateLabelResponseDataType = GetResponseDataTypeFromEndpointMethod< + typeof octokit.issues.createLabel +>; +``` + +## Contributing + +See [CONTRIBUTING.md](CONTRIBUTING.md) + +## License + +[MIT](LICENSE) diff --git a/node_modules/@octokit/types/dist-node/index.js b/node_modules/@octokit/types/dist-node/index.js new file mode 100644 index 0000000..18a39b7 --- /dev/null +++ b/node_modules/@octokit/types/dist-node/index.js @@ -0,0 +1,8 @@ +'use strict'; + +Object.defineProperty(exports, '__esModule', { value: true }); + +const VERSION = "8.0.0"; + +exports.VERSION = VERSION; +//# sourceMappingURL=index.js.map diff --git a/node_modules/@octokit/types/dist-node/index.js.map b/node_modules/@octokit/types/dist-node/index.js.map new file mode 100644 index 0000000..2d148d3 --- /dev/null +++ b/node_modules/@octokit/types/dist-node/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sources":["../dist-src/VERSION.js"],"sourcesContent":["export const VERSION = \"0.0.0-development\";\n"],"names":["VERSION"],"mappings":";;;;MAAaA,OAAO,GAAG;;;;"} \ No newline at end of file diff --git a/node_modules/@octokit/types/dist-src/AuthInterface.js b/node_modules/@octokit/types/dist-src/AuthInterface.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/node_modules/@octokit/types/dist-src/AuthInterface.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@octokit/types/dist-src/EndpointDefaults.js b/node_modules/@octokit/types/dist-src/EndpointDefaults.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/node_modules/@octokit/types/dist-src/EndpointDefaults.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@octokit/types/dist-src/EndpointInterface.js b/node_modules/@octokit/types/dist-src/EndpointInterface.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/node_modules/@octokit/types/dist-src/EndpointInterface.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@octokit/types/dist-src/EndpointOptions.js b/node_modules/@octokit/types/dist-src/EndpointOptions.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/node_modules/@octokit/types/dist-src/EndpointOptions.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@octokit/types/dist-src/Fetch.js b/node_modules/@octokit/types/dist-src/Fetch.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/node_modules/@octokit/types/dist-src/Fetch.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@octokit/types/dist-src/GetResponseTypeFromEndpointMethod.js b/node_modules/@octokit/types/dist-src/GetResponseTypeFromEndpointMethod.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/node_modules/@octokit/types/dist-src/GetResponseTypeFromEndpointMethod.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@octokit/types/dist-src/OctokitResponse.js b/node_modules/@octokit/types/dist-src/OctokitResponse.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/node_modules/@octokit/types/dist-src/OctokitResponse.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@octokit/types/dist-src/RequestError.js b/node_modules/@octokit/types/dist-src/RequestError.js new file mode 100644 index 
0000000..cb0ff5c --- /dev/null +++ b/node_modules/@octokit/types/dist-src/RequestError.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@octokit/types/dist-src/RequestHeaders.js b/node_modules/@octokit/types/dist-src/RequestHeaders.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/node_modules/@octokit/types/dist-src/RequestHeaders.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@octokit/types/dist-src/RequestInterface.js b/node_modules/@octokit/types/dist-src/RequestInterface.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/node_modules/@octokit/types/dist-src/RequestInterface.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@octokit/types/dist-src/RequestMethod.js b/node_modules/@octokit/types/dist-src/RequestMethod.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/node_modules/@octokit/types/dist-src/RequestMethod.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@octokit/types/dist-src/RequestOptions.js b/node_modules/@octokit/types/dist-src/RequestOptions.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/node_modules/@octokit/types/dist-src/RequestOptions.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@octokit/types/dist-src/RequestParameters.js b/node_modules/@octokit/types/dist-src/RequestParameters.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/node_modules/@octokit/types/dist-src/RequestParameters.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@octokit/types/dist-src/RequestRequestOptions.js b/node_modules/@octokit/types/dist-src/RequestRequestOptions.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/node_modules/@octokit/types/dist-src/RequestRequestOptions.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@octokit/types/dist-src/ResponseHeaders.js b/node_modules/@octokit/types/dist-src/ResponseHeaders.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/node_modules/@octokit/types/dist-src/ResponseHeaders.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@octokit/types/dist-src/Route.js b/node_modules/@octokit/types/dist-src/Route.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/node_modules/@octokit/types/dist-src/Route.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@octokit/types/dist-src/Signal.js b/node_modules/@octokit/types/dist-src/Signal.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/node_modules/@octokit/types/dist-src/Signal.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@octokit/types/dist-src/StrategyInterface.js b/node_modules/@octokit/types/dist-src/StrategyInterface.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/node_modules/@octokit/types/dist-src/StrategyInterface.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@octokit/types/dist-src/Url.js b/node_modules/@octokit/types/dist-src/Url.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/node_modules/@octokit/types/dist-src/Url.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@octokit/types/dist-src/VERSION.js b/node_modules/@octokit/types/dist-src/VERSION.js new file mode 100644 index 0000000..608eb42 --- /dev/null +++ b/node_modules/@octokit/types/dist-src/VERSION.js @@ -0,0 +1 @@ +export const VERSION = "8.0.0"; diff --git a/node_modules/@octokit/types/dist-src/generated/Endpoints.js b/node_modules/@octokit/types/dist-src/generated/Endpoints.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ 
b/node_modules/@octokit/types/dist-src/generated/Endpoints.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@octokit/types/dist-src/index.js b/node_modules/@octokit/types/dist-src/index.js new file mode 100644 index 0000000..004ae9b --- /dev/null +++ b/node_modules/@octokit/types/dist-src/index.js @@ -0,0 +1,21 @@ +export * from "./AuthInterface"; +export * from "./EndpointDefaults"; +export * from "./EndpointInterface"; +export * from "./EndpointOptions"; +export * from "./Fetch"; +export * from "./OctokitResponse"; +export * from "./RequestError"; +export * from "./RequestHeaders"; +export * from "./RequestInterface"; +export * from "./RequestMethod"; +export * from "./RequestOptions"; +export * from "./RequestParameters"; +export * from "./RequestRequestOptions"; +export * from "./ResponseHeaders"; +export * from "./Route"; +export * from "./Signal"; +export * from "./StrategyInterface"; +export * from "./Url"; +export * from "./VERSION"; +export * from "./GetResponseTypeFromEndpointMethod"; +export * from "./generated/Endpoints"; diff --git a/node_modules/@octokit/types/dist-types/AuthInterface.d.ts b/node_modules/@octokit/types/dist-types/AuthInterface.d.ts new file mode 100644 index 0000000..8b39d61 --- /dev/null +++ b/node_modules/@octokit/types/dist-types/AuthInterface.d.ts @@ -0,0 +1,31 @@ +import { EndpointOptions } from "./EndpointOptions"; +import { OctokitResponse } from "./OctokitResponse"; +import { RequestInterface } from "./RequestInterface"; +import { RequestParameters } from "./RequestParameters"; +import { Route } from "./Route"; +/** + * Interface to implement complex authentication strategies for Octokit. + * An object Implementing the AuthInterface can directly be passed as the + * `auth` option in the Octokit constructor. + * + * For the official implementations of the most common authentication + * strategies, see https://github.com/octokit/auth.js + */ +export interface AuthInterface { + (...args: AuthOptions): Promise; + hook: { + /** + * Sends a request using the passed `request` instance + * + * @param {object} endpoint Must set `method` and `url`. Plus URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. + */ + (request: RequestInterface, options: EndpointOptions): Promise>; + /** + * Sends a request using the passed `request` instance + * + * @param {string} route Request method + URL. Example: `'GET /orgs/{org}'` + * @param {object} [parameters] URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. + */ + (request: RequestInterface, route: Route, parameters?: RequestParameters): Promise>; + }; +} diff --git a/node_modules/@octokit/types/dist-types/EndpointDefaults.d.ts b/node_modules/@octokit/types/dist-types/EndpointDefaults.d.ts new file mode 100644 index 0000000..a2c2307 --- /dev/null +++ b/node_modules/@octokit/types/dist-types/EndpointDefaults.d.ts @@ -0,0 +1,21 @@ +import { RequestHeaders } from "./RequestHeaders"; +import { RequestMethod } from "./RequestMethod"; +import { RequestParameters } from "./RequestParameters"; +import { Url } from "./Url"; +/** + * The `.endpoint()` method is guaranteed to set all keys defined by RequestParameters + * as well as the method property. 
+ */ +export declare type EndpointDefaults = RequestParameters & { + baseUrl: Url; + method: RequestMethod; + url?: Url; + headers: RequestHeaders & { + accept: string; + "user-agent": string; + }; + mediaType: { + format: string; + previews: string[]; + }; +}; diff --git a/node_modules/@octokit/types/dist-types/EndpointInterface.d.ts b/node_modules/@octokit/types/dist-types/EndpointInterface.d.ts new file mode 100644 index 0000000..d7b4009 --- /dev/null +++ b/node_modules/@octokit/types/dist-types/EndpointInterface.d.ts @@ -0,0 +1,65 @@ +import { EndpointDefaults } from "./EndpointDefaults"; +import { RequestOptions } from "./RequestOptions"; +import { RequestParameters } from "./RequestParameters"; +import { Route } from "./Route"; +import { Endpoints } from "./generated/Endpoints"; +export interface EndpointInterface { + /** + * Transforms a GitHub REST API endpoint into generic request options + * + * @param {object} endpoint Must set `url` unless it's set defaults. Plus URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. + */ + (options: O & { + method?: string; + } & ("url" extends keyof D ? { + url?: string; + } : { + url: string; + })): RequestOptions & Pick; + /** + * Transforms a GitHub REST API endpoint into generic request options + * + * @param {string} route Request method + URL. Example: `'GET /orgs/{org}'` + * @param {object} [parameters] URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. + */ + (route: keyof Endpoints | R, parameters?: P): (R extends keyof Endpoints ? Endpoints[R]["request"] : RequestOptions) & Pick; + /** + * Object with current default route and parameters + */ + DEFAULTS: D & EndpointDefaults; + /** + * Returns a new `endpoint` interface with new defaults + */ + defaults: (newDefaults: O) => EndpointInterface; + merge: { + /** + * Merges current endpoint defaults with passed route and parameters, + * without transforming them into request options. + * + * @param {string} route Request method + URL. Example: `'GET /orgs/{org}'` + * @param {object} [parameters] URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. + * + */ + (route: keyof Endpoints | R, parameters?: P): D & (R extends keyof Endpoints ? Endpoints[R]["request"] & Endpoints[R]["parameters"] : EndpointDefaults) & P; + /** + * Merges current endpoint defaults with passed route and parameters, + * without transforming them into request options. + * + * @param {object} endpoint Must set `method` and `url`. Plus URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. + */ +

(options: P): EndpointDefaults & D & P; + /** + * Returns current default options. + * + * @deprecated use endpoint.DEFAULTS instead + */ + (): D & EndpointDefaults; + }; + /** + * Stateless method to turn endpoint options into request options. + * Calling `endpoint(options)` is the same as calling `endpoint.parse(endpoint.merge(options))`. + * + * @param {object} options `method`, `url`. Plus URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. + */ + parse: (options: O) => RequestOptions & Pick; +} diff --git a/node_modules/@octokit/types/dist-types/EndpointOptions.d.ts b/node_modules/@octokit/types/dist-types/EndpointOptions.d.ts new file mode 100644 index 0000000..b1b91f1 --- /dev/null +++ b/node_modules/@octokit/types/dist-types/EndpointOptions.d.ts @@ -0,0 +1,7 @@ +import { RequestMethod } from "./RequestMethod"; +import { Url } from "./Url"; +import { RequestParameters } from "./RequestParameters"; +export declare type EndpointOptions = RequestParameters & { + method: RequestMethod; + url: Url; +}; diff --git a/node_modules/@octokit/types/dist-types/Fetch.d.ts b/node_modules/@octokit/types/dist-types/Fetch.d.ts new file mode 100644 index 0000000..cbbd5e8 --- /dev/null +++ b/node_modules/@octokit/types/dist-types/Fetch.d.ts @@ -0,0 +1,4 @@ +/** + * Browser's fetch method (or compatible such as fetch-mock) + */ +export declare type Fetch = any; diff --git a/node_modules/@octokit/types/dist-types/GetResponseTypeFromEndpointMethod.d.ts b/node_modules/@octokit/types/dist-types/GetResponseTypeFromEndpointMethod.d.ts new file mode 100644 index 0000000..70e1a8d --- /dev/null +++ b/node_modules/@octokit/types/dist-types/GetResponseTypeFromEndpointMethod.d.ts @@ -0,0 +1,5 @@ +declare type Unwrap = T extends Promise ? 
U : T; +declare type AnyFunction = (...args: any[]) => any; +export declare type GetResponseTypeFromEndpointMethod = Unwrap>; +export declare type GetResponseDataTypeFromEndpointMethod = Unwrap>["data"]; +export {}; diff --git a/node_modules/@octokit/types/dist-types/OctokitResponse.d.ts b/node_modules/@octokit/types/dist-types/OctokitResponse.d.ts new file mode 100644 index 0000000..28fdfb8 --- /dev/null +++ b/node_modules/@octokit/types/dist-types/OctokitResponse.d.ts @@ -0,0 +1,17 @@ +import { ResponseHeaders } from "./ResponseHeaders"; +import { Url } from "./Url"; +export declare type OctokitResponse = { + headers: ResponseHeaders; + /** + * http response code + */ + status: S; + /** + * URL of response after all redirects + */ + url: Url; + /** + * Response data as documented in the REST API reference documentation at https://docs.github.com/rest/reference + */ + data: T; +}; diff --git a/node_modules/@octokit/types/dist-types/RequestError.d.ts b/node_modules/@octokit/types/dist-types/RequestError.d.ts new file mode 100644 index 0000000..89174e6 --- /dev/null +++ b/node_modules/@octokit/types/dist-types/RequestError.d.ts @@ -0,0 +1,11 @@ +export declare type RequestError = { + name: string; + status: number; + documentation_url: string; + errors?: Array<{ + resource: string; + code: string; + field: string; + message?: string; + }>; +}; diff --git a/node_modules/@octokit/types/dist-types/RequestHeaders.d.ts b/node_modules/@octokit/types/dist-types/RequestHeaders.d.ts new file mode 100644 index 0000000..ac5aae0 --- /dev/null +++ b/node_modules/@octokit/types/dist-types/RequestHeaders.d.ts @@ -0,0 +1,15 @@ +export declare type RequestHeaders = { + /** + * Avoid setting `headers.accept`, use `mediaType.{format|previews}` option instead. + */ + accept?: string; + /** + * Use `authorization` to send authenticated request, remember `token ` / `bearer ` prefixes. Example: `token 1234567890abcdef1234567890abcdef12345678` + */ + authorization?: string; + /** + * `user-agent` is set do a default and can be overwritten as needed. + */ + "user-agent"?: string; + [header: string]: string | number | undefined; +}; diff --git a/node_modules/@octokit/types/dist-types/RequestInterface.d.ts b/node_modules/@octokit/types/dist-types/RequestInterface.d.ts new file mode 100644 index 0000000..851811f --- /dev/null +++ b/node_modules/@octokit/types/dist-types/RequestInterface.d.ts @@ -0,0 +1,34 @@ +import { EndpointInterface } from "./EndpointInterface"; +import { OctokitResponse } from "./OctokitResponse"; +import { RequestParameters } from "./RequestParameters"; +import { Route } from "./Route"; +import { Endpoints } from "./generated/Endpoints"; +export interface RequestInterface { + /** + * Sends a request based on endpoint options + * + * @param {object} endpoint Must set `method` and `url`. Plus URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. + */ + (options: O & { + method?: string; + } & ("url" extends keyof D ? { + url?: string; + } : { + url: string; + })): Promise>; + /** + * Sends a request based on endpoint options + * + * @param {string} route Request method + URL. Example: `'GET /orgs/{org}'` + * @param {object} [parameters] URL, query or body parameters, as well as `headers`, `mediaType.{format|previews}`, `request`, or `baseUrl`. + */ + (route: keyof Endpoints | R, options?: R extends keyof Endpoints ? Endpoints[R]["parameters"] & RequestParameters : RequestParameters): R extends keyof Endpoints ? 
Promise : Promise>; + /** + * Returns a new `request` with updated route and parameters + */ + defaults: (newDefaults: O) => RequestInterface; + /** + * Octokit endpoint API, see {@link https://github.com/octokit/endpoint.js|@octokit/endpoint} + */ + endpoint: EndpointInterface; +} diff --git a/node_modules/@octokit/types/dist-types/RequestMethod.d.ts b/node_modules/@octokit/types/dist-types/RequestMethod.d.ts new file mode 100644 index 0000000..e999c8d --- /dev/null +++ b/node_modules/@octokit/types/dist-types/RequestMethod.d.ts @@ -0,0 +1,4 @@ +/** + * HTTP Verb supported by GitHub's REST API + */ +export declare type RequestMethod = "DELETE" | "GET" | "HEAD" | "PATCH" | "POST" | "PUT"; diff --git a/node_modules/@octokit/types/dist-types/RequestOptions.d.ts b/node_modules/@octokit/types/dist-types/RequestOptions.d.ts new file mode 100644 index 0000000..97e2181 --- /dev/null +++ b/node_modules/@octokit/types/dist-types/RequestOptions.d.ts @@ -0,0 +1,14 @@ +import { RequestHeaders } from "./RequestHeaders"; +import { RequestMethod } from "./RequestMethod"; +import { RequestRequestOptions } from "./RequestRequestOptions"; +import { Url } from "./Url"; +/** + * Generic request options as they are returned by the `endpoint()` method + */ +export declare type RequestOptions = { + method: RequestMethod; + url: Url; + headers: RequestHeaders; + body?: any; + request?: RequestRequestOptions; +}; diff --git a/node_modules/@octokit/types/dist-types/RequestParameters.d.ts b/node_modules/@octokit/types/dist-types/RequestParameters.d.ts new file mode 100644 index 0000000..b056a0e --- /dev/null +++ b/node_modules/@octokit/types/dist-types/RequestParameters.d.ts @@ -0,0 +1,45 @@ +import { RequestRequestOptions } from "./RequestRequestOptions"; +import { RequestHeaders } from "./RequestHeaders"; +import { Url } from "./Url"; +/** + * Parameters that can be passed into `request(route, parameters)` or `endpoint(route, parameters)` methods + */ +export declare type RequestParameters = { + /** + * Base URL to be used when a relative URL is passed, such as `/orgs/{org}`. + * If `baseUrl` is `https://enterprise.acme-inc.com/api/v3`, then the request + * will be sent to `https://enterprise.acme-inc.com/api/v3/orgs/{org}`. + */ + baseUrl?: Url; + /** + * HTTP headers. Use lowercase keys. + */ + headers?: RequestHeaders; + /** + * Media type options, see {@link https://developer.github.com/v3/media/|GitHub Developer Guide} + */ + mediaType?: { + /** + * `json` by default. Can be `raw`, `text`, `html`, `full`, `diff`, `patch`, `sha`, `base64`. Depending on endpoint + */ + format?: string; + /** + * Custom media type names of {@link https://developer.github.com/v3/media/|API Previews} without the `-preview` suffix. + * Example for single preview: `['squirrel-girl']`. + * Example for multiple previews: `['squirrel-girl', 'mister-fantastic']`. + */ + previews?: string[]; + }; + /** + * Pass custom meta information for the request. The `request` object will be returned as is. + */ + request?: RequestRequestOptions; + /** + * Any additional parameter will be passed as follows + * 1. URL parameter if `':parameter'` or `{parameter}` is part of `url` + * 2. Query parameter if `method` is `'GET'` or `'HEAD'` + * 3. Request body if `parameter` is `'data'` + * 4. 
JSON in the request body in the form of `body[parameter]` unless `parameter` key is `'data'` + */ + [parameter: string]: unknown; +}; diff --git a/node_modules/@octokit/types/dist-types/RequestRequestOptions.d.ts b/node_modules/@octokit/types/dist-types/RequestRequestOptions.d.ts new file mode 100644 index 0000000..8f5c43a --- /dev/null +++ b/node_modules/@octokit/types/dist-types/RequestRequestOptions.d.ts @@ -0,0 +1,26 @@ +import { Fetch } from "./Fetch"; +import { Signal } from "./Signal"; +/** + * Octokit-specific request options which are ignored for the actual request, but can be used by Octokit or plugins to manipulate how the request is sent or how a response is handled + */ +export declare type RequestRequestOptions = { + /** + * Node only. Useful for custom proxy, certificate, or dns lookup. + * + * @see https://nodejs.org/api/http.html#http_class_http_agent + */ + agent?: unknown; + /** + * Custom replacement for built-in fetch method. Useful for testing or request hooks. + */ + fetch?: Fetch; + /** + * Use an `AbortController` instance to cancel a request. In node you can only cancel streamed requests. + */ + signal?: Signal; + /** + * Node only. Request/response timeout in ms, it resets on redirect. 0 to disable (OS limit applies). `options.request.signal` is recommended instead. + */ + timeout?: number; + [option: string]: any; +}; diff --git a/node_modules/@octokit/types/dist-types/ResponseHeaders.d.ts b/node_modules/@octokit/types/dist-types/ResponseHeaders.d.ts new file mode 100644 index 0000000..c8fbe43 --- /dev/null +++ b/node_modules/@octokit/types/dist-types/ResponseHeaders.d.ts @@ -0,0 +1,20 @@ +export declare type ResponseHeaders = { + "cache-control"?: string; + "content-length"?: number; + "content-type"?: string; + date?: string; + etag?: string; + "last-modified"?: string; + link?: string; + location?: string; + server?: string; + status?: string; + vary?: string; + "x-github-mediatype"?: string; + "x-github-request-id"?: string; + "x-oauth-scopes"?: string; + "x-ratelimit-limit"?: string; + "x-ratelimit-remaining"?: string; + "x-ratelimit-reset"?: string; + [header: string]: string | number | undefined; +}; diff --git a/node_modules/@octokit/types/dist-types/Route.d.ts b/node_modules/@octokit/types/dist-types/Route.d.ts new file mode 100644 index 0000000..dcaac75 --- /dev/null +++ b/node_modules/@octokit/types/dist-types/Route.d.ts @@ -0,0 +1,4 @@ +/** + * String consisting of an optional HTTP method and relative path or absolute URL. 
Examples: `'/orgs/{org}'`, `'PUT /orgs/{org}'`, `GET https://example.com/foo/bar` + */ +export declare type Route = string; diff --git a/node_modules/@octokit/types/dist-types/Signal.d.ts b/node_modules/@octokit/types/dist-types/Signal.d.ts new file mode 100644 index 0000000..4ebcf24 --- /dev/null +++ b/node_modules/@octokit/types/dist-types/Signal.d.ts @@ -0,0 +1,6 @@ +/** + * Abort signal + * + * @see https://developer.mozilla.org/en-US/docs/Web/API/AbortSignal + */ +export declare type Signal = any; diff --git a/node_modules/@octokit/types/dist-types/StrategyInterface.d.ts b/node_modules/@octokit/types/dist-types/StrategyInterface.d.ts new file mode 100644 index 0000000..405cbd2 --- /dev/null +++ b/node_modules/@octokit/types/dist-types/StrategyInterface.d.ts @@ -0,0 +1,4 @@ +import { AuthInterface } from "./AuthInterface"; +export interface StrategyInterface { + (...args: StrategyOptions): AuthInterface; +} diff --git a/node_modules/@octokit/types/dist-types/Url.d.ts b/node_modules/@octokit/types/dist-types/Url.d.ts new file mode 100644 index 0000000..3e69916 --- /dev/null +++ b/node_modules/@octokit/types/dist-types/Url.d.ts @@ -0,0 +1,4 @@ +/** + * Relative or absolute URL. Examples: `'/orgs/{org}'`, `https://example.com/foo/bar` + */ +export declare type Url = string; diff --git a/node_modules/@octokit/types/dist-types/VERSION.d.ts b/node_modules/@octokit/types/dist-types/VERSION.d.ts new file mode 100644 index 0000000..ce4c301 --- /dev/null +++ b/node_modules/@octokit/types/dist-types/VERSION.d.ts @@ -0,0 +1 @@ +export declare const VERSION = "8.0.0"; diff --git a/node_modules/@octokit/types/dist-types/generated/Endpoints.d.ts b/node_modules/@octokit/types/dist-types/generated/Endpoints.d.ts new file mode 100644 index 0000000..4fb64d7 --- /dev/null +++ b/node_modules/@octokit/types/dist-types/generated/Endpoints.d.ts @@ -0,0 +1,3499 @@ +import { paths } from "@octokit/openapi-types"; +import { OctokitResponse } from "../OctokitResponse"; +import { RequestHeaders } from "../RequestHeaders"; +import { RequestRequestOptions } from "../RequestRequestOptions"; +declare type UnionToIntersection = (U extends any ? (k: U) => void : never) extends (k: infer I) => void ? I : never; +declare type ExtractParameters = "parameters" extends keyof T ? UnionToIntersection<{ + [K in keyof T["parameters"]]: T["parameters"][K]; +}[keyof T["parameters"]]> : {}; +declare type ExtractRequestBody = "requestBody" extends keyof T ? "content" extends keyof T["requestBody"] ? "application/json" extends keyof T["requestBody"]["content"] ? T["requestBody"]["content"]["application/json"] : { + data: { + [K in keyof T["requestBody"]["content"]]: T["requestBody"]["content"][K]; + }[keyof T["requestBody"]["content"]]; +} : "application/json" extends keyof T["requestBody"] ? T["requestBody"]["application/json"] : { + data: { + [K in keyof T["requestBody"]]: T["requestBody"][K]; + }[keyof T["requestBody"]]; +} : {}; +declare type ToOctokitParameters = ExtractParameters & ExtractRequestBody; +declare type RequiredPreview = T extends string ? { + mediaType: { + previews: [T, ...string[]]; + }; +} : {}; +declare type Operation = { + parameters: ToOctokitParameters & RequiredPreview; + request: { + method: Method extends keyof MethodsMap ? 
MethodsMap[Method] : never; + url: Url; + headers: RequestHeaders; + request: RequestRequestOptions; + }; + response: ExtractOctokitResponse; +}; +declare type MethodsMap = { + delete: "DELETE"; + get: "GET"; + patch: "PATCH"; + post: "POST"; + put: "PUT"; +}; +declare type SuccessStatuses = 200 | 201 | 202 | 204; +declare type RedirectStatuses = 301 | 302; +declare type EmptyResponseStatuses = 201 | 204; +declare type KnownJsonResponseTypes = "application/json" | "application/scim+json" | "text/html"; +declare type SuccessResponseDataType = { + [K in SuccessStatuses & keyof Responses]: GetContentKeyIfPresent extends never ? never : OctokitResponse, K>; +}[SuccessStatuses & keyof Responses]; +declare type RedirectResponseDataType = { + [K in RedirectStatuses & keyof Responses]: OctokitResponse; +}[RedirectStatuses & keyof Responses]; +declare type EmptyResponseDataType = { + [K in EmptyResponseStatuses & keyof Responses]: OctokitResponse; +}[EmptyResponseStatuses & keyof Responses]; +declare type GetContentKeyIfPresent = "content" extends keyof T ? DataType : DataType; +declare type DataType = { + [K in KnownJsonResponseTypes & keyof T]: T[K]; +}[KnownJsonResponseTypes & keyof T]; +declare type ExtractOctokitResponse = "responses" extends keyof R ? SuccessResponseDataType extends never ? RedirectResponseDataType extends never ? EmptyResponseDataType : RedirectResponseDataType : SuccessResponseDataType : unknown; +export interface Endpoints { + /** + * @see https://docs.github.com/rest/reference/apps#delete-an-installation-for-the-authenticated-app + */ + "DELETE /app/installations/{installation_id}": Operation<"/app/installations/{installation_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/apps#unsuspend-an-app-installation + */ + "DELETE /app/installations/{installation_id}/suspended": Operation<"/app/installations/{installation_id}/suspended", "delete">; + /** + * @see https://docs.github.com/rest/reference/apps#delete-an-app-authorization + */ + "DELETE /applications/{client_id}/grant": Operation<"/applications/{client_id}/grant", "delete">; + /** + * @see https://docs.github.com/rest/reference/apps#delete-an-app-token + */ + "DELETE /applications/{client_id}/token": Operation<"/applications/{client_id}/token", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#disable-a-selected-organization-for-github-actions-in-an-enterprise + */ + "DELETE /enterprises/{enterprise}/actions/permissions/organizations/{org_id}": Operation<"/enterprises/{enterprise}/actions/permissions/organizations/{org_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#delete-a-self-hosted-runner-group-from-an-enterprise + */ + "DELETE /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}": Operation<"/enterprises/{enterprise}/actions/runner-groups/{runner_group_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#remove-organization-access-to-a-self-hosted-runner-group-in-an-enterprise + */ + "DELETE /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations/{org_id}": Operation<"/enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations/{org_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#remove-a-self-hosted-runner-from-a-group-for-an-enterprise + */ + "DELETE /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners/{runner_id}": 
Operation<"/enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners/{runner_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#delete-self-hosted-runner-from-an-enterprise + */ + "DELETE /enterprises/{enterprise}/actions/runners/{runner_id}": Operation<"/enterprises/{enterprise}/actions/runners/{runner_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#remove-all-custom-labels-from-a-self-hosted-runner-for-an-enterprise + */ + "DELETE /enterprises/{enterprise}/actions/runners/{runner_id}/labels": Operation<"/enterprises/{enterprise}/actions/runners/{runner_id}/labels", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#remove-a-custom-label-from-a-self-hosted-runner-for-an-enterprise + */ + "DELETE /enterprises/{enterprise}/actions/runners/{runner_id}/labels/{name}": Operation<"/enterprises/{enterprise}/actions/runners/{runner_id}/labels/{name}", "delete">; + /** + * @see https://docs.github.com/rest/reference/gists#delete-a-gist + */ + "DELETE /gists/{gist_id}": Operation<"/gists/{gist_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/gists#delete-a-gist-comment + */ + "DELETE /gists/{gist_id}/comments/{comment_id}": Operation<"/gists/{gist_id}/comments/{comment_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/gists#unstar-a-gist + */ + "DELETE /gists/{gist_id}/star": Operation<"/gists/{gist_id}/star", "delete">; + /** + * @see https://docs.github.com/rest/reference/apps#revoke-an-installation-access-token + */ + "DELETE /installation/token": Operation<"/installation/token", "delete">; + /** + * @see https://docs.github.com/rest/reference/activity#delete-a-thread-subscription + */ + "DELETE /notifications/threads/{thread_id}/subscription": Operation<"/notifications/threads/{thread_id}/subscription", "delete">; + /** + * @see https://docs.github.com/rest/reference/codespaces#delete-an-organization-secret + */ + "DELETE /organizations/{org}/codespaces/secrets/{secret_name}": Operation<"/organizations/{org}/codespaces/secrets/{secret_name}", "delete">; + /** + * @see https://docs.github.com/rest/reference/codespaces#remove-selected-repository-from-an-organization-secret + */ + "DELETE /organizations/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}": Operation<"/organizations/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#disable-a-selected-repository-for-github-actions-in-an-organization + */ + "DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}": Operation<"/orgs/{org}/actions/permissions/repositories/{repository_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#delete-a-self-hosted-runner-group-from-an-organization + */ + "DELETE /orgs/{org}/actions/runner-groups/{runner_group_id}": Operation<"/orgs/{org}/actions/runner-groups/{runner_group_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#remove-repository-access-to-a-self-hosted-runner-group-in-an-organization + */ + "DELETE /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories/{repository_id}": Operation<"/orgs/{org}/actions/runner-groups/{runner_group_id}/repositories/{repository_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#remove-a-self-hosted-runner-from-a-group-for-an-organization + */ + "DELETE 
/orgs/{org}/actions/runner-groups/{runner_group_id}/runners/{runner_id}": Operation<"/orgs/{org}/actions/runner-groups/{runner_group_id}/runners/{runner_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#delete-a-self-hosted-runner-from-an-organization + */ + "DELETE /orgs/{org}/actions/runners/{runner_id}": Operation<"/orgs/{org}/actions/runners/{runner_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#remove-all-custom-labels-from-a-self-hosted-runner-for-an-organization + */ + "DELETE /orgs/{org}/actions/runners/{runner_id}/labels": Operation<"/orgs/{org}/actions/runners/{runner_id}/labels", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#remove-a-custom-label-from-a-self-hosted-runner-for-an-organization + */ + "DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}": Operation<"/orgs/{org}/actions/runners/{runner_id}/labels/{name}", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#delete-an-organization-secret + */ + "DELETE /orgs/{org}/actions/secrets/{secret_name}": Operation<"/orgs/{org}/actions/secrets/{secret_name}", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#remove-selected-repository-from-an-organization-secret + */ + "DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}": Operation<"/orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/orgs#unblock-a-user-from-an-organization + */ + "DELETE /orgs/{org}/blocks/{username}": Operation<"/orgs/{org}/blocks/{username}", "delete">; + /** + * @see https://docs.github.com/rest/reference/orgs#delete-a-custom-role + */ + "DELETE /orgs/{org}/custom_roles/{role_id}": Operation<"/orgs/{org}/custom_roles/{role_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/dependabot#delete-an-organization-secret + */ + "DELETE /orgs/{org}/dependabot/secrets/{secret_name}": Operation<"/orgs/{org}/dependabot/secrets/{secret_name}", "delete">; + /** + * @see https://docs.github.com/rest/reference/dependabot#remove-selected-repository-from-an-organization-secret + */ + "DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}": Operation<"/orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/orgs#delete-an-organization-webhook + */ + "DELETE /orgs/{org}/hooks/{hook_id}": Operation<"/orgs/{org}/hooks/{hook_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/interactions#remove-interaction-restrictions-for-an-organization + */ + "DELETE /orgs/{org}/interaction-limits": Operation<"/orgs/{org}/interaction-limits", "delete">; + /** + * @see https://docs.github.com/rest/reference/orgs#cancel-an-organization-invitation + */ + "DELETE /orgs/{org}/invitations/{invitation_id}": Operation<"/orgs/{org}/invitations/{invitation_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/orgs#remove-an-organization-member + */ + "DELETE /orgs/{org}/members/{username}": Operation<"/orgs/{org}/members/{username}", "delete">; + /** + * @see https://docs.github.com/rest/reference/codespaces + */ + "DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}": Operation<"/orgs/{org}/members/{username}/codespaces/{codespace_name}", "delete">; + /** + * @see https://docs.github.com/rest/reference/orgs#remove-organization-membership-for-a-user + */ + "DELETE 
/orgs/{org}/memberships/{username}": Operation<"/orgs/{org}/memberships/{username}", "delete">; + /** + * @see https://docs.github.com/rest/reference/migrations#delete-an-organization-migration-archive + */ + "DELETE /orgs/{org}/migrations/{migration_id}/archive": Operation<"/orgs/{org}/migrations/{migration_id}/archive", "delete">; + /** + * @see https://docs.github.com/rest/reference/migrations#unlock-an-organization-repository + */ + "DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock": Operation<"/orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock", "delete">; + /** + * @see https://docs.github.com/rest/reference/orgs#remove-outside-collaborator-from-an-organization + */ + "DELETE /orgs/{org}/outside_collaborators/{username}": Operation<"/orgs/{org}/outside_collaborators/{username}", "delete">; + /** + * @see https://docs.github.com/rest/reference/packages#delete-a-package-for-an-organization + */ + "DELETE /orgs/{org}/packages/{package_type}/{package_name}": Operation<"/orgs/{org}/packages/{package_type}/{package_name}", "delete">; + /** + * @see https://docs.github.com/rest/reference/packages#delete-a-package-version-for-an-organization + */ + "DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}": Operation<"/orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/orgs#remove-public-organization-membership-for-the-authenticated-user + */ + "DELETE /orgs/{org}/public_members/{username}": Operation<"/orgs/{org}/public_members/{username}", "delete">; + /** + * @see https://docs.github.com/rest/reference/orgs#remove-a-security-manager-team + */ + "DELETE /orgs/{org}/security-managers/teams/{team_slug}": Operation<"/orgs/{org}/security-managers/teams/{team_slug}", "delete">; + /** + * @see https://docs.github.com/rest/reference/teams#delete-a-team + */ + "DELETE /orgs/{org}/teams/{team_slug}": Operation<"/orgs/{org}/teams/{team_slug}", "delete">; + /** + * @see https://docs.github.com/rest/reference/teams#delete-a-discussion + */ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}": Operation<"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}", "delete">; + /** + * @see https://docs.github.com/rest/reference/teams#delete-a-discussion-comment + */ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}": Operation<"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}", "delete">; + /** + * @see https://docs.github.com/rest/reference/reactions#delete-team-discussion-comment-reaction + */ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}": Operation<"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/reactions#delete-team-discussion-reaction + */ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}": Operation<"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/teams#remove-team-membership-for-a-user + */ + "DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}": Operation<"/orgs/{org}/teams/{team_slug}/memberships/{username}", "delete">; + /** + * @see 
https://docs.github.com/rest/reference/teams#remove-a-project-from-a-team + */ + "DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}": Operation<"/orgs/{org}/teams/{team_slug}/projects/{project_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/teams/#remove-a-repository-from-a-team + */ + "DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}": Operation<"/orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}", "delete">; + /** + * @see https://docs.github.com/rest/reference/projects#delete-a-project-card + */ + "DELETE /projects/columns/cards/{card_id}": Operation<"/projects/columns/cards/{card_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/projects#delete-a-project-column + */ + "DELETE /projects/columns/{column_id}": Operation<"/projects/columns/{column_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/projects#delete-a-project + */ + "DELETE /projects/{project_id}": Operation<"/projects/{project_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/projects#remove-project-collaborator + */ + "DELETE /projects/{project_id}/collaborators/{username}": Operation<"/projects/{project_id}/collaborators/{username}", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#delete-a-repository + */ + "DELETE /repos/{owner}/{repo}": Operation<"/repos/{owner}/{repo}", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#delete-an-artifact + */ + "DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}": Operation<"/repos/{owner}/{repo}/actions/artifacts/{artifact_id}", "delete">; + /** + * @see https://docs.github.com/rest/actions/cache#delete-a-github-actions-cache-for-a-repository-using-a-cache-id + */ + "DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}": Operation<"/repos/{owner}/{repo}/actions/caches/{cache_id}", "delete">; + /** + * @see https://docs.github.com/rest/actions/cache#delete-github-actions-caches-for-a-repository-using-a-cache-key + */ + "DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}": Operation<"/repos/{owner}/{repo}/actions/caches", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#delete-a-self-hosted-runner-from-a-repository + */ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}": Operation<"/repos/{owner}/{repo}/actions/runners/{runner_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#remove-all-custom-labels-from-a-self-hosted-runner-for-a-repository + */ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels": Operation<"/repos/{owner}/{repo}/actions/runners/{runner_id}/labels", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#remove-a-custom-label-from-a-self-hosted-runner-for-a-repository + */ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}": Operation<"/repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#delete-a-workflow-run + */ + "DELETE /repos/{owner}/{repo}/actions/runs/{run_id}": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#delete-workflow-run-logs + */ + "DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/logs", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#delete-a-repository-secret + */ + "DELETE 
/repos/{owner}/{repo}/actions/secrets/{secret_name}": Operation<"/repos/{owner}/{repo}/actions/secrets/{secret_name}", "delete">; + /** + * @see https://docs.github.com/v3/repos#delete-autolink + */ + "DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}": Operation<"/repos/{owner}/{repo}/autolinks/{autolink_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#disable-automated-security-fixes + */ + "DELETE /repos/{owner}/{repo}/automated-security-fixes": Operation<"/repos/{owner}/{repo}/automated-security-fixes", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#delete-branch-protection + */ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#delete-admin-branch-protection + */ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#delete-pull-request-review-protection + */ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#delete-commit-signature-protection + */ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#remove-status-check-protection + */ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#remove-status-check-contexts + */ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#delete-access-restrictions + */ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#remove-app-access-restrictions + */ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#remove-team-access-restrictions + */ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#remove-user-access-restrictions + */ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", "delete">; + /** + * @see https://docs.github.com/rest/reference/code-scanning#delete-a-code-scanning-analysis-from-a-repository + */ + "DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}": 
Operation<"/repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/codespaces#delete-a-repository-secret + */ + "DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}": Operation<"/repos/{owner}/{repo}/codespaces/secrets/{secret_name}", "delete">; + /** + * @see https://docs.github.com/rest/collaborators/collaborators#remove-a-repository-collaborator + */ + "DELETE /repos/{owner}/{repo}/collaborators/{username}": Operation<"/repos/{owner}/{repo}/collaborators/{username}", "delete">; + /** + * @see https://docs.github.com/rest/commits/comments#delete-a-commit-comment + */ + "DELETE /repos/{owner}/{repo}/comments/{comment_id}": Operation<"/repos/{owner}/{repo}/comments/{comment_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/reactions#delete-a-commit-comment-reaction + */ + "DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}": Operation<"/repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#delete-a-file + */ + "DELETE /repos/{owner}/{repo}/contents/{path}": Operation<"/repos/{owner}/{repo}/contents/{path}", "delete">; + /** + * @see https://docs.github.com/rest/reference/dependabot#delete-a-repository-secret + */ + "DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}": Operation<"/repos/{owner}/{repo}/dependabot/secrets/{secret_name}", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#delete-a-deployment + */ + "DELETE /repos/{owner}/{repo}/deployments/{deployment_id}": Operation<"/repos/{owner}/{repo}/deployments/{deployment_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#delete-an-environment + */ + "DELETE /repos/{owner}/{repo}/environments/{environment_name}": Operation<"/repos/{owner}/{repo}/environments/{environment_name}", "delete">; + /** + * @see https://docs.github.com/rest/deployments/branch-policies#delete-deployment-branch-policy + */ + "DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}": Operation<"/repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/git#delete-a-reference + */ + "DELETE /repos/{owner}/{repo}/git/refs/{ref}": Operation<"/repos/{owner}/{repo}/git/refs/{ref}", "delete">; + /** + * @see https://docs.github.com/rest/webhooks/repos#delete-a-repository-webhook + */ + "DELETE /repos/{owner}/{repo}/hooks/{hook_id}": Operation<"/repos/{owner}/{repo}/hooks/{hook_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/migrations#cancel-an-import + */ + "DELETE /repos/{owner}/{repo}/import": Operation<"/repos/{owner}/{repo}/import", "delete">; + /** + * @see https://docs.github.com/rest/reference/interactions#remove-interaction-restrictions-for-a-repository + */ + "DELETE /repos/{owner}/{repo}/interaction-limits": Operation<"/repos/{owner}/{repo}/interaction-limits", "delete">; + /** + * @see https://docs.github.com/rest/collaborators/invitations#delete-a-repository-invitation + */ + "DELETE /repos/{owner}/{repo}/invitations/{invitation_id}": Operation<"/repos/{owner}/{repo}/invitations/{invitation_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/issues#delete-an-issue-comment + */ + "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}": 
Operation<"/repos/{owner}/{repo}/issues/comments/{comment_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/reactions#delete-an-issue-comment-reaction + */ + "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}": Operation<"/repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/issues#remove-assignees-from-an-issue + */ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/assignees", "delete">; + /** + * @see https://docs.github.com/rest/reference/issues#remove-all-labels-from-an-issue + */ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/labels", "delete">; + /** + * @see https://docs.github.com/rest/reference/issues#remove-a-label-from-an-issue + */ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/labels/{name}", "delete">; + /** + * @see https://docs.github.com/rest/reference/issues#unlock-an-issue + */ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/lock", "delete">; + /** + * @see https://docs.github.com/rest/reference/reactions#delete-an-issue-reaction + */ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#delete-a-deploy-key + */ + "DELETE /repos/{owner}/{repo}/keys/{key_id}": Operation<"/repos/{owner}/{repo}/keys/{key_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/issues#delete-a-label + */ + "DELETE /repos/{owner}/{repo}/labels/{name}": Operation<"/repos/{owner}/{repo}/labels/{name}", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#disable-git-lfs-for-a-repository + */ + "DELETE /repos/{owner}/{repo}/lfs": Operation<"/repos/{owner}/{repo}/lfs", "delete">; + /** + * @see https://docs.github.com/rest/reference/issues#delete-a-milestone + */ + "DELETE /repos/{owner}/{repo}/milestones/{milestone_number}": Operation<"/repos/{owner}/{repo}/milestones/{milestone_number}", "delete">; + /** + * @see https://docs.github.com/rest/pages#delete-a-github-pages-site + */ + "DELETE /repos/{owner}/{repo}/pages": Operation<"/repos/{owner}/{repo}/pages", "delete">; + /** + * @see https://docs.github.com/rest/reference/pulls#delete-a-review-comment-for-a-pull-request + */ + "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}": Operation<"/repos/{owner}/{repo}/pulls/comments/{comment_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/reactions#delete-a-pull-request-comment-reaction + */ + "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}": Operation<"/repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/pulls#remove-requested-reviewers-from-a-pull-request + */ + "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", "delete">; + /** + * @see https://docs.github.com/rest/reference/pulls#delete-a-pending-review-for-a-pull-request + */ + "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}": 
Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#delete-a-release-asset + */ + "DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}": Operation<"/repos/{owner}/{repo}/releases/assets/{asset_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#delete-a-release + */ + "DELETE /repos/{owner}/{repo}/releases/{release_id}": Operation<"/repos/{owner}/{repo}/releases/{release_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/reactions/#delete-a-release-reaction + */ + "DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}": Operation<"/repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/activity#delete-a-repository-subscription + */ + "DELETE /repos/{owner}/{repo}/subscription": Operation<"/repos/{owner}/{repo}/subscription", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#delete-tag-protection-state-for-a-repository + */ + "DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}": Operation<"/repos/{owner}/{repo}/tags/protection/{tag_protection_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/repos#disable-vulnerability-alerts + */ + "DELETE /repos/{owner}/{repo}/vulnerability-alerts": Operation<"/repos/{owner}/{repo}/vulnerability-alerts", "delete">; + /** + * @see https://docs.github.com/rest/reference/actions#delete-an-environment-secret + */ + "DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}": Operation<"/repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}", "delete">; + /** + * @see https://docs.github.com/rest/reference/teams/#delete-a-team-legacy + */ + "DELETE /teams/{team_id}": Operation<"/teams/{team_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/teams#delete-a-discussion-legacy + */ + "DELETE /teams/{team_id}/discussions/{discussion_number}": Operation<"/teams/{team_id}/discussions/{discussion_number}", "delete">; + /** + * @see https://docs.github.com/rest/reference/teams#delete-a-discussion-comment-legacy + */ + "DELETE /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}": Operation<"/teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}", "delete">; + /** + * @see https://docs.github.com/rest/reference/teams#remove-team-member-legacy + */ + "DELETE /teams/{team_id}/members/{username}": Operation<"/teams/{team_id}/members/{username}", "delete">; + /** + * @see https://docs.github.com/rest/reference/teams#remove-team-membership-for-a-user-legacy + */ + "DELETE /teams/{team_id}/memberships/{username}": Operation<"/teams/{team_id}/memberships/{username}", "delete">; + /** + * @see https://docs.github.com/rest/reference/teams/#remove-a-project-from-a-team-legacy + */ + "DELETE /teams/{team_id}/projects/{project_id}": Operation<"/teams/{team_id}/projects/{project_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/teams/#remove-a-repository-from-a-team-legacy + */ + "DELETE /teams/{team_id}/repos/{owner}/{repo}": Operation<"/teams/{team_id}/repos/{owner}/{repo}", "delete">; + /** + * @see https://docs.github.com/rest/reference/users#unblock-a-user + */ + "DELETE /user/blocks/{username}": Operation<"/user/blocks/{username}", "delete">; + /** + * @see 
https://docs.github.com/rest/reference/codespaces#delete-a-secret-for-the-authenticated-user + */ + "DELETE /user/codespaces/secrets/{secret_name}": Operation<"/user/codespaces/secrets/{secret_name}", "delete">; + /** + * @see https://docs.github.com/rest/reference/codespaces#remove-a-selected-repository-from-a-user-secret + */ + "DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}": Operation<"/user/codespaces/secrets/{secret_name}/repositories/{repository_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/codespaces#delete-a-codespace-for-the-authenticated-user + */ + "DELETE /user/codespaces/{codespace_name}": Operation<"/user/codespaces/{codespace_name}", "delete">; + /** + * @see https://docs.github.com/rest/reference/users#delete-an-email-address-for-the-authenticated-user + */ + "DELETE /user/emails": Operation<"/user/emails", "delete">; + /** + * @see https://docs.github.com/rest/reference/users#unfollow-a-user + */ + "DELETE /user/following/{username}": Operation<"/user/following/{username}", "delete">; + /** + * @see https://docs.github.com/rest/reference/users#delete-a-gpg-key-for-the-authenticated-user + */ + "DELETE /user/gpg_keys/{gpg_key_id}": Operation<"/user/gpg_keys/{gpg_key_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/apps#remove-a-repository-from-an-app-installation + */ + "DELETE /user/installations/{installation_id}/repositories/{repository_id}": Operation<"/user/installations/{installation_id}/repositories/{repository_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/interactions#remove-interaction-restrictions-from-your-public-repositories + */ + "DELETE /user/interaction-limits": Operation<"/user/interaction-limits", "delete">; + /** + * @see https://docs.github.com/rest/reference/users#delete-a-public-ssh-key-for-the-authenticated-user + */ + "DELETE /user/keys/{key_id}": Operation<"/user/keys/{key_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/migrations#delete-a-user-migration-archive + */ + "DELETE /user/migrations/{migration_id}/archive": Operation<"/user/migrations/{migration_id}/archive", "delete">; + /** + * @see https://docs.github.com/rest/reference/migrations#unlock-a-user-repository + */ + "DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock": Operation<"/user/migrations/{migration_id}/repos/{repo_name}/lock", "delete">; + /** + * @see https://docs.github.com/rest/reference/packages#delete-a-package-for-the-authenticated-user + */ + "DELETE /user/packages/{package_type}/{package_name}": Operation<"/user/packages/{package_type}/{package_name}", "delete">; + /** + * @see https://docs.github.com/rest/reference/packages#delete-a-package-version-for-the-authenticated-user + */ + "DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}": Operation<"/user/packages/{package_type}/{package_name}/versions/{package_version_id}", "delete">; + /** + * @see https://docs.github.com/rest/collaborators/invitations#decline-a-repository-invitation + */ + "DELETE /user/repository_invitations/{invitation_id}": Operation<"/user/repository_invitations/{invitation_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/users#delete-a-ssh-signing-key-for-the-authenticated-user + */ + "DELETE /user/ssh_signing_keys/{ssh_signing_key_id}": Operation<"/user/ssh_signing_keys/{ssh_signing_key_id}", "delete">; + /** + * @see https://docs.github.com/rest/reference/activity#unstar-a-repository-for-the-authenticated-user + */ 
+ "DELETE /user/starred/{owner}/{repo}": Operation<"/user/starred/{owner}/{repo}", "delete">; + /** + * @see https://docs.github.com/rest/reference/packages#delete-a-package-for-a-user + */ + "DELETE /users/{username}/packages/{package_type}/{package_name}": Operation<"/users/{username}/packages/{package_type}/{package_name}", "delete">; + /** + * @see https://docs.github.com/rest/reference/packages#delete-a-package-version-for-a-user + */ + "DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}": Operation<"/users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}", "delete">; + /** + * @see https://docs.github.com/rest/overview/resources-in-the-rest-api#root-endpoint + */ + "GET /": Operation<"/", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#get-the-authenticated-app + */ + "GET /app": Operation<"/app", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#get-a-webhook-configuration-for-an-app + */ + "GET /app/hook/config": Operation<"/app/hook/config", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#list-deliveries-for-an-app-webhook + */ + "GET /app/hook/deliveries": Operation<"/app/hook/deliveries", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#get-a-delivery-for-an-app-webhook + */ + "GET /app/hook/deliveries/{delivery_id}": Operation<"/app/hook/deliveries/{delivery_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#list-installations-for-the-authenticated-app + */ + "GET /app/installations": Operation<"/app/installations", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#get-an-installation-for-the-authenticated-app + */ + "GET /app/installations/{installation_id}": Operation<"/app/installations/{installation_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/apps/#get-an-app + */ + "GET /apps/{app_slug}": Operation<"/apps/{app_slug}", "get">; + /** + * @see https://docs.github.com/rest/reference/codes-of-conduct#get-all-codes-of-conduct + */ + "GET /codes_of_conduct": Operation<"/codes_of_conduct", "get">; + /** + * @see https://docs.github.com/rest/reference/codes-of-conduct#get-a-code-of-conduct + */ + "GET /codes_of_conduct/{key}": Operation<"/codes_of_conduct/{key}", "get">; + /** + * @see https://docs.github.com/rest/reference/emojis#get-emojis + */ + "GET /emojis": Operation<"/emojis", "get">; + /** + * @see https://docs.github.com/rest/reference/enterprise-admin#get-github-enterprise-server-statistics + */ + "GET /enterprise-installation/{enterprise_or_org}/server-statistics": Operation<"/enterprise-installation/{enterprise_or_org}/server-statistics", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-github-actions-cache-usage-for-an-enterprise + */ + "GET /enterprises/{enterprise}/actions/cache/usage": Operation<"/enterprises/{enterprise}/actions/cache/usage", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-github-actions-permissions-for-an-enterprise + */ + "GET /enterprises/{enterprise}/actions/permissions": Operation<"/enterprises/{enterprise}/actions/permissions", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-selected-organizations-enabled-for-github-actions-in-an-enterprise + */ + "GET /enterprises/{enterprise}/actions/permissions/organizations": Operation<"/enterprises/{enterprise}/actions/permissions/organizations", "get">; + /** + * @see 
https://docs.github.com/rest/reference/actions#get-allowed-actions-for-an-enterprise + */ + "GET /enterprises/{enterprise}/actions/permissions/selected-actions": Operation<"/enterprises/{enterprise}/actions/permissions/selected-actions", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-default-workflow-permissions-for-an-enterprise + */ + "GET /enterprises/{enterprise}/actions/permissions/workflow": Operation<"/enterprises/{enterprise}/actions/permissions/workflow", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-self-hosted-runner-groups-for-an-enterprise + */ + "GET /enterprises/{enterprise}/actions/runner-groups": Operation<"/enterprises/{enterprise}/actions/runner-groups", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-a-self-hosted-runner-group-for-an-enterprise + */ + "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}": Operation<"/enterprises/{enterprise}/actions/runner-groups/{runner_group_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-organization-access-to-a-self-hosted-runner-group-in-a-enterprise + */ + "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations": Operation<"/enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-self-hosted-runners-in-a-group-for-an-enterprise + */ + "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners": Operation<"/enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-self-hosted-runners-for-an-enterprise + */ + "GET /enterprises/{enterprise}/actions/runners": Operation<"/enterprises/{enterprise}/actions/runners", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-runner-applications-for-an-enterprise + */ + "GET /enterprises/{enterprise}/actions/runners/downloads": Operation<"/enterprises/{enterprise}/actions/runners/downloads", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-a-self-hosted-runner-for-an-enterprise + */ + "GET /enterprises/{enterprise}/actions/runners/{runner_id}": Operation<"/enterprises/{enterprise}/actions/runners/{runner_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-labels-for-a-self-hosted-runner-for-an-enterprise + */ + "GET /enterprises/{enterprise}/actions/runners/{runner_id}/labels": Operation<"/enterprises/{enterprise}/actions/runners/{runner_id}/labels", "get">; + /** + * @see https://docs.github.com/rest/reference/code-scanning#list-code-scanning-alerts-for-an-enterprise + */ + "GET /enterprises/{enterprise}/code-scanning/alerts": Operation<"/enterprises/{enterprise}/code-scanning/alerts", "get">; + /** + * @see https://docs.github.com/rest/reference/secret-scanning#list-secret-scanning-alerts-for-an-enterprise + */ + "GET /enterprises/{enterprise}/secret-scanning/alerts": Operation<"/enterprises/{enterprise}/secret-scanning/alerts", "get">; + /** + * @see https://docs.github.com/rest/reference/billing#export-advanced-security-active-committers-data-for-enterprise + */ + "GET /enterprises/{enterprise}/settings/billing/advanced-security": Operation<"/enterprises/{enterprise}/settings/billing/advanced-security", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-public-events + */ + "GET /events": Operation<"/events", "get">; + /** + 
* @see https://docs.github.com/rest/reference/activity#get-feeds + */ + "GET /feeds": Operation<"/feeds", "get">; + /** + * @see https://docs.github.com/rest/reference/gists#list-gists-for-the-authenticated-user + */ + "GET /gists": Operation<"/gists", "get">; + /** + * @see https://docs.github.com/rest/reference/gists#list-public-gists + */ + "GET /gists/public": Operation<"/gists/public", "get">; + /** + * @see https://docs.github.com/rest/reference/gists#list-starred-gists + */ + "GET /gists/starred": Operation<"/gists/starred", "get">; + /** + * @see https://docs.github.com/rest/reference/gists#get-a-gist + */ + "GET /gists/{gist_id}": Operation<"/gists/{gist_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/gists#list-gist-comments + */ + "GET /gists/{gist_id}/comments": Operation<"/gists/{gist_id}/comments", "get">; + /** + * @see https://docs.github.com/rest/reference/gists#get-a-gist-comment + */ + "GET /gists/{gist_id}/comments/{comment_id}": Operation<"/gists/{gist_id}/comments/{comment_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/gists#list-gist-commits + */ + "GET /gists/{gist_id}/commits": Operation<"/gists/{gist_id}/commits", "get">; + /** + * @see https://docs.github.com/rest/reference/gists#list-gist-forks + */ + "GET /gists/{gist_id}/forks": Operation<"/gists/{gist_id}/forks", "get">; + /** + * @see https://docs.github.com/rest/reference/gists#check-if-a-gist-is-starred + */ + "GET /gists/{gist_id}/star": Operation<"/gists/{gist_id}/star", "get">; + /** + * @see https://docs.github.com/rest/reference/gists#get-a-gist-revision + */ + "GET /gists/{gist_id}/{sha}": Operation<"/gists/{gist_id}/{sha}", "get">; + /** + * @see https://docs.github.com/rest/reference/gitignore#get-all-gitignore-templates + */ + "GET /gitignore/templates": Operation<"/gitignore/templates", "get">; + /** + * @see https://docs.github.com/rest/reference/gitignore#get-a-gitignore-template + */ + "GET /gitignore/templates/{name}": Operation<"/gitignore/templates/{name}", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#list-repositories-accessible-to-the-app-installation + */ + "GET /installation/repositories": Operation<"/installation/repositories", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#list-issues-assigned-to-the-authenticated-user + */ + "GET /issues": Operation<"/issues", "get">; + /** + * @see https://docs.github.com/rest/reference/licenses#get-all-commonly-used-licenses + */ + "GET /licenses": Operation<"/licenses", "get">; + /** + * @see https://docs.github.com/rest/reference/licenses#get-a-license + */ + "GET /licenses/{license}": Operation<"/licenses/{license}", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#get-a-subscription-plan-for-an-account + */ + "GET /marketplace_listing/accounts/{account_id}": Operation<"/marketplace_listing/accounts/{account_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#list-plans + */ + "GET /marketplace_listing/plans": Operation<"/marketplace_listing/plans", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#list-accounts-for-a-plan + */ + "GET /marketplace_listing/plans/{plan_id}/accounts": Operation<"/marketplace_listing/plans/{plan_id}/accounts", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#get-a-subscription-plan-for-an-account-stubbed + */ + "GET /marketplace_listing/stubbed/accounts/{account_id}": Operation<"/marketplace_listing/stubbed/accounts/{account_id}", "get">; + /** + * @see 
https://docs.github.com/rest/reference/apps#list-plans-stubbed + */ + "GET /marketplace_listing/stubbed/plans": Operation<"/marketplace_listing/stubbed/plans", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#list-accounts-for-a-plan-stubbed + */ + "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts": Operation<"/marketplace_listing/stubbed/plans/{plan_id}/accounts", "get">; + /** + * @see https://docs.github.com/rest/reference/meta#get-github-meta-information + */ + "GET /meta": Operation<"/meta", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-public-events-for-a-network-of-repositories + */ + "GET /networks/{owner}/{repo}/events": Operation<"/networks/{owner}/{repo}/events", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-notifications-for-the-authenticated-user + */ + "GET /notifications": Operation<"/notifications", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#get-a-thread + */ + "GET /notifications/threads/{thread_id}": Operation<"/notifications/threads/{thread_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#get-a-thread-subscription-for-the-authenticated-user + */ + "GET /notifications/threads/{thread_id}/subscription": Operation<"/notifications/threads/{thread_id}/subscription", "get">; + /** + * @see https://docs.github.com/rest/reference/meta#get-octocat + */ + "GET /octocat": Operation<"/octocat", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-organizations + */ + "GET /organizations": Operation<"/organizations", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-custom-repository-roles-in-an-organization + */ + "GET /organizations/{organization_id}/custom_roles": Operation<"/organizations/{organization_id}/custom_roles", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#list-organization-secrets + */ + "GET /organizations/{org}/codespaces/secrets": Operation<"/organizations/{org}/codespaces/secrets", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#get-an-organization-public-key + */ + "GET /organizations/{org}/codespaces/secrets/public-key": Operation<"/organizations/{org}/codespaces/secrets/public-key", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#get-an-organization-secret + */ + "GET /organizations/{org}/codespaces/secrets/{secret_name}": Operation<"/organizations/{org}/codespaces/secrets/{secret_name}", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#list-selected-repositories-for-an-organization-secret + */ + "GET /organizations/{org}/codespaces/secrets/{secret_name}/repositories": Operation<"/organizations/{org}/codespaces/secrets/{secret_name}/repositories", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#list-in-organization + * @deprecated "org_id" is now "org" + */ + "GET /orgs/{org_id}/codespaces": Operation<"/orgs/{org}/codespaces", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#get-an-organization + */ + "GET /orgs/{org}": Operation<"/orgs/{org}", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-github-actions-cache-usage-for-an-organization + */ + "GET /orgs/{org}/actions/cache/usage": Operation<"/orgs/{org}/actions/cache/usage", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-repositories-with-github-actions-cache-usage-for-an-organization + */ + "GET 
/orgs/{org}/actions/cache/usage-by-repository": Operation<"/orgs/{org}/actions/cache/usage-by-repository", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-github-actions-permissions-for-an-organization + */ + "GET /orgs/{org}/actions/permissions": Operation<"/orgs/{org}/actions/permissions", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-selected-repositories-enabled-for-github-actions-in-an-organization + */ + "GET /orgs/{org}/actions/permissions/repositories": Operation<"/orgs/{org}/actions/permissions/repositories", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-allowed-actions-for-an-organization + */ + "GET /orgs/{org}/actions/permissions/selected-actions": Operation<"/orgs/{org}/actions/permissions/selected-actions", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-default-workflow-permissions + */ + "GET /orgs/{org}/actions/permissions/workflow": Operation<"/orgs/{org}/actions/permissions/workflow", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-self-hosted-runner-groups-for-an-organization + */ + "GET /orgs/{org}/actions/runner-groups": Operation<"/orgs/{org}/actions/runner-groups", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-a-self-hosted-runner-group-for-an-organization + */ + "GET /orgs/{org}/actions/runner-groups/{runner_group_id}": Operation<"/orgs/{org}/actions/runner-groups/{runner_group_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-repository-access-to-a-self-hosted-runner-group-in-an-organization + */ + "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories": Operation<"/orgs/{org}/actions/runner-groups/{runner_group_id}/repositories", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-self-hosted-runners-in-a-group-for-an-organization + */ + "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/runners": Operation<"/orgs/{org}/actions/runner-groups/{runner_group_id}/runners", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-self-hosted-runners-for-an-organization + */ + "GET /orgs/{org}/actions/runners": Operation<"/orgs/{org}/actions/runners", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-runner-applications-for-an-organization + */ + "GET /orgs/{org}/actions/runners/downloads": Operation<"/orgs/{org}/actions/runners/downloads", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-a-self-hosted-runner-for-an-organization + */ + "GET /orgs/{org}/actions/runners/{runner_id}": Operation<"/orgs/{org}/actions/runners/{runner_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-labels-for-a-self-hosted-runner-for-an-organization + */ + "GET /orgs/{org}/actions/runners/{runner_id}/labels": Operation<"/orgs/{org}/actions/runners/{runner_id}/labels", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-organization-secrets + */ + "GET /orgs/{org}/actions/secrets": Operation<"/orgs/{org}/actions/secrets", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-an-organization-public-key + */ + "GET /orgs/{org}/actions/secrets/public-key": Operation<"/orgs/{org}/actions/secrets/public-key", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-an-organization-secret + */ + "GET /orgs/{org}/actions/secrets/{secret_name}": Operation<"/orgs/{org}/actions/secrets/{secret_name}", "get">; + 
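// A minimal consumption sketch, not part of the vendored Endpoints.d.ts above: the Endpoints
// map is indexed by a "METHOD /path" key, and its "parameters" and "response" members give the
// request and response types for that route. The route key below appears in this file; the
// example option values are illustrative assumptions.
import { Endpoints } from "@octokit/types";

type ListOrgReposParameters = Endpoints["GET /orgs/{org}/repos"]["parameters"];
type ListOrgReposResponse = Endpoints["GET /orgs/{org}/repos"]["response"];

// "org" is the required parameter for this route; "type" narrows the listing (e.g. "public").
const params: ListOrgReposParameters = { org: "octokit", type: "public" };

// The response type carries the typed payload under "data".
const fullNames = (res: ListOrgReposResponse) => res.data.map((repo) => repo.full_name);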
/** + * @see https://docs.github.com/rest/reference/actions#list-selected-repositories-for-an-organization-secret + */ + "GET /orgs/{org}/actions/secrets/{secret_name}/repositories": Operation<"/orgs/{org}/actions/secrets/{secret_name}/repositories", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-users-blocked-by-an-organization + */ + "GET /orgs/{org}/blocks": Operation<"/orgs/{org}/blocks", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#check-if-a-user-is-blocked-by-an-organization + */ + "GET /orgs/{org}/blocks/{username}": Operation<"/orgs/{org}/blocks/{username}", "get">; + /** + * @see https://docs.github.com/rest/reference/code-scanning#list-code-scanning-alerts-by-organization + */ + "GET /orgs/{org}/code-scanning/alerts": Operation<"/orgs/{org}/code-scanning/alerts", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#list-in-organization + */ + "GET /orgs/{org}/codespaces": Operation<"/orgs/{org}/codespaces", "get">; + /** + * @see https://docs.github.com/rest/reference/dependabot#list-organization-secrets + */ + "GET /orgs/{org}/dependabot/secrets": Operation<"/orgs/{org}/dependabot/secrets", "get">; + /** + * @see https://docs.github.com/rest/reference/dependabot#get-an-organization-public-key + */ + "GET /orgs/{org}/dependabot/secrets/public-key": Operation<"/orgs/{org}/dependabot/secrets/public-key", "get">; + /** + * @see https://docs.github.com/rest/reference/dependabot#get-an-organization-secret + */ + "GET /orgs/{org}/dependabot/secrets/{secret_name}": Operation<"/orgs/{org}/dependabot/secrets/{secret_name}", "get">; + /** + * @see https://docs.github.com/rest/reference/dependabot#list-selected-repositories-for-an-organization-secret + */ + "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories": Operation<"/orgs/{org}/dependabot/secrets/{secret_name}/repositories", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-public-organization-events + */ + "GET /orgs/{org}/events": Operation<"/orgs/{org}/events", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-failed-organization-invitations + */ + "GET /orgs/{org}/failed_invitations": Operation<"/orgs/{org}/failed_invitations", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-fine-grained-permissions-for-an-organization + */ + "GET /orgs/{org}/fine_grained_permissions": Operation<"/orgs/{org}/fine_grained_permissions", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-organization-webhooks + */ + "GET /orgs/{org}/hooks": Operation<"/orgs/{org}/hooks", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#get-an-organization-webhook + */ + "GET /orgs/{org}/hooks/{hook_id}": Operation<"/orgs/{org}/hooks/{hook_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#get-a-webhook-configuration-for-an-organization + */ + "GET /orgs/{org}/hooks/{hook_id}/config": Operation<"/orgs/{org}/hooks/{hook_id}/config", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-deliveries-for-an-organization-webhook + */ + "GET /orgs/{org}/hooks/{hook_id}/deliveries": Operation<"/orgs/{org}/hooks/{hook_id}/deliveries", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#get-a-webhook-delivery-for-an-organization-webhook + */ + "GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}": Operation<"/orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}", "get">; + /** + * @see 
https://docs.github.com/rest/reference/apps#get-an-organization-installation-for-the-authenticated-app + */ + "GET /orgs/{org}/installation": Operation<"/orgs/{org}/installation", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-app-installations-for-an-organization + */ + "GET /orgs/{org}/installations": Operation<"/orgs/{org}/installations", "get">; + /** + * @see https://docs.github.com/rest/reference/interactions#get-interaction-restrictions-for-an-organization + */ + "GET /orgs/{org}/interaction-limits": Operation<"/orgs/{org}/interaction-limits", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-pending-organization-invitations + */ + "GET /orgs/{org}/invitations": Operation<"/orgs/{org}/invitations", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-organization-invitation-teams + */ + "GET /orgs/{org}/invitations/{invitation_id}/teams": Operation<"/orgs/{org}/invitations/{invitation_id}/teams", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#list-organization-issues-assigned-to-the-authenticated-user + */ + "GET /orgs/{org}/issues": Operation<"/orgs/{org}/issues", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-organization-members + */ + "GET /orgs/{org}/members": Operation<"/orgs/{org}/members", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#check-organization-membership-for-a-user + */ + "GET /orgs/{org}/members/{username}": Operation<"/orgs/{org}/members/{username}", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#get-organization-membership-for-a-user + */ + "GET /orgs/{org}/memberships/{username}": Operation<"/orgs/{org}/memberships/{username}", "get">; + /** + * @see https://docs.github.com/rest/reference/migrations#list-organization-migrations + */ + "GET /orgs/{org}/migrations": Operation<"/orgs/{org}/migrations", "get">; + /** + * @see https://docs.github.com/rest/reference/migrations#get-an-organization-migration-status + */ + "GET /orgs/{org}/migrations/{migration_id}": Operation<"/orgs/{org}/migrations/{migration_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/migrations#download-an-organization-migration-archive + */ + "GET /orgs/{org}/migrations/{migration_id}/archive": Operation<"/orgs/{org}/migrations/{migration_id}/archive", "get">; + /** + * @see https://docs.github.com/rest/reference/migrations#list-repositories-in-an-organization-migration + */ + "GET /orgs/{org}/migrations/{migration_id}/repositories": Operation<"/orgs/{org}/migrations/{migration_id}/repositories", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-outside-collaborators-for-an-organization + */ + "GET /orgs/{org}/outside_collaborators": Operation<"/orgs/{org}/outside_collaborators", "get">; + /** + * @see https://docs.github.com/rest/reference/packages#list-packages-for-an-organization + */ + "GET /orgs/{org}/packages": Operation<"/orgs/{org}/packages", "get">; + /** + * @see https://docs.github.com/rest/reference/packages#get-a-package-for-an-organization + */ + "GET /orgs/{org}/packages/{package_type}/{package_name}": Operation<"/orgs/{org}/packages/{package_type}/{package_name}", "get">; + /** + * @see https://docs.github.com/rest/packages#get-all-package-versions-for-a-package-owned-by-an-organization + */ + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions": Operation<"/orgs/{org}/packages/{package_type}/{package_name}/versions", "get">; + /** + * @see 
https://docs.github.com/rest/reference/packages#get-a-package-version-for-an-organization + */ + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}": Operation<"/orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/projects#list-organization-projects + */ + "GET /orgs/{org}/projects": Operation<"/orgs/{org}/projects", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-public-organization-members + */ + "GET /orgs/{org}/public_members": Operation<"/orgs/{org}/public_members", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#check-public-organization-membership-for-a-user + */ + "GET /orgs/{org}/public_members/{username}": Operation<"/orgs/{org}/public_members/{username}", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-organization-repositories + */ + "GET /orgs/{org}/repos": Operation<"/orgs/{org}/repos", "get">; + /** + * @see https://docs.github.com/rest/reference/secret-scanning#list-secret-scanning-alerts-for-an-organization + */ + "GET /orgs/{org}/secret-scanning/alerts": Operation<"/orgs/{org}/secret-scanning/alerts", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-security-manager-teams + */ + "GET /orgs/{org}/security-managers": Operation<"/orgs/{org}/security-managers", "get">; + /** + * @see https://docs.github.com/rest/reference/billing#get-github-actions-billing-for-an-organization + */ + "GET /orgs/{org}/settings/billing/actions": Operation<"/orgs/{org}/settings/billing/actions", "get">; + /** + * @see https://docs.github.com/rest/reference/billing#get-github-advanced-security-active-committers-for-an-organization + */ + "GET /orgs/{org}/settings/billing/advanced-security": Operation<"/orgs/{org}/settings/billing/advanced-security", "get">; + /** + * @see https://docs.github.com/rest/reference/billing#get-github-packages-billing-for-an-organization + */ + "GET /orgs/{org}/settings/billing/packages": Operation<"/orgs/{org}/settings/billing/packages", "get">; + /** + * @see https://docs.github.com/rest/reference/billing#get-shared-storage-billing-for-an-organization + */ + "GET /orgs/{org}/settings/billing/shared-storage": Operation<"/orgs/{org}/settings/billing/shared-storage", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#list-teams + */ + "GET /orgs/{org}/teams": Operation<"/orgs/{org}/teams", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#get-a-team-by-name + */ + "GET /orgs/{org}/teams/{team_slug}": Operation<"/orgs/{org}/teams/{team_slug}", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#list-discussions + */ + "GET /orgs/{org}/teams/{team_slug}/discussions": Operation<"/orgs/{org}/teams/{team_slug}/discussions", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#get-a-discussion + */ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}": Operation<"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#list-discussion-comments + */ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments": Operation<"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#get-a-discussion-comment + */ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}": 
Operation<"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}", "get">; + /** + * @see https://docs.github.com/rest/reference/reactions#list-reactions-for-a-team-discussion-comment + */ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions": Operation<"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", "get">; + /** + * @see https://docs.github.com/rest/reference/reactions#list-reactions-for-a-team-discussion + */ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions": Operation<"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#list-pending-team-invitations + */ + "GET /orgs/{org}/teams/{team_slug}/invitations": Operation<"/orgs/{org}/teams/{team_slug}/invitations", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#list-team-members + */ + "GET /orgs/{org}/teams/{team_slug}/members": Operation<"/orgs/{org}/teams/{team_slug}/members", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#get-team-membership-for-a-user + */ + "GET /orgs/{org}/teams/{team_slug}/memberships/{username}": Operation<"/orgs/{org}/teams/{team_slug}/memberships/{username}", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#list-team-projects + */ + "GET /orgs/{org}/teams/{team_slug}/projects": Operation<"/orgs/{org}/teams/{team_slug}/projects", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#check-team-permissions-for-a-project + */ + "GET /orgs/{org}/teams/{team_slug}/projects/{project_id}": Operation<"/orgs/{org}/teams/{team_slug}/projects/{project_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#list-team-repositories + */ + "GET /orgs/{org}/teams/{team_slug}/repos": Operation<"/orgs/{org}/teams/{team_slug}/repos", "get">; + /** + * @see https://docs.github.com/rest/reference/teams/#check-team-permissions-for-a-repository + */ + "GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}": Operation<"/orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#list-child-teams + */ + "GET /orgs/{org}/teams/{team_slug}/teams": Operation<"/orgs/{org}/teams/{team_slug}/teams", "get">; + /** + * @see https://docs.github.com/rest/reference/projects#get-a-project-card + */ + "GET /projects/columns/cards/{card_id}": Operation<"/projects/columns/cards/{card_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/projects#get-a-project-column + */ + "GET /projects/columns/{column_id}": Operation<"/projects/columns/{column_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/projects#list-project-cards + */ + "GET /projects/columns/{column_id}/cards": Operation<"/projects/columns/{column_id}/cards", "get">; + /** + * @see https://docs.github.com/rest/reference/projects#get-a-project + */ + "GET /projects/{project_id}": Operation<"/projects/{project_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/projects#list-project-collaborators + */ + "GET /projects/{project_id}/collaborators": Operation<"/projects/{project_id}/collaborators", "get">; + /** + * @see https://docs.github.com/rest/reference/projects#get-project-permission-for-a-user + */ + "GET /projects/{project_id}/collaborators/{username}/permission": Operation<"/projects/{project_id}/collaborators/{username}/permission", 
"get">; + /** + * @see https://docs.github.com/rest/reference/projects#list-project-columns + */ + "GET /projects/{project_id}/columns": Operation<"/projects/{project_id}/columns", "get">; + /** + * @see https://docs.github.com/rest/reference/rate-limit#get-rate-limit-status-for-the-authenticated-user + */ + "GET /rate_limit": Operation<"/rate_limit", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-a-repository + */ + "GET /repos/{owner}/{repo}": Operation<"/repos/{owner}/{repo}", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-artifacts-for-a-repository + */ + "GET /repos/{owner}/{repo}/actions/artifacts": Operation<"/repos/{owner}/{repo}/actions/artifacts", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-an-artifact + */ + "GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}": Operation<"/repos/{owner}/{repo}/actions/artifacts/{artifact_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#download-an-artifact + */ + "GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}": Operation<"/repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-github-actions-cache-usage-for-a-repository + */ + "GET /repos/{owner}/{repo}/actions/cache/usage": Operation<"/repos/{owner}/{repo}/actions/cache/usage", "get">; + /** + * @see https://docs.github.com/rest/actions/cache#list-github-actions-caches-for-a-repository + */ + "GET /repos/{owner}/{repo}/actions/caches": Operation<"/repos/{owner}/{repo}/actions/caches", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-a-job-for-a-workflow-run + */ + "GET /repos/{owner}/{repo}/actions/jobs/{job_id}": Operation<"/repos/{owner}/{repo}/actions/jobs/{job_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#download-job-logs-for-a-workflow-run + */ + "GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs": Operation<"/repos/{owner}/{repo}/actions/jobs/{job_id}/logs", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-github-actions-permissions-for-a-repository + */ + "GET /repos/{owner}/{repo}/actions/permissions": Operation<"/repos/{owner}/{repo}/actions/permissions", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-workflow-access-level-to-a-repository + */ + "GET /repos/{owner}/{repo}/actions/permissions/access": Operation<"/repos/{owner}/{repo}/actions/permissions/access", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-allowed-actions-for-a-repository + */ + "GET /repos/{owner}/{repo}/actions/permissions/selected-actions": Operation<"/repos/{owner}/{repo}/actions/permissions/selected-actions", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-default-workflow-permissions-for-a-repository + */ + "GET /repos/{owner}/{repo}/actions/permissions/workflow": Operation<"/repos/{owner}/{repo}/actions/permissions/workflow", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-self-hosted-runners-for-a-repository + */ + "GET /repos/{owner}/{repo}/actions/runners": Operation<"/repos/{owner}/{repo}/actions/runners", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-runner-applications-for-a-repository + */ + "GET /repos/{owner}/{repo}/actions/runners/downloads": Operation<"/repos/{owner}/{repo}/actions/runners/downloads", "get">; + /** + * @see 
https://docs.github.com/rest/reference/actions#get-a-self-hosted-runner-for-a-repository + */ + "GET /repos/{owner}/{repo}/actions/runners/{runner_id}": Operation<"/repos/{owner}/{repo}/actions/runners/{runner_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-labels-for-a-self-hosted-runner-for-a-repository + */ + "GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels": Operation<"/repos/{owner}/{repo}/actions/runners/{runner_id}/labels", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-workflow-runs-for-a-repository + */ + "GET /repos/{owner}/{repo}/actions/runs": Operation<"/repos/{owner}/{repo}/actions/runs", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-a-workflow-run + */ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-the-review-history-for-a-workflow-run + */ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/approvals", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-workflow-run-artifacts + */ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-a-workflow-run-attempt + */ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-jobs-for-a-workflow-run-attempt + */ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#download-workflow-run-attempt-logs + */ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-jobs-for-a-workflow-run + */ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/jobs", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#download-workflow-run-logs + */ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/logs", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-pending-deployments-for-a-workflow-run + */ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-workflow-run-usage + */ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/timing", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-repository-secrets + */ + "GET /repos/{owner}/{repo}/actions/secrets": Operation<"/repos/{owner}/{repo}/actions/secrets", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-a-repository-public-key + */ + "GET /repos/{owner}/{repo}/actions/secrets/public-key": Operation<"/repos/{owner}/{repo}/actions/secrets/public-key", 
"get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-a-repository-secret + */ + "GET /repos/{owner}/{repo}/actions/secrets/{secret_name}": Operation<"/repos/{owner}/{repo}/actions/secrets/{secret_name}", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-repository-workflows + */ + "GET /repos/{owner}/{repo}/actions/workflows": Operation<"/repos/{owner}/{repo}/actions/workflows", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-a-workflow + */ + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}": Operation<"/repos/{owner}/{repo}/actions/workflows/{workflow_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-workflow-runs + */ + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs": Operation<"/repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-workflow-usage + */ + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing": Operation<"/repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#list-assignees + */ + "GET /repos/{owner}/{repo}/assignees": Operation<"/repos/{owner}/{repo}/assignees", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#check-if-a-user-can-be-assigned + */ + "GET /repos/{owner}/{repo}/assignees/{assignee}": Operation<"/repos/{owner}/{repo}/assignees/{assignee}", "get">; + /** + * @see https://docs.github.com/v3/repos#list-autolinks + */ + "GET /repos/{owner}/{repo}/autolinks": Operation<"/repos/{owner}/{repo}/autolinks", "get">; + /** + * @see https://docs.github.com/v3/repos#get-autolink + */ + "GET /repos/{owner}/{repo}/autolinks/{autolink_id}": Operation<"/repos/{owner}/{repo}/autolinks/{autolink_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-branches + */ + "GET /repos/{owner}/{repo}/branches": Operation<"/repos/{owner}/{repo}/branches", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-a-branch + */ + "GET /repos/{owner}/{repo}/branches/{branch}": Operation<"/repos/{owner}/{repo}/branches/{branch}", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-branch-protection + */ + "GET /repos/{owner}/{repo}/branches/{branch}/protection": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-admin-branch-protection + */ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-pull-request-review-protection + */ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-commit-signature-protection + */ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-status-checks-protection + */ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", "get">; + /** + * @see 
https://docs.github.com/rest/reference/repos#get-all-status-check-contexts + */ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-access-restrictions + */ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-apps-with-access-to-the-protected-branch + */ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-teams-with-access-to-the-protected-branch + */ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-users-with-access-to-the-protected-branch + */ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", "get">; + /** + * @see https://docs.github.com/rest/reference/checks#get-a-check-run + */ + "GET /repos/{owner}/{repo}/check-runs/{check_run_id}": Operation<"/repos/{owner}/{repo}/check-runs/{check_run_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/checks#list-check-run-annotations + */ + "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations": Operation<"/repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", "get">; + /** + * @see https://docs.github.com/rest/reference/checks#get-a-check-suite + */ + "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}": Operation<"/repos/{owner}/{repo}/check-suites/{check_suite_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/checks#list-check-runs-in-a-check-suite + */ + "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs": Operation<"/repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", "get">; + /** + * @see https://docs.github.com/rest/reference/code-scanning#list-code-scanning-alerts-for-a-repository + */ + "GET /repos/{owner}/{repo}/code-scanning/alerts": Operation<"/repos/{owner}/{repo}/code-scanning/alerts", "get">; + /** + * @see https://docs.github.com/rest/reference/code-scanning#get-a-code-scanning-alert + * @deprecated "alert_id" is now "alert_number" + */ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_id}": Operation<"/repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", "get">; + /** + * @see https://docs.github.com/rest/reference/code-scanning#get-a-code-scanning-alert + */ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}": Operation<"/repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", "get">; + /** + * @see https://docs.github.com/rest/reference/code-scanning#list-instances-of-a-code-scanning-alert + */ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances": Operation<"/repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", "get">; + /** + * @see https://docs.github.com/rest/reference/code-scanning#list-code-scanning-analyses-for-a-repository + */ + "GET /repos/{owner}/{repo}/code-scanning/analyses": 
Operation<"/repos/{owner}/{repo}/code-scanning/analyses", "get">; + /** + * @see https://docs.github.com/rest/reference/code-scanning#get-a-code-scanning-analysis-for-a-repository + */ + "GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}": Operation<"/repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/code-scanning#list-codeql-databases + */ + "GET /repos/{owner}/{repo}/code-scanning/codeql/databases": Operation<"/repos/{owner}/{repo}/code-scanning/codeql/databases", "get">; + /** + * @see https://docs.github.com/rest/reference/code-scanning#get-codeql-database + */ + "GET /repos/{owner}/{repo}/code-scanning/codeql/databases/{language}": Operation<"/repos/{owner}/{repo}/code-scanning/codeql/databases/{language}", "get">; + /** + * @see https://docs.github.com/rest/reference/code-scanning#list-recent-code-scanning-analyses-for-a-repository + */ + "GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}": Operation<"/repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-codeowners-errors + */ + "GET /repos/{owner}/{repo}/codeowners/errors": Operation<"/repos/{owner}/{repo}/codeowners/errors", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#list-codespaces-in-a-repository-for-the-authenticated-user + */ + "GET /repos/{owner}/{repo}/codespaces": Operation<"/repos/{owner}/{repo}/codespaces", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#list-devcontainers-in-a-repository-for-the-authenticated-user + */ + "GET /repos/{owner}/{repo}/codespaces/devcontainers": Operation<"/repos/{owner}/{repo}/codespaces/devcontainers", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#list-available-machine-types-for-a-repository + */ + "GET /repos/{owner}/{repo}/codespaces/machines": Operation<"/repos/{owner}/{repo}/codespaces/machines", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#preview-attributes-for-a-new-codespace + */ + "GET /repos/{owner}/{repo}/codespaces/new": Operation<"/repos/{owner}/{repo}/codespaces/new", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#list-repository-secrets + */ + "GET /repos/{owner}/{repo}/codespaces/secrets": Operation<"/repos/{owner}/{repo}/codespaces/secrets", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#get-a-repository-public-key + */ + "GET /repos/{owner}/{repo}/codespaces/secrets/public-key": Operation<"/repos/{owner}/{repo}/codespaces/secrets/public-key", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#get-a-repository-secret + */ + "GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}": Operation<"/repos/{owner}/{repo}/codespaces/secrets/{secret_name}", "get">; + /** + * @see https://docs.github.com/rest/collaborators/collaborators#list-repository-collaborators + */ + "GET /repos/{owner}/{repo}/collaborators": Operation<"/repos/{owner}/{repo}/collaborators", "get">; + /** + * @see https://docs.github.com/rest/collaborators/collaborators#check-if-a-user-is-a-repository-collaborator + */ + "GET /repos/{owner}/{repo}/collaborators/{username}": Operation<"/repos/{owner}/{repo}/collaborators/{username}", "get">; + /** + * @see https://docs.github.com/rest/collaborators/collaborators#get-repository-permissions-for-a-user + */ + "GET /repos/{owner}/{repo}/collaborators/{username}/permission": 
Operation<"/repos/{owner}/{repo}/collaborators/{username}/permission", "get">; + /** + * @see https://docs.github.com/rest/commits/comments#list-commit-comments-for-a-repository + */ + "GET /repos/{owner}/{repo}/comments": Operation<"/repos/{owner}/{repo}/comments", "get">; + /** + * @see https://docs.github.com/rest/commits/comments#get-a-commit-comment + */ + "GET /repos/{owner}/{repo}/comments/{comment_id}": Operation<"/repos/{owner}/{repo}/comments/{comment_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/reactions#list-reactions-for-a-commit-comment + */ + "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions": Operation<"/repos/{owner}/{repo}/comments/{comment_id}/reactions", "get">; + /** + * @see https://docs.github.com/rest/commits/commits#list-commits + */ + "GET /repos/{owner}/{repo}/commits": Operation<"/repos/{owner}/{repo}/commits", "get">; + /** + * @see https://docs.github.com/rest/commits/commits#list-branches-for-head-commit + */ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head": Operation<"/repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head", "get">; + /** + * @see https://docs.github.com/rest/commits/comments#list-commit-comments + */ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments": Operation<"/repos/{owner}/{repo}/commits/{commit_sha}/comments", "get">; + /** + * @see https://docs.github.com/rest/commits/commits#list-pull-requests-associated-with-a-commit + */ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls": Operation<"/repos/{owner}/{repo}/commits/{commit_sha}/pulls", "get">; + /** + * @see https://docs.github.com/rest/commits/commits#get-a-commit + */ + "GET /repos/{owner}/{repo}/commits/{ref}": Operation<"/repos/{owner}/{repo}/commits/{ref}", "get">; + /** + * @see https://docs.github.com/rest/reference/checks#list-check-runs-for-a-git-reference + */ + "GET /repos/{owner}/{repo}/commits/{ref}/check-runs": Operation<"/repos/{owner}/{repo}/commits/{ref}/check-runs", "get">; + /** + * @see https://docs.github.com/rest/reference/checks#list-check-suites-for-a-git-reference + */ + "GET /repos/{owner}/{repo}/commits/{ref}/check-suites": Operation<"/repos/{owner}/{repo}/commits/{ref}/check-suites", "get">; + /** + * @see https://docs.github.com/rest/commits/statuses#get-the-combined-status-for-a-specific-reference + */ + "GET /repos/{owner}/{repo}/commits/{ref}/status": Operation<"/repos/{owner}/{repo}/commits/{ref}/status", "get">; + /** + * @see https://docs.github.com/rest/commits/statuses#list-commit-statuses-for-a-reference + */ + "GET /repos/{owner}/{repo}/commits/{ref}/statuses": Operation<"/repos/{owner}/{repo}/commits/{ref}/statuses", "get">; + /** + * @see https://docs.github.com/rest/metrics/community#get-community-profile-metrics + */ + "GET /repos/{owner}/{repo}/community/profile": Operation<"/repos/{owner}/{repo}/community/profile", "get">; + /** + * @see https://docs.github.com/rest/commits/commits#compare-two-commits + */ + "GET /repos/{owner}/{repo}/compare/{basehead}": Operation<"/repos/{owner}/{repo}/compare/{basehead}", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#compare-two-commits + */ + "GET /repos/{owner}/{repo}/compare/{base}...{head}": Operation<"/repos/{owner}/{repo}/compare/{base}...{head}", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-repository-content + */ + "GET /repos/{owner}/{repo}/contents/{path}": Operation<"/repos/{owner}/{repo}/contents/{path}", "get">; + /** + * @see 
https://docs.github.com/rest/reference/repos#list-repository-contributors + */ + "GET /repos/{owner}/{repo}/contributors": Operation<"/repos/{owner}/{repo}/contributors", "get">; + /** + * @see https://docs.github.com/rest/reference/dependabot#list-dependabot-alerts-for-a-repository + */ + "GET /repos/{owner}/{repo}/dependabot/alerts": Operation<"/repos/{owner}/{repo}/dependabot/alerts", "get">; + /** + * @see https://docs.github.com/rest/reference/dependabot#get-a-dependabot-alert + */ + "GET /repos/{owner}/{repo}/dependabot/alerts/{alert_number}": Operation<"/repos/{owner}/{repo}/dependabot/alerts/{alert_number}", "get">; + /** + * @see https://docs.github.com/rest/reference/dependabot#list-repository-secrets + */ + "GET /repos/{owner}/{repo}/dependabot/secrets": Operation<"/repos/{owner}/{repo}/dependabot/secrets", "get">; + /** + * @see https://docs.github.com/rest/reference/dependabot#get-a-repository-public-key + */ + "GET /repos/{owner}/{repo}/dependabot/secrets/public-key": Operation<"/repos/{owner}/{repo}/dependabot/secrets/public-key", "get">; + /** + * @see https://docs.github.com/rest/reference/dependabot#get-a-repository-secret + */ + "GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}": Operation<"/repos/{owner}/{repo}/dependabot/secrets/{secret_name}", "get">; + /** + * @see https://docs.github.com/rest/reference/dependency-graph#get-a-diff-of-the-dependencies-between-commits + */ + "GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}": Operation<"/repos/{owner}/{repo}/dependency-graph/compare/{basehead}", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-deployments + */ + "GET /repos/{owner}/{repo}/deployments": Operation<"/repos/{owner}/{repo}/deployments", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-a-deployment + */ + "GET /repos/{owner}/{repo}/deployments/{deployment_id}": Operation<"/repos/{owner}/{repo}/deployments/{deployment_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-deployment-statuses + */ + "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses": Operation<"/repos/{owner}/{repo}/deployments/{deployment_id}/statuses", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-a-deployment-status + */ + "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}": Operation<"/repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}", "get">; + /** + * @see https://docs.github.com/rest/deployments/environments#list-environments + */ + "GET /repos/{owner}/{repo}/environments": Operation<"/repos/{owner}/{repo}/environments", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-an-environment + */ + "GET /repos/{owner}/{repo}/environments/{environment_name}": Operation<"/repos/{owner}/{repo}/environments/{environment_name}", "get">; + /** + * @see https://docs.github.com/rest/deployments/branch-policies#list-deployment-branch-policies + */ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies": Operation<"/repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies", "get">; + /** + * @see https://docs.github.com/rest/deployments/branch-policies#get-deployment-branch-policy + */ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}": Operation<"/repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}", "get">; + /** + * @see 
https://docs.github.com/rest/reference/activity#list-repository-events + */ + "GET /repos/{owner}/{repo}/events": Operation<"/repos/{owner}/{repo}/events", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-forks + */ + "GET /repos/{owner}/{repo}/forks": Operation<"/repos/{owner}/{repo}/forks", "get">; + /** + * @see https://docs.github.com/rest/reference/git#get-a-blob + */ + "GET /repos/{owner}/{repo}/git/blobs/{file_sha}": Operation<"/repos/{owner}/{repo}/git/blobs/{file_sha}", "get">; + /** + * @see https://docs.github.com/rest/reference/git#get-a-commit + */ + "GET /repos/{owner}/{repo}/git/commits/{commit_sha}": Operation<"/repos/{owner}/{repo}/git/commits/{commit_sha}", "get">; + /** + * @see https://docs.github.com/rest/reference/git#list-matching-references + */ + "GET /repos/{owner}/{repo}/git/matching-refs/{ref}": Operation<"/repos/{owner}/{repo}/git/matching-refs/{ref}", "get">; + /** + * @see https://docs.github.com/rest/reference/git#get-a-reference + */ + "GET /repos/{owner}/{repo}/git/ref/{ref}": Operation<"/repos/{owner}/{repo}/git/ref/{ref}", "get">; + /** + * @see https://docs.github.com/rest/reference/git#get-a-tag + */ + "GET /repos/{owner}/{repo}/git/tags/{tag_sha}": Operation<"/repos/{owner}/{repo}/git/tags/{tag_sha}", "get">; + /** + * @see https://docs.github.com/rest/reference/git#get-a-tree + */ + "GET /repos/{owner}/{repo}/git/trees/{tree_sha}": Operation<"/repos/{owner}/{repo}/git/trees/{tree_sha}", "get">; + /** + * @see https://docs.github.com/rest/webhooks/repos#list-repository-webhooks + */ + "GET /repos/{owner}/{repo}/hooks": Operation<"/repos/{owner}/{repo}/hooks", "get">; + /** + * @see https://docs.github.com/rest/webhooks/repos#get-a-repository-webhook + */ + "GET /repos/{owner}/{repo}/hooks/{hook_id}": Operation<"/repos/{owner}/{repo}/hooks/{hook_id}", "get">; + /** + * @see https://docs.github.com/rest/webhooks/repo-config#get-a-webhook-configuration-for-a-repository + */ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/config": Operation<"/repos/{owner}/{repo}/hooks/{hook_id}/config", "get">; + /** + * @see https://docs.github.com/rest/webhooks/repo-deliveries#list-deliveries-for-a-repository-webhook + */ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries": Operation<"/repos/{owner}/{repo}/hooks/{hook_id}/deliveries", "get">; + /** + * @see https://docs.github.com/rest/webhooks/repo-deliveries#get-a-delivery-for-a-repository-webhook + */ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}": Operation<"/repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/migrations#get-an-import-status + */ + "GET /repos/{owner}/{repo}/import": Operation<"/repos/{owner}/{repo}/import", "get">; + /** + * @see https://docs.github.com/rest/reference/migrations#get-commit-authors + */ + "GET /repos/{owner}/{repo}/import/authors": Operation<"/repos/{owner}/{repo}/import/authors", "get">; + /** + * @see https://docs.github.com/rest/reference/migrations#get-large-files + */ + "GET /repos/{owner}/{repo}/import/large_files": Operation<"/repos/{owner}/{repo}/import/large_files", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#get-a-repository-installation-for-the-authenticated-app + */ + "GET /repos/{owner}/{repo}/installation": Operation<"/repos/{owner}/{repo}/installation", "get">; + /** + * @see https://docs.github.com/rest/reference/interactions#get-interaction-restrictions-for-a-repository + */ + "GET 
/repos/{owner}/{repo}/interaction-limits": Operation<"/repos/{owner}/{repo}/interaction-limits", "get">; + /** + * @see https://docs.github.com/rest/collaborators/invitations#list-repository-invitations + */ + "GET /repos/{owner}/{repo}/invitations": Operation<"/repos/{owner}/{repo}/invitations", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#list-repository-issues + */ + "GET /repos/{owner}/{repo}/issues": Operation<"/repos/{owner}/{repo}/issues", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#list-issue-comments-for-a-repository + */ + "GET /repos/{owner}/{repo}/issues/comments": Operation<"/repos/{owner}/{repo}/issues/comments", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#get-an-issue-comment + */ + "GET /repos/{owner}/{repo}/issues/comments/{comment_id}": Operation<"/repos/{owner}/{repo}/issues/comments/{comment_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/reactions#list-reactions-for-an-issue-comment + */ + "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions": Operation<"/repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#list-issue-events-for-a-repository + */ + "GET /repos/{owner}/{repo}/issues/events": Operation<"/repos/{owner}/{repo}/issues/events", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#get-an-issue-event + */ + "GET /repos/{owner}/{repo}/issues/events/{event_id}": Operation<"/repos/{owner}/{repo}/issues/events/{event_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#get-an-issue + */ + "GET /repos/{owner}/{repo}/issues/{issue_number}": Operation<"/repos/{owner}/{repo}/issues/{issue_number}", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#list-issue-comments + */ + "GET /repos/{owner}/{repo}/issues/{issue_number}/comments": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/comments", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#list-issue-events + */ + "GET /repos/{owner}/{repo}/issues/{issue_number}/events": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/events", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#list-labels-for-an-issue + */ + "GET /repos/{owner}/{repo}/issues/{issue_number}/labels": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/labels", "get">; + /** + * @see https://docs.github.com/rest/reference/reactions#list-reactions-for-an-issue + */ + "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/reactions", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#list-timeline-events-for-an-issue + */ + "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/timeline", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-deploy-keys + */ + "GET /repos/{owner}/{repo}/keys": Operation<"/repos/{owner}/{repo}/keys", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-a-deploy-key + */ + "GET /repos/{owner}/{repo}/keys/{key_id}": Operation<"/repos/{owner}/{repo}/keys/{key_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#list-labels-for-a-repository + */ + "GET /repos/{owner}/{repo}/labels": Operation<"/repos/{owner}/{repo}/labels", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#get-a-label + */ + "GET 
/repos/{owner}/{repo}/labels/{name}": Operation<"/repos/{owner}/{repo}/labels/{name}", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-repository-languages + */ + "GET /repos/{owner}/{repo}/languages": Operation<"/repos/{owner}/{repo}/languages", "get">; + /** + * @see https://docs.github.com/rest/reference/licenses/#get-the-license-for-a-repository + */ + "GET /repos/{owner}/{repo}/license": Operation<"/repos/{owner}/{repo}/license", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#list-milestones + */ + "GET /repos/{owner}/{repo}/milestones": Operation<"/repos/{owner}/{repo}/milestones", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#get-a-milestone + */ + "GET /repos/{owner}/{repo}/milestones/{milestone_number}": Operation<"/repos/{owner}/{repo}/milestones/{milestone_number}", "get">; + /** + * @see https://docs.github.com/rest/reference/issues#list-labels-for-issues-in-a-milestone + */ + "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels": Operation<"/repos/{owner}/{repo}/milestones/{milestone_number}/labels", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-repository-notifications-for-the-authenticated-user + */ + "GET /repos/{owner}/{repo}/notifications": Operation<"/repos/{owner}/{repo}/notifications", "get">; + /** + * @see https://docs.github.com/rest/pages#get-a-github-pages-site + */ + "GET /repos/{owner}/{repo}/pages": Operation<"/repos/{owner}/{repo}/pages", "get">; + /** + * @see https://docs.github.com/rest/pages#list-github-pages-builds + */ + "GET /repos/{owner}/{repo}/pages/builds": Operation<"/repos/{owner}/{repo}/pages/builds", "get">; + /** + * @see https://docs.github.com/rest/pages#get-latest-pages-build + */ + "GET /repos/{owner}/{repo}/pages/builds/latest": Operation<"/repos/{owner}/{repo}/pages/builds/latest", "get">; + /** + * @see https://docs.github.com/rest/pages#get-github-pages-build + */ + "GET /repos/{owner}/{repo}/pages/builds/{build_id}": Operation<"/repos/{owner}/{repo}/pages/builds/{build_id}", "get">; + /** + * @see https://docs.github.com/rest/pages#get-a-dns-health-check-for-github-pages + */ + "GET /repos/{owner}/{repo}/pages/health": Operation<"/repos/{owner}/{repo}/pages/health", "get">; + /** + * @see https://docs.github.com/rest/reference/projects#list-repository-projects + */ + "GET /repos/{owner}/{repo}/projects": Operation<"/repos/{owner}/{repo}/projects", "get">; + /** + * @see https://docs.github.com/rest/reference/pulls#list-pull-requests + */ + "GET /repos/{owner}/{repo}/pulls": Operation<"/repos/{owner}/{repo}/pulls", "get">; + /** + * @see https://docs.github.com/rest/reference/pulls#list-review-comments-in-a-repository + */ + "GET /repos/{owner}/{repo}/pulls/comments": Operation<"/repos/{owner}/{repo}/pulls/comments", "get">; + /** + * @see https://docs.github.com/rest/reference/pulls#get-a-review-comment-for-a-pull-request + */ + "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}": Operation<"/repos/{owner}/{repo}/pulls/comments/{comment_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/reactions#list-reactions-for-a-pull-request-review-comment + */ + "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions": Operation<"/repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", "get">; + /** + * @see https://docs.github.com/rest/reference/pulls#get-a-pull-request + */ + "GET /repos/{owner}/{repo}/pulls/{pull_number}": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}", "get">; + /** + * @see 
https://docs.github.com/rest/reference/pulls#list-review-comments-on-a-pull-request + */ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/comments", "get">; + /** + * @see https://docs.github.com/rest/reference/pulls#list-commits-on-a-pull-request + */ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/commits", "get">; + /** + * @see https://docs.github.com/rest/reference/pulls#list-pull-requests-files + */ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/files": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/files", "get">; + /** + * @see https://docs.github.com/rest/reference/pulls#check-if-a-pull-request-has-been-merged + */ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/merge": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/merge", "get">; + /** + * @see https://docs.github.com/rest/reference/pulls#get-all-requested-reviewers-for-a-pull-request + */ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", "get">; + /** + * @see https://docs.github.com/rest/reference/pulls#list-reviews-for-a-pull-request + */ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/reviews", "get">; + /** + * @see https://docs.github.com/rest/reference/pulls#get-a-review-for-a-pull-request + */ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/pulls#list-comments-for-a-pull-request-review + */ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-a-repository-readme + */ + "GET /repos/{owner}/{repo}/readme": Operation<"/repos/{owner}/{repo}/readme", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-a-repository-directory-readme + */ + "GET /repos/{owner}/{repo}/readme/{dir}": Operation<"/repos/{owner}/{repo}/readme/{dir}", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-releases + */ + "GET /repos/{owner}/{repo}/releases": Operation<"/repos/{owner}/{repo}/releases", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-a-release-asset + */ + "GET /repos/{owner}/{repo}/releases/assets/{asset_id}": Operation<"/repos/{owner}/{repo}/releases/assets/{asset_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-the-latest-release + */ + "GET /repos/{owner}/{repo}/releases/latest": Operation<"/repos/{owner}/{repo}/releases/latest", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-a-release-by-tag-name + */ + "GET /repos/{owner}/{repo}/releases/tags/{tag}": Operation<"/repos/{owner}/{repo}/releases/tags/{tag}", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-a-release + */ + "GET /repos/{owner}/{repo}/releases/{release_id}": Operation<"/repos/{owner}/{repo}/releases/{release_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-release-assets + */ + "GET /repos/{owner}/{repo}/releases/{release_id}/assets": Operation<"/repos/{owner}/{repo}/releases/{release_id}/assets", "get">; + /** + * @see 
https://docs.github.com/rest/reference/reactions/#list-reactions-for-a-release + */ + "GET /repos/{owner}/{repo}/releases/{release_id}/reactions": Operation<"/repos/{owner}/{repo}/releases/{release_id}/reactions", "get">; + /** + * @see https://docs.github.com/rest/reference/secret-scanning#list-secret-scanning-alerts-for-a-repository + */ + "GET /repos/{owner}/{repo}/secret-scanning/alerts": Operation<"/repos/{owner}/{repo}/secret-scanning/alerts", "get">; + /** + * @see https://docs.github.com/rest/reference/secret-scanning#get-a-secret-scanning-alert + */ + "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}": Operation<"/repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}", "get">; + /** + * @see https://docs.github.com/rest/reference/secret-scanning#list-locations-for-a-secret-scanning-alert + */ + "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations": Operation<"/repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-stargazers + */ + "GET /repos/{owner}/{repo}/stargazers": Operation<"/repos/{owner}/{repo}/stargazers", "get">; + /** + * @see https://docs.github.com/rest/metrics/statistics#get-the-weekly-commit-activity + */ + "GET /repos/{owner}/{repo}/stats/code_frequency": Operation<"/repos/{owner}/{repo}/stats/code_frequency", "get">; + /** + * @see https://docs.github.com/rest/metrics/statistics#get-the-last-year-of-commit-activity + */ + "GET /repos/{owner}/{repo}/stats/commit_activity": Operation<"/repos/{owner}/{repo}/stats/commit_activity", "get">; + /** + * @see https://docs.github.com/rest/metrics/statistics#get-all-contributor-commit-activity + */ + "GET /repos/{owner}/{repo}/stats/contributors": Operation<"/repos/{owner}/{repo}/stats/contributors", "get">; + /** + * @see https://docs.github.com/rest/metrics/statistics#get-the-weekly-commit-count + */ + "GET /repos/{owner}/{repo}/stats/participation": Operation<"/repos/{owner}/{repo}/stats/participation", "get">; + /** + * @see https://docs.github.com/rest/statistics/repos#get-the-hourly-commit-count-for-each-day + */ + "GET /repos/{owner}/{repo}/stats/punch_card": Operation<"/repos/{owner}/{repo}/stats/punch_card", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-watchers + */ + "GET /repos/{owner}/{repo}/subscribers": Operation<"/repos/{owner}/{repo}/subscribers", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#get-a-repository-subscription + */ + "GET /repos/{owner}/{repo}/subscription": Operation<"/repos/{owner}/{repo}/subscription", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-repository-tags + */ + "GET /repos/{owner}/{repo}/tags": Operation<"/repos/{owner}/{repo}/tags", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-tag-protection-state-of-a-repository + */ + "GET /repos/{owner}/{repo}/tags/protection": Operation<"/repos/{owner}/{repo}/tags/protection", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#download-a-repository-archive + */ + "GET /repos/{owner}/{repo}/tarball/{ref}": Operation<"/repos/{owner}/{repo}/tarball/{ref}", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-repository-teams + */ + "GET /repos/{owner}/{repo}/teams": Operation<"/repos/{owner}/{repo}/teams", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#get-all-repository-topics + */ + "GET /repos/{owner}/{repo}/topics": 
Operation<"/repos/{owner}/{repo}/topics", "get">; + /** + * @see https://docs.github.com/rest/metrics/traffic#get-repository-clones + */ + "GET /repos/{owner}/{repo}/traffic/clones": Operation<"/repos/{owner}/{repo}/traffic/clones", "get">; + /** + * @see https://docs.github.com/rest/metrics/traffic#get-top-referral-paths + */ + "GET /repos/{owner}/{repo}/traffic/popular/paths": Operation<"/repos/{owner}/{repo}/traffic/popular/paths", "get">; + /** + * @see https://docs.github.com/rest/metrics/traffic#get-top-referral-sources + */ + "GET /repos/{owner}/{repo}/traffic/popular/referrers": Operation<"/repos/{owner}/{repo}/traffic/popular/referrers", "get">; + /** + * @see https://docs.github.com/rest/metrics/traffic#get-page-views + */ + "GET /repos/{owner}/{repo}/traffic/views": Operation<"/repos/{owner}/{repo}/traffic/views", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#check-if-vulnerability-alerts-are-enabled-for-a-repository + */ + "GET /repos/{owner}/{repo}/vulnerability-alerts": Operation<"/repos/{owner}/{repo}/vulnerability-alerts", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#download-a-repository-archive + */ + "GET /repos/{owner}/{repo}/zipball/{ref}": Operation<"/repos/{owner}/{repo}/zipball/{ref}", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-public-repositories + */ + "GET /repositories": Operation<"/repositories", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#list-environment-secrets + */ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets": Operation<"/repositories/{repository_id}/environments/{environment_name}/secrets", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-an-environment-public-key + */ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key": Operation<"/repositories/{repository_id}/environments/{environment_name}/secrets/public-key", "get">; + /** + * @see https://docs.github.com/rest/reference/actions#get-an-environment-secret + */ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}": Operation<"/repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}", "get">; + /** + * @see https://docs.github.com/rest/reference/search#search-code + */ + "GET /search/code": Operation<"/search/code", "get">; + /** + * @see https://docs.github.com/rest/reference/search#search-commits + */ + "GET /search/commits": Operation<"/search/commits", "get">; + /** + * @see https://docs.github.com/rest/reference/search#search-issues-and-pull-requests + */ + "GET /search/issues": Operation<"/search/issues", "get">; + /** + * @see https://docs.github.com/rest/reference/search#search-labels + */ + "GET /search/labels": Operation<"/search/labels", "get">; + /** + * @see https://docs.github.com/rest/reference/search#search-repositories + */ + "GET /search/repositories": Operation<"/search/repositories", "get">; + /** + * @see https://docs.github.com/rest/reference/search#search-topics + */ + "GET /search/topics": Operation<"/search/topics", "get">; + /** + * @see https://docs.github.com/rest/reference/search#search-users + */ + "GET /search/users": Operation<"/search/users", "get">; + /** + * @see https://docs.github.com/rest/reference/teams/#get-a-team-legacy + */ + "GET /teams/{team_id}": Operation<"/teams/{team_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#list-discussions-legacy + */ + "GET 
/teams/{team_id}/discussions": Operation<"/teams/{team_id}/discussions", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#get-a-discussion-legacy + */ + "GET /teams/{team_id}/discussions/{discussion_number}": Operation<"/teams/{team_id}/discussions/{discussion_number}", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#list-discussion-comments-legacy + */ + "GET /teams/{team_id}/discussions/{discussion_number}/comments": Operation<"/teams/{team_id}/discussions/{discussion_number}/comments", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#get-a-discussion-comment-legacy + */ + "GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}": Operation<"/teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}", "get">; + /** + * @see https://docs.github.com/rest/reference/reactions/#list-reactions-for-a-team-discussion-comment-legacy + */ + "GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions": Operation<"/teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions", "get">; + /** + * @see https://docs.github.com/rest/reference/reactions/#list-reactions-for-a-team-discussion-legacy + */ + "GET /teams/{team_id}/discussions/{discussion_number}/reactions": Operation<"/teams/{team_id}/discussions/{discussion_number}/reactions", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#list-pending-team-invitations-legacy + */ + "GET /teams/{team_id}/invitations": Operation<"/teams/{team_id}/invitations", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#list-team-members-legacy + */ + "GET /teams/{team_id}/members": Operation<"/teams/{team_id}/members", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#get-team-member-legacy + */ + "GET /teams/{team_id}/members/{username}": Operation<"/teams/{team_id}/members/{username}", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#get-team-membership-for-a-user-legacy + */ + "GET /teams/{team_id}/memberships/{username}": Operation<"/teams/{team_id}/memberships/{username}", "get">; + /** + * @see https://docs.github.com/rest/reference/teams/#list-team-projects-legacy + */ + "GET /teams/{team_id}/projects": Operation<"/teams/{team_id}/projects", "get">; + /** + * @see https://docs.github.com/rest/reference/teams/#check-team-permissions-for-a-project-legacy + */ + "GET /teams/{team_id}/projects/{project_id}": Operation<"/teams/{team_id}/projects/{project_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/teams/#list-team-repositories-legacy + */ + "GET /teams/{team_id}/repos": Operation<"/teams/{team_id}/repos", "get">; + /** + * @see https://docs.github.com/rest/reference/teams/#check-team-permissions-for-a-repository-legacy + */ + "GET /teams/{team_id}/repos/{owner}/{repo}": Operation<"/teams/{team_id}/repos/{owner}/{repo}", "get">; + /** + * @see https://docs.github.com/rest/reference/teams/#list-child-teams-legacy + */ + "GET /teams/{team_id}/teams": Operation<"/teams/{team_id}/teams", "get">; + /** + * @see https://docs.github.com/rest/reference/users#get-the-authenticated-user + */ + "GET /user": Operation<"/user", "get">; + /** + * @see https://docs.github.com/rest/reference/users#list-users-blocked-by-the-authenticated-user + */ + "GET /user/blocks": Operation<"/user/blocks", "get">; + /** + * @see https://docs.github.com/rest/reference/users#check-if-a-user-is-blocked-by-the-authenticated-user + */ + "GET 
/user/blocks/{username}": Operation<"/user/blocks/{username}", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#list-codespaces-for-the-authenticated-user + */ + "GET /user/codespaces": Operation<"/user/codespaces", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#list-secrets-for-the-authenticated-user + */ + "GET /user/codespaces/secrets": Operation<"/user/codespaces/secrets", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#get-public-key-for-the-authenticated-user + */ + "GET /user/codespaces/secrets/public-key": Operation<"/user/codespaces/secrets/public-key", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#get-a-secret-for-the-authenticated-user + */ + "GET /user/codespaces/secrets/{secret_name}": Operation<"/user/codespaces/secrets/{secret_name}", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#list-selected-repositories-for-a-user-secret + */ + "GET /user/codespaces/secrets/{secret_name}/repositories": Operation<"/user/codespaces/secrets/{secret_name}/repositories", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#get-a-codespace-for-the-authenticated-user + */ + "GET /user/codespaces/{codespace_name}": Operation<"/user/codespaces/{codespace_name}", "get">; + /** + * @see + */ + "GET /user/codespaces/{codespace_name}/exports/{export_id}": Operation<"/user/codespaces/{codespace_name}/exports/{export_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/codespaces#list-machine-types-for-a-codespace + */ + "GET /user/codespaces/{codespace_name}/machines": Operation<"/user/codespaces/{codespace_name}/machines", "get">; + /** + * @see https://docs.github.com/rest/reference/users#list-email-addresses-for-the-authenticated-user + */ + "GET /user/emails": Operation<"/user/emails", "get">; + /** + * @see https://docs.github.com/rest/reference/users#list-followers-of-the-authenticated-user + */ + "GET /user/followers": Operation<"/user/followers", "get">; + /** + * @see https://docs.github.com/rest/reference/users#list-the-people-the-authenticated-user-follows + */ + "GET /user/following": Operation<"/user/following", "get">; + /** + * @see https://docs.github.com/rest/reference/users#check-if-a-person-is-followed-by-the-authenticated-user + */ + "GET /user/following/{username}": Operation<"/user/following/{username}", "get">; + /** + * @see https://docs.github.com/rest/reference/users#list-gpg-keys-for-the-authenticated-user + */ + "GET /user/gpg_keys": Operation<"/user/gpg_keys", "get">; + /** + * @see https://docs.github.com/rest/reference/users#get-a-gpg-key-for-the-authenticated-user + */ + "GET /user/gpg_keys/{gpg_key_id}": Operation<"/user/gpg_keys/{gpg_key_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#list-app-installations-accessible-to-the-user-access-token + */ + "GET /user/installations": Operation<"/user/installations", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#list-repositories-accessible-to-the-user-access-token + */ + "GET /user/installations/{installation_id}/repositories": Operation<"/user/installations/{installation_id}/repositories", "get">; + /** + * @see https://docs.github.com/rest/reference/interactions#get-interaction-restrictions-for-your-public-repositories + */ + "GET /user/interaction-limits": Operation<"/user/interaction-limits", "get">; + /** + * @see 
https://docs.github.com/rest/reference/issues#list-user-account-issues-assigned-to-the-authenticated-user + */ + "GET /user/issues": Operation<"/user/issues", "get">; + /** + * @see https://docs.github.com/rest/reference/users#list-public-ssh-keys-for-the-authenticated-user + */ + "GET /user/keys": Operation<"/user/keys", "get">; + /** + * @see https://docs.github.com/rest/reference/users#get-a-public-ssh-key-for-the-authenticated-user + */ + "GET /user/keys/{key_id}": Operation<"/user/keys/{key_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#list-subscriptions-for-the-authenticated-user + */ + "GET /user/marketplace_purchases": Operation<"/user/marketplace_purchases", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#list-subscriptions-for-the-authenticated-user-stubbed + */ + "GET /user/marketplace_purchases/stubbed": Operation<"/user/marketplace_purchases/stubbed", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-organization-memberships-for-the-authenticated-user + */ + "GET /user/memberships/orgs": Operation<"/user/memberships/orgs", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#get-an-organization-membership-for-the-authenticated-user + */ + "GET /user/memberships/orgs/{org}": Operation<"/user/memberships/orgs/{org}", "get">; + /** + * @see https://docs.github.com/rest/reference/migrations#list-user-migrations + */ + "GET /user/migrations": Operation<"/user/migrations", "get">; + /** + * @see https://docs.github.com/rest/reference/migrations#get-a-user-migration-status + */ + "GET /user/migrations/{migration_id}": Operation<"/user/migrations/{migration_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/migrations#download-a-user-migration-archive + */ + "GET /user/migrations/{migration_id}/archive": Operation<"/user/migrations/{migration_id}/archive", "get">; + /** + * @see https://docs.github.com/rest/reference/migrations#list-repositories-for-a-user-migration + */ + "GET /user/migrations/{migration_id}/repositories": Operation<"/user/migrations/{migration_id}/repositories", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-organizations-for-the-authenticated-user + */ + "GET /user/orgs": Operation<"/user/orgs", "get">; + /** + * @see https://docs.github.com/rest/reference/packages#list-packages-for-the-authenticated-user + */ + "GET /user/packages": Operation<"/user/packages", "get">; + /** + * @see https://docs.github.com/rest/reference/packages#get-a-package-for-the-authenticated-user + */ + "GET /user/packages/{package_type}/{package_name}": Operation<"/user/packages/{package_type}/{package_name}", "get">; + /** + * @see https://docs.github.com/rest/packages#get-all-package-versions-for-a-package-owned-by-the-authenticated-user + */ + "GET /user/packages/{package_type}/{package_name}/versions": Operation<"/user/packages/{package_type}/{package_name}/versions", "get">; + /** + * @see https://docs.github.com/rest/reference/packages#get-a-package-version-for-the-authenticated-user + */ + "GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}": Operation<"/user/packages/{package_type}/{package_name}/versions/{package_version_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/users#list-public-email-addresses-for-the-authenticated-user + */ + "GET /user/public_emails": Operation<"/user/public_emails", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-repositories-for-the-authenticated-user + */ + 
"GET /user/repos": Operation<"/user/repos", "get">; + /** + * @see https://docs.github.com/rest/collaborators/invitations#list-repository-invitations-for-the-authenticated-user + */ + "GET /user/repository_invitations": Operation<"/user/repository_invitations", "get">; + /** + * @see https://docs.github.com/rest/reference/users#list-public-ssh-signing-keys-for-the-authenticated-user + */ + "GET /user/ssh_signing_keys": Operation<"/user/ssh_signing_keys", "get">; + /** + * @see https://docs.github.com/rest/reference/users#get-a-ssh-signing-key-for-the-authenticated-user + */ + "GET /user/ssh_signing_keys/{ssh_signing_key_id}": Operation<"/user/ssh_signing_keys/{ssh_signing_key_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-repositories-starred-by-the-authenticated-user + */ + "GET /user/starred": Operation<"/user/starred", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#check-if-a-repository-is-starred-by-the-authenticated-user + */ + "GET /user/starred/{owner}/{repo}": Operation<"/user/starred/{owner}/{repo}", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-repositories-watched-by-the-authenticated-user + */ + "GET /user/subscriptions": Operation<"/user/subscriptions", "get">; + /** + * @see https://docs.github.com/rest/reference/teams#list-teams-for-the-authenticated-user + */ + "GET /user/teams": Operation<"/user/teams", "get">; + /** + * @see https://docs.github.com/rest/reference/users#list-users + */ + "GET /users": Operation<"/users", "get">; + /** + * @see https://docs.github.com/rest/reference/users#get-a-user + */ + "GET /users/{username}": Operation<"/users/{username}", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-events-for-the-authenticated-user + */ + "GET /users/{username}/events": Operation<"/users/{username}/events", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-organization-events-for-the-authenticated-user + */ + "GET /users/{username}/events/orgs/{org}": Operation<"/users/{username}/events/orgs/{org}", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-public-events-for-a-user + */ + "GET /users/{username}/events/public": Operation<"/users/{username}/events/public", "get">; + /** + * @see https://docs.github.com/rest/reference/users#list-followers-of-a-user + */ + "GET /users/{username}/followers": Operation<"/users/{username}/followers", "get">; + /** + * @see https://docs.github.com/rest/reference/users#list-the-people-a-user-follows + */ + "GET /users/{username}/following": Operation<"/users/{username}/following", "get">; + /** + * @see https://docs.github.com/rest/reference/users#check-if-a-user-follows-another-user + */ + "GET /users/{username}/following/{target_user}": Operation<"/users/{username}/following/{target_user}", "get">; + /** + * @see https://docs.github.com/rest/reference/gists#list-gists-for-a-user + */ + "GET /users/{username}/gists": Operation<"/users/{username}/gists", "get">; + /** + * @see https://docs.github.com/rest/reference/users#list-gpg-keys-for-a-user + */ + "GET /users/{username}/gpg_keys": Operation<"/users/{username}/gpg_keys", "get">; + /** + * @see https://docs.github.com/rest/reference/users#get-contextual-information-for-a-user + */ + "GET /users/{username}/hovercard": Operation<"/users/{username}/hovercard", "get">; + /** + * @see https://docs.github.com/rest/reference/apps#get-a-user-installation-for-the-authenticated-app + */ + "GET 
/users/{username}/installation": Operation<"/users/{username}/installation", "get">; + /** + * @see https://docs.github.com/rest/reference/users#list-public-keys-for-a-user + */ + "GET /users/{username}/keys": Operation<"/users/{username}/keys", "get">; + /** + * @see https://docs.github.com/rest/reference/orgs#list-organizations-for-a-user + */ + "GET /users/{username}/orgs": Operation<"/users/{username}/orgs", "get">; + /** + * @see https://docs.github.com/rest/reference/packages#list-packages-for-user + */ + "GET /users/{username}/packages": Operation<"/users/{username}/packages", "get">; + /** + * @see https://docs.github.com/rest/reference/packages#get-a-package-for-a-user + */ + "GET /users/{username}/packages/{package_type}/{package_name}": Operation<"/users/{username}/packages/{package_type}/{package_name}", "get">; + /** + * @see https://docs.github.com/rest/packages#get-all-package-versions-for-a-package-owned-by-a-user + */ + "GET /users/{username}/packages/{package_type}/{package_name}/versions": Operation<"/users/{username}/packages/{package_type}/{package_name}/versions", "get">; + /** + * @see https://docs.github.com/rest/reference/packages#get-a-package-version-for-a-user + */ + "GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}": Operation<"/users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}", "get">; + /** + * @see https://docs.github.com/rest/reference/projects#list-user-projects + */ + "GET /users/{username}/projects": Operation<"/users/{username}/projects", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-events-received-by-the-authenticated-user + */ + "GET /users/{username}/received_events": Operation<"/users/{username}/received_events", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-public-events-received-by-a-user + */ + "GET /users/{username}/received_events/public": Operation<"/users/{username}/received_events/public", "get">; + /** + * @see https://docs.github.com/rest/reference/repos#list-repositories-for-a-user + */ + "GET /users/{username}/repos": Operation<"/users/{username}/repos", "get">; + /** + * @see https://docs.github.com/rest/reference/billing#get-github-actions-billing-for-a-user + */ + "GET /users/{username}/settings/billing/actions": Operation<"/users/{username}/settings/billing/actions", "get">; + /** + * @see https://docs.github.com/rest/reference/billing#get-github-packages-billing-for-a-user + */ + "GET /users/{username}/settings/billing/packages": Operation<"/users/{username}/settings/billing/packages", "get">; + /** + * @see https://docs.github.com/rest/reference/billing#get-shared-storage-billing-for-a-user + */ + "GET /users/{username}/settings/billing/shared-storage": Operation<"/users/{username}/settings/billing/shared-storage", "get">; + /** + * @see https://docs.github.com/rest/reference/users#list-ssh-signing-keys-for-a-user + */ + "GET /users/{username}/ssh_signing_keys": Operation<"/users/{username}/ssh_signing_keys", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-repositories-starred-by-a-user + */ + "GET /users/{username}/starred": Operation<"/users/{username}/starred", "get">; + /** + * @see https://docs.github.com/rest/reference/activity#list-repositories-watched-by-a-user + */ + "GET /users/{username}/subscriptions": Operation<"/users/{username}/subscriptions", "get">; + /** + * @see + */ + "GET /zen": Operation<"/zen", "get">; + /** + * @see 
https://docs.github.com/rest/reference/apps#update-a-webhook-configuration-for-an-app + */ + "PATCH /app/hook/config": Operation<"/app/hook/config", "patch">; + /** + * @see https://docs.github.com/rest/reference/apps#reset-a-token + */ + "PATCH /applications/{client_id}/token": Operation<"/applications/{client_id}/token", "patch">; + /** + * @see https://docs.github.com/rest/reference/actions#update-a-self-hosted-runner-group-for-an-enterprise + */ + "PATCH /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}": Operation<"/enterprises/{enterprise}/actions/runner-groups/{runner_group_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/gists/#update-a-gist + */ + "PATCH /gists/{gist_id}": Operation<"/gists/{gist_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/gists#update-a-gist-comment + */ + "PATCH /gists/{gist_id}/comments/{comment_id}": Operation<"/gists/{gist_id}/comments/{comment_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/activity#mark-a-thread-as-read + */ + "PATCH /notifications/threads/{thread_id}": Operation<"/notifications/threads/{thread_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/orgs/#update-an-organization + */ + "PATCH /orgs/{org}": Operation<"/orgs/{org}", "patch">; + /** + * @see https://docs.github.com/rest/reference/actions#update-a-self-hosted-runner-group-for-an-organization + */ + "PATCH /orgs/{org}/actions/runner-groups/{runner_group_id}": Operation<"/orgs/{org}/actions/runner-groups/{runner_group_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/orgs#update-a-custom-role + */ + "PATCH /orgs/{org}/custom_roles/{role_id}": Operation<"/orgs/{org}/custom_roles/{role_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/orgs#update-an-organization-webhook + */ + "PATCH /orgs/{org}/hooks/{hook_id}": Operation<"/orgs/{org}/hooks/{hook_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/orgs#update-a-webhook-configuration-for-an-organization + */ + "PATCH /orgs/{org}/hooks/{hook_id}/config": Operation<"/orgs/{org}/hooks/{hook_id}/config", "patch">; + /** + * @see https://docs.github.com/rest/reference/teams#update-a-team + */ + "PATCH /orgs/{org}/teams/{team_slug}": Operation<"/orgs/{org}/teams/{team_slug}", "patch">; + /** + * @see https://docs.github.com/rest/reference/teams#update-a-discussion + */ + "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}": Operation<"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}", "patch">; + /** + * @see https://docs.github.com/rest/reference/teams#update-a-discussion-comment + */ + "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}": Operation<"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}", "patch">; + /** + * @see https://docs.github.com/rest/reference/projects#update-a-project-card + */ + "PATCH /projects/columns/cards/{card_id}": Operation<"/projects/columns/cards/{card_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/projects#update-a-project-column + */ + "PATCH /projects/columns/{column_id}": Operation<"/projects/columns/{column_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/projects#update-a-project + */ + "PATCH /projects/{project_id}": Operation<"/projects/{project_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/repos/#update-a-repository + */ + "PATCH /repos/{owner}/{repo}": 
Operation<"/repos/{owner}/{repo}", "patch">; + /** + * @see https://docs.github.com/rest/reference/repos#update-pull-request-review-protection + */ + "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews", "patch">; + /** + * @see https://docs.github.com/rest/reference/repos#update-status-check-protection + */ + "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", "patch">; + /** + * @see https://docs.github.com/rest/reference/checks#update-a-check-run + */ + "PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}": Operation<"/repos/{owner}/{repo}/check-runs/{check_run_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/checks#update-repository-preferences-for-check-suites + */ + "PATCH /repos/{owner}/{repo}/check-suites/preferences": Operation<"/repos/{owner}/{repo}/check-suites/preferences", "patch">; + /** + * @see https://docs.github.com/rest/reference/code-scanning#update-a-code-scanning-alert + */ + "PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}": Operation<"/repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", "patch">; + /** + * @see https://docs.github.com/rest/commits/comments#update-a-commit-comment + */ + "PATCH /repos/{owner}/{repo}/comments/{comment_id}": Operation<"/repos/{owner}/{repo}/comments/{comment_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/dependabot#update-a-dependabot-alert + */ + "PATCH /repos/{owner}/{repo}/dependabot/alerts/{alert_number}": Operation<"/repos/{owner}/{repo}/dependabot/alerts/{alert_number}", "patch">; + /** + * @see https://docs.github.com/rest/reference/git#update-a-reference + */ + "PATCH /repos/{owner}/{repo}/git/refs/{ref}": Operation<"/repos/{owner}/{repo}/git/refs/{ref}", "patch">; + /** + * @see https://docs.github.com/rest/webhooks/repos#update-a-repository-webhook + */ + "PATCH /repos/{owner}/{repo}/hooks/{hook_id}": Operation<"/repos/{owner}/{repo}/hooks/{hook_id}", "patch">; + /** + * @see https://docs.github.com/rest/webhooks/repo-config#update-a-webhook-configuration-for-a-repository + */ + "PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config": Operation<"/repos/{owner}/{repo}/hooks/{hook_id}/config", "patch">; + /** + * @see https://docs.github.com/rest/reference/migrations#update-an-import + */ + "PATCH /repos/{owner}/{repo}/import": Operation<"/repos/{owner}/{repo}/import", "patch">; + /** + * @see https://docs.github.com/rest/reference/migrations#map-a-commit-author + */ + "PATCH /repos/{owner}/{repo}/import/authors/{author_id}": Operation<"/repos/{owner}/{repo}/import/authors/{author_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/migrations#update-git-lfs-preference + */ + "PATCH /repos/{owner}/{repo}/import/lfs": Operation<"/repos/{owner}/{repo}/import/lfs", "patch">; + /** + * @see https://docs.github.com/rest/collaborators/invitations#update-a-repository-invitation + */ + "PATCH /repos/{owner}/{repo}/invitations/{invitation_id}": Operation<"/repos/{owner}/{repo}/invitations/{invitation_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/issues#update-an-issue-comment + */ + "PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}": Operation<"/repos/{owner}/{repo}/issues/comments/{comment_id}", "patch">; + /** + * @see 
https://docs.github.com/rest/reference/issues/#update-an-issue + */ + "PATCH /repos/{owner}/{repo}/issues/{issue_number}": Operation<"/repos/{owner}/{repo}/issues/{issue_number}", "patch">; + /** + * @see https://docs.github.com/rest/reference/issues#update-a-label + */ + "PATCH /repos/{owner}/{repo}/labels/{name}": Operation<"/repos/{owner}/{repo}/labels/{name}", "patch">; + /** + * @see https://docs.github.com/rest/reference/issues#update-a-milestone + */ + "PATCH /repos/{owner}/{repo}/milestones/{milestone_number}": Operation<"/repos/{owner}/{repo}/milestones/{milestone_number}", "patch">; + /** + * @see https://docs.github.com/rest/reference/pulls#update-a-review-comment-for-a-pull-request + */ + "PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}": Operation<"/repos/{owner}/{repo}/pulls/comments/{comment_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/pulls/#update-a-pull-request + */ + "PATCH /repos/{owner}/{repo}/pulls/{pull_number}": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}", "patch">; + /** + * @see https://docs.github.com/rest/reference/repos#update-a-release-asset + */ + "PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}": Operation<"/repos/{owner}/{repo}/releases/assets/{asset_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/repos#update-a-release + */ + "PATCH /repos/{owner}/{repo}/releases/{release_id}": Operation<"/repos/{owner}/{repo}/releases/{release_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/secret-scanning#update-a-secret-scanning-alert + */ + "PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}": Operation<"/repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}", "patch">; + /** + * @see https://docs.github.com/rest/reference/teams/#update-a-team-legacy + */ + "PATCH /teams/{team_id}": Operation<"/teams/{team_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/teams#update-a-discussion-legacy + */ + "PATCH /teams/{team_id}/discussions/{discussion_number}": Operation<"/teams/{team_id}/discussions/{discussion_number}", "patch">; + /** + * @see https://docs.github.com/rest/reference/teams#update-a-discussion-comment-legacy + */ + "PATCH /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}": Operation<"/teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}", "patch">; + /** + * @see https://docs.github.com/rest/reference/users/#update-the-authenticated-user + */ + "PATCH /user": Operation<"/user", "patch">; + /** + * @see https://docs.github.com/rest/reference/codespaces#update-a-codespace-for-the-authenticated-user + */ + "PATCH /user/codespaces/{codespace_name}": Operation<"/user/codespaces/{codespace_name}", "patch">; + /** + * @see https://docs.github.com/rest/reference/users#set-primary-email-visibility-for-the-authenticated-user + */ + "PATCH /user/email/visibility": Operation<"/user/email/visibility", "patch">; + /** + * @see https://docs.github.com/rest/reference/orgs#update-an-organization-membership-for-the-authenticated-user + */ + "PATCH /user/memberships/orgs/{org}": Operation<"/user/memberships/orgs/{org}", "patch">; + /** + * @see https://docs.github.com/rest/collaborators/invitations#accept-a-repository-invitation + */ + "PATCH /user/repository_invitations/{invitation_id}": Operation<"/user/repository_invitations/{invitation_id}", "patch">; + /** + * @see https://docs.github.com/rest/reference/apps#create-a-github-app-from-a-manifest + */ + "POST /app-manifests/{code}/conversions": 
Operation<"/app-manifests/{code}/conversions", "post">; + /** + * @see https://docs.github.com/rest/reference/apps#redeliver-a-delivery-for-an-app-webhook + */ + "POST /app/hook/deliveries/{delivery_id}/attempts": Operation<"/app/hook/deliveries/{delivery_id}/attempts", "post">; + /** + * @see https://docs.github.com/rest/reference/apps/#create-an-installation-access-token-for-an-app + */ + "POST /app/installations/{installation_id}/access_tokens": Operation<"/app/installations/{installation_id}/access_tokens", "post">; + /** + * @see https://docs.github.com/rest/reference/apps#check-a-token + */ + "POST /applications/{client_id}/token": Operation<"/applications/{client_id}/token", "post">; + /** + * @see https://docs.github.com/rest/reference/apps#create-a-scoped-access-token + */ + "POST /applications/{client_id}/token/scoped": Operation<"/applications/{client_id}/token/scoped", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#create-self-hosted-runner-group-for-an-enterprise + */ + "POST /enterprises/{enterprise}/actions/runner-groups": Operation<"/enterprises/{enterprise}/actions/runner-groups", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#create-a-registration-token-for-an-enterprise + */ + "POST /enterprises/{enterprise}/actions/runners/registration-token": Operation<"/enterprises/{enterprise}/actions/runners/registration-token", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#create-a-remove-token-for-an-enterprise + */ + "POST /enterprises/{enterprise}/actions/runners/remove-token": Operation<"/enterprises/{enterprise}/actions/runners/remove-token", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#add-custom-labels-to-a-self-hosted-runner-for-an-enterprise + */ + "POST /enterprises/{enterprise}/actions/runners/{runner_id}/labels": Operation<"/enterprises/{enterprise}/actions/runners/{runner_id}/labels", "post">; + /** + * @see https://docs.github.com/rest/reference/gists#create-a-gist + */ + "POST /gists": Operation<"/gists", "post">; + /** + * @see https://docs.github.com/rest/reference/gists#create-a-gist-comment + */ + "POST /gists/{gist_id}/comments": Operation<"/gists/{gist_id}/comments", "post">; + /** + * @see https://docs.github.com/rest/reference/gists#fork-a-gist + */ + "POST /gists/{gist_id}/forks": Operation<"/gists/{gist_id}/forks", "post">; + /** + * @see https://docs.github.com/rest/reference/markdown#render-a-markdown-document + */ + "POST /markdown": Operation<"/markdown", "post">; + /** + * @see https://docs.github.com/rest/reference/markdown#render-a-markdown-document-in-raw-mode + */ + "POST /markdown/raw": Operation<"/markdown/raw", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#create-a-self-hosted-runner-group-for-an-organization + */ + "POST /orgs/{org}/actions/runner-groups": Operation<"/orgs/{org}/actions/runner-groups", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#create-a-registration-token-for-an-organization + */ + "POST /orgs/{org}/actions/runners/registration-token": Operation<"/orgs/{org}/actions/runners/registration-token", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#create-a-remove-token-for-an-organization + */ + "POST /orgs/{org}/actions/runners/remove-token": Operation<"/orgs/{org}/actions/runners/remove-token", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#add-custom-labels-to-a-self-hosted-runner-for-an-organization + */ + "POST 
/orgs/{org}/actions/runners/{runner_id}/labels": Operation<"/orgs/{org}/actions/runners/{runner_id}/labels", "post">; + /** + * @see https://docs.github.com/rest/reference/orgs#create-a-custom-role + */ + "POST /orgs/{org}/custom_roles": Operation<"/orgs/{org}/custom_roles", "post">; + /** + * @see https://docs.github.com/rest/reference/orgs#create-an-organization-webhook + */ + "POST /orgs/{org}/hooks": Operation<"/orgs/{org}/hooks", "post">; + /** + * @see https://docs.github.com/rest/reference/orgs#redeliver-a-delivery-for-an-organization-webhook + */ + "POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts": Operation<"/orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts", "post">; + /** + * @see https://docs.github.com/rest/reference/orgs#ping-an-organization-webhook + */ + "POST /orgs/{org}/hooks/{hook_id}/pings": Operation<"/orgs/{org}/hooks/{hook_id}/pings", "post">; + /** + * @see https://docs.github.com/rest/reference/orgs#create-an-organization-invitation + */ + "POST /orgs/{org}/invitations": Operation<"/orgs/{org}/invitations", "post">; + /** + * @see https://docs.github.com/rest/reference/codespaces + */ + "POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop": Operation<"/orgs/{org}/members/{username}/codespaces/{codespace_name}/stop", "post">; + /** + * @see https://docs.github.com/rest/reference/migrations#start-an-organization-migration + */ + "POST /orgs/{org}/migrations": Operation<"/orgs/{org}/migrations", "post">; + /** + * @see https://docs.github.com/rest/reference/packages#restore-a-package-for-an-organization + */ + "POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}": Operation<"/orgs/{org}/packages/{package_type}/{package_name}/restore", "post">; + /** + * @see https://docs.github.com/rest/reference/packages#restore-a-package-version-for-an-organization + */ + "POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore": Operation<"/orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore", "post">; + /** + * @see https://docs.github.com/rest/reference/projects#create-an-organization-project + */ + "POST /orgs/{org}/projects": Operation<"/orgs/{org}/projects", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#create-an-organization-repository + */ + "POST /orgs/{org}/repos": Operation<"/orgs/{org}/repos", "post">; + /** + * @see https://docs.github.com/rest/reference/teams#create-a-team + */ + "POST /orgs/{org}/teams": Operation<"/orgs/{org}/teams", "post">; + /** + * @see https://docs.github.com/rest/reference/teams#create-a-discussion + */ + "POST /orgs/{org}/teams/{team_slug}/discussions": Operation<"/orgs/{org}/teams/{team_slug}/discussions", "post">; + /** + * @see https://docs.github.com/rest/reference/teams#create-a-discussion-comment + */ + "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments": Operation<"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", "post">; + /** + * @see https://docs.github.com/rest/reference/reactions#create-reaction-for-a-team-discussion-comment + */ + "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions": Operation<"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", "post">; + /** + * @see https://docs.github.com/rest/reference/reactions#create-reaction-for-a-team-discussion + */ + "POST 
/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions": Operation<"/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", "post">; + /** + * @see https://docs.github.com/rest/reference/orgs#enable-or-disable-security-product-on-all-org-repos + */ + "POST /orgs/{org}/{security_product}/{enablement}": Operation<"/orgs/{org}/{security_product}/{enablement}", "post">; + /** + * @see https://docs.github.com/rest/reference/projects#move-a-project-card + */ + "POST /projects/columns/cards/{card_id}/moves": Operation<"/projects/columns/cards/{card_id}/moves", "post">; + /** + * @see https://docs.github.com/rest/reference/projects#create-a-project-card + */ + "POST /projects/columns/{column_id}/cards": Operation<"/projects/columns/{column_id}/cards", "post">; + /** + * @see https://docs.github.com/rest/reference/projects#move-a-project-column + */ + "POST /projects/columns/{column_id}/moves": Operation<"/projects/columns/{column_id}/moves", "post">; + /** + * @see https://docs.github.com/rest/reference/projects#create-a-project-column + */ + "POST /projects/{project_id}/columns": Operation<"/projects/{project_id}/columns", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#re-run-job-for-workflow-run + */ + "POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun": Operation<"/repos/{owner}/{repo}/actions/jobs/{job_id}/rerun", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#create-a-registration-token-for-a-repository + */ + "POST /repos/{owner}/{repo}/actions/runners/registration-token": Operation<"/repos/{owner}/{repo}/actions/runners/registration-token", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#create-a-remove-token-for-a-repository + */ + "POST /repos/{owner}/{repo}/actions/runners/remove-token": Operation<"/repos/{owner}/{repo}/actions/runners/remove-token", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#add-custom-labels-to-a-self-hosted-runner-for-a-repository + */ + "POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels": Operation<"/repos/{owner}/{repo}/actions/runners/{runner_id}/labels", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#approve-a-workflow-run-for-a-fork-pull-request + */ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/approve", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#cancel-a-workflow-run + */ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/cancel", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#review-pending-deployments-for-a-workflow-run + */ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#re-run-a-workflow + */ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/rerun", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#re-run-workflow-failed-jobs + */ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs": Operation<"/repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs", "post">; + /** + * @see https://docs.github.com/rest/reference/actions#create-a-workflow-dispatch-event + */ + "POST 
/repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches": Operation<"/repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches", "post">; + /** + * @see https://docs.github.com/v3/repos#create-an-autolink + */ + "POST /repos/{owner}/{repo}/autolinks": Operation<"/repos/{owner}/{repo}/autolinks", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#set-admin-branch-protection + */ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#create-commit-signature-protection + */ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#add-status-check-contexts + */ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#add-app-access-restrictions + */ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#add-team-access-restrictions + */ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#add-user-access-restrictions + */ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#rename-a-branch + */ + "POST /repos/{owner}/{repo}/branches/{branch}/rename": Operation<"/repos/{owner}/{repo}/branches/{branch}/rename", "post">; + /** + * @see https://docs.github.com/rest/reference/checks#create-a-check-run + */ + "POST /repos/{owner}/{repo}/check-runs": Operation<"/repos/{owner}/{repo}/check-runs", "post">; + /** + * @see https://docs.github.com/rest/reference/checks#rerequest-a-check-run + */ + "POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest": Operation<"/repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest", "post">; + /** + * @see https://docs.github.com/rest/reference/checks#create-a-check-suite + */ + "POST /repos/{owner}/{repo}/check-suites": Operation<"/repos/{owner}/{repo}/check-suites", "post">; + /** + * @see https://docs.github.com/rest/reference/checks#rerequest-a-check-suite + */ + "POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest": Operation<"/repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest", "post">; + /** + * @see https://docs.github.com/rest/reference/code-scanning#upload-a-sarif-file + */ + "POST /repos/{owner}/{repo}/code-scanning/sarifs": Operation<"/repos/{owner}/{repo}/code-scanning/sarifs", "post">; + /** + * @see https://docs.github.com/rest/reference/codespaces#create-a-codespace-in-a-repository + */ + "POST /repos/{owner}/{repo}/codespaces": Operation<"/repos/{owner}/{repo}/codespaces", "post">; + /** + * @see https://docs.github.com/rest/reference/reactions#create-reaction-for-a-commit-comment + */ + "POST 
/repos/{owner}/{repo}/comments/{comment_id}/reactions": Operation<"/repos/{owner}/{repo}/comments/{comment_id}/reactions", "post">; + /** + * @see https://docs.github.com/rest/commits/comments#create-a-commit-comment + */ + "POST /repos/{owner}/{repo}/commits/{commit_sha}/comments": Operation<"/repos/{owner}/{repo}/commits/{commit_sha}/comments", "post">; + /** + * @see https://docs.github.com/rest/reference/dependency-graph#create-a-snapshot-of-dependencies-for-a-repository + */ + "POST /repos/{owner}/{repo}/dependency-graph/snapshots": Operation<"/repos/{owner}/{repo}/dependency-graph/snapshots", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#create-a-deployment + */ + "POST /repos/{owner}/{repo}/deployments": Operation<"/repos/{owner}/{repo}/deployments", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#create-a-deployment-status + */ + "POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses": Operation<"/repos/{owner}/{repo}/deployments/{deployment_id}/statuses", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#create-a-repository-dispatch-event + */ + "POST /repos/{owner}/{repo}/dispatches": Operation<"/repos/{owner}/{repo}/dispatches", "post">; + /** + * @see https://docs.github.com/rest/deployments/branch-policies#create-deployment-branch-policy + */ + "POST /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies": Operation<"/repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#create-a-fork + */ + "POST /repos/{owner}/{repo}/forks": Operation<"/repos/{owner}/{repo}/forks", "post">; + /** + * @see https://docs.github.com/rest/reference/git#create-a-blob + */ + "POST /repos/{owner}/{repo}/git/blobs": Operation<"/repos/{owner}/{repo}/git/blobs", "post">; + /** + * @see https://docs.github.com/rest/reference/git#create-a-commit + */ + "POST /repos/{owner}/{repo}/git/commits": Operation<"/repos/{owner}/{repo}/git/commits", "post">; + /** + * @see https://docs.github.com/rest/reference/git#create-a-reference + */ + "POST /repos/{owner}/{repo}/git/refs": Operation<"/repos/{owner}/{repo}/git/refs", "post">; + /** + * @see https://docs.github.com/rest/reference/git#create-a-tag-object + */ + "POST /repos/{owner}/{repo}/git/tags": Operation<"/repos/{owner}/{repo}/git/tags", "post">; + /** + * @see https://docs.github.com/rest/reference/git#create-a-tree + */ + "POST /repos/{owner}/{repo}/git/trees": Operation<"/repos/{owner}/{repo}/git/trees", "post">; + /** + * @see https://docs.github.com/rest/webhooks/repos#create-a-repository-webhook + */ + "POST /repos/{owner}/{repo}/hooks": Operation<"/repos/{owner}/{repo}/hooks", "post">; + /** + * @see https://docs.github.com/rest/webhooks/repo-deliveries#redeliver-a-delivery-for-a-repository-webhook + */ + "POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts": Operation<"/repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts", "post">; + /** + * @see https://docs.github.com/rest/webhooks/repos#ping-a-repository-webhook + */ + "POST /repos/{owner}/{repo}/hooks/{hook_id}/pings": Operation<"/repos/{owner}/{repo}/hooks/{hook_id}/pings", "post">; + /** + * @see https://docs.github.com/rest/webhooks/repos#test-the-push-repository-webhook + */ + "POST /repos/{owner}/{repo}/hooks/{hook_id}/tests": Operation<"/repos/{owner}/{repo}/hooks/{hook_id}/tests", "post">; + /** + * @see 
https://docs.github.com/rest/reference/issues#create-an-issue + */ + "POST /repos/{owner}/{repo}/issues": Operation<"/repos/{owner}/{repo}/issues", "post">; + /** + * @see https://docs.github.com/rest/reference/reactions#create-reaction-for-an-issue-comment + */ + "POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions": Operation<"/repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", "post">; + /** + * @see https://docs.github.com/rest/reference/issues#add-assignees-to-an-issue + */ + "POST /repos/{owner}/{repo}/issues/{issue_number}/assignees": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/assignees", "post">; + /** + * @see https://docs.github.com/rest/reference/issues#create-an-issue-comment + */ + "POST /repos/{owner}/{repo}/issues/{issue_number}/comments": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/comments", "post">; + /** + * @see https://docs.github.com/rest/reference/issues#add-labels-to-an-issue + */ + "POST /repos/{owner}/{repo}/issues/{issue_number}/labels": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/labels", "post">; + /** + * @see https://docs.github.com/rest/reference/reactions#create-reaction-for-an-issue + */ + "POST /repos/{owner}/{repo}/issues/{issue_number}/reactions": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/reactions", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#create-a-deploy-key + */ + "POST /repos/{owner}/{repo}/keys": Operation<"/repos/{owner}/{repo}/keys", "post">; + /** + * @see https://docs.github.com/rest/reference/issues#create-a-label + */ + "POST /repos/{owner}/{repo}/labels": Operation<"/repos/{owner}/{repo}/labels", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#sync-a-fork-branch-with-the-upstream-repository + */ + "POST /repos/{owner}/{repo}/merge-upstream": Operation<"/repos/{owner}/{repo}/merge-upstream", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#merge-a-branch + */ + "POST /repos/{owner}/{repo}/merges": Operation<"/repos/{owner}/{repo}/merges", "post">; + /** + * @see https://docs.github.com/rest/reference/issues#create-a-milestone + */ + "POST /repos/{owner}/{repo}/milestones": Operation<"/repos/{owner}/{repo}/milestones", "post">; + /** + * @see https://docs.github.com/rest/pages#create-a-github-pages-site + */ + "POST /repos/{owner}/{repo}/pages": Operation<"/repos/{owner}/{repo}/pages", "post">; + /** + * @see https://docs.github.com/rest/pages#request-a-github-pages-build + */ + "POST /repos/{owner}/{repo}/pages/builds": Operation<"/repos/{owner}/{repo}/pages/builds", "post">; + /** + * @see https://docs.github.com/rest/pages#create-a-github-pages-deployment + */ + "POST /repos/{owner}/{repo}/pages/deployment": Operation<"/repos/{owner}/{repo}/pages/deployment", "post">; + /** + * @see https://docs.github.com/rest/reference/projects#create-a-repository-project + */ + "POST /repos/{owner}/{repo}/projects": Operation<"/repos/{owner}/{repo}/projects", "post">; + /** + * @see https://docs.github.com/rest/reference/pulls#create-a-pull-request + */ + "POST /repos/{owner}/{repo}/pulls": Operation<"/repos/{owner}/{repo}/pulls", "post">; + /** + * @see https://docs.github.com/rest/reference/reactions#create-reaction-for-a-pull-request-review-comment + */ + "POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions": Operation<"/repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", "post">; + /** + * @see https://docs.github.com/rest/reference/codespaces#create-a-codespace-from-a-pull-request + */ + 
"POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/codespaces", "post">; + /** + * @see https://docs.github.com/rest/reference/pulls#create-a-review-comment-for-a-pull-request + */ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/comments", "post">; + /** + * @see https://docs.github.com/rest/reference/pulls#create-a-reply-for-a-review-comment + */ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies", "post">; + /** + * @see https://docs.github.com/rest/reference/pulls#request-reviewers-for-a-pull-request + */ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", "post">; + /** + * @see https://docs.github.com/rest/reference/pulls#create-a-review-for-a-pull-request + */ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/reviews", "post">; + /** + * @see https://docs.github.com/rest/reference/pulls#submit-a-review-for-a-pull-request + */ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#create-a-release + */ + "POST /repos/{owner}/{repo}/releases": Operation<"/repos/{owner}/{repo}/releases", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#generate-release-notes + */ + "POST /repos/{owner}/{repo}/releases/generate-notes": Operation<"/repos/{owner}/{repo}/releases/generate-notes", "post">; + /** + * @see https://docs.github.com/rest/reference/reactions/#create-reaction-for-a-release + */ + "POST /repos/{owner}/{repo}/releases/{release_id}/reactions": Operation<"/repos/{owner}/{repo}/releases/{release_id}/reactions", "post">; + /** + * @see https://docs.github.com/rest/commits/statuses#create-a-commit-status + */ + "POST /repos/{owner}/{repo}/statuses/{sha}": Operation<"/repos/{owner}/{repo}/statuses/{sha}", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#create-tag-protection-state-for-a-repository + */ + "POST /repos/{owner}/{repo}/tags/protection": Operation<"/repos/{owner}/{repo}/tags/protection", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#transfer-a-repository + */ + "POST /repos/{owner}/{repo}/transfer": Operation<"/repos/{owner}/{repo}/transfer", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#create-a-repository-using-a-template + */ + "POST /repos/{template_owner}/{template_repo}/generate": Operation<"/repos/{template_owner}/{template_repo}/generate", "post">; + /** + * @see https://docs.github.com/rest/reference/teams#create-a-discussion-legacy + */ + "POST /teams/{team_id}/discussions": Operation<"/teams/{team_id}/discussions", "post">; + /** + * @see https://docs.github.com/rest/reference/teams#create-a-discussion-comment-legacy + */ + "POST /teams/{team_id}/discussions/{discussion_number}/comments": Operation<"/teams/{team_id}/discussions/{discussion_number}/comments", "post">; + /** + * @see https://docs.github.com/rest/reference/reactions/#create-reaction-for-a-team-discussion-comment-legacy + */ + "POST /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions": 
Operation<"/teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions", "post">; + /** + * @see https://docs.github.com/rest/reference/reactions/#create-reaction-for-a-team-discussion-legacy + */ + "POST /teams/{team_id}/discussions/{discussion_number}/reactions": Operation<"/teams/{team_id}/discussions/{discussion_number}/reactions", "post">; + /** + * @see https://docs.github.com/rest/reference/codespaces#create-a-codespace-for-the-authenticated-user + */ + "POST /user/codespaces": Operation<"/user/codespaces", "post">; + /** + * @see + */ + "POST /user/codespaces/{codespace_name}/exports": Operation<"/user/codespaces/{codespace_name}/exports", "post">; + /** + * @see https://docs.github.com/rest/reference/codespaces#start-a-codespace-for-the-authenticated-user + */ + "POST /user/codespaces/{codespace_name}/start": Operation<"/user/codespaces/{codespace_name}/start", "post">; + /** + * @see https://docs.github.com/rest/reference/codespaces#stop-a-codespace-for-the-authenticated-user + */ + "POST /user/codespaces/{codespace_name}/stop": Operation<"/user/codespaces/{codespace_name}/stop", "post">; + /** + * @see https://docs.github.com/rest/reference/users#add-an-email-address-for-the-authenticated-user + */ + "POST /user/emails": Operation<"/user/emails", "post">; + /** + * @see https://docs.github.com/rest/reference/users#create-a-gpg-key-for-the-authenticated-user + */ + "POST /user/gpg_keys": Operation<"/user/gpg_keys", "post">; + /** + * @see https://docs.github.com/rest/reference/users#create-a-public-ssh-key-for-the-authenticated-user + */ + "POST /user/keys": Operation<"/user/keys", "post">; + /** + * @see https://docs.github.com/rest/reference/migrations#start-a-user-migration + */ + "POST /user/migrations": Operation<"/user/migrations", "post">; + /** + * @see https://docs.github.com/rest/reference/packages#restore-a-package-for-the-authenticated-user + */ + "POST /user/packages/{package_type}/{package_name}/restore{?token}": Operation<"/user/packages/{package_type}/{package_name}/restore", "post">; + /** + * @see https://docs.github.com/rest/reference/packages#restore-a-package-version-for-the-authenticated-user + */ + "POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore": Operation<"/user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore", "post">; + /** + * @see https://docs.github.com/rest/reference/projects#create-a-user-project + */ + "POST /user/projects": Operation<"/user/projects", "post">; + /** + * @see https://docs.github.com/rest/reference/repos#create-a-repository-for-the-authenticated-user + */ + "POST /user/repos": Operation<"/user/repos", "post">; + /** + * @see https://docs.github.com/rest/reference/users#create-an-ssh-signing-key-for-the-authenticated-user + */ + "POST /user/ssh_signing_keys": Operation<"/user/ssh_signing_keys", "post">; + /** + * @see https://docs.github.com/rest/reference/packages#restore-a-package-for-a-user + */ + "POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}": Operation<"/users/{username}/packages/{package_type}/{package_name}/restore", "post">; + /** + * @see https://docs.github.com/rest/reference/packages#restore-a-package-version-for-a-user + */ + "POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore": Operation<"/users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore", "post">; + /** + * @see 
https://docs.github.com/rest/reference/repos#upload-a-release-asset + */ + "POST {origin}/repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}": Operation<"/repos/{owner}/{repo}/releases/{release_id}/assets", "post">; + /** + * @see https://docs.github.com/rest/reference/apps#suspend-an-app-installation + */ + "PUT /app/installations/{installation_id}/suspended": Operation<"/app/installations/{installation_id}/suspended", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-github-actions-permissions-for-an-enterprise + */ + "PUT /enterprises/{enterprise}/actions/permissions": Operation<"/enterprises/{enterprise}/actions/permissions", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-selected-organizations-enabled-for-github-actions-in-an-enterprise + */ + "PUT /enterprises/{enterprise}/actions/permissions/organizations": Operation<"/enterprises/{enterprise}/actions/permissions/organizations", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#enable-a-selected-organization-for-github-actions-in-an-enterprise + */ + "PUT /enterprises/{enterprise}/actions/permissions/organizations/{org_id}": Operation<"/enterprises/{enterprise}/actions/permissions/organizations/{org_id}", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-allowed-actions-for-an-enterprise + */ + "PUT /enterprises/{enterprise}/actions/permissions/selected-actions": Operation<"/enterprises/{enterprise}/actions/permissions/selected-actions", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-default-workflow-permissions-for-an-enterprise + */ + "PUT /enterprises/{enterprise}/actions/permissions/workflow": Operation<"/enterprises/{enterprise}/actions/permissions/workflow", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-organization-access-to-a-self-hosted-runner-group-in-an-enterprise + */ + "PUT /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations": Operation<"/enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#add-organization-access-to-a-self-hosted-runner-group-in-an-enterprise + */ + "PUT /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations/{org_id}": Operation<"/enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations/{org_id}", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-self-hosted-runners-in-a-group-for-an-enterprise + */ + "PUT /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners": Operation<"/enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#add-a-self-hosted-runner-to-a-group-for-an-enterprise + */ + "PUT /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners/{runner_id}": Operation<"/enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners/{runner_id}", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-custom-labels-for-a-self-hosted-runner-for-an-enterprise + */ + "PUT /enterprises/{enterprise}/actions/runners/{runner_id}/labels": Operation<"/enterprises/{enterprise}/actions/runners/{runner_id}/labels", "put">; + /** + * @see https://docs.github.com/rest/reference/gists#star-a-gist + */ + "PUT /gists/{gist_id}/star": Operation<"/gists/{gist_id}/star", "put">; + /** + * @see 
https://docs.github.com/rest/reference/activity#mark-notifications-as-read + */ + "PUT /notifications": Operation<"/notifications", "put">; + /** + * @see https://docs.github.com/rest/reference/activity#set-a-thread-subscription + */ + "PUT /notifications/threads/{thread_id}/subscription": Operation<"/notifications/threads/{thread_id}/subscription", "put">; + /** + * @see https://docs.github.com/rest/reference/codespaces#create-or-update-an-organization-secret + */ + "PUT /organizations/{org}/codespaces/secrets/{secret_name}": Operation<"/organizations/{org}/codespaces/secrets/{secret_name}", "put">; + /** + * @see https://docs.github.com/rest/reference/codespaces#set-selected-repositories-for-an-organization-secret + */ + "PUT /organizations/{org}/codespaces/secrets/{secret_name}/repositories": Operation<"/organizations/{org}/codespaces/secrets/{secret_name}/repositories", "put">; + /** + * @see https://docs.github.com/rest/reference/codespaces#add-selected-repository-to-an-organization-secret + */ + "PUT /organizations/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}": Operation<"/organizations/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-github-actions-permissions-for-an-organization + */ + "PUT /orgs/{org}/actions/permissions": Operation<"/orgs/{org}/actions/permissions", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-selected-repositories-enabled-for-github-actions-in-an-organization + */ + "PUT /orgs/{org}/actions/permissions/repositories": Operation<"/orgs/{org}/actions/permissions/repositories", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#enable-a-selected-repository-for-github-actions-in-an-organization + */ + "PUT /orgs/{org}/actions/permissions/repositories/{repository_id}": Operation<"/orgs/{org}/actions/permissions/repositories/{repository_id}", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-allowed-actions-for-an-organization + */ + "PUT /orgs/{org}/actions/permissions/selected-actions": Operation<"/orgs/{org}/actions/permissions/selected-actions", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-default-workflow-permissions + */ + "PUT /orgs/{org}/actions/permissions/workflow": Operation<"/orgs/{org}/actions/permissions/workflow", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-repository-access-to-a-self-hosted-runner-group-in-an-organization + */ + "PUT /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories": Operation<"/orgs/{org}/actions/runner-groups/{runner_group_id}/repositories", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-self-hosted-runners-in-a-group-for-an-organization + */ + "PUT /orgs/{org}/actions/runner-groups/{runner_group_id}/runners": Operation<"/orgs/{org}/actions/runner-groups/{runner_group_id}/runners", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#add-a-self-hosted-runner-to-a-group-for-an-organization + */ + "PUT /orgs/{org}/actions/runner-groups/{runner_group_id}/runners/{runner_id}": Operation<"/orgs/{org}/actions/runner-groups/{runner_group_id}/runners/{runner_id}", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-custom-labels-for-a-self-hosted-runner-for-an-organization + */ + "PUT /orgs/{org}/actions/runners/{runner_id}/labels": Operation<"/orgs/{org}/actions/runners/{runner_id}/labels", "put">; + /** + * @see 
https://docs.github.com/rest/reference/actions#create-or-update-an-organization-secret + */ + "PUT /orgs/{org}/actions/secrets/{secret_name}": Operation<"/orgs/{org}/actions/secrets/{secret_name}", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-selected-repositories-for-an-organization-secret + */ + "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories": Operation<"/orgs/{org}/actions/secrets/{secret_name}/repositories", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#add-selected-repository-to-an-organization-secret + */ + "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}": Operation<"/orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}", "put">; + /** + * @see https://docs.github.com/rest/reference/orgs#block-a-user-from-an-organization + */ + "PUT /orgs/{org}/blocks/{username}": Operation<"/orgs/{org}/blocks/{username}", "put">; + /** + * @see https://docs.github.com/rest/reference/dependabot#create-or-update-an-organization-secret + */ + "PUT /orgs/{org}/dependabot/secrets/{secret_name}": Operation<"/orgs/{org}/dependabot/secrets/{secret_name}", "put">; + /** + * @see https://docs.github.com/rest/reference/dependabot#set-selected-repositories-for-an-organization-secret + */ + "PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories": Operation<"/orgs/{org}/dependabot/secrets/{secret_name}/repositories", "put">; + /** + * @see https://docs.github.com/rest/reference/dependabot#add-selected-repository-to-an-organization-secret + */ + "PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}": Operation<"/orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}", "put">; + /** + * @see https://docs.github.com/rest/reference/interactions#set-interaction-restrictions-for-an-organization + */ + "PUT /orgs/{org}/interaction-limits": Operation<"/orgs/{org}/interaction-limits", "put">; + /** + * @see https://docs.github.com/rest/reference/orgs#set-organization-membership-for-a-user + */ + "PUT /orgs/{org}/memberships/{username}": Operation<"/orgs/{org}/memberships/{username}", "put">; + /** + * @see https://docs.github.com/rest/reference/orgs#convert-an-organization-member-to-outside-collaborator + */ + "PUT /orgs/{org}/outside_collaborators/{username}": Operation<"/orgs/{org}/outside_collaborators/{username}", "put">; + /** + * @see https://docs.github.com/rest/reference/orgs#set-public-organization-membership-for-the-authenticated-user + */ + "PUT /orgs/{org}/public_members/{username}": Operation<"/orgs/{org}/public_members/{username}", "put">; + /** + * @see https://docs.github.com/rest/reference/orgs#add-a-security-manager-team + */ + "PUT /orgs/{org}/security-managers/teams/{team_slug}": Operation<"/orgs/{org}/security-managers/teams/{team_slug}", "put">; + /** + * @see https://docs.github.com/rest/reference/teams#add-or-update-team-membership-for-a-user + */ + "PUT /orgs/{org}/teams/{team_slug}/memberships/{username}": Operation<"/orgs/{org}/teams/{team_slug}/memberships/{username}", "put">; + /** + * @see https://docs.github.com/rest/reference/teams#add-or-update-team-project-permissions + */ + "PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}": Operation<"/orgs/{org}/teams/{team_slug}/projects/{project_id}", "put">; + /** + * @see https://docs.github.com/rest/reference/teams/#add-or-update-team-repository-permissions + */ + "PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}": 
Operation<"/orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}", "put">; + /** + * @see https://docs.github.com/rest/reference/projects#add-project-collaborator + */ + "PUT /projects/{project_id}/collaborators/{username}": Operation<"/projects/{project_id}/collaborators/{username}", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-github-actions-permissions-for-a-repository + */ + "PUT /repos/{owner}/{repo}/actions/permissions": Operation<"/repos/{owner}/{repo}/actions/permissions", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-workflow-access-to-a-repository + */ + "PUT /repos/{owner}/{repo}/actions/permissions/access": Operation<"/repos/{owner}/{repo}/actions/permissions/access", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-allowed-actions-for-a-repository + */ + "PUT /repos/{owner}/{repo}/actions/permissions/selected-actions": Operation<"/repos/{owner}/{repo}/actions/permissions/selected-actions", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-default-workflow-permissions-for-a-repository + */ + "PUT /repos/{owner}/{repo}/actions/permissions/workflow": Operation<"/repos/{owner}/{repo}/actions/permissions/workflow", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#set-custom-labels-for-a-self-hosted-runner-for-a-repository + */ + "PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels": Operation<"/repos/{owner}/{repo}/actions/runners/{runner_id}/labels", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#create-or-update-a-repository-secret + */ + "PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}": Operation<"/repos/{owner}/{repo}/actions/secrets/{secret_name}", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#disable-a-workflow + */ + "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable": Operation<"/repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#enable-a-workflow + */ + "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable": Operation<"/repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable", "put">; + /** + * @see https://docs.github.com/rest/reference/repos#enable-automated-security-fixes + */ + "PUT /repos/{owner}/{repo}/automated-security-fixes": Operation<"/repos/{owner}/{repo}/automated-security-fixes", "put">; + /** + * @see https://docs.github.com/rest/reference/repos#update-branch-protection + */ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection", "put">; + /** + * @see https://docs.github.com/rest/reference/repos#set-status-check-contexts + */ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", "put">; + /** + * @see https://docs.github.com/rest/reference/repos#set-app-access-restrictions + */ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", "put">; + /** + * @see https://docs.github.com/rest/reference/repos#set-team-access-restrictions + */ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", "put">; + /** + * @see 
https://docs.github.com/rest/reference/repos#set-user-access-restrictions + */ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users": Operation<"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", "put">; + /** + * @see https://docs.github.com/rest/reference/codespaces#create-or-update-a-repository-secret + */ + "PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}": Operation<"/repos/{owner}/{repo}/codespaces/secrets/{secret_name}", "put">; + /** + * @see https://docs.github.com/rest/collaborators/collaborators#add-a-repository-collaborator + */ + "PUT /repos/{owner}/{repo}/collaborators/{username}": Operation<"/repos/{owner}/{repo}/collaborators/{username}", "put">; + /** + * @see https://docs.github.com/rest/reference/repos#create-or-update-file-contents + */ + "PUT /repos/{owner}/{repo}/contents/{path}": Operation<"/repos/{owner}/{repo}/contents/{path}", "put">; + /** + * @see https://docs.github.com/rest/reference/dependabot#create-or-update-a-repository-secret + */ + "PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}": Operation<"/repos/{owner}/{repo}/dependabot/secrets/{secret_name}", "put">; + /** + * @see https://docs.github.com/rest/reference/repos#create-or-update-an-environment + */ + "PUT /repos/{owner}/{repo}/environments/{environment_name}": Operation<"/repos/{owner}/{repo}/environments/{environment_name}", "put">; + /** + * @see https://docs.github.com/rest/deployments/branch-policies#update-deployment-branch-policy + */ + "PUT /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}": Operation<"/repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}", "put">; + /** + * @see https://docs.github.com/rest/reference/migrations#start-an-import + */ + "PUT /repos/{owner}/{repo}/import": Operation<"/repos/{owner}/{repo}/import", "put">; + /** + * @see https://docs.github.com/rest/reference/interactions#set-interaction-restrictions-for-a-repository + */ + "PUT /repos/{owner}/{repo}/interaction-limits": Operation<"/repos/{owner}/{repo}/interaction-limits", "put">; + /** + * @see https://docs.github.com/rest/reference/issues#set-labels-for-an-issue + */ + "PUT /repos/{owner}/{repo}/issues/{issue_number}/labels": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/labels", "put">; + /** + * @see https://docs.github.com/rest/reference/issues#lock-an-issue + */ + "PUT /repos/{owner}/{repo}/issues/{issue_number}/lock": Operation<"/repos/{owner}/{repo}/issues/{issue_number}/lock", "put">; + /** + * @see https://docs.github.com/rest/reference/repos#enable-git-lfs-for-a-repository + */ + "PUT /repos/{owner}/{repo}/lfs": Operation<"/repos/{owner}/{repo}/lfs", "put">; + /** + * @see https://docs.github.com/rest/reference/activity#mark-repository-notifications-as-read + */ + "PUT /repos/{owner}/{repo}/notifications": Operation<"/repos/{owner}/{repo}/notifications", "put">; + /** + * @see https://docs.github.com/rest/pages#update-information-about-a-github-pages-site + */ + "PUT /repos/{owner}/{repo}/pages": Operation<"/repos/{owner}/{repo}/pages", "put">; + /** + * @see https://docs.github.com/rest/reference/pulls#merge-a-pull-request + */ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/merge", "put">; + /** + * @see https://docs.github.com/rest/reference/pulls#update-a-review-for-a-pull-request + */ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}": 
Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}", "put">; + /** + * @see https://docs.github.com/rest/reference/pulls#dismiss-a-review-for-a-pull-request + */ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals", "put">; + /** + * @see https://docs.github.com/rest/reference/pulls#update-a-pull-request-branch + */ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch": Operation<"/repos/{owner}/{repo}/pulls/{pull_number}/update-branch", "put">; + /** + * @see https://docs.github.com/rest/reference/activity#set-a-repository-subscription + */ + "PUT /repos/{owner}/{repo}/subscription": Operation<"/repos/{owner}/{repo}/subscription", "put">; + /** + * @see https://docs.github.com/rest/reference/repos#replace-all-repository-topics + */ + "PUT /repos/{owner}/{repo}/topics": Operation<"/repos/{owner}/{repo}/topics", "put">; + /** + * @see https://docs.github.com/rest/reference/repos#enable-vulnerability-alerts + */ + "PUT /repos/{owner}/{repo}/vulnerability-alerts": Operation<"/repos/{owner}/{repo}/vulnerability-alerts", "put">; + /** + * @see https://docs.github.com/rest/reference/actions#create-or-update-an-environment-secret + */ + "PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}": Operation<"/repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}", "put">; + /** + * @see https://docs.github.com/rest/reference/teams#add-team-member-legacy + */ + "PUT /teams/{team_id}/members/{username}": Operation<"/teams/{team_id}/members/{username}", "put">; + /** + * @see https://docs.github.com/rest/reference/teams#add-or-update-team-membership-for-a-user-legacy + */ + "PUT /teams/{team_id}/memberships/{username}": Operation<"/teams/{team_id}/memberships/{username}", "put">; + /** + * @see https://docs.github.com/rest/reference/teams/#add-or-update-team-project-permissions-legacy + */ + "PUT /teams/{team_id}/projects/{project_id}": Operation<"/teams/{team_id}/projects/{project_id}", "put">; + /** + * @see https://docs.github.com/rest/reference/teams/#add-or-update-team-repository-permissions-legacy + */ + "PUT /teams/{team_id}/repos/{owner}/{repo}": Operation<"/teams/{team_id}/repos/{owner}/{repo}", "put">; + /** + * @see https://docs.github.com/rest/reference/users#block-a-user + */ + "PUT /user/blocks/{username}": Operation<"/user/blocks/{username}", "put">; + /** + * @see https://docs.github.com/rest/reference/codespaces#create-or-update-a-secret-for-the-authenticated-user + */ + "PUT /user/codespaces/secrets/{secret_name}": Operation<"/user/codespaces/secrets/{secret_name}", "put">; + /** + * @see https://docs.github.com/rest/reference/codespaces#set-selected-repositories-for-a-user-secret + */ + "PUT /user/codespaces/secrets/{secret_name}/repositories": Operation<"/user/codespaces/secrets/{secret_name}/repositories", "put">; + /** + * @see https://docs.github.com/rest/reference/codespaces#add-a-selected-repository-to-a-user-secret + */ + "PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}": Operation<"/user/codespaces/secrets/{secret_name}/repositories/{repository_id}", "put">; + /** + * @see https://docs.github.com/rest/reference/users#follow-a-user + */ + "PUT /user/following/{username}": Operation<"/user/following/{username}", "put">; + /** + * @see https://docs.github.com/rest/reference/apps#add-a-repository-to-an-app-installation + */ + "PUT 
/user/installations/{installation_id}/repositories/{repository_id}": Operation<"/user/installations/{installation_id}/repositories/{repository_id}", "put">; + /** + * @see https://docs.github.com/rest/reference/interactions#set-interaction-restrictions-for-your-public-repositories + */ + "PUT /user/interaction-limits": Operation<"/user/interaction-limits", "put">; + /** + * @see https://docs.github.com/rest/reference/activity#star-a-repository-for-the-authenticated-user + */ + "PUT /user/starred/{owner}/{repo}": Operation<"/user/starred/{owner}/{repo}", "put">; +} +export {}; diff --git a/node_modules/@octokit/types/dist-types/index.d.ts b/node_modules/@octokit/types/dist-types/index.d.ts new file mode 100644 index 0000000..004ae9b --- /dev/null +++ b/node_modules/@octokit/types/dist-types/index.d.ts @@ -0,0 +1,21 @@ +export * from "./AuthInterface"; +export * from "./EndpointDefaults"; +export * from "./EndpointInterface"; +export * from "./EndpointOptions"; +export * from "./Fetch"; +export * from "./OctokitResponse"; +export * from "./RequestError"; +export * from "./RequestHeaders"; +export * from "./RequestInterface"; +export * from "./RequestMethod"; +export * from "./RequestOptions"; +export * from "./RequestParameters"; +export * from "./RequestRequestOptions"; +export * from "./ResponseHeaders"; +export * from "./Route"; +export * from "./Signal"; +export * from "./StrategyInterface"; +export * from "./Url"; +export * from "./VERSION"; +export * from "./GetResponseTypeFromEndpointMethod"; +export * from "./generated/Endpoints"; diff --git a/node_modules/@octokit/types/dist-web/index.js b/node_modules/@octokit/types/dist-web/index.js new file mode 100644 index 0000000..df0c1ec --- /dev/null +++ b/node_modules/@octokit/types/dist-web/index.js @@ -0,0 +1,4 @@ +const VERSION = "8.0.0"; + +export { VERSION }; +//# sourceMappingURL=index.js.map diff --git a/node_modules/@octokit/types/dist-web/index.js.map b/node_modules/@octokit/types/dist-web/index.js.map new file mode 100644 index 0000000..cd0e254 --- /dev/null +++ b/node_modules/@octokit/types/dist-web/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sources":["../dist-src/VERSION.js"],"sourcesContent":["export const VERSION = \"0.0.0-development\";\n"],"names":[],"mappings":"AAAY,MAAC,OAAO,GAAG;;;;"} \ No newline at end of file diff --git a/node_modules/@octokit/types/package.json b/node_modules/@octokit/types/package.json new file mode 100644 index 0000000..cd647ab --- /dev/null +++ b/node_modules/@octokit/types/package.json @@ -0,0 +1,54 @@ +{ + "name": "@octokit/types", + "description": "Shared TypeScript definitions for Octokit projects", + "version": "8.0.0", + "license": "MIT", + "files": [ + "dist-*/", + "bin/" + ], + "source": "dist-src/index.js", + "types": "dist-types/index.d.ts", + "octokit": { + "openapi-version": "8.0.1" + }, + "main": "dist-node/index.js", + "module": "dist-web/index.js", + "pika": true, + "sideEffects": false, + "keywords": [ + "github", + "api", + "sdk", + "toolkit", + "typescript" + ], + "repository": "github:octokit/types.ts", + "dependencies": { + "@octokit/openapi-types": "^14.0.0" + }, + "devDependencies": { + "@pika/pack": "^0.3.7", + "@pika/plugin-build-node": "^0.9.0", + "@pika/plugin-build-web": "^0.9.0", + "@pika/plugin-ts-standard-pkg": "^0.9.0", + "@types/node": ">= 8", + "github-openapi-graphql-query": "^3.0.0", + "handlebars": "^4.7.6", + "json-schema-to-typescript": "^11.0.0", + "lodash.set": "^4.3.2", + "npm-run-all": "^4.1.5", + "pascal-case": "^3.1.1", + 
"pika-plugin-merge-properties": "^1.0.6", + "prettier": "^2.0.0", + "semantic-release": "^19.0.3", + "semantic-release-plugin-update-version-in-files": "^1.0.0", + "sort-keys": "^4.2.0", + "string-to-jsdoc-comment": "^1.0.0", + "typedoc": "^0.23.0", + "typescript": "^4.0.2" + }, + "publishConfig": { + "access": "public" + } +} diff --git a/node_modules/@octokit/webhooks-methods/LICENSE b/node_modules/@octokit/webhooks-methods/LICENSE new file mode 100644 index 0000000..57049d0 --- /dev/null +++ b/node_modules/@octokit/webhooks-methods/LICENSE @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) 2021 Octokit contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/@octokit/webhooks-methods/README.md b/node_modules/@octokit/webhooks-methods/README.md new file mode 100644 index 0000000..e31581e --- /dev/null +++ b/node_modules/@octokit/webhooks-methods/README.md @@ -0,0 +1,193 @@ +# webhooks-methods.js + +> Methods to handle GitHub Webhook requests + +[![@latest](https://img.shields.io/npm/v/@octokit/webhooks-methods.svg)](https://www.npmjs.com/package/@octokit/webhooks-methods) +[![Build Status](https://github.com/octokit/webhooks-methods.js/workflows/Test/badge.svg)](https://github.com/octokit/webhooks-methods.js/actions?query=workflow%3ATest+branch%3Amain) + +

+Table of contents + + + +- [usage](#usage) +- [Methods](#methods) + - [`sign()`](#sign) + - [`verify()`](#verify) +- [Contributing](#contributing) +- [License](#license) + + + +
+ +## usage + + + + + + +
+
+**Browsers**
+
+🚧 `@octokit/webhooks-methods` is not meant to be used in browsers. The webhook secret is a sensitive credential that must not be exposed to users.
+
+Load `@octokit/webhooks-methods` directly from [cdn.skypack.dev](https://cdn.skypack.dev)
+
+```html
+<script type="module">
+import { sign, verify } from "https://cdn.skypack.dev/@octokit/webhooks-methods";
+</script>
+```
+
+
+**Node**
+
+Install with `npm install @octokit/core @octokit/webhooks-methods`
+
+```js
+const { sign, verify } = require("@octokit/webhooks-methods");
+```
+
+ +```js +await sign("mysecret", eventPayloadString); +// resolves with a string like "sha256=4864d2759938a15468b5df9ade20bf161da9b4f737ea61794142f3484236bda3" + +await sign({ secret: "mysecret", algorithm: "sha1" }, eventPayloadString); +// resolves with a string like "sha1=d03207e4b030cf234e3447bac4d93add4c6643d8" + +await verify("mysecret", eventPayloadString, "sha256=486d27..."); +// resolves with true or false +``` + +## Methods + +### `sign()` + +```js +await sign(secret, eventPayloadString); +await sign({ secret, algorithm }, eventPayloadString); +``` + + + + + + + + + + + + + + +
+
+| parameter | type | description |
+| --- | --- | --- |
+| `secret` | `String` | **Required.** Secret as configured in GitHub Settings. |
+| `algorithm` | `String` | Algorithm to calculate signature. Can be set to `sha1` or `sha256`. `sha1` is supported for legacy reasons: GitHub Enterprise Server 2.22 and older do not send the `X-Hub-Signature-256` header. Defaults to `sha256`. Learn more at [Validating payloads from GitHub](https://docs.github.com/en/developers/webhooks-and-events/securing-your-webhooks#validating-payloads-from-github). |
+| `eventPayloadString` | `String` | **Required.** Webhook request payload as received from GitHub. If you only have access to an already parsed object, stringify it with `JSON.stringify(payload, null, 2) + '\n'`. |
+ +Resolves with a `signature` string. Throws an error if an argument is missing. + +### `verify()` + +```js +await verify(secret, eventPayloadString, signature); +``` + + + + + + + + + + + + + + +
+
+| parameter | type | description |
+| --- | --- | --- |
+| `secret` | `String` | **Required.** Secret as configured in GitHub Settings. |
+| `eventPayloadString` | `String` | **Required.** Webhook request payload as received from GitHub. If you only have access to an already parsed object, stringify it with `JSON.stringify(payload, null, 2) + '\n'`. |
+| `signature` | `String` | **Required.** Signature string as calculated by `sign()`. |
+ +Resolves with `true` or `false`. Throws error if an argument is missing. + +## Contributing + +See [CONTRIBUTING.md](CONTRIBUTING.md) + +## License + +[MIT](LICENSE) diff --git a/node_modules/@octokit/webhooks-methods/dist-node/index.js b/node_modules/@octokit/webhooks-methods/dist-node/index.js new file mode 100644 index 0000000..79dcd65 --- /dev/null +++ b/node_modules/@octokit/webhooks-methods/dist-node/index.js @@ -0,0 +1,70 @@ +'use strict'; + +Object.defineProperty(exports, '__esModule', { value: true }); + +var crypto = require('crypto'); +var buffer = require('buffer'); + +var Algorithm; + +(function (Algorithm) { + Algorithm["SHA1"] = "sha1"; + Algorithm["SHA256"] = "sha256"; +})(Algorithm || (Algorithm = {})); + +const VERSION = "3.0.1"; + +async function sign(options, payload) { + const { + secret, + algorithm + } = typeof options === "object" ? { + secret: options.secret, + algorithm: options.algorithm || Algorithm.SHA256 + } : { + secret: options, + algorithm: Algorithm.SHA256 + }; + + if (!secret || !payload) { + throw new TypeError("[@octokit/webhooks-methods] secret & payload required for sign()"); + } + + if (!Object.values(Algorithm).includes(algorithm)) { + throw new TypeError(`[@octokit/webhooks] Algorithm ${algorithm} is not supported. Must be 'sha1' or 'sha256'`); + } + + return `${algorithm}=${crypto.createHmac(algorithm, secret).update(payload).digest("hex")}`; +} +sign.VERSION = VERSION; + +const getAlgorithm = signature => { + return signature.startsWith("sha256=") ? "sha256" : "sha1"; +}; + +async function verify(secret, eventPayload, signature) { + if (!secret || !eventPayload || !signature) { + throw new TypeError("[@octokit/webhooks-methods] secret, eventPayload & signature required"); + } + + const signatureBuffer = buffer.Buffer.from(signature); + const algorithm = getAlgorithm(signature); + const verificationBuffer = buffer.Buffer.from(await sign({ + secret, + algorithm + }, eventPayload)); + + if (signatureBuffer.length !== verificationBuffer.length) { + return false; + } // constant time comparison to prevent timing attachs + // https://stackoverflow.com/a/31096242/206879 + // https://en.wikipedia.org/wiki/Timing_attack + + + return crypto.timingSafeEqual(signatureBuffer, verificationBuffer); +} +verify.VERSION = VERSION; + +exports.sign = sign; +exports.verify = verify; +//# sourceMappingURL=index.js.map diff --git a/node_modules/@octokit/webhooks-methods/dist-node/index.js.map b/node_modules/@octokit/webhooks-methods/dist-node/index.js.map new file mode 100644 index 0000000..c08a33a --- /dev/null +++ b/node_modules/@octokit/webhooks-methods/dist-node/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sources":["../dist-src/types.js","../dist-src/version.js","../dist-src/node/sign.js","../dist-src/utils.js","../dist-src/node/verify.js"],"sourcesContent":["export var Algorithm;\n(function (Algorithm) {\n Algorithm[\"SHA1\"] = \"sha1\";\n Algorithm[\"SHA256\"] = \"sha256\";\n})(Algorithm || (Algorithm = {}));\n","export const VERSION = \"3.0.1\";\n","import { createHmac } from \"crypto\";\nimport { Algorithm } from \"../types\";\nimport { VERSION } from \"../version\";\nexport async function sign(options, payload) {\n const { secret, algorithm } = typeof options === \"object\"\n ? 
{\n secret: options.secret,\n algorithm: options.algorithm || Algorithm.SHA256,\n }\n : { secret: options, algorithm: Algorithm.SHA256 };\n if (!secret || !payload) {\n throw new TypeError(\"[@octokit/webhooks-methods] secret & payload required for sign()\");\n }\n if (!Object.values(Algorithm).includes(algorithm)) {\n throw new TypeError(`[@octokit/webhooks] Algorithm ${algorithm} is not supported. Must be 'sha1' or 'sha256'`);\n }\n return `${algorithm}=${createHmac(algorithm, secret)\n .update(payload)\n .digest(\"hex\")}`;\n}\nsign.VERSION = VERSION;\n","export const getAlgorithm = (signature) => {\n return signature.startsWith(\"sha256=\") ? \"sha256\" : \"sha1\";\n};\n","import { timingSafeEqual } from \"crypto\";\nimport { Buffer } from \"buffer\";\nimport { sign } from \"./sign\";\nimport { VERSION } from \"../version\";\nimport { getAlgorithm } from \"../utils\";\nexport async function verify(secret, eventPayload, signature) {\n if (!secret || !eventPayload || !signature) {\n throw new TypeError(\"[@octokit/webhooks-methods] secret, eventPayload & signature required\");\n }\n const signatureBuffer = Buffer.from(signature);\n const algorithm = getAlgorithm(signature);\n const verificationBuffer = Buffer.from(await sign({ secret, algorithm }, eventPayload));\n if (signatureBuffer.length !== verificationBuffer.length) {\n return false;\n }\n // constant time comparison to prevent timing attachs\n // https://stackoverflow.com/a/31096242/206879\n // https://en.wikipedia.org/wiki/Timing_attack\n return timingSafeEqual(signatureBuffer, verificationBuffer);\n}\nverify.VERSION = VERSION;\n"],"names":["Algorithm","VERSION","sign","options","payload","secret","algorithm","SHA256","TypeError","Object","values","includes","createHmac","update","digest","getAlgorithm","signature","startsWith","verify","eventPayload","signatureBuffer","Buffer","from","verificationBuffer","length","timingSafeEqual"],"mappings":";;;;;;;AAAO,IAAIA,SAAJ;;AACP,CAAC,UAAUA,SAAV,EAAqB;EAClBA,SAAS,CAAC,MAAD,CAAT,GAAoB,MAApB;EACAA,SAAS,CAAC,QAAD,CAAT,GAAsB,QAAtB;AACH,CAHD,EAGGA,SAAS,KAAKA,SAAS,GAAG,EAAjB,CAHZ;;ACDO,MAAMC,OAAO,GAAG,mBAAhB;;ACGA,eAAeC,IAAf,CAAoBC,OAApB,EAA6BC,OAA7B,EAAsC;EACzC,MAAM;IAAEC,MAAF;IAAUC;MAAc,OAAOH,OAAP,KAAmB,QAAnB,GACxB;IACEE,MAAM,EAAEF,OAAO,CAACE,MADlB;IAEEC,SAAS,EAAEH,OAAO,CAACG,SAAR,IAAqBN,SAAS,CAACO;GAHpB,GAKxB;IAAEF,MAAM,EAAEF,OAAV;IAAmBG,SAAS,EAAEN,SAAS,CAACO;GAL9C;;EAMA,IAAI,CAACF,MAAD,IAAW,CAACD,OAAhB,EAAyB;IACrB,MAAM,IAAII,SAAJ,CAAc,kEAAd,CAAN;;;EAEJ,IAAI,CAACC,MAAM,CAACC,MAAP,CAAcV,SAAd,EAAyBW,QAAzB,CAAkCL,SAAlC,CAAL,EAAmD;IAC/C,MAAM,IAAIE,SAAJ,CAAe,iCAAgCF,SAAU,gDAAzD,CAAN;;;EAEJ,OAAQ,GAAEA,SAAU,IAAGM,iBAAU,CAACN,SAAD,EAAYD,MAAZ,CAAV,CAClBQ,MADkB,CACXT,OADW,EAElBU,MAFkB,CAEX,KAFW,CAEJ,EAFnB;AAGH;AACDZ,IAAI,CAACD,OAAL,GAAeA,OAAf;;ACpBO,MAAMc,YAAY,GAAIC,SAAD,IAAe;EACvC,OAAOA,SAAS,CAACC,UAAV,CAAqB,SAArB,IAAkC,QAAlC,GAA6C,MAApD;AACH,CAFM;;ACKA,eAAeC,MAAf,CAAsBb,MAAtB,EAA8Bc,YAA9B,EAA4CH,SAA5C,EAAuD;EAC1D,IAAI,CAACX,MAAD,IAAW,CAACc,YAAZ,IAA4B,CAACH,SAAjC,EAA4C;IACxC,MAAM,IAAIR,SAAJ,CAAc,uEAAd,CAAN;;;EAEJ,MAAMY,eAAe,GAAGC,aAAM,CAACC,IAAP,CAAYN,SAAZ,CAAxB;EACA,MAAMV,SAAS,GAAGS,YAAY,CAACC,SAAD,CAA9B;EACA,MAAMO,kBAAkB,GAAGF,aAAM,CAACC,IAAP,CAAY,MAAMpB,IAAI,CAAC;IAAEG,MAAF;IAAUC;GAAX,EAAwBa,YAAxB,CAAtB,CAA3B;;EACA,IAAIC,eAAe,CAACI,MAAhB,KAA2BD,kBAAkB,CAACC,MAAlD,EAA0D;IACtD,OAAO,KAAP;GARsD;;;;;EAa1D,OAAOC,sBAAe,CAACL,eAAD,EAAkBG,kBAAlB,CAAtB;AACH;AACDL,MAAM,CAACjB,OAAP,GAAiBA,OAAjB;;;;;"} \ No newline at end of file diff --git a/node_modules/@octokit/webhooks-methods/dist-src/index.js 
b/node_modules/@octokit/webhooks-methods/dist-src/index.js new file mode 100644 index 0000000..18d0139 --- /dev/null +++ b/node_modules/@octokit/webhooks-methods/dist-src/index.js @@ -0,0 +1,2 @@ +export { sign } from "./node/sign"; +export { verify } from "./node/verify"; diff --git a/node_modules/@octokit/webhooks-methods/dist-src/node/sign.js b/node_modules/@octokit/webhooks-methods/dist-src/node/sign.js new file mode 100644 index 0000000..6c4f6a0 --- /dev/null +++ b/node_modules/@octokit/webhooks-methods/dist-src/node/sign.js @@ -0,0 +1,21 @@ +import { createHmac } from "crypto"; +import { Algorithm } from "../types"; +import { VERSION } from "../version"; +export async function sign(options, payload) { + const { secret, algorithm } = typeof options === "object" + ? { + secret: options.secret, + algorithm: options.algorithm || Algorithm.SHA256, + } + : { secret: options, algorithm: Algorithm.SHA256 }; + if (!secret || !payload) { + throw new TypeError("[@octokit/webhooks-methods] secret & payload required for sign()"); + } + if (!Object.values(Algorithm).includes(algorithm)) { + throw new TypeError(`[@octokit/webhooks] Algorithm ${algorithm} is not supported. Must be 'sha1' or 'sha256'`); + } + return `${algorithm}=${createHmac(algorithm, secret) + .update(payload) + .digest("hex")}`; +} +sign.VERSION = VERSION; diff --git a/node_modules/@octokit/webhooks-methods/dist-src/node/verify.js b/node_modules/@octokit/webhooks-methods/dist-src/node/verify.js new file mode 100644 index 0000000..e26ac0a --- /dev/null +++ b/node_modules/@octokit/webhooks-methods/dist-src/node/verify.js @@ -0,0 +1,21 @@ +import { timingSafeEqual } from "crypto"; +import { Buffer } from "buffer"; +import { sign } from "./sign"; +import { VERSION } from "../version"; +import { getAlgorithm } from "../utils"; +export async function verify(secret, eventPayload, signature) { + if (!secret || !eventPayload || !signature) { + throw new TypeError("[@octokit/webhooks-methods] secret, eventPayload & signature required"); + } + const signatureBuffer = Buffer.from(signature); + const algorithm = getAlgorithm(signature); + const verificationBuffer = Buffer.from(await sign({ secret, algorithm }, eventPayload)); + if (signatureBuffer.length !== verificationBuffer.length) { + return false; + } + // constant time comparison to prevent timing attachs + // https://stackoverflow.com/a/31096242/206879 + // https://en.wikipedia.org/wiki/Timing_attack + return timingSafeEqual(signatureBuffer, verificationBuffer); +} +verify.VERSION = VERSION; diff --git a/node_modules/@octokit/webhooks-methods/dist-src/types.js b/node_modules/@octokit/webhooks-methods/dist-src/types.js new file mode 100644 index 0000000..d4443bf --- /dev/null +++ b/node_modules/@octokit/webhooks-methods/dist-src/types.js @@ -0,0 +1,5 @@ +export var Algorithm; +(function (Algorithm) { + Algorithm["SHA1"] = "sha1"; + Algorithm["SHA256"] = "sha256"; +})(Algorithm || (Algorithm = {})); diff --git a/node_modules/@octokit/webhooks-methods/dist-src/utils.js b/node_modules/@octokit/webhooks-methods/dist-src/utils.js new file mode 100644 index 0000000..9c5e1fb --- /dev/null +++ b/node_modules/@octokit/webhooks-methods/dist-src/utils.js @@ -0,0 +1,3 @@ +export const getAlgorithm = (signature) => { + return signature.startsWith("sha256=") ? 
"sha256" : "sha1"; +}; diff --git a/node_modules/@octokit/webhooks-methods/dist-src/version.js b/node_modules/@octokit/webhooks-methods/dist-src/version.js new file mode 100644 index 0000000..b7ea020 --- /dev/null +++ b/node_modules/@octokit/webhooks-methods/dist-src/version.js @@ -0,0 +1 @@ +export const VERSION = "3.0.1"; diff --git a/node_modules/@octokit/webhooks-methods/dist-src/web.js b/node_modules/@octokit/webhooks-methods/dist-src/web.js new file mode 100644 index 0000000..861f198 --- /dev/null +++ b/node_modules/@octokit/webhooks-methods/dist-src/web.js @@ -0,0 +1,57 @@ +import { Algorithm } from "./types"; +import { getAlgorithm } from "./utils"; +const enc = new TextEncoder(); +function hexToUInt8Array(string) { + // convert string to pairs of 2 characters + const pairs = string.match(/[\dA-F]{2}/gi); + // convert the octets to integers + const integers = pairs.map(function (s) { + return parseInt(s, 16); + }); + return new Uint8Array(integers); +} +function UInt8ArrayToHex(signature) { + return Array.prototype.map + .call(new Uint8Array(signature), (x) => x.toString(16).padStart(2, "0")) + .join(""); +} +function getHMACHashName(algorithm) { + return { + [Algorithm.SHA1]: "SHA-1", + [Algorithm.SHA256]: "SHA-256", + }[algorithm]; +} +async function importKey(secret, algorithm) { + // ref: https://developer.mozilla.org/en-US/docs/Web/API/HmacImportParams + return crypto.subtle.importKey("raw", // raw format of the key - should be Uint8Array + enc.encode(secret), { + // algorithm details + name: "HMAC", + hash: { name: getHMACHashName(algorithm) }, + }, false, // export = false + ["sign", "verify"] // what this key can do + ); +} +export async function sign(options, payload) { + const { secret, algorithm } = typeof options === "object" + ? { + secret: options.secret, + algorithm: options.algorithm || Algorithm.SHA256, + } + : { secret: options, algorithm: Algorithm.SHA256 }; + if (!secret || !payload) { + throw new TypeError("[@octokit/webhooks-methods] secret & payload required for sign()"); + } + if (!Object.values(Algorithm).includes(algorithm)) { + throw new TypeError(`[@octokit/webhooks] Algorithm ${algorithm} is not supported. 
Must be 'sha1' or 'sha256'`); + } + const signature = await crypto.subtle.sign("HMAC", await importKey(secret, algorithm), enc.encode(payload)); + return `${algorithm}=${UInt8ArrayToHex(signature)}`; +} +export async function verify(secret, eventPayload, signature) { + if (!secret || !eventPayload || !signature) { + throw new TypeError("[@octokit/webhooks-methods] secret, eventPayload & signature required"); + } + const algorithm = getAlgorithm(signature); + return await crypto.subtle.verify("HMAC", await importKey(secret, algorithm), hexToUInt8Array(signature.replace(`${algorithm}=`, "")), enc.encode(eventPayload)); +} diff --git a/node_modules/@octokit/webhooks-methods/dist-types/index.d.ts b/node_modules/@octokit/webhooks-methods/dist-types/index.d.ts new file mode 100644 index 0000000..18d0139 --- /dev/null +++ b/node_modules/@octokit/webhooks-methods/dist-types/index.d.ts @@ -0,0 +1,2 @@ +export { sign } from "./node/sign"; +export { verify } from "./node/verify"; diff --git a/node_modules/@octokit/webhooks-methods/dist-types/node/sign.d.ts b/node_modules/@octokit/webhooks-methods/dist-types/node/sign.d.ts new file mode 100644 index 0000000..688ae0e --- /dev/null +++ b/node_modules/@octokit/webhooks-methods/dist-types/node/sign.d.ts @@ -0,0 +1,5 @@ +import { SignOptions } from "../types"; +export declare function sign(options: SignOptions | string, payload: string): Promise; +export declare namespace sign { + var VERSION: string; +} diff --git a/node_modules/@octokit/webhooks-methods/dist-types/node/verify.d.ts b/node_modules/@octokit/webhooks-methods/dist-types/node/verify.d.ts new file mode 100644 index 0000000..1d475a4 --- /dev/null +++ b/node_modules/@octokit/webhooks-methods/dist-types/node/verify.d.ts @@ -0,0 +1,4 @@ +export declare function verify(secret: string, eventPayload: string, signature: string): Promise; +export declare namespace verify { + var VERSION: string; +} diff --git a/node_modules/@octokit/webhooks-methods/dist-types/types.d.ts b/node_modules/@octokit/webhooks-methods/dist-types/types.d.ts new file mode 100644 index 0000000..182baec --- /dev/null +++ b/node_modules/@octokit/webhooks-methods/dist-types/types.d.ts @@ -0,0 +1,9 @@ +export declare enum Algorithm { + SHA1 = "sha1", + SHA256 = "sha256" +} +export declare type AlgorithmLike = Algorithm | "sha1" | "sha256"; +export declare type SignOptions = { + secret: string; + algorithm?: AlgorithmLike; +}; diff --git a/node_modules/@octokit/webhooks-methods/dist-types/utils.d.ts b/node_modules/@octokit/webhooks-methods/dist-types/utils.d.ts new file mode 100644 index 0000000..5bab855 --- /dev/null +++ b/node_modules/@octokit/webhooks-methods/dist-types/utils.d.ts @@ -0,0 +1 @@ +export declare const getAlgorithm: (signature: string) => "sha1" | "sha256"; diff --git a/node_modules/@octokit/webhooks-methods/dist-types/version.d.ts b/node_modules/@octokit/webhooks-methods/dist-types/version.d.ts new file mode 100644 index 0000000..86e4c6d --- /dev/null +++ b/node_modules/@octokit/webhooks-methods/dist-types/version.d.ts @@ -0,0 +1 @@ +export declare const VERSION = "3.0.1"; diff --git a/node_modules/@octokit/webhooks-methods/dist-types/web.d.ts b/node_modules/@octokit/webhooks-methods/dist-types/web.d.ts new file mode 100644 index 0000000..13d910e --- /dev/null +++ b/node_modules/@octokit/webhooks-methods/dist-types/web.d.ts @@ -0,0 +1,3 @@ +import { SignOptions } from "./types"; +export declare function sign(options: SignOptions | string, payload: string): Promise; +export declare function verify(secret: string, 
eventPayload: string, signature: string): Promise; diff --git a/node_modules/@octokit/webhooks-methods/dist-web/index.js b/node_modules/@octokit/webhooks-methods/dist-web/index.js new file mode 100644 index 0000000..d609ef0 --- /dev/null +++ b/node_modules/@octokit/webhooks-methods/dist-web/index.js @@ -0,0 +1,68 @@ +var Algorithm; +(function (Algorithm) { + Algorithm["SHA1"] = "sha1"; + Algorithm["SHA256"] = "sha256"; +})(Algorithm || (Algorithm = {})); + +const getAlgorithm = (signature) => { + return signature.startsWith("sha256=") ? "sha256" : "sha1"; +}; + +const enc = new TextEncoder(); +function hexToUInt8Array(string) { + // convert string to pairs of 2 characters + const pairs = string.match(/[\dA-F]{2}/gi); + // convert the octets to integers + const integers = pairs.map(function (s) { + return parseInt(s, 16); + }); + return new Uint8Array(integers); +} +function UInt8ArrayToHex(signature) { + return Array.prototype.map + .call(new Uint8Array(signature), (x) => x.toString(16).padStart(2, "0")) + .join(""); +} +function getHMACHashName(algorithm) { + return { + [Algorithm.SHA1]: "SHA-1", + [Algorithm.SHA256]: "SHA-256", + }[algorithm]; +} +async function importKey(secret, algorithm) { + // ref: https://developer.mozilla.org/en-US/docs/Web/API/HmacImportParams + return crypto.subtle.importKey("raw", // raw format of the key - should be Uint8Array + enc.encode(secret), { + // algorithm details + name: "HMAC", + hash: { name: getHMACHashName(algorithm) }, + }, false, // export = false + ["sign", "verify"] // what this key can do + ); +} +async function sign(options, payload) { + const { secret, algorithm } = typeof options === "object" + ? { + secret: options.secret, + algorithm: options.algorithm || Algorithm.SHA256, + } + : { secret: options, algorithm: Algorithm.SHA256 }; + if (!secret || !payload) { + throw new TypeError("[@octokit/webhooks-methods] secret & payload required for sign()"); + } + if (!Object.values(Algorithm).includes(algorithm)) { + throw new TypeError(`[@octokit/webhooks] Algorithm ${algorithm} is not supported. Must be 'sha1' or 'sha256'`); + } + const signature = await crypto.subtle.sign("HMAC", await importKey(secret, algorithm), enc.encode(payload)); + return `${algorithm}=${UInt8ArrayToHex(signature)}`; +} +async function verify(secret, eventPayload, signature) { + if (!secret || !eventPayload || !signature) { + throw new TypeError("[@octokit/webhooks-methods] secret, eventPayload & signature required"); + } + const algorithm = getAlgorithm(signature); + return await crypto.subtle.verify("HMAC", await importKey(secret, algorithm), hexToUInt8Array(signature.replace(`${algorithm}=`, "")), enc.encode(eventPayload)); +} + +export { sign, verify }; +//# sourceMappingURL=index.js.map diff --git a/node_modules/@octokit/webhooks-methods/dist-web/index.js.map b/node_modules/@octokit/webhooks-methods/dist-web/index.js.map new file mode 100644 index 0000000..ec3812d --- /dev/null +++ b/node_modules/@octokit/webhooks-methods/dist-web/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sources":["../dist-src/types.js","../dist-src/utils.js","../dist-src/web.js"],"sourcesContent":["export var Algorithm;\n(function (Algorithm) {\n Algorithm[\"SHA1\"] = \"sha1\";\n Algorithm[\"SHA256\"] = \"sha256\";\n})(Algorithm || (Algorithm = {}));\n","export const getAlgorithm = (signature) => {\n return signature.startsWith(\"sha256=\") ? 
\"sha256\" : \"sha1\";\n};\n","import { Algorithm } from \"./types\";\nimport { getAlgorithm } from \"./utils\";\nconst enc = new TextEncoder();\nfunction hexToUInt8Array(string) {\n // convert string to pairs of 2 characters\n const pairs = string.match(/[\\dA-F]{2}/gi);\n // convert the octets to integers\n const integers = pairs.map(function (s) {\n return parseInt(s, 16);\n });\n return new Uint8Array(integers);\n}\nfunction UInt8ArrayToHex(signature) {\n return Array.prototype.map\n .call(new Uint8Array(signature), (x) => x.toString(16).padStart(2, \"0\"))\n .join(\"\");\n}\nfunction getHMACHashName(algorithm) {\n return {\n [Algorithm.SHA1]: \"SHA-1\",\n [Algorithm.SHA256]: \"SHA-256\",\n }[algorithm];\n}\nasync function importKey(secret, algorithm) {\n // ref: https://developer.mozilla.org/en-US/docs/Web/API/HmacImportParams\n return crypto.subtle.importKey(\"raw\", // raw format of the key - should be Uint8Array\n enc.encode(secret), {\n // algorithm details\n name: \"HMAC\",\n hash: { name: getHMACHashName(algorithm) },\n }, false, // export = false\n [\"sign\", \"verify\"] // what this key can do\n );\n}\nexport async function sign(options, payload) {\n const { secret, algorithm } = typeof options === \"object\"\n ? {\n secret: options.secret,\n algorithm: options.algorithm || Algorithm.SHA256,\n }\n : { secret: options, algorithm: Algorithm.SHA256 };\n if (!secret || !payload) {\n throw new TypeError(\"[@octokit/webhooks-methods] secret & payload required for sign()\");\n }\n if (!Object.values(Algorithm).includes(algorithm)) {\n throw new TypeError(`[@octokit/webhooks] Algorithm ${algorithm} is not supported. Must be 'sha1' or 'sha256'`);\n }\n const signature = await crypto.subtle.sign(\"HMAC\", await importKey(secret, algorithm), enc.encode(payload));\n return `${algorithm}=${UInt8ArrayToHex(signature)}`;\n}\nexport async function verify(secret, eventPayload, signature) {\n if (!secret || !eventPayload || !signature) {\n throw new TypeError(\"[@octokit/webhooks-methods] secret, eventPayload & signature required\");\n }\n const algorithm = getAlgorithm(signature);\n return await crypto.subtle.verify(\"HMAC\", await importKey(secret, algorithm), hexToUInt8Array(signature.replace(`${algorithm}=`, \"\")), 
enc.encode(eventPayload));\n}\n"],"names":[],"mappings":"AAAO,IAAI,SAAS,CAAC;AACrB,CAAC,UAAU,SAAS,EAAE;AACtB,IAAI,SAAS,CAAC,MAAM,CAAC,GAAG,MAAM,CAAC;AAC/B,IAAI,SAAS,CAAC,QAAQ,CAAC,GAAG,QAAQ,CAAC;AACnC,CAAC,EAAE,SAAS,KAAK,SAAS,GAAG,EAAE,CAAC,CAAC;;ACJ1B,MAAM,YAAY,GAAG,CAAC,SAAS,KAAK;AAC3C,IAAI,OAAO,SAAS,CAAC,UAAU,CAAC,SAAS,CAAC,GAAG,QAAQ,GAAG,MAAM,CAAC;AAC/D,CAAC,CAAC;;ACAF,MAAM,GAAG,GAAG,IAAI,WAAW,EAAE,CAAC;AAC9B,SAAS,eAAe,CAAC,MAAM,EAAE;AACjC;AACA,IAAI,MAAM,KAAK,GAAG,MAAM,CAAC,KAAK,CAAC,cAAc,CAAC,CAAC;AAC/C;AACA,IAAI,MAAM,QAAQ,GAAG,KAAK,CAAC,GAAG,CAAC,UAAU,CAAC,EAAE;AAC5C,QAAQ,OAAO,QAAQ,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;AAC/B,KAAK,CAAC,CAAC;AACP,IAAI,OAAO,IAAI,UAAU,CAAC,QAAQ,CAAC,CAAC;AACpC,CAAC;AACD,SAAS,eAAe,CAAC,SAAS,EAAE;AACpC,IAAI,OAAO,KAAK,CAAC,SAAS,CAAC,GAAG;AAC9B,SAAS,IAAI,CAAC,IAAI,UAAU,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC,KAAK,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,QAAQ,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC;AAChF,SAAS,IAAI,CAAC,EAAE,CAAC,CAAC;AAClB,CAAC;AACD,SAAS,eAAe,CAAC,SAAS,EAAE;AACpC,IAAI,OAAO;AACX,QAAQ,CAAC,SAAS,CAAC,IAAI,GAAG,OAAO;AACjC,QAAQ,CAAC,SAAS,CAAC,MAAM,GAAG,SAAS;AACrC,KAAK,CAAC,SAAS,CAAC,CAAC;AACjB,CAAC;AACD,eAAe,SAAS,CAAC,MAAM,EAAE,SAAS,EAAE;AAC5C;AACA,IAAI,OAAO,MAAM,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK;AACxC,IAAI,GAAG,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE;AACxB;AACA,QAAQ,IAAI,EAAE,MAAM;AACpB,QAAQ,IAAI,EAAE,EAAE,IAAI,EAAE,eAAe,CAAC,SAAS,CAAC,EAAE;AAClD,KAAK,EAAE,KAAK;AACZ,IAAI,CAAC,MAAM,EAAE,QAAQ,CAAC;AACtB,KAAK,CAAC;AACN,CAAC;AACD,AAAO,eAAe,IAAI,CAAC,OAAO,EAAE,OAAO,EAAE;AAC7C,IAAI,MAAM,EAAE,MAAM,EAAE,SAAS,EAAE,GAAG,OAAO,OAAO,KAAK,QAAQ;AAC7D,UAAU;AACV,YAAY,MAAM,EAAE,OAAO,CAAC,MAAM;AAClC,YAAY,SAAS,EAAE,OAAO,CAAC,SAAS,IAAI,SAAS,CAAC,MAAM;AAC5D,SAAS;AACT,UAAU,EAAE,MAAM,EAAE,OAAO,EAAE,SAAS,EAAE,SAAS,CAAC,MAAM,EAAE,CAAC;AAC3D,IAAI,IAAI,CAAC,MAAM,IAAI,CAAC,OAAO,EAAE;AAC7B,QAAQ,MAAM,IAAI,SAAS,CAAC,kEAAkE,CAAC,CAAC;AAChG,KAAK;AACL,IAAI,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC,QAAQ,CAAC,SAAS,CAAC,EAAE;AACvD,QAAQ,MAAM,IAAI,SAAS,CAAC,CAAC,8BAA8B,EAAE,SAAS,CAAC,8CAA8C,CAAC,CAAC,CAAC;AACxH,KAAK;AACL,IAAI,MAAM,SAAS,GAAG,MAAM,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,MAAM,SAAS,CAAC,MAAM,EAAE,SAAS,CAAC,EAAE,GAAG,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,CAAC;AAChH,IAAI,OAAO,CAAC,EAAE,SAAS,CAAC,CAAC,EAAE,eAAe,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;AACxD,CAAC;AACD,AAAO,eAAe,MAAM,CAAC,MAAM,EAAE,YAAY,EAAE,SAAS,EAAE;AAC9D,IAAI,IAAI,CAAC,MAAM,IAAI,CAAC,YAAY,IAAI,CAAC,SAAS,EAAE;AAChD,QAAQ,MAAM,IAAI,SAAS,CAAC,uEAAuE,CAAC,CAAC;AACrG,KAAK;AACL,IAAI,MAAM,SAAS,GAAG,YAAY,CAAC,SAAS,CAAC,CAAC;AAC9C,IAAI,OAAO,MAAM,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,MAAM,EAAE,MAAM,SAAS,CAAC,MAAM,EAAE,SAAS,CAAC,EAAE,eAAe,CAAC,SAAS,CAAC,OAAO,CAAC,CAAC,EAAE,SAAS,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,EAAE,GAAG,CAAC,MAAM,CAAC,YAAY,CAAC,CAAC,CAAC;AACrK,CAAC;;;;"} \ No newline at end of file diff --git a/node_modules/@octokit/webhooks-methods/package.json b/node_modules/@octokit/webhooks-methods/package.json new file mode 100644 index 0000000..7c6e9a1 --- /dev/null +++ b/node_modules/@octokit/webhooks-methods/package.json @@ -0,0 +1,47 @@ +{ + "name": "@octokit/webhooks-methods", + "description": "Methods to handle GitHub Webhook requests", + "version": "3.0.1", + "license": "MIT", + "files": [ + "dist-*/", + "bin/" + ], + "pika": true, + "sideEffects": false, + "keywords": [ + "github", + "api", + "sdk", + "toolkit" + ], + "repository": "github:octokit/webhooks-methods.js", + "dependencies": {}, + "peerDependencies": {}, + "devDependencies": { + "@gr2m/pika-plugin-build-web": "^0.6.0-issue-84.2", + "@octokit/tsconfig": "^1.0.2", + "@pika/pack": "^0.5.0", + 
"@pika/plugin-build-node": "^0.9.2", + "@pika/plugin-ts-standard-pkg": "^0.9.2", + "@types/jest": "^29.0.0", + "@types/node": "^16.0.0", + "jest": "^29.0.0", + "prettier": "2.7.1", + "puppeteer": "^18.0.0", + "semantic-release": "^19.0.0", + "semantic-release-plugin-update-version-in-files": "^1.1.0", + "ts-jest": "^29.0.0", + "typescript": "^4.2.4" + }, + "engines": { + "node": ">= 14" + }, + "publishConfig": { + "access": "public" + }, + "source": "dist-src/index.js", + "types": "dist-types/index.d.ts", + "main": "dist-node/index.js", + "module": "dist-web/index.js" +} diff --git a/node_modules/@octokit/webhooks-types/README.md b/node_modules/@octokit/webhooks-types/README.md new file mode 100644 index 0000000..fd19f32 --- /dev/null +++ b/node_modules/@octokit/webhooks-types/README.md @@ -0,0 +1,46 @@ +# Octokit Webhooks + +> machine-readable, always up-to-date GitHub Webhooks specifications + +![Update status](https://github.com/octokit/webhooks/workflows/Update/badge.svg) + +## Download + +Download the latest specification at +[octokit.github.io/webhooks/payload-examples/api.github.com/index.json](https://octokit.github.io/webhooks/payload-examples/api.github.com/index.json) + +## Usage + +This package ships with types for the webhook events generated from the +respective json schemas, which you can use like so: + +```typescript +import { WebhookEvent, IssuesOpenedEvent } from "@octokit/webhooks-types"; + +const handleWebhookEvent = (event: WebhookEvent) => { + if ("action" in event && event.action === "completed") { + console.log(`${event.sender.login} completed something!`); + } +}; + +const handleIssuesOpenedEvent = (event: IssuesOpenedEvent) => { + console.log( + `${event.sender.login} opened "${event.issue.title}" on ${event.repository.full_name}` + ); +}; +``` + +**⚠️ Caution ⚠️**: Webhooks Types are expected to be used with the [`strictNullChecks` option](https://www.typescriptlang.org/tsconfig#strictNullChecks) enabled in your `tsconfig`. If you don't have this option enabled, there's the possibility that you get `never` as the inferred type in some use cases. See [#395](https://github.com/octokit/webhooks/issues/395) for details. 
+ +## See also + +- [octokit/graphql-schema](https://github.com/octokit/graphql-schema) – GitHub’s + GraphQL Schema with validation +- [octokit/openapi](https://github.com/octokit/openapi) – GitHub REST API route + specifications +- [octokit/app-permissions](https://github.com/octokit/app-permissions) – GitHub + App permission specifications + +## LICENSE + +[MIT](LICENSE.md) diff --git a/node_modules/@octokit/webhooks-types/package.json b/node_modules/@octokit/webhooks-types/package.json new file mode 100644 index 0000000..eaabee5 --- /dev/null +++ b/node_modules/@octokit/webhooks-types/package.json @@ -0,0 +1,20 @@ +{ + "name": "@octokit/webhooks-types", + "version": "6.5.0", + "description": "Generated TypeScript definitions based on community contributed JSON Schemas", + "keywords": [], + "repository": "github:octokit/webhooks", + "license": "MIT", + "author": "Gregor Martynus (https://github.com/gr2m)", + "main": "", + "types": "schema.d.ts", + "files": [ + "schema.d.ts" + ], + "scripts": {}, + "dependencies": {}, + "devDependencies": {}, + "publishConfig": { + "access": "public" + } +} diff --git a/node_modules/@octokit/webhooks-types/schema.d.ts b/node_modules/@octokit/webhooks-types/schema.d.ts new file mode 100644 index 0000000..cb9f9c6 --- /dev/null +++ b/node_modules/@octokit/webhooks-types/schema.d.ts @@ -0,0 +1,7443 @@ +/* tslint:disable */ +/** + * This file was automatically generated by json-schema-to-typescript. + * DO NOT MODIFY IT BY HAND. Instead, modify the source JSONSchema file, + * and run json-schema-to-typescript to regenerate this file. + */ + +export type Schema = + | BranchProtectionRuleEvent + | CheckRunEvent + | CheckSuiteEvent + | CodeScanningAlertEvent + | CommitCommentEvent + | CreateEvent + | DeleteEvent + | DependabotAlertEvent + | DeployKeyEvent + | DeploymentEvent + | DeploymentStatusEvent + | DiscussionEvent + | DiscussionCommentEvent + | ForkEvent + | GithubAppAuthorizationEvent + | GollumEvent + | InstallationEvent + | InstallationRepositoriesEvent + | IssueCommentEvent + | IssuesEvent + | LabelEvent + | MarketplacePurchaseEvent + | MemberEvent + | MembershipEvent + | MergeGroupEvent + | MetaEvent + | MilestoneEvent + | OrgBlockEvent + | OrganizationEvent + | PackageEvent + | PageBuildEvent + | PingEvent + | ProjectEvent + | ProjectCardEvent + | ProjectColumnEvent + | ProjectsV2ItemEvent + | PublicEvent + | PullRequestEvent + | PullRequestReviewEvent + | PullRequestReviewCommentEvent + | PullRequestReviewThreadEvent + | PushEvent + | ReleaseEvent + | RepositoryEvent + | RepositoryDispatchEvent + | RepositoryImportEvent + | RepositoryVulnerabilityAlertEvent + | SecretScanningAlertEvent + | SecurityAdvisoryEvent + | SponsorshipEvent + | StarEvent + | StatusEvent + | TeamEvent + | TeamAddEvent + | WatchEvent + | WorkflowDispatchEvent + | WorkflowJobEvent + | WorkflowRunEvent; +export type BranchProtectionRuleEvent = + | BranchProtectionRuleCreatedEvent + | BranchProtectionRuleDeletedEvent + | BranchProtectionRuleEditedEvent; +export type BranchProtectionRuleEnforcementLevel = + | "off" + | "non_admins" + | "everyone"; +export type BranchProtectionRuleNumber = number; +export type BranchProtectionRuleBoolean = boolean; +export type BranchProtectionRuleArray = string[]; +export type CheckRunEvent = + | CheckRunCompletedEvent + | CheckRunCreatedEvent + | CheckRunRequestedActionEvent + | CheckRunRerequestedEvent; +export type CheckSuiteEvent = + | CheckSuiteCompletedEvent + | CheckSuiteRequestedEvent + | CheckSuiteRerequestedEvent; +export type 
CodeScanningAlertEvent = + | CodeScanningAlertAppearedInBranchEvent + | CodeScanningAlertClosedByUserEvent + | CodeScanningAlertCreatedEvent + | CodeScanningAlertFixedEvent + | CodeScanningAlertReopenedEvent + | CodeScanningAlertReopenedByUserEvent; +export type CommitCommentEvent = CommitCommentCreatedEvent; +/** + * How the author is associated with the repository. + */ +export type AuthorAssociation = + | "COLLABORATOR" + | "CONTRIBUTOR" + | "FIRST_TIMER" + | "FIRST_TIME_CONTRIBUTOR" + | "MANNEQUIN" + | "MEMBER" + | "NONE" + | "OWNER"; +export type DependabotAlertEvent = + | DependabotAlertCreatedEvent + | DependabotAlertDismissedEvent + | DependabotAlertFixedEvent + | DependabotAlertReintroducedEvent + | DependabotAlertReopenedEvent; +export type DeployKeyEvent = DeployKeyCreatedEvent | DeployKeyDeletedEvent; +export type DeploymentEvent = DeploymentCreatedEvent; +export type DeploymentStatusEvent = DeploymentStatusCreatedEvent; +export type DiscussionEvent = + | DiscussionAnsweredEvent + | DiscussionCategoryChangedEvent + | DiscussionCreatedEvent + | DiscussionDeletedEvent + | DiscussionEditedEvent + | DiscussionLabeledEvent + | DiscussionLockedEvent + | DiscussionPinnedEvent + | DiscussionTransferredEvent + | DiscussionUnansweredEvent + | DiscussionUnlabeledEvent + | DiscussionUnlockedEvent + | DiscussionUnpinnedEvent; +export type DiscussionCommentEvent = + | DiscussionCommentCreatedEvent + | DiscussionCommentDeletedEvent + | DiscussionCommentEditedEvent; +export type GithubAppAuthorizationEvent = GithubAppAuthorizationRevokedEvent; +export type InstallationEvent = + | InstallationCreatedEvent + | InstallationDeletedEvent + | InstallationNewPermissionsAcceptedEvent + | InstallationSuspendEvent + | InstallationUnsuspendEvent; +export type InstallationRepositoriesEvent = + | InstallationRepositoriesAddedEvent + | InstallationRepositoriesRemovedEvent; +export type IssueCommentEvent = + | IssueCommentCreatedEvent + | IssueCommentDeletedEvent + | IssueCommentEditedEvent; +export type IssuesEvent = + | IssuesAssignedEvent + | IssuesClosedEvent + | IssuesDeletedEvent + | IssuesDemilestonedEvent + | IssuesEditedEvent + | IssuesLabeledEvent + | IssuesLockedEvent + | IssuesMilestonedEvent + | IssuesOpenedEvent + | IssuesPinnedEvent + | IssuesReopenedEvent + | IssuesTransferredEvent + | IssuesUnassignedEvent + | IssuesUnlabeledEvent + | IssuesUnlockedEvent + | IssuesUnpinnedEvent; +export type LabelEvent = + | LabelCreatedEvent + | LabelDeletedEvent + | LabelEditedEvent; +export type MarketplacePurchaseEvent = + | MarketplacePurchaseCancelledEvent + | MarketplacePurchaseChangedEvent + | MarketplacePurchasePendingChangeEvent + | MarketplacePurchasePendingChangeCancelledEvent + | MarketplacePurchasePurchasedEvent; +export type MemberEvent = + | MemberAddedEvent + | MemberEditedEvent + | MemberRemovedEvent; +export type MembershipEvent = MembershipAddedEvent | MembershipRemovedEvent; +export type MergeGroupEvent = MergeGroupChecksRequestedEvent; +export type MetaEvent = MetaDeletedEvent; +export type WebhookEvents = + | ( + | "branch_protection_rule" + | "check_run" + | "check_suite" + | "code_scanning_alert" + | "commit_comment" + | "create" + | "delete" + | "deployment" + | "deployment_status" + | "deploy_key" + | "discussion" + | "discussion_comment" + | "fork" + | "gollum" + | "issues" + | "issue_comment" + | "label" + | "member" + | "membership" + | "meta" + | "milestone" + | "organization" + | "org_block" + | "package" + | "page_build" + | "project" + | "projects_v2_item" + | "project_card" 
+ | "project_column" + | "public" + | "pull_request" + | "pull_request_review" + | "pull_request_review_comment" + | "pull_request_review_thread" + | "push" + | "registry_package" + | "release" + | "repository" + | "repository_import" + | "repository_vulnerability_alert" + | "secret_scanning_alert" + | "secret_scanning_alert_location" + | "security_and_analysis" + | "star" + | "status" + | "team" + | "team_add" + | "watch" + | "workflow_job" + | "workflow_run" + )[] + | ["*"]; +export type MilestoneEvent = + | MilestoneClosedEvent + | MilestoneCreatedEvent + | MilestoneDeletedEvent + | MilestoneEditedEvent + | MilestoneOpenedEvent; +export type OrgBlockEvent = OrgBlockBlockedEvent | OrgBlockUnblockedEvent; +export type OrganizationEvent = + | OrganizationDeletedEvent + | OrganizationMemberAddedEvent + | OrganizationMemberInvitedEvent + | OrganizationMemberRemovedEvent + | OrganizationRenamedEvent; +export type PackageEvent = PackagePublishedEvent | PackageUpdatedEvent; +export type ProjectEvent = + | ProjectClosedEvent + | ProjectCreatedEvent + | ProjectDeletedEvent + | ProjectEditedEvent + | ProjectReopenedEvent; +export type ProjectCardEvent = + | ProjectCardConvertedEvent + | ProjectCardCreatedEvent + | ProjectCardDeletedEvent + | ProjectCardEditedEvent + | ProjectCardMovedEvent; +export type ProjectColumnEvent = + | ProjectColumnCreatedEvent + | ProjectColumnDeletedEvent + | ProjectColumnEditedEvent + | ProjectColumnMovedEvent; +export type ProjectsV2ItemEvent = + | ProjectsV2ItemArchivedEvent + | ProjectsV2ItemConvertedEvent + | ProjectsV2ItemCreatedEvent + | ProjectsV2ItemDeletedEvent + | ProjectsV2ItemEditedEvent + | ProjectsV2ItemReorderedEvent + | ProjectsV2ItemRestoredEvent; +export type PullRequestEvent = + | PullRequestAssignedEvent + | PullRequestAutoMergeDisabledEvent + | PullRequestAutoMergeEnabledEvent + | PullRequestClosedEvent + | PullRequestConvertedToDraftEvent + | PullRequestDequeuedEvent + | PullRequestEditedEvent + | PullRequestLabeledEvent + | PullRequestLockedEvent + | PullRequestOpenedEvent + | PullRequestQueuedEvent + | PullRequestReadyForReviewEvent + | PullRequestReopenedEvent + | PullRequestReviewRequestRemovedEvent + | PullRequestReviewRequestedEvent + | PullRequestSynchronizeEvent + | PullRequestUnassignedEvent + | PullRequestUnlabeledEvent + | PullRequestUnlockedEvent; +export type PullRequestReviewRequestRemovedEvent = + | { + action: "review_request_removed"; + /** + * The pull request number. + */ + number: number; + pull_request: PullRequest; + requested_reviewer: User; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; + } + | { + action: "review_request_removed"; + /** + * The pull request number. + */ + number: number; + pull_request: PullRequest; + requested_team: Team; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; + }; +export type PullRequestReviewRequestedEvent = + | { + action: "review_requested"; + /** + * The pull request number. + */ + number: number; + pull_request: PullRequest; + requested_reviewer: User; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; + } + | { + action: "review_requested"; + /** + * The pull request number. 
+ */ + number: number; + pull_request: PullRequest; + requested_team: Team; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; + }; +export type PullRequestReviewEvent = + | PullRequestReviewDismissedEvent + | PullRequestReviewEditedEvent + | PullRequestReviewSubmittedEvent; +export type PullRequestReviewCommentEvent = + | PullRequestReviewCommentCreatedEvent + | PullRequestReviewCommentDeletedEvent + | PullRequestReviewCommentEditedEvent; +export type PullRequestReviewThreadEvent = + | PullRequestReviewThreadResolvedEvent + | PullRequestReviewThreadUnresolvedEvent; +export type ReleaseEvent = + | ReleaseCreatedEvent + | ReleaseDeletedEvent + | ReleaseEditedEvent + | ReleasePrereleasedEvent + | ReleasePublishedEvent + | ReleaseReleasedEvent + | ReleaseUnpublishedEvent; +export type RepositoryEvent = + | RepositoryArchivedEvent + | RepositoryCreatedEvent + | RepositoryDeletedEvent + | RepositoryEditedEvent + | RepositoryPrivatizedEvent + | RepositoryPublicizedEvent + | RepositoryRenamedEvent + | RepositoryTransferredEvent + | RepositoryUnarchivedEvent; +export type RepositoryVulnerabilityAlertEvent = + | RepositoryVulnerabilityAlertCreateEvent + | RepositoryVulnerabilityAlertDismissEvent + | RepositoryVulnerabilityAlertReopenEvent + | RepositoryVulnerabilityAlertResolveEvent; +export type SecretScanningAlertEvent = + | SecretScanningAlertCreatedEvent + | SecretScanningAlertReopenedEvent + | SecretScanningAlertResolvedEvent; +export type SecurityAdvisoryEvent = + | SecurityAdvisoryPerformedEvent + | SecurityAdvisoryPublishedEvent + | SecurityAdvisoryUpdatedEvent + | SecurityAdvisoryWithdrawnEvent; +export type SponsorshipEvent = + | SponsorshipCancelledEvent + | SponsorshipCreatedEvent + | SponsorshipEditedEvent + | SponsorshipPendingCancellationEvent + | SponsorshipPendingTierChangeEvent + | SponsorshipTierChangedEvent; +export type StarEvent = StarCreatedEvent | StarDeletedEvent; +export type TeamEvent = + | TeamAddedToRepositoryEvent + | TeamCreatedEvent + | TeamDeletedEvent + | TeamEditedEvent + | TeamRemovedFromRepositoryEvent; +export type WatchEvent = WatchStartedEvent; +export type WorkflowJobEvent = + | WorkflowJobCompletedEvent + | WorkflowJobInProgressEvent + | WorkflowJobQueuedEvent; +export type WorkflowStep = WorkflowStepInProgress | WorkflowStepCompleted; +export type WorkflowRunEvent = + | WorkflowRunCompletedEvent + | WorkflowRunInProgressEvent + | WorkflowRunRequestedEvent; + +/** + * Activity related to a branch protection rule. For more information, see "[About branch protection rules](https://docs.github.com/en/github/administering-a-repository/defining-the-mergeability-of-pull-requests/about-protected-branches#about-branch-protection-rules)." + */ +export interface BranchProtectionRuleCreatedEvent { + action: "created"; + rule: BranchProtectionRule; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +/** + * The branch protection rule. Includes a `name` and all the [branch protection settings](https://docs.github.com/en/github/administering-a-repository/defining-the-mergeability-of-pull-requests/about-protected-branches#about-branch-protection-settings) applied to branches that match the name. Binary settings are boolean. Multi-level configurations are one of `off`, `non_admins`, or `everyone`. Actor and build lists are arrays of strings. 
+ */ +export interface BranchProtectionRule { + id: number; + repository_id: number; + name: string; + created_at: string; + updated_at: string; + pull_request_reviews_enforcement_level: BranchProtectionRuleEnforcementLevel; + required_approving_review_count: BranchProtectionRuleNumber; + dismiss_stale_reviews_on_push: BranchProtectionRuleBoolean; + require_code_owner_review: BranchProtectionRuleBoolean; + authorized_dismissal_actors_only: BranchProtectionRuleBoolean; + ignore_approvals_from_contributors: BranchProtectionRuleBoolean; + required_status_checks: BranchProtectionRuleArray; + required_status_checks_enforcement_level: BranchProtectionRuleEnforcementLevel; + strict_required_status_checks_policy: BranchProtectionRuleBoolean; + signature_requirement_enforcement_level: BranchProtectionRuleEnforcementLevel; + linear_history_requirement_enforcement_level: BranchProtectionRuleEnforcementLevel; + admin_enforced: BranchProtectionRuleBoolean; + create_protected?: BranchProtectionRuleBoolean; + allow_force_pushes_enforcement_level: BranchProtectionRuleEnforcementLevel; + allow_deletions_enforcement_level: BranchProtectionRuleEnforcementLevel; + merge_queue_enforcement_level: BranchProtectionRuleEnforcementLevel; + required_deployments_enforcement_level: BranchProtectionRuleEnforcementLevel; + required_conversation_resolution_level: BranchProtectionRuleEnforcementLevel; + authorized_actors_only: BranchProtectionRuleBoolean; + authorized_actor_names: BranchProtectionRuleArray; +} +/** + * A git repository + */ +export interface Repository { + /** + * Unique identifier of the repository + */ + id: number; + /** + * The GraphQL identifier of the repository. + */ + node_id: string; + /** + * The name of the repository. + */ + name: string; + /** + * The full, globally unique, name of the repository. + */ + full_name: string; + /** + * Whether the repository is private or public. + */ + private: boolean; + owner: User; + /** + * The URL to view the repository on GitHub.com. + */ + html_url: string; + /** + * The repository description. + */ + description: string | null; + /** + * Whether the repository is a fork. + */ + fork: boolean; + /** + * The URL to get more information about the repository from the GitHub API. + */ + url: string; + /** + * The API URL to list the forks of the repository. + */ + forks_url: string; + /** + * A template for the API URL to get information about deploy keys on the repository. + */ + keys_url: string; + /** + * A template for the API URL to get information about collaborators of the repository. + */ + collaborators_url: string; + /** + * The API URL to list the teams on the repository. + */ + teams_url: string; + /** + * The API URL to list the hooks on the repository. + */ + hooks_url: string; + /** + * A template for the API URL to get information about issue events on the repository. + */ + issue_events_url: string; + /** + * The API URL to list the events of the repository. + */ + events_url: string; + /** + * A template for the API URL to list the available assignees for issues in the repository. + */ + assignees_url: string; + /** + * A template for the API URL to get information about branches in the repository. + */ + branches_url: string; + /** + * The API URL to get information about tags on the repository. + */ + tags_url: string; + /** + * A template for the API URL to create or retrieve a raw Git blob in the repository. + */ + blobs_url: string; + /** + * A template for the API URL to get information about Git tags of the repository. 
+ */ + git_tags_url: string; + /** + * A template for the API URL to get information about Git refs of the repository. + */ + git_refs_url: string; + /** + * A template for the API URL to create or retrieve a raw Git tree of the repository. + */ + trees_url: string; + /** + * A template for the API URL to get information about statuses of a commit. + */ + statuses_url: string; + /** + * The API URL to get information about the languages of the repository. + */ + languages_url: string; + /** + * The API URL to list the stargazers on the repository. + */ + stargazers_url: string; + /** + * A template for the API URL to list the contributors to the repository. + */ + contributors_url: string; + /** + * The API URL to list the subscribers on the repository. + */ + subscribers_url: string; + /** + * The API URL to subscribe to notifications for this repository. + */ + subscription_url: string; + /** + * A template for the API URL to get information about commits on the repository. + */ + commits_url: string; + /** + * A template for the API URL to get information about Git commits of the repository. + */ + git_commits_url: string; + /** + * A template for the API URL to get information about comments on the repository. + */ + comments_url: string; + /** + * A template for the API URL to get information about issue comments on the repository. + */ + issue_comment_url: string; + /** + * A template for the API URL to get the contents of the repository. + */ + contents_url: string; + /** + * A template for the API URL to compare two commits or refs. + */ + compare_url: string; + /** + * The API URL to merge branches in the repository. + */ + merges_url: string; + /** + * A template for the API URL to download the repository as an archive. + */ + archive_url: string; + /** + * The API URL to list the downloads on the repository. + */ + downloads_url: string; + /** + * A template for the API URL to get information about issues on the repository. + */ + issues_url: string; + /** + * A template for the API URL to get information about pull requests on the repository. + */ + pulls_url: string; + /** + * A template for the API URL to get information about milestones of the repository. + */ + milestones_url: string; + /** + * A template for the API URL to get information about notifications on the repository. + */ + notifications_url: string; + /** + * A template for the API URL to get information about labels of the repository. + */ + labels_url: string; + /** + * A template for the API URL to get information about releases on the repository. + */ + releases_url: string; + /** + * The API URL to list the deployments of the repository. + */ + deployments_url: string; + created_at: number | string; + updated_at: string; + pushed_at: number | string | null; + git_url: string; + ssh_url: string; + clone_url: string; + svn_url: string; + homepage: string | null; + size: number; + stargazers_count: number; + watchers_count: number; + language: string | null; + /** + * Whether issues are enabled. + */ + has_issues: boolean; + /** + * Whether projects are enabled. + */ + has_projects: boolean; + /** + * Whether downloads are enabled. + */ + has_downloads: boolean; + /** + * Whether the wiki is enabled. + */ + has_wiki: boolean; + has_pages: boolean; + forks_count: number; + mirror_url: string | null; + /** + * Whether the repository is archived. + */ + archived: boolean; + /** + * Returns whether or not this repository is disabled. 
+ */ + disabled?: boolean; + open_issues_count: number; + license: License | null; + forks: number; + open_issues: number; + watchers: number; + stargazers?: number; + /** + * The default branch of the repository. + */ + default_branch: string; + /** + * Whether to allow squash merges for pull requests. + */ + allow_squash_merge?: boolean; + /** + * Whether to allow merge commits for pull requests. + */ + allow_merge_commit?: boolean; + /** + * Whether to allow rebase merges for pull requests. + */ + allow_rebase_merge?: boolean; + /** + * Whether to allow auto-merge for pull requests. + */ + allow_auto_merge?: boolean; + /** + * Whether to allow private forks + */ + allow_forking?: boolean; + allow_update_branch?: boolean; + use_squash_pr_title_as_default?: boolean; + squash_merge_commit_message?: string; + squash_merge_commit_title?: string; + merge_commit_message?: string; + merge_commit_title?: string; + is_template: boolean; + web_commit_signoff_required: boolean; + topics: string[]; + visibility: "public" | "private" | "internal"; + /** + * Whether to delete head branches when pull requests are merged + */ + delete_branch_on_merge?: boolean; + master_branch?: string; + permissions?: { + pull: boolean; + push: boolean; + admin: boolean; + maintain?: boolean; + triage?: boolean; + }; + public?: boolean; + organization?: string; +} +export interface User { + login: string; + id: number; + node_id: string; + name?: string; + email?: string | null; + avatar_url: string; + gravatar_id: string; + url: string; + html_url: string; + followers_url: string; + following_url: string; + gists_url: string; + starred_url: string; + subscriptions_url: string; + organizations_url: string; + repos_url: string; + events_url: string; + received_events_url: string; + type: "Bot" | "User" | "Organization"; + site_admin: boolean; +} +export interface License { + key: string; + name: string; + spdx_id: string; + url: string | null; + node_id: string; +} +/** + * Installation + */ +export interface InstallationLite { + /** + * The ID of the installation. + */ + id: number; + node_id: string; +} +export interface Organization { + login: string; + id: number; + node_id: string; + url: string; + html_url?: string; + repos_url: string; + events_url: string; + hooks_url: string; + issues_url: string; + members_url: string; + public_members_url: string; + avatar_url: string; + description: string | null; +} +/** + * Activity related to a branch protection rule. For more information, see "[About branch protection rules](https://docs.github.com/en/github/administering-a-repository/defining-the-mergeability-of-pull-requests/about-protected-branches#about-branch-protection-rules)." + */ +export interface BranchProtectionRuleDeletedEvent { + action: "deleted"; + rule: BranchProtectionRule; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +/** + * Activity related to a branch protection rule. For more information, see "[About branch protection rules](https://docs.github.com/en/github/administering-a-repository/defining-the-mergeability-of-pull-requests/about-protected-branches#about-branch-protection-rules)." + */ +export interface BranchProtectionRuleEditedEvent { + action: "edited"; + rule: BranchProtectionRule; + /** + * If the action was `edited`, the changes to the rule. 
+ */ + changes?: { + admin_enforced?: { + from: BranchProtectionRuleBoolean; + }; + allow_deletions_enforcement_level?: { + from: BranchProtectionRuleEnforcementLevel | null; + }; + allow_force_pushes_enforcement_level?: { + from: BranchProtectionRuleEnforcementLevel; + }; + authorized_actors_only?: { + from: BranchProtectionRuleBoolean; + }; + authorized_actor_names?: { + from: BranchProtectionRuleArray; + }; + authorized_dismissal_actors_only?: { + from: BranchProtectionRuleBoolean | null; + }; + dismiss_stale_reviews_on_push?: { + from: BranchProtectionRuleBoolean; + }; + pull_request_reviews_enforcement_level?: { + from: BranchProtectionRuleEnforcementLevel; + }; + require_code_owner_review?: { + from: BranchProtectionRuleBoolean; + }; + required_approving_review_count?: { + from: BranchProtectionRuleNumber; + }; + required_conversation_resolution_level?: { + from: BranchProtectionRuleEnforcementLevel; + }; + required_deployments_enforcement_level?: { + from: BranchProtectionRuleEnforcementLevel; + }; + required_status_checks?: { + from: BranchProtectionRuleArray; + }; + required_status_checks_enforcement_level?: { + from: BranchProtectionRuleEnforcementLevel; + }; + signature_requirement_enforcement_level?: { + from: BranchProtectionRuleEnforcementLevel; + }; + linear_history_requirement_enforcement_level?: { + from: BranchProtectionRuleEnforcementLevel; + }; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface CheckRunCompletedEvent { + action: "completed"; + /** + * The [check_run](https://docs.github.com/en/rest/reference/checks#get-a-check-run). + */ + check_run: { + /** + * The id of the check. + */ + id: number; + node_id?: string; + /** + * The SHA of the commit that is being checked. + */ + head_sha: string; + external_id: string; + url: string; + html_url: string; + details_url?: string; + /** + * The current status of the check run. Can be `queued`, `in_progress`, or `completed`. + */ + status: "completed"; + /** + * The result of the completed check run. Can be one of `success`, `failure`, `neutral`, `cancelled`, `timed_out`, `action_required` or `stale`. This value will be `null` until the check run has completed. + */ + conclusion: + | "success" + | "failure" + | "neutral" + | "cancelled" + | "timed_out" + | "action_required" + | "stale" + | "skipped" + | null; + /** + * The time that the check run began. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + started_at: string; + /** + * The time the check completed. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + completed_at: string; + output: { + title?: string | null; + summary: string | null; + text: string | null; + annotations_count: number; + annotations_url: string; + }; + /** + * The name of the check run. + */ + name: string; + check_suite: { + /** + * The id of the check suite that this check run is part of. + */ + id: number; + node_id?: string; + head_branch: string | null; + /** + * The SHA of the head commit that is being checked. + */ + head_sha: string; + status: "in_progress" | "completed" | "queued"; + conclusion: + | "success" + | "failure" + | "neutral" + | "cancelled" + | "timed_out" + | "action_required" + | "stale" + | null; + url: string; + before: string | null; + after: string | null; + /** + * An array of pull requests that match this check suite. 
A pull request matches a check suite if they have the same `head_branch`. + * + * **Note:** + * + * * The `head_sha` of the check suite can differ from the `sha` of the pull request if subsequent pushes are made into the PR. + * * When the check suite's `head_branch` is in a forked repository it will be `null` and the `pull_requests` array will be empty. + */ + pull_requests: CheckRunPullRequest[]; + deployment?: CheckRunDeployment; + app: App; + created_at: string; + updated_at: string; + }; + app: App; + pull_requests: CheckRunPullRequest[]; + deployment?: CheckRunDeployment; + }; + /** + * The action requested by the user. + */ + requested_action?: { + /** + * The integrator reference of the action requested by the user. + */ + identifier?: string; + } | null; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface CheckRunPullRequest { + url: string; + id: number; + number: number; + head: { + ref: string; + sha: string; + repo: RepoRef; + }; + base: { + ref: string; + sha: string; + repo: RepoRef; + }; +} +export interface RepoRef { + id: number; + url: string; + name: string; +} +/** + * A deployment to a repository environment. This will only be populated if the check run was created by a GitHub Actions workflow job that references an environment. + */ +export interface CheckRunDeployment { + url: string; + id: number; + node_id: string; + task: string; + original_environment: string; + environment: string; + description: string | null; + created_at: string; + updated_at: string; + statuses_url: string; + repository_url: string; +} +/** + * GitHub apps are a new way to extend GitHub. They can be installed directly on organizations and user accounts and granted access to specific repositories. They come with granular permissions and built-in webhooks. GitHub apps are first class actors within GitHub. 
+ */ +export interface App { + /** + * Unique identifier of the GitHub app + */ + id: number; + /** + * The slug name of the GitHub app + */ + slug?: string; + node_id: string; + owner: User; + /** + * The name of the GitHub app + */ + name: string; + description: string | null; + external_url: string; + html_url: string; + created_at: string; + updated_at: string; + /** + * The set of permissions for the GitHub app + */ + permissions?: { + actions?: "read" | "write"; + administration?: "read" | "write"; + blocking?: "read" | "write"; + checks?: "read" | "write"; + content_references?: "read" | "write"; + contents?: "read" | "write"; + deployments?: "read" | "write"; + discussions?: "read" | "write"; + emails?: "read" | "write"; + environments?: "read" | "write"; + issues?: "read" | "write"; + keys?: "read" | "write"; + members?: "read" | "write"; + merge_queues?: "read" | "write"; + metadata?: "read" | "write"; + organization_administration?: "read" | "write"; + organization_hooks?: "read" | "write"; + organization_packages?: "read" | "write"; + organization_plan?: "read" | "write"; + organization_projects?: "read" | "write"; + organization_secrets?: "read" | "write"; + organization_self_hosted_runners?: "read" | "write"; + organization_user_blocking?: "read" | "write"; + packages?: "read" | "write"; + pages?: "read" | "write"; + pull_requests?: "read" | "write"; + repository_hooks?: "read" | "write"; + repository_projects?: "read" | "write"; + secret_scanning_alerts?: "read" | "write"; + secrets?: "read" | "write"; + security_events?: "read" | "write"; + security_scanning_alert?: "read" | "write"; + single_file?: "read" | "write"; + statuses?: "read" | "write"; + team_discussions?: "read" | "write"; + vulnerability_alerts?: "read" | "write"; + workflows?: "read" | "write"; + }; + /** + * The list of events for the GitHub app + */ + events?: ( + | "branch_protection_rule" + | "check_run" + | "check_suite" + | "code_scanning_alert" + | "commit_comment" + | "content_reference" + | "create" + | "delete" + | "deployment" + | "deployment_review" + | "deployment_status" + | "deploy_key" + | "discussion" + | "discussion_comment" + | "fork" + | "gollum" + | "issues" + | "issue_comment" + | "label" + | "member" + | "membership" + | "merge_group" + | "merge_queue_entry" + | "milestone" + | "organization" + | "org_block" + | "page_build" + | "project" + | "projects_v2_item" + | "project_card" + | "project_column" + | "public" + | "pull_request" + | "pull_request_review" + | "pull_request_review_comment" + | "push" + | "registry_package" + | "release" + | "repository" + | "repository_dispatch" + | "secret_scanning_alert" + | "secret_scanning_alert_location" + | "security_and_analysis" + | "star" + | "status" + | "team" + | "team_add" + | "watch" + | "workflow_dispatch" + | "workflow_run" + | "workflow_job" + )[]; +} +export interface CheckRunCreatedEvent { + action: "created"; + /** + * The [check_run](https://docs.github.com/en/rest/reference/checks#get-a-check-run). + */ + check_run: { + /** + * The id of the check. + */ + id: number; + node_id?: string; + /** + * The SHA of the commit that is being checked. + */ + head_sha: string; + external_id: string; + url: string; + html_url: string; + details_url?: string; + /** + * The current status of the check run. Can be `queued`, `in_progress`, or `completed`. + */ + status: "queued" | "in_progress" | "completed" | "waiting"; + /** + * The result of the completed check run. 
Can be one of `success`, `failure`, `neutral`, `cancelled`, `timed_out`, `action_required` or `stale`. This value will be `null` until the check run has completed. + */ + conclusion: + | "success" + | "failure" + | "neutral" + | "cancelled" + | "timed_out" + | "action_required" + | "stale" + | "skipped" + | null; + /** + * The time that the check run began. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + started_at: string; + /** + * The time the check completed. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + completed_at: string | null; + output: { + title?: string | null; + summary: string | null; + text: string | null; + annotations_count: number; + annotations_url: string; + }; + /** + * The name of the check run. + */ + name: string; + check_suite: { + /** + * The id of the check suite that this check run is part of. + */ + id: number; + node_id?: string; + head_branch: string | null; + /** + * The SHA of the head commit that is being checked. + */ + head_sha: string; + status: "queued" | "in_progress" | "completed"; + conclusion: + | "success" + | "failure" + | "neutral" + | "cancelled" + | "timed_out" + | "action_required" + | "stale" + | null; + url: string; + before: string | null; + after: string | null; + /** + * An array of pull requests that match this check suite. A pull request matches a check suite if they have the same `head_branch`. + * + * **Note:** + * + * * The `head_sha` of the check suite can differ from the `sha` of the pull request if subsequent pushes are made into the PR. + * * When the check suite's `head_branch` is in a forked repository it will be `null` and the `pull_requests` array will be empty. + */ + pull_requests: CheckRunPullRequest[]; + deployment?: CheckRunDeployment; + app: App; + created_at: string; + updated_at: string; + }; + app: App; + pull_requests: CheckRunPullRequest[]; + deployment?: CheckRunDeployment; + }; + /** + * The action requested by the user. + */ + requested_action?: { + /** + * The integrator reference of the action requested by the user. + */ + identifier?: string; + } | null; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface CheckRunRequestedActionEvent { + action: "requested_action"; + /** + * The [check_run](https://docs.github.com/en/rest/reference/checks#get-a-check-run). + */ + check_run: { + /** + * The id of the check. + */ + id: number; + node_id?: string; + /** + * The SHA of the commit that is being checked. + */ + head_sha: string; + external_id: string; + url: string; + html_url: string; + details_url?: string; + /** + * The current status of the check run. Can be `queued`, `in_progress`, or `completed`. + */ + status: "queued" | "in_progress" | "completed"; + /** + * The result of the completed check run. Can be one of `success`, `failure`, `neutral`, `cancelled`, `timed_out`, `action_required` or `stale`. This value will be `null` until the check run has completed. + */ + conclusion: + | "success" + | "failure" + | "neutral" + | "cancelled" + | "timed_out" + | "action_required" + | "stale" + | "skipped" + | null; + /** + * The time that the check run began. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + started_at: string; + /** + * The time the check completed. 
This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + completed_at: string | null; + output: { + title?: string | null; + summary: string | null; + text: string | null; + annotations_count: number; + annotations_url: string; + }; + /** + * The name of the check run. + */ + name: string; + check_suite: { + /** + * The id of the check suite that this check run is part of. + */ + id: number; + node_id?: string; + head_branch: string | null; + /** + * The SHA of the head commit that is being checked. + */ + head_sha: string; + status: "queued" | "in_progress" | "completed" | "waiting"; + conclusion: + | "success" + | "failure" + | "neutral" + | "cancelled" + | "timed_out" + | "action_required" + | "stale" + | null; + url: string; + before: string | null; + after: string | null; + /** + * An array of pull requests that match this check suite. A pull request matches a check suite if they have the same `head_branch`. + * + * **Note:** + * + * * The `head_sha` of the check suite can differ from the `sha` of the pull request if subsequent pushes are made into the PR. + * * When the check suite's `head_branch` is in a forked repository it will be `null` and the `pull_requests` array will be empty. + */ + pull_requests: CheckRunPullRequest[]; + deployment?: CheckRunDeployment; + app: App; + created_at: string; + updated_at: string; + }; + app: App; + pull_requests: CheckRunPullRequest[]; + deployment?: CheckRunDeployment; + }; + /** + * The action requested by the user. + */ + requested_action: { + /** + * The integrator reference of the action requested by the user. + */ + identifier?: string; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface CheckRunRerequestedEvent { + action: "rerequested"; + /** + * The [check_run](https://docs.github.com/en/rest/reference/checks#get-a-check-run). + */ + check_run: { + /** + * The id of the check. + */ + id: number; + node_id?: string; + /** + * The SHA of the commit that is being checked. + */ + head_sha: string; + external_id: string; + url: string; + html_url: string; + details_url?: string; + /** + * The phase of the lifecycle that the check is currently in. + */ + status: "completed"; + /** + * The result of the completed check run. Can be one of `success`, `failure`, `neutral`, `cancelled`, `timed_out`, `action_required` or `stale`. This value will be `null` until the check run has `completed`. + */ + conclusion: + | "success" + | "failure" + | "neutral" + | "cancelled" + | "timed_out" + | "action_required" + | "stale" + | "skipped" + | null; + /** + * The time that the check run began. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + started_at: string; + /** + * The time the check completed. This is a timestamp in [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + completed_at: string; + output: { + title?: string | null; + summary: string | null; + text: string | null; + annotations_count: number; + annotations_url: string; + }; + /** + * The name of the check. + */ + name: string; + check_suite: { + /** + * The id of the check suite that this check run is part of. + */ + id: number; + node_id?: string; + head_branch: string | null; + /** + * The SHA of the head commit that is being checked. 
+ */ + head_sha: string; + status: "completed"; + conclusion: + | "success" + | "failure" + | "neutral" + | "cancelled" + | "timed_out" + | "action_required" + | "stale"; + url: string; + before: string | null; + after: string | null; + /** + * An array of pull requests that match this check suite. A pull request matches a check suite if they have the same `head_branch`. + * + * **Note:** + * + * * The `head_sha` of the check suite can differ from the `sha` of the pull request if subsequent pushes are made into the PR. + * * When the check suite's `head_branch` is in a forked repository it will be `null` and the `pull_requests` array will be empty. + */ + pull_requests: CheckRunPullRequest[]; + deployment?: CheckRunDeployment; + app: App; + created_at: string; + updated_at: string; + }; + app: App; + pull_requests: CheckRunPullRequest[]; + deployment?: CheckRunDeployment; + }; + /** + * The action requested by the user. + */ + requested_action?: { + /** + * The integrator reference of the action requested by the user. + */ + identifier?: string; + } | null; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface CheckSuiteCompletedEvent { + action: "completed"; + /** + * The [check_suite](https://docs.github.com/en/rest/reference/checks#suites). + */ + check_suite: { + id: number; + node_id: string; + /** + * The head branch name the changes are on. + */ + head_branch: string | null; + /** + * The SHA of the head commit that is being checked. + */ + head_sha: string; + /** + * The summary status for all check runs that are part of the check suite. Can be `queued`, `requested`, `in_progress`, or `completed`. + */ + status: "requested" | "in_progress" | "completed" | "queued" | null; + /** + * The summary conclusion for all check runs that are part of the check suite. Can be one of `success`, `failure`, `neutral`, `cancelled`, `timed_out`, `action_required` or `stale`. This value will be `null` until the check run has `completed`. + */ + conclusion: + | "success" + | "failure" + | "neutral" + | "cancelled" + | "timed_out" + | "action_required" + | "stale" + | null; + /** + * URL that points to the check suite API resource. + */ + url: string; + before: string | null; + after: string | null; + /** + * An array of pull requests that match this check suite. A pull request matches a check suite if they have the same `head_sha` and `head_branch`. When the check suite's `head_branch` is in a forked repository it will be `null` and the `pull_requests` array will be empty. + */ + pull_requests: CheckRunPullRequest[]; + app: App; + created_at: string; + updated_at: string; + runs_rerequestable?: boolean; + rerequestable?: boolean; + latest_check_runs_count: number; + check_runs_url: string; + head_commit: SimpleCommit; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface SimpleCommit { + id: string; + tree_id: string; + message: string; + timestamp: string; + author: Committer; + committer: Committer; +} +/** + * Metaproperties for Git author/committer information. + */ +export interface Committer { + /** + * The git author's name. + */ + name: string; + /** + * The git author's email address. + */ + email: string | null; + date?: string; + username?: string; +} +export interface CheckSuiteRequestedEvent { + action: "requested"; + /** + * The [check_suite](https://docs.github.com/en/rest/reference/checks#suites). 
+ */ + check_suite: { + id: number; + node_id: string; + /** + * The head branch name the changes are on. + */ + head_branch: string | null; + /** + * The SHA of the head commit that is being checked. + */ + head_sha: string; + /** + * The summary status for all check runs that are part of the check suite. Can be `queued`, `requested`, `in_progress`, or `completed`. + */ + status: "requested" | "in_progress" | "completed" | "queued" | null; + /** + * The summary conclusion for all check runs that are part of the check suite. Can be one of `success`, `failure`,` neutral`, `cancelled`, `timed_out`, `action_required` or `stale`. This value will be `null` until the check run has completed. + */ + conclusion: + | "success" + | "failure" + | "neutral" + | "cancelled" + | "timed_out" + | "action_required" + | "stale" + | null; + /** + * URL that points to the check suite API resource. + */ + url: string; + before: string | null; + after: string | null; + /** + * An array of pull requests that match this check suite. A pull request matches a check suite if they have the same `head_sha` and `head_branch`. When the check suite's `head_branch` is in a forked repository it will be `null` and the `pull_requests` array will be empty. + */ + pull_requests: CheckRunPullRequest[]; + app: App; + created_at: string; + updated_at: string; + runs_rerequestable?: boolean; + rerequestable?: boolean; + latest_check_runs_count: number; + check_runs_url: string; + head_commit: SimpleCommit; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface CheckSuiteRerequestedEvent { + action: "rerequested"; + /** + * The [check_suite](https://docs.github.com/en/rest/reference/checks#suites). + */ + check_suite: { + id: number; + node_id: string; + /** + * The head branch name the changes are on. + */ + head_branch: string | null; + /** + * The SHA of the head commit that is being checked. + */ + head_sha: string; + /** + * The summary status for all check runs that are part of the check suite. Can be `queued`, `requested`, `in_progress`, or `completed`. + */ + status: "requested" | "in_progress" | "completed" | "queued" | null; + /** + * The summary conclusion for all check runs that are part of the check suite. Can be one of `success`, `failure`,` neutral`, `cancelled`, `timed_out`, `action_required` or `stale`. This value will be `null` until the check run has completed. + */ + conclusion: + | "success" + | "failure" + | "neutral" + | "cancelled" + | "timed_out" + | "action_required" + | "stale" + | null; + /** + * URL that points to the check suite API resource. + */ + url: string; + before: string | null; + after: string | null; + /** + * An array of pull requests that match this check suite. A pull request matches a check suite if they have the same `head_sha` and `head_branch`. When the check suite's `head_branch` is in a forked repository it will be `null` and the `pull_requests` array will be empty. + */ + pull_requests: CheckRunPullRequest[]; + app: App; + created_at: string; + updated_at: string; + latest_check_runs_count: number; + check_runs_url: string; + head_commit: SimpleCommit; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface CodeScanningAlertAppearedInBranchEvent { + action: "appeared_in_branch"; + /** + * The code scanning alert involved in the event. + */ + alert: { + /** + * The code scanning alert number. 
+ */ + number: number; + /** + * The time that the alert was created in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ.` + */ + created_at: string; + /** + * The REST API URL of the alert resource. + */ + url: string; + /** + * The GitHub URL of the alert resource. + */ + html_url: string; + instances: AlertInstance[]; + most_recent_instance?: AlertInstance; + /** + * State of a code scanning alert. + */ + state: "open" | "dismissed" | "fixed"; + dismissed_by: User | null; + /** + * The time that the alert was dismissed in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + dismissed_at: string | null; + /** + * The reason for dismissing or closing the alert. Can be one of: `false positive`, `won't fix`, and `used in tests`. + */ + dismissed_reason: "false positive" | "won't fix" | "used in tests" | null; + rule: { + /** + * A unique identifier for the rule used to detect the alert. + */ + id: string; + /** + * The severity of the alert. + */ + severity: "none" | "note" | "warning" | "error" | null; + /** + * A short description of the rule used to detect the alert. + */ + description: string; + }; + tool: { + /** + * The name of the tool used to generate the code scanning analysis alert. + */ + name: string; + /** + * The version of the tool used to detect the alert. + */ + version: string | null; + }; + }; + /** + * The Git reference of the code scanning alert. When the action is `reopened_by_user` or `closed_by_user`, the event was triggered by the `sender` and this value will be empty. + */ + ref: string; + /** + * The commit SHA of the code scanning alert. When the action is `reopened_by_user` or `closed_by_user`, the event was triggered by the `sender` and this value will be empty. + */ + commit_oid: string; + repository: Repository; + sender: GitHubOrg; + installation?: InstallationLite; + organization?: Organization; +} +export interface AlertInstance { + /** + * The full Git reference, formatted as `refs/heads/`. + */ + ref: string; + /** + * Identifies the configuration under which the analysis was executed. For example, in GitHub Actions this includes the workflow filename and job name. + */ + analysis_key: string; + /** + * Identifies the variable values associated with the environment in which the analysis that generated this alert instance was performed, such as the language that was analyzed. + */ + environment: string; + /** + * State of a code scanning alert. 
+ */ + state: "open" | "dismissed" | "fixed"; + commit_sha?: string; + message?: { + text?: string; + }; + location?: { + path?: string; + start_line?: number; + end_line?: number; + start_column?: number; + end_column?: number; + }; + classifications?: string[]; +} +export interface GitHubOrg { + login: "github"; + id: 9919; + node_id: "MDEyOk9yZ2FuaXphdGlvbjk5MTk="; + name?: "GitHub"; + email?: null; + avatar_url: "https://avatars.githubusercontent.com/u/9919?v=4"; + gravatar_id: ""; + url: "https://api.github.com/users/github"; + html_url: "https://github.com/github"; + followers_url: "https://api.github.com/users/github/followers"; + following_url: "https://api.github.com/users/github/following{/other_user}"; + gists_url: "https://api.github.com/users/github/gists{/gist_id}"; + starred_url: "https://api.github.com/users/github/starred{/owner}{/repo}"; + subscriptions_url: "https://api.github.com/users/github/subscriptions"; + organizations_url: "https://api.github.com/users/github/orgs"; + repos_url: "https://api.github.com/users/github/repos"; + events_url: "https://api.github.com/users/github/events{/privacy}"; + received_events_url: "https://api.github.com/users/github/received_events"; + type: "Organization"; + site_admin: false; +} +export interface CodeScanningAlertClosedByUserEvent { + action: "closed_by_user"; + /** + * The code scanning alert involved in the event. + */ + alert: { + /** + * The code scanning alert number. + */ + number: number; + /** + * The time that the alert was created in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ.` + */ + created_at: string; + /** + * The REST API URL of the alert resource. + */ + url: string; + /** + * The GitHub URL of the alert resource. + */ + html_url: string; + instances: (AlertInstance & { + state: "dismissed"; + })[]; + most_recent_instance?: AlertInstance; + /** + * State of a code scanning alert. + */ + state: "dismissed"; + dismissed_by: User; + /** + * The time that the alert was dismissed in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + dismissed_at: string; + /** + * The reason for dismissing or closing the alert. Can be one of: `false positive`, `won't fix`, and `used in tests`. + */ + dismissed_reason: "false positive" | "won't fix" | "used in tests" | null; + rule: { + /** + * A unique identifier for the rule used to detect the alert. + */ + id: string; + /** + * The severity of the alert. + */ + severity: "none" | "note" | "warning" | "error" | null; + /** + * A short description of the rule used to detect the alert. + */ + description: string; + name?: string; + full_description?: string; + tags?: null; + help?: null; + }; + tool: { + /** + * The name of the tool used to generate the code scanning analysis alert. + */ + name: string; + /** + * The version of the tool used to detect the alert. + */ + version: string | null; + guid?: string | null; + }; + }; + /** + * The Git reference of the code scanning alert. When the action is `reopened_by_user` or `closed_by_user`, the event was triggered by the `sender` and this value will be empty. + */ + ref: string; + /** + * The commit SHA of the code scanning alert. When the action is `reopened_by_user` or `closed_by_user`, the event was triggered by the `sender` and this value will be empty. + */ + commit_oid: string; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface CodeScanningAlertCreatedEvent { + action: "created"; + /** + * The code scanning alert involved in the event. 
+ */ + alert: { + /** + * The code scanning alert number. + */ + number: number; + /** + * The time that the alert was created in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ.` + */ + created_at: string; + /** + * The REST API URL of the alert resource. + */ + url: string; + /** + * The GitHub URL of the alert resource. + */ + html_url: string; + instances: (AlertInstance & { + state: "open" | "dismissed"; + })[]; + most_recent_instance?: AlertInstance; + /** + * State of a code scanning alert. + */ + state: "open" | "dismissed"; + dismissed_by: null; + /** + * The time that the alert was dismissed in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + dismissed_at: null; + /** + * The reason for dismissing or closing the alert. Can be one of: `false positive`, `won't fix`, and `used in tests`. + */ + dismissed_reason: null; + rule: { + /** + * A unique identifier for the rule used to detect the alert. + */ + id: string; + /** + * The severity of the alert. + */ + severity: "none" | "note" | "warning" | "error" | null; + /** + * A short description of the rule used to detect the alert. + */ + description: string; + name?: string; + full_description?: string; + tags?: null; + help?: null; + }; + tool: { + /** + * The name of the tool used to generate the code scanning analysis alert. + */ + name: string; + /** + * The version of the tool used to detect the alert. + */ + version: string | null; + guid?: string | null; + }; + }; + /** + * The Git reference of the code scanning alert. When the action is `reopened_by_user` or `closed_by_user`, the event was triggered by the `sender` and this value will be empty. + */ + ref: string; + /** + * The commit SHA of the code scanning alert. When the action is `reopened_by_user` or `closed_by_user`, the event was triggered by the `sender` and this value will be empty. + */ + commit_oid: string; + repository: Repository; + sender: GitHubOrg; + installation?: InstallationLite; + organization?: Organization; +} +export interface CodeScanningAlertFixedEvent { + action: "fixed"; + /** + * The code scanning alert involved in the event. + */ + alert: { + /** + * The code scanning alert number. + */ + number: number; + /** + * The time that the alert was created in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ.` + */ + created_at: string; + /** + * The REST API URL of the alert resource. + */ + url: string; + /** + * The GitHub URL of the alert resource. + */ + html_url: string; + instances: (AlertInstance & { + state: "fixed"; + })[]; + /** + * State of a code scanning alert. + */ + state: "fixed"; + dismissed_by: User | null; + /** + * The time that the alert was dismissed in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + dismissed_at: string | null; + /** + * The reason for dismissing or closing the alert. Can be one of: `false positive`, `won't fix`, and `used in tests`. + */ + dismissed_reason: "false positive" | "won't fix" | "used in tests" | null; + rule: { + /** + * A unique identifier for the rule used to detect the alert. + */ + id: string; + /** + * The severity of the alert. + */ + severity: "none" | "note" | "warning" | "error" | null; + /** + * A short description of the rule used to detect the alert. + */ + description: string; + name?: string; + full_description?: string; + tags?: null; + help?: null; + }; + tool: { + /** + * The name of the tool used to generate the code scanning analysis alert. + */ + name: string; + /** + * The version of the tool used to detect the alert. 
+ */ + version: string | null; + guid?: string | null; + }; + most_recent_instance?: AlertInstance; + instances_url?: string; + }; + /** + * The Git reference of the code scanning alert. When the action is `reopened_by_user` or `closed_by_user`, the event was triggered by the `sender` and this value will be empty. + */ + ref: string; + /** + * The commit SHA of the code scanning alert. When the action is `reopened_by_user` or `closed_by_user`, the event was triggered by the `sender` and this value will be empty. + */ + commit_oid: string; + repository: Repository; + sender: GitHubOrg; + installation?: InstallationLite; + organization?: Organization; +} +export interface CodeScanningAlertReopenedEvent { + action: "reopened"; + /** + * The code scanning alert involved in the event. + */ + alert: { + /** + * The code scanning alert number. + */ + number: number; + /** + * The time that the alert was created in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ.` + */ + created_at: string; + /** + * The REST API URL of the alert resource. + */ + url: string; + /** + * The GitHub URL of the alert resource. + */ + html_url: string; + instances: (AlertInstance & { + state: "open"; + })[]; + most_recent_instance?: AlertInstance; + /** + * State of a code scanning alert. + */ + state: "open" | "dismissed" | "fixed"; + dismissed_by: null; + /** + * The time that the alert was dismissed in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + dismissed_at: null; + /** + * The reason for dismissing or closing the alert. Can be one of: `false positive`, `won't fix`, and `used in tests`. + */ + dismissed_reason: null; + rule: { + /** + * A unique identifier for the rule used to detect the alert. + */ + id: string; + /** + * The severity of the alert. + */ + severity: "none" | "note" | "warning" | "error" | null; + /** + * A short description of the rule used to detect the alert. + */ + description: string; + name?: string; + full_description?: string; + tags?: null; + help?: null; + }; + tool: { + /** + * The name of the tool used to generate the code scanning analysis alert. + */ + name: string; + /** + * The version of the tool used to detect the alert. + */ + version: string | null; + guid?: string | null; + }; + }; + /** + * The Git reference of the code scanning alert. When the action is `reopened_by_user` or `closed_by_user`, the event was triggered by the `sender` and this value will be empty. + */ + ref: string; + /** + * The commit SHA of the code scanning alert. When the action is `reopened_by_user` or `closed_by_user`, the event was triggered by the `sender` and this value will be empty. + */ + commit_oid: string; + repository: Repository; + sender: GitHubOrg; + installation?: InstallationLite; + organization?: Organization; +} +export interface CodeScanningAlertReopenedByUserEvent { + action: "reopened_by_user"; + /** + * The code scanning alert involved in the event. + */ + alert: { + /** + * The code scanning alert number. + */ + number: number; + /** + * The time that the alert was created in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ.` + */ + created_at: string; + /** + * The REST API URL of the alert resource. + */ + url: string; + /** + * The GitHub URL of the alert resource. + */ + html_url: string; + instances: (AlertInstance & { + state: "open"; + })[]; + most_recent_instance?: AlertInstance; + /** + * State of a code scanning alert. + */ + state: "open"; + dismissed_by: null; + /** + * The time that the alert was dismissed in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. 
+ */ + dismissed_at: null; + /** + * The reason for dismissing or closing the alert. Can be one of: `false positive`, `won't fix`, and `used in tests`. + */ + dismissed_reason: null; + rule: { + /** + * A unique identifier for the rule used to detect the alert. + */ + id: string; + /** + * The severity of the alert. + */ + severity: "none" | "note" | "warning" | "error" | null; + /** + * A short description of the rule used to detect the alert. + */ + description: string; + }; + tool: { + /** + * The name of the tool used to generate the code scanning analysis alert. + */ + name: string; + /** + * The version of the tool used to detect the alert. + */ + version: string | null; + }; + }; + /** + * The Git reference of the code scanning alert. When the action is `reopened_by_user` or `closed_by_user`, the event was triggered by the `sender` and this value will be empty. + */ + ref: string; + /** + * The commit SHA of the code scanning alert. When the action is `reopened_by_user` or `closed_by_user`, the event was triggered by the `sender` and this value will be empty. + */ + commit_oid: string; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +/** + * A commit comment is created. The type of activity is specified in the `action` property. + */ +export interface CommitCommentCreatedEvent { + /** + * The action performed. Can be `created`. + */ + action: "created"; + /** + * The [commit comment](https://docs.github.com/en/rest/reference/repos#get-a-commit-comment) resource. + */ + comment: { + url: string; + html_url: string; + /** + * The ID of the commit comment. + */ + id: number; + /** + * The node ID of the commit comment. + */ + node_id: string; + user: User; + /** + * The line index in the diff to which the comment applies. + */ + position: number | null; + /** + * The line of the blob to which the comment applies. The last line of the range for a multi-line comment + */ + line: number | null; + /** + * The relative path of the file to which the comment applies. + */ + path: string | null; + /** + * The SHA of the commit to which the comment applies. + */ + commit_id: string; + created_at: string; + updated_at: string; + author_association: AuthorAssociation; + /** + * The text of the comment. + */ + body: string; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +/** + * A Git branch or tag is created. + */ +export interface CreateEvent { + /** + * The [`git ref`](https://docs.github.com/en/rest/reference/git#get-a-reference) resource. + */ + ref: string; + /** + * The type of Git ref object created in the repository. Can be either `branch` or `tag`. + */ + ref_type: "tag" | "branch"; + /** + * The name of the repository's default branch (usually `main`). + */ + master_branch: string; + /** + * The repository's current description. + */ + description: string | null; + /** + * The pusher type for the event. Can be either `user` or a deploy key. + */ + pusher_type: string; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +/** + * A Git branch or tag is deleted. + */ +export interface DeleteEvent { + /** + * The [`git ref`](https://docs.github.com/en/rest/reference/git#get-a-reference) resource. + */ + ref: string; + /** + * The type of Git ref object deleted in the repository. Can be either `branch` or `tag`. + */ + ref_type: "tag" | "branch"; + /** + * The pusher type for the event. 
Can be either `user` or a deploy key. + */ + pusher_type: string; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface DependabotAlertCreatedEvent { + action: "created"; + alert: DependabotAlert & { + state: "open"; + fixed_at: null; + dismissed_at: null; + dismissed_by: null; + dismissed_reason: null; + dismissed_comment: null; + }; + repository: Repository; + sender: GitHubOrg; + installation?: InstallationLite; + organization?: Organization; +} +/** + * A Dependabot alert. + */ +export interface DependabotAlert { + /** + * The security alert number. + */ + number: number; + /** + * The state of the Dependabot alert. + */ + state: "dismissed" | "fixed" | "open"; + /** + * Details for the vulnerable dependency. + */ + dependency: { + package: DependabotAlertPackage; + /** + * The full path to the dependency manifest file, relative to the root of the repository. + */ + manifest_path: string; + /** + * The execution scope of the vulnerable dependency. + */ + scope: "development" | "runtime" | null; + }; + /** + * Details for the GitHub Security Advisory. + */ + security_advisory: { + /** + * Details for the GitHub Security Advisory. + */ + ghsa_id: string; + /** + * The unique CVE ID assigned to the advisory. + */ + cve_id: string | null; + /** + * A short, plain text summary of the advisory. + */ + summary: string; + /** + * A long-form Markdown-supported description of the advisory. + */ + description: string; + /** + * Vulnerable version range information for the advisory. + */ + vulnerabilities: { + package: DependabotAlertPackage; + /** + * The severity of the vulnerability. + */ + severity: "low" | "medium" | "high" | "critical"; + /** + * Conditions that identify vulnerable versions of this vulnerability's package. + */ + vulnerable_version_range: string; + /** + * Details pertaining to the package version that patches this vulnerability. + */ + first_patched_version: { + /** + * The package version that patches this vulnerability. + */ + identifier: string; + }; + }[]; + /** + * The severity of the advisory. + */ + severity: "low" | "medium" | "high" | "critical"; + cvss: SecurityAdvisoryCvss; + /** + * Details for the advisory pertaining to Common Weakness Enumeration. + */ + cwes: SecurityAdvisoryCwes[]; + /** + * Values that identify this advisory among security information sources. + */ + identifiers: { + /** + * The type of advisory identifier. + */ + type: "CVE" | "GHSA"; + /** + * The value of the advisory identifer. + */ + value: string; + }[]; + /** + * Links to additional advisory information. + */ + references: { + /** + * The URL of the reference. + */ + url: string; + }[]; + /** + * The time that the advisory was published in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + published_at: string; + /** + * The time that the advisory was last modified in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + updated_at: string; + /** + * The time that the advisory was withdrawn in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + withdrawn_at: string | null; + }; + /** + * Details pertaining to one vulnerable version range for the advisory. + */ + security_vulnerability: { + package: DependabotAlertPackage; + /** + * The severity of the vulnerability. + */ + severity: "low" | "medium" | "high" | "critical"; + /** + * Conditions that identify vulnerable versions of this vulnerability's package. 
+ */ + vulnerable_version_range: string; + /** + * Details pertaining to the package version that patches this vulnerability. + */ + first_patched_version: { + /** + * The package version that patches this vulnerability. + */ + identifier: string; + }; + }; + /** + * The REST API URL of the alert resource. + */ + url: string; + /** + * The GitHub URL of the alert resource. + */ + html_url: string; + /** + * The time that the alert was created in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + created_at: string; + /** + * The time that the alert was last updated in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + updated_at: string; + /** + * The time that the alert was dismissed in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + dismissed_at: string | null; + dismissed_by: User | null; + /** + * The reason that the alert was dismissed. + */ + dismissed_reason: + | "fix_started" + | "inaccurate" + | "no_bandwidth" + | "not_used" + | "tolerable_risk" + | null; + /** + * An optional comment associated with the alert's dismissal. + */ + dismissed_comment: string | null; + /** + * The time that the alert was no longer detected and was considered fixed in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. + */ + fixed_at: string | null; +} +/** + * Details for the vulnerable package. + */ +export interface DependabotAlertPackage { + /** + * The unique package name within its ecosystem. + */ + name: string; + /** + * The package's language or package management ecosystem. + */ + ecosystem: string; +} +/** + * Details for the advisory pertaining to the Common Vulnerability Scoring System. + */ +export interface SecurityAdvisoryCvss { + /** + * The overall CVSS score of the advisory. + */ + score: number; + /** + * The full CVSS vector string for the advisory. + */ + vector_string: string | null; +} +/** + * A CWE weakness assigned to the advisory. + */ +export interface SecurityAdvisoryCwes { + /** + * The unique CWE ID. + */ + cwe_id: string; + /** + * The short, plain text name of the CWE. + */ + name: string; +} +export interface DependabotAlertDismissedEvent { + action: "dismissed"; + alert: DependabotAlert & { + state: "dismissed"; + dismissed_at: string; + dismissed_by: User; + dismissed_reason: + | "fix_started" + | "inaccurate" + | "no_bandwidth" + | "not_used" + | "tolerable_risk"; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface DependabotAlertFixedEvent { + action: "fixed"; + alert: DependabotAlert & { + state: "fixed"; + fixed_at: string; + }; + repository: Repository; + sender: GitHubOrg; + installation?: InstallationLite; + organization?: Organization; +} +export interface DependabotAlertReintroducedEvent { + action: "reintroduced"; + alert: DependabotAlert; + repository: Repository; + sender: GitHubOrg; + installation?: InstallationLite; + organization?: Organization; +} +export interface DependabotAlertReopenedEvent { + action: "reopened"; + alert: DependabotAlert; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface DeployKeyCreatedEvent { + action: "created"; + /** + * The [`deploy key`](https://docs.github.com/en/rest/reference/deployments#get-a-deploy-key) resource. 
+ */ + key: { + id: number; + key: string; + url: string; + title: string; + verified: boolean; + created_at: string; + read_only: boolean; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface DeployKeyDeletedEvent { + action: "deleted"; + /** + * The [`deploy key`](https://docs.github.com/en/rest/reference/deployments#get-a-deploy-key) resource. + */ + key: { + id: number; + key: string; + url: string; + title: string; + verified: boolean; + created_at: string; + read_only: boolean; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface DeploymentCreatedEvent { + action: "created"; + deployment: Deployment; + workflow: Workflow | null; + workflow_run: DeploymentWorkflowRun | null; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +/** + * The [deployment](https://docs.github.com/en/rest/reference/deployments#list-deployments). + */ +export interface Deployment { + url: string; + /** + * Unique identifier of the deployment + */ + id: number; + node_id: string; + sha: string; + /** + * The ref to deploy. This can be a branch, tag, or sha. + */ + ref: string; + /** + * Parameter to specify a task to execute + */ + task: string; + payload: { + [k: string]: unknown; + }; + original_environment: string; + /** + * Name of the target deployment environment. + */ + environment: string; + /** + * Specifies if the given environment will no longer exist at some point in the future. Default: false. + */ + transient_environment?: boolean; + /** + * Specifies if the given environment is one that end-users directly interact with. Default: false. + */ + production_environment?: boolean; + description: string | null; + creator: User; + created_at: string; + updated_at: string; + statuses_url: string; + repository_url: string; + performed_via_github_app?: App | null; +} +export interface Workflow { + badge_url: string; + created_at: string; + html_url: string; + id: number; + name: string; + node_id: string; + path: string; + state: string; + updated_at: string; + url: string; +} +export interface DeploymentWorkflowRun { + id: number; + name: string; + path?: string; + display_title?: string; + node_id: string; + head_branch: string; + head_sha: string; + run_number: number; + event: string; + status: "requested" | "in_progress" | "completed" | "queued"; + conclusion: + | "success" + | "failure" + | "neutral" + | "cancelled" + | "timed_out" + | "action_required" + | "stale" + | null; + workflow_id: number; + check_suite_id: number; + check_suite_node_id: string; + url: string; + html_url: string; + pull_requests: CheckRunPullRequest[]; + created_at: string; + updated_at: string; + actor: User; + triggering_actor: User; + run_attempt: number; + run_started_at: string; + referenced_workflows?: ReferencedWorkflow[]; +} +/** + * A workflow referenced/reused by the initial caller workflow + */ +export interface ReferencedWorkflow { + path: string; + sha: string; + ref?: string; +} +export interface DeploymentStatusCreatedEvent { + action: "created"; + /** + * The [deployment status](https://docs.github.com/en/rest/reference/deployments#list-deployment-statuses). + */ + deployment_status: { + url: string; + id: number; + node_id: string; + /** + * The new state. Can be `pending`, `success`, `failure`, or `error`. 
+ */ + state: string; + creator: User; + /** + * The optional human-readable description added to the status. + */ + description: string; + environment: string; + environment_url?: string | ""; + log_url?: string; + /** + * The optional link added to the status. + */ + target_url: string; + created_at: string; + updated_at: string; + deployment_url: string; + repository_url: string; + performed_via_github_app?: App | null; + }; + deployment: Deployment; + check_run?: { + /** + * The id of the check. + */ + id: number; + /** + * The name of the check run. + */ + name: string; + node_id: string; + /** + * The SHA of the commit that is being checked. + */ + head_sha: string; + external_id: string; + url: string; + html_url: string; + details_url: string; + /** + * The current status of the check run. Can be `queued`, `in_progress`, or `completed`. + */ + status: "queued" | "in_progress" | "completed" | "waiting"; + /** + * The result of the completed check run. Can be one of `success`, `failure`, `neutral`, `cancelled`, `timed_out`, `action_required` or `stale`. This value will be `null` until the check run has completed. + */ + conclusion: + | "success" + | "failure" + | "neutral" + | "cancelled" + | "timed_out" + | "action_required" + | "stale" + | "skipped" + | null; + started_at: string; + completed_at: string | null; + }; + workflow_run?: DeploymentWorkflowRun; + workflow?: Workflow; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface DiscussionAnsweredEvent { + action: "answered"; + discussion: Discussion & { + category: { + is_answerable: true; + }; + answer_html_url: string; + answer_chosen_at: string; + answer_chosen_by: User; + }; + answer: { + id: number; + node_id: string; + html_url: string; + parent_id: null; + child_comment_count: number; + repository_url: string; + discussion_id: number; + author_association: AuthorAssociation; + user: User; + created_at: string; + updated_at: string; + body: string; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface Discussion { + repository_url: string; + category: { + id: number; + repository_id: number; + emoji: string; + name: string; + description: string; + created_at: string; + updated_at: string; + slug: string; + is_answerable: boolean; + }; + answer_html_url: string | null; + answer_chosen_at: string | null; + answer_chosen_by: User | null; + html_url: string; + id: number; + node_id: string; + number: number; + /** + * The discussion post's title. + */ + title: string; + user: User; + state: "open" | "locked" | "converting"; + locked: boolean; + comments: number; + created_at: string; + updated_at: string; + author_association: AuthorAssociation; + active_lock_reason: string | null; + /** + * The discussion post's body text. 
+ */ + body: string; + reactions?: Reactions; +} +export interface Reactions { + url: string; + total_count: number; + "+1": number; + "-1": number; + laugh: number; + hooray: number; + confused: number; + heart: number; + rocket: number; + eyes: number; +} +export interface DiscussionCategoryChangedEvent { + changes: { + category: { + from: { + id: number; + repository_id: number; + emoji: string; + name: string; + description: string; + created_at: string; + updated_at: string; + slug: string; + is_answerable: boolean; + }; + }; + }; + action: "category_changed"; + discussion: Discussion; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface DiscussionCreatedEvent { + action: "created"; + discussion: Discussion & { + state: "open" | "converting"; + locked: false; + answer_html_url: null; + answer_chosen_at: null; + answer_chosen_by: null; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface DiscussionDeletedEvent { + action: "deleted"; + discussion: Discussion; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface DiscussionEditedEvent { + changes?: { + title?: { + from: string; + }; + body?: { + from: string; + }; + }; + action: "edited"; + discussion: Discussion; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface DiscussionLabeledEvent { + action: "labeled"; + discussion: Discussion; + label: Label; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface Label { + id: number; + node_id: string; + /** + * URL for the label + */ + url: string; + /** + * The name of the label. 
+ */ + name: string; + description: string | null; + /** + * 6-character hex code, without the leading #, identifying the color + */ + color: string; + default: boolean; +} +export interface DiscussionLockedEvent { + action: "locked"; + discussion: Discussion & { + state: "locked"; + locked: true; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface DiscussionPinnedEvent { + action: "pinned"; + discussion: Discussion; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface DiscussionTransferredEvent { + changes: { + new_discussion: Discussion; + new_repository: Repository; + }; + action: "transferred"; + discussion: Discussion; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface DiscussionUnansweredEvent { + action: "unanswered"; + discussion: Discussion & { + category: { + is_answerable: true; + }; + answer_html_url: null; + answer_chosen_at: null; + answer_chosen_by: null; + }; + old_answer: { + id: number; + node_id: string; + html_url: string; + parent_id: null; + child_comment_count: number; + repository_url: string; + discussion_id: number; + author_association: AuthorAssociation; + user: User; + created_at: string; + updated_at: string; + body: string; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface DiscussionUnlabeledEvent { + action: "unlabeled"; + discussion: Discussion; + label: Label; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface DiscussionUnlockedEvent { + action: "unlocked"; + discussion: Discussion & { + state: "open"; + locked: false; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface DiscussionUnpinnedEvent { + action: "unpinned"; + discussion: Discussion; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface DiscussionCommentCreatedEvent { + action: "created"; + comment: { + id: number; + node_id: string; + html_url: string; + parent_id: number | null; + child_comment_count: number; + repository_url: string; + discussion_id: number; + author_association: AuthorAssociation; + user: User; + created_at: string; + updated_at: string; + /** + * The main text of the comment. + */ + body: string; + reactions: Reactions; + }; + discussion: Discussion; + repository: Repository; + sender: User; + installation: InstallationLite; + organization?: Organization; +} +export interface DiscussionCommentDeletedEvent { + action: "deleted"; + comment: { + id: number; + node_id: string; + html_url: string; + parent_id: number | null; + child_comment_count: number; + repository_url: string; + discussion_id: number; + author_association: AuthorAssociation; + user: User; + created_at: string; + updated_at: string; + /** + * The main text of the comment. 
+ */ + body: string; + reactions: Reactions; + }; + discussion: Discussion; + repository: Repository; + sender: User; + installation: InstallationLite; + organization?: Organization; +} +export interface DiscussionCommentEditedEvent { + changes: { + body: { + from: string; + }; + }; + action: "edited"; + comment: { + id: number; + node_id: string; + html_url: string; + parent_id: number | null; + child_comment_count: number; + repository_url: string; + discussion_id: number; + author_association: AuthorAssociation; + user: User; + created_at: string; + updated_at: string; + /** + * The main text of the comment. + */ + body: string; + reactions: Reactions; + }; + discussion: Discussion; + repository: Repository; + sender: User; + installation: InstallationLite; + organization?: Organization; +} +/** + * A user forks a repository. + */ +export interface ForkEvent { + /** + * The created [`repository`](https://docs.github.com/en/rest/reference/repos#get-a-repository) resource. + */ + forkee: Repository & { + fork?: true; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface GithubAppAuthorizationRevokedEvent { + action: "revoked"; + sender: User; +} +/** + * A wiki page is created or updated. + */ +export interface GollumEvent { + /** + * The pages that were updated. + */ + pages: { + /** + * The name of the page. + */ + page_name: string; + /** + * The current page title. + */ + title: string; + summary: null; + /** + * The action that was performed on the page. Can be `created` or `edited`. + */ + action: "created" | "edited"; + /** + * The latest commit SHA of the page. + */ + sha: string; + /** + * Points to the HTML wiki page. + */ + html_url: string; + }[]; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface InstallationCreatedEvent { + action: "created"; + installation: Installation; + /** + * An array of repository objects that the installation can access. + */ + repositories?: { + /** + * Unique identifier of the repository + */ + id: number; + node_id: string; + /** + * The name of the repository. + */ + name: string; + full_name: string; + /** + * Whether the repository is private or public. + */ + private: boolean; + }[]; + requester?: User; + sender: User; +} +/** + * The GitHub App installation. + */ +export interface Installation { + /** + * The ID of the installation. + */ + id: number; + account: User; + /** + * Describe whether all repositories have been selected or there's a selection involved + */ + repository_selection: "all" | "selected"; + access_tokens_url: string; + repositories_url: string; + html_url: string; + app_id: number; + app_slug?: string; + /** + * The ID of the user or organization this token is being scoped to. + */ + target_id: number; + target_type: "User" | "Organization"; + permissions: { + /** + * The level of permission granted to the access token for GitHub Actions workflows, workflow runs, and artifacts. + */ + actions?: "read" | "write"; + /** + * The level of permission granted to the access token for repository creation, deletion, settings, teams, and collaborators creation. + */ + administration?: "read" | "write"; + blocking?: "read" | "write"; + /** + * The level of permission granted to the access token for checks on code. 
+ */ + checks?: "read" | "write"; + content_references?: "read" | "write"; + /** + * The level of permission granted to the access token for repository contents, commits, branches, downloads, releases, and merges. + */ + contents?: "read" | "write"; + /** + * The level of permission granted to the access token for deployments and deployment statuses. + */ + deployments?: "read" | "write"; + discussions?: "read" | "write"; + emails?: "read" | "write"; + /** + * The level of permission granted to the access token for managing repository environments. + */ + environments?: "read" | "write"; + /** + * The level of permission granted to the access token for issues and related comments, assignees, labels, and milestones. + */ + issues?: "read" | "write"; + /** + * The level of permission granted to the access token for organization teams and members. + */ + members?: "read" | "write"; + /** + * The level of permission granted to the access token to search repositories, list collaborators, and access repository metadata. + */ + metadata?: "read" | "write"; + /** + * The level of permission granted to the access token to manage access to an organization. + */ + organization_administration?: "read" | "write"; + organization_events?: "read" | "write"; + /** + * The level of permission granted to the access token to manage the post-receive hooks for an organization. + */ + organization_hooks?: "read" | "write"; + /** + * The level of permission granted to the access token for organization packages published to GitHub Packages. + */ + organization_packages?: "read" | "write"; + /** + * The level of permission granted to the access token for viewing an organization's plan. + */ + organization_plan?: "read" | "write"; + /** + * The level of permission granted to the access token to manage organization projects and projects beta (where available). + */ + organization_projects?: "read" | "write"; + /** + * The level of permission granted to the access token to manage organization secrets. + */ + organization_secrets?: "read" | "write"; + /** + * The level of permission granted to the access token to view and manage GitHub Actions self-hosted runners available to an organization. + */ + organization_self_hosted_runners?: "read" | "write"; + /** + * The level of permission granted to the access token to view and manage users blocked by the organization. + */ + organization_user_blocking?: "read" | "write"; + /** + * The level of permission granted to the access token for packages published to GitHub Packages. + */ + packages?: "read" | "write"; + /** + * The level of permission granted to the access token to retrieve Pages statuses, configuration, and builds, as well as create new builds. + */ + pages?: "read" | "write"; + /** + * The level of permission granted to the access token for pull requests and related comments, assignees, labels, milestones, and merges. + */ + pull_requests?: "read" | "write"; + /** + * The level of permission granted to the access token to manage the post-receive hooks for a repository. + */ + repository_hooks?: "read" | "write"; + /** + * The level of permission granted to the access token to manage repository projects, columns, and cards. + */ + repository_projects?: "read" | "write"; + /** + * The level of permission granted to the access token to view and manage secret scanning alerts. + */ + secret_scanning_alerts?: "read" | "write"; + /** + * The level of permission granted to the access token to manage repository secrets. 
+ */ + secrets?: "read" | "write"; + /** + * The level of permission granted to the access token to view and manage security events like code scanning alerts. + */ + security_events?: "read" | "write"; + security_scanning_alert?: "read" | "write"; + /** + * The level of permission granted to the access token to manage just a single file. + */ + single_file?: "read" | "write"; + /** + * The level of permission granted to the access token for commit statuses. + */ + statuses?: "read" | "write"; + /** + * The level of permission granted to the access token to manage team discussions and related comments. + */ + team_discussions?: "read" | "write"; + /** + * The level of permission granted to the access token to manage Dependabot alerts. + */ + vulnerability_alerts?: "read" | "write"; + /** + * The level of permission granted to the access token to update GitHub Actions workflow files. + */ + workflows?: "read" | "write"; + }; + events: ( + | "branch_protection_rule" + | "check_run" + | "check_suite" + | "code_scanning_alert" + | "commit_comment" + | "content_reference" + | "create" + | "delete" + | "deployment" + | "deployment_review" + | "deployment_status" + | "deploy_key" + | "discussion" + | "discussion_comment" + | "fork" + | "gollum" + | "issues" + | "issue_comment" + | "label" + | "member" + | "membership" + | "merge_queue_entry" + | "milestone" + | "organization" + | "org_block" + | "page_build" + | "project" + | "projects_v2_item" + | "project_card" + | "project_column" + | "public" + | "pull_request" + | "pull_request_review" + | "pull_request_review_comment" + | "pull_request_review_thread" + | "push" + | "registry_package" + | "release" + | "repository" + | "repository_dispatch" + | "secret_scanning_alert" + | "secret_scanning_alert_location" + | "star" + | "status" + | "team" + | "team_add" + | "watch" + | "workflow_dispatch" + | "workflow_job" + | "workflow_run" + )[]; + created_at: string | number; + updated_at: string | number; + single_file_name: string | null; + has_multiple_single_files?: boolean; + single_file_paths?: string[]; + suspended_by: User | null; + suspended_at: string | null; +} +export interface InstallationDeletedEvent { + action: "deleted"; + installation: Installation; + /** + * An array of repository objects that the installation can access. + */ + repositories?: { + /** + * Unique identifier of the repository + */ + id: number; + node_id: string; + /** + * The name of the repository. + */ + name: string; + full_name: string; + /** + * Whether the repository is private or public. + */ + private: boolean; + }[]; + requester?: null; + sender: User; +} +export interface InstallationNewPermissionsAcceptedEvent { + action: "new_permissions_accepted"; + installation: Installation; + /** + * An array of repository objects that the installation can access. + */ + repositories?: { + /** + * Unique identifier of the repository + */ + id: number; + node_id: string; + /** + * The name of the repository. + */ + name: string; + full_name: string; + /** + * Whether the repository is private or public. + */ + private: boolean; + }[]; + requester?: null; + sender: User; +} +export interface InstallationSuspendEvent { + action: "suspend"; + installation: Installation & { + suspended_by: User; + suspended_at: string; + }; + /** + * An array of repository objects that the installation can access. + */ + repositories?: { + /** + * Unique identifier of the repository + */ + id: number; + node_id: string; + /** + * The name of the repository. 
+ */ + name: string; + full_name: string; + /** + * Whether the repository is private or public. + */ + private: boolean; + }[]; + requester?: null; + sender: User; +} +export interface InstallationUnsuspendEvent { + action: "unsuspend"; + installation: Installation & { + suspended_by: null; + suspended_at: null; + }; + /** + * An array of repository objects that the installation can access. + */ + repositories?: { + /** + * Unique identifier of the repository + */ + id: number; + node_id: string; + /** + * The name of the repository. + */ + name: string; + full_name: string; + /** + * Whether the repository is private or public. + */ + private: boolean; + }[]; + requester?: null; + sender: User; +} +export interface InstallationRepositoriesAddedEvent { + action: "added"; + installation: Installation; + /** + * Describe whether all repositories have been selected or there's a selection involved + */ + repository_selection: "all" | "selected"; + /** + * An array of repository objects, which were added to the installation. + */ + repositories_added: { + /** + * Unique identifier of the repository + */ + id: number; + node_id: string; + /** + * The name of the repository. + */ + name: string; + full_name: string; + /** + * Whether the repository is private or public. + */ + private: boolean; + }[]; + /** + * An array of repository objects, which were removed from the installation. + * + * @maxItems 0 + */ + repositories_removed: []; + requester: User | null; + sender: User; +} +export interface InstallationRepositoriesRemovedEvent { + action: "removed"; + installation: Installation; + /** + * Describe whether all repositories have been selected or there's a selection involved + */ + repository_selection: "all" | "selected"; + /** + * An array of repository objects, which were added to the installation. + * + * @maxItems 0 + */ + repositories_added: []; + /** + * An array of repository objects, which were removed from the installation. + */ + repositories_removed: { + /** + * Unique identifier of the repository + */ + id: number; + node_id: string; + /** + * The name of the repository. + */ + name: string; + full_name: string; + /** + * Whether the repository is private or public. + */ + private: boolean; + }[]; + requester: User | null; + sender: User; +} +export interface IssueCommentCreatedEvent { + action: "created"; + /** + * The [issue](https://docs.github.com/en/rest/reference/issues) the comment belongs to. + */ + issue: Issue & { + assignee: User | null; + /** + * State of the issue; either 'open' or 'closed' + */ + state: "open" | "closed"; + locked: boolean; + labels: Label[]; + }; + comment: IssueComment; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +/** + * The [issue](https://docs.github.com/en/rest/reference/issues) itself. 
+ */ +export interface Issue { + /** + * URL for the issue + */ + url: string; + repository_url: string; + labels_url: string; + comments_url: string; + events_url: string; + html_url: string; + id: number; + node_id: string; + /** + * Number uniquely identifying the issue within its repository + */ + number: number; + /** + * Title of the issue + */ + title: string; + user: User; + labels?: Label[]; + /** + * State of the issue; either 'open' or 'closed' + */ + state?: "open" | "closed"; + locked?: boolean; + assignee?: User | null; + assignees: User[]; + milestone: Milestone | null; + comments: number; + created_at: string; + updated_at: string; + closed_at: string | null; + author_association: AuthorAssociation; + active_lock_reason: "resolved" | "off-topic" | "too heated" | "spam" | null; + draft?: boolean; + performed_via_github_app?: App | null; + pull_request?: { + url?: string; + html_url?: string; + diff_url?: string; + patch_url?: string; + merged_at?: string | null; + }; + /** + * Contents of the issue + */ + body: string | null; + reactions: Reactions; + timeline_url?: string; + /** + * The reason for the current state + */ + state_reason?: string | null; +} +/** + * A collection of related issues and pull requests. + */ +export interface Milestone { + url: string; + html_url: string; + labels_url: string; + id: number; + node_id: string; + /** + * The number of the milestone. + */ + number: number; + /** + * The title of the milestone. + */ + title: string; + description: string | null; + creator: User; + open_issues: number; + closed_issues: number; + /** + * The state of the milestone. + */ + state: "open" | "closed"; + created_at: string; + updated_at: string; + due_on: string | null; + closed_at: string | null; +} +/** + * The [comment](https://docs.github.com/en/rest/reference/issues#comments) itself. + */ +export interface IssueComment { + /** + * URL for the issue comment + */ + url: string; + html_url: string; + issue_url: string; + /** + * Unique identifier of the issue comment + */ + id: number; + node_id: string; + user: User; + created_at: string; + updated_at: string; + author_association: AuthorAssociation; + /** + * Contents of the issue comment + */ + body: string; + reactions: Reactions; + performed_via_github_app: App | null; +} +export interface IssueCommentDeletedEvent { + action: "deleted"; + /** + * The [issue](https://docs.github.com/en/rest/reference/issues) the comment belongs to. + */ + issue: Issue & { + assignee: User | null; + /** + * State of the issue; either 'open' or 'closed' + */ + state: "open" | "closed"; + locked: boolean; + labels: Label[]; + }; + comment: IssueComment; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface IssueCommentEditedEvent { + action: "edited"; + /** + * The changes to the comment. + */ + changes: { + body?: { + /** + * The previous version of the body. + */ + from: string; + }; + }; + /** + * The [issue](https://docs.github.com/en/rest/reference/issues) the comment belongs to. + */ + issue: Issue & { + assignee: User | null; + /** + * State of the issue; either 'open' or 'closed' + */ + state: "open" | "closed"; + locked: boolean; + labels: Label[]; + }; + comment: IssueComment; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +/** + * Activity related to an issue. The type of activity is specified in the action property. 
+ */ +export interface IssuesAssignedEvent { + /** + * The action that was performed. + */ + action: "assigned"; + issue: Issue; + /** + * The optional user who was assigned or unassigned from the issue. + */ + assignee?: User | null; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface IssuesClosedEvent { + /** + * The action that was performed. + */ + action: "closed"; + /** + * The [issue](https://docs.github.com/en/rest/reference/issues) itself. + */ + issue: Issue & { + state: "closed"; + closed_at: string; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface IssuesDeletedEvent { + action: "deleted"; + issue: Issue; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface IssuesDemilestonedEvent { + action: "demilestoned"; + issue: Issue & { + milestone: null; + }; + milestone: Milestone; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface IssuesEditedEvent { + action: "edited"; + issue: Issue; + label?: Label; + /** + * The changes to the issue. + */ + changes: { + body?: { + /** + * The previous version of the body. + */ + from: string; + }; + title?: { + /** + * The previous version of the title. + */ + from: string; + }; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface IssuesLabeledEvent { + action: "labeled"; + issue: Issue; + label?: Label; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface IssuesLockedEvent { + action: "locked"; + issue: Issue & { + locked: true; + active_lock_reason: "resolved" | "off-topic" | "too heated" | "spam" | null; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface IssuesMilestonedEvent { + action: "milestoned"; + issue: Issue & { + milestone: Milestone; + }; + milestone: Milestone; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface IssuesOpenedEvent { + action: "opened"; + changes?: { + old_issue: Issue; + old_repository: Repository; + }; + issue: Issue & { + state: "open"; + closed_at: null; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface IssuesPinnedEvent { + action: "pinned"; + issue: Issue; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface IssuesReopenedEvent { + action: "reopened"; + issue: Issue & { + state: "open"; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface IssuesTransferredEvent { + action: "transferred"; + changes: { + new_issue: Issue; + new_repository: Repository; + }; + issue: Issue; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface IssuesUnassignedEvent { + /** + * The action that was performed. + */ + action: "unassigned"; + issue: Issue; + /** + * The optional user who was assigned or unassigned from the issue. 
+ */ + assignee?: User | null; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface IssuesUnlabeledEvent { + action: "unlabeled"; + issue: Issue; + label?: Label; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface IssuesUnlockedEvent { + action: "unlocked"; + issue: Issue & { + locked: false; + active_lock_reason: null; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface IssuesUnpinnedEvent { + action: "unpinned"; + issue: Issue; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface LabelCreatedEvent { + action: "created"; + label: Label; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface LabelDeletedEvent { + action: "deleted"; + label: Label; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface LabelEditedEvent { + action: "edited"; + label: Label; + /** + * The changes to the label if the action was `edited`. + */ + changes?: { + color?: { + /** + * The previous version of the color if the action was `edited`. + */ + from: string; + }; + name?: { + /** + * The previous version of the name if the action was `edited`. + */ + from: string; + }; + description?: { + /** + * The previous version of the description if the action was `edited`. + */ + from: string; + }; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface MarketplacePurchaseCancelledEvent { + action: "cancelled"; + effective_date: string; + sender: { + login: string; + id: number; + avatar_url: string; + gravatar_id: string; + url: string; + html_url: string; + followers_url: string; + following_url: string; + gists_url: string; + starred_url: string; + subscriptions_url: string; + organizations_url: string; + repos_url: string; + events_url: string; + received_events_url: string; + type: string; + site_admin: boolean; + email: string; + }; + marketplace_purchase: MarketplacePurchase & { + next_billing_date: string; + }; + previous_marketplace_purchase?: MarketplacePurchase; +} +export interface MarketplacePurchase { + account: { + type: string; + id: number; + node_id: string; + login: string; + organization_billing_email: string; + }; + billing_cycle: string; + unit_count: number; + on_free_trial: boolean; + free_trial_ends_on: string | null; + next_billing_date?: string; + plan: { + id: number; + name: string; + description: string; + monthly_price_in_cents: number; + yearly_price_in_cents: number; + price_model: string; + has_free_trial: boolean; + unit_name: string | null; + bullets: string[]; + }; +} +export interface MarketplacePurchaseChangedEvent { + action: "changed"; + effective_date: string; + sender: { + login: string; + id: number; + avatar_url: string; + gravatar_id: string; + url: string; + html_url: string; + followers_url: string; + following_url: string; + gists_url: string; + starred_url: string; + subscriptions_url: string; + organizations_url: string; + repos_url: string; + events_url: string; + received_events_url: string; + type: string; + site_admin: boolean; + email: string; + }; + marketplace_purchase: MarketplacePurchase & { + next_billing_date: string; + }; + 
previous_marketplace_purchase?: MarketplacePurchase; +} +export interface MarketplacePurchasePendingChangeEvent { + action: "pending_change"; + effective_date: string; + sender: { + login: string; + id: number; + avatar_url: string; + gravatar_id: string; + url: string; + html_url: string; + followers_url: string; + following_url: string; + gists_url: string; + starred_url: string; + subscriptions_url: string; + organizations_url: string; + repos_url: string; + events_url: string; + received_events_url: string; + type: string; + site_admin: boolean; + email: string; + }; + marketplace_purchase: MarketplacePurchase & { + next_billing_date: string; + }; + previous_marketplace_purchase?: MarketplacePurchase; +} +export interface MarketplacePurchasePendingChangeCancelledEvent { + action: "pending_change_cancelled"; + effective_date: string; + sender: { + login: string; + id: number; + avatar_url: string; + gravatar_id: string; + url: string; + html_url: string; + followers_url: string; + following_url: string; + gists_url: string; + starred_url: string; + subscriptions_url: string; + organizations_url: string; + repos_url: string; + events_url: string; + received_events_url: string; + type: string; + site_admin: boolean; + email: string; + }; + marketplace_purchase: MarketplacePurchase & { + next_billing_date: string; + }; + previous_marketplace_purchase?: MarketplacePurchase; +} +export interface MarketplacePurchasePurchasedEvent { + action: "purchased"; + effective_date: string; + sender: { + login: string; + id: number; + avatar_url: string; + gravatar_id: string; + url: string; + html_url: string; + followers_url: string; + following_url: string; + gists_url: string; + starred_url: string; + subscriptions_url: string; + organizations_url: string; + repos_url: string; + events_url: string; + received_events_url: string; + type: string; + site_admin: boolean; + email: string; + }; + marketplace_purchase: MarketplacePurchase & { + next_billing_date: string; + }; + previous_marketplace_purchase?: MarketplacePurchase; +} +/** + * Activity related to repository collaborators. The type of activity is specified in the action property. + */ +export interface MemberAddedEvent { + action: "added"; + changes?: { + permission?: { + to: "write" | "admin"; + }; + }; + member: User; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; +} +export interface MemberEditedEvent { + action: "edited"; + member: User; + /** + * The changes to the collaborator permissions + */ + changes: { + old_permission: { + /** + * The previous permissions of the collaborator if the action was edited. + */ + from: string; + }; + }; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; +} +export interface MemberRemovedEvent { + action: "removed"; + member: User; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; +} +export interface MembershipAddedEvent { + action: "added"; + /** + * The scope of the membership. Currently, can only be `team`. + */ + scope: "team"; + member: User; + sender: User; + team: Team; + organization: Organization; + installation?: InstallationLite; +} +/** + * Groups of organization members that gives permissions on specified repositories. 
+ */ +export interface Team { + /** + * Name of the team + */ + name: string; + /** + * Unique identifier of the team + */ + id: number; + node_id: string; + slug: string; + /** + * Description of the team + */ + description: string | null; + privacy: "open" | "closed" | "secret"; + /** + * URL for the team + */ + url: string; + html_url: string; + members_url: string; + repositories_url: string; + /** + * Permission that the team will have for its repositories + */ + permission: string; + parent?: { + /** + * Name of the team + */ + name: string; + /** + * Unique identifier of the team + */ + id: number; + node_id: string; + slug: string; + /** + * Description of the team + */ + description: string | null; + privacy: "open" | "closed" | "secret"; + /** + * URL for the team + */ + url: string; + html_url: string; + members_url: string; + repositories_url: string; + /** + * Permission that the team will have for its repositories + */ + permission: string; + } | null; +} +export interface MembershipRemovedEvent { + action: "removed"; + /** + * The scope of the membership. Currently, can only be `team`. + */ + scope: "team" | "organization"; + member: User; + sender: User; + /** + * The [team](https://docs.github.com/en/rest/reference/teams) for the membership. + */ + team: + | Team + | { + id: number; + name: string; + deleted?: boolean; + }; + organization: Organization; + installation?: InstallationLite; +} +export interface MergeGroupChecksRequestedEvent { + action: "checks_requested"; + /** + * The merge group. + */ + merge_group: { + /** + * The SHA of the merge group. + */ + head_sha: string; + /** + * The full ref of the merge group. + */ + head_ref: string; + /** + * The full ref of the branch the merge group will be merged into. + */ + base_ref: string; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface MetaDeletedEvent { + action: "deleted"; + /** + * The id of the modified webhook. + */ + hook_id: number; + /** + * The modified webhook. This will contain different keys based on the type of webhook it is: repository, organization, business, app, or GitHub Marketplace. + */ + hook: { + type: string; + id: number; + name: string; + active: boolean; + events: WebhookEvents; + /** + * Configuration object of the webhook + */ + config: { + /** + * The media type used to serialize the payloads. Supported values include `json` and `form`. The default is `form`. + */ + content_type: "json" | "form"; + /** + * If provided, the `secret` will be used as the `key` to generate the HMAC hex digest value for [delivery signature headers](https://docs.github.com/webhooks/event-payloads/#delivery-headers). + */ + secret?: string; + /** + * The URL to which the payloads will be delivered. + */ + url: string; + /** + * Determines whether the SSL certificate of the host for `url` will be verified when delivering payloads. Supported values include `0` (verification is performed) and `1` (verification is not performed). The default is `0`. 
+ */ + insecure_ssl: "0" | "1"; + }; + updated_at: string; + created_at: string; + }; + repository: Repository; + sender: User; +} +export interface MilestoneClosedEvent { + action: "closed"; + milestone: Milestone & { + state: "closed"; + closed_at: string; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface MilestoneCreatedEvent { + action: "created"; + milestone: Milestone & { + state: "open"; + closed_at: null; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface MilestoneDeletedEvent { + action: "deleted"; + milestone: Milestone; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface MilestoneEditedEvent { + action: "edited"; + /** + * The changes to the milestone if the action was `edited`. + */ + changes: { + description?: { + /** + * The previous version of the description if the action was `edited`. + */ + from: string; + }; + due_on?: { + /** + * The previous version of the due date if the action was `edited`. + */ + from: string; + }; + title?: { + /** + * The previous version of the title if the action was `edited`. + */ + from: string; + }; + }; + milestone: Milestone; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface MilestoneOpenedEvent { + action: "opened"; + milestone: Milestone & { + state: "open"; + closed_at: null; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface OrgBlockBlockedEvent { + action: "blocked"; + blocked_user: User; + sender: User; + installation?: InstallationLite; + organization: Organization; +} +export interface OrgBlockUnblockedEvent { + action: "unblocked"; + blocked_user: User; + sender: User; + installation?: InstallationLite; + organization: Organization; +} +export interface OrganizationDeletedEvent { + action: "deleted"; + membership?: Membership; + sender: User; + installation?: InstallationLite; + organization: Organization; +} +/** + * The membership between the user and the organization. Not present when the action is `member_invited`. + */ +export interface Membership { + url: string; + /** + * The state of the user's membership in the team. + */ + state: string; + /** + * The role of the user in the team. + */ + role: string; + organization_url: string; + user: User; +} +export interface OrganizationMemberAddedEvent { + action: "member_added"; + membership: Membership; + sender: User; + installation?: InstallationLite; + organization: Organization; +} +export interface OrganizationMemberInvitedEvent { + action: "member_invited"; + /** + * The invitation for the user or email if the action is `member_invited`. 
+ */ + invitation: { + id: number; + node_id: string; + login: string; + email: string | null; + role: string; + created_at: string; + failed_at: string | null; + failed_reason: string | null; + inviter: User; + team_count: number; + invitation_teams_url: string; + }; + user: User; + sender: User; + installation?: InstallationLite; + organization: Organization; +} +export interface OrganizationMemberRemovedEvent { + action: "member_removed"; + membership: Membership; + sender: User; + installation?: InstallationLite; + organization: Organization; +} +export interface OrganizationRenamedEvent { + action: "renamed"; + membership: Membership; + sender: User; + installation?: InstallationLite; + organization: Organization; +} +export interface PackagePublishedEvent { + action: "published"; + /** + * Information about the package. + */ + package: { + /** + * Unique identifier of the package. + */ + id: number; + /** + * The name of the package. + */ + name: string; + namespace: string; + description: string | null; + ecosystem: string; + /** + * The type of supported package. Packages in GitHub's Gradle registry have the type `maven`. Docker images pushed to GitHub's Container registry (`ghcr.io`) have the type `container`. You can use the type `docker` to find images that were pushed to GitHub's Docker registry (`docker.pkg.github.com`), even if these have now been migrated to the Container registry. + */ + package_type: + | "npm" + | "maven" + | "rubygems" + | "docker" + | "nuget" + | "container"; + html_url: string; + created_at: string; + updated_at: string; + owner: User; + /** + * A version of a software package + */ + package_version: { + /** + * Unique identifier of the package version. + */ + id: number; + version: string; + summary: string; + /** + * The name of the package version. + */ + name: string; + description: string; + body: string; + body_html: string; + release: { + url: string; + html_url: string; + id: number; + tag_name: string; + target_commitish: string; + name: string; + draft: boolean; + author: User; + prerelease: boolean; + created_at: string; + published_at: string; + }; + manifest: string; + html_url: string; + tag_name: string; + target_commitish: string; + target_oid: string; + draft: boolean; + prerelease: boolean; + created_at: string; + updated_at: string; + /** + * Package Version Metadata + */ + metadata: unknown[]; + docker_metadata: unknown[]; + package_files: { + download_url: string; + id: number; + name: string; + sha256: string; + sha1: string; + md5: string; + content_type: string; + state: string; + size: number; + created_at: string; + updated_at: string; + }[]; + author: User; + source_url: string; + installation_command: string; + }; + registry: { + about_url: string; + name: string; + type: string; + url: string; + vendor: string; + }; + }; + repository: Repository; + sender: User; + organization?: Organization; +} +export interface PackageUpdatedEvent { + action: "updated"; + /** + * Information about the package. + */ + package: { + /** + * Unique identifier of the package. + */ + id: number; + /** + * The name of the package. + */ + name: string; + namespace: string; + description: string | null; + ecosystem: string; + package_type: + | "npm" + | "maven" + | "rubygems" + | "docker" + | "nuget" + | "container"; + html_url: string; + created_at: string; + updated_at: string; + owner: User; + /** + * A version of a software package + */ + package_version: { + /** + * Unique identifier of the package version. 
+ */ + id: number; + version: string; + summary: string; + /** + * The name of the package version. + */ + name: string; + description: string; + body: string; + body_html: string; + release: { + url: string; + html_url: string; + id: number; + tag_name: string; + target_commitish: string; + name: string; + draft: boolean; + author: User; + prerelease: boolean; + created_at: string; + published_at: string; + }; + manifest: string; + html_url: string; + tag_name: string; + target_commitish: string; + target_oid: string; + draft: boolean; + prerelease: boolean; + created_at: string; + updated_at: string; + /** + * Package Version Metadata + */ + metadata: unknown[]; + docker_metadata: unknown[]; + package_files: { + download_url: string; + id: number; + name: string; + sha256: string; + sha1: string; + md5: string; + content_type: string; + state: string; + size: number; + created_at: string; + updated_at: string; + }[]; + author: User; + source_url: string; + installation_command: string; + }; + registry: { + about_url: string; + name: string; + type: string; + url: string; + vendor: string; + }; + }; + repository: Repository; + sender: User; + organization?: Organization; +} +/** + * Page Build + */ +export interface PageBuildEvent { + id: number; + /** + * The [List GitHub Pages builds](https://docs.github.com/en/rest/reference/repos#list-github-pages-builds) itself. + */ + build: { + url: string; + status: string; + error: { + message: string | null; + }; + pusher: User; + commit: string; + duration: number; + created_at: string; + updated_at: string; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface PingEvent { + zen: string; + /** + * The ID of the webhook that triggered the ping. + */ + hook_id: number; + /** + * The [webhook configuration](https://docs.github.com/en/rest/reference/repos#get-a-repository-webhook). + */ + hook: { + type: "Repository" | "Organization" | "App"; + id: number; + name: string; + active: boolean; + /** + * When you register a new GitHub App, GitHub sends a ping event to the **webhook URL** you specified during registration. The event contains the `app_id`, which is required for [authenticating](https://docs.github.com/en/apps/building-integrations/setting-up-and-registering-github-apps/about-authentication-options-for-github-apps) an app. + */ + app_id?: number; + events: WebhookEvents; + /** + * Configuration object of the webhook + */ + config: { + /** + * The media type used to serialize the payloads. Supported values include `json` and `form`. The default is `form`. + */ + content_type: "json" | "form"; + /** + * If provided, the `secret` will be used as the `key` to generate the HMAC hex digest value for [delivery signature headers](https://docs.github.com/webhooks/event-payloads/#delivery-headers). + */ + secret?: string; + /** + * The URL to which the payloads will be delivered. + */ + url: string; + /** + * Determines whether the SSL certificate of the host for `url` will be verified when delivering payloads. Supported values include `0` (verification is performed) and `1` (verification is not performed). The default is `0`. 
+ */ + insecure_ssl: "0" | "1"; + }; + updated_at: string; + created_at: string; + url: string; + test_url?: string; + ping_url: string; + deliveries_url: string; + last_response?: { + code: null; + status: string; + message: null; + }; + }; + repository?: Repository; + sender?: User; + organization?: Organization; +} +export interface ProjectClosedEvent { + action: "closed"; + project: Project; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface Project { + owner_url: string; + url: string; + html_url: string; + columns_url: string; + id: number; + node_id: string; + /** + * Name of the project + */ + name: string; + /** + * Body of the project + */ + body: string | null; + number: number; + /** + * State of the project; either 'open' or 'closed' + */ + state: "open" | "closed"; + creator: User; + created_at: string; + updated_at: string; +} +export interface ProjectCreatedEvent { + action: "created"; + project: Project; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface ProjectDeletedEvent { + action: "deleted"; + project: Project; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface ProjectEditedEvent { + action: "edited"; + /** + * The changes to the project if the action was `edited`. + */ + changes?: { + name?: { + /** + * The changes to the project if the action was `edited`. + */ + from: string; + }; + body?: { + /** + * The previous version of the body if the action was `edited`. + */ + from: string; + }; + }; + project: Project; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface ProjectReopenedEvent { + action: "reopened"; + project: Project; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface ProjectCardConvertedEvent { + action: "converted"; + changes: { + note: { + from: string; + }; + }; + project_card: ProjectCard; + repository?: Repository; + sender: User; + organization?: Organization; + installation?: InstallationLite; +} +export interface ProjectCard { + url: string; + project_url: string; + column_url: string; + column_id: number; + /** + * The project card's ID + */ + id: number; + node_id: string; + note: string | null; + /** + * Whether or not the card is archived + */ + archived: boolean; + creator: User; + created_at: string; + updated_at: string; + content_url?: string; + after_id?: string | number | null; +} +export interface ProjectCardCreatedEvent { + action: "created"; + project_card: ProjectCard; + repository?: Repository; + sender: User; + organization?: Organization; + installation?: InstallationLite; +} +export interface ProjectCardDeletedEvent { + action: "deleted"; + project_card: ProjectCard; + repository?: Repository; + sender: User; + organization?: Organization; + installation?: InstallationLite; +} +export interface ProjectCardEditedEvent { + action: "edited"; + changes: { + note: { + from: string; + }; + }; + project_card: ProjectCard; + repository?: Repository; + sender: User; + organization?: Organization; + installation?: InstallationLite; +} +export interface ProjectCardMovedEvent { + action: "moved"; + changes?: { + column_id: { + from: number; + }; + }; + project_card: ProjectCard & { + after_id: number | null; + }; + repository?: Repository; + sender: User; + 
organization?: Organization; + installation?: InstallationLite; +} +export interface ProjectColumnCreatedEvent { + action: "created"; + project_column: ProjectColumn; + repository?: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface ProjectColumn { + url: string; + project_url: string; + cards_url: string; + /** + * The unique identifier of the project column + */ + id: number; + node_id: string; + /** + * Name of the project column + */ + name: string; + created_at: string; + updated_at: string; +} +export interface ProjectColumnDeletedEvent { + action: "deleted"; + project_column: ProjectColumn; + repository?: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface ProjectColumnEditedEvent { + action: "edited"; + changes: { + name?: { + from: string; + }; + }; + project_column: ProjectColumn; + repository?: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface ProjectColumnMovedEvent { + action: "moved"; + project_column: ProjectColumn; + repository?: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface ProjectsV2ItemArchivedEvent { + changes: { + archived_at: { + from: null; + to: string; + }; + }; + action: "archived"; + projects_v2_item: ProjectsV2Item & { + archived_at: string; + }; + sender: User; + organization?: Organization; + installation?: InstallationLite; +} +/** + * The project item itself. To find more information about the project item, you can use `node_id` (the node ID of the project item) and `project_node_id` (the node ID of the project) to query information in the GraphQL API. For more information, see "[Using the API to manage projects](https://docs.github.com/en/issues/trying-out-the-new-projects-experience/using-the-api-to-manage-projects)." 
+ */ +export interface ProjectsV2Item { + id: number; + node_id: string; + project_node_id: string; + content_node_id: string; + content_type: "DraftIssue" | "Issue" | "PullRequest"; + creator: User; + created_at: string; + updated_at: string; + archived_at: string | null; +} +export interface ProjectsV2ItemConvertedEvent { + changes: { + content_type: { + from: "DraftIssue"; + to: "Issue"; + }; + }; + action: "converted"; + projects_v2_item: ProjectsV2Item & { + content_type: "Issue"; + }; + sender: User; + organization?: Organization; + installation?: InstallationLite; +} +export interface ProjectsV2ItemCreatedEvent { + action: "created"; + projects_v2_item: ProjectsV2Item & { + archived_at: null; + }; + sender: User; + organization?: Organization; + installation?: InstallationLite; +} +export interface ProjectsV2ItemDeletedEvent { + action: "deleted"; + projects_v2_item: ProjectsV2Item; + sender: User; + organization?: Organization; + installation?: InstallationLite; +} +export interface ProjectsV2ItemEditedEvent { + changes: { + field_value: { + field_type: "single_select" | "date" | "number" | "text" | "iteration"; + field_node_id: string; + }; + }; + action: "edited"; + projects_v2_item: ProjectsV2Item; + sender: User; + organization?: Organization; + installation?: InstallationLite; +} +export interface ProjectsV2ItemReorderedEvent { + changes: { + previous_projects_v2_item_node_id: { + from: string; + to: string | null; + }; + }; + action: "reordered"; + projects_v2_item: ProjectsV2Item; + sender: User; + organization?: Organization; + installation?: InstallationLite; +} +export interface ProjectsV2ItemRestoredEvent { + changes: { + archived_at: { + from: string; + to: null; + }; + }; + action: "restored"; + projects_v2_item: ProjectsV2Item & { + archived_at: null; + }; + sender: User; + organization?: Organization; + installation?: InstallationLite; +} +/** + * When a private repository is made public. + */ +export interface PublicEvent { + repository: Repository & { + private: false; + }; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface PullRequestAssignedEvent { + action: "assigned"; + /** + * The pull request number. + */ + number: number; + pull_request: PullRequest; + assignee: User; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; +} +export interface PullRequest { + url: string; + id: number; + node_id: string; + html_url: string; + diff_url: string; + patch_url: string; + issue_url: string; + /** + * Number uniquely identifying the pull request within its repository. + */ + number: number; + /** + * State of this Pull Request. Either `open` or `closed`. + */ + state: "open" | "closed"; + locked: boolean; + /** + * The title of the pull request. 
+ */ + title: string; + user: User; + body: string | null; + created_at: string; + updated_at: string; + closed_at: string | null; + merged_at: string | null; + merge_commit_sha: string | null; + assignee: User | null; + assignees: User[]; + requested_reviewers: (User | Team)[]; + requested_teams: Team[]; + labels: Label[]; + milestone: Milestone | null; + commits_url: string; + review_comments_url: string; + review_comment_url: string; + comments_url: string; + statuses_url: string; + head: { + label: string; + ref: string; + sha: string; + user: User; + repo: Repository; + }; + base: { + label: string; + ref: string; + sha: string; + user: User; + repo: Repository; + }; + _links: { + self: Link; + html: Link; + issue: Link; + comments: Link; + review_comments: Link; + review_comment: Link; + commits: Link; + statuses: Link; + }; + author_association: AuthorAssociation; + auto_merge: PullRequestAutoMerge | null; + active_lock_reason: "resolved" | "off-topic" | "too heated" | "spam" | null; + /** + * Indicates whether or not the pull request is a draft. + */ + draft: boolean; + merged: boolean | null; + mergeable: boolean | null; + rebaseable: boolean | null; + mergeable_state: string; + merged_by: User | null; + comments: number; + review_comments: number; + /** + * Indicates whether maintainers can modify the pull request. + */ + maintainer_can_modify: boolean; + commits: number; + additions: number; + deletions: number; + changed_files: number; +} +export interface Link { + href: string; +} +/** + * The status of auto merging a pull request. + */ +export interface PullRequestAutoMerge { + enabled_by: User; + /** + * The merge method to use. + */ + merge_method: "merge" | "squash" | "rebase"; + /** + * Title for the merge commit message. + */ + commit_title: string; + /** + * Commit message for the merge commit. + */ + commit_message: string; +} +export interface PullRequestAutoMergeDisabledEvent { + action: "auto_merge_disabled"; + number: number; + pull_request: PullRequest; + reason: string; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; +} +export interface PullRequestAutoMergeEnabledEvent { + action: "auto_merge_enabled"; + number: number; + pull_request: PullRequest; + reason: string; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; +} +export interface PullRequestClosedEvent { + action: "closed"; + /** + * The pull request number. + */ + number: number; + pull_request: PullRequest & { + /** + * State of this Pull Request. Either `open` or `closed`. + */ + state: "closed"; + closed_at: string; + merged: boolean; + }; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; +} +export interface PullRequestConvertedToDraftEvent { + action: "converted_to_draft"; + /** + * The pull request number. + */ + number: number; + pull_request: PullRequest & { + closed_at: null; + merged_at: null; + /** + * Indicates whether or not the pull request is a draft. + */ + draft: true; + merged: false; + merged_by: null; + }; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; +} +export interface PullRequestDequeuedEvent { + action: "dequeued"; + /** + * The pull request number. + */ + number: number; + /** + * The reason the pull request was removed from a merge queue. 
+ */ + reason: string; + pull_request: PullRequest; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; +} +export interface PullRequestEditedEvent { + action: "edited"; + /** + * The pull request number. + */ + number: number; + /** + * The changes to the comment if the action was `edited`. + */ + changes: { + body?: { + /** + * The previous version of the body if the action was `edited`. + */ + from: string; + }; + title?: { + /** + * The previous version of the title if the action was `edited`. + */ + from: string; + }; + base?: { + ref: { + from: string; + }; + sha: { + from: string; + }; + }; + }; + pull_request: PullRequest; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; +} +export interface PullRequestLabeledEvent { + action: "labeled"; + /** + * The pull request number. + */ + number: number; + pull_request: PullRequest; + label: Label; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; +} +export interface PullRequestLockedEvent { + action: "locked"; + /** + * The pull request number. + */ + number: number; + pull_request: PullRequest; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; +} +export interface PullRequestOpenedEvent { + action: "opened"; + /** + * The pull request number. + */ + number: number; + pull_request: PullRequest & { + state: "open"; + closed_at: null; + merged_at: null; + active_lock_reason: null; + merged_by: null; + }; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; +} +export interface PullRequestQueuedEvent { + action: "queued"; + /** + * The pull request number. + */ + number: number; + pull_request: PullRequest; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; +} +export interface PullRequestReadyForReviewEvent { + action: "ready_for_review"; + /** + * The pull request number. + */ + number: number; + pull_request: PullRequest & { + state: "open"; + closed_at: null; + merged_at: null; + /** + * Indicates whether or not the pull request is a draft. + */ + draft: false; + merged: boolean; + merged_by: null; + }; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; +} +export interface PullRequestReopenedEvent { + action: "reopened"; + /** + * The pull request number. + */ + number: number; + pull_request: PullRequest & { + state: "open"; + closed_at: null; + merged_at: null; + merged: boolean; + merged_by: null; + }; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; +} +export interface PullRequestSynchronizeEvent { + action: "synchronize"; + /** + * The pull request number. + */ + number: number; + before: string; + after: string; + pull_request: PullRequest; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; +} +export interface PullRequestUnassignedEvent { + action: "unassigned"; + /** + * The pull request number. + */ + number: number; + pull_request: PullRequest; + assignee: User; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; +} +export interface PullRequestUnlabeledEvent { + action: "unlabeled"; + /** + * The pull request number. 
+ */ + number: number; + pull_request: PullRequest; + label: Label; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; +} +export interface PullRequestUnlockedEvent { + action: "unlocked"; + /** + * The pull request number. + */ + number: number; + pull_request: PullRequest; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; +} +export interface PullRequestReviewDismissedEvent { + action: "dismissed"; + review: PullRequestReview & { + state: "dismissed"; + }; + pull_request: SimplePullRequest; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; +} +/** + * The review that was affected. + */ +export interface PullRequestReview { + /** + * Unique identifier of the review + */ + id: number; + node_id: string; + user: User; + /** + * The text of the review. + */ + body: string | null; + /** + * A commit SHA for the review. + */ + commit_id: string; + submitted_at: string | null; + state: "commented" | "changes_requested" | "approved" | "dismissed"; + html_url: string; + pull_request_url: string; + author_association: AuthorAssociation; + _links: { + html: Link; + pull_request: Link; + }; +} +export interface SimplePullRequest { + url: string; + id: number; + node_id: string; + html_url: string; + diff_url: string; + patch_url: string; + issue_url: string; + number: number; + state: "open" | "closed"; + locked: boolean; + title: string; + user: User; + body: string | null; + created_at: string; + updated_at: string; + closed_at: string | null; + merged_at: string | null; + merge_commit_sha: string | null; + assignee: User | null; + assignees: User[]; + requested_reviewers: (User | Team)[]; + requested_teams: Team[]; + labels: Label[]; + milestone: Milestone | null; + draft: boolean; + commits_url: string; + review_comments_url: string; + review_comment_url: string; + comments_url: string; + statuses_url: string; + head: { + label: string; + ref: string; + sha: string; + user: User; + repo: Repository; + }; + base: { + label: string; + ref: string; + sha: string; + user: User; + repo: Repository; + }; + _links: { + self: Link; + html: Link; + issue: Link; + comments: Link; + review_comments: Link; + review_comment: Link; + commits: Link; + statuses: Link; + }; + author_association: AuthorAssociation; + auto_merge: PullRequestAutoMerge | null; + active_lock_reason: "resolved" | "off-topic" | "too heated" | "spam" | null; +} +export interface PullRequestReviewEditedEvent { + action: "edited"; + changes: { + body?: { + /** + * The previous version of the body if the action was `edited`. 
+ */ + from: string; + }; + }; + review: PullRequestReview; + pull_request: SimplePullRequest; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; +} +export interface PullRequestReviewSubmittedEvent { + action: "submitted"; + review: PullRequestReview; + pull_request: SimplePullRequest; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; +} +export interface PullRequestReviewCommentCreatedEvent { + action: "created"; + comment: PullRequestReviewComment; + pull_request: { + url: string; + id: number; + node_id: string; + html_url: string; + diff_url: string; + patch_url: string; + issue_url: string; + number: number; + state: "open" | "closed"; + locked: boolean; + title: string; + user: User; + body: string | null; + created_at: string; + updated_at: string; + closed_at: string | null; + merged_at: string | null; + merge_commit_sha: string | null; + assignee: User | null; + assignees: User[]; + requested_reviewers: (User | Team)[]; + requested_teams: Team[]; + labels: Label[]; + milestone: Milestone | null; + draft?: boolean; + commits_url: string; + review_comments_url: string; + review_comment_url: string; + comments_url: string; + statuses_url: string; + head: { + label: string; + ref: string; + sha: string; + user: User; + repo: Repository; + }; + base: { + label: string; + ref: string; + sha: string; + user: User; + repo: Repository; + }; + _links: { + self: Link; + html: Link; + issue: Link; + comments: Link; + review_comments: Link; + review_comment: Link; + commits: Link; + statuses: Link; + }; + auto_merge?: PullRequestAutoMerge | null; + author_association: AuthorAssociation; + active_lock_reason: "resolved" | "off-topic" | "too heated" | "spam" | null; + }; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; +} +/** + * The [comment](https://docs.github.com/en/rest/reference/pulls#comments) itself. + */ +export interface PullRequestReviewComment { + /** + * URL for the pull request review comment + */ + url: string; + /** + * The ID of the pull request review to which the comment belongs. + */ + pull_request_review_id: number; + /** + * The ID of the pull request review comment. + */ + id: number; + /** + * The node ID of the pull request review comment. + */ + node_id: string; + /** + * The diff of the line that the comment refers to. + */ + diff_hunk: string; + /** + * The relative path of the file to which the comment applies. + */ + path: string; + /** + * The line index in the diff to which the comment applies. + */ + position: number | null; + /** + * The index of the original line in the diff to which the comment applies. + */ + original_position: number; + /** + * The SHA of the commit to which the comment applies. + */ + commit_id: string; + /** + * The SHA of the original commit to which the comment applies. + */ + original_commit_id: string; + user: User; + /** + * The text of the comment. + */ + body: string; + created_at: string; + updated_at: string; + /** + * HTML URL for the pull request review comment. + */ + html_url: string; + /** + * URL for the pull request that the review comment belongs to. + */ + pull_request_url: string; + author_association: AuthorAssociation; + _links: { + self: Link; + html: Link; + pull_request: Link; + }; + reactions: Reactions; + /** + * The first line of the range for a multi-line comment. 
+ */ + start_line: number | null; + /** + * The first line of the range for a multi-line comment. + */ + original_start_line: number | null; + /** + * The side of the first line of the range for a multi-line comment. + */ + start_side: "LEFT" | "RIGHT" | null; + /** + * The line of the blob to which the comment applies. The last line of the range for a multi-line comment + */ + line: number | null; + /** + * The line of the blob to which the comment applies. The last line of the range for a multi-line comment + */ + original_line: number; + /** + * The side of the first line of the range for a multi-line comment. + */ + side: "LEFT" | "RIGHT"; + /** + * The comment ID to reply to. + */ + in_reply_to_id?: number; +} +export interface PullRequestReviewCommentDeletedEvent { + action: "deleted"; + comment: PullRequestReviewComment; + pull_request: { + url: string; + id: number; + node_id: string; + html_url: string; + diff_url: string; + patch_url: string; + issue_url: string; + number: number; + state: "open" | "closed"; + locked: boolean; + title: string; + user: User; + body: string | null; + created_at: string; + updated_at: string; + closed_at: string | null; + merged_at: string | null; + merge_commit_sha: string | null; + assignee: User | null; + assignees: User[]; + requested_reviewers: (User | Team)[]; + requested_teams: Team[]; + labels: Label[]; + milestone: Milestone | null; + draft?: boolean; + commits_url: string; + review_comments_url: string; + review_comment_url: string; + comments_url: string; + statuses_url: string; + head: { + label: string; + ref: string; + sha: string; + user: User; + repo: Repository; + }; + base: { + label: string; + ref: string; + sha: string; + user: User; + repo: Repository; + }; + _links: { + self: Link; + html: Link; + issue: Link; + comments: Link; + review_comments: Link; + review_comment: Link; + commits: Link; + statuses: Link; + }; + auto_merge?: PullRequestAutoMerge | null; + author_association: AuthorAssociation; + active_lock_reason: "resolved" | "off-topic" | "too heated" | "spam" | null; + }; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; +} +export interface PullRequestReviewCommentEditedEvent { + action: "edited"; + /** + * The changes to the comment. + */ + changes: { + body?: { + /** + * The previous version of the body. 
+ */ + from: string; + }; + }; + comment: PullRequestReviewComment; + pull_request: { + url: string; + id: number; + node_id: string; + html_url: string; + diff_url: string; + patch_url: string; + issue_url: string; + number: number; + state: "open" | "closed"; + locked: boolean; + title: string; + user: User; + body: string | null; + created_at: string; + updated_at: string; + closed_at: string | null; + merged_at: string | null; + merge_commit_sha: string | null; + assignee: User | null; + assignees: User[]; + requested_reviewers: (User | Team)[]; + requested_teams: Team[]; + labels: Label[]; + milestone: Milestone | null; + draft?: boolean; + commits_url: string; + review_comments_url: string; + review_comment_url: string; + comments_url: string; + statuses_url: string; + head: { + label: string; + ref: string; + sha: string; + user: User; + repo: Repository; + }; + base: { + label: string; + ref: string; + sha: string; + user: User; + repo: Repository; + }; + _links: { + self: Link; + html: Link; + issue: Link; + comments: Link; + review_comments: Link; + review_comment: Link; + commits: Link; + statuses: Link; + }; + auto_merge?: PullRequestAutoMerge | null; + author_association: AuthorAssociation; + active_lock_reason: "resolved" | "off-topic" | "too heated" | "spam" | null; + }; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; +} +export interface PullRequestReviewThreadResolvedEvent { + action: "resolved"; + thread: { + node_id: string; + comments: PullRequestReviewComment[]; + }; + pull_request: SimplePullRequest; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; +} +export interface PullRequestReviewThreadUnresolvedEvent { + action: "unresolved"; + thread: { + node_id: string; + comments: PullRequestReviewComment[]; + }; + pull_request: SimplePullRequest; + repository: Repository; + installation?: InstallationLite; + organization?: Organization; + sender: User; +} +export interface PushEvent { + /** + * The full git ref that was pushed. Example: `refs/heads/main` or `refs/tags/v3.14.1`. + */ + ref: string; + /** + * The SHA of the most recent commit on `ref` before the push. + */ + before: string; + /** + * The SHA of the most recent commit on `ref` after the push. + */ + after: string; + /** + * Whether this push created the `ref`. + */ + created: boolean; + /** + * Whether this push deleted the `ref`. + */ + deleted: boolean; + /** + * Whether this push was a force push of the `ref`. + */ + forced: boolean; + base_ref: string | null; + /** + * URL that shows the changes in this `ref` update, from the `before` commit to the `after` commit. For a newly created `ref` that is directly based on the default branch, this is the comparison between the head of the default branch and the `after` commit. Otherwise, this shows all commits until the `after` commit. + */ + compare: string; + /** + * An array of commit objects describing the pushed commits. (Pushed commits are all commits that are included in the `compare` between the `before` commit and the `after` commit.) The array includes a maximum of 20 commits. If necessary, you can use the [Commits API](https://docs.github.com/en/rest/reference/repos#commits) to fetch additional commits. This limit is applied to timeline events only and isn't applied to webhook deliveries. + */ + commits: Commit[]; + /** + * For pushes where `after` is or points to a commit object, an expanded representation of that commit. 
For pushes where `after` refers to an annotated tag object, an expanded representation of the commit pointed to by the annotated tag. + */ + head_commit: Commit | null; + repository: Repository; + pusher: Committer; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface Commit { + id: string; + tree_id: string; + /** + * Whether this commit is distinct from any that have been pushed before. + */ + distinct: boolean; + /** + * The commit message. + */ + message: string; + /** + * The ISO 8601 timestamp of the commit. + */ + timestamp: string; + /** + * URL that points to the commit API resource. + */ + url: string; + author: Committer; + committer: Committer; + /** + * An array of files added in the commit. For extremely large commits where GitHub is unable to calculate this list in a timely manner, this may be empty even if files were added. + */ + added: string[]; + /** + * An array of files modified by the commit. For extremely large commits where GitHub is unable to calculate this list in a timely manner, this may be empty even if files were modified. + */ + modified: string[]; + /** + * An array of files removed in the commit. For extremely large commits where GitHub is unable to calculate this list in a timely manner, this may be empty even if files were removed. + */ + removed: string[]; +} +export interface ReleaseCreatedEvent { + action: "created"; + release: Release; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +/** + * The [release](https://docs.github.com/en/rest/reference/repos/#get-a-release) object. + */ +export interface Release { + url: string; + assets_url: string; + upload_url: string; + html_url: string; + id: number; + node_id: string; + /** + * The name of the tag. + */ + tag_name: string; + /** + * Specifies the commitish value that determines where the Git tag is created from. + */ + target_commitish: string; + name: string; + /** + * Wether the release is a draft or published + */ + draft: boolean; + author: User; + /** + * Whether the release is identified as a prerelease or a full release. + */ + prerelease: boolean; + created_at: string | null; + published_at: string | null; + assets: ReleaseAsset[]; + tarball_url: string | null; + zipball_url: string | null; + body: string; + mentions_count?: number; + reactions?: Reactions; + discussion_url?: string; +} +/** + * Data related to a release. + */ +export interface ReleaseAsset { + url: string; + browser_download_url: string; + id: number; + node_id: string; + /** + * The file name of the asset. + */ + name: string; + label: string | null; + /** + * State of the release asset. + */ + state: "uploaded"; + content_type: string; + size: number; + download_count: number; + created_at: string; + updated_at: string; + uploader?: User; +} +export interface ReleaseDeletedEvent { + action: "deleted"; + release: Release; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface ReleaseEditedEvent { + action: "edited"; + changes: { + body?: { + /** + * The previous version of the body if the action was `edited`. + */ + from: string; + }; + name?: { + /** + * The previous version of the name if the action was `edited`. 
+ */ + from: string; + }; + }; + release: Release; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface ReleasePrereleasedEvent { + action: "prereleased"; + release: Release & { + /** + * Whether the release is identified as a prerelease or a full release. + */ + prerelease: true; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface ReleasePublishedEvent { + action: "published"; + release: Release & { + published_at: string; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface ReleaseReleasedEvent { + action: "released"; + release: Release; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface ReleaseUnpublishedEvent { + action: "unpublished"; + release: Release & { + published_at: null; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface RepositoryArchivedEvent { + action: "archived"; + repository: Repository & { + /** + * Whether the repository is archived. + */ + archived: true; + }; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface RepositoryCreatedEvent { + action: "created"; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface RepositoryDeletedEvent { + action: "deleted"; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface RepositoryEditedEvent { + action: "edited"; + changes: { + description?: { + from: string | null; + }; + default_branch?: { + from: string; + }; + homepage?: { + from: string | null; + }; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface RepositoryPrivatizedEvent { + action: "privatized"; + repository: Repository & { + /** + * Whether the repository is private or public. + */ + private: true; + }; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface RepositoryPublicizedEvent { + action: "publicized"; + repository: Repository & { + /** + * Whether the repository is private or public. + */ + private: false; + }; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface RepositoryRenamedEvent { + action: "renamed"; + changes: { + repository: { + name: { + from: string; + }; + }; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface RepositoryTransferredEvent { + action: "transferred"; + changes: { + owner: { + from: { + user?: User; + }; + }; + }; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface RepositoryUnarchivedEvent { + action: "unarchived"; + repository: Repository & { + /** + * Whether the repository is archived. 
+ */ + archived: false; + }; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface RepositoryDispatchEvent { + action: string; + branch: string; + client_payload: { + [k: string]: unknown; + }; + repository: Repository; + sender: User; + installation: InstallationLite; + organization?: Organization; +} +export interface RepositoryImportEvent { + status: "success" | "cancelled" | "failure"; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface RepositoryVulnerabilityAlertCreateEvent { + action: "create"; + alert: RepositoryVulnerabilityAlertAlert & { + state: "open"; + }; + repository: Repository; + sender: GitHubOrg; + organization?: Organization; +} +/** + * The security alert of the vulnerable dependency. + */ +export interface RepositoryVulnerabilityAlertAlert { + id: number; + number: number; + node_id: string; + state: "open" | "dismissed" | "fixed"; + affected_range: string; + affected_package_name: string; + dismisser?: User; + dismiss_reason?: string; + dismissed_at?: string; + severity: string; + ghsa_id: string; + external_reference: string; + external_identifier: string; + fixed_in: string; + fixed_at?: string; + fix_reason?: string; + created_at: string; +} +export interface RepositoryVulnerabilityAlertDismissEvent { + action: "dismiss"; + alert: RepositoryVulnerabilityAlertAlert & { + dismisser: User; + dismiss_reason: string; + dismissed_at: string; + state: "dismissed"; + }; + repository: Repository; + sender: GitHubOrg; + organization?: Organization; +} +export interface RepositoryVulnerabilityAlertReopenEvent { + action: "reopen"; + alert: RepositoryVulnerabilityAlertAlert & { + state: "open"; + }; + repository: Repository; + sender: GitHubOrg; + organization?: Organization; +} +export interface RepositoryVulnerabilityAlertResolveEvent { + action: "resolve"; + alert: RepositoryVulnerabilityAlertAlert & { + state: "fixed"; + fixed_at: string; + fix_reason: string; + }; + repository: Repository; + sender: GitHubOrg; + organization?: Organization; +} +export interface SecretScanningAlertCreatedEvent { + action: "created"; + /** + * The secret scanning alert involved in the event. + */ + alert: { + number: number; + secret_type: string; + resolution: null; + resolved_by: null; + resolved_at: null; + }; + repository: Repository; + organization?: Organization; + installation?: InstallationLite; +} +export interface SecretScanningAlertReopenedEvent { + action: "reopened"; + /** + * The secret scanning alert involved in the event. + */ + alert: { + number: number; + secret_type: string; + resolution: null; + resolved_by: null; + resolved_at: null; + }; + repository: Repository; + organization?: Organization; + installation?: InstallationLite; + sender: User; +} +export interface SecretScanningAlertResolvedEvent { + action: "resolved"; + /** + * The secret scanning alert involved in the event. + */ + alert: { + number: number; + secret_type: string; + resolution: "false_positive" | "wontfix" | "revoked" | "used_in_tests"; + resolved_by: User; + resolved_at: string; + }; + repository: Repository; + organization?: Organization; + installation?: InstallationLite; + sender: User; +} +export interface SecurityAdvisoryPerformedEvent { + action: "performed"; + /** + * The details of the security advisory, including summary, description, and severity. 
+ */ + security_advisory: { + cvss: { + vector_string: string | null; + score: number; + }; + cwes: { + cwe_id: string; + name: string; + }[]; + ghsa_id: string; + cve_id: string | null; + summary: string; + description: string; + severity: string; + identifiers: { + value: string; + type: string; + }[]; + references: { + url: string; + }[]; + published_at: string; + updated_at: string; + withdrawn_at: string | null; + vulnerabilities: { + package: { + ecosystem: string; + name: string; + }; + severity: string; + vulnerable_version_range: string; + first_patched_version: { + identifier: string; + } | null; + }[]; + }; +} +export interface SecurityAdvisoryPublishedEvent { + action: "published"; + /** + * The details of the security advisory, including summary, description, and severity. + */ + security_advisory: { + cvss: { + vector_string: string | null; + score: number; + }; + cwes: { + cwe_id: string; + name: string; + }[]; + ghsa_id: string; + cve_id: string | null; + summary: string; + description: string; + severity: string; + identifiers: { + value: string; + type: string; + }[]; + references: { + url: string; + }[]; + published_at: string; + updated_at: string; + withdrawn_at: string | null; + vulnerabilities: { + package: { + ecosystem: string; + name: string; + }; + severity: string; + vulnerable_version_range: string; + first_patched_version: { + identifier: string; + } | null; + }[]; + }; +} +export interface SecurityAdvisoryUpdatedEvent { + action: "updated"; + /** + * The details of the security advisory, including summary, description, and severity. + */ + security_advisory: { + cvss: { + vector_string: string | null; + score: number; + }; + cwes: { + cwe_id: string; + name: string; + }[]; + ghsa_id: string; + cve_id: string | null; + summary: string; + description: string; + severity: string; + identifiers: { + value: string; + type: string; + }[]; + references: { + url: string; + }[]; + published_at: string; + updated_at: string; + withdrawn_at: string | null; + vulnerabilities: { + package: { + ecosystem: string; + name: string; + }; + severity: string; + vulnerable_version_range: string; + first_patched_version: { + identifier: string; + } | null; + }[]; + }; +} +export interface SecurityAdvisoryWithdrawnEvent { + action: "withdrawn"; + /** + * The details of the security advisory, including summary, description, and severity. + */ + security_advisory: { + cvss: { + vector_string: string | null; + score: number; + }; + cwes: { + cwe_id: string; + name: string; + }[]; + ghsa_id: string; + cve_id: string | null; + summary: string; + description: string; + severity: string; + identifiers: { + value: string; + type: string; + }[]; + references: { + url: string; + }[]; + published_at: string; + updated_at: string; + withdrawn_at: string; + vulnerabilities: { + package: { + ecosystem: string; + name: string; + }; + severity: string; + vulnerable_version_range: string; + first_patched_version: { + identifier: string; + } | null; + }[]; + }; +} +export interface SponsorshipCancelledEvent { + action: "cancelled"; + sponsorship: { + node_id: string; + created_at: string; + sponsorable: User; + sponsor: User; + privacy_level: string; + tier: SponsorshipTier; + }; + sender: User; +} +/** + * The `tier_changed` and `pending_tier_change` will include the original tier before the change or pending change. For more information, see the pending tier change payload. 
+ */ +export interface SponsorshipTier { + node_id: string; + created_at: string; + description: string; + monthly_price_in_cents: number; + monthly_price_in_dollars: number; + name: string; + is_one_time: boolean; + is_custom_ammount: boolean; +} +export interface SponsorshipCreatedEvent { + action: "created"; + sponsorship: { + node_id: string; + created_at: string; + sponsorable: User; + sponsor: User; + privacy_level: string; + tier: SponsorshipTier; + }; + sender: User; +} +export interface SponsorshipEditedEvent { + action: "edited"; + sponsorship: { + node_id: string; + created_at: string; + sponsorable: User; + sponsor: User; + privacy_level: string; + tier: SponsorshipTier; + }; + changes: { + privacy_level?: { + /** + * The `edited` event types include the details about the change when someone edits a sponsorship to change the privacy. + */ + from: string; + }; + }; + sender: User; +} +export interface SponsorshipPendingCancellationEvent { + action: "pending_cancellation"; + sponsorship: { + node_id: string; + created_at: string; + sponsorable: User; + sponsor: User; + privacy_level: string; + tier: SponsorshipTier; + }; + /** + * The `pending_cancellation` and `pending_tier_change` event types will include the date the cancellation or tier change will take effect. + */ + effective_date?: string; + sender: User; +} +export interface SponsorshipPendingTierChangeEvent { + action: "pending_tier_change"; + sponsorship: { + node_id: string; + created_at: string; + sponsorable: User; + sponsor: User; + privacy_level: string; + tier: SponsorshipTier; + }; + /** + * The `pending_cancellation` and `pending_tier_change` event types will include the date the cancellation or tier change will take effect. + */ + effective_date?: string; + changes: { + tier: { + from: SponsorshipTier; + }; + }; + sender: User; +} +export interface SponsorshipTierChangedEvent { + action: "tier_changed"; + sponsorship: { + node_id: string; + created_at: string; + sponsorable: User; + sponsor: User; + privacy_level: string; + tier: SponsorshipTier; + }; + changes: { + tier: { + from: SponsorshipTier; + }; + }; + sender: User; +} +export interface StarCreatedEvent { + action: "created"; + /** + * The time the star was created. This is a timestamp in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. Will be `null` for the `deleted` action. + */ + starred_at: string; + repository: Repository; + sender: User; + organization?: Organization; + installation?: InstallationLite; +} +export interface StarDeletedEvent { + action: "deleted"; + /** + * The time the star was created. This is a timestamp in ISO 8601 format: `YYYY-MM-DDTHH:MM:SSZ`. Will be `null` for the `deleted` action. + */ + starred_at: null; + repository: Repository; + sender: User; + organization?: Organization; + installation?: InstallationLite; +} +export interface StatusEvent { + /** + * The unique identifier of the status. + */ + id: number; + /** + * The Commit SHA. + */ + sha: string; + name: string; + avatar_url?: string | null; + /** + * The optional link added to the status. + */ + target_url: string | null; + context: string; + /** + * The optional human-readable description added to the status. + */ + description: string | null; + /** + * The new state. Can be `pending`, `success`, `failure`, or `error`. 
+ */ + state: "pending" | "success" | "failure" | "error"; + commit: { + sha: string; + node_id: string; + commit: { + author: Committer & { + date: string; + }; + committer: Committer & { + date: string; + }; + message: string; + tree: { + sha: string; + url: string; + }; + url: string; + comment_count: number; + verification: { + verified: boolean; + reason: + | "expired_key" + | "not_signing_key" + | "gpgverify_error" + | "gpgverify_unavailable" + | "unsigned" + | "unknown_signature_type" + | "no_user" + | "unverified_email" + | "bad_email" + | "unknown_key" + | "malformed_signature" + | "invalid" + | "valid"; + signature: string | null; + payload: string | null; + }; + }; + url: string; + html_url: string; + comments_url: string; + author: User | null; + committer: User | null; + parents: { + sha: string; + url: string; + html_url: string; + }[]; + }; + /** + * An array of branch objects containing the status' SHA. Each branch contains the given SHA, but the SHA may or may not be the head of the branch. The array includes a maximum of 10 branches. + */ + branches: { + name: string; + commit: { + sha: string; + url: string; + }; + protected: boolean; + }[]; + created_at: string; + updated_at: string; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface TeamAddedToRepositoryEvent { + action: "added_to_repository"; + team: Team; + repository?: Repository; + sender: User; + organization: Organization; + installation?: InstallationLite; +} +export interface TeamCreatedEvent { + action: "created"; + team: Team; + repository?: Repository; + sender: User; + organization: Organization; + installation?: InstallationLite; +} +export interface TeamDeletedEvent { + action: "deleted"; + team: Team; + repository?: Repository; + sender: User; + organization: Organization; + installation?: InstallationLite; +} +export interface TeamEditedEvent { + action: "edited"; + /** + * The changes to the team if the action was `edited`. + */ + changes: { + description?: { + /** + * The previous version of the description if the action was `edited`. + */ + from: string; + }; + name?: { + /** + * The previous version of the name if the action was `edited`. + */ + from: string; + }; + privacy?: { + /** + * The previous version of the team's privacy if the action was `edited`. + */ + from: string; + }; + repository?: { + permissions: { + from: { + /** + * The previous version of the team member's `admin` permission on a repository, if the action was `edited`. + */ + admin?: boolean; + /** + * The previous version of the team member's `pull` permission on a repository, if the action was `edited`. + */ + pull?: boolean; + /** + * The previous version of the team member's `push` permission on a repository, if the action was `edited`. 
+ */ + push?: boolean; + }; + }; + }; + }; + team: Team; + repository?: Repository; + sender: User; + organization: Organization; + installation?: InstallationLite; +} +export interface TeamRemovedFromRepositoryEvent { + action: "removed_from_repository"; + team: Team; + repository?: Repository; + sender: User; + organization: Organization; + installation?: InstallationLite; +} +export interface TeamAddEvent { + team: Team; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization: Organization; +} +export interface WatchStartedEvent { + action: "started"; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; +} +export interface WorkflowDispatchEvent { + /** + * Inputs to the workflow. Each key represents the name of the input while it's value represents the value of that input. + */ + inputs: { + [k: string]: unknown; + } | null; + /** + * The branch ref from which the workflow was run. + */ + ref: string; + repository: Repository; + sender: User; + installation?: InstallationLite; + organization?: Organization; + /** + * Relative path to the workflow file which contains the workflow. + */ + workflow: string; +} +export interface WorkflowJobCompletedEvent { + action: "completed"; + organization?: Organization; + installation?: InstallationLite; + repository: Repository; + sender: User; + workflow_job: WorkflowJob & { + conclusion: "success" | "failure" | "cancelled" | "skipped"; + }; +} +/** + * The workflow job. Many `workflow_job` keys, such as `head_sha`, `conclusion`, and `started_at` are the same as those in a [`check_run`](#check_run) object. + */ +export interface WorkflowJob { + id: number; + run_id: number; + run_attempt: number; + run_url: string; + head_sha: string; + node_id: string; + name: string; + check_run_url: string; + html_url: string; + url: string; + /** + * The current status of the job. Can be `queued`, `in_progress`, or `completed`. + */ + status: "queued" | "in_progress" | "completed"; + steps: WorkflowStep[]; + conclusion: "success" | "failure" | "cancelled" | "skipped" | null; + /** + * Custom labels for the job. Specified by the [`"runs-on"` attribute](https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#jobsjob_idruns-on) in the workflow YAML. + */ + labels: string[]; + /** + * The ID of the runner that is running this job. This will be `null` as long as `workflow_job[status]` is `queued`. + */ + runner_id: number | null; + /** + * The name of the runner that is running this job. This will be `null` as long as `workflow_job[status]` is `queued`. + */ + runner_name: string | null; + /** + * The ID of the runner group that is running this job. This will be `null` as long as `workflow_job[status]` is `queued`. + */ + runner_group_id: number | null; + /** + * The name of the runner group that is running this job. This will be `null` as long as `workflow_job[status]` is `queued`. 
+ */ + runner_group_name: string | null; + started_at: string; + completed_at: string | null; +} +export interface WorkflowStepInProgress { + name: string; + status: "in_progress"; + conclusion: null; + number: number; + started_at: string; + completed_at: null; +} +export interface WorkflowStepCompleted { + name: string; + status: "completed"; + conclusion: "failure" | "skipped" | "success"; + number: number; + started_at: string; + completed_at: string; +} +export interface WorkflowJobInProgressEvent { + action: "in_progress"; + organization?: Organization; + installation?: InstallationLite; + repository: Repository; + sender: User; + workflow_job: WorkflowJob & { + status: "in_progress"; + }; +} +export interface WorkflowJobQueuedEvent { + action: "queued"; + organization?: Organization; + installation?: InstallationLite; + repository: Repository; + sender: User; + workflow_job: WorkflowJob & { + status: "queued"; + }; +} +export interface WorkflowRunCompletedEvent { + action: "completed"; + organization?: Organization; + repository: Repository; + sender: User; + workflow: Workflow; + workflow_run: WorkflowRun & { + conclusion: + | "success" + | "failure" + | "neutral" + | "cancelled" + | "timed_out" + | "action_required" + | "stale" + | "skipped"; + }; + installation?: InstallationLite; +} +export interface WorkflowRun { + /** + * The URL to the artifacts for the workflow run. + */ + artifacts_url: string; + /** + * The URL to cancel the workflow run. + */ + cancel_url: string; + /** + * The URL to the associated check suite. + */ + check_suite_url: string; + /** + * The ID of the associated check suite. + */ + check_suite_id: number; + /** + * The node ID of the associated check suite. + */ + check_suite_node_id: string; + conclusion: + | "success" + | "failure" + | "neutral" + | "cancelled" + | "timed_out" + | "action_required" + | "stale" + | "skipped" + | null; + created_at: string; + event: string; + head_branch: string; + head_commit: SimpleCommit; + head_repository: RepositoryLite; + /** + * The SHA of the head commit that points to the version of the workflow being run. + */ + head_sha: string; + /** + * The full path of the workflow + */ + path: string; + display_title: string; + html_url: string; + /** + * The ID of the workflow run. + */ + id: number; + /** + * The URL to the jobs for the workflow run. + */ + jobs_url: string; + /** + * The URL to download the logs for the workflow run. + */ + logs_url: string; + node_id: string; + /** + * The name of the workflow run. + */ + name: string; + pull_requests: { + url: string; + id: number; + number: number; + head: { + ref: string; + sha: string; + repo: RepoRef; + }; + base: { + ref: string; + sha: string; + repo: RepoRef; + }; + }[]; + repository: RepositoryLite; + /** + * The URL to rerun the workflow run. + */ + rerun_url: string; + /** + * The auto incrementing run number for the workflow run. + */ + run_number: number; + status: "requested" | "in_progress" | "completed" | "queued" | "waiting"; + updated_at: string; + /** + * The URL to the workflow run. + */ + url: string; + /** + * The ID of the parent workflow. + */ + workflow_id: number; + /** + * The URL to the workflow. + */ + workflow_url: string; + /** + * Attempt number of the run, 1 for first attempt and higher if the workflow was re-run. + */ + run_attempt: number; + referenced_workflows?: ReferencedWorkflow[]; + /** + * The start time of the latest run. Resets on re-run. 
+ */ + run_started_at: string; + /** + * The URL to the previous attempted run of this workflow, if one exists. + */ + previous_attempt_url: string | null; + actor: User; + triggering_actor: User; +} +export interface RepositoryLite { + /** + * A template for the API URL to download the repository as an archive. + */ + archive_url: string; + /** + * A template for the API URL to list the available assignees for issues in the repository. + */ + assignees_url: string; + /** + * A template for the API URL to create or retrieve a raw Git blob in the repository. + */ + blobs_url: string; + /** + * A template for the API URL to get information about branches in the repository. + */ + branches_url: string; + /** + * A template for the API URL to get information about collaborators of the repository. + */ + collaborators_url: string; + /** + * A template for the API URL to get information about comments on the repository. + */ + comments_url: string; + /** + * A template for the API URL to get information about commits on the repository. + */ + commits_url: string; + /** + * A template for the API URL to compare two commits or refs. + */ + compare_url: string; + /** + * A template for the API URL to get the contents of the repository. + */ + contents_url: string; + /** + * A template for the API URL to list the contributors to the repository. + */ + contributors_url: string; + /** + * The API URL to list the deployments of the repository. + */ + deployments_url: string; + /** + * The repository description. + */ + description: string | null; + /** + * The API URL to list the downloads on the repository. + */ + downloads_url: string; + /** + * The API URL to list the events of the repository. + */ + events_url: string; + /** + * Whether the repository is a fork. + */ + fork: boolean; + /** + * The API URL to list the forks of the repository. + */ + forks_url: string; + /** + * The full, globally unique, name of the repository. + */ + full_name: string; + /** + * A template for the API URL to get information about Git commits of the repository. + */ + git_commits_url: string; + /** + * A template for the API URL to get information about Git refs of the repository. + */ + git_refs_url: string; + /** + * A template for the API URL to get information about Git tags of the repository. + */ + git_tags_url: string; + /** + * The API URL to list the hooks on the repository. + */ + hooks_url: string; + /** + * The URL to view the repository on GitHub.com. + */ + html_url: string; + /** + * Unique identifier of the repository + */ + id: number; + /** + * A template for the API URL to get information about issue comments on the repository. + */ + issue_comment_url: string; + /** + * A template for the API URL to get information about issue events on the repository. + */ + issue_events_url: string; + /** + * A template for the API URL to get information about issues on the repository. + */ + issues_url: string; + /** + * A template for the API URL to get information about deploy keys on the repository. + */ + keys_url: string; + /** + * A template for the API URL to get information about labels of the repository. + */ + labels_url: string; + /** + * The API URL to get information about the languages of the repository. + */ + languages_url: string; + /** + * The API URL to merge branches in the repository. + */ + merges_url: string; + /** + * A template for the API URL to get information about milestones of the repository. + */ + milestones_url: string; + /** + * The name of the repository. 
+ */ + name: string; + /** + * The GraphQL identifier of the repository. + */ + node_id: string; + /** + * A template for the API URL to get information about notifications on the repository. + */ + notifications_url: string; + owner: User; + /** + * Whether the repository is private or public. + */ + private: boolean; + /** + * A template for the API URL to get information about pull requests on the repository. + */ + pulls_url: string; + /** + * A template for the API URL to get information about releases on the repository. + */ + releases_url: string; + /** + * The API URL to list the stargazers on the repository. + */ + stargazers_url: string; + /** + * A template for the API URL to get information about statuses of a commit. + */ + statuses_url: string; + /** + * The API URL to list the subscribers on the repository. + */ + subscribers_url: string; + /** + * The API URL to subscribe to notifications for this repository. + */ + subscription_url: string; + /** + * The API URL to get information about tags on the repository. + */ + tags_url: string; + /** + * The API URL to list the teams on the repository. + */ + teams_url: string; + /** + * A template for the API URL to create or retrieve a raw Git tree of the repository. + */ + trees_url: string; + /** + * The URL to get more information about the repository from the GitHub API. + */ + url: string; +} +export interface WorkflowRunInProgressEvent { + action: "in_progress"; + organization?: Organization; + repository: Repository; + sender: User; + workflow: Workflow; + workflow_run: WorkflowRun; + installation?: InstallationLite; +} +export interface WorkflowRunRequestedEvent { + action: "requested"; + organization?: Organization; + repository: Repository; + sender: User; + workflow: Workflow; + workflow_run: WorkflowRun; + installation?: InstallationLite; +} + +export interface EventPayloadMap { + branch_protection_rule: BranchProtectionRuleEvent; + check_run: CheckRunEvent; + check_suite: CheckSuiteEvent; + code_scanning_alert: CodeScanningAlertEvent; + commit_comment: CommitCommentEvent; + create: CreateEvent; + delete: DeleteEvent; + dependabot_alert: DependabotAlertEvent; + deploy_key: DeployKeyEvent; + deployment: DeploymentEvent; + deployment_status: DeploymentStatusEvent; + discussion: DiscussionEvent; + discussion_comment: DiscussionCommentEvent; + fork: ForkEvent; + github_app_authorization: GithubAppAuthorizationEvent; + gollum: GollumEvent; + installation: InstallationEvent; + installation_repositories: InstallationRepositoriesEvent; + issue_comment: IssueCommentEvent; + issues: IssuesEvent; + label: LabelEvent; + marketplace_purchase: MarketplacePurchaseEvent; + member: MemberEvent; + membership: MembershipEvent; + merge_group: MergeGroupEvent; + meta: MetaEvent; + milestone: MilestoneEvent; + org_block: OrgBlockEvent; + organization: OrganizationEvent; + package: PackageEvent; + page_build: PageBuildEvent; + ping: PingEvent; + project: ProjectEvent; + project_card: ProjectCardEvent; + project_column: ProjectColumnEvent; + projects_v2_item: ProjectsV2ItemEvent; + public: PublicEvent; + pull_request: PullRequestEvent; + pull_request_review: PullRequestReviewEvent; + pull_request_review_comment: PullRequestReviewCommentEvent; + pull_request_review_thread: PullRequestReviewThreadEvent; + push: PushEvent; + release: ReleaseEvent; + repository: RepositoryEvent; + repository_dispatch: RepositoryDispatchEvent; + repository_import: RepositoryImportEvent; + repository_vulnerability_alert: RepositoryVulnerabilityAlertEvent; + 
secret_scanning_alert: SecretScanningAlertEvent; + security_advisory: SecurityAdvisoryEvent; + sponsorship: SponsorshipEvent; + star: StarEvent; + status: StatusEvent; + team: TeamEvent; + team_add: TeamAddEvent; + watch: WatchEvent; + workflow_dispatch: WorkflowDispatchEvent; + workflow_job: WorkflowJobEvent; + workflow_run: WorkflowRunEvent; +} + +export type Asset = ReleaseAsset; +export type WebhookEvent = Schema; +export type WebhookEventMap = EventPayloadMap; +export type WebhookEventName = keyof EventPayloadMap; diff --git a/node_modules/@octokit/webhooks/LICENSE.md b/node_modules/@octokit/webhooks/LICENSE.md new file mode 100644 index 0000000..4b464e5 --- /dev/null +++ b/node_modules/@octokit/webhooks/LICENSE.md @@ -0,0 +1,20 @@ +Copyright (c) 2017 Gregor Martynus + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/@octokit/webhooks/README.md b/node_modules/@octokit/webhooks/README.md new file mode 100644 index 0000000..2d8ecb8 --- /dev/null +++ b/node_modules/@octokit/webhooks/README.md @@ -0,0 +1,731 @@ +# @octokit/webhooks + +> GitHub webhook events toolset for Node.js + +[![@latest](https://img.shields.io/npm/v/@octokit/webhooks.svg)](https://www.npmjs.com/package/@octokit/webhooks) +[![Test](https://github.com/octokit/webhooks.js/workflows/Test/badge.svg)](https://github.com/octokit/webhooks.js/actions?query=workflow) + + + +- [Usage](#usage) +- [Local development](#local-development) +- [API](#api) + - [Constructor](#constructor) + - [webhooks.sign()](#webhookssign) + - [webhooks.verify()](#webhooksverify) + - [webhooks.verifyAndReceive()](#webhooksverifyandreceive) + - [webhooks.receive()](#webhooksreceive) + - [webhooks.on()](#webhookson) + - [webhooks.onAny()](#webhooksonany) + - [webhooks.onError()](#webhooksonerror) + - [webhooks.removeListener()](#webhooksremovelistener) + - [createNodeMiddleware()](#createnodemiddleware) + - [Webhook events](#webhook-events) + - [emitterEventNames](#emittereventnames) +- [TypeScript](#typescript) + - [`EmitterWebhookEventName`](#emitterwebhookeventname) + - [`EmitterWebhookEvent`](#emitterwebhookevent) +- [License](#license) + + + +`@octokit/webhooks` helps to handle webhook events received from GitHub. + +[GitHub webhooks](https://docs.github.com/webhooks/) can be registered in multiple ways + +1. In repository or organization settings on [github.com](https://github.com/). +2. 
Using the REST API for [repositories](https://docs.github.com/rest/reference/repos#webhooks) or [organizations](https://docs.github.com/rest/reference/orgs#webhooks/) +3. By [creating a GitHub App](https://docs.github.com/developers/apps/creating-a-github-app). + +Note that while setting a secret is optional on GitHub, it is required to be set in order to use `@octokit/webhooks`. Content Type must be set to `application/json`, `application/x-www-form-urlencoded` is not supported. + +## Usage + +```js +// install with: npm install @octokit/webhooks +const { Webhooks, createNodeMiddleware } = require("@octokit/webhooks"); +const webhooks = new Webhooks({ + secret: "mysecret", +}); + +webhooks.onAny(({ id, name, payload }) => { + console.log(name, "event received"); +}); + +require("http").createServer(createNodeMiddleware(webhooks)).listen(3000); +// can now receive webhook events at /api/github/webhooks +``` + +## Local development + +You can receive webhooks on your local machine or even browser using [EventSource](https://developer.mozilla.org/en-US/docs/Web/API/EventSource) and [smee.io](https://smee.io/). + +Go to [smee.io](https://smee.io/) and Start a new channel. Then copy the "Webhook Proxy URL" and + +1. enter it in the GitHub App’s "Webhook URL" input +2. pass it to the [EventSource](https://github.com/EventSource/eventsource) constructor, see below + +```js +const webhookProxyUrl = "https://smee.io/IrqK0nopGAOc847"; // replace with your own Webhook Proxy URL +const source = new EventSource(webhookProxyUrl); +source.onmessage = (event) => { + const webhookEvent = JSON.parse(event.data); + webhooks + .verifyAndReceive({ + id: webhookEvent["x-request-id"], + name: webhookEvent["x-github-event"], + signature: webhookEvent["x-hub-signature"], + payload: webhookEvent.body, + }) + .catch(console.error); +}; +``` + +`EventSource` is a native browser API and can be polyfilled for browsers that don’t support it. In node, you can use the [`eventsource`](https://github.com/EventSource/eventsource) package: install with `npm install eventsource`, then `const EventSource = require('eventsource')` + +## API + +1. [Constructor](#constructor) +2. [webhooks.sign()](#webhookssign) +3. [webhooks.verify()](#webhooksverify) +4. [webhooks.verifyAndReceive()](#webhooksverifyandreceive) +5. [webhooks.receive()](#webhooksreceive) +6. [webhooks.on()](#webhookson) +7. [webhooks.onAny()](#webhooksonany) +8. [webhooks.onError()](#webhooksonerror) +9. [webhooks.removeListener()](#webhooksremovelistener) +10. [createNodeMiddleware()](#createnodemiddleware) +11. [Webhook events](#webhook-events) +12. [emitterEventNames](#emittereventnames) + +### Constructor + +```js +new Webhooks({ secret /*, transform */ }); +``` + + + + + + + + + + + + + + + + +
+| Option | Type | Description |
+| ------ | ---- | ----------- |
+| `secret` | `String` | **Required.** Secret as configured in GitHub Settings. |
+| `transform` | `Function` | Only relevant for `webhooks.on`. Transform emitted event before calling handlers. Can be asynchronous. |
+| `log` | `object` | Used for internal logging. Defaults to [`console`](https://developer.mozilla.org/en-US/docs/Web/API/console) with `debug` and `info` doing nothing. |
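+
+For illustration only (a minimal sketch; the secret value is a placeholder and passing `console` as the logger is just one possible choice), the optional `transform` and `log` settings from the table above can be wired up like this:
+
+```js
+const { Webhooks } = require("@octokit/webhooks");
+
+const webhooks = new Webhooks({
+  secret: "mysecret", // placeholder secret
+  // Attach extra context to every event before handlers run (may be async).
+  transform: (event) => ({ ...event, receivedAt: new Date().toISOString() }),
+  // Surface debug/info output instead of the silent default.
+  log: console,
+});
+```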
+ +Returns the `webhooks` API. + +### webhooks.sign() + +```js +webhooks.sign(eventPayload); +``` + + + + + + + + +
+| Parameter | Type | Description |
+| --------- | ---- | ----------- |
+| `eventPayload` | `Object` | **Required.** Webhook request payload as received from GitHub. |
+ +Returns a `signature` string. Throws error if `eventPayload` is not passed. + +The `sign` method can be imported as static method from [`@octokit/webhooks-methods`](https://github.com/octokit/webhooks-methods.js/#readme). + +### webhooks.verify() + +```js +webhooks.verify(eventPayload, signature); +``` + + + + + + + + + + + + +
+| Parameter | Type | Description |
+| --------- | ---- | ----------- |
+| `eventPayload` | `Object` or `String` | **Required.** Webhook event request payload as received from GitHub. |
+| `signature` | `String` | **Required.** Signature string as calculated by [`webhooks.sign()`](#webhookssign). |
+ +Returns `true` or `false`. Throws error if `eventPayload` or `signature` not passed. + +The `verify` method can be imported as static method from [`@octokit/webhooks-methods`](https://github.com/octokit/webhooks-methods.js/#readme). + +### webhooks.verifyAndReceive() + +```js +webhooks.verifyAndReceive({ id, name, payload, signature }); +``` + + + + + + + + + + + + + + + + + + + + +
+| Parameter | Type | Description |
+| --------- | ---- | ----------- |
+| `id` | `String` | Unique webhook event request id. |
+| `name` | `String` | **Required.** Name of the event. (Event names are set as `X-GitHub-Event` header in the webhook event request.) |
+| `payload` | `Object` or `String` | **Required.** Webhook event request payload as received from GitHub. |
+| `signature` | `String` | **Required.** Signature string as calculated by [`webhooks.sign()`](#webhookssign). |
+
+Returns a promise.
+
+Verifies event using [webhooks.verify()](#webhooksverify), then handles the event using [webhooks.receive()](#webhooksreceive).
+
+Additionally, if verification fails, rejects the returned promise and emits an `error` event.
+
+Example
+
+```js
+const { Webhooks } = require("@octokit/webhooks");
+const webhooks = new Webhooks({
+  secret: "mysecret",
+});
+webhooks.onError(handleSignatureVerificationError);
+
+// put this inside your webhooks route handler
+webhooks
+  .verifyAndReceive({
+    id: request.headers["x-github-delivery"],
+    name: request.headers["x-github-event"],
+    payload: request.body,
+    signature: request.headers["x-hub-signature-256"],
+  })
+  .catch(handleErrorsFromHooks);
+```
+
+### webhooks.receive()
+
+```js
+webhooks.receive({ id, name, payload });
+```
+
+ + id + + + String + + + Unique webhook event request id +
+ + name + + + String + + + Required. + Name of the event. (Event names are set as X-GitHub-Event header + in the webhook event request.) +
+ + payload + + + Object + + + Required. + Webhook event request payload as received from GitHub. +
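A sketch of replaying a stored delivery without signature verification; the payload shown is a minimal stand-in, real payloads follow GitHub's event schema:

```js
// inside an async function
await webhooks.receive({
  id: "123e4567-e89b-12d3-a456-426614174000",
  name: "push",
  payload: { ref: "refs/heads/main", commits: [] },
});
```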
+ +Returns a promise. Runs all handlers set with [`webhooks.on()`](#webhookson) in parallel and waits for them to finish. If one of the handlers rejects or throws an error, then `webhooks.receive()` rejects. The returned error has an `.errors` property which holds an array of all errors caught from the handlers. If no errors occur, `webhooks.receive()` resolves without passing any value. + +The `.receive()` method belongs to the `event-handler` module which can be used [standalone](src/event-handler/). + +### webhooks.on() + +```js +webhooks.on(eventName, handler); +webhooks.on(eventNames, handler); +``` + + + + + + + + + + + + + + + + +
+ + eventName + + + String + + + Required. + Name of the event. One of GitHub's supported event names, or (if the event has an action property) the name of an event followed by its action in the form of <event>.<action>. +
+ + eventNames + + + Array + + + Required. + Array of event names. +
handler (Function) Required. Method to be run each time the event with the passed name is received. The handler function can be an async function, throw an error, or return a Promise. The handler is called with an event object: {id, name, payload}.
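Both registration forms in a short sketch (the handler bodies are illustrative only):

```js
webhooks.on("issues.opened", ({ id, name, payload }) => {
  console.log(`${name} (${id}): issue #${payload.issue.number} opened`);
});

webhooks.on(["pull_request.opened", "pull_request.reopened"], async ({ payload }) => {
  // async handlers are awaited; a rejected promise surfaces via onError()
  console.log(payload.pull_request.title);
});
```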
+ +The `.on()` method belongs to the `event-handler` module which can be used [standalone](src/event-handler/). + +### webhooks.onAny() + +```js +webhooks.onAny(handler); +``` + + + + + + + + +
handler (Function) Required. Method to be run each time any event is received. The handler function can be an async function, throw an error, or return a Promise. The handler is called with an event object: {id, name, payload}.
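For example, a catch-all logger (sketch):

```js
webhooks.onAny(({ id, name }) => {
  console.log(`delivery ${id}: ${name} event received`);
});
```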
The `.onAny()` method belongs to the `event-handler` module which can be used [standalone](src/event-handler/).

### webhooks.onError()

```js
webhooks.onError(handler);
```

If a webhook event handler throws an error or returns a promise that rejects, an error event is triggered. You can use this handler for logging or reporting events. The passed error object has a `.event` property which has all information on the event.

Asynchronous `error` event handlers do not block the `.receive()` method from completing.
handler (Function) Required. Method to be run each time a webhook event handler throws an error or returns a promise that rejects. The handler function can be an async function or return a Promise. The handler is called with an error object that has a `.event` property which has all the information on the event: {id, name, payload}.
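A sketch of an error reporter; `error.event` carries the original `{ id, name, payload }`:

```js
webhooks.onError((error) => {
  console.error(`handler failed for "${error.event.name}" (delivery ${error.event.id})`);
  console.error(error);
});
```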
+ +The `.onError()` method belongs to the `event-handler` module which can be used [standalone](src/event-handler/). + +### webhooks.removeListener() + +```js +webhooks.removeListener(eventName, handler); +webhooks.removeListener(eventNames, handler); +``` + + + + + + + + + + + + + + + + +
+ + eventName + + + String + + + Required. + Name of the event. One of GitHub's supported event names, or (if the event has an action property) the name of an event followed by its action in the form of <event>.<action>, or '*' for the onAny() method or 'error' for the onError() method. +
+ + eventNames + + + Array + + + Required. + Array of event names. +
+ + handler + + + Function + + + Required. + Method which was previously passed to webhooks.on(). If the same handler was registered multiple times for the same event, only the most recent handler gets removed. +
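For example (a sketch): the same function reference must be used for registration and removal.

```js
function onPush({ payload }) {
  console.log(`push to ${payload.ref}`);
}

webhooks.on("push", onPush);
// later, when the handler is no longer needed
webhooks.removeListener("push", onPush);
```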
The `.removeListener()` method belongs to the `event-handler` module which can be used [standalone](src/event-handler/).

### createNodeMiddleware()

```js
const { createServer } = require("http");
const { Webhooks, createNodeMiddleware } = require("@octokit/webhooks");

const webhooks = new Webhooks({
  secret: "mysecret",
});

const middleware = createNodeMiddleware(webhooks, { path: "/" });

createServer(middleware).listen(3000);
// can now receive webhook events at POST /
```
+ webhooks + + Webhooks instance + + + Required. +
+ path + + string + + + Custom path to match requests against. Defaults to /api/github/webhooks. +
+ log + + object + + + +Used for internal logging. Defaults to [`console`](https://developer.mozilla.org/en-US/docs/Web/API/console) with `debug` and `info` doing nothing. + +
onUnhandledRequest (function) Defaults to the handler shipped in dist-node/index.js (added later in this patch), which rejects unmatched requests with a 404:

```js
function onUnhandledRequestDefault(request, response) {
  response.writeHead(404, {
    "content-type": "application/json",
  });
  response.end(
    JSON.stringify({
      error: `Unknown route: ${request.method} ${request.url}`,
    })
  );
}
```
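A sketch combining the documented options; the path and response text are illustrative choices, not defaults:

```js
const middleware = createNodeMiddleware(webhooks, {
  path: "/github/events",
  onUnhandledRequest: (request, response) => {
    response.writeHead(404, { "content-type": "text/plain" });
    response.end("not a webhook route\n");
  },
});

require("http").createServer(middleware).listen(3000);
// deliveries are now accepted at POST /github/events
```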
### Webhook events

See the full list of [event types with example payloads](https://docs.github.com/developers/webhooks-and-events/webhook-events-and-payloads/).

If there are actions for a webhook, events are emitted both for the webhook name and for the combination of the webhook name and the action, e.g. `installation` and `installation.created`.

| Event | Actions |
| ----- | ------- |
| [`branch_protection_rule`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#branch_protection_rule) | `created`<br>`deleted`<br>`edited` |
| [`check_run`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#check_run) | `completed`<br>`created`<br>`requested_action`<br>`rerequested` |
| [`check_suite`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#check_suite) | `completed`<br>`requested`<br>`rerequested` |
| [`code_scanning_alert`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#code_scanning_alert) | `appeared_in_branch`<br>`closed_by_user`<br>`created`<br>`fixed`<br>`reopened`<br>`reopened_by_user` |
| [`commit_comment`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#commit_comment) | `created` |
| [`create`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#create) | |
| [`delete`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#delete) | |
| [`dependabot_alert`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#dependabot_alert) | `created`<br>`dismissed`<br>`fixed`<br>`reintroduced`<br>`reopened` |
| [`deploy_key`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#deploy_key) | `created`<br>`deleted` |
| [`deployment`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#deployment) | `created` |
| [`deployment_status`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#deployment_status) | `created` |
| [`discussion`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#discussion) | `answered`<br>`category_changed`<br>`created`<br>`deleted`<br>`edited`<br>`labeled`<br>`locked`<br>`pinned`<br>`transferred`<br>`unanswered`<br>`unlabeled`<br>`unlocked`<br>`unpinned` |
| [`discussion_comment`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#discussion_comment) | `created`<br>`deleted`<br>`edited` |
| [`fork`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#fork) | |
| [`github_app_authorization`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#github_app_authorization) | `revoked` |
| [`gollum`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#gollum) | |
| [`installation`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#installation) | `created`<br>`deleted`<br>`new_permissions_accepted`<br>`suspend`<br>`unsuspend` |
| [`installation_repositories`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#installation_repositories) | `added`<br>`removed` |
| [`issue_comment`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#issue_comment) | `created`<br>`deleted`<br>`edited` |
| [`issues`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#issues) | `assigned`<br>`closed`<br>`deleted`<br>`demilestoned`<br>`edited`<br>`labeled`<br>`locked`<br>`milestoned`<br>`opened`<br>`pinned`<br>`reopened`<br>`transferred`<br>`unassigned`<br>`unlabeled`<br>`unlocked`<br>`unpinned` |
| [`label`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#label) | `created`<br>`deleted`<br>`edited` |
| [`marketplace_purchase`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#marketplace_purchase) | `cancelled`<br>`changed`<br>`pending_change`<br>`pending_change_cancelled`<br>`purchased` |
| [`member`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#member) | `added`<br>`edited`<br>`removed` |
| [`membership`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#membership) | `added`<br>`removed` |
| [`merge_group`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#merge_group) | `checks_requested` |
| [`meta`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#meta) | `deleted` |
| [`milestone`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#milestone) | `closed`<br>`created`<br>`deleted`<br>`edited`<br>`opened` |
| [`org_block`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#org_block) | `blocked`<br>`unblocked` |
| [`organization`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#organization) | `deleted`<br>`member_added`<br>`member_invited`<br>`member_removed`<br>`renamed` |
| [`package`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#package) | `published`<br>`updated` |
| [`page_build`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#page_build) | |
| [`ping`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#ping) | |
| [`project`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#project) | `closed`<br>`created`<br>`deleted`<br>`edited`<br>`reopened` |
| [`project_card`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#project_card) | `converted`<br>`created`<br>`deleted`<br>`edited`<br>`moved` |
| [`project_column`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#project_column) | `created`<br>`deleted`<br>`edited`<br>`moved` |
| [`projects_v2_item`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#projects_v2_item) | `archived`<br>`converted`<br>`created`<br>`deleted`<br>`edited`<br>`reordered`<br>`restored` |
| [`public`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#public) | |
| [`pull_request`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#pull_request) | `assigned`<br>`auto_merge_disabled`<br>`auto_merge_enabled`<br>`closed`<br>`converted_to_draft`<br>`dequeued`<br>`edited`<br>`labeled`<br>`locked`<br>`opened`<br>`queued`<br>`ready_for_review`<br>`reopened`<br>`review_request_removed`<br>`review_requested`<br>`synchronize`<br>`unassigned`<br>`unlabeled`<br>`unlocked` |
| [`pull_request_review`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#pull_request_review) | `dismissed`<br>`edited`<br>`submitted` |
| [`pull_request_review_comment`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#pull_request_review_comment) | `created`<br>`deleted`<br>`edited` |
| [`pull_request_review_thread`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#pull_request_review_thread) | `resolved`<br>`unresolved` |
| [`push`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#push) | |
| [`release`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#release) | `created`<br>`deleted`<br>`edited`<br>`prereleased`<br>`published`<br>`released`<br>`unpublished` |
| [`repository`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#repository) | `archived`<br>`created`<br>`deleted`<br>`edited`<br>`privatized`<br>`publicized`<br>`renamed`<br>`transferred`<br>`unarchived` |
| [`repository_dispatch`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#repository_dispatch) | |
| [`repository_import`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#repository_import) | |
| [`repository_vulnerability_alert`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#repository_vulnerability_alert) | `create`<br>`dismiss`<br>`reopen`<br>`resolve` |
| [`secret_scanning_alert`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#secret_scanning_alert) | `created`<br>`reopened`<br>`resolved` |
| [`security_advisory`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#security_advisory) | `performed`<br>`published`<br>`updated`<br>`withdrawn` |
| [`sponsorship`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#sponsorship) | `cancelled`<br>`created`<br>`edited`<br>`pending_cancellation`<br>`pending_tier_change`<br>`tier_changed` |
| [`star`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#star) | `created`<br>`deleted` |
| [`status`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#status) | |
| [`team`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#team) | `added_to_repository`<br>`created`<br>`deleted`<br>`edited`<br>`removed_from_repository` |
| [`team_add`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#team_add) | |
| [`watch`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#watch) | `started` |
| [`workflow_dispatch`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#workflow_dispatch) | |
| [`workflow_job`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#workflow_job) | `completed`<br>`in_progress`<br>`queued` |
| [`workflow_run`](https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#workflow_run) | `completed`<br>`in_progress`<br>`requested` |

### emitterEventNames

A read-only tuple containing all the possible combinations of the webhook events + actions listed above. This might be useful for GUIs and input validation.

```js
import { emitterEventNames } from "@octokit/webhooks";
emitterEventNames; // ["check_run", "check_run.completed", ...]
```

## TypeScript

The types for the webhook payloads are sourced from [`@octokit/webhooks-types`](https://github.com/octokit/webhooks/tree/master/payload-types), which can be used by themselves.

In addition to these types, `@octokit/webhooks` exports 2 types specific to itself:

Note that changes to the exported types are not considered breaking changes, as the changes will not impact production code, but only fail locally or during CI at build time.

### `EmitterWebhookEventName`

A union of all possible events and event/action combinations supported by the event emitter, e.g. `"check_run" | "check_run.completed" | ... many more ... | "workflow_run.requested"`.

### `EmitterWebhookEvent`

The object that is emitted by `@octokit/webhooks` as an event; made up of `id`, `name`, and `payload` properties.
An optional generic parameter can be passed to narrow the type of the `name` and `payload` properties based on event names or event/action combinations, e.g. `EmitterWebhookEvent<"check_run" | "code_scanning_alert.fixed">`.

## License

[MIT](LICENSE.md)
diff --git a/node_modules/@octokit/webhooks/dist-node/index.js b/node_modules/@octokit/webhooks/dist-node/index.js
new file mode 100644
index 0000000..d98b899
--- /dev/null
+++ b/node_modules/@octokit/webhooks/dist-node/index.js
@@ -0,0 +1,374 @@
+'use strict';
+
+Object.defineProperty(exports, '__esModule', { value: true });
+
+function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; } + +var AggregateError = _interopDefault(require('aggregate-error')); +var webhooksMethods = require('@octokit/webhooks-methods'); + +const createLogger = logger => ({ + debug: () => {}, + info: () => {}, + warn: console.warn.bind(console), + error: console.error.bind(console), + ...logger +}); + +// THIS FILE IS GENERATED - DO NOT EDIT DIRECTLY +// make edits in scripts/generate-types.ts +const emitterEventNames = ["branch_protection_rule", "branch_protection_rule.created", "branch_protection_rule.deleted", "branch_protection_rule.edited", "check_run", "check_run.completed", "check_run.created", "check_run.requested_action", "check_run.rerequested", "check_suite", "check_suite.completed", "check_suite.requested", "check_suite.rerequested", "code_scanning_alert", "code_scanning_alert.appeared_in_branch", "code_scanning_alert.closed_by_user", "code_scanning_alert.created", "code_scanning_alert.fixed", "code_scanning_alert.reopened", "code_scanning_alert.reopened_by_user", "commit_comment", "commit_comment.created", "create", "delete", "dependabot_alert", "dependabot_alert.created", "dependabot_alert.dismissed", "dependabot_alert.fixed", "dependabot_alert.reintroduced", "dependabot_alert.reopened", "deploy_key", "deploy_key.created", "deploy_key.deleted", "deployment", "deployment.created", "deployment_status", "deployment_status.created", "discussion", "discussion.answered", "discussion.category_changed", "discussion.created", "discussion.deleted", "discussion.edited", "discussion.labeled", "discussion.locked", "discussion.pinned", "discussion.transferred", "discussion.unanswered", "discussion.unlabeled", "discussion.unlocked", "discussion.unpinned", "discussion_comment", "discussion_comment.created", "discussion_comment.deleted", "discussion_comment.edited", "fork", "github_app_authorization", "github_app_authorization.revoked", "gollum", "installation", "installation.created", "installation.deleted", "installation.new_permissions_accepted", "installation.suspend", "installation.unsuspend", "installation_repositories", "installation_repositories.added", "installation_repositories.removed", "issue_comment", "issue_comment.created", "issue_comment.deleted", "issue_comment.edited", "issues", "issues.assigned", "issues.closed", "issues.deleted", "issues.demilestoned", "issues.edited", "issues.labeled", "issues.locked", "issues.milestoned", "issues.opened", "issues.pinned", "issues.reopened", "issues.transferred", "issues.unassigned", "issues.unlabeled", "issues.unlocked", "issues.unpinned", "label", "label.created", "label.deleted", "label.edited", "marketplace_purchase", "marketplace_purchase.cancelled", "marketplace_purchase.changed", "marketplace_purchase.pending_change", "marketplace_purchase.pending_change_cancelled", "marketplace_purchase.purchased", "member", "member.added", "member.edited", "member.removed", "membership", "membership.added", "membership.removed", "merge_group", "merge_group.checks_requested", "meta", "meta.deleted", "milestone", "milestone.closed", "milestone.created", "milestone.deleted", "milestone.edited", "milestone.opened", "org_block", "org_block.blocked", "org_block.unblocked", "organization", "organization.deleted", "organization.member_added", "organization.member_invited", "organization.member_removed", "organization.renamed", "package", "package.published", "package.updated", "page_build", "ping", "project", "project.closed", "project.created", "project.deleted", "project.edited", "project.reopened", "project_card", 
"project_card.converted", "project_card.created", "project_card.deleted", "project_card.edited", "project_card.moved", "project_column", "project_column.created", "project_column.deleted", "project_column.edited", "project_column.moved", "projects_v2_item", "projects_v2_item.archived", "projects_v2_item.converted", "projects_v2_item.created", "projects_v2_item.deleted", "projects_v2_item.edited", "projects_v2_item.reordered", "projects_v2_item.restored", "public", "pull_request", "pull_request.assigned", "pull_request.auto_merge_disabled", "pull_request.auto_merge_enabled", "pull_request.closed", "pull_request.converted_to_draft", "pull_request.dequeued", "pull_request.edited", "pull_request.labeled", "pull_request.locked", "pull_request.opened", "pull_request.queued", "pull_request.ready_for_review", "pull_request.reopened", "pull_request.review_request_removed", "pull_request.review_requested", "pull_request.synchronize", "pull_request.unassigned", "pull_request.unlabeled", "pull_request.unlocked", "pull_request_review", "pull_request_review.dismissed", "pull_request_review.edited", "pull_request_review.submitted", "pull_request_review_comment", "pull_request_review_comment.created", "pull_request_review_comment.deleted", "pull_request_review_comment.edited", "pull_request_review_thread", "pull_request_review_thread.resolved", "pull_request_review_thread.unresolved", "push", "release", "release.created", "release.deleted", "release.edited", "release.prereleased", "release.published", "release.released", "release.unpublished", "repository", "repository.archived", "repository.created", "repository.deleted", "repository.edited", "repository.privatized", "repository.publicized", "repository.renamed", "repository.transferred", "repository.unarchived", "repository_dispatch", "repository_import", "repository_vulnerability_alert", "repository_vulnerability_alert.create", "repository_vulnerability_alert.dismiss", "repository_vulnerability_alert.reopen", "repository_vulnerability_alert.resolve", "secret_scanning_alert", "secret_scanning_alert.created", "secret_scanning_alert.reopened", "secret_scanning_alert.resolved", "security_advisory", "security_advisory.performed", "security_advisory.published", "security_advisory.updated", "security_advisory.withdrawn", "sponsorship", "sponsorship.cancelled", "sponsorship.created", "sponsorship.edited", "sponsorship.pending_cancellation", "sponsorship.pending_tier_change", "sponsorship.tier_changed", "star", "star.created", "star.deleted", "status", "team", "team.added_to_repository", "team.created", "team.deleted", "team.edited", "team.removed_from_repository", "team_add", "watch", "watch.started", "workflow_dispatch", "workflow_job", "workflow_job.completed", "workflow_job.in_progress", "workflow_job.queued", "workflow_run", "workflow_run.completed", "workflow_run.in_progress", "workflow_run.requested"]; + +function handleEventHandlers(state, webhookName, handler) { + if (!state.hooks[webhookName]) { + state.hooks[webhookName] = []; + } + + state.hooks[webhookName].push(handler); +} + +function receiverOn(state, webhookNameOrNames, handler) { + if (Array.isArray(webhookNameOrNames)) { + webhookNameOrNames.forEach(webhookName => receiverOn(state, webhookName, handler)); + return; + } + + if (["*", "error"].includes(webhookNameOrNames)) { + const webhookName = webhookNameOrNames === "*" ? "any" : webhookNameOrNames; + const message = `Using the "${webhookNameOrNames}" event with the regular Webhooks.on() function is not supported. 
Please use the Webhooks.on${webhookName.charAt(0).toUpperCase() + webhookName.slice(1)}() method instead`; + throw new Error(message); + } + + if (!emitterEventNames.includes(webhookNameOrNames)) { + state.log.warn(`"${webhookNameOrNames}" is not a known webhook name (https://developer.github.com/v3/activity/events/types/)`); + } + + handleEventHandlers(state, webhookNameOrNames, handler); +} +function receiverOnAny(state, handler) { + handleEventHandlers(state, "*", handler); +} +function receiverOnError(state, handler) { + handleEventHandlers(state, "error", handler); +} + +// Errors thrown or rejected Promises in "error" event handlers are not handled +// as they are in the webhook event handlers. If errors occur, we log a +// "Fatal: Error occurred" message to stdout +function wrapErrorHandler(handler, error) { + let returnValue; + + try { + returnValue = handler(error); + } catch (error) { + console.log('FATAL: Error occurred in "error" event handler'); + console.log(error); + } + + if (returnValue && returnValue.catch) { + returnValue.catch(error => { + console.log('FATAL: Error occurred in "error" event handler'); + console.log(error); + }); + } +} + +// @ts-ignore to address #245 + +function getHooks(state, eventPayloadAction, eventName) { + const hooks = [state.hooks[eventName], state.hooks["*"]]; + + if (eventPayloadAction) { + hooks.unshift(state.hooks[`${eventName}.${eventPayloadAction}`]); + } + + return [].concat(...hooks.filter(Boolean)); +} // main handler function + + +function receiverHandle(state, event) { + const errorHandlers = state.hooks.error || []; + + if (event instanceof Error) { + const error = Object.assign(new AggregateError([event]), { + event, + errors: [event] + }); + errorHandlers.forEach(handler => wrapErrorHandler(handler, error)); + return Promise.reject(error); + } + + if (!event || !event.name) { + throw new AggregateError(["Event name not passed"]); + } + + if (!event.payload) { + throw new AggregateError(["Event payload not passed"]); + } // flatten arrays of event listeners and remove undefined values + + + const hooks = getHooks(state, "action" in event.payload ? 
event.payload.action : null, event.name); + + if (hooks.length === 0) { + return Promise.resolve(); + } + + const errors = []; + const promises = hooks.map(handler => { + let promise = Promise.resolve(event); + + if (state.transform) { + promise = promise.then(state.transform); + } + + return promise.then(event => { + return handler(event); + }).catch(error => errors.push(Object.assign(error, { + event + }))); + }); + return Promise.all(promises).then(() => { + if (errors.length === 0) { + return; + } + + const error = new AggregateError(errors); + Object.assign(error, { + event, + errors + }); + errorHandlers.forEach(handler => wrapErrorHandler(handler, error)); + throw error; + }); +} + +function removeListener(state, webhookNameOrNames, handler) { + if (Array.isArray(webhookNameOrNames)) { + webhookNameOrNames.forEach(webhookName => removeListener(state, webhookName, handler)); + return; + } + + if (!state.hooks[webhookNameOrNames]) { + return; + } // remove last hook that has been added, that way + // it behaves the same as removeListener + + + for (let i = state.hooks[webhookNameOrNames].length - 1; i >= 0; i--) { + if (state.hooks[webhookNameOrNames][i] === handler) { + state.hooks[webhookNameOrNames].splice(i, 1); + return; + } + } +} + +function createEventHandler(options) { + const state = { + hooks: {}, + log: createLogger(options && options.log) + }; + + if (options && options.transform) { + state.transform = options.transform; + } + + return { + on: receiverOn.bind(null, state), + onAny: receiverOnAny.bind(null, state), + onError: receiverOnError.bind(null, state), + removeListener: removeListener.bind(null, state), + receive: receiverHandle.bind(null, state) + }; +} + +/** + * GitHub sends its JSON with an indentation of 2 spaces and a line break at the end + */ +function toNormalizedJsonString(payload) { + const payloadString = JSON.stringify(payload); + return payloadString.replace(/[^\\]\\u[\da-f]{4}/g, s => { + return s.substr(0, 3) + s.substr(3).toUpperCase(); + }); +} + +async function sign(secret, payload) { + return webhooksMethods.sign(secret, typeof payload === "string" ? payload : toNormalizedJsonString(payload)); +} + +async function verify(secret, payload, signature) { + return webhooksMethods.verify(secret, typeof payload === "string" ? payload : toNormalizedJsonString(payload), signature); +} + +async function verifyAndReceive(state, event) { + // verify will validate that the secret is not undefined + const matchesSignature = await webhooksMethods.verify(state.secret, typeof event.payload === "object" ? toNormalizedJsonString(event.payload) : event.payload, event.signature); + + if (!matchesSignature) { + const error = new Error("[@octokit/webhooks] signature does not match event payload and secret"); + return state.eventHandler.receive(Object.assign(error, { + event, + status: 400 + })); + } + + return state.eventHandler.receive({ + id: event.id, + name: event.name, + payload: typeof event.payload === "string" ? 
JSON.parse(event.payload) : event.payload + }); +} + +const WEBHOOK_HEADERS = ["x-github-event", "x-hub-signature-256", "x-github-delivery"]; // https://docs.github.com/en/developers/webhooks-and-events/webhook-events-and-payloads#delivery-headers + +function getMissingHeaders(request) { + return WEBHOOK_HEADERS.filter(header => !(header in request.headers)); +} + +// @ts-ignore to address #245 +function getPayload(request) { + // If request.body already exists we can stop here + // See https://github.com/octokit/webhooks.js/pull/23 + if (request.body) return Promise.resolve(request.body); + return new Promise((resolve, reject) => { + let data = ""; + request.setEncoding("utf8"); // istanbul ignore next + + request.on("error", error => reject(new AggregateError([error]))); + request.on("data", chunk => data += chunk); + request.on("end", () => { + try { + resolve(JSON.parse(data)); + } catch (error) { + error.message = "Invalid JSON"; + error.status = 400; + reject(new AggregateError([error])); + } + }); + }); +} + +async function middleware(webhooks, options, request, response, next) { + let pathname; + + try { + pathname = new URL(request.url, "http://localhost").pathname; + } catch (error) { + response.writeHead(422, { + "content-type": "application/json" + }); + response.end(JSON.stringify({ + error: `Request URL could not be parsed: ${request.url}` + })); + return; + } + + const isUnknownRoute = request.method !== "POST" || pathname !== options.path; + const isExpressMiddleware = typeof next === "function"; + + if (isUnknownRoute) { + if (isExpressMiddleware) { + return next(); + } else { + return options.onUnhandledRequest(request, response); + } + } + + const missingHeaders = getMissingHeaders(request).join(", "); + + if (missingHeaders) { + response.writeHead(400, { + "content-type": "application/json" + }); + response.end(JSON.stringify({ + error: `Required headers missing: ${missingHeaders}` + })); + return; + } + + const eventName = request.headers["x-github-event"]; + const signatureSHA256 = request.headers["x-hub-signature-256"]; + const id = request.headers["x-github-delivery"]; + options.log.debug(`${eventName} event received (id: ${id})`); // GitHub will abort the request if it does not receive a response within 10s + // See https://github.com/octokit/webhooks.js/issues/185 + + let didTimeout = false; + const timeout = setTimeout(() => { + didTimeout = true; + response.statusCode = 202; + response.end("still processing\n"); + }, 9000).unref(); + + try { + const payload = await getPayload(request); + await webhooks.verifyAndReceive({ + id: id, + name: eventName, + payload: payload, + signature: signatureSHA256 + }); + clearTimeout(timeout); + if (didTimeout) return; + response.end("ok\n"); + } catch (error) { + clearTimeout(timeout); + if (didTimeout) return; + const err = Array.from(error)[0]; + const errorMessage = err.message ? `${err.name}: ${err.message}` : "Error: An Unspecified error occurred"; + response.statusCode = typeof err.status !== "undefined" ? 
err.status : 500; + options.log.error(error); + response.end(JSON.stringify({ + error: errorMessage + })); + } +} + +function onUnhandledRequestDefault(request, response) { + response.writeHead(404, { + "content-type": "application/json" + }); + response.end(JSON.stringify({ + error: `Unknown route: ${request.method} ${request.url}` + })); +} + +function createNodeMiddleware(webhooks, { + path = "/api/github/webhooks", + onUnhandledRequest = onUnhandledRequestDefault, + log = createLogger() +} = {}) { + return middleware.bind(null, webhooks, { + path, + onUnhandledRequest, + log + }); +} + +class Webhooks { + constructor(options) { + if (!options || !options.secret) { + throw new Error("[@octokit/webhooks] options.secret required"); + } + + const state = { + eventHandler: createEventHandler(options), + secret: options.secret, + hooks: {}, + log: createLogger(options.log) + }; + this.sign = sign.bind(null, options.secret); + this.verify = verify.bind(null, options.secret); + this.on = state.eventHandler.on; + this.onAny = state.eventHandler.onAny; + this.onError = state.eventHandler.onError; + this.removeListener = state.eventHandler.removeListener; + this.receive = state.eventHandler.receive; + this.verifyAndReceive = verifyAndReceive.bind(null, state); + } + +} + +exports.Webhooks = Webhooks; +exports.createEventHandler = createEventHandler; +exports.createNodeMiddleware = createNodeMiddleware; +exports.emitterEventNames = emitterEventNames; +//# sourceMappingURL=index.js.map diff --git a/node_modules/@octokit/webhooks/dist-node/index.js.map b/node_modules/@octokit/webhooks/dist-node/index.js.map new file mode 100644 index 0000000..807795d --- /dev/null +++ b/node_modules/@octokit/webhooks/dist-node/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sources":["../dist-src/createLogger.js","../dist-src/generated/webhook-names.js","../dist-src/event-handler/on.js","../dist-src/event-handler/wrap-error-handler.js","../dist-src/event-handler/receive.js","../dist-src/event-handler/remove-listener.js","../dist-src/event-handler/index.js","../dist-src/to-normalized-json-string.js","../dist-src/sign.js","../dist-src/verify.js","../dist-src/verify-and-receive.js","../dist-src/middleware/node/get-missing-headers.js","../dist-src/middleware/node/get-payload.js","../dist-src/middleware/node/middleware.js","../dist-src/middleware/node/on-unhandled-request-default.js","../dist-src/middleware/node/index.js","../dist-src/index.js"],"sourcesContent":["export const createLogger = (logger) => ({\n debug: () => { },\n info: () => { },\n warn: console.warn.bind(console),\n error: console.error.bind(console),\n ...logger,\n});\n","// THIS FILE IS GENERATED - DO NOT EDIT DIRECTLY\n// make edits in scripts/generate-types.ts\nexport const emitterEventNames = [\n \"branch_protection_rule\",\n \"branch_protection_rule.created\",\n \"branch_protection_rule.deleted\",\n \"branch_protection_rule.edited\",\n \"check_run\",\n \"check_run.completed\",\n \"check_run.created\",\n \"check_run.requested_action\",\n \"check_run.rerequested\",\n \"check_suite\",\n \"check_suite.completed\",\n \"check_suite.requested\",\n \"check_suite.rerequested\",\n \"code_scanning_alert\",\n \"code_scanning_alert.appeared_in_branch\",\n \"code_scanning_alert.closed_by_user\",\n \"code_scanning_alert.created\",\n \"code_scanning_alert.fixed\",\n \"code_scanning_alert.reopened\",\n \"code_scanning_alert.reopened_by_user\",\n \"commit_comment\",\n \"commit_comment.created\",\n \"create\",\n \"delete\",\n \"dependabot_alert\",\n 
\"dependabot_alert.created\",\n \"dependabot_alert.dismissed\",\n \"dependabot_alert.fixed\",\n \"dependabot_alert.reintroduced\",\n \"dependabot_alert.reopened\",\n \"deploy_key\",\n \"deploy_key.created\",\n \"deploy_key.deleted\",\n \"deployment\",\n \"deployment.created\",\n \"deployment_status\",\n \"deployment_status.created\",\n \"discussion\",\n \"discussion.answered\",\n \"discussion.category_changed\",\n \"discussion.created\",\n \"discussion.deleted\",\n \"discussion.edited\",\n \"discussion.labeled\",\n \"discussion.locked\",\n \"discussion.pinned\",\n \"discussion.transferred\",\n \"discussion.unanswered\",\n \"discussion.unlabeled\",\n \"discussion.unlocked\",\n \"discussion.unpinned\",\n \"discussion_comment\",\n \"discussion_comment.created\",\n \"discussion_comment.deleted\",\n \"discussion_comment.edited\",\n \"fork\",\n \"github_app_authorization\",\n \"github_app_authorization.revoked\",\n \"gollum\",\n \"installation\",\n \"installation.created\",\n \"installation.deleted\",\n \"installation.new_permissions_accepted\",\n \"installation.suspend\",\n \"installation.unsuspend\",\n \"installation_repositories\",\n \"installation_repositories.added\",\n \"installation_repositories.removed\",\n \"issue_comment\",\n \"issue_comment.created\",\n \"issue_comment.deleted\",\n \"issue_comment.edited\",\n \"issues\",\n \"issues.assigned\",\n \"issues.closed\",\n \"issues.deleted\",\n \"issues.demilestoned\",\n \"issues.edited\",\n \"issues.labeled\",\n \"issues.locked\",\n \"issues.milestoned\",\n \"issues.opened\",\n \"issues.pinned\",\n \"issues.reopened\",\n \"issues.transferred\",\n \"issues.unassigned\",\n \"issues.unlabeled\",\n \"issues.unlocked\",\n \"issues.unpinned\",\n \"label\",\n \"label.created\",\n \"label.deleted\",\n \"label.edited\",\n \"marketplace_purchase\",\n \"marketplace_purchase.cancelled\",\n \"marketplace_purchase.changed\",\n \"marketplace_purchase.pending_change\",\n \"marketplace_purchase.pending_change_cancelled\",\n \"marketplace_purchase.purchased\",\n \"member\",\n \"member.added\",\n \"member.edited\",\n \"member.removed\",\n \"membership\",\n \"membership.added\",\n \"membership.removed\",\n \"merge_group\",\n \"merge_group.checks_requested\",\n \"meta\",\n \"meta.deleted\",\n \"milestone\",\n \"milestone.closed\",\n \"milestone.created\",\n \"milestone.deleted\",\n \"milestone.edited\",\n \"milestone.opened\",\n \"org_block\",\n \"org_block.blocked\",\n \"org_block.unblocked\",\n \"organization\",\n \"organization.deleted\",\n \"organization.member_added\",\n \"organization.member_invited\",\n \"organization.member_removed\",\n \"organization.renamed\",\n \"package\",\n \"package.published\",\n \"package.updated\",\n \"page_build\",\n \"ping\",\n \"project\",\n \"project.closed\",\n \"project.created\",\n \"project.deleted\",\n \"project.edited\",\n \"project.reopened\",\n \"project_card\",\n \"project_card.converted\",\n \"project_card.created\",\n \"project_card.deleted\",\n \"project_card.edited\",\n \"project_card.moved\",\n \"project_column\",\n \"project_column.created\",\n \"project_column.deleted\",\n \"project_column.edited\",\n \"project_column.moved\",\n \"projects_v2_item\",\n \"projects_v2_item.archived\",\n \"projects_v2_item.converted\",\n \"projects_v2_item.created\",\n \"projects_v2_item.deleted\",\n \"projects_v2_item.edited\",\n \"projects_v2_item.reordered\",\n \"projects_v2_item.restored\",\n \"public\",\n \"pull_request\",\n \"pull_request.assigned\",\n \"pull_request.auto_merge_disabled\",\n 
\"pull_request.auto_merge_enabled\",\n \"pull_request.closed\",\n \"pull_request.converted_to_draft\",\n \"pull_request.dequeued\",\n \"pull_request.edited\",\n \"pull_request.labeled\",\n \"pull_request.locked\",\n \"pull_request.opened\",\n \"pull_request.queued\",\n \"pull_request.ready_for_review\",\n \"pull_request.reopened\",\n \"pull_request.review_request_removed\",\n \"pull_request.review_requested\",\n \"pull_request.synchronize\",\n \"pull_request.unassigned\",\n \"pull_request.unlabeled\",\n \"pull_request.unlocked\",\n \"pull_request_review\",\n \"pull_request_review.dismissed\",\n \"pull_request_review.edited\",\n \"pull_request_review.submitted\",\n \"pull_request_review_comment\",\n \"pull_request_review_comment.created\",\n \"pull_request_review_comment.deleted\",\n \"pull_request_review_comment.edited\",\n \"pull_request_review_thread\",\n \"pull_request_review_thread.resolved\",\n \"pull_request_review_thread.unresolved\",\n \"push\",\n \"release\",\n \"release.created\",\n \"release.deleted\",\n \"release.edited\",\n \"release.prereleased\",\n \"release.published\",\n \"release.released\",\n \"release.unpublished\",\n \"repository\",\n \"repository.archived\",\n \"repository.created\",\n \"repository.deleted\",\n \"repository.edited\",\n \"repository.privatized\",\n \"repository.publicized\",\n \"repository.renamed\",\n \"repository.transferred\",\n \"repository.unarchived\",\n \"repository_dispatch\",\n \"repository_import\",\n \"repository_vulnerability_alert\",\n \"repository_vulnerability_alert.create\",\n \"repository_vulnerability_alert.dismiss\",\n \"repository_vulnerability_alert.reopen\",\n \"repository_vulnerability_alert.resolve\",\n \"secret_scanning_alert\",\n \"secret_scanning_alert.created\",\n \"secret_scanning_alert.reopened\",\n \"secret_scanning_alert.resolved\",\n \"security_advisory\",\n \"security_advisory.performed\",\n \"security_advisory.published\",\n \"security_advisory.updated\",\n \"security_advisory.withdrawn\",\n \"sponsorship\",\n \"sponsorship.cancelled\",\n \"sponsorship.created\",\n \"sponsorship.edited\",\n \"sponsorship.pending_cancellation\",\n \"sponsorship.pending_tier_change\",\n \"sponsorship.tier_changed\",\n \"star\",\n \"star.created\",\n \"star.deleted\",\n \"status\",\n \"team\",\n \"team.added_to_repository\",\n \"team.created\",\n \"team.deleted\",\n \"team.edited\",\n \"team.removed_from_repository\",\n \"team_add\",\n \"watch\",\n \"watch.started\",\n \"workflow_dispatch\",\n \"workflow_job\",\n \"workflow_job.completed\",\n \"workflow_job.in_progress\",\n \"workflow_job.queued\",\n \"workflow_run\",\n \"workflow_run.completed\",\n \"workflow_run.in_progress\",\n \"workflow_run.requested\",\n];\n","import { emitterEventNames } from \"../generated/webhook-names\";\nfunction handleEventHandlers(state, webhookName, handler) {\n if (!state.hooks[webhookName]) {\n state.hooks[webhookName] = [];\n }\n state.hooks[webhookName].push(handler);\n}\nexport function receiverOn(state, webhookNameOrNames, handler) {\n if (Array.isArray(webhookNameOrNames)) {\n webhookNameOrNames.forEach((webhookName) => receiverOn(state, webhookName, handler));\n return;\n }\n if ([\"*\", \"error\"].includes(webhookNameOrNames)) {\n const webhookName = webhookNameOrNames === \"*\" ? \"any\" : webhookNameOrNames;\n const message = `Using the \"${webhookNameOrNames}\" event with the regular Webhooks.on() function is not supported. 
Please use the Webhooks.on${webhookName.charAt(0).toUpperCase() + webhookName.slice(1)}() method instead`;\n throw new Error(message);\n }\n if (!emitterEventNames.includes(webhookNameOrNames)) {\n state.log.warn(`\"${webhookNameOrNames}\" is not a known webhook name (https://developer.github.com/v3/activity/events/types/)`);\n }\n handleEventHandlers(state, webhookNameOrNames, handler);\n}\nexport function receiverOnAny(state, handler) {\n handleEventHandlers(state, \"*\", handler);\n}\nexport function receiverOnError(state, handler) {\n handleEventHandlers(state, \"error\", handler);\n}\n","// Errors thrown or rejected Promises in \"error\" event handlers are not handled\n// as they are in the webhook event handlers. If errors occur, we log a\n// \"Fatal: Error occurred\" message to stdout\nexport function wrapErrorHandler(handler, error) {\n let returnValue;\n try {\n returnValue = handler(error);\n }\n catch (error) {\n console.log('FATAL: Error occurred in \"error\" event handler');\n console.log(error);\n }\n if (returnValue && returnValue.catch) {\n returnValue.catch((error) => {\n console.log('FATAL: Error occurred in \"error\" event handler');\n console.log(error);\n });\n }\n}\n","// @ts-ignore to address #245\nimport AggregateError from \"aggregate-error\";\nimport { wrapErrorHandler } from \"./wrap-error-handler\";\nfunction getHooks(state, eventPayloadAction, eventName) {\n const hooks = [state.hooks[eventName], state.hooks[\"*\"]];\n if (eventPayloadAction) {\n hooks.unshift(state.hooks[`${eventName}.${eventPayloadAction}`]);\n }\n return [].concat(...hooks.filter(Boolean));\n}\n// main handler function\nexport function receiverHandle(state, event) {\n const errorHandlers = state.hooks.error || [];\n if (event instanceof Error) {\n const error = Object.assign(new AggregateError([event]), {\n event,\n errors: [event],\n });\n errorHandlers.forEach((handler) => wrapErrorHandler(handler, error));\n return Promise.reject(error);\n }\n if (!event || !event.name) {\n throw new AggregateError([\"Event name not passed\"]);\n }\n if (!event.payload) {\n throw new AggregateError([\"Event payload not passed\"]);\n }\n // flatten arrays of event listeners and remove undefined values\n const hooks = getHooks(state, \"action\" in event.payload ? 
event.payload.action : null, event.name);\n if (hooks.length === 0) {\n return Promise.resolve();\n }\n const errors = [];\n const promises = hooks.map((handler) => {\n let promise = Promise.resolve(event);\n if (state.transform) {\n promise = promise.then(state.transform);\n }\n return promise\n .then((event) => {\n return handler(event);\n })\n .catch((error) => errors.push(Object.assign(error, { event })));\n });\n return Promise.all(promises).then(() => {\n if (errors.length === 0) {\n return;\n }\n const error = new AggregateError(errors);\n Object.assign(error, {\n event,\n errors,\n });\n errorHandlers.forEach((handler) => wrapErrorHandler(handler, error));\n throw error;\n });\n}\n","export function removeListener(state, webhookNameOrNames, handler) {\n if (Array.isArray(webhookNameOrNames)) {\n webhookNameOrNames.forEach((webhookName) => removeListener(state, webhookName, handler));\n return;\n }\n if (!state.hooks[webhookNameOrNames]) {\n return;\n }\n // remove last hook that has been added, that way\n // it behaves the same as removeListener\n for (let i = state.hooks[webhookNameOrNames].length - 1; i >= 0; i--) {\n if (state.hooks[webhookNameOrNames][i] === handler) {\n state.hooks[webhookNameOrNames].splice(i, 1);\n return;\n }\n }\n}\n","import { createLogger } from \"../createLogger\";\nimport { receiverOn as on, receiverOnAny as onAny, receiverOnError as onError, } from \"./on\";\nimport { receiverHandle as receive } from \"./receive\";\nimport { removeListener } from \"./remove-listener\";\nexport function createEventHandler(options) {\n const state = {\n hooks: {},\n log: createLogger(options && options.log),\n };\n if (options && options.transform) {\n state.transform = options.transform;\n }\n return {\n on: on.bind(null, state),\n onAny: onAny.bind(null, state),\n onError: onError.bind(null, state),\n removeListener: removeListener.bind(null, state),\n receive: receive.bind(null, state),\n };\n}\n","/**\n * GitHub sends its JSON with an indentation of 2 spaces and a line break at the end\n */\nexport function toNormalizedJsonString(payload) {\n const payloadString = JSON.stringify(payload);\n return payloadString.replace(/[^\\\\]\\\\u[\\da-f]{4}/g, (s) => {\n return s.substr(0, 3) + s.substr(3).toUpperCase();\n });\n}\n","import { sign as signMethod } from \"@octokit/webhooks-methods\";\nimport { toNormalizedJsonString } from \"./to-normalized-json-string\";\nexport async function sign(secret, payload) {\n return signMethod(secret, typeof payload === \"string\" ? payload : toNormalizedJsonString(payload));\n}\n","import { verify as verifyMethod } from \"@octokit/webhooks-methods\";\nimport { toNormalizedJsonString } from \"./to-normalized-json-string\";\nexport async function verify(secret, payload, signature) {\n return verifyMethod(secret, typeof payload === \"string\" ? payload : toNormalizedJsonString(payload), signature);\n}\n","import { verify } from \"@octokit/webhooks-methods\";\nimport { toNormalizedJsonString } from \"./to-normalized-json-string\";\nexport async function verifyAndReceive(state, event) {\n // verify will validate that the secret is not undefined\n const matchesSignature = await verify(state.secret, typeof event.payload === \"object\"\n ? 
toNormalizedJsonString(event.payload)\n : event.payload, event.signature);\n if (!matchesSignature) {\n const error = new Error(\"[@octokit/webhooks] signature does not match event payload and secret\");\n return state.eventHandler.receive(Object.assign(error, { event, status: 400 }));\n }\n return state.eventHandler.receive({\n id: event.id,\n name: event.name,\n payload: typeof event.payload === \"string\"\n ? JSON.parse(event.payload)\n : event.payload,\n });\n}\n","const WEBHOOK_HEADERS = [\n \"x-github-event\",\n \"x-hub-signature-256\",\n \"x-github-delivery\",\n];\n// https://docs.github.com/en/developers/webhooks-and-events/webhook-events-and-payloads#delivery-headers\nexport function getMissingHeaders(request) {\n return WEBHOOK_HEADERS.filter((header) => !(header in request.headers));\n}\n","// @ts-ignore to address #245\nimport AggregateError from \"aggregate-error\";\nexport function getPayload(request) {\n // If request.body already exists we can stop here\n // See https://github.com/octokit/webhooks.js/pull/23\n if (request.body)\n return Promise.resolve(request.body);\n return new Promise((resolve, reject) => {\n let data = \"\";\n request.setEncoding(\"utf8\");\n // istanbul ignore next\n request.on(\"error\", (error) => reject(new AggregateError([error])));\n request.on(\"data\", (chunk) => (data += chunk));\n request.on(\"end\", () => {\n try {\n resolve(JSON.parse(data));\n }\n catch (error) {\n error.message = \"Invalid JSON\";\n error.status = 400;\n reject(new AggregateError([error]));\n }\n });\n });\n}\n","import { getMissingHeaders } from \"./get-missing-headers\";\nimport { getPayload } from \"./get-payload\";\nexport async function middleware(webhooks, options, request, response, next) {\n let pathname;\n try {\n pathname = new URL(request.url, \"http://localhost\").pathname;\n }\n catch (error) {\n response.writeHead(422, {\n \"content-type\": \"application/json\",\n });\n response.end(JSON.stringify({\n error: `Request URL could not be parsed: ${request.url}`,\n }));\n return;\n }\n const isUnknownRoute = request.method !== \"POST\" || pathname !== options.path;\n const isExpressMiddleware = typeof next === \"function\";\n if (isUnknownRoute) {\n if (isExpressMiddleware) {\n return next();\n }\n else {\n return options.onUnhandledRequest(request, response);\n }\n }\n const missingHeaders = getMissingHeaders(request).join(\", \");\n if (missingHeaders) {\n response.writeHead(400, {\n \"content-type\": \"application/json\",\n });\n response.end(JSON.stringify({\n error: `Required headers missing: ${missingHeaders}`,\n }));\n return;\n }\n const eventName = request.headers[\"x-github-event\"];\n const signatureSHA256 = request.headers[\"x-hub-signature-256\"];\n const id = request.headers[\"x-github-delivery\"];\n options.log.debug(`${eventName} event received (id: ${id})`);\n // GitHub will abort the request if it does not receive a response within 10s\n // See https://github.com/octokit/webhooks.js/issues/185\n let didTimeout = false;\n const timeout = setTimeout(() => {\n didTimeout = true;\n response.statusCode = 202;\n response.end(\"still processing\\n\");\n }, 9000).unref();\n try {\n const payload = await getPayload(request);\n await webhooks.verifyAndReceive({\n id: id,\n name: eventName,\n payload: payload,\n signature: signatureSHA256,\n });\n clearTimeout(timeout);\n if (didTimeout)\n return;\n response.end(\"ok\\n\");\n }\n catch (error) {\n clearTimeout(timeout);\n if (didTimeout)\n return;\n const err = Array.from(error)[0];\n const errorMessage = 
err.message\n ? `${err.name}: ${err.message}`\n : \"Error: An Unspecified error occurred\";\n response.statusCode = typeof err.status !== \"undefined\" ? err.status : 500;\n options.log.error(error);\n response.end(JSON.stringify({\n error: errorMessage,\n }));\n }\n}\n","export function onUnhandledRequestDefault(request, response) {\n response.writeHead(404, {\n \"content-type\": \"application/json\",\n });\n response.end(JSON.stringify({\n error: `Unknown route: ${request.method} ${request.url}`,\n }));\n}\n","import { createLogger } from \"../../createLogger\";\nimport { middleware } from \"./middleware\";\nimport { onUnhandledRequestDefault } from \"./on-unhandled-request-default\";\nexport function createNodeMiddleware(webhooks, { path = \"/api/github/webhooks\", onUnhandledRequest = onUnhandledRequestDefault, log = createLogger(), } = {}) {\n return middleware.bind(null, webhooks, {\n path,\n onUnhandledRequest,\n log,\n });\n}\n","import { createLogger } from \"./createLogger\";\nimport { createEventHandler } from \"./event-handler/index\";\nimport { sign } from \"./sign\";\nimport { verify } from \"./verify\";\nimport { verifyAndReceive } from \"./verify-and-receive\";\nexport { createNodeMiddleware } from \"./middleware/node/index\";\nexport { emitterEventNames } from \"./generated/webhook-names\";\n// U holds the return value of `transform` function in Options\nclass Webhooks {\n constructor(options) {\n if (!options || !options.secret) {\n throw new Error(\"[@octokit/webhooks] options.secret required\");\n }\n const state = {\n eventHandler: createEventHandler(options),\n secret: options.secret,\n hooks: {},\n log: createLogger(options.log),\n };\n this.sign = sign.bind(null, options.secret);\n this.verify = verify.bind(null, options.secret);\n this.on = state.eventHandler.on;\n this.onAny = state.eventHandler.onAny;\n this.onError = state.eventHandler.onError;\n this.removeListener = state.eventHandler.removeListener;\n this.receive = state.eventHandler.receive;\n this.verifyAndReceive = verifyAndReceive.bind(null, state);\n }\n}\nexport { createEventHandler, Webhooks, 
};\n"],"names":["createLogger","logger","debug","info","warn","console","bind","error","emitterEventNames","handleEventHandlers","state","webhookName","handler","hooks","push","receiverOn","webhookNameOrNames","Array","isArray","forEach","includes","message","charAt","toUpperCase","slice","Error","log","receiverOnAny","receiverOnError","wrapErrorHandler","returnValue","catch","getHooks","eventPayloadAction","eventName","unshift","concat","filter","Boolean","receiverHandle","event","errorHandlers","Object","assign","AggregateError","errors","Promise","reject","name","payload","action","length","resolve","promises","map","promise","transform","then","all","removeListener","i","splice","createEventHandler","options","on","onAny","onError","receive","toNormalizedJsonString","payloadString","JSON","stringify","replace","s","substr","sign","secret","signMethod","verify","signature","verifyMethod","verifyAndReceive","matchesSignature","eventHandler","status","id","parse","WEBHOOK_HEADERS","getMissingHeaders","request","header","headers","getPayload","body","data","setEncoding","chunk","middleware","webhooks","response","next","pathname","URL","url","writeHead","end","isUnknownRoute","method","path","isExpressMiddleware","onUnhandledRequest","missingHeaders","join","signatureSHA256","didTimeout","timeout","setTimeout","statusCode","unref","clearTimeout","err","from","errorMessage","onUnhandledRequestDefault","createNodeMiddleware","Webhooks","constructor"],"mappings":";;;;;;;;;AAAO,MAAMA,YAAY,GAAIC,MAAD,KAAa;EACrCC,KAAK,EAAE,MAAM,EADwB;EAErCC,IAAI,EAAE,MAAM,EAFyB;EAGrCC,IAAI,EAAEC,OAAO,CAACD,IAAR,CAAaE,IAAb,CAAkBD,OAAlB,CAH+B;EAIrCE,KAAK,EAAEF,OAAO,CAACE,KAAR,CAAcD,IAAd,CAAmBD,OAAnB,CAJ8B;EAKrC,GAAGJ;AALkC,CAAb,CAArB;;ACAP;AACA;AACA,MAAaO,iBAAiB,GAAG,CAC7B,wBAD6B,EAE7B,gCAF6B,EAG7B,gCAH6B,EAI7B,+BAJ6B,EAK7B,WAL6B,EAM7B,qBAN6B,EAO7B,mBAP6B,EAQ7B,4BAR6B,EAS7B,uBAT6B,EAU7B,aAV6B,EAW7B,uBAX6B,EAY7B,uBAZ6B,EAa7B,yBAb6B,EAc7B,qBAd6B,EAe7B,wCAf6B,EAgB7B,oCAhB6B,EAiB7B,6BAjB6B,EAkB7B,2BAlB6B,EAmB7B,8BAnB6B,EAoB7B,sCApB6B,EAqB7B,gBArB6B,EAsB7B,wBAtB6B,EAuB7B,QAvB6B,EAwB7B,QAxB6B,EAyB7B,kBAzB6B,EA0B7B,0BA1B6B,EA2B7B,4BA3B6B,EA4B7B,wBA5B6B,EA6B7B,+BA7B6B,EA8B7B,2BA9B6B,EA+B7B,YA/B6B,EAgC7B,oBAhC6B,EAiC7B,oBAjC6B,EAkC7B,YAlC6B,EAmC7B,oBAnC6B,EAoC7B,mBApC6B,EAqC7B,2BArC6B,EAsC7B,YAtC6B,EAuC7B,qBAvC6B,EAwC7B,6BAxC6B,EAyC7B,oBAzC6B,EA0C7B,oBA1C6B,EA2C7B,mBA3C6B,EA4C7B,oBA5C6B,EA6C7B,mBA7C6B,EA8C7B,mBA9C6B,EA+C7B,wBA/C6B,EAgD7B,uBAhD6B,EAiD7B,sBAjD6B,EAkD7B,qBAlD6B,EAmD7B,qBAnD6B,EAoD7B,oBApD6B,EAqD7B,4BArD6B,EAsD7B,4BAtD6B,EAuD7B,2BAvD6B,EAwD7B,MAxD6B,EAyD7B,0BAzD6B,EA0D7B,kCA1D6B,EA2D7B,QA3D6B,EA4D7B,cA5D6B,EA6D7B,sBA7D6B,EA8D7B,sBA9D6B,EA+D7B,uCA/D6B,EAgE7B,sBAhE6B,EAiE7B,wBAjE6B,EAkE7B,2BAlE6B,EAmE7B,iCAnE6B,EAoE7B,mCApE6B,EAqE7B,eArE6B,EAsE7B,uBAtE6B,EAuE7B,uBAvE6B,EAwE7B,sBAxE6B,EAyE7B,QAzE6B,EA0E7B,iBA1E6B,EA2E7B,eA3E6B,EA4E7B,gBA5E6B,EA6E7B,qBA7E6B,EA8E7B,eA9E6B,EA+E7B,gBA/E6B,EAgF7B,eAhF6B,EAiF7B,mBAjF6B,EAkF7B,eAlF6B,EAmF7B,eAnF6B,EAoF7B,iBApF6B,EAqF7B,oBArF6B,EAsF7B,mBAtF6B,EAuF7B,kBAvF6B,EAwF7B,iBAxF6B,EAyF7B,iBAzF6B,EA0F7B,OA1F6B,EA2F7B,eA3F6B,EA4F7B,eA5F6B,EA6F7B,cA7F6B,EA8F7B,sBA9F6B,EA+F7B,gCA/F6B,EAgG7B,8BAhG6B,EAiG7B,qCAjG6B,EAkG7B,+CAlG6B,EAmG7B,gCAnG6B,EAoG7B,QApG6B,EAqG7B,cArG6B,EAsG7B,eAtG6B,EAuG7B,gBAvG6B,EAwG7B,YAxG6B,EAyG7B,kBAzG6B,EA0G7B,oBA1G6B,EA2G7B,aA3G6B,EA4G7B,8BA5G6B,EA6G7B,MA7G6B,EA8G7B,cA9G6B,EA+G7B,WA/G6B,EAgH7B,kBAhH6B,EAiH7B,mBAjH6B,EAkH7B,mBAlH6B,EAmH7B,kBAnH6B,EAoH7B,kBApH6B,EAqH7B,WArH6B,EAsH7B,mBAtH6B,EAuH7B,qBAvH6B,EAwH7B,cAxH6B,EAyH7B,sBAzH6B,EA0H7B,2BA1H6B,EA2H7B,6BA3H6
B,EA4H7B,6BA5H6B,EA6H7B,sBA7H6B,EA8H7B,SA9H6B,EA+H7B,mBA/H6B,EAgI7B,iBAhI6B,EAiI7B,YAjI6B,EAkI7B,MAlI6B,EAmI7B,SAnI6B,EAoI7B,gBApI6B,EAqI7B,iBArI6B,EAsI7B,iBAtI6B,EAuI7B,gBAvI6B,EAwI7B,kBAxI6B,EAyI7B,cAzI6B,EA0I7B,wBA1I6B,EA2I7B,sBA3I6B,EA4I7B,sBA5I6B,EA6I7B,qBA7I6B,EA8I7B,oBA9I6B,EA+I7B,gBA/I6B,EAgJ7B,wBAhJ6B,EAiJ7B,wBAjJ6B,EAkJ7B,uBAlJ6B,EAmJ7B,sBAnJ6B,EAoJ7B,kBApJ6B,EAqJ7B,2BArJ6B,EAsJ7B,4BAtJ6B,EAuJ7B,0BAvJ6B,EAwJ7B,0BAxJ6B,EAyJ7B,yBAzJ6B,EA0J7B,4BA1J6B,EA2J7B,2BA3J6B,EA4J7B,QA5J6B,EA6J7B,cA7J6B,EA8J7B,uBA9J6B,EA+J7B,kCA/J6B,EAgK7B,iCAhK6B,EAiK7B,qBAjK6B,EAkK7B,iCAlK6B,EAmK7B,uBAnK6B,EAoK7B,qBApK6B,EAqK7B,sBArK6B,EAsK7B,qBAtK6B,EAuK7B,qBAvK6B,EAwK7B,qBAxK6B,EAyK7B,+BAzK6B,EA0K7B,uBA1K6B,EA2K7B,qCA3K6B,EA4K7B,+BA5K6B,EA6K7B,0BA7K6B,EA8K7B,yBA9K6B,EA+K7B,wBA/K6B,EAgL7B,uBAhL6B,EAiL7B,qBAjL6B,EAkL7B,+BAlL6B,EAmL7B,4BAnL6B,EAoL7B,+BApL6B,EAqL7B,6BArL6B,EAsL7B,qCAtL6B,EAuL7B,qCAvL6B,EAwL7B,oCAxL6B,EAyL7B,4BAzL6B,EA0L7B,qCA1L6B,EA2L7B,uCA3L6B,EA4L7B,MA5L6B,EA6L7B,SA7L6B,EA8L7B,iBA9L6B,EA+L7B,iBA/L6B,EAgM7B,gBAhM6B,EAiM7B,qBAjM6B,EAkM7B,mBAlM6B,EAmM7B,kBAnM6B,EAoM7B,qBApM6B,EAqM7B,YArM6B,EAsM7B,qBAtM6B,EAuM7B,oBAvM6B,EAwM7B,oBAxM6B,EAyM7B,mBAzM6B,EA0M7B,uBA1M6B,EA2M7B,uBA3M6B,EA4M7B,oBA5M6B,EA6M7B,wBA7M6B,EA8M7B,uBA9M6B,EA+M7B,qBA/M6B,EAgN7B,mBAhN6B,EAiN7B,gCAjN6B,EAkN7B,uCAlN6B,EAmN7B,wCAnN6B,EAoN7B,uCApN6B,EAqN7B,wCArN6B,EAsN7B,uBAtN6B,EAuN7B,+BAvN6B,EAwN7B,gCAxN6B,EAyN7B,gCAzN6B,EA0N7B,mBA1N6B,EA2N7B,6BA3N6B,EA4N7B,6BA5N6B,EA6N7B,2BA7N6B,EA8N7B,6BA9N6B,EA+N7B,aA/N6B,EAgO7B,uBAhO6B,EAiO7B,qBAjO6B,EAkO7B,oBAlO6B,EAmO7B,kCAnO6B,EAoO7B,iCApO6B,EAqO7B,0BArO6B,EAsO7B,MAtO6B,EAuO7B,cAvO6B,EAwO7B,cAxO6B,EAyO7B,QAzO6B,EA0O7B,MA1O6B,EA2O7B,0BA3O6B,EA4O7B,cA5O6B,EA6O7B,cA7O6B,EA8O7B,aA9O6B,EA+O7B,8BA/O6B,EAgP7B,UAhP6B,EAiP7B,OAjP6B,EAkP7B,eAlP6B,EAmP7B,mBAnP6B,EAoP7B,cApP6B,EAqP7B,wBArP6B,EAsP7B,0BAtP6B,EAuP7B,qBAvP6B,EAwP7B,cAxP6B,EAyP7B,wBAzP6B,EA0P7B,0BA1P6B,EA2P7B,wBA3P6B,CAA1B;;ACDP,SAASC,mBAAT,CAA6BC,KAA7B,EAAoCC,WAApC,EAAiDC,OAAjD,EAA0D;EACtD,IAAI,CAACF,KAAK,CAACG,KAAN,CAAYF,WAAZ,CAAL,EAA+B;IAC3BD,KAAK,CAACG,KAAN,CAAYF,WAAZ,IAA2B,EAA3B;;;EAEJD,KAAK,CAACG,KAAN,CAAYF,WAAZ,EAAyBG,IAAzB,CAA8BF,OAA9B;AACH;;AACD,AAAO,SAASG,UAAT,CAAoBL,KAApB,EAA2BM,kBAA3B,EAA+CJ,OAA/C,EAAwD;EAC3D,IAAIK,KAAK,CAACC,OAAN,CAAcF,kBAAd,CAAJ,EAAuC;IACnCA,kBAAkB,CAACG,OAAnB,CAA4BR,WAAD,IAAiBI,UAAU,CAACL,KAAD,EAAQC,WAAR,EAAqBC,OAArB,CAAtD;IACA;;;EAEJ,IAAI,CAAC,GAAD,EAAM,OAAN,EAAeQ,QAAf,CAAwBJ,kBAAxB,CAAJ,EAAiD;IAC7C,MAAML,WAAW,GAAGK,kBAAkB,KAAK,GAAvB,GAA6B,KAA7B,GAAqCA,kBAAzD;IACA,MAAMK,OAAO,GAAI,cAAaL,kBAAmB,+FAA8FL,WAAW,CAACW,MAAZ,CAAmB,CAAnB,EAAsBC,WAAtB,KAAsCZ,WAAW,CAACa,KAAZ,CAAkB,CAAlB,CAAqB,mBAA1M;IACA,MAAM,IAAIC,KAAJ,CAAUJ,OAAV,CAAN;;;EAEJ,IAAI,CAACb,iBAAiB,CAACY,QAAlB,CAA2BJ,kBAA3B,CAAL,EAAqD;IACjDN,KAAK,CAACgB,GAAN,CAAUtB,IAAV,CAAgB,IAAGY,kBAAmB,wFAAtC;;;EAEJP,mBAAmB,CAACC,KAAD,EAAQM,kBAAR,EAA4BJ,OAA5B,CAAnB;AACH;AACD,AAAO,SAASe,aAAT,CAAuBjB,KAAvB,EAA8BE,OAA9B,EAAuC;EAC1CH,mBAAmB,CAACC,KAAD,EAAQ,GAAR,EAAaE,OAAb,CAAnB;AACH;AACD,AAAO,SAASgB,eAAT,CAAyBlB,KAAzB,EAAgCE,OAAhC,EAAyC;EAC5CH,mBAAmB,CAACC,KAAD,EAAQ,OAAR,EAAiBE,OAAjB,CAAnB;AACH;;AC3BD;AACA;AACA;AACA,AAAO,SAASiB,gBAAT,CAA0BjB,OAA1B,EAAmCL,KAAnC,EAA0C;EAC7C,IAAIuB,WAAJ;;EACA,IAAI;IACAA,WAAW,GAAGlB,OAAO,CAACL,KAAD,CAArB;GADJ,CAGA,OAAOA,KAAP,EAAc;IACVF,OAAO,CAACqB,GAAR,CAAY,gDAAZ;IACArB,OAAO,CAACqB,GAAR,CAAYnB,KAAZ;;;EAEJ,IAAIuB,WAAW,IAAIA,WAAW,CAACC,KAA/B,EAAsC;IAClCD,WAAW,CAACC,KAAZ,CAAmBxB,KAAD,IAAW;MACzBF,OAAO,CAACqB,GAAR,CAAY,gDAAZ;MACArB,OAAO,CAACqB,GAAR,CAAYnB,KAAZ;KAFJ;;AAKP;;AClBD;AACA;AAEA,SAASyB,QAAT,CAAkBtB,KAAlB,EAAyBuB,kBAAzB,EAA6CC,SAA7C,EAAwD;EACpD,
MAAMrB,KAAK,GAAG,CAACH,KAAK,CAACG,KAAN,CAAYqB,SAAZ,CAAD,EAAyBxB,KAAK,CAACG,KAAN,CAAY,GAAZ,CAAzB,CAAd;;EACA,IAAIoB,kBAAJ,EAAwB;IACpBpB,KAAK,CAACsB,OAAN,CAAczB,KAAK,CAACG,KAAN,CAAa,GAAEqB,SAAU,IAAGD,kBAAmB,EAA/C,CAAd;;;EAEJ,OAAO,GAAGG,MAAH,CAAU,GAAGvB,KAAK,CAACwB,MAAN,CAAaC,OAAb,CAAb,CAAP;AACH;;;AAED,AAAO,SAASC,cAAT,CAAwB7B,KAAxB,EAA+B8B,KAA/B,EAAsC;EACzC,MAAMC,aAAa,GAAG/B,KAAK,CAACG,KAAN,CAAYN,KAAZ,IAAqB,EAA3C;;EACA,IAAIiC,KAAK,YAAYf,KAArB,EAA4B;IACxB,MAAMlB,KAAK,GAAGmC,MAAM,CAACC,MAAP,CAAc,IAAIC,cAAJ,CAAmB,CAACJ,KAAD,CAAnB,CAAd,EAA2C;MACrDA,KADqD;MAErDK,MAAM,EAAE,CAACL,KAAD;KAFE,CAAd;IAIAC,aAAa,CAACtB,OAAd,CAAuBP,OAAD,IAAaiB,gBAAgB,CAACjB,OAAD,EAAUL,KAAV,CAAnD;IACA,OAAOuC,OAAO,CAACC,MAAR,CAAexC,KAAf,CAAP;;;EAEJ,IAAI,CAACiC,KAAD,IAAU,CAACA,KAAK,CAACQ,IAArB,EAA2B;IACvB,MAAM,IAAIJ,cAAJ,CAAmB,CAAC,uBAAD,CAAnB,CAAN;;;EAEJ,IAAI,CAACJ,KAAK,CAACS,OAAX,EAAoB;IAChB,MAAM,IAAIL,cAAJ,CAAmB,CAAC,0BAAD,CAAnB,CAAN;GAdqC;;;EAiBzC,MAAM/B,KAAK,GAAGmB,QAAQ,CAACtB,KAAD,EAAQ,YAAY8B,KAAK,CAACS,OAAlB,GAA4BT,KAAK,CAACS,OAAN,CAAcC,MAA1C,GAAmD,IAA3D,EAAiEV,KAAK,CAACQ,IAAvE,CAAtB;;EACA,IAAInC,KAAK,CAACsC,MAAN,KAAiB,CAArB,EAAwB;IACpB,OAAOL,OAAO,CAACM,OAAR,EAAP;;;EAEJ,MAAMP,MAAM,GAAG,EAAf;EACA,MAAMQ,QAAQ,GAAGxC,KAAK,CAACyC,GAAN,CAAW1C,OAAD,IAAa;IACpC,IAAI2C,OAAO,GAAGT,OAAO,CAACM,OAAR,CAAgBZ,KAAhB,CAAd;;IACA,IAAI9B,KAAK,CAAC8C,SAAV,EAAqB;MACjBD,OAAO,GAAGA,OAAO,CAACE,IAAR,CAAa/C,KAAK,CAAC8C,SAAnB,CAAV;;;IAEJ,OAAOD,OAAO,CACTE,IADE,CACIjB,KAAD,IAAW;MACjB,OAAO5B,OAAO,CAAC4B,KAAD,CAAd;KAFG,EAIFT,KAJE,CAIKxB,KAAD,IAAWsC,MAAM,CAAC/B,IAAP,CAAY4B,MAAM,CAACC,MAAP,CAAcpC,KAAd,EAAqB;MAAEiC;KAAvB,CAAZ,CAJf,CAAP;GALa,CAAjB;EAWA,OAAOM,OAAO,CAACY,GAAR,CAAYL,QAAZ,EAAsBI,IAAtB,CAA2B,MAAM;IACpC,IAAIZ,MAAM,CAACM,MAAP,KAAkB,CAAtB,EAAyB;MACrB;;;IAEJ,MAAM5C,KAAK,GAAG,IAAIqC,cAAJ,CAAmBC,MAAnB,CAAd;IACAH,MAAM,CAACC,MAAP,CAAcpC,KAAd,EAAqB;MACjBiC,KADiB;MAEjBK;KAFJ;IAIAJ,aAAa,CAACtB,OAAd,CAAuBP,OAAD,IAAaiB,gBAAgB,CAACjB,OAAD,EAAUL,KAAV,CAAnD;IACA,MAAMA,KAAN;GAVG,CAAP;AAYH;;ACxDM,SAASoD,cAAT,CAAwBjD,KAAxB,EAA+BM,kBAA/B,EAAmDJ,OAAnD,EAA4D;EAC/D,IAAIK,KAAK,CAACC,OAAN,CAAcF,kBAAd,CAAJ,EAAuC;IACnCA,kBAAkB,CAACG,OAAnB,CAA4BR,WAAD,IAAiBgD,cAAc,CAACjD,KAAD,EAAQC,WAAR,EAAqBC,OAArB,CAA1D;IACA;;;EAEJ,IAAI,CAACF,KAAK,CAACG,KAAN,CAAYG,kBAAZ,CAAL,EAAsC;IAClC;GAN2D;;;;EAU/D,KAAK,IAAI4C,CAAC,GAAGlD,KAAK,CAACG,KAAN,CAAYG,kBAAZ,EAAgCmC,MAAhC,GAAyC,CAAtD,EAAyDS,CAAC,IAAI,CAA9D,EAAiEA,CAAC,EAAlE,EAAsE;IAClE,IAAIlD,KAAK,CAACG,KAAN,CAAYG,kBAAZ,EAAgC4C,CAAhC,MAAuChD,OAA3C,EAAoD;MAChDF,KAAK,CAACG,KAAN,CAAYG,kBAAZ,EAAgC6C,MAAhC,CAAuCD,CAAvC,EAA0C,CAA1C;MACA;;;AAGX;;ACZM,SAASE,kBAAT,CAA4BC,OAA5B,EAAqC;EACxC,MAAMrD,KAAK,GAAG;IACVG,KAAK,EAAE,EADG;IAEVa,GAAG,EAAE1B,YAAY,CAAC+D,OAAO,IAAIA,OAAO,CAACrC,GAApB;GAFrB;;EAIA,IAAIqC,OAAO,IAAIA,OAAO,CAACP,SAAvB,EAAkC;IAC9B9C,KAAK,CAAC8C,SAAN,GAAkBO,OAAO,CAACP,SAA1B;;;EAEJ,OAAO;IACHQ,EAAE,EAAEA,UAAE,CAAC1D,IAAH,CAAQ,IAAR,EAAcI,KAAd,CADD;IAEHuD,KAAK,EAAEA,aAAK,CAAC3D,IAAN,CAAW,IAAX,EAAiBI,KAAjB,CAFJ;IAGHwD,OAAO,EAAEA,eAAO,CAAC5D,IAAR,CAAa,IAAb,EAAmBI,KAAnB,CAHN;IAIHiD,cAAc,EAAEA,cAAc,CAACrD,IAAf,CAAoB,IAApB,EAA0BI,KAA1B,CAJb;IAKHyD,OAAO,EAAEA,cAAO,CAAC7D,IAAR,CAAa,IAAb,EAAmBI,KAAnB;GALb;AAOH;;ACnBD;AACA;AACA;AACA,AAAO,SAAS0D,sBAAT,CAAgCnB,OAAhC,EAAyC;EAC5C,MAAMoB,aAAa,GAAGC,IAAI,CAACC,SAAL,CAAetB,OAAf,CAAtB;EACA,OAAOoB,aAAa,CAACG,OAAd,CAAsB,qBAAtB,EAA8CC,CAAD,IAAO;IACvD,OAAOA,CAAC,CAACC,MAAF,CAAS,CAAT,EAAY,CAAZ,IAAiBD,CAAC,CAACC,MAAF,CAAS,CAAT,EAAYnD,WAAZ,EAAxB;GADG,CAAP;AAGH;;ACNM,eAAeoD,IAAf,CAAoBC,MAApB,EAA4B3B,OAA5B,EAAqC;EACxC,OAAO4B,oBAAU,CAACD,MAAD,EAAS,OAAO3B,OAAP,KAAmB,QAAnB,GAA8BA,OAA9B,GAAwCmB,sBAAsB,CAACnB,OAAD,CAAvE,CAAjB;AA
CH;;ACFM,eAAe6B,MAAf,CAAsBF,MAAtB,EAA8B3B,OAA9B,EAAuC8B,SAAvC,EAAkD;EACrD,OAAOC,sBAAY,CAACJ,MAAD,EAAS,OAAO3B,OAAP,KAAmB,QAAnB,GAA8BA,OAA9B,GAAwCmB,sBAAsB,CAACnB,OAAD,CAAvE,EAAkF8B,SAAlF,CAAnB;AACH;;ACFM,eAAeE,gBAAf,CAAgCvE,KAAhC,EAAuC8B,KAAvC,EAA8C;;EAEjD,MAAM0C,gBAAgB,GAAG,MAAMJ,sBAAM,CAACpE,KAAK,CAACkE,MAAP,EAAe,OAAOpC,KAAK,CAACS,OAAb,KAAyB,QAAzB,GAC9CmB,sBAAsB,CAAC5B,KAAK,CAACS,OAAP,CADwB,GAE9CT,KAAK,CAACS,OAFyB,EAEhBT,KAAK,CAACuC,SAFU,CAArC;;EAGA,IAAI,CAACG,gBAAL,EAAuB;IACnB,MAAM3E,KAAK,GAAG,IAAIkB,KAAJ,CAAU,uEAAV,CAAd;IACA,OAAOf,KAAK,CAACyE,YAAN,CAAmBhB,OAAnB,CAA2BzB,MAAM,CAACC,MAAP,CAAcpC,KAAd,EAAqB;MAAEiC,KAAF;MAAS4C,MAAM,EAAE;KAAtC,CAA3B,CAAP;;;EAEJ,OAAO1E,KAAK,CAACyE,YAAN,CAAmBhB,OAAnB,CAA2B;IAC9BkB,EAAE,EAAE7C,KAAK,CAAC6C,EADoB;IAE9BrC,IAAI,EAAER,KAAK,CAACQ,IAFkB;IAG9BC,OAAO,EAAE,OAAOT,KAAK,CAACS,OAAb,KAAyB,QAAzB,GACHqB,IAAI,CAACgB,KAAL,CAAW9C,KAAK,CAACS,OAAjB,CADG,GAEHT,KAAK,CAACS;GALT,CAAP;AAOH;;AClBD,MAAMsC,eAAe,GAAG,CACpB,gBADoB,EAEpB,qBAFoB,EAGpB,mBAHoB,CAAxB;;AAMA,AAAO,SAASC,iBAAT,CAA2BC,OAA3B,EAAoC;EACvC,OAAOF,eAAe,CAAClD,MAAhB,CAAwBqD,MAAD,IAAY,EAAEA,MAAM,IAAID,OAAO,CAACE,OAApB,CAAnC,CAAP;AACH;;ACRD;AACA,AACO,SAASC,UAAT,CAAoBH,OAApB,EAA6B;;;EAGhC,IAAIA,OAAO,CAACI,IAAZ,EACI,OAAO/C,OAAO,CAACM,OAAR,CAAgBqC,OAAO,CAACI,IAAxB,CAAP;EACJ,OAAO,IAAI/C,OAAJ,CAAY,CAACM,OAAD,EAAUL,MAAV,KAAqB;IACpC,IAAI+C,IAAI,GAAG,EAAX;IACAL,OAAO,CAACM,WAAR,CAAoB,MAApB,EAFoC;;IAIpCN,OAAO,CAACzB,EAAR,CAAW,OAAX,EAAqBzD,KAAD,IAAWwC,MAAM,CAAC,IAAIH,cAAJ,CAAmB,CAACrC,KAAD,CAAnB,CAAD,CAArC;IACAkF,OAAO,CAACzB,EAAR,CAAW,MAAX,EAAoBgC,KAAD,IAAYF,IAAI,IAAIE,KAAvC;IACAP,OAAO,CAACzB,EAAR,CAAW,KAAX,EAAkB,MAAM;MACpB,IAAI;QACAZ,OAAO,CAACkB,IAAI,CAACgB,KAAL,CAAWQ,IAAX,CAAD,CAAP;OADJ,CAGA,OAAOvF,KAAP,EAAc;QACVA,KAAK,CAACc,OAAN,GAAgB,cAAhB;QACAd,KAAK,CAAC6E,MAAN,GAAe,GAAf;QACArC,MAAM,CAAC,IAAIH,cAAJ,CAAmB,CAACrC,KAAD,CAAnB,CAAD,CAAN;;KAPR;GANG,CAAP;AAiBH;;ACtBM,eAAe0F,UAAf,CAA0BC,QAA1B,EAAoCnC,OAApC,EAA6C0B,OAA7C,EAAsDU,QAAtD,EAAgEC,IAAhE,EAAsE;EACzE,IAAIC,QAAJ;;EACA,IAAI;IACAA,QAAQ,GAAG,IAAIC,GAAJ,CAAQb,OAAO,CAACc,GAAhB,EAAqB,kBAArB,EAAyCF,QAApD;GADJ,CAGA,OAAO9F,KAAP,EAAc;IACV4F,QAAQ,CAACK,SAAT,CAAmB,GAAnB,EAAwB;MACpB,gBAAgB;KADpB;IAGAL,QAAQ,CAACM,GAAT,CAAanC,IAAI,CAACC,SAAL,CAAe;MACxBhE,KAAK,EAAG,oCAAmCkF,OAAO,CAACc,GAAI;KAD9C,CAAb;IAGA;;;EAEJ,MAAMG,cAAc,GAAGjB,OAAO,CAACkB,MAAR,KAAmB,MAAnB,IAA6BN,QAAQ,KAAKtC,OAAO,CAAC6C,IAAzE;EACA,MAAMC,mBAAmB,GAAG,OAAOT,IAAP,KAAgB,UAA5C;;EACA,IAAIM,cAAJ,EAAoB;IAChB,IAAIG,mBAAJ,EAAyB;MACrB,OAAOT,IAAI,EAAX;KADJ,MAGK;MACD,OAAOrC,OAAO,CAAC+C,kBAAR,CAA2BrB,OAA3B,EAAoCU,QAApC,CAAP;;;;EAGR,MAAMY,cAAc,GAAGvB,iBAAiB,CAACC,OAAD,CAAjB,CAA2BuB,IAA3B,CAAgC,IAAhC,CAAvB;;EACA,IAAID,cAAJ,EAAoB;IAChBZ,QAAQ,CAACK,SAAT,CAAmB,GAAnB,EAAwB;MACpB,gBAAgB;KADpB;IAGAL,QAAQ,CAACM,GAAT,CAAanC,IAAI,CAACC,SAAL,CAAe;MACxBhE,KAAK,EAAG,6BAA4BwG,cAAe;KAD1C,CAAb;IAGA;;;EAEJ,MAAM7E,SAAS,GAAGuD,OAAO,CAACE,OAAR,CAAgB,gBAAhB,CAAlB;EACA,MAAMsB,eAAe,GAAGxB,OAAO,CAACE,OAAR,CAAgB,qBAAhB,CAAxB;EACA,MAAMN,EAAE,GAAGI,OAAO,CAACE,OAAR,CAAgB,mBAAhB,CAAX;EACA5B,OAAO,CAACrC,GAAR,CAAYxB,KAAZ,CAAmB,GAAEgC,SAAU,wBAAuBmD,EAAG,GAAzD,EArCyE;;;EAwCzE,IAAI6B,UAAU,GAAG,KAAjB;EACA,MAAMC,OAAO,GAAGC,UAAU,CAAC,MAAM;IAC7BF,UAAU,GAAG,IAAb;IACAf,QAAQ,CAACkB,UAAT,GAAsB,GAAtB;IACAlB,QAAQ,CAACM,GAAT,CAAa,oBAAb;GAHsB,EAIvB,IAJuB,CAAV,CAIPa,KAJO,EAAhB;;EAKA,IAAI;IACA,MAAMrE,OAAO,GAAG,MAAM2C,UAAU,CAACH,OAAD,CAAhC;IACA,MAAMS,QAAQ,CAACjB,gBAAT,CAA0B;MAC5BI,EAAE,EAAEA,EADwB;MAE5BrC,IAAI,EAAEd,SAFsB;MAG5Be,OAAO,EAAEA,OAHmB;MAI5B8B,SAAS,EAAEkC;KAJT,CAAN;IAMAM,YAAY,CAACJ,OAAD,CAAZ;IACA,IAAID,UAAJ,EACI;IACJf,QAAQ,CAACM,GAAT,CAAa,MAAb;GAXJ,CAaA,OAAOlG,KAAP,EAAc;IA
"}
\ No newline at end of file
diff --git a/node_modules/@octokit/webhooks/dist-src/createLogger.js b/node_modules/@octokit/webhooks/dist-src/createLogger.js
new file mode 100644
index 0000000..9fbb6b9
--- /dev/null
+++ b/node_modules/@octokit/webhooks/dist-src/createLogger.js
@@ -0,0 +1,7 @@
+export const createLogger = (logger) => ({
+    debug: () => { },
+    info: () => { },
+    warn: console.warn.bind(console),
+    error: console.error.bind(console),
+    ...logger,
+});
diff --git a/node_modules/@octokit/webhooks/dist-src/event-handler/index.js b/node_modules/@octokit/webhooks/dist-src/event-handler/index.js
new file mode 100644
index 0000000..dd7e764
--- /dev/null
+++ b/node_modules/@octokit/webhooks/dist-src/event-handler/index.js
@@ -0,0 +1,20 @@
+import { createLogger } from "../createLogger";
+import { receiverOn as on, receiverOnAny as onAny, receiverOnError as onError, } from "./on";
+import { receiverHandle as receive } from "./receive";
+import { removeListener } from "./remove-listener";
+export function createEventHandler(options) {
+    const state = {
+        hooks: {},
+        log: createLogger(options && options.log),
+    };
+    if (options && options.transform) {
+        state.transform = options.transform;
+    }
+    return {
+        on: on.bind(null, state),
+        onAny: onAny.bind(null, state),
+        onError: onError.bind(null, state),
+        removeListener: removeListener.bind(null, state),
+        receive: receive.bind(null, state),
+    };
+}
diff --git a/node_modules/@octokit/webhooks/dist-src/event-handler/on.js b/node_modules/@octokit/webhooks/dist-src/event-handler/on.js
new file mode 100644
index 0000000..258bf99
--- /dev/null
+++ b/node_modules/@octokit/webhooks/dist-src/event-handler/on.js
@@ -0,0 +1,28 @@
+import { emitterEventNames } from 
"../generated/webhook-names"; +function handleEventHandlers(state, webhookName, handler) { + if (!state.hooks[webhookName]) { + state.hooks[webhookName] = []; + } + state.hooks[webhookName].push(handler); +} +export function receiverOn(state, webhookNameOrNames, handler) { + if (Array.isArray(webhookNameOrNames)) { + webhookNameOrNames.forEach((webhookName) => receiverOn(state, webhookName, handler)); + return; + } + if (["*", "error"].includes(webhookNameOrNames)) { + const webhookName = webhookNameOrNames === "*" ? "any" : webhookNameOrNames; + const message = `Using the "${webhookNameOrNames}" event with the regular Webhooks.on() function is not supported. Please use the Webhooks.on${webhookName.charAt(0).toUpperCase() + webhookName.slice(1)}() method instead`; + throw new Error(message); + } + if (!emitterEventNames.includes(webhookNameOrNames)) { + state.log.warn(`"${webhookNameOrNames}" is not a known webhook name (https://developer.github.com/v3/activity/events/types/)`); + } + handleEventHandlers(state, webhookNameOrNames, handler); +} +export function receiverOnAny(state, handler) { + handleEventHandlers(state, "*", handler); +} +export function receiverOnError(state, handler) { + handleEventHandlers(state, "error", handler); +} diff --git a/node_modules/@octokit/webhooks/dist-src/event-handler/receive.js b/node_modules/@octokit/webhooks/dist-src/event-handler/receive.js new file mode 100644 index 0000000..8d54d55 --- /dev/null +++ b/node_modules/@octokit/webhooks/dist-src/event-handler/receive.js @@ -0,0 +1,57 @@ +// @ts-ignore to address #245 +import AggregateError from "aggregate-error"; +import { wrapErrorHandler } from "./wrap-error-handler"; +function getHooks(state, eventPayloadAction, eventName) { + const hooks = [state.hooks[eventName], state.hooks["*"]]; + if (eventPayloadAction) { + hooks.unshift(state.hooks[`${eventName}.${eventPayloadAction}`]); + } + return [].concat(...hooks.filter(Boolean)); +} +// main handler function +export function receiverHandle(state, event) { + const errorHandlers = state.hooks.error || []; + if (event instanceof Error) { + const error = Object.assign(new AggregateError([event]), { + event, + errors: [event], + }); + errorHandlers.forEach((handler) => wrapErrorHandler(handler, error)); + return Promise.reject(error); + } + if (!event || !event.name) { + throw new AggregateError(["Event name not passed"]); + } + if (!event.payload) { + throw new AggregateError(["Event payload not passed"]); + } + // flatten arrays of event listeners and remove undefined values + const hooks = getHooks(state, "action" in event.payload ? 
event.payload.action : null, event.name); + if (hooks.length === 0) { + return Promise.resolve(); + } + const errors = []; + const promises = hooks.map((handler) => { + let promise = Promise.resolve(event); + if (state.transform) { + promise = promise.then(state.transform); + } + return promise + .then((event) => { + return handler(event); + }) + .catch((error) => errors.push(Object.assign(error, { event }))); + }); + return Promise.all(promises).then(() => { + if (errors.length === 0) { + return; + } + const error = new AggregateError(errors); + Object.assign(error, { + event, + errors, + }); + errorHandlers.forEach((handler) => wrapErrorHandler(handler, error)); + throw error; + }); +} diff --git a/node_modules/@octokit/webhooks/dist-src/event-handler/remove-listener.js b/node_modules/@octokit/webhooks/dist-src/event-handler/remove-listener.js new file mode 100644 index 0000000..cb558ab --- /dev/null +++ b/node_modules/@octokit/webhooks/dist-src/event-handler/remove-listener.js @@ -0,0 +1,17 @@ +export function removeListener(state, webhookNameOrNames, handler) { + if (Array.isArray(webhookNameOrNames)) { + webhookNameOrNames.forEach((webhookName) => removeListener(state, webhookName, handler)); + return; + } + if (!state.hooks[webhookNameOrNames]) { + return; + } + // remove last hook that has been added, that way + // it behaves the same as removeListener + for (let i = state.hooks[webhookNameOrNames].length - 1; i >= 0; i--) { + if (state.hooks[webhookNameOrNames][i] === handler) { + state.hooks[webhookNameOrNames].splice(i, 1); + return; + } + } +} diff --git a/node_modules/@octokit/webhooks/dist-src/event-handler/wrap-error-handler.js b/node_modules/@octokit/webhooks/dist-src/event-handler/wrap-error-handler.js new file mode 100644 index 0000000..29bc6ef --- /dev/null +++ b/node_modules/@octokit/webhooks/dist-src/event-handler/wrap-error-handler.js @@ -0,0 +1,19 @@ +// Errors thrown or rejected Promises in "error" event handlers are not handled +// as they are in the webhook event handlers. 
If errors occur, we log a +// "Fatal: Error occurred" message to stdout +export function wrapErrorHandler(handler, error) { + let returnValue; + try { + returnValue = handler(error); + } + catch (error) { + console.log('FATAL: Error occurred in "error" event handler'); + console.log(error); + } + if (returnValue && returnValue.catch) { + returnValue.catch((error) => { + console.log('FATAL: Error occurred in "error" event handler'); + console.log(error); + }); + } +} diff --git a/node_modules/@octokit/webhooks/dist-src/generated/webhook-names.js b/node_modules/@octokit/webhooks/dist-src/generated/webhook-names.js new file mode 100644 index 0000000..61ef242 --- /dev/null +++ b/node_modules/@octokit/webhooks/dist-src/generated/webhook-names.js @@ -0,0 +1,255 @@ +// THIS FILE IS GENERATED - DO NOT EDIT DIRECTLY +// make edits in scripts/generate-types.ts +export const emitterEventNames = [ + "branch_protection_rule", + "branch_protection_rule.created", + "branch_protection_rule.deleted", + "branch_protection_rule.edited", + "check_run", + "check_run.completed", + "check_run.created", + "check_run.requested_action", + "check_run.rerequested", + "check_suite", + "check_suite.completed", + "check_suite.requested", + "check_suite.rerequested", + "code_scanning_alert", + "code_scanning_alert.appeared_in_branch", + "code_scanning_alert.closed_by_user", + "code_scanning_alert.created", + "code_scanning_alert.fixed", + "code_scanning_alert.reopened", + "code_scanning_alert.reopened_by_user", + "commit_comment", + "commit_comment.created", + "create", + "delete", + "dependabot_alert", + "dependabot_alert.created", + "dependabot_alert.dismissed", + "dependabot_alert.fixed", + "dependabot_alert.reintroduced", + "dependabot_alert.reopened", + "deploy_key", + "deploy_key.created", + "deploy_key.deleted", + "deployment", + "deployment.created", + "deployment_status", + "deployment_status.created", + "discussion", + "discussion.answered", + "discussion.category_changed", + "discussion.created", + "discussion.deleted", + "discussion.edited", + "discussion.labeled", + "discussion.locked", + "discussion.pinned", + "discussion.transferred", + "discussion.unanswered", + "discussion.unlabeled", + "discussion.unlocked", + "discussion.unpinned", + "discussion_comment", + "discussion_comment.created", + "discussion_comment.deleted", + "discussion_comment.edited", + "fork", + "github_app_authorization", + "github_app_authorization.revoked", + "gollum", + "installation", + "installation.created", + "installation.deleted", + "installation.new_permissions_accepted", + "installation.suspend", + "installation.unsuspend", + "installation_repositories", + "installation_repositories.added", + "installation_repositories.removed", + "issue_comment", + "issue_comment.created", + "issue_comment.deleted", + "issue_comment.edited", + "issues", + "issues.assigned", + "issues.closed", + "issues.deleted", + "issues.demilestoned", + "issues.edited", + "issues.labeled", + "issues.locked", + "issues.milestoned", + "issues.opened", + "issues.pinned", + "issues.reopened", + "issues.transferred", + "issues.unassigned", + "issues.unlabeled", + "issues.unlocked", + "issues.unpinned", + "label", + "label.created", + "label.deleted", + "label.edited", + "marketplace_purchase", + "marketplace_purchase.cancelled", + "marketplace_purchase.changed", + "marketplace_purchase.pending_change", + "marketplace_purchase.pending_change_cancelled", + "marketplace_purchase.purchased", + "member", + "member.added", + "member.edited", + "member.removed", + 
"membership", + "membership.added", + "membership.removed", + "merge_group", + "merge_group.checks_requested", + "meta", + "meta.deleted", + "milestone", + "milestone.closed", + "milestone.created", + "milestone.deleted", + "milestone.edited", + "milestone.opened", + "org_block", + "org_block.blocked", + "org_block.unblocked", + "organization", + "organization.deleted", + "organization.member_added", + "organization.member_invited", + "organization.member_removed", + "organization.renamed", + "package", + "package.published", + "package.updated", + "page_build", + "ping", + "project", + "project.closed", + "project.created", + "project.deleted", + "project.edited", + "project.reopened", + "project_card", + "project_card.converted", + "project_card.created", + "project_card.deleted", + "project_card.edited", + "project_card.moved", + "project_column", + "project_column.created", + "project_column.deleted", + "project_column.edited", + "project_column.moved", + "projects_v2_item", + "projects_v2_item.archived", + "projects_v2_item.converted", + "projects_v2_item.created", + "projects_v2_item.deleted", + "projects_v2_item.edited", + "projects_v2_item.reordered", + "projects_v2_item.restored", + "public", + "pull_request", + "pull_request.assigned", + "pull_request.auto_merge_disabled", + "pull_request.auto_merge_enabled", + "pull_request.closed", + "pull_request.converted_to_draft", + "pull_request.dequeued", + "pull_request.edited", + "pull_request.labeled", + "pull_request.locked", + "pull_request.opened", + "pull_request.queued", + "pull_request.ready_for_review", + "pull_request.reopened", + "pull_request.review_request_removed", + "pull_request.review_requested", + "pull_request.synchronize", + "pull_request.unassigned", + "pull_request.unlabeled", + "pull_request.unlocked", + "pull_request_review", + "pull_request_review.dismissed", + "pull_request_review.edited", + "pull_request_review.submitted", + "pull_request_review_comment", + "pull_request_review_comment.created", + "pull_request_review_comment.deleted", + "pull_request_review_comment.edited", + "pull_request_review_thread", + "pull_request_review_thread.resolved", + "pull_request_review_thread.unresolved", + "push", + "release", + "release.created", + "release.deleted", + "release.edited", + "release.prereleased", + "release.published", + "release.released", + "release.unpublished", + "repository", + "repository.archived", + "repository.created", + "repository.deleted", + "repository.edited", + "repository.privatized", + "repository.publicized", + "repository.renamed", + "repository.transferred", + "repository.unarchived", + "repository_dispatch", + "repository_import", + "repository_vulnerability_alert", + "repository_vulnerability_alert.create", + "repository_vulnerability_alert.dismiss", + "repository_vulnerability_alert.reopen", + "repository_vulnerability_alert.resolve", + "secret_scanning_alert", + "secret_scanning_alert.created", + "secret_scanning_alert.reopened", + "secret_scanning_alert.resolved", + "security_advisory", + "security_advisory.performed", + "security_advisory.published", + "security_advisory.updated", + "security_advisory.withdrawn", + "sponsorship", + "sponsorship.cancelled", + "sponsorship.created", + "sponsorship.edited", + "sponsorship.pending_cancellation", + "sponsorship.pending_tier_change", + "sponsorship.tier_changed", + "star", + "star.created", + "star.deleted", + "status", + "team", + "team.added_to_repository", + "team.created", + "team.deleted", + "team.edited", + 
"team.removed_from_repository", + "team_add", + "watch", + "watch.started", + "workflow_dispatch", + "workflow_job", + "workflow_job.completed", + "workflow_job.in_progress", + "workflow_job.queued", + "workflow_run", + "workflow_run.completed", + "workflow_run.in_progress", + "workflow_run.requested", +]; diff --git a/node_modules/@octokit/webhooks/dist-src/index.js b/node_modules/@octokit/webhooks/dist-src/index.js new file mode 100644 index 0000000..6995206 --- /dev/null +++ b/node_modules/@octokit/webhooks/dist-src/index.js @@ -0,0 +1,30 @@ +import { createLogger } from "./createLogger"; +import { createEventHandler } from "./event-handler/index"; +import { sign } from "./sign"; +import { verify } from "./verify"; +import { verifyAndReceive } from "./verify-and-receive"; +export { createNodeMiddleware } from "./middleware/node/index"; +export { emitterEventNames } from "./generated/webhook-names"; +// U holds the return value of `transform` function in Options +class Webhooks { + constructor(options) { + if (!options || !options.secret) { + throw new Error("[@octokit/webhooks] options.secret required"); + } + const state = { + eventHandler: createEventHandler(options), + secret: options.secret, + hooks: {}, + log: createLogger(options.log), + }; + this.sign = sign.bind(null, options.secret); + this.verify = verify.bind(null, options.secret); + this.on = state.eventHandler.on; + this.onAny = state.eventHandler.onAny; + this.onError = state.eventHandler.onError; + this.removeListener = state.eventHandler.removeListener; + this.receive = state.eventHandler.receive; + this.verifyAndReceive = verifyAndReceive.bind(null, state); + } +} +export { createEventHandler, Webhooks, }; diff --git a/node_modules/@octokit/webhooks/dist-src/middleware/node/get-missing-headers.js b/node_modules/@octokit/webhooks/dist-src/middleware/node/get-missing-headers.js new file mode 100644 index 0000000..d2a2afc --- /dev/null +++ b/node_modules/@octokit/webhooks/dist-src/middleware/node/get-missing-headers.js @@ -0,0 +1,9 @@ +const WEBHOOK_HEADERS = [ + "x-github-event", + "x-hub-signature-256", + "x-github-delivery", +]; +// https://docs.github.com/en/developers/webhooks-and-events/webhook-events-and-payloads#delivery-headers +export function getMissingHeaders(request) { + return WEBHOOK_HEADERS.filter((header) => !(header in request.headers)); +} diff --git a/node_modules/@octokit/webhooks/dist-src/middleware/node/get-payload.js b/node_modules/@octokit/webhooks/dist-src/middleware/node/get-payload.js new file mode 100644 index 0000000..ddddd79 --- /dev/null +++ b/node_modules/@octokit/webhooks/dist-src/middleware/node/get-payload.js @@ -0,0 +1,25 @@ +// @ts-ignore to address #245 +import AggregateError from "aggregate-error"; +export function getPayload(request) { + // If request.body already exists we can stop here + // See https://github.com/octokit/webhooks.js/pull/23 + if (request.body) + return Promise.resolve(request.body); + return new Promise((resolve, reject) => { + let data = ""; + request.setEncoding("utf8"); + // istanbul ignore next + request.on("error", (error) => reject(new AggregateError([error]))); + request.on("data", (chunk) => (data += chunk)); + request.on("end", () => { + try { + resolve(JSON.parse(data)); + } + catch (error) { + error.message = "Invalid JSON"; + error.status = 400; + reject(new AggregateError([error])); + } + }); + }); +} diff --git a/node_modules/@octokit/webhooks/dist-src/middleware/node/index.js b/node_modules/@octokit/webhooks/dist-src/middleware/node/index.js new 
file mode 100644 index 0000000..fc17449 --- /dev/null +++ b/node_modules/@octokit/webhooks/dist-src/middleware/node/index.js @@ -0,0 +1,10 @@ +import { createLogger } from "../../createLogger"; +import { middleware } from "./middleware"; +import { onUnhandledRequestDefault } from "./on-unhandled-request-default"; +export function createNodeMiddleware(webhooks, { path = "/api/github/webhooks", onUnhandledRequest = onUnhandledRequestDefault, log = createLogger(), } = {}) { + return middleware.bind(null, webhooks, { + path, + onUnhandledRequest, + log, + }); +} diff --git a/node_modules/@octokit/webhooks/dist-src/middleware/node/middleware.js b/node_modules/@octokit/webhooks/dist-src/middleware/node/middleware.js new file mode 100644 index 0000000..26c99bb --- /dev/null +++ b/node_modules/@octokit/webhooks/dist-src/middleware/node/middleware.js @@ -0,0 +1,76 @@ +import { getMissingHeaders } from "./get-missing-headers"; +import { getPayload } from "./get-payload"; +export async function middleware(webhooks, options, request, response, next) { + let pathname; + try { + pathname = new URL(request.url, "http://localhost").pathname; + } + catch (error) { + response.writeHead(422, { + "content-type": "application/json", + }); + response.end(JSON.stringify({ + error: `Request URL could not be parsed: ${request.url}`, + })); + return; + } + const isUnknownRoute = request.method !== "POST" || pathname !== options.path; + const isExpressMiddleware = typeof next === "function"; + if (isUnknownRoute) { + if (isExpressMiddleware) { + return next(); + } + else { + return options.onUnhandledRequest(request, response); + } + } + const missingHeaders = getMissingHeaders(request).join(", "); + if (missingHeaders) { + response.writeHead(400, { + "content-type": "application/json", + }); + response.end(JSON.stringify({ + error: `Required headers missing: ${missingHeaders}`, + })); + return; + } + const eventName = request.headers["x-github-event"]; + const signatureSHA256 = request.headers["x-hub-signature-256"]; + const id = request.headers["x-github-delivery"]; + options.log.debug(`${eventName} event received (id: ${id})`); + // GitHub will abort the request if it does not receive a response within 10s + // See https://github.com/octokit/webhooks.js/issues/185 + let didTimeout = false; + const timeout = setTimeout(() => { + didTimeout = true; + response.statusCode = 202; + response.end("still processing\n"); + }, 9000).unref(); + try { + const payload = await getPayload(request); + await webhooks.verifyAndReceive({ + id: id, + name: eventName, + payload: payload, + signature: signatureSHA256, + }); + clearTimeout(timeout); + if (didTimeout) + return; + response.end("ok\n"); + } + catch (error) { + clearTimeout(timeout); + if (didTimeout) + return; + const err = Array.from(error)[0]; + const errorMessage = err.message + ? `${err.name}: ${err.message}` + : "Error: An Unspecified error occurred"; + response.statusCode = typeof err.status !== "undefined" ? 
err.status : 500; + options.log.error(error); + response.end(JSON.stringify({ + error: errorMessage, + })); + } +} diff --git a/node_modules/@octokit/webhooks/dist-src/middleware/node/on-unhandled-request-default.js b/node_modules/@octokit/webhooks/dist-src/middleware/node/on-unhandled-request-default.js new file mode 100644 index 0000000..bd1a464 --- /dev/null +++ b/node_modules/@octokit/webhooks/dist-src/middleware/node/on-unhandled-request-default.js @@ -0,0 +1,8 @@ +export function onUnhandledRequestDefault(request, response) { + response.writeHead(404, { + "content-type": "application/json", + }); + response.end(JSON.stringify({ + error: `Unknown route: ${request.method} ${request.url}`, + })); +} diff --git a/node_modules/@octokit/webhooks/dist-src/middleware/node/types.js b/node_modules/@octokit/webhooks/dist-src/middleware/node/types.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/node_modules/@octokit/webhooks/dist-src/middleware/node/types.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@octokit/webhooks/dist-src/sign.js b/node_modules/@octokit/webhooks/dist-src/sign.js new file mode 100644 index 0000000..ebd265e --- /dev/null +++ b/node_modules/@octokit/webhooks/dist-src/sign.js @@ -0,0 +1,5 @@ +import { sign as signMethod } from "@octokit/webhooks-methods"; +import { toNormalizedJsonString } from "./to-normalized-json-string"; +export async function sign(secret, payload) { + return signMethod(secret, typeof payload === "string" ? payload : toNormalizedJsonString(payload)); +} diff --git a/node_modules/@octokit/webhooks/dist-src/to-normalized-json-string.js b/node_modules/@octokit/webhooks/dist-src/to-normalized-json-string.js new file mode 100644 index 0000000..09413b5 --- /dev/null +++ b/node_modules/@octokit/webhooks/dist-src/to-normalized-json-string.js @@ -0,0 +1,9 @@ +/** + * GitHub sends its JSON with an indentation of 2 spaces and a line break at the end + */ +export function toNormalizedJsonString(payload) { + const payloadString = JSON.stringify(payload); + return payloadString.replace(/[^\\]\\u[\da-f]{4}/g, (s) => { + return s.substr(0, 3) + s.substr(3).toUpperCase(); + }); +} diff --git a/node_modules/@octokit/webhooks/dist-src/types.js b/node_modules/@octokit/webhooks/dist-src/types.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/node_modules/@octokit/webhooks/dist-src/types.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@octokit/webhooks/dist-src/verify-and-receive.js b/node_modules/@octokit/webhooks/dist-src/verify-and-receive.js new file mode 100644 index 0000000..07bdc59 --- /dev/null +++ b/node_modules/@octokit/webhooks/dist-src/verify-and-receive.js @@ -0,0 +1,19 @@ +import { verify } from "@octokit/webhooks-methods"; +import { toNormalizedJsonString } from "./to-normalized-json-string"; +export async function verifyAndReceive(state, event) { + // verify will validate that the secret is not undefined + const matchesSignature = await verify(state.secret, typeof event.payload === "object" + ? toNormalizedJsonString(event.payload) + : event.payload, event.signature); + if (!matchesSignature) { + const error = new Error("[@octokit/webhooks] signature does not match event payload and secret"); + return state.eventHandler.receive(Object.assign(error, { event, status: 400 })); + } + return state.eventHandler.receive({ + id: event.id, + name: event.name, + payload: typeof event.payload === "string" + ? 
JSON.parse(event.payload) + : event.payload, + }); +} diff --git a/node_modules/@octokit/webhooks/dist-src/verify.js b/node_modules/@octokit/webhooks/dist-src/verify.js new file mode 100644 index 0000000..f5679c5 --- /dev/null +++ b/node_modules/@octokit/webhooks/dist-src/verify.js @@ -0,0 +1,5 @@ +import { verify as verifyMethod } from "@octokit/webhooks-methods"; +import { toNormalizedJsonString } from "./to-normalized-json-string"; +export async function verify(secret, payload, signature) { + return verifyMethod(secret, typeof payload === "string" ? payload : toNormalizedJsonString(payload), signature); +} diff --git a/node_modules/@octokit/webhooks/dist-types/createLogger.d.ts b/node_modules/@octokit/webhooks/dist-types/createLogger.d.ts new file mode 100644 index 0000000..c0c97dc --- /dev/null +++ b/node_modules/@octokit/webhooks/dist-types/createLogger.d.ts @@ -0,0 +1,7 @@ +export interface Logger { + debug: (...data: any[]) => void; + info: (...data: any[]) => void; + warn: (...data: any[]) => void; + error: (...data: any[]) => void; +} +export declare const createLogger: (logger?: Partial) => Logger; diff --git a/node_modules/@octokit/webhooks/dist-types/event-handler/index.d.ts b/node_modules/@octokit/webhooks/dist-types/event-handler/index.d.ts new file mode 100644 index 0000000..82ebcfa --- /dev/null +++ b/node_modules/@octokit/webhooks/dist-types/event-handler/index.d.ts @@ -0,0 +1,10 @@ +import type { EmitterWebhookEvent, EmitterWebhookEventName, HandlerFunction, Options, WebhookEventHandlerError } from "../types"; +interface EventHandler { + on(event: E | E[], callback: HandlerFunction): void; + onAny(handler: (event: EmitterWebhookEvent) => any): void; + onError(handler: (event: WebhookEventHandlerError) => any): void; + removeListener(event: E | E[], callback: HandlerFunction): void; + receive(event: EmitterWebhookEvent): Promise; +} +export declare function createEventHandler(options: Options): EventHandler; +export {}; diff --git a/node_modules/@octokit/webhooks/dist-types/event-handler/on.d.ts b/node_modules/@octokit/webhooks/dist-types/event-handler/on.d.ts new file mode 100644 index 0000000..f8439f8 --- /dev/null +++ b/node_modules/@octokit/webhooks/dist-types/event-handler/on.d.ts @@ -0,0 +1,4 @@ +import { EmitterWebhookEvent, EmitterWebhookEventName, State, WebhookEventHandlerError } from "../types"; +export declare function receiverOn(state: State, webhookNameOrNames: EmitterWebhookEventName | EmitterWebhookEventName[], handler: Function): void; +export declare function receiverOnAny(state: State, handler: (event: EmitterWebhookEvent) => any): void; +export declare function receiverOnError(state: State, handler: (event: WebhookEventHandlerError) => any): void; diff --git a/node_modules/@octokit/webhooks/dist-types/event-handler/receive.d.ts b/node_modules/@octokit/webhooks/dist-types/event-handler/receive.d.ts new file mode 100644 index 0000000..1894090 --- /dev/null +++ b/node_modules/@octokit/webhooks/dist-types/event-handler/receive.d.ts @@ -0,0 +1,2 @@ +import type { EmitterWebhookEvent, State } from "../types"; +export declare function receiverHandle(state: State, event: EmitterWebhookEvent): Promise; diff --git a/node_modules/@octokit/webhooks/dist-types/event-handler/remove-listener.d.ts b/node_modules/@octokit/webhooks/dist-types/event-handler/remove-listener.d.ts new file mode 100644 index 0000000..1c59a99 --- /dev/null +++ b/node_modules/@octokit/webhooks/dist-types/event-handler/remove-listener.d.ts @@ -0,0 +1,2 @@ +import { EmitterWebhookEventName, State 
} from "../types"; +export declare function removeListener(state: State, webhookNameOrNames: "*" | EmitterWebhookEventName | EmitterWebhookEventName[], handler: Function): void; diff --git a/node_modules/@octokit/webhooks/dist-types/event-handler/wrap-error-handler.d.ts b/node_modules/@octokit/webhooks/dist-types/event-handler/wrap-error-handler.d.ts new file mode 100644 index 0000000..1abd902 --- /dev/null +++ b/node_modules/@octokit/webhooks/dist-types/event-handler/wrap-error-handler.d.ts @@ -0,0 +1 @@ +export declare function wrapErrorHandler(handler: Function, error: Error): void; diff --git a/node_modules/@octokit/webhooks/dist-types/generated/webhook-names.d.ts b/node_modules/@octokit/webhooks/dist-types/generated/webhook-names.d.ts new file mode 100644 index 0000000..7b8489f --- /dev/null +++ b/node_modules/@octokit/webhooks/dist-types/generated/webhook-names.d.ts @@ -0,0 +1 @@ +export declare const emitterEventNames: readonly ["branch_protection_rule", "branch_protection_rule.created", "branch_protection_rule.deleted", "branch_protection_rule.edited", "check_run", "check_run.completed", "check_run.created", "check_run.requested_action", "check_run.rerequested", "check_suite", "check_suite.completed", "check_suite.requested", "check_suite.rerequested", "code_scanning_alert", "code_scanning_alert.appeared_in_branch", "code_scanning_alert.closed_by_user", "code_scanning_alert.created", "code_scanning_alert.fixed", "code_scanning_alert.reopened", "code_scanning_alert.reopened_by_user", "commit_comment", "commit_comment.created", "create", "delete", "dependabot_alert", "dependabot_alert.created", "dependabot_alert.dismissed", "dependabot_alert.fixed", "dependabot_alert.reintroduced", "dependabot_alert.reopened", "deploy_key", "deploy_key.created", "deploy_key.deleted", "deployment", "deployment.created", "deployment_status", "deployment_status.created", "discussion", "discussion.answered", "discussion.category_changed", "discussion.created", "discussion.deleted", "discussion.edited", "discussion.labeled", "discussion.locked", "discussion.pinned", "discussion.transferred", "discussion.unanswered", "discussion.unlabeled", "discussion.unlocked", "discussion.unpinned", "discussion_comment", "discussion_comment.created", "discussion_comment.deleted", "discussion_comment.edited", "fork", "github_app_authorization", "github_app_authorization.revoked", "gollum", "installation", "installation.created", "installation.deleted", "installation.new_permissions_accepted", "installation.suspend", "installation.unsuspend", "installation_repositories", "installation_repositories.added", "installation_repositories.removed", "issue_comment", "issue_comment.created", "issue_comment.deleted", "issue_comment.edited", "issues", "issues.assigned", "issues.closed", "issues.deleted", "issues.demilestoned", "issues.edited", "issues.labeled", "issues.locked", "issues.milestoned", "issues.opened", "issues.pinned", "issues.reopened", "issues.transferred", "issues.unassigned", "issues.unlabeled", "issues.unlocked", "issues.unpinned", "label", "label.created", "label.deleted", "label.edited", "marketplace_purchase", "marketplace_purchase.cancelled", "marketplace_purchase.changed", "marketplace_purchase.pending_change", "marketplace_purchase.pending_change_cancelled", "marketplace_purchase.purchased", "member", "member.added", "member.edited", "member.removed", "membership", "membership.added", "membership.removed", "merge_group", "merge_group.checks_requested", "meta", "meta.deleted", "milestone", "milestone.closed", 
"milestone.created", "milestone.deleted", "milestone.edited", "milestone.opened", "org_block", "org_block.blocked", "org_block.unblocked", "organization", "organization.deleted", "organization.member_added", "organization.member_invited", "organization.member_removed", "organization.renamed", "package", "package.published", "package.updated", "page_build", "ping", "project", "project.closed", "project.created", "project.deleted", "project.edited", "project.reopened", "project_card", "project_card.converted", "project_card.created", "project_card.deleted", "project_card.edited", "project_card.moved", "project_column", "project_column.created", "project_column.deleted", "project_column.edited", "project_column.moved", "projects_v2_item", "projects_v2_item.archived", "projects_v2_item.converted", "projects_v2_item.created", "projects_v2_item.deleted", "projects_v2_item.edited", "projects_v2_item.reordered", "projects_v2_item.restored", "public", "pull_request", "pull_request.assigned", "pull_request.auto_merge_disabled", "pull_request.auto_merge_enabled", "pull_request.closed", "pull_request.converted_to_draft", "pull_request.dequeued", "pull_request.edited", "pull_request.labeled", "pull_request.locked", "pull_request.opened", "pull_request.queued", "pull_request.ready_for_review", "pull_request.reopened", "pull_request.review_request_removed", "pull_request.review_requested", "pull_request.synchronize", "pull_request.unassigned", "pull_request.unlabeled", "pull_request.unlocked", "pull_request_review", "pull_request_review.dismissed", "pull_request_review.edited", "pull_request_review.submitted", "pull_request_review_comment", "pull_request_review_comment.created", "pull_request_review_comment.deleted", "pull_request_review_comment.edited", "pull_request_review_thread", "pull_request_review_thread.resolved", "pull_request_review_thread.unresolved", "push", "release", "release.created", "release.deleted", "release.edited", "release.prereleased", "release.published", "release.released", "release.unpublished", "repository", "repository.archived", "repository.created", "repository.deleted", "repository.edited", "repository.privatized", "repository.publicized", "repository.renamed", "repository.transferred", "repository.unarchived", "repository_dispatch", "repository_import", "repository_vulnerability_alert", "repository_vulnerability_alert.create", "repository_vulnerability_alert.dismiss", "repository_vulnerability_alert.reopen", "repository_vulnerability_alert.resolve", "secret_scanning_alert", "secret_scanning_alert.created", "secret_scanning_alert.reopened", "secret_scanning_alert.resolved", "security_advisory", "security_advisory.performed", "security_advisory.published", "security_advisory.updated", "security_advisory.withdrawn", "sponsorship", "sponsorship.cancelled", "sponsorship.created", "sponsorship.edited", "sponsorship.pending_cancellation", "sponsorship.pending_tier_change", "sponsorship.tier_changed", "star", "star.created", "star.deleted", "status", "team", "team.added_to_repository", "team.created", "team.deleted", "team.edited", "team.removed_from_repository", "team_add", "watch", "watch.started", "workflow_dispatch", "workflow_job", "workflow_job.completed", "workflow_job.in_progress", "workflow_job.queued", "workflow_run", "workflow_run.completed", "workflow_run.in_progress", "workflow_run.requested"]; diff --git a/node_modules/@octokit/webhooks/dist-types/index.d.ts b/node_modules/@octokit/webhooks/dist-types/index.d.ts new file mode 100644 index 0000000..e1fbb19 --- 
/dev/null +++ b/node_modules/@octokit/webhooks/dist-types/index.d.ts @@ -0,0 +1,18 @@ +import { createEventHandler } from "./event-handler/index"; +import { EmitterWebhookEvent, EmitterWebhookEventName, HandlerFunction, RemoveHandlerFunction, Options, WebhookError, WebhookEventHandlerError, EmitterWebhookEventWithStringPayloadAndSignature, EmitterWebhookEventWithSignature } from "./types"; +export { createNodeMiddleware } from "./middleware/node/index"; +export { emitterEventNames } from "./generated/webhook-names"; +declare class Webhooks { + sign: (payload: string | object) => Promise; + verify: (eventPayload: string | object, signature: string) => Promise; + on: (event: E | E[], callback: HandlerFunction) => void; + onAny: (callback: (event: EmitterWebhookEvent) => any) => void; + onError: (callback: (event: WebhookEventHandlerError) => any) => void; + removeListener: (event: E | E[], callback: RemoveHandlerFunction) => void; + receive: (event: EmitterWebhookEvent) => Promise; + verifyAndReceive: (options: EmitterWebhookEventWithStringPayloadAndSignature | EmitterWebhookEventWithSignature) => Promise; + constructor(options: Options & { + secret: string; + }); +} +export { createEventHandler, Webhooks, EmitterWebhookEvent, EmitterWebhookEventName, WebhookError, }; diff --git a/node_modules/@octokit/webhooks/dist-types/middleware/node/get-missing-headers.d.ts b/node_modules/@octokit/webhooks/dist-types/middleware/node/get-missing-headers.d.ts new file mode 100644 index 0000000..7ef973a --- /dev/null +++ b/node_modules/@octokit/webhooks/dist-types/middleware/node/get-missing-headers.d.ts @@ -0,0 +1,3 @@ +declare type IncomingMessage = any; +export declare function getMissingHeaders(request: IncomingMessage): string[]; +export {}; diff --git a/node_modules/@octokit/webhooks/dist-types/middleware/node/get-payload.d.ts b/node_modules/@octokit/webhooks/dist-types/middleware/node/get-payload.d.ts new file mode 100644 index 0000000..88e2be7 --- /dev/null +++ b/node_modules/@octokit/webhooks/dist-types/middleware/node/get-payload.d.ts @@ -0,0 +1,4 @@ +import { WebhookEvent } from "@octokit/webhooks-types"; +declare type IncomingMessage = any; +export declare function getPayload(request: IncomingMessage): Promise; +export {}; diff --git a/node_modules/@octokit/webhooks/dist-types/middleware/node/index.d.ts b/node_modules/@octokit/webhooks/dist-types/middleware/node/index.d.ts new file mode 100644 index 0000000..88cae83 --- /dev/null +++ b/node_modules/@octokit/webhooks/dist-types/middleware/node/index.d.ts @@ -0,0 +1,3 @@ +import { Webhooks } from "../../index"; +import { MiddlewareOptions } from "./types"; +export declare function createNodeMiddleware(webhooks: Webhooks, { path, onUnhandledRequest, log, }?: MiddlewareOptions): (request: any, response: any, next?: Function | undefined) => Promise; diff --git a/node_modules/@octokit/webhooks/dist-types/middleware/node/middleware.d.ts b/node_modules/@octokit/webhooks/dist-types/middleware/node/middleware.d.ts new file mode 100644 index 0000000..b1b2030 --- /dev/null +++ b/node_modules/@octokit/webhooks/dist-types/middleware/node/middleware.d.ts @@ -0,0 +1,6 @@ +declare type IncomingMessage = any; +declare type ServerResponse = any; +import { Webhooks } from "../../index"; +import { MiddlewareOptions } from "./types"; +export declare function middleware(webhooks: Webhooks, options: Required, request: IncomingMessage, response: ServerResponse, next?: Function): Promise; +export {}; diff --git 
a/node_modules/@octokit/webhooks/dist-types/middleware/node/on-unhandled-request-default.d.ts b/node_modules/@octokit/webhooks/dist-types/middleware/node/on-unhandled-request-default.d.ts new file mode 100644 index 0000000..07a9cf6 --- /dev/null +++ b/node_modules/@octokit/webhooks/dist-types/middleware/node/on-unhandled-request-default.d.ts @@ -0,0 +1,4 @@ +declare type IncomingMessage = any; +declare type ServerResponse = any; +export declare function onUnhandledRequestDefault(request: IncomingMessage, response: ServerResponse): void; +export {}; diff --git a/node_modules/@octokit/webhooks/dist-types/middleware/node/types.d.ts b/node_modules/@octokit/webhooks/dist-types/middleware/node/types.d.ts new file mode 100644 index 0000000..2be04cb --- /dev/null +++ b/node_modules/@octokit/webhooks/dist-types/middleware/node/types.d.ts @@ -0,0 +1,9 @@ +declare type IncomingMessage = any; +declare type ServerResponse = any; +import { Logger } from "../../createLogger"; +export declare type MiddlewareOptions = { + path?: string; + log?: Logger; + onUnhandledRequest?: (request: IncomingMessage, response: ServerResponse) => void; +}; +export {}; diff --git a/node_modules/@octokit/webhooks/dist-types/sign.d.ts b/node_modules/@octokit/webhooks/dist-types/sign.d.ts new file mode 100644 index 0000000..14979a7 --- /dev/null +++ b/node_modules/@octokit/webhooks/dist-types/sign.d.ts @@ -0,0 +1 @@ +export declare function sign(secret: string, payload: string | object): Promise; diff --git a/node_modules/@octokit/webhooks/dist-types/to-normalized-json-string.d.ts b/node_modules/@octokit/webhooks/dist-types/to-normalized-json-string.d.ts new file mode 100644 index 0000000..e2103fc --- /dev/null +++ b/node_modules/@octokit/webhooks/dist-types/to-normalized-json-string.d.ts @@ -0,0 +1,4 @@ +/** + * GitHub sends its JSON with an indentation of 2 spaces and a line break at the end + */ +export declare function toNormalizedJsonString(payload: object): string; diff --git a/node_modules/@octokit/webhooks/dist-types/types.d.ts b/node_modules/@octokit/webhooks/dist-types/types.d.ts new file mode 100644 index 0000000..a689619 --- /dev/null +++ b/node_modules/@octokit/webhooks/dist-types/types.d.ts @@ -0,0 +1,62 @@ +import { RequestError } from "@octokit/request-error"; +import type { WebhookEventMap, WebhookEventName } from "@octokit/webhooks-types"; +import { Logger } from "./createLogger"; +import type { emitterEventNames } from "./generated/webhook-names"; +export declare type EmitterWebhookEventName = typeof emitterEventNames[number]; +export declare type EmitterWebhookEvent = TEmitterEvent extends `${infer TWebhookEvent}.${infer TAction}` ? 
BaseWebhookEvent> & { + payload: { + action: TAction; + }; +} : BaseWebhookEvent>; +export declare type EmitterWebhookEventWithStringPayloadAndSignature = { + id: string; + name: EmitterWebhookEventName; + payload: string; + signature: string; +}; +export declare type EmitterWebhookEventWithSignature = EmitterWebhookEvent & { + signature: string; +}; +interface BaseWebhookEvent { + id: string; + name: TName; + payload: WebhookEventMap[TName]; +} +export interface Options { + secret?: string; + transform?: TransformMethod; + log?: Partial; +} +declare type TransformMethod = (event: EmitterWebhookEvent) => T | PromiseLike; +export declare type HandlerFunction = (event: EmitterWebhookEvent & TTransformed) => any; +export declare type RemoveHandlerFunction = (event: EmitterWebhookEvent> & TTransformed) => any; +declare type Hooks = { + [key: string]: Function[]; +}; +export interface State extends Options { + eventHandler?: any; + hooks: Hooks; + log: Logger; +} +/** + * Error object with optional properties coming from `octokit.request` errors + */ +export declare type WebhookError = Error & Partial; +export interface WebhookEventHandlerError extends AggregateError { + event: EmitterWebhookEvent; +} +/** + * Workaround for TypeScript incompatibility with types exported by aggregate-error. + * Credit: https://git.io/JUEEr + * @copyright Sindre Sorhus + * @license MIT (https://git.io/JUEEK) + * @see https://github.com/octokit/webhooks.js/pull/270/files + */ +declare class AggregateError extends Error implements Iterable { + readonly name: "AggregateError"; + constructor(errors: ReadonlyArray); + [Symbol.iterator](): IterableIterator; +} +export {}; diff --git a/node_modules/@octokit/webhooks/dist-types/verify-and-receive.d.ts b/node_modules/@octokit/webhooks/dist-types/verify-and-receive.d.ts new file mode 100644 index 0000000..ecaecc1 --- /dev/null +++ b/node_modules/@octokit/webhooks/dist-types/verify-and-receive.d.ts @@ -0,0 +1,4 @@ +import { EmitterWebhookEventWithStringPayloadAndSignature, EmitterWebhookEventWithSignature, State } from "./types"; +export declare function verifyAndReceive(state: State & { + secret: string; +}, event: EmitterWebhookEventWithStringPayloadAndSignature | EmitterWebhookEventWithSignature): Promise; diff --git a/node_modules/@octokit/webhooks/dist-types/verify.d.ts b/node_modules/@octokit/webhooks/dist-types/verify.d.ts new file mode 100644 index 0000000..bd16e9c --- /dev/null +++ b/node_modules/@octokit/webhooks/dist-types/verify.d.ts @@ -0,0 +1 @@ +export declare function verify(secret: string, payload: string | object, signature: string): Promise; diff --git a/node_modules/@octokit/webhooks/dist-web/index.js b/node_modules/@octokit/webhooks/dist-web/index.js new file mode 100644 index 0000000..d2101de --- /dev/null +++ b/node_modules/@octokit/webhooks/dist-web/index.js @@ -0,0 +1,594 @@ +import AggregateError from 'aggregate-error'; +import { sign as sign$1, verify as verify$1 } from '@octokit/webhooks-methods'; + +const createLogger = (logger) => ({ + debug: () => { }, + info: () => { }, + warn: console.warn.bind(console), + error: console.error.bind(console), + ...logger, +}); + +// THIS FILE IS GENERATED - DO NOT EDIT DIRECTLY +// make edits in scripts/generate-types.ts +const emitterEventNames = [ + "branch_protection_rule", + "branch_protection_rule.created", + "branch_protection_rule.deleted", + "branch_protection_rule.edited", + "check_run", + "check_run.completed", + "check_run.created", + "check_run.requested_action", + "check_run.rerequested", + 
"check_suite", + "check_suite.completed", + "check_suite.requested", + "check_suite.rerequested", + "code_scanning_alert", + "code_scanning_alert.appeared_in_branch", + "code_scanning_alert.closed_by_user", + "code_scanning_alert.created", + "code_scanning_alert.fixed", + "code_scanning_alert.reopened", + "code_scanning_alert.reopened_by_user", + "commit_comment", + "commit_comment.created", + "create", + "delete", + "dependabot_alert", + "dependabot_alert.created", + "dependabot_alert.dismissed", + "dependabot_alert.fixed", + "dependabot_alert.reintroduced", + "dependabot_alert.reopened", + "deploy_key", + "deploy_key.created", + "deploy_key.deleted", + "deployment", + "deployment.created", + "deployment_status", + "deployment_status.created", + "discussion", + "discussion.answered", + "discussion.category_changed", + "discussion.created", + "discussion.deleted", + "discussion.edited", + "discussion.labeled", + "discussion.locked", + "discussion.pinned", + "discussion.transferred", + "discussion.unanswered", + "discussion.unlabeled", + "discussion.unlocked", + "discussion.unpinned", + "discussion_comment", + "discussion_comment.created", + "discussion_comment.deleted", + "discussion_comment.edited", + "fork", + "github_app_authorization", + "github_app_authorization.revoked", + "gollum", + "installation", + "installation.created", + "installation.deleted", + "installation.new_permissions_accepted", + "installation.suspend", + "installation.unsuspend", + "installation_repositories", + "installation_repositories.added", + "installation_repositories.removed", + "issue_comment", + "issue_comment.created", + "issue_comment.deleted", + "issue_comment.edited", + "issues", + "issues.assigned", + "issues.closed", + "issues.deleted", + "issues.demilestoned", + "issues.edited", + "issues.labeled", + "issues.locked", + "issues.milestoned", + "issues.opened", + "issues.pinned", + "issues.reopened", + "issues.transferred", + "issues.unassigned", + "issues.unlabeled", + "issues.unlocked", + "issues.unpinned", + "label", + "label.created", + "label.deleted", + "label.edited", + "marketplace_purchase", + "marketplace_purchase.cancelled", + "marketplace_purchase.changed", + "marketplace_purchase.pending_change", + "marketplace_purchase.pending_change_cancelled", + "marketplace_purchase.purchased", + "member", + "member.added", + "member.edited", + "member.removed", + "membership", + "membership.added", + "membership.removed", + "merge_group", + "merge_group.checks_requested", + "meta", + "meta.deleted", + "milestone", + "milestone.closed", + "milestone.created", + "milestone.deleted", + "milestone.edited", + "milestone.opened", + "org_block", + "org_block.blocked", + "org_block.unblocked", + "organization", + "organization.deleted", + "organization.member_added", + "organization.member_invited", + "organization.member_removed", + "organization.renamed", + "package", + "package.published", + "package.updated", + "page_build", + "ping", + "project", + "project.closed", + "project.created", + "project.deleted", + "project.edited", + "project.reopened", + "project_card", + "project_card.converted", + "project_card.created", + "project_card.deleted", + "project_card.edited", + "project_card.moved", + "project_column", + "project_column.created", + "project_column.deleted", + "project_column.edited", + "project_column.moved", + "projects_v2_item", + "projects_v2_item.archived", + "projects_v2_item.converted", + "projects_v2_item.created", + "projects_v2_item.deleted", + "projects_v2_item.edited", + 
"projects_v2_item.reordered", + "projects_v2_item.restored", + "public", + "pull_request", + "pull_request.assigned", + "pull_request.auto_merge_disabled", + "pull_request.auto_merge_enabled", + "pull_request.closed", + "pull_request.converted_to_draft", + "pull_request.dequeued", + "pull_request.edited", + "pull_request.labeled", + "pull_request.locked", + "pull_request.opened", + "pull_request.queued", + "pull_request.ready_for_review", + "pull_request.reopened", + "pull_request.review_request_removed", + "pull_request.review_requested", + "pull_request.synchronize", + "pull_request.unassigned", + "pull_request.unlabeled", + "pull_request.unlocked", + "pull_request_review", + "pull_request_review.dismissed", + "pull_request_review.edited", + "pull_request_review.submitted", + "pull_request_review_comment", + "pull_request_review_comment.created", + "pull_request_review_comment.deleted", + "pull_request_review_comment.edited", + "pull_request_review_thread", + "pull_request_review_thread.resolved", + "pull_request_review_thread.unresolved", + "push", + "release", + "release.created", + "release.deleted", + "release.edited", + "release.prereleased", + "release.published", + "release.released", + "release.unpublished", + "repository", + "repository.archived", + "repository.created", + "repository.deleted", + "repository.edited", + "repository.privatized", + "repository.publicized", + "repository.renamed", + "repository.transferred", + "repository.unarchived", + "repository_dispatch", + "repository_import", + "repository_vulnerability_alert", + "repository_vulnerability_alert.create", + "repository_vulnerability_alert.dismiss", + "repository_vulnerability_alert.reopen", + "repository_vulnerability_alert.resolve", + "secret_scanning_alert", + "secret_scanning_alert.created", + "secret_scanning_alert.reopened", + "secret_scanning_alert.resolved", + "security_advisory", + "security_advisory.performed", + "security_advisory.published", + "security_advisory.updated", + "security_advisory.withdrawn", + "sponsorship", + "sponsorship.cancelled", + "sponsorship.created", + "sponsorship.edited", + "sponsorship.pending_cancellation", + "sponsorship.pending_tier_change", + "sponsorship.tier_changed", + "star", + "star.created", + "star.deleted", + "status", + "team", + "team.added_to_repository", + "team.created", + "team.deleted", + "team.edited", + "team.removed_from_repository", + "team_add", + "watch", + "watch.started", + "workflow_dispatch", + "workflow_job", + "workflow_job.completed", + "workflow_job.in_progress", + "workflow_job.queued", + "workflow_run", + "workflow_run.completed", + "workflow_run.in_progress", + "workflow_run.requested", +]; + +function handleEventHandlers(state, webhookName, handler) { + if (!state.hooks[webhookName]) { + state.hooks[webhookName] = []; + } + state.hooks[webhookName].push(handler); +} +function receiverOn(state, webhookNameOrNames, handler) { + if (Array.isArray(webhookNameOrNames)) { + webhookNameOrNames.forEach((webhookName) => receiverOn(state, webhookName, handler)); + return; + } + if (["*", "error"].includes(webhookNameOrNames)) { + const webhookName = webhookNameOrNames === "*" ? "any" : webhookNameOrNames; + const message = `Using the "${webhookNameOrNames}" event with the regular Webhooks.on() function is not supported. 
Please use the Webhooks.on${webhookName.charAt(0).toUpperCase() + webhookName.slice(1)}() method instead`; + throw new Error(message); + } + if (!emitterEventNames.includes(webhookNameOrNames)) { + state.log.warn(`"${webhookNameOrNames}" is not a known webhook name (https://developer.github.com/v3/activity/events/types/)`); + } + handleEventHandlers(state, webhookNameOrNames, handler); +} +function receiverOnAny(state, handler) { + handleEventHandlers(state, "*", handler); +} +function receiverOnError(state, handler) { + handleEventHandlers(state, "error", handler); +} + +// Errors thrown or rejected Promises in "error" event handlers are not handled +// as they are in the webhook event handlers. If errors occur, we log a +// "Fatal: Error occurred" message to stdout +function wrapErrorHandler(handler, error) { + let returnValue; + try { + returnValue = handler(error); + } + catch (error) { + console.log('FATAL: Error occurred in "error" event handler'); + console.log(error); + } + if (returnValue && returnValue.catch) { + returnValue.catch((error) => { + console.log('FATAL: Error occurred in "error" event handler'); + console.log(error); + }); + } +} + +// @ts-ignore to address #245 +function getHooks(state, eventPayloadAction, eventName) { + const hooks = [state.hooks[eventName], state.hooks["*"]]; + if (eventPayloadAction) { + hooks.unshift(state.hooks[`${eventName}.${eventPayloadAction}`]); + } + return [].concat(...hooks.filter(Boolean)); +} +// main handler function +function receiverHandle(state, event) { + const errorHandlers = state.hooks.error || []; + if (event instanceof Error) { + const error = Object.assign(new AggregateError([event]), { + event, + errors: [event], + }); + errorHandlers.forEach((handler) => wrapErrorHandler(handler, error)); + return Promise.reject(error); + } + if (!event || !event.name) { + throw new AggregateError(["Event name not passed"]); + } + if (!event.payload) { + throw new AggregateError(["Event payload not passed"]); + } + // flatten arrays of event listeners and remove undefined values + const hooks = getHooks(state, "action" in event.payload ? 
event.payload.action : null, event.name); + if (hooks.length === 0) { + return Promise.resolve(); + } + const errors = []; + const promises = hooks.map((handler) => { + let promise = Promise.resolve(event); + if (state.transform) { + promise = promise.then(state.transform); + } + return promise + .then((event) => { + return handler(event); + }) + .catch((error) => errors.push(Object.assign(error, { event }))); + }); + return Promise.all(promises).then(() => { + if (errors.length === 0) { + return; + } + const error = new AggregateError(errors); + Object.assign(error, { + event, + errors, + }); + errorHandlers.forEach((handler) => wrapErrorHandler(handler, error)); + throw error; + }); +} + +function removeListener(state, webhookNameOrNames, handler) { + if (Array.isArray(webhookNameOrNames)) { + webhookNameOrNames.forEach((webhookName) => removeListener(state, webhookName, handler)); + return; + } + if (!state.hooks[webhookNameOrNames]) { + return; + } + // remove last hook that has been added, that way + // it behaves the same as removeListener + for (let i = state.hooks[webhookNameOrNames].length - 1; i >= 0; i--) { + if (state.hooks[webhookNameOrNames][i] === handler) { + state.hooks[webhookNameOrNames].splice(i, 1); + return; + } + } +} + +function createEventHandler(options) { + const state = { + hooks: {}, + log: createLogger(options && options.log), + }; + if (options && options.transform) { + state.transform = options.transform; + } + return { + on: receiverOn.bind(null, state), + onAny: receiverOnAny.bind(null, state), + onError: receiverOnError.bind(null, state), + removeListener: removeListener.bind(null, state), + receive: receiverHandle.bind(null, state), + }; +} + +/** + * GitHub sends its JSON with an indentation of 2 spaces and a line break at the end + */ +function toNormalizedJsonString(payload) { + const payloadString = JSON.stringify(payload); + return payloadString.replace(/[^\\]\\u[\da-f]{4}/g, (s) => { + return s.substr(0, 3) + s.substr(3).toUpperCase(); + }); +} + +async function sign(secret, payload) { + return sign$1(secret, typeof payload === "string" ? payload : toNormalizedJsonString(payload)); +} + +async function verify(secret, payload, signature) { + return verify$1(secret, typeof payload === "string" ? payload : toNormalizedJsonString(payload), signature); +} + +async function verifyAndReceive(state, event) { + // verify will validate that the secret is not undefined + const matchesSignature = await verify$1(state.secret, typeof event.payload === "object" + ? toNormalizedJsonString(event.payload) + : event.payload, event.signature); + if (!matchesSignature) { + const error = new Error("[@octokit/webhooks] signature does not match event payload and secret"); + return state.eventHandler.receive(Object.assign(error, { event, status: 400 })); + } + return state.eventHandler.receive({ + id: event.id, + name: event.name, + payload: typeof event.payload === "string" + ? 
JSON.parse(event.payload) + : event.payload, + }); +} + +const WEBHOOK_HEADERS = [ + "x-github-event", + "x-hub-signature-256", + "x-github-delivery", +]; +// https://docs.github.com/en/developers/webhooks-and-events/webhook-events-and-payloads#delivery-headers +function getMissingHeaders(request) { + return WEBHOOK_HEADERS.filter((header) => !(header in request.headers)); +} + +// @ts-ignore to address #245 +function getPayload(request) { + // If request.body already exists we can stop here + // See https://github.com/octokit/webhooks.js/pull/23 + if (request.body) + return Promise.resolve(request.body); + return new Promise((resolve, reject) => { + let data = ""; + request.setEncoding("utf8"); + // istanbul ignore next + request.on("error", (error) => reject(new AggregateError([error]))); + request.on("data", (chunk) => (data += chunk)); + request.on("end", () => { + try { + resolve(JSON.parse(data)); + } + catch (error) { + error.message = "Invalid JSON"; + error.status = 400; + reject(new AggregateError([error])); + } + }); + }); +} + +async function middleware(webhooks, options, request, response, next) { + let pathname; + try { + pathname = new URL(request.url, "http://localhost").pathname; + } + catch (error) { + response.writeHead(422, { + "content-type": "application/json", + }); + response.end(JSON.stringify({ + error: `Request URL could not be parsed: ${request.url}`, + })); + return; + } + const isUnknownRoute = request.method !== "POST" || pathname !== options.path; + const isExpressMiddleware = typeof next === "function"; + if (isUnknownRoute) { + if (isExpressMiddleware) { + return next(); + } + else { + return options.onUnhandledRequest(request, response); + } + } + const missingHeaders = getMissingHeaders(request).join(", "); + if (missingHeaders) { + response.writeHead(400, { + "content-type": "application/json", + }); + response.end(JSON.stringify({ + error: `Required headers missing: ${missingHeaders}`, + })); + return; + } + const eventName = request.headers["x-github-event"]; + const signatureSHA256 = request.headers["x-hub-signature-256"]; + const id = request.headers["x-github-delivery"]; + options.log.debug(`${eventName} event received (id: ${id})`); + // GitHub will abort the request if it does not receive a response within 10s + // See https://github.com/octokit/webhooks.js/issues/185 + let didTimeout = false; + const timeout = setTimeout(() => { + didTimeout = true; + response.statusCode = 202; + response.end("still processing\n"); + }, 9000).unref(); + try { + const payload = await getPayload(request); + await webhooks.verifyAndReceive({ + id: id, + name: eventName, + payload: payload, + signature: signatureSHA256, + }); + clearTimeout(timeout); + if (didTimeout) + return; + response.end("ok\n"); + } + catch (error) { + clearTimeout(timeout); + if (didTimeout) + return; + const err = Array.from(error)[0]; + const errorMessage = err.message + ? `${err.name}: ${err.message}` + : "Error: An Unspecified error occurred"; + response.statusCode = typeof err.status !== "undefined" ? 
err.status : 500; + options.log.error(error); + response.end(JSON.stringify({ + error: errorMessage, + })); + } +} + +function onUnhandledRequestDefault(request, response) { + response.writeHead(404, { + "content-type": "application/json", + }); + response.end(JSON.stringify({ + error: `Unknown route: ${request.method} ${request.url}`, + })); +} + +function createNodeMiddleware(webhooks, { path = "/api/github/webhooks", onUnhandledRequest = onUnhandledRequestDefault, log = createLogger(), } = {}) { + return middleware.bind(null, webhooks, { + path, + onUnhandledRequest, + log, + }); +} + +// U holds the return value of `transform` function in Options +class Webhooks { + constructor(options) { + if (!options || !options.secret) { + throw new Error("[@octokit/webhooks] options.secret required"); + } + const state = { + eventHandler: createEventHandler(options), + secret: options.secret, + hooks: {}, + log: createLogger(options.log), + }; + this.sign = sign.bind(null, options.secret); + this.verify = verify.bind(null, options.secret); + this.on = state.eventHandler.on; + this.onAny = state.eventHandler.onAny; + this.onError = state.eventHandler.onError; + this.removeListener = state.eventHandler.removeListener; + this.receive = state.eventHandler.receive; + this.verifyAndReceive = verifyAndReceive.bind(null, state); + } +} + +export { Webhooks, createEventHandler, createNodeMiddleware, emitterEventNames }; +//# sourceMappingURL=index.js.map diff --git a/node_modules/@octokit/webhooks/dist-web/index.js.map b/node_modules/@octokit/webhooks/dist-web/index.js.map new file mode 100644 index 0000000..e87c7ae --- /dev/null +++ b/node_modules/@octokit/webhooks/dist-web/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sources":["../dist-src/createLogger.js","../dist-src/generated/webhook-names.js","../dist-src/event-handler/on.js","../dist-src/event-handler/wrap-error-handler.js","../dist-src/event-handler/receive.js","../dist-src/event-handler/remove-listener.js","../dist-src/event-handler/index.js","../dist-src/to-normalized-json-string.js","../dist-src/sign.js","../dist-src/verify.js","../dist-src/verify-and-receive.js","../dist-src/middleware/node/get-missing-headers.js","../dist-src/middleware/node/get-payload.js","../dist-src/middleware/node/middleware.js","../dist-src/middleware/node/on-unhandled-request-default.js","../dist-src/middleware/node/index.js","../dist-src/index.js"],"sourcesContent":["export const createLogger = (logger) => ({\n debug: () => { },\n info: () => { },\n warn: console.warn.bind(console),\n error: console.error.bind(console),\n ...logger,\n});\n","// THIS FILE IS GENERATED - DO NOT EDIT DIRECTLY\n// make edits in scripts/generate-types.ts\nexport const emitterEventNames = [\n \"branch_protection_rule\",\n \"branch_protection_rule.created\",\n \"branch_protection_rule.deleted\",\n \"branch_protection_rule.edited\",\n \"check_run\",\n \"check_run.completed\",\n \"check_run.created\",\n \"check_run.requested_action\",\n \"check_run.rerequested\",\n \"check_suite\",\n \"check_suite.completed\",\n \"check_suite.requested\",\n \"check_suite.rerequested\",\n \"code_scanning_alert\",\n \"code_scanning_alert.appeared_in_branch\",\n \"code_scanning_alert.closed_by_user\",\n \"code_scanning_alert.created\",\n \"code_scanning_alert.fixed\",\n \"code_scanning_alert.reopened\",\n \"code_scanning_alert.reopened_by_user\",\n \"commit_comment\",\n \"commit_comment.created\",\n \"create\",\n \"delete\",\n \"dependabot_alert\",\n \"dependabot_alert.created\",\n 
\"dependabot_alert.dismissed\",\n \"dependabot_alert.fixed\",\n \"dependabot_alert.reintroduced\",\n \"dependabot_alert.reopened\",\n \"deploy_key\",\n \"deploy_key.created\",\n \"deploy_key.deleted\",\n \"deployment\",\n \"deployment.created\",\n \"deployment_status\",\n \"deployment_status.created\",\n \"discussion\",\n \"discussion.answered\",\n \"discussion.category_changed\",\n \"discussion.created\",\n \"discussion.deleted\",\n \"discussion.edited\",\n \"discussion.labeled\",\n \"discussion.locked\",\n \"discussion.pinned\",\n \"discussion.transferred\",\n \"discussion.unanswered\",\n \"discussion.unlabeled\",\n \"discussion.unlocked\",\n \"discussion.unpinned\",\n \"discussion_comment\",\n \"discussion_comment.created\",\n \"discussion_comment.deleted\",\n \"discussion_comment.edited\",\n \"fork\",\n \"github_app_authorization\",\n \"github_app_authorization.revoked\",\n \"gollum\",\n \"installation\",\n \"installation.created\",\n \"installation.deleted\",\n \"installation.new_permissions_accepted\",\n \"installation.suspend\",\n \"installation.unsuspend\",\n \"installation_repositories\",\n \"installation_repositories.added\",\n \"installation_repositories.removed\",\n \"issue_comment\",\n \"issue_comment.created\",\n \"issue_comment.deleted\",\n \"issue_comment.edited\",\n \"issues\",\n \"issues.assigned\",\n \"issues.closed\",\n \"issues.deleted\",\n \"issues.demilestoned\",\n \"issues.edited\",\n \"issues.labeled\",\n \"issues.locked\",\n \"issues.milestoned\",\n \"issues.opened\",\n \"issues.pinned\",\n \"issues.reopened\",\n \"issues.transferred\",\n \"issues.unassigned\",\n \"issues.unlabeled\",\n \"issues.unlocked\",\n \"issues.unpinned\",\n \"label\",\n \"label.created\",\n \"label.deleted\",\n \"label.edited\",\n \"marketplace_purchase\",\n \"marketplace_purchase.cancelled\",\n \"marketplace_purchase.changed\",\n \"marketplace_purchase.pending_change\",\n \"marketplace_purchase.pending_change_cancelled\",\n \"marketplace_purchase.purchased\",\n \"member\",\n \"member.added\",\n \"member.edited\",\n \"member.removed\",\n \"membership\",\n \"membership.added\",\n \"membership.removed\",\n \"merge_group\",\n \"merge_group.checks_requested\",\n \"meta\",\n \"meta.deleted\",\n \"milestone\",\n \"milestone.closed\",\n \"milestone.created\",\n \"milestone.deleted\",\n \"milestone.edited\",\n \"milestone.opened\",\n \"org_block\",\n \"org_block.blocked\",\n \"org_block.unblocked\",\n \"organization\",\n \"organization.deleted\",\n \"organization.member_added\",\n \"organization.member_invited\",\n \"organization.member_removed\",\n \"organization.renamed\",\n \"package\",\n \"package.published\",\n \"package.updated\",\n \"page_build\",\n \"ping\",\n \"project\",\n \"project.closed\",\n \"project.created\",\n \"project.deleted\",\n \"project.edited\",\n \"project.reopened\",\n \"project_card\",\n \"project_card.converted\",\n \"project_card.created\",\n \"project_card.deleted\",\n \"project_card.edited\",\n \"project_card.moved\",\n \"project_column\",\n \"project_column.created\",\n \"project_column.deleted\",\n \"project_column.edited\",\n \"project_column.moved\",\n \"projects_v2_item\",\n \"projects_v2_item.archived\",\n \"projects_v2_item.converted\",\n \"projects_v2_item.created\",\n \"projects_v2_item.deleted\",\n \"projects_v2_item.edited\",\n \"projects_v2_item.reordered\",\n \"projects_v2_item.restored\",\n \"public\",\n \"pull_request\",\n \"pull_request.assigned\",\n \"pull_request.auto_merge_disabled\",\n \"pull_request.auto_merge_enabled\",\n 
\"pull_request.closed\",\n \"pull_request.converted_to_draft\",\n \"pull_request.dequeued\",\n \"pull_request.edited\",\n \"pull_request.labeled\",\n \"pull_request.locked\",\n \"pull_request.opened\",\n \"pull_request.queued\",\n \"pull_request.ready_for_review\",\n \"pull_request.reopened\",\n \"pull_request.review_request_removed\",\n \"pull_request.review_requested\",\n \"pull_request.synchronize\",\n \"pull_request.unassigned\",\n \"pull_request.unlabeled\",\n \"pull_request.unlocked\",\n \"pull_request_review\",\n \"pull_request_review.dismissed\",\n \"pull_request_review.edited\",\n \"pull_request_review.submitted\",\n \"pull_request_review_comment\",\n \"pull_request_review_comment.created\",\n \"pull_request_review_comment.deleted\",\n \"pull_request_review_comment.edited\",\n \"pull_request_review_thread\",\n \"pull_request_review_thread.resolved\",\n \"pull_request_review_thread.unresolved\",\n \"push\",\n \"release\",\n \"release.created\",\n \"release.deleted\",\n \"release.edited\",\n \"release.prereleased\",\n \"release.published\",\n \"release.released\",\n \"release.unpublished\",\n \"repository\",\n \"repository.archived\",\n \"repository.created\",\n \"repository.deleted\",\n \"repository.edited\",\n \"repository.privatized\",\n \"repository.publicized\",\n \"repository.renamed\",\n \"repository.transferred\",\n \"repository.unarchived\",\n \"repository_dispatch\",\n \"repository_import\",\n \"repository_vulnerability_alert\",\n \"repository_vulnerability_alert.create\",\n \"repository_vulnerability_alert.dismiss\",\n \"repository_vulnerability_alert.reopen\",\n \"repository_vulnerability_alert.resolve\",\n \"secret_scanning_alert\",\n \"secret_scanning_alert.created\",\n \"secret_scanning_alert.reopened\",\n \"secret_scanning_alert.resolved\",\n \"security_advisory\",\n \"security_advisory.performed\",\n \"security_advisory.published\",\n \"security_advisory.updated\",\n \"security_advisory.withdrawn\",\n \"sponsorship\",\n \"sponsorship.cancelled\",\n \"sponsorship.created\",\n \"sponsorship.edited\",\n \"sponsorship.pending_cancellation\",\n \"sponsorship.pending_tier_change\",\n \"sponsorship.tier_changed\",\n \"star\",\n \"star.created\",\n \"star.deleted\",\n \"status\",\n \"team\",\n \"team.added_to_repository\",\n \"team.created\",\n \"team.deleted\",\n \"team.edited\",\n \"team.removed_from_repository\",\n \"team_add\",\n \"watch\",\n \"watch.started\",\n \"workflow_dispatch\",\n \"workflow_job\",\n \"workflow_job.completed\",\n \"workflow_job.in_progress\",\n \"workflow_job.queued\",\n \"workflow_run\",\n \"workflow_run.completed\",\n \"workflow_run.in_progress\",\n \"workflow_run.requested\",\n];\n","import { emitterEventNames } from \"../generated/webhook-names\";\nfunction handleEventHandlers(state, webhookName, handler) {\n if (!state.hooks[webhookName]) {\n state.hooks[webhookName] = [];\n }\n state.hooks[webhookName].push(handler);\n}\nexport function receiverOn(state, webhookNameOrNames, handler) {\n if (Array.isArray(webhookNameOrNames)) {\n webhookNameOrNames.forEach((webhookName) => receiverOn(state, webhookName, handler));\n return;\n }\n if ([\"*\", \"error\"].includes(webhookNameOrNames)) {\n const webhookName = webhookNameOrNames === \"*\" ? \"any\" : webhookNameOrNames;\n const message = `Using the \"${webhookNameOrNames}\" event with the regular Webhooks.on() function is not supported. 
Please use the Webhooks.on${webhookName.charAt(0).toUpperCase() + webhookName.slice(1)}() method instead`;\n throw new Error(message);\n }\n if (!emitterEventNames.includes(webhookNameOrNames)) {\n state.log.warn(`\"${webhookNameOrNames}\" is not a known webhook name (https://developer.github.com/v3/activity/events/types/)`);\n }\n handleEventHandlers(state, webhookNameOrNames, handler);\n}\nexport function receiverOnAny(state, handler) {\n handleEventHandlers(state, \"*\", handler);\n}\nexport function receiverOnError(state, handler) {\n handleEventHandlers(state, \"error\", handler);\n}\n","// Errors thrown or rejected Promises in \"error\" event handlers are not handled\n// as they are in the webhook event handlers. If errors occur, we log a\n// \"Fatal: Error occurred\" message to stdout\nexport function wrapErrorHandler(handler, error) {\n let returnValue;\n try {\n returnValue = handler(error);\n }\n catch (error) {\n console.log('FATAL: Error occurred in \"error\" event handler');\n console.log(error);\n }\n if (returnValue && returnValue.catch) {\n returnValue.catch((error) => {\n console.log('FATAL: Error occurred in \"error\" event handler');\n console.log(error);\n });\n }\n}\n","// @ts-ignore to address #245\nimport AggregateError from \"aggregate-error\";\nimport { wrapErrorHandler } from \"./wrap-error-handler\";\nfunction getHooks(state, eventPayloadAction, eventName) {\n const hooks = [state.hooks[eventName], state.hooks[\"*\"]];\n if (eventPayloadAction) {\n hooks.unshift(state.hooks[`${eventName}.${eventPayloadAction}`]);\n }\n return [].concat(...hooks.filter(Boolean));\n}\n// main handler function\nexport function receiverHandle(state, event) {\n const errorHandlers = state.hooks.error || [];\n if (event instanceof Error) {\n const error = Object.assign(new AggregateError([event]), {\n event,\n errors: [event],\n });\n errorHandlers.forEach((handler) => wrapErrorHandler(handler, error));\n return Promise.reject(error);\n }\n if (!event || !event.name) {\n throw new AggregateError([\"Event name not passed\"]);\n }\n if (!event.payload) {\n throw new AggregateError([\"Event payload not passed\"]);\n }\n // flatten arrays of event listeners and remove undefined values\n const hooks = getHooks(state, \"action\" in event.payload ? 
event.payload.action : null, event.name);\n if (hooks.length === 0) {\n return Promise.resolve();\n }\n const errors = [];\n const promises = hooks.map((handler) => {\n let promise = Promise.resolve(event);\n if (state.transform) {\n promise = promise.then(state.transform);\n }\n return promise\n .then((event) => {\n return handler(event);\n })\n .catch((error) => errors.push(Object.assign(error, { event })));\n });\n return Promise.all(promises).then(() => {\n if (errors.length === 0) {\n return;\n }\n const error = new AggregateError(errors);\n Object.assign(error, {\n event,\n errors,\n });\n errorHandlers.forEach((handler) => wrapErrorHandler(handler, error));\n throw error;\n });\n}\n","export function removeListener(state, webhookNameOrNames, handler) {\n if (Array.isArray(webhookNameOrNames)) {\n webhookNameOrNames.forEach((webhookName) => removeListener(state, webhookName, handler));\n return;\n }\n if (!state.hooks[webhookNameOrNames]) {\n return;\n }\n // remove last hook that has been added, that way\n // it behaves the same as removeListener\n for (let i = state.hooks[webhookNameOrNames].length - 1; i >= 0; i--) {\n if (state.hooks[webhookNameOrNames][i] === handler) {\n state.hooks[webhookNameOrNames].splice(i, 1);\n return;\n }\n }\n}\n","import { createLogger } from \"../createLogger\";\nimport { receiverOn as on, receiverOnAny as onAny, receiverOnError as onError, } from \"./on\";\nimport { receiverHandle as receive } from \"./receive\";\nimport { removeListener } from \"./remove-listener\";\nexport function createEventHandler(options) {\n const state = {\n hooks: {},\n log: createLogger(options && options.log),\n };\n if (options && options.transform) {\n state.transform = options.transform;\n }\n return {\n on: on.bind(null, state),\n onAny: onAny.bind(null, state),\n onError: onError.bind(null, state),\n removeListener: removeListener.bind(null, state),\n receive: receive.bind(null, state),\n };\n}\n","/**\n * GitHub sends its JSON with an indentation of 2 spaces and a line break at the end\n */\nexport function toNormalizedJsonString(payload) {\n const payloadString = JSON.stringify(payload);\n return payloadString.replace(/[^\\\\]\\\\u[\\da-f]{4}/g, (s) => {\n return s.substr(0, 3) + s.substr(3).toUpperCase();\n });\n}\n","import { sign as signMethod } from \"@octokit/webhooks-methods\";\nimport { toNormalizedJsonString } from \"./to-normalized-json-string\";\nexport async function sign(secret, payload) {\n return signMethod(secret, typeof payload === \"string\" ? payload : toNormalizedJsonString(payload));\n}\n","import { verify as verifyMethod } from \"@octokit/webhooks-methods\";\nimport { toNormalizedJsonString } from \"./to-normalized-json-string\";\nexport async function verify(secret, payload, signature) {\n return verifyMethod(secret, typeof payload === \"string\" ? payload : toNormalizedJsonString(payload), signature);\n}\n","import { verify } from \"@octokit/webhooks-methods\";\nimport { toNormalizedJsonString } from \"./to-normalized-json-string\";\nexport async function verifyAndReceive(state, event) {\n // verify will validate that the secret is not undefined\n const matchesSignature = await verify(state.secret, typeof event.payload === \"object\"\n ? 
toNormalizedJsonString(event.payload)\n : event.payload, event.signature);\n if (!matchesSignature) {\n const error = new Error(\"[@octokit/webhooks] signature does not match event payload and secret\");\n return state.eventHandler.receive(Object.assign(error, { event, status: 400 }));\n }\n return state.eventHandler.receive({\n id: event.id,\n name: event.name,\n payload: typeof event.payload === \"string\"\n ? JSON.parse(event.payload)\n : event.payload,\n });\n}\n","const WEBHOOK_HEADERS = [\n \"x-github-event\",\n \"x-hub-signature-256\",\n \"x-github-delivery\",\n];\n// https://docs.github.com/en/developers/webhooks-and-events/webhook-events-and-payloads#delivery-headers\nexport function getMissingHeaders(request) {\n return WEBHOOK_HEADERS.filter((header) => !(header in request.headers));\n}\n","// @ts-ignore to address #245\nimport AggregateError from \"aggregate-error\";\nexport function getPayload(request) {\n // If request.body already exists we can stop here\n // See https://github.com/octokit/webhooks.js/pull/23\n if (request.body)\n return Promise.resolve(request.body);\n return new Promise((resolve, reject) => {\n let data = \"\";\n request.setEncoding(\"utf8\");\n // istanbul ignore next\n request.on(\"error\", (error) => reject(new AggregateError([error])));\n request.on(\"data\", (chunk) => (data += chunk));\n request.on(\"end\", () => {\n try {\n resolve(JSON.parse(data));\n }\n catch (error) {\n error.message = \"Invalid JSON\";\n error.status = 400;\n reject(new AggregateError([error]));\n }\n });\n });\n}\n","import { getMissingHeaders } from \"./get-missing-headers\";\nimport { getPayload } from \"./get-payload\";\nexport async function middleware(webhooks, options, request, response, next) {\n let pathname;\n try {\n pathname = new URL(request.url, \"http://localhost\").pathname;\n }\n catch (error) {\n response.writeHead(422, {\n \"content-type\": \"application/json\",\n });\n response.end(JSON.stringify({\n error: `Request URL could not be parsed: ${request.url}`,\n }));\n return;\n }\n const isUnknownRoute = request.method !== \"POST\" || pathname !== options.path;\n const isExpressMiddleware = typeof next === \"function\";\n if (isUnknownRoute) {\n if (isExpressMiddleware) {\n return next();\n }\n else {\n return options.onUnhandledRequest(request, response);\n }\n }\n const missingHeaders = getMissingHeaders(request).join(\", \");\n if (missingHeaders) {\n response.writeHead(400, {\n \"content-type\": \"application/json\",\n });\n response.end(JSON.stringify({\n error: `Required headers missing: ${missingHeaders}`,\n }));\n return;\n }\n const eventName = request.headers[\"x-github-event\"];\n const signatureSHA256 = request.headers[\"x-hub-signature-256\"];\n const id = request.headers[\"x-github-delivery\"];\n options.log.debug(`${eventName} event received (id: ${id})`);\n // GitHub will abort the request if it does not receive a response within 10s\n // See https://github.com/octokit/webhooks.js/issues/185\n let didTimeout = false;\n const timeout = setTimeout(() => {\n didTimeout = true;\n response.statusCode = 202;\n response.end(\"still processing\\n\");\n }, 9000).unref();\n try {\n const payload = await getPayload(request);\n await webhooks.verifyAndReceive({\n id: id,\n name: eventName,\n payload: payload,\n signature: signatureSHA256,\n });\n clearTimeout(timeout);\n if (didTimeout)\n return;\n response.end(\"ok\\n\");\n }\n catch (error) {\n clearTimeout(timeout);\n if (didTimeout)\n return;\n const err = Array.from(error)[0];\n const errorMessage = 
err.message\n ? `${err.name}: ${err.message}`\n : \"Error: An Unspecified error occurred\";\n response.statusCode = typeof err.status !== \"undefined\" ? err.status : 500;\n options.log.error(error);\n response.end(JSON.stringify({\n error: errorMessage,\n }));\n }\n}\n","export function onUnhandledRequestDefault(request, response) {\n response.writeHead(404, {\n \"content-type\": \"application/json\",\n });\n response.end(JSON.stringify({\n error: `Unknown route: ${request.method} ${request.url}`,\n }));\n}\n","import { createLogger } from \"../../createLogger\";\nimport { middleware } from \"./middleware\";\nimport { onUnhandledRequestDefault } from \"./on-unhandled-request-default\";\nexport function createNodeMiddleware(webhooks, { path = \"/api/github/webhooks\", onUnhandledRequest = onUnhandledRequestDefault, log = createLogger(), } = {}) {\n return middleware.bind(null, webhooks, {\n path,\n onUnhandledRequest,\n log,\n });\n}\n","import { createLogger } from \"./createLogger\";\nimport { createEventHandler } from \"./event-handler/index\";\nimport { sign } from \"./sign\";\nimport { verify } from \"./verify\";\nimport { verifyAndReceive } from \"./verify-and-receive\";\nexport { createNodeMiddleware } from \"./middleware/node/index\";\nexport { emitterEventNames } from \"./generated/webhook-names\";\n// U holds the return value of `transform` function in Options\nclass Webhooks {\n constructor(options) {\n if (!options || !options.secret) {\n throw new Error(\"[@octokit/webhooks] options.secret required\");\n }\n const state = {\n eventHandler: createEventHandler(options),\n secret: options.secret,\n hooks: {},\n log: createLogger(options.log),\n };\n this.sign = sign.bind(null, options.secret);\n this.verify = verify.bind(null, options.secret);\n this.on = state.eventHandler.on;\n this.onAny = state.eventHandler.onAny;\n this.onError = state.eventHandler.onError;\n this.removeListener = state.eventHandler.removeListener;\n this.receive = state.eventHandler.receive;\n this.verifyAndReceive = verifyAndReceive.bind(null, state);\n }\n}\nexport { createEventHandler, Webhooks, 
};\n"],"names":["on","onAny","onError","receive","signMethod","verifyMethod","verify"],"mappings":";;;AAAO,MAAM,YAAY,GAAG,CAAC,MAAM,MAAM;AACzC,IAAI,KAAK,EAAE,MAAM,GAAG;AACpB,IAAI,IAAI,EAAE,MAAM,GAAG;AACnB,IAAI,IAAI,EAAE,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC;AACpC,IAAI,KAAK,EAAE,OAAO,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC;AACtC,IAAI,GAAG,MAAM;AACb,CAAC,CAAC;;ACNF;AACA;AACA,AAAY,MAAC,iBAAiB,GAAG;AACjC,IAAI,wBAAwB;AAC5B,IAAI,gCAAgC;AACpC,IAAI,gCAAgC;AACpC,IAAI,+BAA+B;AACnC,IAAI,WAAW;AACf,IAAI,qBAAqB;AACzB,IAAI,mBAAmB;AACvB,IAAI,4BAA4B;AAChC,IAAI,uBAAuB;AAC3B,IAAI,aAAa;AACjB,IAAI,uBAAuB;AAC3B,IAAI,uBAAuB;AAC3B,IAAI,yBAAyB;AAC7B,IAAI,qBAAqB;AACzB,IAAI,wCAAwC;AAC5C,IAAI,oCAAoC;AACxC,IAAI,6BAA6B;AACjC,IAAI,2BAA2B;AAC/B,IAAI,8BAA8B;AAClC,IAAI,sCAAsC;AAC1C,IAAI,gBAAgB;AACpB,IAAI,wBAAwB;AAC5B,IAAI,QAAQ;AACZ,IAAI,QAAQ;AACZ,IAAI,kBAAkB;AACtB,IAAI,0BAA0B;AAC9B,IAAI,4BAA4B;AAChC,IAAI,wBAAwB;AAC5B,IAAI,+BAA+B;AACnC,IAAI,2BAA2B;AAC/B,IAAI,YAAY;AAChB,IAAI,oBAAoB;AACxB,IAAI,oBAAoB;AACxB,IAAI,YAAY;AAChB,IAAI,oBAAoB;AACxB,IAAI,mBAAmB;AACvB,IAAI,2BAA2B;AAC/B,IAAI,YAAY;AAChB,IAAI,qBAAqB;AACzB,IAAI,6BAA6B;AACjC,IAAI,oBAAoB;AACxB,IAAI,oBAAoB;AACxB,IAAI,mBAAmB;AACvB,IAAI,oBAAoB;AACxB,IAAI,mBAAmB;AACvB,IAAI,mBAAmB;AACvB,IAAI,wBAAwB;AAC5B,IAAI,uBAAuB;AAC3B,IAAI,sBAAsB;AAC1B,IAAI,qBAAqB;AACzB,IAAI,qBAAqB;AACzB,IAAI,oBAAoB;AACxB,IAAI,4BAA4B;AAChC,IAAI,4BAA4B;AAChC,IAAI,2BAA2B;AAC/B,IAAI,MAAM;AACV,IAAI,0BAA0B;AAC9B,IAAI,kCAAkC;AACtC,IAAI,QAAQ;AACZ,IAAI,cAAc;AAClB,IAAI,sBAAsB;AAC1B,IAAI,sBAAsB;AAC1B,IAAI,uCAAuC;AAC3C,IAAI,sBAAsB;AAC1B,IAAI,wBAAwB;AAC5B,IAAI,2BAA2B;AAC/B,IAAI,iCAAiC;AACrC,IAAI,mCAAmC;AACvC,IAAI,eAAe;AACnB,IAAI,uBAAuB;AAC3B,IAAI,uBAAuB;AAC3B,IAAI,sBAAsB;AAC1B,IAAI,QAAQ;AACZ,IAAI,iBAAiB;AACrB,IAAI,eAAe;AACnB,IAAI,gBAAgB;AACpB,IAAI,qBAAqB;AACzB,IAAI,eAAe;AACnB,IAAI,gBAAgB;AACpB,IAAI,eAAe;AACnB,IAAI,mBAAmB;AACvB,IAAI,eAAe;AACnB,IAAI,eAAe;AACnB,IAAI,iBAAiB;AACrB,IAAI,oBAAoB;AACxB,IAAI,mBAAmB;AACvB,IAAI,kBAAkB;AACtB,IAAI,iBAAiB;AACrB,IAAI,iBAAiB;AACrB,IAAI,OAAO;AACX,IAAI,eAAe;AACnB,IAAI,eAAe;AACnB,IAAI,cAAc;AAClB,IAAI,sBAAsB;AAC1B,IAAI,gCAAgC;AACpC,IAAI,8BAA8B;AAClC,IAAI,qCAAqC;AACzC,IAAI,+CAA+C;AACnD,IAAI,gCAAgC;AACpC,IAAI,QAAQ;AACZ,IAAI,cAAc;AAClB,IAAI,eAAe;AACnB,IAAI,gBAAgB;AACpB,IAAI,YAAY;AAChB,IAAI,kBAAkB;AACtB,IAAI,oBAAoB;AACxB,IAAI,aAAa;AACjB,IAAI,8BAA8B;AAClC,IAAI,MAAM;AACV,IAAI,cAAc;AAClB,IAAI,WAAW;AACf,IAAI,kBAAkB;AACtB,IAAI,mBAAmB;AACvB,IAAI,mBAAmB;AACvB,IAAI,kBAAkB;AACtB,IAAI,kBAAkB;AACtB,IAAI,WAAW;AACf,IAAI,mBAAmB;AACvB,IAAI,qBAAqB;AACzB,IAAI,cAAc;AAClB,IAAI,sBAAsB;AAC1B,IAAI,2BAA2B;AAC/B,IAAI,6BAA6B;AACjC,IAAI,6BAA6B;AACjC,IAAI,sBAAsB;AAC1B,IAAI,SAAS;AACb,IAAI,mBAAmB;AACvB,IAAI,iBAAiB;AACrB,IAAI,YAAY;AAChB,IAAI,MAAM;AACV,IAAI,SAAS;AACb,IAAI,gBAAgB;AACpB,IAAI,iBAAiB;AACrB,IAAI,iBAAiB;AACrB,IAAI,gBAAgB;AACpB,IAAI,kBAAkB;AACtB,IAAI,cAAc;AAClB,IAAI,wBAAwB;AAC5B,IAAI,sBAAsB;AAC1B,IAAI,sBAAsB;AAC1B,IAAI,qBAAqB;AACzB,IAAI,oBAAoB;AACxB,IAAI,gBAAgB;AACpB,IAAI,wBAAwB;AAC5B,IAAI,wBAAwB;AAC5B,IAAI,uBAAuB;AAC3B,IAAI,sBAAsB;AAC1B,IAAI,kBAAkB;AACtB,IAAI,2BAA2B;AAC/B,IAAI,4BAA4B;AAChC,IAAI,0BAA0B;AAC9B,IAAI,0BAA0B;AAC9B,IAAI,yBAAyB;AAC7B,IAAI,4BAA4B;AAChC,IAAI,2BAA2B;AAC/B,IAAI,QAAQ;AACZ,IAAI,cAAc;AAClB,IAAI,uBAAuB;AAC3B,IAAI,kCAAkC;AACtC,IAAI,iCAAiC;AACrC,IAAI,qBAAqB;AACzB,IAAI,iCAAiC;AACrC,IAAI,uBAAuB;AAC3B,IAAI,qBAAqB;AACzB,IAAI,sBAAsB;AAC1B,IAAI,qBAAqB;AACzB,IAAI,qBAAqB;AACzB,IAAI,qBAAqB;AACzB,IAAI,+BAA+B;AACnC,IAAI,uBAAuB;AAC3B,IAAI,qCAAqC;AACzC,IAAI,+BAA+B;AACnC,IAAI,0BAA0B;AAC9B,IAAI,yBAAyB;AAC7B,IAAI,wBAAwB;AAC5B,IAAI,uBAAuB;AAC3B,IAAI,qBAAqB;AACzB,IAAI,+BAA+B;AACnC,IAAI,4BAA4B;AAChC,IAAI,+BAA+B;AACnC,IAAI,
6BAA6B;AACjC,IAAI,qCAAqC;AACzC,IAAI,qCAAqC;AACzC,IAAI,oCAAoC;AACxC,IAAI,4BAA4B;AAChC,IAAI,qCAAqC;AACzC,IAAI,uCAAuC;AAC3C,IAAI,MAAM;AACV,IAAI,SAAS;AACb,IAAI,iBAAiB;AACrB,IAAI,iBAAiB;AACrB,IAAI,gBAAgB;AACpB,IAAI,qBAAqB;AACzB,IAAI,mBAAmB;AACvB,IAAI,kBAAkB;AACtB,IAAI,qBAAqB;AACzB,IAAI,YAAY;AAChB,IAAI,qBAAqB;AACzB,IAAI,oBAAoB;AACxB,IAAI,oBAAoB;AACxB,IAAI,mBAAmB;AACvB,IAAI,uBAAuB;AAC3B,IAAI,uBAAuB;AAC3B,IAAI,oBAAoB;AACxB,IAAI,wBAAwB;AAC5B,IAAI,uBAAuB;AAC3B,IAAI,qBAAqB;AACzB,IAAI,mBAAmB;AACvB,IAAI,gCAAgC;AACpC,IAAI,uCAAuC;AAC3C,IAAI,wCAAwC;AAC5C,IAAI,uCAAuC;AAC3C,IAAI,wCAAwC;AAC5C,IAAI,uBAAuB;AAC3B,IAAI,+BAA+B;AACnC,IAAI,gCAAgC;AACpC,IAAI,gCAAgC;AACpC,IAAI,mBAAmB;AACvB,IAAI,6BAA6B;AACjC,IAAI,6BAA6B;AACjC,IAAI,2BAA2B;AAC/B,IAAI,6BAA6B;AACjC,IAAI,aAAa;AACjB,IAAI,uBAAuB;AAC3B,IAAI,qBAAqB;AACzB,IAAI,oBAAoB;AACxB,IAAI,kCAAkC;AACtC,IAAI,iCAAiC;AACrC,IAAI,0BAA0B;AAC9B,IAAI,MAAM;AACV,IAAI,cAAc;AAClB,IAAI,cAAc;AAClB,IAAI,QAAQ;AACZ,IAAI,MAAM;AACV,IAAI,0BAA0B;AAC9B,IAAI,cAAc;AAClB,IAAI,cAAc;AAClB,IAAI,aAAa;AACjB,IAAI,8BAA8B;AAClC,IAAI,UAAU;AACd,IAAI,OAAO;AACX,IAAI,eAAe;AACnB,IAAI,mBAAmB;AACvB,IAAI,cAAc;AAClB,IAAI,wBAAwB;AAC5B,IAAI,0BAA0B;AAC9B,IAAI,qBAAqB;AACzB,IAAI,cAAc;AAClB,IAAI,wBAAwB;AAC5B,IAAI,0BAA0B;AAC9B,IAAI,wBAAwB;AAC5B,CAAC;;AC7PD,SAAS,mBAAmB,CAAC,KAAK,EAAE,WAAW,EAAE,OAAO,EAAE;AAC1D,IAAI,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,WAAW,CAAC,EAAE;AACnC,QAAQ,KAAK,CAAC,KAAK,CAAC,WAAW,CAAC,GAAG,EAAE,CAAC;AACtC,KAAK;AACL,IAAI,KAAK,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;AAC3C,CAAC;AACD,AAAO,SAAS,UAAU,CAAC,KAAK,EAAE,kBAAkB,EAAE,OAAO,EAAE;AAC/D,IAAI,IAAI,KAAK,CAAC,OAAO,CAAC,kBAAkB,CAAC,EAAE;AAC3C,QAAQ,kBAAkB,CAAC,OAAO,CAAC,CAAC,WAAW,KAAK,UAAU,CAAC,KAAK,EAAE,WAAW,EAAE,OAAO,CAAC,CAAC,CAAC;AAC7F,QAAQ,OAAO;AACf,KAAK;AACL,IAAI,IAAI,CAAC,GAAG,EAAE,OAAO,CAAC,CAAC,QAAQ,CAAC,kBAAkB,CAAC,EAAE;AACrD,QAAQ,MAAM,WAAW,GAAG,kBAAkB,KAAK,GAAG,GAAG,KAAK,GAAG,kBAAkB,CAAC;AACpF,QAAQ,MAAM,OAAO,GAAG,CAAC,WAAW,EAAE,kBAAkB,CAAC,4FAA4F,EAAE,WAAW,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,WAAW,EAAE,GAAG,WAAW,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,iBAAiB,CAAC,CAAC;AACrO,QAAQ,MAAM,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC;AACjC,KAAK;AACL,IAAI,IAAI,CAAC,iBAAiB,CAAC,QAAQ,CAAC,kBAAkB,CAAC,EAAE;AACzD,QAAQ,KAAK,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,kBAAkB,CAAC,sFAAsF,CAAC,CAAC,CAAC;AACvI,KAAK;AACL,IAAI,mBAAmB,CAAC,KAAK,EAAE,kBAAkB,EAAE,OAAO,CAAC,CAAC;AAC5D,CAAC;AACD,AAAO,SAAS,aAAa,CAAC,KAAK,EAAE,OAAO,EAAE;AAC9C,IAAI,mBAAmB,CAAC,KAAK,EAAE,GAAG,EAAE,OAAO,CAAC,CAAC;AAC7C,CAAC;AACD,AAAO,SAAS,eAAe,CAAC,KAAK,EAAE,OAAO,EAAE;AAChD,IAAI,mBAAmB,CAAC,KAAK,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;AACjD,CAAC;;AC3BD;AACA;AACA;AACA,AAAO,SAAS,gBAAgB,CAAC,OAAO,EAAE,KAAK,EAAE;AACjD,IAAI,IAAI,WAAW,CAAC;AACpB,IAAI,IAAI;AACR,QAAQ,WAAW,GAAG,OAAO,CAAC,KAAK,CAAC,CAAC;AACrC,KAAK;AACL,IAAI,OAAO,KAAK,EAAE;AAClB,QAAQ,OAAO,CAAC,GAAG,CAAC,gDAAgD,CAAC,CAAC;AACtE,QAAQ,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;AAC3B,KAAK;AACL,IAAI,IAAI,WAAW,IAAI,WAAW,CAAC,KAAK,EAAE;AAC1C,QAAQ,WAAW,CAAC,KAAK,CAAC,CAAC,KAAK,KAAK;AACrC,YAAY,OAAO,CAAC,GAAG,CAAC,gDAAgD,CAAC,CAAC;AAC1E,YAAY,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;AAC/B,SAAS,CAAC,CAAC;AACX,KAAK;AACL,CAAC;;AClBD;AACA,AAEA,SAAS,QAAQ,CAAC,KAAK,EAAE,kBAAkB,EAAE,SAAS,EAAE;AACxD,IAAI,MAAM,KAAK,GAAG,CAAC,KAAK,CAAC,KAAK,CAAC,SAAS,CAAC,EAAE,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC;AAC7D,IAAI,IAAI,kBAAkB,EAAE;AAC5B,QAAQ,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,EAAE,SAAS,CAAC,CAAC,EAAE,kBAAkB,CAAC,CAAC,CAAC,CAAC,CAAC;AACzE,KAAK;AACL,IAAI,OAAO,EAAE,CAAC,MAAM,CAAC,GAAG,KAAK,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,CAAC;AAC/C,CAAC;AACD;AACA,AAAO,SAAS,cAAc,CAAC,KAAK,EAAE,KAAK,EAAE;AAC7C,IAAI,MAAM,aAAa,
GAAG,KAAK,CAAC,KAAK,CAAC,KAAK,IAAI,EAAE,CAAC;AAClD,IAAI,IAAI,KAAK,YAAY,KAAK,EAAE;AAChC,QAAQ,MAAM,KAAK,GAAG,MAAM,CAAC,MAAM,CAAC,IAAI,cAAc,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE;AACjE,YAAY,KAAK;AACjB,YAAY,MAAM,EAAE,CAAC,KAAK,CAAC;AAC3B,SAAS,CAAC,CAAC;AACX,QAAQ,aAAa,CAAC,OAAO,CAAC,CAAC,OAAO,KAAK,gBAAgB,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC,CAAC;AAC7E,QAAQ,OAAO,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;AACrC,KAAK;AACL,IAAI,IAAI,CAAC,KAAK,IAAI,CAAC,KAAK,CAAC,IAAI,EAAE;AAC/B,QAAQ,MAAM,IAAI,cAAc,CAAC,CAAC,uBAAuB,CAAC,CAAC,CAAC;AAC5D,KAAK;AACL,IAAI,IAAI,CAAC,KAAK,CAAC,OAAO,EAAE;AACxB,QAAQ,MAAM,IAAI,cAAc,CAAC,CAAC,0BAA0B,CAAC,CAAC,CAAC;AAC/D,KAAK;AACL;AACA,IAAI,MAAM,KAAK,GAAG,QAAQ,CAAC,KAAK,EAAE,QAAQ,IAAI,KAAK,CAAC,OAAO,GAAG,KAAK,CAAC,OAAO,CAAC,MAAM,GAAG,IAAI,EAAE,KAAK,CAAC,IAAI,CAAC,CAAC;AACvG,IAAI,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;AAC5B,QAAQ,OAAO,OAAO,CAAC,OAAO,EAAE,CAAC;AACjC,KAAK;AACL,IAAI,MAAM,MAAM,GAAG,EAAE,CAAC;AACtB,IAAI,MAAM,QAAQ,GAAG,KAAK,CAAC,GAAG,CAAC,CAAC,OAAO,KAAK;AAC5C,QAAQ,IAAI,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;AAC7C,QAAQ,IAAI,KAAK,CAAC,SAAS,EAAE;AAC7B,YAAY,OAAO,GAAG,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC;AACpD,SAAS;AACT,QAAQ,OAAO,OAAO;AACtB,aAAa,IAAI,CAAC,CAAC,KAAK,KAAK;AAC7B,YAAY,OAAO,OAAO,CAAC,KAAK,CAAC,CAAC;AAClC,SAAS,CAAC;AACV,aAAa,KAAK,CAAC,CAAC,KAAK,KAAK,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,KAAK,EAAE,EAAE,KAAK,EAAE,CAAC,CAAC,CAAC,CAAC;AAC5E,KAAK,CAAC,CAAC;AACP,IAAI,OAAO,OAAO,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,IAAI,CAAC,MAAM;AAC5C,QAAQ,IAAI,MAAM,CAAC,MAAM,KAAK,CAAC,EAAE;AACjC,YAAY,OAAO;AACnB,SAAS;AACT,QAAQ,MAAM,KAAK,GAAG,IAAI,cAAc,CAAC,MAAM,CAAC,CAAC;AACjD,QAAQ,MAAM,CAAC,MAAM,CAAC,KAAK,EAAE;AAC7B,YAAY,KAAK;AACjB,YAAY,MAAM;AAClB,SAAS,CAAC,CAAC;AACX,QAAQ,aAAa,CAAC,OAAO,CAAC,CAAC,OAAO,KAAK,gBAAgB,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC,CAAC;AAC7E,QAAQ,MAAM,KAAK,CAAC;AACpB,KAAK,CAAC,CAAC;AACP,CAAC;;ACxDM,SAAS,cAAc,CAAC,KAAK,EAAE,kBAAkB,EAAE,OAAO,EAAE;AACnE,IAAI,IAAI,KAAK,CAAC,OAAO,CAAC,kBAAkB,CAAC,EAAE;AAC3C,QAAQ,kBAAkB,CAAC,OAAO,CAAC,CAAC,WAAW,KAAK,cAAc,CAAC,KAAK,EAAE,WAAW,EAAE,OAAO,CAAC,CAAC,CAAC;AACjG,QAAQ,OAAO;AACf,KAAK;AACL,IAAI,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,kBAAkB,CAAC,EAAE;AAC1C,QAAQ,OAAO;AACf,KAAK;AACL;AACA;AACA,IAAI,KAAK,IAAI,CAAC,GAAG,KAAK,CAAC,KAAK,CAAC,kBAAkB,CAAC,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,EAAE;AAC1E,QAAQ,IAAI,KAAK,CAAC,KAAK,CAAC,kBAAkB,CAAC,CAAC,CAAC,CAAC,KAAK,OAAO,EAAE;AAC5D,YAAY,KAAK,CAAC,KAAK,CAAC,kBAAkB,CAAC,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;AACzD,YAAY,OAAO;AACnB,SAAS;AACT,KAAK;AACL,CAAC;;ACZM,SAAS,kBAAkB,CAAC,OAAO,EAAE;AAC5C,IAAI,MAAM,KAAK,GAAG;AAClB,QAAQ,KAAK,EAAE,EAAE;AACjB,QAAQ,GAAG,EAAE,YAAY,CAAC,OAAO,IAAI,OAAO,CAAC,GAAG,CAAC;AACjD,KAAK,CAAC;AACN,IAAI,IAAI,OAAO,IAAI,OAAO,CAAC,SAAS,EAAE;AACtC,QAAQ,KAAK,CAAC,SAAS,GAAG,OAAO,CAAC,SAAS,CAAC;AAC5C,KAAK;AACL,IAAI,OAAO;AACX,QAAQ,EAAE,EAAEA,UAAE,CAAC,IAAI,CAAC,IAAI,EAAE,KAAK,CAAC;AAChC,QAAQ,KAAK,EAAEC,aAAK,CAAC,IAAI,CAAC,IAAI,EAAE,KAAK,CAAC;AACtC,QAAQ,OAAO,EAAEC,eAAO,CAAC,IAAI,CAAC,IAAI,EAAE,KAAK,CAAC;AAC1C,QAAQ,cAAc,EAAE,cAAc,CAAC,IAAI,CAAC,IAAI,EAAE,KAAK,CAAC;AACxD,QAAQ,OAAO,EAAEC,cAAO,CAAC,IAAI,CAAC,IAAI,EAAE,KAAK,CAAC;AAC1C,KAAK,CAAC;AACN,CAAC;;ACnBD;AACA;AACA;AACA,AAAO,SAAS,sBAAsB,CAAC,OAAO,EAAE;AAChD,IAAI,MAAM,aAAa,GAAG,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,CAAC;AAClD,IAAI,OAAO,aAAa,CAAC,OAAO,CAAC,qBAAqB,EAAE,CAAC,CAAC,KAAK;AAC/D,QAAQ,OAAO,CAAC,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,WAAW,EAAE,CAAC;AAC1D,KAAK,CAAC,CAAC;AACP,CAAC;;ACNM,eAAe,IAAI,CAAC,MAAM,EAAE,OAAO,EAAE;AAC5C,IAAI,OAAOC,MAAU,CAAC,MAAM,EAAE,OAAO,OAAO,KAAK,QAAQ,GAAG,OAAO,GAAG,sBAAsB,CAAC,OAAO,CAAC,CAAC,CAA
C;AACvG,CAAC;;ACFM,eAAe,MAAM,CAAC,MAAM,EAAE,OAAO,EAAE,SAAS,EAAE;AACzD,IAAI,OAAOC,QAAY,CAAC,MAAM,EAAE,OAAO,OAAO,KAAK,QAAQ,GAAG,OAAO,GAAG,sBAAsB,CAAC,OAAO,CAAC,EAAE,SAAS,CAAC,CAAC;AACpH,CAAC;;ACFM,eAAe,gBAAgB,CAAC,KAAK,EAAE,KAAK,EAAE;AACrD;AACA,IAAI,MAAM,gBAAgB,GAAG,MAAMC,QAAM,CAAC,KAAK,CAAC,MAAM,EAAE,OAAO,KAAK,CAAC,OAAO,KAAK,QAAQ;AACzF,UAAU,sBAAsB,CAAC,KAAK,CAAC,OAAO,CAAC;AAC/C,UAAU,KAAK,CAAC,OAAO,EAAE,KAAK,CAAC,SAAS,CAAC,CAAC;AAC1C,IAAI,IAAI,CAAC,gBAAgB,EAAE;AAC3B,QAAQ,MAAM,KAAK,GAAG,IAAI,KAAK,CAAC,uEAAuE,CAAC,CAAC;AACzG,QAAQ,OAAO,KAAK,CAAC,YAAY,CAAC,OAAO,CAAC,MAAM,CAAC,MAAM,CAAC,KAAK,EAAE,EAAE,KAAK,EAAE,MAAM,EAAE,GAAG,EAAE,CAAC,CAAC,CAAC;AACxF,KAAK;AACL,IAAI,OAAO,KAAK,CAAC,YAAY,CAAC,OAAO,CAAC;AACtC,QAAQ,EAAE,EAAE,KAAK,CAAC,EAAE;AACpB,QAAQ,IAAI,EAAE,KAAK,CAAC,IAAI;AACxB,QAAQ,OAAO,EAAE,OAAO,KAAK,CAAC,OAAO,KAAK,QAAQ;AAClD,cAAc,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,OAAO,CAAC;AACvC,cAAc,KAAK,CAAC,OAAO;AAC3B,KAAK,CAAC,CAAC;AACP,CAAC;;AClBD,MAAM,eAAe,GAAG;AACxB,IAAI,gBAAgB;AACpB,IAAI,qBAAqB;AACzB,IAAI,mBAAmB;AACvB,CAAC,CAAC;AACF;AACA,AAAO,SAAS,iBAAiB,CAAC,OAAO,EAAE;AAC3C,IAAI,OAAO,eAAe,CAAC,MAAM,CAAC,CAAC,MAAM,KAAK,EAAE,MAAM,IAAI,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC;AAC5E,CAAC;;ACRD;AACA,AACO,SAAS,UAAU,CAAC,OAAO,EAAE;AACpC;AACA;AACA,IAAI,IAAI,OAAO,CAAC,IAAI;AACpB,QAAQ,OAAO,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;AAC7C,IAAI,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,KAAK;AAC5C,QAAQ,IAAI,IAAI,GAAG,EAAE,CAAC;AACtB,QAAQ,OAAO,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC;AACpC;AACA,QAAQ,OAAO,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,KAAK,KAAK,MAAM,CAAC,IAAI,cAAc,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;AAC5E,QAAQ,OAAO,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,KAAK,MAAM,IAAI,IAAI,KAAK,CAAC,CAAC,CAAC;AACvD,QAAQ,OAAO,CAAC,EAAE,CAAC,KAAK,EAAE,MAAM;AAChC,YAAY,IAAI;AAChB,gBAAgB,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC;AAC1C,aAAa;AACb,YAAY,OAAO,KAAK,EAAE;AAC1B,gBAAgB,KAAK,CAAC,OAAO,GAAG,cAAc,CAAC;AAC/C,gBAAgB,KAAK,CAAC,MAAM,GAAG,GAAG,CAAC;AACnC,gBAAgB,MAAM,CAAC,IAAI,cAAc,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;AACpD,aAAa;AACb,SAAS,CAAC,CAAC;AACX,KAAK,CAAC,CAAC;AACP,CAAC;;ACtBM,eAAe,UAAU,CAAC,QAAQ,EAAE,OAAO,EAAE,OAAO,EAAE,QAAQ,EAAE,IAAI,EAAE;AAC7E,IAAI,IAAI,QAAQ,CAAC;AACjB,IAAI,IAAI;AACR,QAAQ,QAAQ,GAAG,IAAI,GAAG,CAAC,OAAO,CAAC,GAAG,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC;AACrE,KAAK;AACL,IAAI,OAAO,KAAK,EAAE;AAClB,QAAQ,QAAQ,CAAC,SAAS,CAAC,GAAG,EAAE;AAChC,YAAY,cAAc,EAAE,kBAAkB;AAC9C,SAAS,CAAC,CAAC;AACX,QAAQ,QAAQ,CAAC,GAAG,CAAC,IAAI,CAAC,SAAS,CAAC;AACpC,YAAY,KAAK,EAAE,CAAC,iCAAiC,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC;AACpE,SAAS,CAAC,CAAC,CAAC;AACZ,QAAQ,OAAO;AACf,KAAK;AACL,IAAI,MAAM,cAAc,GAAG,OAAO,CAAC,MAAM,KAAK,MAAM,IAAI,QAAQ,KAAK,OAAO,CAAC,IAAI,CAAC;AAClF,IAAI,MAAM,mBAAmB,GAAG,OAAO,IAAI,KAAK,UAAU,CAAC;AAC3D,IAAI,IAAI,cAAc,EAAE;AACxB,QAAQ,IAAI,mBAAmB,EAAE;AACjC,YAAY,OAAO,IAAI,EAAE,CAAC;AAC1B,SAAS;AACT,aAAa;AACb,YAAY,OAAO,OAAO,CAAC,kBAAkB,CAAC,OAAO,EAAE,QAAQ,CAAC,CAAC;AACjE,SAAS;AACT,KAAK;AACL,IAAI,MAAM,cAAc,GAAG,iBAAiB,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AACjE,IAAI,IAAI,cAAc,EAAE;AACxB,QAAQ,QAAQ,CAAC,SAAS,CAAC,GAAG,EAAE;AAChC,YAAY,cAAc,EAAE,kBAAkB;AAC9C,SAAS,CAAC,CAAC;AACX,QAAQ,QAAQ,CAAC,GAAG,CAAC,IAAI,CAAC,SAAS,CAAC;AACpC,YAAY,KAAK,EAAE,CAAC,0BAA0B,EAAE,cAAc,CAAC,CAAC;AAChE,SAAS,CAAC,CAAC,CAAC;AACZ,QAAQ,OAAO;AACf,KAAK;AACL,IAAI,MAAM,SAAS,GAAG,OAAO,CAAC,OAAO,CAAC,gBAAgB,CAAC,CAAC;AACxD,IAAI,MAAM,eAAe,GAAG,OAAO,CAAC,OAAO,CAAC,qBAAqB,CAAC,CAAC;AACnE,IAAI,MAAM,EAAE,GAAG,OAAO,CAAC,OAAO,CAAC,mBAAmB,CAAC,CAAC;AACpD,IAAI,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,EAAE,SAAS,CAAC,qBAAqB,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC;AACjE;AACA;AACA,IAAI,IAAI,UAAU,GAAG,KAAK,CAAC;AAC3B,IAAI,MAAM,OAA
O,GAAG,UAAU,CAAC,MAAM;AACrC,QAAQ,UAAU,GAAG,IAAI,CAAC;AAC1B,QAAQ,QAAQ,CAAC,UAAU,GAAG,GAAG,CAAC;AAClC,QAAQ,QAAQ,CAAC,GAAG,CAAC,oBAAoB,CAAC,CAAC;AAC3C,KAAK,EAAE,IAAI,CAAC,CAAC,KAAK,EAAE,CAAC;AACrB,IAAI,IAAI;AACR,QAAQ,MAAM,OAAO,GAAG,MAAM,UAAU,CAAC,OAAO,CAAC,CAAC;AAClD,QAAQ,MAAM,QAAQ,CAAC,gBAAgB,CAAC;AACxC,YAAY,EAAE,EAAE,EAAE;AAClB,YAAY,IAAI,EAAE,SAAS;AAC3B,YAAY,OAAO,EAAE,OAAO;AAC5B,YAAY,SAAS,EAAE,eAAe;AACtC,SAAS,CAAC,CAAC;AACX,QAAQ,YAAY,CAAC,OAAO,CAAC,CAAC;AAC9B,QAAQ,IAAI,UAAU;AACtB,YAAY,OAAO;AACnB,QAAQ,QAAQ,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;AAC7B,KAAK;AACL,IAAI,OAAO,KAAK,EAAE;AAClB,QAAQ,YAAY,CAAC,OAAO,CAAC,CAAC;AAC9B,QAAQ,IAAI,UAAU;AACtB,YAAY,OAAO;AACnB,QAAQ,MAAM,GAAG,GAAG,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;AACzC,QAAQ,MAAM,YAAY,GAAG,GAAG,CAAC,OAAO;AACxC,cAAc,CAAC,EAAE,GAAG,CAAC,IAAI,CAAC,EAAE,EAAE,GAAG,CAAC,OAAO,CAAC,CAAC;AAC3C,cAAc,sCAAsC,CAAC;AACrD,QAAQ,QAAQ,CAAC,UAAU,GAAG,OAAO,GAAG,CAAC,MAAM,KAAK,WAAW,GAAG,GAAG,CAAC,MAAM,GAAG,GAAG,CAAC;AACnF,QAAQ,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;AACjC,QAAQ,QAAQ,CAAC,GAAG,CAAC,IAAI,CAAC,SAAS,CAAC;AACpC,YAAY,KAAK,EAAE,YAAY;AAC/B,SAAS,CAAC,CAAC,CAAC;AACZ,KAAK;AACL,CAAC;;AC3EM,SAAS,yBAAyB,CAAC,OAAO,EAAE,QAAQ,EAAE;AAC7D,IAAI,QAAQ,CAAC,SAAS,CAAC,GAAG,EAAE;AAC5B,QAAQ,cAAc,EAAE,kBAAkB;AAC1C,KAAK,CAAC,CAAC;AACP,IAAI,QAAQ,CAAC,GAAG,CAAC,IAAI,CAAC,SAAS,CAAC;AAChC,QAAQ,KAAK,EAAE,CAAC,eAAe,EAAE,OAAO,CAAC,MAAM,CAAC,CAAC,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC;AAChE,KAAK,CAAC,CAAC,CAAC;AACR,CAAC;;ACJM,SAAS,oBAAoB,CAAC,QAAQ,EAAE,EAAE,IAAI,GAAG,sBAAsB,EAAE,kBAAkB,GAAG,yBAAyB,EAAE,GAAG,GAAG,YAAY,EAAE,GAAG,GAAG,EAAE,EAAE;AAC9J,IAAI,OAAO,UAAU,CAAC,IAAI,CAAC,IAAI,EAAE,QAAQ,EAAE;AAC3C,QAAQ,IAAI;AACZ,QAAQ,kBAAkB;AAC1B,QAAQ,GAAG;AACX,KAAK,CAAC,CAAC;AACP,CAAC;;ACFD;AACA,MAAM,QAAQ,CAAC;AACf,IAAI,WAAW,CAAC,OAAO,EAAE;AACzB,QAAQ,IAAI,CAAC,OAAO,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE;AACzC,YAAY,MAAM,IAAI,KAAK,CAAC,6CAA6C,CAAC,CAAC;AAC3E,SAAS;AACT,QAAQ,MAAM,KAAK,GAAG;AACtB,YAAY,YAAY,EAAE,kBAAkB,CAAC,OAAO,CAAC;AACrD,YAAY,MAAM,EAAE,OAAO,CAAC,MAAM;AAClC,YAAY,KAAK,EAAE,EAAE;AACrB,YAAY,GAAG,EAAE,YAAY,CAAC,OAAO,CAAC,GAAG,CAAC;AAC1C,SAAS,CAAC;AACV,QAAQ,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,OAAO,CAAC,MAAM,CAAC,CAAC;AACpD,QAAQ,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,EAAE,OAAO,CAAC,MAAM,CAAC,CAAC;AACxD,QAAQ,IAAI,CAAC,EAAE,GAAG,KAAK,CAAC,YAAY,CAAC,EAAE,CAAC;AACxC,QAAQ,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC,YAAY,CAAC,KAAK,CAAC;AAC9C,QAAQ,IAAI,CAAC,OAAO,GAAG,KAAK,CAAC,YAAY,CAAC,OAAO,CAAC;AAClD,QAAQ,IAAI,CAAC,cAAc,GAAG,KAAK,CAAC,YAAY,CAAC,cAAc,CAAC;AAChE,QAAQ,IAAI,CAAC,OAAO,GAAG,KAAK,CAAC,YAAY,CAAC,OAAO,CAAC;AAClD,QAAQ,IAAI,CAAC,gBAAgB,GAAG,gBAAgB,CAAC,IAAI,CAAC,IAAI,EAAE,KAAK,CAAC,CAAC;AACnE,KAAK;AACL,CAAC;;;;"} \ No newline at end of file diff --git a/node_modules/@octokit/webhooks/package.json b/node_modules/@octokit/webhooks/package.json new file mode 100644 index 0000000..b45a0b8 --- /dev/null +++ b/node_modules/@octokit/webhooks/package.json @@ -0,0 +1,53 @@ +{ + "name": "@octokit/webhooks", + "description": "GitHub webhook events toolset for Node.js", + "version": "10.3.0", + "license": "MIT", + "files": [ + "dist-*/", + "bin/" + ], + "source": "dist-src/index.js", + "types": "dist-types/index.d.ts", + "main": "dist-node/index.js", + "module": "dist-web/index.js", + "pika": true, + "sideEffects": false, + "keywords": [], + "repository": "github:octokit/webhooks.js", + "dependencies": { + "@octokit/request-error": "^3.0.0", + "@octokit/webhooks-methods": "^3.0.0", + "@octokit/webhooks-types": "6.5.0", + "aggregate-error": "^3.1.0" + }, + "devDependencies": { + "@jest/types": 
"^29.0.0", + "@octokit/tsconfig": "^1.0.1", + "@octokit/webhooks-schemas": "6.5.0", + "@pika/pack": "^0.3.7", + "@pika/plugin-build-node": "^0.9.2", + "@pika/plugin-build-web": "^0.9.2", + "@pika/plugin-ts-standard-pkg": "^0.9.2", + "@types/jest": "^29.0.0", + "@types/json-schema": "^7.0.7", + "@types/node": "^16.0.0", + "@types/node-fetch": "^2.5.8", + "@types/prettier": "^2.0.0", + "axios": "^1.0.0", + "express": "^4.17.1", + "jest": "^29.0.0", + "node-fetch": "^2.6.7", + "prettier": "2.7.1", + "prettier-plugin-packagejson": "^2.2.9", + "ts-jest": "^29.0.0", + "ts-node": "^10.0.0", + "typescript": "^4.1.5" + }, + "engines": { + "node": ">= 14" + }, + "publishConfig": { + "access": "public" + } +} diff --git a/node_modules/@types/node/LICENSE b/node_modules/@types/node/LICENSE new file mode 100755 index 0000000..9e841e7 --- /dev/null +++ b/node_modules/@types/node/LICENSE @@ -0,0 +1,21 @@ + MIT License + + Copyright (c) Microsoft Corporation. + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE diff --git a/node_modules/@types/node/README.md b/node_modules/@types/node/README.md new file mode 100755 index 0000000..1837bb8 --- /dev/null +++ b/node_modules/@types/node/README.md @@ -0,0 +1,16 @@ +# Installation +> `npm install --save @types/node` + +# Summary +This package contains type definitions for Node.js (https://nodejs.org/). + +# Details +Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/node. + +### Additional Details + * Last updated: Thu, 20 Oct 2022 15:33:11 GMT + * Dependencies: none + * Global values: `AbortController`, `AbortSignal`, `__dirname`, `__filename`, `console`, `exports`, `gc`, `global`, `module`, `process`, `require`, `structuredClone` + +# Credits +These definitions were written by [Microsoft TypeScript](https://github.com/Microsoft), [DefinitelyTyped](https://github.com/DefinitelyTyped), [Alberto Schiabel](https://github.com/jkomyno), [Alvis HT Tang](https://github.com/alvis), [Andrew Makarov](https://github.com/r3nya), [Benjamin Toueg](https://github.com/btoueg), [Chigozirim C.](https://github.com/smac89), [David Junger](https://github.com/touffy), [Deividas Bakanas](https://github.com/DeividasBakanas), [Eugene Y. Q. 
Shen](https://github.com/eyqs), [Hannes Magnusson](https://github.com/Hannes-Magnusson-CK), [Huw](https://github.com/hoo29), [Kelvin Jin](https://github.com/kjin), [Klaus Meinhardt](https://github.com/ajafff), [Lishude](https://github.com/islishude), [Mariusz Wiktorczyk](https://github.com/mwiktorczyk), [Mohsen Azimi](https://github.com/mohsen1), [Nicolas Even](https://github.com/n-e), [Nikita Galkin](https://github.com/galkin), [Parambir Singh](https://github.com/parambirs), [Sebastian Silbermann](https://github.com/eps1lon), [Simon Schick](https://github.com/SimonSchick), [Thomas den Hollander](https://github.com/ThomasdenH), [Wilco Bakker](https://github.com/WilcoBakker), [wwwy3y3](https://github.com/wwwy3y3), [Samuel Ainsworth](https://github.com/samuela), [Kyle Uehlein](https://github.com/kuehlein), [Thanik Bhongbhibhat](https://github.com/bhongy), [Marcin Kopacz](https://github.com/chyzwar), [Trivikram Kamat](https://github.com/trivikr), [Junxiao Shi](https://github.com/yoursunny), [Ilia Baryshnikov](https://github.com/qwelias), [ExE Boss](https://github.com/ExE-Boss), [Piotr Błażejewicz](https://github.com/peterblazejewicz), [Anna Henningsen](https://github.com/addaleax), [Victor Perin](https://github.com/victorperin), [Yongsheng Zhang](https://github.com/ZYSzys), [NodeJS Contributors](https://github.com/NodeJS), [Linus Unnebäck](https://github.com/LinusU), [wafuwafu13](https://github.com/wafuwafu13), and [Matteo Collina](https://github.com/mcollina). diff --git a/node_modules/@types/node/assert.d.ts b/node_modules/@types/node/assert.d.ts new file mode 100755 index 0000000..8e02a66 --- /dev/null +++ b/node_modules/@types/node/assert.d.ts @@ -0,0 +1,911 @@ +/** + * The `assert` module provides a set of assertion functions for verifying + * invariants. + * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/assert.js) + */ +declare module 'assert' { + /** + * An alias of {@link ok}. + * @since v0.5.9 + * @param value The input that is checked for being truthy. + */ + function assert(value: unknown, message?: string | Error): asserts value; + namespace assert { + /** + * Indicates the failure of an assertion. All errors thrown by the `assert` module + * will be instances of the `AssertionError` class. + */ + class AssertionError extends Error { + actual: unknown; + expected: unknown; + operator: string; + generatedMessage: boolean; + code: 'ERR_ASSERTION'; + constructor(options?: { + /** If provided, the error message is set to this value. */ + message?: string | undefined; + /** The `actual` property on the error instance. */ + actual?: unknown | undefined; + /** The `expected` property on the error instance. */ + expected?: unknown | undefined; + /** The `operator` property on the error instance. */ + operator?: string | undefined; + /** If provided, the generated stack trace omits frames before this function. */ + // tslint:disable-next-line:ban-types + stackStartFn?: Function | undefined; + }); + } + /** + * This feature is currently experimental and behavior might still change. + * @since v14.2.0, v12.19.0 + * @experimental + */ + class CallTracker { + /** + * The wrapper function is expected to be called exactly `exact` times. If the + * function has not been called exactly `exact` times when `tracker.verify()` is called, then `tracker.verify()` will throw an + * error. + * + * ```js + * import assert from 'assert'; + * + * // Creates call tracker. 
+ * const tracker = new assert.CallTracker(); + * + * function func() {} + * + * // Returns a function that wraps func() that must be called exact times + * // before tracker.verify(). + * const callsfunc = tracker.calls(func); + * ``` + * @since v14.2.0, v12.19.0 + * @param [fn='A no-op function'] + * @param [exact=1] + * @return that wraps `fn`. + */ + calls(exact?: number): () => void; + calls any>(fn?: Func, exact?: number): Func; + /** + * The arrays contains information about the expected and actual number of calls of + * the functions that have not been called the expected number of times. + * + * ```js + * import assert from 'assert'; + * + * // Creates call tracker. + * const tracker = new assert.CallTracker(); + * + * function func() {} + * + * function foo() {} + * + * // Returns a function that wraps func() that must be called exact times + * // before tracker.verify(). + * const callsfunc = tracker.calls(func, 2); + * + * // Returns an array containing information on callsfunc() + * tracker.report(); + * // [ + * // { + * // message: 'Expected the func function to be executed 2 time(s) but was + * // executed 0 time(s).', + * // actual: 0, + * // expected: 2, + * // operator: 'func', + * // stack: stack trace + * // } + * // ] + * ``` + * @since v14.2.0, v12.19.0 + * @return of objects containing information about the wrapper functions returned by `calls`. + */ + report(): CallTrackerReportInformation[]; + /** + * Iterates through the list of functions passed to `tracker.calls()` and will throw an error for functions that + * have not been called the expected number of times. + * + * ```js + * import assert from 'assert'; + * + * // Creates call tracker. + * const tracker = new assert.CallTracker(); + * + * function func() {} + * + * // Returns a function that wraps func() that must be called exact times + * // before tracker.verify(). + * const callsfunc = tracker.calls(func, 2); + * + * callsfunc(); + * + * // Will throw an error since callsfunc() was only called once. + * tracker.verify(); + * ``` + * @since v14.2.0, v12.19.0 + */ + verify(): void; + } + interface CallTrackerReportInformation { + message: string; + /** The actual number of times the function was called. */ + actual: number; + /** The number of times the function was expected to be called. */ + expected: number; + /** The name of the function that is wrapped. */ + operator: string; + /** A stack trace of the function. */ + stack: object; + } + type AssertPredicate = RegExp | (new () => object) | ((thrown: unknown) => boolean) | object | Error; + /** + * Throws an `AssertionError` with the provided error message or a default + * error message. If the `message` parameter is an instance of an `Error` then + * it will be thrown instead of the `AssertionError`. + * + * ```js + * import assert from 'assert/strict'; + * + * assert.fail(); + * // AssertionError [ERR_ASSERTION]: Failed + * + * assert.fail('boom'); + * // AssertionError [ERR_ASSERTION]: boom + * + * assert.fail(new TypeError('need array')); + * // TypeError: need array + * ``` + * + * Using `assert.fail()` with more than two arguments is possible but deprecated. + * See below for further details. + * @since v0.1.21 + * @param [message='Failed'] + */ + function fail(message?: string | Error): never; + /** @deprecated since v10.0.0 - use fail([message]) or other assert functions instead. 
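For reference alongside the `CallTracker` documentation above, a minimal end-to-end sketch (assumes Node.js >= 14.2; the `greet` helper and the `process.on('exit')` placement are only illustrative choices):

```ts
import assert from 'node:assert';
import process from 'node:process';

const tracker = new assert.CallTracker();

function greet(name: string): string {
  return `hello ${name}`;
}

// Wrap greet() so the tracker expects exactly two calls before verify().
const trackedGreet = tracker.calls(greet, 2);

trackedGreet('ada');
trackedGreet('linus');

process.on('exit', () => {
  // report() lists wrappers whose call counts are still off; verify() throws for them.
  console.log(tracker.report()); // [] when every expectation was met
  tracker.verify();              // would throw if trackedGreet had run != 2 times
});
```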
*/ + function fail( + actual: unknown, + expected: unknown, + message?: string | Error, + operator?: string, + // tslint:disable-next-line:ban-types + stackStartFn?: Function + ): never; + /** + * Tests if `value` is truthy. It is equivalent to`assert.equal(!!value, true, message)`. + * + * If `value` is not truthy, an `AssertionError` is thrown with a `message`property set equal to the value of the `message` parameter. If the `message`parameter is `undefined`, a default + * error message is assigned. If the `message`parameter is an instance of an `Error` then it will be thrown instead of the`AssertionError`. + * If no arguments are passed in at all `message` will be set to the string:`` 'No value argument passed to `assert.ok()`' ``. + * + * Be aware that in the `repl` the error message will be different to the one + * thrown in a file! See below for further details. + * + * ```js + * import assert from 'assert/strict'; + * + * assert.ok(true); + * // OK + * assert.ok(1); + * // OK + * + * assert.ok(); + * // AssertionError: No value argument passed to `assert.ok()` + * + * assert.ok(false, 'it\'s false'); + * // AssertionError: it's false + * + * // In the repl: + * assert.ok(typeof 123 === 'string'); + * // AssertionError: false == true + * + * // In a file (e.g. test.js): + * assert.ok(typeof 123 === 'string'); + * // AssertionError: The expression evaluated to a falsy value: + * // + * // assert.ok(typeof 123 === 'string') + * + * assert.ok(false); + * // AssertionError: The expression evaluated to a falsy value: + * // + * // assert.ok(false) + * + * assert.ok(0); + * // AssertionError: The expression evaluated to a falsy value: + * // + * // assert.ok(0) + * ``` + * + * ```js + * import assert from 'assert/strict'; + * + * // Using `assert()` works the same: + * assert(0); + * // AssertionError: The expression evaluated to a falsy value: + * // + * // assert(0) + * ``` + * @since v0.1.21 + */ + function ok(value: unknown, message?: string | Error): asserts value; + /** + * **Strict assertion mode** + * + * An alias of {@link strictEqual}. + * + * **Legacy assertion mode** + * + * > Stability: 3 - Legacy: Use {@link strictEqual} instead. + * + * Tests shallow, coercive equality between the `actual` and `expected` parameters + * using the [`==` operator](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Equality). `NaN` is specially handled + * and treated as being identical if both sides are `NaN`. + * + * ```js + * import assert from 'assert'; + * + * assert.equal(1, 1); + * // OK, 1 == 1 + * assert.equal(1, '1'); + * // OK, 1 == '1' + * assert.equal(NaN, NaN); + * // OK + * + * assert.equal(1, 2); + * // AssertionError: 1 == 2 + * assert.equal({ a: { b: 1 } }, { a: { b: 1 } }); + * // AssertionError: { a: { b: 1 } } == { a: { b: 1 } } + * ``` + * + * If the values are not equal, an `AssertionError` is thrown with a `message`property set equal to the value of the `message` parameter. If the `message`parameter is undefined, a default + * error message is assigned. If the `message`parameter is an instance of an `Error` then it will be thrown instead of the`AssertionError`. + * @since v0.1.21 + */ + function equal(actual: unknown, expected: unknown, message?: string | Error): void; + /** + * **Strict assertion mode** + * + * An alias of {@link notStrictEqual}. + * + * **Legacy assertion mode** + * + * > Stability: 3 - Legacy: Use {@link notStrictEqual} instead. 
+ * + * Tests shallow, coercive inequality with the [`!=` operator](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Inequality). `NaN` is + * specially handled and treated as being identical if both sides are `NaN`. + * + * ```js + * import assert from 'assert'; + * + * assert.notEqual(1, 2); + * // OK + * + * assert.notEqual(1, 1); + * // AssertionError: 1 != 1 + * + * assert.notEqual(1, '1'); + * // AssertionError: 1 != '1' + * ``` + * + * If the values are equal, an `AssertionError` is thrown with a `message`property set equal to the value of the `message` parameter. If the `message`parameter is undefined, a default error + * message is assigned. If the `message`parameter is an instance of an `Error` then it will be thrown instead of the`AssertionError`. + * @since v0.1.21 + */ + function notEqual(actual: unknown, expected: unknown, message?: string | Error): void; + /** + * **Strict assertion mode** + * + * An alias of {@link deepStrictEqual}. + * + * **Legacy assertion mode** + * + * > Stability: 3 - Legacy: Use {@link deepStrictEqual} instead. + * + * Tests for deep equality between the `actual` and `expected` parameters. Consider + * using {@link deepStrictEqual} instead. {@link deepEqual} can have + * surprising results. + * + * _Deep equality_ means that the enumerable "own" properties of child objects + * are also recursively evaluated by the following rules. + * @since v0.1.21 + */ + function deepEqual(actual: unknown, expected: unknown, message?: string | Error): void; + /** + * **Strict assertion mode** + * + * An alias of {@link notDeepStrictEqual}. + * + * **Legacy assertion mode** + * + * > Stability: 3 - Legacy: Use {@link notDeepStrictEqual} instead. + * + * Tests for any deep inequality. Opposite of {@link deepEqual}. + * + * ```js + * import assert from 'assert'; + * + * const obj1 = { + * a: { + * b: 1 + * } + * }; + * const obj2 = { + * a: { + * b: 2 + * } + * }; + * const obj3 = { + * a: { + * b: 1 + * } + * }; + * const obj4 = Object.create(obj1); + * + * assert.notDeepEqual(obj1, obj1); + * // AssertionError: { a: { b: 1 } } notDeepEqual { a: { b: 1 } } + * + * assert.notDeepEqual(obj1, obj2); + * // OK + * + * assert.notDeepEqual(obj1, obj3); + * // AssertionError: { a: { b: 1 } } notDeepEqual { a: { b: 1 } } + * + * assert.notDeepEqual(obj1, obj4); + * // OK + * ``` + * + * If the values are deeply equal, an `AssertionError` is thrown with a`message` property set equal to the value of the `message` parameter. If the`message` parameter is undefined, a default + * error message is assigned. If the`message` parameter is an instance of an `Error` then it will be thrown + * instead of the `AssertionError`. + * @since v0.1.21 + */ + function notDeepEqual(actual: unknown, expected: unknown, message?: string | Error): void; + /** + * Tests strict equality between the `actual` and `expected` parameters as + * determined by [`Object.is()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/is). + * + * ```js + * import assert from 'assert/strict'; + * + * assert.strictEqual(1, 2); + * // AssertionError [ERR_ASSERTION]: Expected inputs to be strictly equal: + * // + * // 1 !== 2 + * + * assert.strictEqual(1, 1); + * // OK + * + * assert.strictEqual('Hello foobar', 'Hello World!'); + * // AssertionError [ERR_ASSERTION]: Expected inputs to be strictly equal: + * // + actual - expected + * // + * // + 'Hello foobar' + * // - 'Hello World!' 
+ * // ^ + * + * const apples = 1; + * const oranges = 2; + * assert.strictEqual(apples, oranges, `apples ${apples} !== oranges ${oranges}`); + * // AssertionError [ERR_ASSERTION]: apples 1 !== oranges 2 + * + * assert.strictEqual(1, '1', new TypeError('Inputs are not identical')); + * // TypeError: Inputs are not identical + * ``` + * + * If the values are not strictly equal, an `AssertionError` is thrown with a`message` property set equal to the value of the `message` parameter. If the`message` parameter is undefined, a + * default error message is assigned. If the`message` parameter is an instance of an `Error` then it will be thrown + * instead of the `AssertionError`. + * @since v0.1.21 + */ + function strictEqual(actual: unknown, expected: T, message?: string | Error): asserts actual is T; + /** + * Tests strict inequality between the `actual` and `expected` parameters as + * determined by [`Object.is()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/is). + * + * ```js + * import assert from 'assert/strict'; + * + * assert.notStrictEqual(1, 2); + * // OK + * + * assert.notStrictEqual(1, 1); + * // AssertionError [ERR_ASSERTION]: Expected "actual" to be strictly unequal to: + * // + * // 1 + * + * assert.notStrictEqual(1, '1'); + * // OK + * ``` + * + * If the values are strictly equal, an `AssertionError` is thrown with a`message` property set equal to the value of the `message` parameter. If the`message` parameter is undefined, a + * default error message is assigned. If the`message` parameter is an instance of an `Error` then it will be thrown + * instead of the `AssertionError`. + * @since v0.1.21 + */ + function notStrictEqual(actual: unknown, expected: unknown, message?: string | Error): void; + /** + * Tests for deep equality between the `actual` and `expected` parameters. + * "Deep" equality means that the enumerable "own" properties of child objects + * are recursively evaluated also by the following rules. + * @since v1.2.0 + */ + function deepStrictEqual(actual: unknown, expected: T, message?: string | Error): asserts actual is T; + /** + * Tests for deep strict inequality. Opposite of {@link deepStrictEqual}. + * + * ```js + * import assert from 'assert/strict'; + * + * assert.notDeepStrictEqual({ a: 1 }, { a: '1' }); + * // OK + * ``` + * + * If the values are deeply and strictly equal, an `AssertionError` is thrown + * with a `message` property set equal to the value of the `message` parameter. If + * the `message` parameter is undefined, a default error message is assigned. If + * the `message` parameter is an instance of an `Error` then it will be thrown + * instead of the `AssertionError`. + * @since v1.2.0 + */ + function notDeepStrictEqual(actual: unknown, expected: unknown, message?: string | Error): void; + /** + * Expects the function `fn` to throw an error. + * + * If specified, `error` can be a [`Class`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Classes), + * [`RegExp`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions), a validation function, + * a validation object where each property will be tested for strict deep equality, + * or an instance of error where each property will be tested for strict deep + * equality including the non-enumerable `message` and `name` properties. When + * using an object, it is also possible to use a regular expression, when + * validating against a string property. See below for examples. 
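A short sketch contrasting the legacy (coercive) and strict variants of the equality assertions documented above; the literal values are arbitrary:

```ts
import assert, { strict as strictAssert } from 'node:assert';

// Legacy mode compares with == and loose deep-equality rules.
assert.equal(1, '1');                    // OK: 1 == '1'
assert.deepEqual({ a: 1 }, { a: '1' });  // OK: primitive leaves compared with ==

// Strict mode compares with Object.is() and strict deep equality.
strictAssert.notEqual(1, '1');                                     // OK: 1 !== '1'
strictAssert.deepEqual({ a: 1 }, { a: 1 });                        // OK
assert.throws(() => strictAssert.deepEqual({ a: 1 }, { a: '1' })); // types differ, so it throws
```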
+ * + * If specified, `message` will be appended to the message provided by the`AssertionError` if the `fn` call fails to throw or in case the error validation + * fails. + * + * Custom validation object/error instance: + * + * ```js + * import assert from 'assert/strict'; + * + * const err = new TypeError('Wrong value'); + * err.code = 404; + * err.foo = 'bar'; + * err.info = { + * nested: true, + * baz: 'text' + * }; + * err.reg = /abc/i; + * + * assert.throws( + * () => { + * throw err; + * }, + * { + * name: 'TypeError', + * message: 'Wrong value', + * info: { + * nested: true, + * baz: 'text' + * } + * // Only properties on the validation object will be tested for. + * // Using nested objects requires all properties to be present. Otherwise + * // the validation is going to fail. + * } + * ); + * + * // Using regular expressions to validate error properties: + * throws( + * () => { + * throw err; + * }, + * { + * // The `name` and `message` properties are strings and using regular + * // expressions on those will match against the string. If they fail, an + * // error is thrown. + * name: /^TypeError$/, + * message: /Wrong/, + * foo: 'bar', + * info: { + * nested: true, + * // It is not possible to use regular expressions for nested properties! + * baz: 'text' + * }, + * // The `reg` property contains a regular expression and only if the + * // validation object contains an identical regular expression, it is going + * // to pass. + * reg: /abc/i + * } + * ); + * + * // Fails due to the different `message` and `name` properties: + * throws( + * () => { + * const otherErr = new Error('Not found'); + * // Copy all enumerable properties from `err` to `otherErr`. + * for (const [key, value] of Object.entries(err)) { + * otherErr[key] = value; + * } + * throw otherErr; + * }, + * // The error's `message` and `name` properties will also be checked when using + * // an error as validation object. + * err + * ); + * ``` + * + * Validate instanceof using constructor: + * + * ```js + * import assert from 'assert/strict'; + * + * assert.throws( + * () => { + * throw new Error('Wrong value'); + * }, + * Error + * ); + * ``` + * + * Validate error message using [`RegExp`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions): + * + * Using a regular expression runs `.toString` on the error object, and will + * therefore also include the error name. + * + * ```js + * import assert from 'assert/strict'; + * + * assert.throws( + * () => { + * throw new Error('Wrong value'); + * }, + * /^Error: Wrong value$/ + * ); + * ``` + * + * Custom error validation: + * + * The function must return `true` to indicate all internal validations passed. + * It will otherwise fail with an `AssertionError`. + * + * ```js + * import assert from 'assert/strict'; + * + * assert.throws( + * () => { + * throw new Error('Wrong value'); + * }, + * (err) => { + * assert(err instanceof Error); + * assert(/value/.test(err)); + * // Avoid returning anything from validation functions besides `true`. + * // Otherwise, it's not clear what part of the validation failed. Instead, + * // throw an error about the specific validation that failed (as done in this + * // example) and add as much helpful debugging information to that error as + * // possible. + * return true; + * }, + * 'unexpected error' + * ); + * ``` + * + * `error` cannot be a string. If a string is provided as the second + * argument, then `error` is assumed to be omitted and the string will be used for`message` instead. 
This can lead to easy-to-miss mistakes. Using the same + * message as the thrown error message is going to result in an`ERR_AMBIGUOUS_ARGUMENT` error. Please read the example below carefully if using + * a string as the second argument gets considered: + * + * ```js + * import assert from 'assert/strict'; + * + * function throwingFirst() { + * throw new Error('First'); + * } + * + * function throwingSecond() { + * throw new Error('Second'); + * } + * + * function notThrowing() {} + * + * // The second argument is a string and the input function threw an Error. + * // The first case will not throw as it does not match for the error message + * // thrown by the input function! + * assert.throws(throwingFirst, 'Second'); + * // In the next example the message has no benefit over the message from the + * // error and since it is not clear if the user intended to actually match + * // against the error message, Node.js throws an `ERR_AMBIGUOUS_ARGUMENT` error. + * assert.throws(throwingSecond, 'Second'); + * // TypeError [ERR_AMBIGUOUS_ARGUMENT] + * + * // The string is only used (as message) in case the function does not throw: + * assert.throws(notThrowing, 'Second'); + * // AssertionError [ERR_ASSERTION]: Missing expected exception: Second + * + * // If it was intended to match for the error message do this instead: + * // It does not throw because the error messages match. + * assert.throws(throwingSecond, /Second$/); + * + * // If the error message does not match, an AssertionError is thrown. + * assert.throws(throwingFirst, /Second$/); + * // AssertionError [ERR_ASSERTION] + * ``` + * + * Due to the confusing error-prone notation, avoid a string as the second + * argument. + * @since v0.1.21 + */ + function throws(block: () => unknown, message?: string | Error): void; + function throws(block: () => unknown, error: AssertPredicate, message?: string | Error): void; + /** + * Asserts that the function `fn` does not throw an error. + * + * Using `assert.doesNotThrow()` is actually not useful because there + * is no benefit in catching an error and then rethrowing it. Instead, consider + * adding a comment next to the specific code path that should not throw and keep + * error messages as expressive as possible. + * + * When `assert.doesNotThrow()` is called, it will immediately call the `fn`function. + * + * If an error is thrown and it is the same type as that specified by the `error`parameter, then an `AssertionError` is thrown. If the error is of a + * different type, or if the `error` parameter is undefined, the error is + * propagated back to the caller. + * + * If specified, `error` can be a [`Class`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Classes), + * [`RegExp`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions) or a validation + * function. See {@link throws} for more details. 
+ * + * The following, for instance, will throw the `TypeError` because there is no + * matching error type in the assertion: + * + * ```js + * import assert from 'assert/strict'; + * + * assert.doesNotThrow( + * () => { + * throw new TypeError('Wrong value'); + * }, + * SyntaxError + * ); + * ``` + * + * However, the following will result in an `AssertionError` with the message + * 'Got unwanted exception...': + * + * ```js + * import assert from 'assert/strict'; + * + * assert.doesNotThrow( + * () => { + * throw new TypeError('Wrong value'); + * }, + * TypeError + * ); + * ``` + * + * If an `AssertionError` is thrown and a value is provided for the `message`parameter, the value of `message` will be appended to the `AssertionError` message: + * + * ```js + * import assert from 'assert/strict'; + * + * assert.doesNotThrow( + * () => { + * throw new TypeError('Wrong value'); + * }, + * /Wrong value/, + * 'Whoops' + * ); + * // Throws: AssertionError: Got unwanted exception: Whoops + * ``` + * @since v0.1.21 + */ + function doesNotThrow(block: () => unknown, message?: string | Error): void; + function doesNotThrow(block: () => unknown, error: AssertPredicate, message?: string | Error): void; + /** + * Throws `value` if `value` is not `undefined` or `null`. This is useful when + * testing the `error` argument in callbacks. The stack trace contains all frames + * from the error passed to `ifError()` including the potential new frames for`ifError()` itself. + * + * ```js + * import assert from 'assert/strict'; + * + * assert.ifError(null); + * // OK + * assert.ifError(0); + * // AssertionError [ERR_ASSERTION]: ifError got unwanted exception: 0 + * assert.ifError('error'); + * // AssertionError [ERR_ASSERTION]: ifError got unwanted exception: 'error' + * assert.ifError(new Error()); + * // AssertionError [ERR_ASSERTION]: ifError got unwanted exception: Error + * + * // Create some random error frames. + * let err; + * (function errorFrame() { + * err = new Error('test error'); + * })(); + * + * (function ifErrorFrame() { + * assert.ifError(err); + * })(); + * // AssertionError [ERR_ASSERTION]: ifError got unwanted exception: test error + * // at ifErrorFrame + * // at errorFrame + * ``` + * @since v0.1.97 + */ + function ifError(value: unknown): asserts value is null | undefined; + /** + * Awaits the `asyncFn` promise or, if `asyncFn` is a function, immediately + * calls the function and awaits the returned promise to complete. It will then + * check that the promise is rejected. + * + * If `asyncFn` is a function and it throws an error synchronously,`assert.rejects()` will return a rejected `Promise` with that error. If the + * function does not return a promise, `assert.rejects()` will return a rejected`Promise` with an `ERR_INVALID_RETURN_VALUE` error. In both cases the error + * handler is skipped. + * + * Besides the async nature to await the completion behaves identically to {@link throws}. + * + * If specified, `error` can be a [`Class`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Classes), + * [`RegExp`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions), a validation function, + * an object where each property will be tested for, or an instance of error where + * each property will be tested for including the non-enumerable `message` and`name` properties. + * + * If specified, `message` will be the message provided by the `AssertionError` if the `asyncFn` fails to reject. 
+ * + * ```js + * import assert from 'assert/strict'; + * + * await assert.rejects( + * async () => { + * throw new TypeError('Wrong value'); + * }, + * { + * name: 'TypeError', + * message: 'Wrong value' + * } + * ); + * ``` + * + * ```js + * import assert from 'assert/strict'; + * + * await assert.rejects( + * async () => { + * throw new TypeError('Wrong value'); + * }, + * (err) => { + * assert.strictEqual(err.name, 'TypeError'); + * assert.strictEqual(err.message, 'Wrong value'); + * return true; + * } + * ); + * ``` + * + * ```js + * import assert from 'assert/strict'; + * + * assert.rejects( + * Promise.reject(new Error('Wrong value')), + * Error + * ).then(() => { + * // ... + * }); + * ``` + * + * `error` cannot be a string. If a string is provided as the second + * argument, then `error` is assumed to be omitted and the string will be used for`message` instead. This can lead to easy-to-miss mistakes. Please read the + * example in {@link throws} carefully if using a string as the second + * argument gets considered. + * @since v10.0.0 + */ + function rejects(block: (() => Promise) | Promise, message?: string | Error): Promise; + function rejects(block: (() => Promise) | Promise, error: AssertPredicate, message?: string | Error): Promise; + /** + * Awaits the `asyncFn` promise or, if `asyncFn` is a function, immediately + * calls the function and awaits the returned promise to complete. It will then + * check that the promise is not rejected. + * + * If `asyncFn` is a function and it throws an error synchronously,`assert.doesNotReject()` will return a rejected `Promise` with that error. If + * the function does not return a promise, `assert.doesNotReject()` will return a + * rejected `Promise` with an `ERR_INVALID_RETURN_VALUE` error. In both cases + * the error handler is skipped. + * + * Using `assert.doesNotReject()` is actually not useful because there is little + * benefit in catching a rejection and then rejecting it again. Instead, consider + * adding a comment next to the specific code path that should not reject and keep + * error messages as expressive as possible. + * + * If specified, `error` can be a [`Class`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Classes), + * [`RegExp`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions) or a validation + * function. See {@link throws} for more details. + * + * Besides the async nature to await the completion behaves identically to {@link doesNotThrow}. + * + * ```js + * import assert from 'assert/strict'; + * + * await assert.doesNotReject( + * async () => { + * throw new TypeError('Wrong value'); + * }, + * SyntaxError + * ); + * ``` + * + * ```js + * import assert from 'assert/strict'; + * + * assert.doesNotReject(Promise.reject(new TypeError('Wrong value'))) + * .then(() => { + * // ... + * }); + * ``` + * @since v10.0.0 + */ + function doesNotReject(block: (() => Promise) | Promise, message?: string | Error): Promise; + function doesNotReject(block: (() => Promise) | Promise, error: AssertPredicate, message?: string | Error): Promise; + /** + * Expects the `string` input to match the regular expression. + * + * ```js + * import assert from 'assert/strict'; + * + * assert.match('I will fail', /pass/); + * // AssertionError [ERR_ASSERTION]: The input did not match the regular ... + * + * assert.match(123, /pass/); + * // AssertionError [ERR_ASSERTION]: The "string" argument must be of type string. 
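A hedged sketch of the async counterparts `assert.rejects()` and `assert.doesNotReject()` described above, wrapped in an async entry point; `findUser` is a made-up helper:

```ts
import assert from 'node:assert/strict';

async function findUser(id: number): Promise<{ id: number }> {
  if (id < 0) throw new RangeError('id must be non-negative');
  return { id };
}

async function main(): Promise<void> {
  // The promise must reject with a matching error name/message.
  await assert.rejects(() => findUser(-1), {
    name: 'RangeError',
    message: 'id must be non-negative',
  });

  // The promise must settle without rejecting.
  await assert.doesNotReject(() => findUser(7));
}

main().catch((err) => {
  console.error(err);
  process.exitCode = 1;
});
```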
+ * + * assert.match('I will pass', /pass/); + * // OK + * ``` + * + * If the values do not match, or if the `string` argument is of another type than`string`, an `AssertionError` is thrown with a `message` property set equal + * to the value of the `message` parameter. If the `message` parameter is + * undefined, a default error message is assigned. If the `message` parameter is an + * instance of an `Error` then it will be thrown instead of the `AssertionError`. + * @since v13.6.0, v12.16.0 + */ + function match(value: string, regExp: RegExp, message?: string | Error): void; + /** + * Expects the `string` input not to match the regular expression. + * + * ```js + * import assert from 'assert/strict'; + * + * assert.doesNotMatch('I will fail', /fail/); + * // AssertionError [ERR_ASSERTION]: The input was expected to not match the ... + * + * assert.doesNotMatch(123, /pass/); + * // AssertionError [ERR_ASSERTION]: The "string" argument must be of type string. + * + * assert.doesNotMatch('I will pass', /different/); + * // OK + * ``` + * + * If the values do match, or if the `string` argument is of another type than`string`, an `AssertionError` is thrown with a `message` property set equal + * to the value of the `message` parameter. If the `message` parameter is + * undefined, a default error message is assigned. If the `message` parameter is an + * instance of an `Error` then it will be thrown instead of the `AssertionError`. + * @since v13.6.0, v12.16.0 + */ + function doesNotMatch(value: string, regExp: RegExp, message?: string | Error): void; + const strict: Omit & { + (value: unknown, message?: string | Error): asserts value; + equal: typeof strictEqual; + notEqual: typeof notStrictEqual; + deepEqual: typeof deepStrictEqual; + notDeepEqual: typeof notDeepStrictEqual; + // Mapped types and assertion functions are incompatible? + // TS2775: Assertions require every name in the call target + // to be declared with an explicit type annotation. + ok: typeof ok; + strictEqual: typeof strictEqual; + deepStrictEqual: typeof deepStrictEqual; + ifError: typeof ifError; + strict: typeof strict; + }; + } + export = assert; +} +declare module 'node:assert' { + import assert = require('assert'); + export = assert; +} diff --git a/node_modules/@types/node/assert/strict.d.ts b/node_modules/@types/node/assert/strict.d.ts new file mode 100755 index 0000000..b4319b9 --- /dev/null +++ b/node_modules/@types/node/assert/strict.d.ts @@ -0,0 +1,8 @@ +declare module 'assert/strict' { + import { strict } from 'node:assert'; + export = strict; +} +declare module 'node:assert/strict' { + import { strict } from 'node:assert'; + export = strict; +} diff --git a/node_modules/@types/node/async_hooks.d.ts b/node_modules/@types/node/async_hooks.d.ts new file mode 100755 index 0000000..0bf4739 --- /dev/null +++ b/node_modules/@types/node/async_hooks.d.ts @@ -0,0 +1,501 @@ +/** + * The `async_hooks` module provides an API to track asynchronous resources. 
It + * can be accessed using: + * + * ```js + * import async_hooks from 'async_hooks'; + * ``` + * @experimental + * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/async_hooks.js) + */ +declare module 'async_hooks' { + /** + * ```js + * import { executionAsyncId } from 'async_hooks'; + * + * console.log(executionAsyncId()); // 1 - bootstrap + * fs.open(path, 'r', (err, fd) => { + * console.log(executionAsyncId()); // 6 - open() + * }); + * ``` + * + * The ID returned from `executionAsyncId()` is related to execution timing, not + * causality (which is covered by `triggerAsyncId()`): + * + * ```js + * const server = net.createServer((conn) => { + * // Returns the ID of the server, not of the new connection, because the + * // callback runs in the execution scope of the server's MakeCallback(). + * async_hooks.executionAsyncId(); + * + * }).listen(port, () => { + * // Returns the ID of a TickObject (process.nextTick()) because all + * // callbacks passed to .listen() are wrapped in a nextTick(). + * async_hooks.executionAsyncId(); + * }); + * ``` + * + * Promise contexts may not get precise `executionAsyncIds` by default. + * See the section on `promise execution tracking`. + * @since v8.1.0 + * @return The `asyncId` of the current execution context. Useful to track when something calls. + */ + function executionAsyncId(): number; + /** + * Resource objects returned by `executionAsyncResource()` are most often internal + * Node.js handle objects with undocumented APIs. Using any functions or properties + * on the object is likely to crash your application and should be avoided. + * + * Using `executionAsyncResource()` in the top-level execution context will + * return an empty object as there is no handle or request object to use, + * but having an object representing the top-level can be helpful. + * + * ```js + * import { open } from 'fs'; + * import { executionAsyncId, executionAsyncResource } from 'async_hooks'; + * + * console.log(executionAsyncId(), executionAsyncResource()); // 1 {} + * open(new URL(import.meta.url), 'r', (err, fd) => { + * console.log(executionAsyncId(), executionAsyncResource()); // 7 FSReqWrap + * }); + * ``` + * + * This can be used to implement continuation local storage without the + * use of a tracking `Map` to store the metadata: + * + * ```js + * import { createServer } from 'http'; + * import { + * executionAsyncId, + * executionAsyncResource, + * createHook + * } from 'async_hooks'; + * const sym = Symbol('state'); // Private symbol to avoid pollution + * + * createHook({ + * init(asyncId, type, triggerAsyncId, resource) { + * const cr = executionAsyncResource(); + * if (cr) { + * resource[sym] = cr[sym]; + * } + * } + * }).enable(); + * + * const server = createServer((req, res) => { + * executionAsyncResource()[sym] = { state: req.url }; + * setTimeout(function() { + * res.end(JSON.stringify(executionAsyncResource()[sym])); + * }, 100); + * }).listen(3000); + * ``` + * @since v13.9.0, v12.17.0 + * @return The resource representing the current execution. Useful to store data within the resource. + */ + function executionAsyncResource(): object; + /** + * ```js + * const server = net.createServer((conn) => { + * // The resource that caused (or triggered) this callback to be called + * // was that of the new connection. Thus the return value of triggerAsyncId() + * // is the asyncId of "conn". 
+ * async_hooks.triggerAsyncId(); + * + * }).listen(port, () => { + * // Even though all callbacks passed to .listen() are wrapped in a nextTick() + * // the callback itself exists because the call to the server's .listen() + * // was made. So the return value would be the ID of the server. + * async_hooks.triggerAsyncId(); + * }); + * ``` + * + * Promise contexts may not get valid `triggerAsyncId`s by default. See + * the section on `promise execution tracking`. + * @return The ID of the resource responsible for calling the callback that is currently being executed. + */ + function triggerAsyncId(): number; + interface HookCallbacks { + /** + * Called when a class is constructed that has the possibility to emit an asynchronous event. + * @param asyncId a unique ID for the async resource + * @param type the type of the async resource + * @param triggerAsyncId the unique ID of the async resource in whose execution context this async resource was created + * @param resource reference to the resource representing the async operation, needs to be released during destroy + */ + init?(asyncId: number, type: string, triggerAsyncId: number, resource: object): void; + /** + * When an asynchronous operation is initiated or completes a callback is called to notify the user. + * The before callback is called just before said callback is executed. + * @param asyncId the unique identifier assigned to the resource about to execute the callback. + */ + before?(asyncId: number): void; + /** + * Called immediately after the callback specified in before is completed. + * @param asyncId the unique identifier assigned to the resource which has executed the callback. + */ + after?(asyncId: number): void; + /** + * Called when a promise has resolve() called. This may not be in the same execution id + * as the promise itself. + * @param asyncId the unique id for the promise that was resolve()d. + */ + promiseResolve?(asyncId: number): void; + /** + * Called after the resource corresponding to asyncId is destroyed + * @param asyncId a unique ID for the async resource + */ + destroy?(asyncId: number): void; + } + interface AsyncHook { + /** + * Enable the callbacks for a given AsyncHook instance. If no callbacks are provided enabling is a noop. + */ + enable(): this; + /** + * Disable the callbacks for a given AsyncHook instance from the global pool of AsyncHook callbacks to be executed. Once a hook has been disabled it will not be called again until enabled. + */ + disable(): this; + } + /** + * Registers functions to be called for different lifetime events of each async + * operation. + * + * The callbacks `init()`/`before()`/`after()`/`destroy()` are called for the + * respective asynchronous event during a resource's lifetime. + * + * All callbacks are optional. For example, if only resource cleanup needs to + * be tracked, then only the `destroy` callback needs to be passed. The + * specifics of all functions that can be passed to `callbacks` is in the `Hook Callbacks` section. 
+ * + * ```js + * import { createHook } from 'async_hooks'; + * + * const asyncHook = createHook({ + * init(asyncId, type, triggerAsyncId, resource) { }, + * destroy(asyncId) { } + * }); + * ``` + * + * The callbacks will be inherited via the prototype chain: + * + * ```js + * class MyAsyncCallbacks { + * init(asyncId, type, triggerAsyncId, resource) { } + * destroy(asyncId) {} + * } + * + * class MyAddedCallbacks extends MyAsyncCallbacks { + * before(asyncId) { } + * after(asyncId) { } + * } + * + * const asyncHook = async_hooks.createHook(new MyAddedCallbacks()); + * ``` + * + * Because promises are asynchronous resources whose lifecycle is tracked + * via the async hooks mechanism, the `init()`, `before()`, `after()`, and`destroy()` callbacks _must not_ be async functions that return promises. + * @since v8.1.0 + * @param callbacks The `Hook Callbacks` to register + * @return Instance used for disabling and enabling hooks + */ + function createHook(callbacks: HookCallbacks): AsyncHook; + interface AsyncResourceOptions { + /** + * The ID of the execution context that created this async event. + * @default executionAsyncId() + */ + triggerAsyncId?: number | undefined; + /** + * Disables automatic `emitDestroy` when the object is garbage collected. + * This usually does not need to be set (even if `emitDestroy` is called + * manually), unless the resource's `asyncId` is retrieved and the + * sensitive API's `emitDestroy` is called with it. + * @default false + */ + requireManualDestroy?: boolean | undefined; + } + /** + * The class `AsyncResource` is designed to be extended by the embedder's async + * resources. Using this, users can easily trigger the lifetime events of their + * own resources. + * + * The `init` hook will trigger when an `AsyncResource` is instantiated. + * + * The following is an overview of the `AsyncResource` API. + * + * ```js + * import { AsyncResource, executionAsyncId } from 'async_hooks'; + * + * // AsyncResource() is meant to be extended. Instantiating a + * // new AsyncResource() also triggers init. If triggerAsyncId is omitted then + * // async_hook.executionAsyncId() is used. + * const asyncResource = new AsyncResource( + * type, { triggerAsyncId: executionAsyncId(), requireManualDestroy: false } + * ); + * + * // Run a function in the execution context of the resource. This will + * // * establish the context of the resource + * // * trigger the AsyncHooks before callbacks + * // * call the provided function `fn` with the supplied arguments + * // * trigger the AsyncHooks after callbacks + * // * restore the original execution context + * asyncResource.runInAsyncScope(fn, thisArg, ...args); + * + * // Call AsyncHooks destroy callbacks. + * asyncResource.emitDestroy(); + * + * // Return the unique ID assigned to the AsyncResource instance. + * asyncResource.asyncId(); + * + * // Return the trigger ID for the AsyncResource instance. + * asyncResource.triggerAsyncId(); + * ``` + */ + class AsyncResource { + /** + * AsyncResource() is meant to be extended. Instantiating a + * new AsyncResource() also triggers init. If triggerAsyncId is omitted then + * async_hook.executionAsyncId() is used. + * @param type The type of async event. + * @param triggerAsyncId The ID of the execution context that created + * this async event (default: `executionAsyncId()`), or an + * AsyncResourceOptions object (since v9.3.0) + */ + constructor(type: string, triggerAsyncId?: number | AsyncResourceOptions); + /** + * Binds the given function to the current execution context. 
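A minimal sketch of the embedder-extension pattern that `AsyncResource` is designed for, per the overview above; the `DBQuery` class and its fake driver are hypothetical:

```ts
import { AsyncResource } from 'node:async_hooks';

// Keep user callbacks attached to the async context that created the query.
class DBQuery extends AsyncResource {
  constructor(private readonly sql: string) {
    super('DBQuery'); // triggerAsyncId defaults to executionAsyncId()
  }

  run(callback: (err: Error | null, rows: string[]) => void): void {
    // Pretend the driver answers asynchronously.
    setImmediate(() => {
      // Re-enter this resource's context before invoking the user callback.
      this.runInAsyncScope(callback, null, null, [`rows for: ${this.sql}`]);
      this.emitDestroy(); // fire destroy hooks once the resource is done
    });
  }
}

new DBQuery('SELECT 1').run((err, rows) => console.log(err ?? rows));
```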
+ * + * The returned function will have an `asyncResource` property referencing + * the `AsyncResource` to which the function is bound. + * @since v14.8.0, v12.19.0 + * @param fn The function to bind to the current execution context. + * @param type An optional name to associate with the underlying `AsyncResource`. + */ + static bind any, ThisArg>( + fn: Func, + type?: string, + thisArg?: ThisArg + ): Func & { + asyncResource: AsyncResource; + }; + /** + * Binds the given function to execute to this `AsyncResource`'s scope. + * + * The returned function will have an `asyncResource` property referencing + * the `AsyncResource` to which the function is bound. + * @since v14.8.0, v12.19.0 + * @param fn The function to bind to the current `AsyncResource`. + */ + bind any>( + fn: Func + ): Func & { + asyncResource: AsyncResource; + }; + /** + * Call the provided function with the provided arguments in the execution context + * of the async resource. This will establish the context, trigger the AsyncHooks + * before callbacks, call the function, trigger the AsyncHooks after callbacks, and + * then restore the original execution context. + * @since v9.6.0 + * @param fn The function to call in the execution context of this async resource. + * @param thisArg The receiver to be used for the function call. + * @param args Optional arguments to pass to the function. + */ + runInAsyncScope(fn: (this: This, ...args: any[]) => Result, thisArg?: This, ...args: any[]): Result; + /** + * Call all `destroy` hooks. This should only ever be called once. An error will + * be thrown if it is called more than once. This **must** be manually called. If + * the resource is left to be collected by the GC then the `destroy` hooks will + * never be called. + * @return A reference to `asyncResource`. + */ + emitDestroy(): this; + /** + * @return The unique `asyncId` assigned to the resource. + */ + asyncId(): number; + /** + * + * @return The same `triggerAsyncId` that is passed to the `AsyncResource` constructor. + */ + triggerAsyncId(): number; + } + /** + * This class creates stores that stay coherent through asynchronous operations. + * + * While you can create your own implementation on top of the `async_hooks` module,`AsyncLocalStorage` should be preferred as it is a performant and memory safe + * implementation that involves significant optimizations that are non-obvious to + * implement. + * + * The following example uses `AsyncLocalStorage` to build a simple logger + * that assigns IDs to incoming HTTP requests and includes them in messages + * logged within each request. + * + * ```js + * import http from 'http'; + * import { AsyncLocalStorage } from 'async_hooks'; + * + * const asyncLocalStorage = new AsyncLocalStorage(); + * + * function logWithId(msg) { + * const id = asyncLocalStorage.getStore(); + * console.log(`${id !== undefined ? id : '-'}:`, msg); + * } + * + * let idSeq = 0; + * http.createServer((req, res) => { + * asyncLocalStorage.run(idSeq++, () => { + * logWithId('start'); + * // Imagine any chain of async operations here + * setImmediate(() => { + * logWithId('finish'); + * res.end(); + * }); + * }); + * }).listen(8080); + * + * http.get('http://localhost:8080'); + * http.get('http://localhost:8080'); + * // Prints: + * // 0: start + * // 1: start + * // 0: finish + * // 1: finish + * ``` + * + * Each instance of `AsyncLocalStorage` maintains an independent storage context. + * Multiple instances can safely exist simultaneously without risk of interfering + * with each other's data. 
+ * @since v13.10.0, v12.17.0 + */ + class AsyncLocalStorage { + /** + * Disables the instance of `AsyncLocalStorage`. All subsequent calls + * to `asyncLocalStorage.getStore()` will return `undefined` until`asyncLocalStorage.run()` or `asyncLocalStorage.enterWith()` is called again. + * + * When calling `asyncLocalStorage.disable()`, all current contexts linked to the + * instance will be exited. + * + * Calling `asyncLocalStorage.disable()` is required before the`asyncLocalStorage` can be garbage collected. This does not apply to stores + * provided by the `asyncLocalStorage`, as those objects are garbage collected + * along with the corresponding async resources. + * + * Use this method when the `asyncLocalStorage` is not in use anymore + * in the current process. + * @since v13.10.0, v12.17.0 + * @experimental + */ + disable(): void; + /** + * Returns the current store. + * If called outside of an asynchronous context initialized by + * calling `asyncLocalStorage.run()` or `asyncLocalStorage.enterWith()`, it + * returns `undefined`. + * @since v13.10.0, v12.17.0 + */ + getStore(): T | undefined; + /** + * Runs a function synchronously within a context and returns its + * return value. The store is not accessible outside of the callback function. + * The store is accessible to any asynchronous operations created within the + * callback. + * + * The optional `args` are passed to the callback function. + * + * If the callback function throws an error, the error is thrown by `run()` too. + * The stacktrace is not impacted by this call and the context is exited. + * + * Example: + * + * ```js + * const store = { id: 2 }; + * try { + * asyncLocalStorage.run(store, () => { + * asyncLocalStorage.getStore(); // Returns the store object + * setTimeout(() => { + * asyncLocalStorage.getStore(); // Returns the store object + * }, 200); + * throw new Error(); + * }); + * } catch (e) { + * asyncLocalStorage.getStore(); // Returns undefined + * // The error will be caught here + * } + * ``` + * @since v13.10.0, v12.17.0 + */ + run(store: T, callback: (...args: TArgs) => R, ...args: TArgs): R; + /** + * Runs a function synchronously outside of a context and returns its + * return value. The store is not accessible within the callback function or + * the asynchronous operations created within the callback. Any `getStore()`call done within the callback function will always return `undefined`. + * + * The optional `args` are passed to the callback function. + * + * If the callback function throws an error, the error is thrown by `exit()` too. + * The stacktrace is not impacted by this call and the context is re-entered. + * + * Example: + * + * ```js + * // Within a call to run + * try { + * asyncLocalStorage.getStore(); // Returns the store object or value + * asyncLocalStorage.exit(() => { + * asyncLocalStorage.getStore(); // Returns undefined + * throw new Error(); + * }); + * } catch (e) { + * asyncLocalStorage.getStore(); // Returns the same object or value + * // The error will be caught here + * } + * ``` + * @since v13.10.0, v12.17.0 + * @experimental + */ + exit(callback: (...args: TArgs) => R, ...args: TArgs): R; + /** + * Transitions into the context for the remainder of the current + * synchronous execution and then persists the store through any following + * asynchronous calls. 
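A small sketch tying together `run()` and `getStore()` as described above, with the store type supplied explicitly; the `RequestContext` shape is made up:

```ts
import { AsyncLocalStorage } from 'node:async_hooks';

interface RequestContext {
  requestId: number;
}

// The store type is the class's type parameter.
const als = new AsyncLocalStorage<RequestContext>();

function log(msg: string): void {
  const ctx = als.getStore(); // RequestContext | undefined
  console.log(`[${ctx?.requestId ?? '-'}] ${msg}`);
}

let nextId = 0;
function handleRequest(): void {
  als.run({ requestId: nextId++ }, () => {
    log('start');
    setTimeout(() => log('finish'), 10); // still sees the same store
  });
}

handleRequest();
handleRequest();
log('outside any context'); // "[-] ..." because getStore() returns undefined here
```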
+ * + * Example: + * + * ```js + * const store = { id: 1 }; + * // Replaces previous store with the given store object + * asyncLocalStorage.enterWith(store); + * asyncLocalStorage.getStore(); // Returns the store object + * someAsyncOperation(() => { + * asyncLocalStorage.getStore(); // Returns the same object + * }); + * ``` + * + * This transition will continue for the _entire_ synchronous execution. + * This means that if, for example, the context is entered within an event + * handler subsequent event handlers will also run within that context unless + * specifically bound to another context with an `AsyncResource`. That is why`run()` should be preferred over `enterWith()` unless there are strong reasons + * to use the latter method. + * + * ```js + * const store = { id: 1 }; + * + * emitter.on('my-event', () => { + * asyncLocalStorage.enterWith(store); + * }); + * emitter.on('my-event', () => { + * asyncLocalStorage.getStore(); // Returns the same object + * }); + * + * asyncLocalStorage.getStore(); // Returns undefined + * emitter.emit('my-event'); + * asyncLocalStorage.getStore(); // Returns the same object + * ``` + * @since v13.11.0, v12.17.0 + * @experimental + */ + enterWith(store: T): void; + } +} +declare module 'node:async_hooks' { + export * from 'async_hooks'; +} diff --git a/node_modules/@types/node/buffer.d.ts b/node_modules/@types/node/buffer.d.ts new file mode 100755 index 0000000..5ec326d --- /dev/null +++ b/node_modules/@types/node/buffer.d.ts @@ -0,0 +1,2258 @@ +/** + * `Buffer` objects are used to represent a fixed-length sequence of bytes. Many + * Node.js APIs support `Buffer`s. + * + * The `Buffer` class is a subclass of JavaScript's [`Uint8Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint8Array) class and + * extends it with methods that cover additional use cases. Node.js APIs accept + * plain [`Uint8Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint8Array) s wherever `Buffer`s are supported as well. + * + * While the `Buffer` class is available within the global scope, it is still + * recommended to explicitly reference it via an import or require statement. + * + * ```js + * import { Buffer } from 'buffer'; + * + * // Creates a zero-filled Buffer of length 10. + * const buf1 = Buffer.alloc(10); + * + * // Creates a Buffer of length 10, + * // filled with bytes which all have the value `1`. + * const buf2 = Buffer.alloc(10, 1); + * + * // Creates an uninitialized buffer of length 10. + * // This is faster than calling Buffer.alloc() but the returned + * // Buffer instance might contain old data that needs to be + * // overwritten using fill(), write(), or other functions that fill the Buffer's + * // contents. + * const buf3 = Buffer.allocUnsafe(10); + * + * // Creates a Buffer containing the bytes [1, 2, 3]. + * const buf4 = Buffer.from([1, 2, 3]); + * + * // Creates a Buffer containing the bytes [1, 1, 1, 1] – the entries + * // are all truncated using `(value & 255)` to fit into the range 0–255. + * const buf5 = Buffer.from([257, 257.5, -255, '1']); + * + * // Creates a Buffer containing the UTF-8-encoded bytes for the string 'tést': + * // [0x74, 0xc3, 0xa9, 0x73, 0x74] (in hexadecimal notation) + * // [116, 195, 169, 115, 116] (in decimal notation) + * const buf6 = Buffer.from('tést'); + * + * // Creates a Buffer containing the Latin-1 bytes [0x74, 0xe9, 0x73, 0x74]. 
+ * const buf7 = Buffer.from('tést', 'latin1'); + * ``` + * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/buffer.js) + */ +declare module 'buffer' { + import { BinaryLike } from 'node:crypto'; + import { ReadableStream as WebReadableStream } from 'node:stream/web'; + export const INSPECT_MAX_BYTES: number; + export const kMaxLength: number; + export const kStringMaxLength: number; + export const constants: { + MAX_LENGTH: number; + MAX_STRING_LENGTH: number; + }; + export type TranscodeEncoding = 'ascii' | 'utf8' | 'utf16le' | 'ucs2' | 'latin1' | 'binary'; + /** + * Re-encodes the given `Buffer` or `Uint8Array` instance from one character + * encoding to another. Returns a new `Buffer` instance. + * + * Throws if the `fromEnc` or `toEnc` specify invalid character encodings or if + * conversion from `fromEnc` to `toEnc` is not permitted. + * + * Encodings supported by `buffer.transcode()` are: `'ascii'`, `'utf8'`,`'utf16le'`, `'ucs2'`, `'latin1'`, and `'binary'`. + * + * The transcoding process will use substitution characters if a given byte + * sequence cannot be adequately represented in the target encoding. For instance: + * + * ```js + * import { Buffer, transcode } from 'buffer'; + * + * const newBuf = transcode(Buffer.from('€'), 'utf8', 'ascii'); + * console.log(newBuf.toString('ascii')); + * // Prints: '?' + * ``` + * + * Because the Euro (`€`) sign is not representable in US-ASCII, it is replaced + * with `?` in the transcoded `Buffer`. + * @since v7.1.0 + * @param source A `Buffer` or `Uint8Array` instance. + * @param fromEnc The current encoding. + * @param toEnc To target encoding. + */ + export function transcode(source: Uint8Array, fromEnc: TranscodeEncoding, toEnc: TranscodeEncoding): Buffer; + export const SlowBuffer: { + /** @deprecated since v6.0.0, use `Buffer.allocUnsafeSlow()` */ + new (size: number): Buffer; + prototype: Buffer; + }; + /** + * Resolves a `'blob:nodedata:...'` an associated `Blob` object registered using + * a prior call to `URL.createObjectURL()`. + * @since v16.7.0 + * @experimental + * @param id A `'blob:nodedata:...` URL string returned by a prior call to `URL.createObjectURL()`. + */ + export function resolveObjectURL(id: string): Blob | undefined; + export { Buffer }; + /** + * @experimental + */ + export interface BlobOptions { + /** + * @default 'utf8' + */ + encoding?: BufferEncoding | undefined; + /** + * The Blob content-type. The intent is for `type` to convey + * the MIME media type of the data, however no validation of the type format + * is performed. + */ + type?: string | undefined; + } + /** + * A [`Blob`](https://developer.mozilla.org/en-US/docs/Web/API/Blob) encapsulates immutable, raw data that can be safely shared across + * multiple worker threads. + * @since v15.7.0, v14.18.0 + */ + export class Blob { + /** + * The total size of the `Blob` in bytes. + * @since v15.7.0, v14.18.0 + */ + readonly size: number; + /** + * The content-type of the `Blob`. + * @since v15.7.0, v14.18.0 + */ + readonly type: string; + /** + * Creates a new `Blob` object containing a concatenation of the given sources. + * + * {ArrayBuffer}, {TypedArray}, {DataView}, and {Buffer} sources are copied into + * the 'Blob' and can therefore be safely modified after the 'Blob' is created. + * + * String sources are also copied into the `Blob`. 
+ */ + constructor(sources: Array, options?: BlobOptions); + /** + * Returns a promise that fulfills with an [ArrayBuffer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer) containing a copy of + * the `Blob` data. + * @since v15.7.0, v14.18.0 + */ + arrayBuffer(): Promise; + /** + * Creates and returns a new `Blob` containing a subset of this `Blob` objects + * data. The original `Blob` is not altered. + * @since v15.7.0, v14.18.0 + * @param start The starting index. + * @param end The ending index. + * @param type The content-type for the new `Blob` + */ + slice(start?: number, end?: number, type?: string): Blob; + /** + * Returns a promise that fulfills with the contents of the `Blob` decoded as a + * UTF-8 string. + * @since v15.7.0, v14.18.0 + */ + text(): Promise; + /** + * Returns a new (WHATWG) `ReadableStream` that allows the content of the `Blob` to be read. + * @since v16.7.0 + */ + stream(): WebReadableStream; + } + export import atob = globalThis.atob; + export import btoa = globalThis.btoa; + + import { Blob as NodeBlob } from 'buffer'; + // This conditional type will be the existing global Blob in a browser, or + // the copy below in a Node environment. + type __Blob = typeof globalThis extends { onmessage: any, Blob: infer T } + ? T : NodeBlob; + global { + // Buffer class + type BufferEncoding = 'ascii' | 'utf8' | 'utf-8' | 'utf16le' | 'ucs2' | 'ucs-2' | 'base64' | 'base64url' | 'latin1' | 'binary' | 'hex'; + type WithImplicitCoercion = + | T + | { + valueOf(): T; + }; + /** + * Raw data is stored in instances of the Buffer class. + * A Buffer is similar to an array of integers but corresponds to a raw memory allocation outside the V8 heap. A Buffer cannot be resized. + * Valid string encodings: 'ascii'|'utf8'|'utf16le'|'ucs2'(alias of 'utf16le')|'base64'|'base64url'|'binary'(deprecated)|'hex' + */ + interface BufferConstructor { + /** + * Allocates a new buffer containing the given {str}. + * + * @param str String to store in buffer. + * @param encoding encoding to use, optional. Default is 'utf8' + * @deprecated since v10.0.0 - Use `Buffer.from(string[, encoding])` instead. + */ + new (str: string, encoding?: BufferEncoding): Buffer; + /** + * Allocates a new buffer of {size} octets. + * + * @param size count of octets to allocate. + * @deprecated since v10.0.0 - Use `Buffer.alloc()` instead (also see `Buffer.allocUnsafe()`). + */ + new (size: number): Buffer; + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + * @deprecated since v10.0.0 - Use `Buffer.from(array)` instead. + */ + new (array: Uint8Array): Buffer; + /** + * Produces a Buffer backed by the same allocated memory as + * the given {ArrayBuffer}/{SharedArrayBuffer}. + * + * + * @param arrayBuffer The ArrayBuffer with which to share memory. + * @deprecated since v10.0.0 - Use `Buffer.from(arrayBuffer[, byteOffset[, length]])` instead. + */ + new (arrayBuffer: ArrayBuffer | SharedArrayBuffer): Buffer; + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + * @deprecated since v10.0.0 - Use `Buffer.from(array)` instead. + */ + new (array: ReadonlyArray): Buffer; + /** + * Copies the passed {buffer} data onto a new {Buffer} instance. + * + * @param buffer The buffer to copy. + * @deprecated since v10.0.0 - Use `Buffer.from(buffer)` instead. 
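A brief usage sketch for the `Blob` class documented above (assumes Node.js >= 15.7, where `Blob` is exported from `node:buffer`):

```ts
import { Blob, Buffer } from 'node:buffer';

async function main(): Promise<void> {
  // Sources are copied into the Blob, so later edits to them are not reflected.
  const blob = new Blob(['hello ', 'node'], { type: 'text/plain' });

  console.log(blob.size, blob.type);            // 10 text/plain
  console.log(await blob.text());               // hello node

  const tail = blob.slice(6, 10, 'text/plain'); // a new Blob covering bytes 6..9
  console.log(await tail.text());               // node

  const buf = Buffer.from(await blob.arrayBuffer());
  console.log(buf.toString('utf8'));            // hello node
}

main().catch(console.error);
```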
+ */
+ new (buffer: Buffer): Buffer;
+ /**
+ * Allocates a new `Buffer` using an `array` of bytes in the range `0` – `255`.
+ * Array entries outside that range will be truncated to fit into it.
+ *
+ * ```js
+ * import { Buffer } from 'buffer';
+ *
+ * // Creates a new Buffer containing the UTF-8 bytes of the string 'buffer'.
+ * const buf = Buffer.from([0x62, 0x75, 0x66, 0x66, 0x65, 0x72]);
+ * ```
+ *
+ * A `TypeError` will be thrown if `array` is not an `Array` or another type
+ * appropriate for `Buffer.from()` variants.
+ *
+ * `Buffer.from(array)` and `Buffer.from(string)` may also use the internal`Buffer` pool like `Buffer.allocUnsafe()` does.
+ * @since v5.10.0
+ */
+ from(arrayBuffer: WithImplicitCoercion<ArrayBuffer | SharedArrayBuffer>, byteOffset?: number, length?: number): Buffer;
+ /**
+ * Creates a new Buffer using the passed {data}
+ * @param data data to create a new Buffer
+ */
+ from(data: Uint8Array | ReadonlyArray<number>): Buffer;
+ from(data: WithImplicitCoercion<Uint8Array | ReadonlyArray<number> | string>): Buffer;
+ /**
+ * Creates a new Buffer containing the given JavaScript string {str}.
+ * If provided, the {encoding} parameter identifies the character encoding.
+ * If not provided, {encoding} defaults to 'utf8'.
+ */
+ from(
+ str:
+ | WithImplicitCoercion<string>
+ | {
+ [Symbol.toPrimitive](hint: 'string'): string;
+ },
+ encoding?: BufferEncoding
+ ): Buffer;
+ /**
+ * Creates a new Buffer using the passed {data}
+ * @param values to create a new Buffer
+ */
+ of(...items: number[]): Buffer;
+ /**
+ * Returns `true` if `obj` is a `Buffer`, `false` otherwise.
+ *
+ * ```js
+ * import { Buffer } from 'buffer';
+ *
+ * Buffer.isBuffer(Buffer.alloc(10)); // true
+ * Buffer.isBuffer(Buffer.from('foo')); // true
+ * Buffer.isBuffer('a string'); // false
+ * Buffer.isBuffer([]); // false
+ * Buffer.isBuffer(new Uint8Array(1024)); // false
+ * ```
+ * @since v0.1.101
+ */
+ isBuffer(obj: any): obj is Buffer;
+ /**
+ * Returns `true` if `encoding` is the name of a supported character encoding,
+ * or `false` otherwise.
+ *
+ * ```js
+ * import { Buffer } from 'buffer';
+ *
+ * console.log(Buffer.isEncoding('utf8'));
+ * // Prints: true
+ *
+ * console.log(Buffer.isEncoding('hex'));
+ * // Prints: true
+ *
+ * console.log(Buffer.isEncoding('utf/8'));
+ * // Prints: false
+ *
+ * console.log(Buffer.isEncoding(''));
+ * // Prints: false
+ * ```
+ * @since v0.9.1
+ * @param encoding A character encoding name to check.
+ */
+ isEncoding(encoding: string): encoding is BufferEncoding;
+ /**
+ * Returns the byte length of a string when encoded using `encoding`.
+ * This is not the same as [`String.prototype.length`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/length), which does not account
+ * for the encoding that is used to convert the string into bytes.
+ *
+ * For `'base64'`, `'base64url'`, and `'hex'`, this function assumes valid input.
+ * For strings that contain non-base64/hex-encoded data (e.g. whitespace), the
+ * return value might be greater than the length of a `Buffer` created from the
+ * string.
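+ *
+ * An illustrative sketch of that caveat (not from the upstream docs), using 'hex' input that contains whitespace:
+ *
+ * ```js
+ * import { Buffer } from 'buffer';
+ *
+ * // byteLength estimates from the string length alone, while decoding
+ * // stops at the first invalid character.
+ * console.log(Buffer.byteLength('abcd ef', 'hex'));
+ * // Prints: 3
+ * console.log(Buffer.from('abcd ef', 'hex').length);
+ * // Prints: 2
+ * ```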
+ *
+ * ```js
+ * import { Buffer } from 'buffer';
+ *
+ * const str = '\u00bd + \u00bc = \u00be';
+ *
+ * console.log(`${str}: ${str.length} characters, ` +
+ * `${Buffer.byteLength(str, 'utf8')} bytes`);
+ * // Prints: ½ + ¼ = ¾: 9 characters, 12 bytes
+ * ```
+ *
+ * When `string` is a
+ * `Buffer`/[`DataView`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/DataView)/[`TypedArray`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/-
+ * Reference/Global_Objects/TypedArray)/[`ArrayBuffer`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer)/[`SharedArrayBuffer`](https://develop-
+ * er.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/SharedArrayBuffer), the byte length as reported by `.byteLength`is returned.
+ * @since v0.1.90
+ * @param string A value to calculate the length of.
+ * @param [encoding='utf8'] If `string` is a string, this is its encoding.
+ * @return The number of bytes contained within `string`.
+ */
+ byteLength(string: string | NodeJS.ArrayBufferView | ArrayBuffer | SharedArrayBuffer, encoding?: BufferEncoding): number;
+ /**
+ * Returns a new `Buffer` which is the result of concatenating all the `Buffer`instances in the `list` together.
+ *
+ * If the list has no items, or if the `totalLength` is 0, then a new zero-length`Buffer` is returned.
+ *
+ * If `totalLength` is not provided, it is calculated from the `Buffer` instances
+ * in `list` by adding their lengths.
+ *
+ * If `totalLength` is provided, it is coerced to an unsigned integer. If the
+ * combined length of the `Buffer`s in `list` exceeds `totalLength`, the result is
+ * truncated to `totalLength`.
+ *
+ * ```js
+ * import { Buffer } from 'buffer';
+ *
+ * // Create a single `Buffer` from a list of three `Buffer` instances.
+ *
+ * const buf1 = Buffer.alloc(10);
+ * const buf2 = Buffer.alloc(14);
+ * const buf3 = Buffer.alloc(18);
+ * const totalLength = buf1.length + buf2.length + buf3.length;
+ *
+ * console.log(totalLength);
+ * // Prints: 42
+ *
+ * const bufA = Buffer.concat([buf1, buf2, buf3], totalLength);
+ *
+ * console.log(bufA);
+ * // Prints: <Buffer 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00>
+ * console.log(bufA.length);
+ * // Prints: 42
+ * ```
+ *
+ * `Buffer.concat()` may also use the internal `Buffer` pool like `Buffer.allocUnsafe()` does.
+ * @since v0.7.11
+ * @param list List of `Buffer` or {@link Uint8Array} instances to concatenate.
+ * @param totalLength Total length of the `Buffer` instances in `list` when concatenated.
+ */
+ concat(list: ReadonlyArray<Uint8Array>, totalLength?: number): Buffer;
+ /**
+ * Compares `buf1` to `buf2`, typically for the purpose of sorting arrays of`Buffer` instances. This is equivalent to calling `buf1.compare(buf2)`.
+ *
+ * ```js
+ * import { Buffer } from 'buffer';
+ *
+ * const buf1 = Buffer.from('1234');
+ * const buf2 = Buffer.from('0123');
+ * const arr = [buf1, buf2];
+ *
+ * console.log(arr.sort(Buffer.compare));
+ * // Prints: [ <Buffer 30 31 32 33>, <Buffer 31 32 33 34> ]
+ * // (This result is equal to: [buf2, buf1].)
+ * ```
+ * @since v0.11.13
+ * @return Either `-1`, `0`, or `1`, depending on the result of the comparison. See `compare` for details.
+ */
+ compare(buf1: Uint8Array, buf2: Uint8Array): -1 | 0 | 1;
+ /**
+ * Allocates a new `Buffer` of `size` bytes. If `fill` is `undefined`, the`Buffer` will be zero-filled.
+ * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.alloc(5); + * + * console.log(buf); + * // Prints: + * ``` + * + * If `size` is larger than {@link constants.MAX_LENGTH} or smaller than 0, `ERR_INVALID_ARG_VALUE` is thrown. + * + * If `fill` is specified, the allocated `Buffer` will be initialized by calling `buf.fill(fill)`. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.alloc(5, 'a'); + * + * console.log(buf); + * // Prints: + * ``` + * + * If both `fill` and `encoding` are specified, the allocated `Buffer` will be + * initialized by calling `buf.fill(fill, encoding)`. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.alloc(11, 'aGVsbG8gd29ybGQ=', 'base64'); + * + * console.log(buf); + * // Prints: + * ``` + * + * Calling `Buffer.alloc()` can be measurably slower than the alternative `Buffer.allocUnsafe()` but ensures that the newly created `Buffer` instance + * contents will never contain sensitive data from previous allocations, including + * data that might not have been allocated for `Buffer`s. + * + * A `TypeError` will be thrown if `size` is not a number. + * @since v5.10.0 + * @param size The desired length of the new `Buffer`. + * @param [fill=0] A value to pre-fill the new `Buffer` with. + * @param [encoding='utf8'] If `fill` is a string, this is its encoding. + */ + alloc(size: number, fill?: string | Buffer | number, encoding?: BufferEncoding): Buffer; + /** + * Allocates a new `Buffer` of `size` bytes. If `size` is larger than {@link constants.MAX_LENGTH} or smaller than 0, `ERR_INVALID_ARG_VALUE` is thrown. + * + * The underlying memory for `Buffer` instances created in this way is _not_ + * _initialized_. The contents of the newly created `Buffer` are unknown and _may contain sensitive data_. Use `Buffer.alloc()` instead to initialize`Buffer` instances with zeroes. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.allocUnsafe(10); + * + * console.log(buf); + * // Prints (contents may vary): + * + * buf.fill(0); + * + * console.log(buf); + * // Prints: + * ``` + * + * A `TypeError` will be thrown if `size` is not a number. + * + * The `Buffer` module pre-allocates an internal `Buffer` instance of + * size `Buffer.poolSize` that is used as a pool for the fast allocation of new`Buffer` instances created using `Buffer.allocUnsafe()`,`Buffer.from(array)`, `Buffer.concat()`, and the + * deprecated`new Buffer(size)` constructor only when `size` is less than or equal + * to `Buffer.poolSize >> 1` (floor of `Buffer.poolSize` divided by two). + * + * Use of this pre-allocated internal memory pool is a key difference between + * calling `Buffer.alloc(size, fill)` vs. `Buffer.allocUnsafe(size).fill(fill)`. + * Specifically, `Buffer.alloc(size, fill)` will _never_ use the internal `Buffer`pool, while `Buffer.allocUnsafe(size).fill(fill)`_will_ use the internal`Buffer` pool if `size` is less + * than or equal to half `Buffer.poolSize`. The + * difference is subtle but can be important when an application requires the + * additional performance that `Buffer.allocUnsafe()` provides. + * @since v5.10.0 + * @param size The desired length of the new `Buffer`. + */ + allocUnsafe(size: number): Buffer; + /** + * Allocates a new `Buffer` of `size` bytes. If `size` is larger than {@link constants.MAX_LENGTH} or smaller than 0, `ERR_INVALID_ARG_VALUE` is thrown. A zero-length `Buffer` is created + * if `size` is 0. 
+ * + * The underlying memory for `Buffer` instances created in this way is _not_ + * _initialized_. The contents of the newly created `Buffer` are unknown and _may contain sensitive data_. Use `buf.fill(0)` to initialize + * such `Buffer` instances with zeroes. + * + * When using `Buffer.allocUnsafe()` to allocate new `Buffer` instances, + * allocations under 4 KB are sliced from a single pre-allocated `Buffer`. This + * allows applications to avoid the garbage collection overhead of creating many + * individually allocated `Buffer` instances. This approach improves both + * performance and memory usage by eliminating the need to track and clean up as + * many individual `ArrayBuffer` objects. + * + * However, in the case where a developer may need to retain a small chunk of + * memory from a pool for an indeterminate amount of time, it may be appropriate + * to create an un-pooled `Buffer` instance using `Buffer.allocUnsafeSlow()` and + * then copying out the relevant bits. + * + * ```js + * import { Buffer } from 'buffer'; + * + * // Need to keep around a few small chunks of memory. + * const store = []; + * + * socket.on('readable', () => { + * let data; + * while (null !== (data = readable.read())) { + * // Allocate for retained data. + * const sb = Buffer.allocUnsafeSlow(10); + * + * // Copy the data into the new allocation. + * data.copy(sb, 0, 0, 10); + * + * store.push(sb); + * } + * }); + * ``` + * + * A `TypeError` will be thrown if `size` is not a number. + * @since v5.12.0 + * @param size The desired length of the new `Buffer`. + */ + allocUnsafeSlow(size: number): Buffer; + /** + * This is the size (in bytes) of pre-allocated internal `Buffer` instances used + * for pooling. This value may be modified. + * @since v0.11.3 + */ + poolSize: number; + } + interface Buffer extends Uint8Array { + /** + * Writes `string` to `buf` at `offset` according to the character encoding in`encoding`. The `length` parameter is the number of bytes to write. If `buf` did + * not contain enough space to fit the entire string, only part of `string` will be + * written. However, partially encoded characters will not be written. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.alloc(256); + * + * const len = buf.write('\u00bd + \u00bc = \u00be', 0); + * + * console.log(`${len} bytes: ${buf.toString('utf8', 0, len)}`); + * // Prints: 12 bytes: ½ + ¼ = ¾ + * + * const buffer = Buffer.alloc(10); + * + * const length = buffer.write('abcd', 8); + * + * console.log(`${length} bytes: ${buffer.toString('utf8', 8, 10)}`); + * // Prints: 2 bytes : ab + * ``` + * @since v0.1.90 + * @param string String to write to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write `string`. + * @param [length=buf.length - offset] Maximum number of bytes to write (written bytes will not exceed `buf.length - offset`). + * @param [encoding='utf8'] The character encoding of `string`. + * @return Number of bytes written. + */ + write(string: string, encoding?: BufferEncoding): number; + write(string: string, offset: number, encoding?: BufferEncoding): number; + write(string: string, offset: number, length: number, encoding?: BufferEncoding): number; + /** + * Decodes `buf` to a string according to the specified character encoding in`encoding`. `start` and `end` may be passed to decode only a subset of `buf`. + * + * If `encoding` is `'utf8'` and a byte sequence in the input is not valid UTF-8, + * then each invalid byte is replaced with the replacement character `U+FFFD`. 
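+ *
+ * An illustrative sketch of that replacement behavior (not from the upstream docs):
+ *
+ * ```js
+ * import { Buffer } from 'buffer';
+ *
+ * // 0xff can never appear in well-formed UTF-8, so it decodes to U+FFFD.
+ * console.log(Buffer.from([0x61, 0xff, 0x62]).toString('utf8'));
+ * // Prints: a�b
+ * ```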
+ * + * The maximum length of a string instance (in UTF-16 code units) is available + * as {@link constants.MAX_STRING_LENGTH}. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf1 = Buffer.allocUnsafe(26); + * + * for (let i = 0; i < 26; i++) { + * // 97 is the decimal ASCII value for 'a'. + * buf1[i] = i + 97; + * } + * + * console.log(buf1.toString('utf8')); + * // Prints: abcdefghijklmnopqrstuvwxyz + * console.log(buf1.toString('utf8', 0, 5)); + * // Prints: abcde + * + * const buf2 = Buffer.from('tést'); + * + * console.log(buf2.toString('hex')); + * // Prints: 74c3a97374 + * console.log(buf2.toString('utf8', 0, 3)); + * // Prints: té + * console.log(buf2.toString(undefined, 0, 3)); + * // Prints: té + * ``` + * @since v0.1.90 + * @param [encoding='utf8'] The character encoding to use. + * @param [start=0] The byte offset to start decoding at. + * @param [end=buf.length] The byte offset to stop decoding at (not inclusive). + */ + toString(encoding?: BufferEncoding, start?: number, end?: number): string; + /** + * Returns a JSON representation of `buf`. [`JSON.stringify()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify) implicitly calls + * this function when stringifying a `Buffer` instance. + * + * `Buffer.from()` accepts objects in the format returned from this method. + * In particular, `Buffer.from(buf.toJSON())` works like `Buffer.from(buf)`. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.from([0x1, 0x2, 0x3, 0x4, 0x5]); + * const json = JSON.stringify(buf); + * + * console.log(json); + * // Prints: {"type":"Buffer","data":[1,2,3,4,5]} + * + * const copy = JSON.parse(json, (key, value) => { + * return value && value.type === 'Buffer' ? + * Buffer.from(value) : + * value; + * }); + * + * console.log(copy); + * // Prints: + * ``` + * @since v0.9.2 + */ + toJSON(): { + type: 'Buffer'; + data: number[]; + }; + /** + * Returns `true` if both `buf` and `otherBuffer` have exactly the same bytes,`false` otherwise. Equivalent to `buf.compare(otherBuffer) === 0`. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf1 = Buffer.from('ABC'); + * const buf2 = Buffer.from('414243', 'hex'); + * const buf3 = Buffer.from('ABCD'); + * + * console.log(buf1.equals(buf2)); + * // Prints: true + * console.log(buf1.equals(buf3)); + * // Prints: false + * ``` + * @since v0.11.13 + * @param otherBuffer A `Buffer` or {@link Uint8Array} with which to compare `buf`. + */ + equals(otherBuffer: Uint8Array): boolean; + /** + * Compares `buf` with `target` and returns a number indicating whether `buf`comes before, after, or is the same as `target` in sort order. + * Comparison is based on the actual sequence of bytes in each `Buffer`. + * + * * `0` is returned if `target` is the same as `buf` + * * `1` is returned if `target` should come _before_`buf` when sorted. + * * `-1` is returned if `target` should come _after_`buf` when sorted. 
+ * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf1 = Buffer.from('ABC'); + * const buf2 = Buffer.from('BCD'); + * const buf3 = Buffer.from('ABCD'); + * + * console.log(buf1.compare(buf1)); + * // Prints: 0 + * console.log(buf1.compare(buf2)); + * // Prints: -1 + * console.log(buf1.compare(buf3)); + * // Prints: -1 + * console.log(buf2.compare(buf1)); + * // Prints: 1 + * console.log(buf2.compare(buf3)); + * // Prints: 1 + * console.log([buf1, buf2, buf3].sort(Buffer.compare)); + * // Prints: [ , , ] + * // (This result is equal to: [buf1, buf3, buf2].) + * ``` + * + * The optional `targetStart`, `targetEnd`, `sourceStart`, and `sourceEnd`arguments can be used to limit the comparison to specific ranges within `target`and `buf` respectively. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf1 = Buffer.from([1, 2, 3, 4, 5, 6, 7, 8, 9]); + * const buf2 = Buffer.from([5, 6, 7, 8, 9, 1, 2, 3, 4]); + * + * console.log(buf1.compare(buf2, 5, 9, 0, 4)); + * // Prints: 0 + * console.log(buf1.compare(buf2, 0, 6, 4)); + * // Prints: -1 + * console.log(buf1.compare(buf2, 5, 6, 5)); + * // Prints: 1 + * ``` + * + * `ERR_OUT_OF_RANGE` is thrown if `targetStart < 0`, `sourceStart < 0`,`targetEnd > target.byteLength`, or `sourceEnd > source.byteLength`. + * @since v0.11.13 + * @param target A `Buffer` or {@link Uint8Array} with which to compare `buf`. + * @param [targetStart=0] The offset within `target` at which to begin comparison. + * @param [targetEnd=target.length] The offset within `target` at which to end comparison (not inclusive). + * @param [sourceStart=0] The offset within `buf` at which to begin comparison. + * @param [sourceEnd=buf.length] The offset within `buf` at which to end comparison (not inclusive). + */ + compare(target: Uint8Array, targetStart?: number, targetEnd?: number, sourceStart?: number, sourceEnd?: number): -1 | 0 | 1; + /** + * Copies data from a region of `buf` to a region in `target`, even if the `target`memory region overlaps with `buf`. + * + * [`TypedArray.prototype.set()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/TypedArray/set) performs the same operation, and is available + * for all TypedArrays, including Node.js `Buffer`s, although it takes + * different function arguments. + * + * ```js + * import { Buffer } from 'buffer'; + * + * // Create two `Buffer` instances. + * const buf1 = Buffer.allocUnsafe(26); + * const buf2 = Buffer.allocUnsafe(26).fill('!'); + * + * for (let i = 0; i < 26; i++) { + * // 97 is the decimal ASCII value for 'a'. + * buf1[i] = i + 97; + * } + * + * // Copy `buf1` bytes 16 through 19 into `buf2` starting at byte 8 of `buf2`. + * buf1.copy(buf2, 8, 16, 20); + * // This is equivalent to: + * // buf2.set(buf1.subarray(16, 20), 8); + * + * console.log(buf2.toString('ascii', 0, 25)); + * // Prints: !!!!!!!!qrst!!!!!!!!!!!!! + * ``` + * + * ```js + * import { Buffer } from 'buffer'; + * + * // Create a `Buffer` and copy data from one region to an overlapping region + * // within the same `Buffer`. + * + * const buf = Buffer.allocUnsafe(26); + * + * for (let i = 0; i < 26; i++) { + * // 97 is the decimal ASCII value for 'a'. + * buf[i] = i + 97; + * } + * + * buf.copy(buf, 0, 4, 10); + * + * console.log(buf.toString()); + * // Prints: efghijghijklmnopqrstuvwxyz + * ``` + * @since v0.1.90 + * @param target A `Buffer` or {@link Uint8Array} to copy into. + * @param [targetStart=0] The offset within `target` at which to begin writing. 
+ * @param [sourceStart=0] The offset within `buf` from which to begin copying. + * @param [sourceEnd=buf.length] The offset within `buf` at which to stop copying (not inclusive). + * @return The number of bytes copied. + */ + copy(target: Uint8Array, targetStart?: number, sourceStart?: number, sourceEnd?: number): number; + /** + * Returns a new `Buffer` that references the same memory as the original, but + * offset and cropped by the `start` and `end` indices. + * + * This method is not compatible with the `Uint8Array.prototype.slice()`, + * which is a superclass of `Buffer`. To copy the slice, use`Uint8Array.prototype.slice()`. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.from('buffer'); + * + * const copiedBuf = Uint8Array.prototype.slice.call(buf); + * copiedBuf[0]++; + * console.log(copiedBuf.toString()); + * // Prints: cuffer + * + * console.log(buf.toString()); + * // Prints: buffer + * + * // With buf.slice(), the original buffer is modified. + * const notReallyCopiedBuf = buf.slice(); + * notReallyCopiedBuf[0]++; + * console.log(notReallyCopiedBuf.toString()); + * // Prints: cuffer + * console.log(buf.toString()); + * // Also prints: cuffer (!) + * ``` + * @since v0.3.0 + * @deprecated Use `subarray` instead. + * @param [start=0] Where the new `Buffer` will start. + * @param [end=buf.length] Where the new `Buffer` will end (not inclusive). + */ + slice(start?: number, end?: number): Buffer; + /** + * Returns a new `Buffer` that references the same memory as the original, but + * offset and cropped by the `start` and `end` indices. + * + * Specifying `end` greater than `buf.length` will return the same result as + * that of `end` equal to `buf.length`. + * + * This method is inherited from [`TypedArray.prototype.subarray()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/TypedArray/subarray). + * + * Modifying the new `Buffer` slice will modify the memory in the original `Buffer`because the allocated memory of the two objects overlap. + * + * ```js + * import { Buffer } from 'buffer'; + * + * // Create a `Buffer` with the ASCII alphabet, take a slice, and modify one byte + * // from the original `Buffer`. + * + * const buf1 = Buffer.allocUnsafe(26); + * + * for (let i = 0; i < 26; i++) { + * // 97 is the decimal ASCII value for 'a'. + * buf1[i] = i + 97; + * } + * + * const buf2 = buf1.subarray(0, 3); + * + * console.log(buf2.toString('ascii', 0, buf2.length)); + * // Prints: abc + * + * buf1[0] = 33; + * + * console.log(buf2.toString('ascii', 0, buf2.length)); + * // Prints: !bc + * ``` + * + * Specifying negative indexes causes the slice to be generated relative to the + * end of `buf` rather than the beginning. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.from('buffer'); + * + * console.log(buf.subarray(-6, -1).toString()); + * // Prints: buffe + * // (Equivalent to buf.subarray(0, 5).) + * + * console.log(buf.subarray(-6, -2).toString()); + * // Prints: buff + * // (Equivalent to buf.subarray(0, 4).) + * + * console.log(buf.subarray(-5, -2).toString()); + * // Prints: uff + * // (Equivalent to buf.subarray(1, 4).) + * ``` + * @since v3.0.0 + * @param [start=0] Where the new `Buffer` will start. + * @param [end=buf.length] Where the new `Buffer` will end (not inclusive). + */ + subarray(start?: number, end?: number): Buffer; + /** + * Writes `value` to `buf` at the specified `offset` as big-endian. + * + * `value` is interpreted and written as a two's complement signed integer. 
+ * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.allocUnsafe(8); + * + * buf.writeBigInt64BE(0x0102030405060708n, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v12.0.0, v10.20.0 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy: `0 <= offset <= buf.length - 8`. + * @return `offset` plus the number of bytes written. + */ + writeBigInt64BE(value: bigint, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as little-endian. + * + * `value` is interpreted and written as a two's complement signed integer. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.allocUnsafe(8); + * + * buf.writeBigInt64LE(0x0102030405060708n, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v12.0.0, v10.20.0 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy: `0 <= offset <= buf.length - 8`. + * @return `offset` plus the number of bytes written. + */ + writeBigInt64LE(value: bigint, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as big-endian. + * + * This function is also available under the `writeBigUint64BE` alias. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.allocUnsafe(8); + * + * buf.writeBigUInt64BE(0xdecafafecacefaden, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v12.0.0, v10.20.0 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy: `0 <= offset <= buf.length - 8`. + * @return `offset` plus the number of bytes written. + */ + writeBigUInt64BE(value: bigint, offset?: number): number; + /** + * @alias Buffer.writeBigUInt64BE + * @since v14.10.0, v12.19.0 + */ + writeBigUint64BE(value: bigint, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as little-endian + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.allocUnsafe(8); + * + * buf.writeBigUInt64LE(0xdecafafecacefaden, 0); + * + * console.log(buf); + * // Prints: + * ``` + * + * This function is also available under the `writeBigUint64LE` alias. + * @since v12.0.0, v10.20.0 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy: `0 <= offset <= buf.length - 8`. + * @return `offset` plus the number of bytes written. + */ + writeBigUInt64LE(value: bigint, offset?: number): number; + /** + * @alias Buffer.writeBigUInt64LE + * @since v14.10.0, v12.19.0 + */ + writeBigUint64LE(value: bigint, offset?: number): number; + /** + * Writes `byteLength` bytes of `value` to `buf` at the specified `offset`as little-endian. Supports up to 48 bits of accuracy. Behavior is undefined + * when `value` is anything other than an unsigned integer. + * + * This function is also available under the `writeUintLE` alias. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.allocUnsafe(6); + * + * buf.writeUIntLE(0x1234567890ab, 0, 6); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.5 + * @param value Number to be written to `buf`. + * @param offset Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - byteLength`. + * @param byteLength Number of bytes to write. Must satisfy `0 < byteLength <= 6`. 
+ * @return `offset` plus the number of bytes written. + */ + writeUIntLE(value: number, offset: number, byteLength: number): number; + /** + * @alias Buffer.writeUIntLE + * @since v14.9.0, v12.19.0 + */ + writeUintLE(value: number, offset: number, byteLength: number): number; + /** + * Writes `byteLength` bytes of `value` to `buf` at the specified `offset`as big-endian. Supports up to 48 bits of accuracy. Behavior is undefined + * when `value` is anything other than an unsigned integer. + * + * This function is also available under the `writeUintBE` alias. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.allocUnsafe(6); + * + * buf.writeUIntBE(0x1234567890ab, 0, 6); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.5 + * @param value Number to be written to `buf`. + * @param offset Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - byteLength`. + * @param byteLength Number of bytes to write. Must satisfy `0 < byteLength <= 6`. + * @return `offset` plus the number of bytes written. + */ + writeUIntBE(value: number, offset: number, byteLength: number): number; + /** + * @alias Buffer.writeUIntBE + * @since v14.9.0, v12.19.0 + */ + writeUintBE(value: number, offset: number, byteLength: number): number; + /** + * Writes `byteLength` bytes of `value` to `buf` at the specified `offset`as little-endian. Supports up to 48 bits of accuracy. Behavior is undefined + * when `value` is anything other than a signed integer. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.allocUnsafe(6); + * + * buf.writeIntLE(0x1234567890ab, 0, 6); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.11.15 + * @param value Number to be written to `buf`. + * @param offset Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - byteLength`. + * @param byteLength Number of bytes to write. Must satisfy `0 < byteLength <= 6`. + * @return `offset` plus the number of bytes written. + */ + writeIntLE(value: number, offset: number, byteLength: number): number; + /** + * Writes `byteLength` bytes of `value` to `buf` at the specified `offset`as big-endian. Supports up to 48 bits of accuracy. Behavior is undefined when`value` is anything other than a + * signed integer. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.allocUnsafe(6); + * + * buf.writeIntBE(0x1234567890ab, 0, 6); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.11.15 + * @param value Number to be written to `buf`. + * @param offset Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - byteLength`. + * @param byteLength Number of bytes to write. Must satisfy `0 < byteLength <= 6`. + * @return `offset` plus the number of bytes written. + */ + writeIntBE(value: number, offset: number, byteLength: number): number; + /** + * Reads an unsigned, big-endian 64-bit integer from `buf` at the specified`offset`. + * + * This function is also available under the `readBigUint64BE` alias. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.from([0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff]); + * + * console.log(buf.readBigUInt64BE(0)); + * // Prints: 4294967295n + * ``` + * @since v12.0.0, v10.20.0 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy: `0 <= offset <= buf.length - 8`. 
+ */ + readBigUInt64BE(offset?: number): bigint; + /** + * @alias Buffer.readBigUInt64BE + * @since v14.10.0, v12.19.0 + */ + readBigUint64BE(offset?: number): bigint; + /** + * Reads an unsigned, little-endian 64-bit integer from `buf` at the specified`offset`. + * + * This function is also available under the `readBigUint64LE` alias. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.from([0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff]); + * + * console.log(buf.readBigUInt64LE(0)); + * // Prints: 18446744069414584320n + * ``` + * @since v12.0.0, v10.20.0 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy: `0 <= offset <= buf.length - 8`. + */ + readBigUInt64LE(offset?: number): bigint; + /** + * @alias Buffer.readBigUInt64LE + * @since v14.10.0, v12.19.0 + */ + readBigUint64LE(offset?: number): bigint; + /** + * Reads a signed, big-endian 64-bit integer from `buf` at the specified `offset`. + * + * Integers read from a `Buffer` are interpreted as two's complement signed + * values. + * @since v12.0.0, v10.20.0 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy: `0 <= offset <= buf.length - 8`. + */ + readBigInt64BE(offset?: number): bigint; + /** + * Reads a signed, little-endian 64-bit integer from `buf` at the specified`offset`. + * + * Integers read from a `Buffer` are interpreted as two's complement signed + * values. + * @since v12.0.0, v10.20.0 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy: `0 <= offset <= buf.length - 8`. + */ + readBigInt64LE(offset?: number): bigint; + /** + * Reads `byteLength` number of bytes from `buf` at the specified `offset`and interprets the result as an unsigned, little-endian integer supporting + * up to 48 bits of accuracy. + * + * This function is also available under the `readUintLE` alias. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.from([0x12, 0x34, 0x56, 0x78, 0x90, 0xab]); + * + * console.log(buf.readUIntLE(0, 6).toString(16)); + * // Prints: ab9078563412 + * ``` + * @since v0.11.15 + * @param offset Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - byteLength`. + * @param byteLength Number of bytes to read. Must satisfy `0 < byteLength <= 6`. + */ + readUIntLE(offset: number, byteLength: number): number; + /** + * @alias Buffer.readUIntLE + * @since v14.9.0, v12.19.0 + */ + readUintLE(offset: number, byteLength: number): number; + /** + * Reads `byteLength` number of bytes from `buf` at the specified `offset`and interprets the result as an unsigned big-endian integer supporting + * up to 48 bits of accuracy. + * + * This function is also available under the `readUintBE` alias. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.from([0x12, 0x34, 0x56, 0x78, 0x90, 0xab]); + * + * console.log(buf.readUIntBE(0, 6).toString(16)); + * // Prints: 1234567890ab + * console.log(buf.readUIntBE(1, 6).toString(16)); + * // Throws ERR_OUT_OF_RANGE. + * ``` + * @since v0.11.15 + * @param offset Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - byteLength`. + * @param byteLength Number of bytes to read. Must satisfy `0 < byteLength <= 6`. 
+ */ + readUIntBE(offset: number, byteLength: number): number; + /** + * @alias Buffer.readUIntBE + * @since v14.9.0, v12.19.0 + */ + readUintBE(offset: number, byteLength: number): number; + /** + * Reads `byteLength` number of bytes from `buf` at the specified `offset`and interprets the result as a little-endian, two's complement signed value + * supporting up to 48 bits of accuracy. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.from([0x12, 0x34, 0x56, 0x78, 0x90, 0xab]); + * + * console.log(buf.readIntLE(0, 6).toString(16)); + * // Prints: -546f87a9cbee + * ``` + * @since v0.11.15 + * @param offset Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - byteLength`. + * @param byteLength Number of bytes to read. Must satisfy `0 < byteLength <= 6`. + */ + readIntLE(offset: number, byteLength: number): number; + /** + * Reads `byteLength` number of bytes from `buf` at the specified `offset`and interprets the result as a big-endian, two's complement signed value + * supporting up to 48 bits of accuracy. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.from([0x12, 0x34, 0x56, 0x78, 0x90, 0xab]); + * + * console.log(buf.readIntBE(0, 6).toString(16)); + * // Prints: 1234567890ab + * console.log(buf.readIntBE(1, 6).toString(16)); + * // Throws ERR_OUT_OF_RANGE. + * console.log(buf.readIntBE(1, 0).toString(16)); + * // Throws ERR_OUT_OF_RANGE. + * ``` + * @since v0.11.15 + * @param offset Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - byteLength`. + * @param byteLength Number of bytes to read. Must satisfy `0 < byteLength <= 6`. + */ + readIntBE(offset: number, byteLength: number): number; + /** + * Reads an unsigned 8-bit integer from `buf` at the specified `offset`. + * + * This function is also available under the `readUint8` alias. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.from([1, -2]); + * + * console.log(buf.readUInt8(0)); + * // Prints: 1 + * console.log(buf.readUInt8(1)); + * // Prints: 254 + * console.log(buf.readUInt8(2)); + * // Throws ERR_OUT_OF_RANGE. + * ``` + * @since v0.5.0 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 1`. + */ + readUInt8(offset?: number): number; + /** + * @alias Buffer.readUInt8 + * @since v14.9.0, v12.19.0 + */ + readUint8(offset?: number): number; + /** + * Reads an unsigned, little-endian 16-bit integer from `buf` at the specified`offset`. + * + * This function is also available under the `readUint16LE` alias. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.from([0x12, 0x34, 0x56]); + * + * console.log(buf.readUInt16LE(0).toString(16)); + * // Prints: 3412 + * console.log(buf.readUInt16LE(1).toString(16)); + * // Prints: 5634 + * console.log(buf.readUInt16LE(2).toString(16)); + * // Throws ERR_OUT_OF_RANGE. + * ``` + * @since v0.5.5 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 2`. + */ + readUInt16LE(offset?: number): number; + /** + * @alias Buffer.readUInt16LE + * @since v14.9.0, v12.19.0 + */ + readUint16LE(offset?: number): number; + /** + * Reads an unsigned, big-endian 16-bit integer from `buf` at the specified`offset`. + * + * This function is also available under the `readUint16BE` alias. 
+ * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.from([0x12, 0x34, 0x56]); + * + * console.log(buf.readUInt16BE(0).toString(16)); + * // Prints: 1234 + * console.log(buf.readUInt16BE(1).toString(16)); + * // Prints: 3456 + * ``` + * @since v0.5.5 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 2`. + */ + readUInt16BE(offset?: number): number; + /** + * @alias Buffer.readUInt16BE + * @since v14.9.0, v12.19.0 + */ + readUint16BE(offset?: number): number; + /** + * Reads an unsigned, little-endian 32-bit integer from `buf` at the specified`offset`. + * + * This function is also available under the `readUint32LE` alias. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.from([0x12, 0x34, 0x56, 0x78]); + * + * console.log(buf.readUInt32LE(0).toString(16)); + * // Prints: 78563412 + * console.log(buf.readUInt32LE(1).toString(16)); + * // Throws ERR_OUT_OF_RANGE. + * ``` + * @since v0.5.5 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 4`. + */ + readUInt32LE(offset?: number): number; + /** + * @alias Buffer.readUInt32LE + * @since v14.9.0, v12.19.0 + */ + readUint32LE(offset?: number): number; + /** + * Reads an unsigned, big-endian 32-bit integer from `buf` at the specified`offset`. + * + * This function is also available under the `readUint32BE` alias. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.from([0x12, 0x34, 0x56, 0x78]); + * + * console.log(buf.readUInt32BE(0).toString(16)); + * // Prints: 12345678 + * ``` + * @since v0.5.5 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 4`. + */ + readUInt32BE(offset?: number): number; + /** + * @alias Buffer.readUInt32BE + * @since v14.9.0, v12.19.0 + */ + readUint32BE(offset?: number): number; + /** + * Reads a signed 8-bit integer from `buf` at the specified `offset`. + * + * Integers read from a `Buffer` are interpreted as two's complement signed values. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.from([-1, 5]); + * + * console.log(buf.readInt8(0)); + * // Prints: -1 + * console.log(buf.readInt8(1)); + * // Prints: 5 + * console.log(buf.readInt8(2)); + * // Throws ERR_OUT_OF_RANGE. + * ``` + * @since v0.5.0 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 1`. + */ + readInt8(offset?: number): number; + /** + * Reads a signed, little-endian 16-bit integer from `buf` at the specified`offset`. + * + * Integers read from a `Buffer` are interpreted as two's complement signed values. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.from([0, 5]); + * + * console.log(buf.readInt16LE(0)); + * // Prints: 1280 + * console.log(buf.readInt16LE(1)); + * // Throws ERR_OUT_OF_RANGE. + * ``` + * @since v0.5.5 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 2`. + */ + readInt16LE(offset?: number): number; + /** + * Reads a signed, big-endian 16-bit integer from `buf` at the specified `offset`. + * + * Integers read from a `Buffer` are interpreted as two's complement signed values. 
+ * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.from([0, 5]); + * + * console.log(buf.readInt16BE(0)); + * // Prints: 5 + * ``` + * @since v0.5.5 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 2`. + */ + readInt16BE(offset?: number): number; + /** + * Reads a signed, little-endian 32-bit integer from `buf` at the specified`offset`. + * + * Integers read from a `Buffer` are interpreted as two's complement signed values. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.from([0, 0, 0, 5]); + * + * console.log(buf.readInt32LE(0)); + * // Prints: 83886080 + * console.log(buf.readInt32LE(1)); + * // Throws ERR_OUT_OF_RANGE. + * ``` + * @since v0.5.5 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 4`. + */ + readInt32LE(offset?: number): number; + /** + * Reads a signed, big-endian 32-bit integer from `buf` at the specified `offset`. + * + * Integers read from a `Buffer` are interpreted as two's complement signed values. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.from([0, 0, 0, 5]); + * + * console.log(buf.readInt32BE(0)); + * // Prints: 5 + * ``` + * @since v0.5.5 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 4`. + */ + readInt32BE(offset?: number): number; + /** + * Reads a 32-bit, little-endian float from `buf` at the specified `offset`. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.from([1, 2, 3, 4]); + * + * console.log(buf.readFloatLE(0)); + * // Prints: 1.539989614439558e-36 + * console.log(buf.readFloatLE(1)); + * // Throws ERR_OUT_OF_RANGE. + * ``` + * @since v0.11.15 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 4`. + */ + readFloatLE(offset?: number): number; + /** + * Reads a 32-bit, big-endian float from `buf` at the specified `offset`. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.from([1, 2, 3, 4]); + * + * console.log(buf.readFloatBE(0)); + * // Prints: 2.387939260590663e-38 + * ``` + * @since v0.11.15 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 4`. + */ + readFloatBE(offset?: number): number; + /** + * Reads a 64-bit, little-endian double from `buf` at the specified `offset`. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.from([1, 2, 3, 4, 5, 6, 7, 8]); + * + * console.log(buf.readDoubleLE(0)); + * // Prints: 5.447603722011605e-270 + * console.log(buf.readDoubleLE(1)); + * // Throws ERR_OUT_OF_RANGE. + * ``` + * @since v0.11.15 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 8`. + */ + readDoubleLE(offset?: number): number; + /** + * Reads a 64-bit, big-endian double from `buf` at the specified `offset`. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.from([1, 2, 3, 4, 5, 6, 7, 8]); + * + * console.log(buf.readDoubleBE(0)); + * // Prints: 8.20788039913184e-304 + * ``` + * @since v0.11.15 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 8`. + */ + readDoubleBE(offset?: number): number; + reverse(): this; + /** + * Interprets `buf` as an array of unsigned 16-bit integers and swaps the + * byte order _in-place_. 
Throws `ERR_INVALID_BUFFER_SIZE` if `buf.length` is not a multiple of 2. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf1 = Buffer.from([0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8]); + * + * console.log(buf1); + * // Prints: + * + * buf1.swap16(); + * + * console.log(buf1); + * // Prints: + * + * const buf2 = Buffer.from([0x1, 0x2, 0x3]); + * + * buf2.swap16(); + * // Throws ERR_INVALID_BUFFER_SIZE. + * ``` + * + * One convenient use of `buf.swap16()` is to perform a fast in-place conversion + * between UTF-16 little-endian and UTF-16 big-endian: + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.from('This is little-endian UTF-16', 'utf16le'); + * buf.swap16(); // Convert to big-endian UTF-16 text. + * ``` + * @since v5.10.0 + * @return A reference to `buf`. + */ + swap16(): Buffer; + /** + * Interprets `buf` as an array of unsigned 32-bit integers and swaps the + * byte order _in-place_. Throws `ERR_INVALID_BUFFER_SIZE` if `buf.length` is not a multiple of 4. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf1 = Buffer.from([0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8]); + * + * console.log(buf1); + * // Prints: + * + * buf1.swap32(); + * + * console.log(buf1); + * // Prints: + * + * const buf2 = Buffer.from([0x1, 0x2, 0x3]); + * + * buf2.swap32(); + * // Throws ERR_INVALID_BUFFER_SIZE. + * ``` + * @since v5.10.0 + * @return A reference to `buf`. + */ + swap32(): Buffer; + /** + * Interprets `buf` as an array of 64-bit numbers and swaps byte order _in-place_. + * Throws `ERR_INVALID_BUFFER_SIZE` if `buf.length` is not a multiple of 8. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf1 = Buffer.from([0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8]); + * + * console.log(buf1); + * // Prints: + * + * buf1.swap64(); + * + * console.log(buf1); + * // Prints: + * + * const buf2 = Buffer.from([0x1, 0x2, 0x3]); + * + * buf2.swap64(); + * // Throws ERR_INVALID_BUFFER_SIZE. + * ``` + * @since v6.3.0 + * @return A reference to `buf`. + */ + swap64(): Buffer; + /** + * Writes `value` to `buf` at the specified `offset`. `value` must be a + * valid unsigned 8-bit integer. Behavior is undefined when `value` is anything + * other than an unsigned 8-bit integer. + * + * This function is also available under the `writeUint8` alias. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.allocUnsafe(4); + * + * buf.writeUInt8(0x3, 0); + * buf.writeUInt8(0x4, 1); + * buf.writeUInt8(0x23, 2); + * buf.writeUInt8(0x42, 3); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.0 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 1`. + * @return `offset` plus the number of bytes written. + */ + writeUInt8(value: number, offset?: number): number; + /** + * @alias Buffer.writeUInt8 + * @since v14.9.0, v12.19.0 + */ + writeUint8(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as little-endian. The `value`must be a valid unsigned 16-bit integer. Behavior is undefined when `value` is + * anything other than an unsigned 16-bit integer. + * + * This function is also available under the `writeUint16LE` alias. 
+ * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.allocUnsafe(4); + * + * buf.writeUInt16LE(0xdead, 0); + * buf.writeUInt16LE(0xbeef, 2); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.5 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 2`. + * @return `offset` plus the number of bytes written. + */ + writeUInt16LE(value: number, offset?: number): number; + /** + * @alias Buffer.writeUInt16LE + * @since v14.9.0, v12.19.0 + */ + writeUint16LE(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as big-endian. The `value`must be a valid unsigned 16-bit integer. Behavior is undefined when `value`is anything other than an + * unsigned 16-bit integer. + * + * This function is also available under the `writeUint16BE` alias. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.allocUnsafe(4); + * + * buf.writeUInt16BE(0xdead, 0); + * buf.writeUInt16BE(0xbeef, 2); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.5 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 2`. + * @return `offset` plus the number of bytes written. + */ + writeUInt16BE(value: number, offset?: number): number; + /** + * @alias Buffer.writeUInt16BE + * @since v14.9.0, v12.19.0 + */ + writeUint16BE(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as little-endian. The `value`must be a valid unsigned 32-bit integer. Behavior is undefined when `value` is + * anything other than an unsigned 32-bit integer. + * + * This function is also available under the `writeUint32LE` alias. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.allocUnsafe(4); + * + * buf.writeUInt32LE(0xfeedface, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.5 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 4`. + * @return `offset` plus the number of bytes written. + */ + writeUInt32LE(value: number, offset?: number): number; + /** + * @alias Buffer.writeUInt32LE + * @since v14.9.0, v12.19.0 + */ + writeUint32LE(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as big-endian. The `value`must be a valid unsigned 32-bit integer. Behavior is undefined when `value`is anything other than an + * unsigned 32-bit integer. + * + * This function is also available under the `writeUint32BE` alias. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.allocUnsafe(4); + * + * buf.writeUInt32BE(0xfeedface, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.5 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 4`. + * @return `offset` plus the number of bytes written. + */ + writeUInt32BE(value: number, offset?: number): number; + /** + * @alias Buffer.writeUInt32BE + * @since v14.9.0, v12.19.0 + */ + writeUint32BE(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset`. `value` must be a valid + * signed 8-bit integer. 
Behavior is undefined when `value` is anything other than + * a signed 8-bit integer. + * + * `value` is interpreted and written as a two's complement signed integer. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.allocUnsafe(2); + * + * buf.writeInt8(2, 0); + * buf.writeInt8(-2, 1); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.0 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 1`. + * @return `offset` plus the number of bytes written. + */ + writeInt8(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as little-endian. The `value`must be a valid signed 16-bit integer. Behavior is undefined when `value` is + * anything other than a signed 16-bit integer. + * + * The `value` is interpreted and written as a two's complement signed integer. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.allocUnsafe(2); + * + * buf.writeInt16LE(0x0304, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.5 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 2`. + * @return `offset` plus the number of bytes written. + */ + writeInt16LE(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as big-endian. The `value`must be a valid signed 16-bit integer. Behavior is undefined when `value` is + * anything other than a signed 16-bit integer. + * + * The `value` is interpreted and written as a two's complement signed integer. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.allocUnsafe(2); + * + * buf.writeInt16BE(0x0102, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.5 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 2`. + * @return `offset` plus the number of bytes written. + */ + writeInt16BE(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as little-endian. The `value`must be a valid signed 32-bit integer. Behavior is undefined when `value` is + * anything other than a signed 32-bit integer. + * + * The `value` is interpreted and written as a two's complement signed integer. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.allocUnsafe(4); + * + * buf.writeInt32LE(0x05060708, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.5 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 4`. + * @return `offset` plus the number of bytes written. + */ + writeInt32LE(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as big-endian. The `value`must be a valid signed 32-bit integer. Behavior is undefined when `value` is + * anything other than a signed 32-bit integer. + * + * The `value` is interpreted and written as a two's complement signed integer. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.allocUnsafe(4); + * + * buf.writeInt32BE(0x01020304, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.5 + * @param value Number to be written to `buf`. 
+ * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 4`. + * @return `offset` plus the number of bytes written. + */ + writeInt32BE(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as little-endian. Behavior is + * undefined when `value` is anything other than a JavaScript number. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.allocUnsafe(4); + * + * buf.writeFloatLE(0xcafebabe, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.11.15 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 4`. + * @return `offset` plus the number of bytes written. + */ + writeFloatLE(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as big-endian. Behavior is + * undefined when `value` is anything other than a JavaScript number. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.allocUnsafe(4); + * + * buf.writeFloatBE(0xcafebabe, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.11.15 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 4`. + * @return `offset` plus the number of bytes written. + */ + writeFloatBE(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as little-endian. The `value`must be a JavaScript number. Behavior is undefined when `value` is anything + * other than a JavaScript number. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.allocUnsafe(8); + * + * buf.writeDoubleLE(123.456, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.11.15 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 8`. + * @return `offset` plus the number of bytes written. + */ + writeDoubleLE(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as big-endian. The `value`must be a JavaScript number. Behavior is undefined when `value` is anything + * other than a JavaScript number. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.allocUnsafe(8); + * + * buf.writeDoubleBE(123.456, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.11.15 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 8`. + * @return `offset` plus the number of bytes written. + */ + writeDoubleBE(value: number, offset?: number): number; + /** + * Fills `buf` with the specified `value`. If the `offset` and `end` are not given, + * the entire `buf` will be filled: + * + * ```js + * import { Buffer } from 'buffer'; + * + * // Fill a `Buffer` with the ASCII character 'h'. + * + * const b = Buffer.allocUnsafe(50).fill('h'); + * + * console.log(b.toString()); + * // Prints: hhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhh + * ``` + * + * `value` is coerced to a `uint32` value if it is not a string, `Buffer`, or + * integer. If the resulting integer is greater than `255` (decimal), `buf` will be + * filled with `value & 255`. 
+ * + * If the final write of a `fill()` operation falls on a multi-byte character, + * then only the bytes of that character that fit into `buf` are written: + * + * ```js + * import { Buffer } from 'buffer'; + * + * // Fill a `Buffer` with character that takes up two bytes in UTF-8. + * + * console.log(Buffer.allocUnsafe(5).fill('\u0222')); + * // Prints: + * ``` + * + * If `value` contains invalid characters, it is truncated; if no valid + * fill data remains, an exception is thrown: + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.allocUnsafe(5); + * + * console.log(buf.fill('a')); + * // Prints: + * console.log(buf.fill('aazz', 'hex')); + * // Prints: + * console.log(buf.fill('zz', 'hex')); + * // Throws an exception. + * ``` + * @since v0.5.0 + * @param value The value with which to fill `buf`. + * @param [offset=0] Number of bytes to skip before starting to fill `buf`. + * @param [end=buf.length] Where to stop filling `buf` (not inclusive). + * @param [encoding='utf8'] The encoding for `value` if `value` is a string. + * @return A reference to `buf`. + */ + fill(value: string | Uint8Array | number, offset?: number, end?: number, encoding?: BufferEncoding): this; + /** + * If `value` is: + * + * * a string, `value` is interpreted according to the character encoding in`encoding`. + * * a `Buffer` or [`Uint8Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint8Array), `value` will be used in its entirety. + * To compare a partial `Buffer`, use `buf.subarray`. + * * a number, `value` will be interpreted as an unsigned 8-bit integer + * value between `0` and `255`. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.from('this is a buffer'); + * + * console.log(buf.indexOf('this')); + * // Prints: 0 + * console.log(buf.indexOf('is')); + * // Prints: 2 + * console.log(buf.indexOf(Buffer.from('a buffer'))); + * // Prints: 8 + * console.log(buf.indexOf(97)); + * // Prints: 8 (97 is the decimal ASCII value for 'a') + * console.log(buf.indexOf(Buffer.from('a buffer example'))); + * // Prints: -1 + * console.log(buf.indexOf(Buffer.from('a buffer example').slice(0, 8))); + * // Prints: 8 + * + * const utf16Buffer = Buffer.from('\u039a\u0391\u03a3\u03a3\u0395', 'utf16le'); + * + * console.log(utf16Buffer.indexOf('\u03a3', 0, 'utf16le')); + * // Prints: 4 + * console.log(utf16Buffer.indexOf('\u03a3', -4, 'utf16le')); + * // Prints: 6 + * ``` + * + * If `value` is not a string, number, or `Buffer`, this method will throw a`TypeError`. If `value` is a number, it will be coerced to a valid byte value, + * an integer between 0 and 255. + * + * If `byteOffset` is not a number, it will be coerced to a number. If the result + * of coercion is `NaN` or `0`, then the entire buffer will be searched. This + * behavior matches [`String.prototype.indexOf()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/indexOf). + * + * ```js + * import { Buffer } from 'buffer'; + * + * const b = Buffer.from('abcdef'); + * + * // Passing a value that's a number, but not a valid byte. + * // Prints: 2, equivalent to searching for 99 or 'c'. + * console.log(b.indexOf(99.9)); + * console.log(b.indexOf(256 + 99)); + * + * // Passing a byteOffset that coerces to NaN or 0. + * // Prints: 1, searching the whole buffer. 
+ * console.log(b.indexOf('b', undefined)); + * console.log(b.indexOf('b', {})); + * console.log(b.indexOf('b', null)); + * console.log(b.indexOf('b', [])); + * ``` + * + * If `value` is an empty string or empty `Buffer` and `byteOffset` is less + * than `buf.length`, `byteOffset` will be returned. If `value` is empty and`byteOffset` is at least `buf.length`, `buf.length` will be returned. + * @since v1.5.0 + * @param value What to search for. + * @param [byteOffset=0] Where to begin searching in `buf`. If negative, then offset is calculated from the end of `buf`. + * @param [encoding='utf8'] If `value` is a string, this is the encoding used to determine the binary representation of the string that will be searched for in `buf`. + * @return The index of the first occurrence of `value` in `buf`, or `-1` if `buf` does not contain `value`. + */ + indexOf(value: string | number | Uint8Array, byteOffset?: number, encoding?: BufferEncoding): number; + /** + * Identical to `buf.indexOf()`, except the last occurrence of `value` is found + * rather than the first occurrence. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.from('this buffer is a buffer'); + * + * console.log(buf.lastIndexOf('this')); + * // Prints: 0 + * console.log(buf.lastIndexOf('buffer')); + * // Prints: 17 + * console.log(buf.lastIndexOf(Buffer.from('buffer'))); + * // Prints: 17 + * console.log(buf.lastIndexOf(97)); + * // Prints: 15 (97 is the decimal ASCII value for 'a') + * console.log(buf.lastIndexOf(Buffer.from('yolo'))); + * // Prints: -1 + * console.log(buf.lastIndexOf('buffer', 5)); + * // Prints: 5 + * console.log(buf.lastIndexOf('buffer', 4)); + * // Prints: -1 + * + * const utf16Buffer = Buffer.from('\u039a\u0391\u03a3\u03a3\u0395', 'utf16le'); + * + * console.log(utf16Buffer.lastIndexOf('\u03a3', undefined, 'utf16le')); + * // Prints: 6 + * console.log(utf16Buffer.lastIndexOf('\u03a3', -5, 'utf16le')); + * // Prints: 4 + * ``` + * + * If `value` is not a string, number, or `Buffer`, this method will throw a`TypeError`. If `value` is a number, it will be coerced to a valid byte value, + * an integer between 0 and 255. + * + * If `byteOffset` is not a number, it will be coerced to a number. Any arguments + * that coerce to `NaN`, like `{}` or `undefined`, will search the whole buffer. + * This behavior matches [`String.prototype.lastIndexOf()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/lastIndexOf). + * + * ```js + * import { Buffer } from 'buffer'; + * + * const b = Buffer.from('abcdef'); + * + * // Passing a value that's a number, but not a valid byte. + * // Prints: 2, equivalent to searching for 99 or 'c'. + * console.log(b.lastIndexOf(99.9)); + * console.log(b.lastIndexOf(256 + 99)); + * + * // Passing a byteOffset that coerces to NaN. + * // Prints: 1, searching the whole buffer. + * console.log(b.lastIndexOf('b', undefined)); + * console.log(b.lastIndexOf('b', {})); + * + * // Passing a byteOffset that coerces to 0. + * // Prints: -1, equivalent to passing 0. + * console.log(b.lastIndexOf('b', null)); + * console.log(b.lastIndexOf('b', [])); + * ``` + * + * If `value` is an empty string or empty `Buffer`, `byteOffset` will be returned. + * @since v6.0.0 + * @param value What to search for. + * @param [byteOffset=buf.length - 1] Where to begin searching in `buf`. If negative, then offset is calculated from the end of `buf`. 
+ * @param [encoding='utf8'] If `value` is a string, this is the encoding used to determine the binary representation of the string that will be searched for in `buf`. + * @return The index of the last occurrence of `value` in `buf`, or `-1` if `buf` does not contain `value`. + */ + lastIndexOf(value: string | number | Uint8Array, byteOffset?: number, encoding?: BufferEncoding): number; + /** + * Creates and returns an [iterator](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols) of `[index, byte]` pairs from the contents + * of `buf`. + * + * ```js + * import { Buffer } from 'buffer'; + * + * // Log the entire contents of a `Buffer`. + * + * const buf = Buffer.from('buffer'); + * + * for (const pair of buf.entries()) { + * console.log(pair); + * } + * // Prints: + * // [0, 98] + * // [1, 117] + * // [2, 102] + * // [3, 102] + * // [4, 101] + * // [5, 114] + * ``` + * @since v1.1.0 + */ + entries(): IterableIterator<[number, number]>; + /** + * Equivalent to `buf.indexOf() !== -1`. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.from('this is a buffer'); + * + * console.log(buf.includes('this')); + * // Prints: true + * console.log(buf.includes('is')); + * // Prints: true + * console.log(buf.includes(Buffer.from('a buffer'))); + * // Prints: true + * console.log(buf.includes(97)); + * // Prints: true (97 is the decimal ASCII value for 'a') + * console.log(buf.includes(Buffer.from('a buffer example'))); + * // Prints: false + * console.log(buf.includes(Buffer.from('a buffer example').slice(0, 8))); + * // Prints: true + * console.log(buf.includes('this', 4)); + * // Prints: false + * ``` + * @since v5.3.0 + * @param value What to search for. + * @param [byteOffset=0] Where to begin searching in `buf`. If negative, then offset is calculated from the end of `buf`. + * @param [encoding='utf8'] If `value` is a string, this is its encoding. + * @return `true` if `value` was found in `buf`, `false` otherwise. + */ + includes(value: string | number | Buffer, byteOffset?: number, encoding?: BufferEncoding): boolean; + /** + * Creates and returns an [iterator](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols) of `buf` keys (indices). + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.from('buffer'); + * + * for (const key of buf.keys()) { + * console.log(key); + * } + * // Prints: + * // 0 + * // 1 + * // 2 + * // 3 + * // 4 + * // 5 + * ``` + * @since v1.1.0 + */ + keys(): IterableIterator; + /** + * Creates and returns an [iterator](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols) for `buf` values (bytes). This function is + * called automatically when a `Buffer` is used in a `for..of` statement. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.from('buffer'); + * + * for (const value of buf.values()) { + * console.log(value); + * } + * // Prints: + * // 98 + * // 117 + * // 102 + * // 102 + * // 101 + * // 114 + * + * for (const value of buf) { + * console.log(value); + * } + * // Prints: + * // 98 + * // 117 + * // 102 + * // 102 + * // 101 + * // 114 + * ``` + * @since v1.1.0 + */ + values(): IterableIterator; + } + var Buffer: BufferConstructor; + /** + * Decodes a string of Base64-encoded data into bytes, and encodes those bytes + * into a string using Latin-1 (ISO-8859-1). + * + * The `data` may be any JavaScript-value that can be coerced into a string. 
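+ *
+ * A minimal sketch of the behaviour and of the preferred `Buffer`-based equivalent
+ * (the sample string is illustrative, not from the upstream docs):
+ *
+ * ```js
+ * import { Buffer } from 'buffer';
+ *
+ * console.log(atob('aGVsbG8='));
+ * // Prints: hello
+ *
+ * // Preferred in Node.js code:
+ * console.log(Buffer.from('aGVsbG8=', 'base64').toString('latin1'));
+ * // Prints: hello
+ * ```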
+ * + * **This function is only provided for compatibility with legacy web platform APIs** + * **and should never be used in new code, because they use strings to represent** + * **binary data and predate the introduction of typed arrays in JavaScript.** + * **For code running using Node.js APIs, converting between base64-encoded strings** + * **and binary data should be performed using `Buffer.from(str, 'base64')` and`buf.toString('base64')`.** + * @since v15.13.0, v14.17.0 + * @deprecated Use `Buffer.from(data, 'base64')` instead. + * @param data The Base64-encoded input string. + */ + function atob(data: string): string; + /** + * Decodes a string into bytes using Latin-1 (ISO-8859), and encodes those bytes + * into a string using Base64. + * + * The `data` may be any JavaScript-value that can be coerced into a string. + * + * **This function is only provided for compatibility with legacy web platform APIs** + * **and should never be used in new code, because they use strings to represent** + * **binary data and predate the introduction of typed arrays in JavaScript.** + * **For code running using Node.js APIs, converting between base64-encoded strings** + * **and binary data should be performed using `Buffer.from(str, 'base64')` and`buf.toString('base64')`.** + * @since v15.13.0, v14.17.0 + * @deprecated Use `buf.toString('base64')` instead. + * @param data An ASCII (Latin1) string. + */ + function btoa(data: string): string; + + interface Blob extends __Blob {} + /** + * `Blob` class is a global reference for `require('node:buffer').Blob` + * https://nodejs.org/api/buffer.html#class-blob + * @since v18.0.0 + */ + var Blob: typeof globalThis extends { + onmessage: any; + Blob: infer T; + } + ? T + : typeof NodeBlob; + } +} +declare module 'node:buffer' { + export * from 'buffer'; +} diff --git a/node_modules/@types/node/child_process.d.ts b/node_modules/@types/node/child_process.d.ts new file mode 100755 index 0000000..79c7290 --- /dev/null +++ b/node_modules/@types/node/child_process.d.ts @@ -0,0 +1,1369 @@ +/** + * The `child_process` module provides the ability to spawn subprocesses in + * a manner that is similar, but not identical, to [`popen(3)`](http://man7.org/linux/man-pages/man3/popen.3.html). This capability + * is primarily provided by the {@link spawn} function: + * + * ```js + * const { spawn } = require('child_process'); + * const ls = spawn('ls', ['-lh', '/usr']); + * + * ls.stdout.on('data', (data) => { + * console.log(`stdout: ${data}`); + * }); + * + * ls.stderr.on('data', (data) => { + * console.error(`stderr: ${data}`); + * }); + * + * ls.on('close', (code) => { + * console.log(`child process exited with code ${code}`); + * }); + * ``` + * + * By default, pipes for `stdin`, `stdout`, and `stderr` are established between + * the parent Node.js process and the spawned subprocess. These pipes have + * limited (and platform-specific) capacity. If the subprocess writes to + * stdout in excess of that limit without the output being captured, the + * subprocess blocks waiting for the pipe buffer to accept more data. This is + * identical to the behavior of pipes in the shell. Use the `{ stdio: 'ignore' }`option if the output will not be consumed. + * + * The command lookup is performed using the `options.env.PATH` environment + * variable if `env` is in the `options` object. Otherwise, `process.env.PATH` is + * used. 
If `options.env` is set without `PATH`, lookup on Unix is performed + * on a default search path search of `/usr/bin:/bin` (see your operating system's + * manual for execvpe/execvp), on Windows the current processes environment + * variable `PATH` is used. + * + * On Windows, environment variables are case-insensitive. Node.js + * lexicographically sorts the `env` keys and uses the first one that + * case-insensitively matches. Only first (in lexicographic order) entry will be + * passed to the subprocess. This might lead to issues on Windows when passing + * objects to the `env` option that have multiple variants of the same key, such as`PATH` and `Path`. + * + * The {@link spawn} method spawns the child process asynchronously, + * without blocking the Node.js event loop. The {@link spawnSync} function provides equivalent functionality in a synchronous manner that blocks + * the event loop until the spawned process either exits or is terminated. + * + * For convenience, the `child_process` module provides a handful of synchronous + * and asynchronous alternatives to {@link spawn} and {@link spawnSync}. Each of these alternatives are implemented on + * top of {@link spawn} or {@link spawnSync}. + * + * * {@link exec}: spawns a shell and runs a command within that + * shell, passing the `stdout` and `stderr` to a callback function when + * complete. + * * {@link execFile}: similar to {@link exec} except + * that it spawns the command directly without first spawning a shell by + * default. + * * {@link fork}: spawns a new Node.js process and invokes a + * specified module with an IPC communication channel established that allows + * sending messages between parent and child. + * * {@link execSync}: a synchronous version of {@link exec} that will block the Node.js event loop. + * * {@link execFileSync}: a synchronous version of {@link execFile} that will block the Node.js event loop. + * + * For certain use cases, such as automating shell scripts, the `synchronous counterparts` may be more convenient. In many cases, however, + * the synchronous methods can have significant impact on performance due to + * stalling the event loop while spawned processes complete. + * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/child_process.js) + */ +declare module 'child_process' { + import { ObjectEncodingOptions } from 'node:fs'; + import { EventEmitter, Abortable } from 'node:events'; + import * as net from 'node:net'; + import { Writable, Readable, Stream, Pipe } from 'node:stream'; + import { URL } from 'node:url'; + type Serializable = string | object | number | boolean | bigint; + type SendHandle = net.Socket | net.Server; + /** + * Instances of the `ChildProcess` represent spawned child processes. + * + * Instances of `ChildProcess` are not intended to be created directly. Rather, + * use the {@link spawn}, {@link exec},{@link execFile}, or {@link fork} methods to create + * instances of `ChildProcess`. + * @since v2.2.0 + */ + class ChildProcess extends EventEmitter { + /** + * A `Writable Stream` that represents the child process's `stdin`. + * + * If a child process waits to read all of its input, the child will not continue + * until this stream has been closed via `end()`. + * + * If the child was spawned with `stdio[0]` set to anything other than `'pipe'`, + * then this will be `null`. + * + * `subprocess.stdin` is an alias for `subprocess.stdio[0]`. Both properties will + * refer to the same value. 
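+ *
+ * A brief illustrative sketch (not from the upstream docs; assumes a Unix-like system
+ * where `cat` is available), piping input through the child's `stdin`:
+ *
+ * ```js
+ * const { spawn } = require('child_process');
+ *
+ * const subprocess = spawn('cat');
+ *
+ * subprocess.stdout.on('data', (data) => {
+ *   console.log(`echoed: ${data}`);
+ * });
+ *
+ * // Write to the child's stdin; end() closes the stream so `cat` can exit.
+ * subprocess.stdin.write('hello\n');
+ * subprocess.stdin.end();
+ * ```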
+ * + * The `subprocess.stdin` property can be `undefined` if the child process could + * not be successfully spawned. + * @since v0.1.90 + */ + stdin: Writable | null; + /** + * A `Readable Stream` that represents the child process's `stdout`. + * + * If the child was spawned with `stdio[1]` set to anything other than `'pipe'`, + * then this will be `null`. + * + * `subprocess.stdout` is an alias for `subprocess.stdio[1]`. Both properties will + * refer to the same value. + * + * ```js + * const { spawn } = require('child_process'); + * + * const subprocess = spawn('ls'); + * + * subprocess.stdout.on('data', (data) => { + * console.log(`Received chunk ${data}`); + * }); + * ``` + * + * The `subprocess.stdout` property can be `null` if the child process could + * not be successfully spawned. + * @since v0.1.90 + */ + stdout: Readable | null; + /** + * A `Readable Stream` that represents the child process's `stderr`. + * + * If the child was spawned with `stdio[2]` set to anything other than `'pipe'`, + * then this will be `null`. + * + * `subprocess.stderr` is an alias for `subprocess.stdio[2]`. Both properties will + * refer to the same value. + * + * The `subprocess.stderr` property can be `null` if the child process could + * not be successfully spawned. + * @since v0.1.90 + */ + stderr: Readable | null; + /** + * The `subprocess.channel` property is a reference to the child's IPC channel. If + * no IPC channel currently exists, this property is `undefined`. + * @since v7.1.0 + */ + readonly channel?: Pipe | null | undefined; + /** + * A sparse array of pipes to the child process, corresponding with positions in + * the `stdio` option passed to {@link spawn} that have been set + * to the value `'pipe'`. `subprocess.stdio[0]`, `subprocess.stdio[1]`, and`subprocess.stdio[2]` are also available as `subprocess.stdin`,`subprocess.stdout`, and `subprocess.stderr`, + * respectively. + * + * In the following example, only the child's fd `1` (stdout) is configured as a + * pipe, so only the parent's `subprocess.stdio[1]` is a stream, all other values + * in the array are `null`. + * + * ```js + * const assert = require('assert'); + * const fs = require('fs'); + * const child_process = require('child_process'); + * + * const subprocess = child_process.spawn('ls', { + * stdio: [ + * 0, // Use parent's stdin for child. + * 'pipe', // Pipe child's stdout to parent. + * fs.openSync('err.out', 'w'), // Direct child's stderr to a file. + * ] + * }); + * + * assert.strictEqual(subprocess.stdio[0], null); + * assert.strictEqual(subprocess.stdio[0], subprocess.stdin); + * + * assert(subprocess.stdout); + * assert.strictEqual(subprocess.stdio[1], subprocess.stdout); + * + * assert.strictEqual(subprocess.stdio[2], null); + * assert.strictEqual(subprocess.stdio[2], subprocess.stderr); + * ``` + * + * The `subprocess.stdio` property can be `undefined` if the child process could + * not be successfully spawned. + * @since v0.7.10 + */ + readonly stdio: [ + Writable | null, + // stdin + Readable | null, + // stdout + Readable | null, + // stderr + Readable | Writable | null | undefined, + // extra + Readable | Writable | null | undefined // extra + ]; + /** + * The `subprocess.killed` property indicates whether the child process + * successfully received a signal from `subprocess.kill()`. The `killed` property + * does not indicate that the child process has been terminated. + * @since v0.5.10 + */ + readonly killed: boolean; + /** + * Returns the process identifier (PID) of the child process. 
If the child process + * fails to spawn due to errors, then the value is `undefined` and `error` is + * emitted. + * + * ```js + * const { spawn } = require('child_process'); + * const grep = spawn('grep', ['ssh']); + * + * console.log(`Spawned child pid: ${grep.pid}`); + * grep.stdin.end(); + * ``` + * @since v0.1.90 + */ + readonly pid?: number | undefined; + /** + * The `subprocess.connected` property indicates whether it is still possible to + * send and receive messages from a child process. When `subprocess.connected` is`false`, it is no longer possible to send or receive messages. + * @since v0.7.2 + */ + readonly connected: boolean; + /** + * The `subprocess.exitCode` property indicates the exit code of the child process. + * If the child process is still running, the field will be `null`. + */ + readonly exitCode: number | null; + /** + * The `subprocess.signalCode` property indicates the signal received by + * the child process if any, else `null`. + */ + readonly signalCode: NodeJS.Signals | null; + /** + * The `subprocess.spawnargs` property represents the full list of command-line + * arguments the child process was launched with. + */ + readonly spawnargs: string[]; + /** + * The `subprocess.spawnfile` property indicates the executable file name of + * the child process that is launched. + * + * For {@link fork}, its value will be equal to `process.execPath`. + * For {@link spawn}, its value will be the name of + * the executable file. + * For {@link exec}, its value will be the name of the shell + * in which the child process is launched. + */ + readonly spawnfile: string; + /** + * The `subprocess.kill()` method sends a signal to the child process. If no + * argument is given, the process will be sent the `'SIGTERM'` signal. See [`signal(7)`](http://man7.org/linux/man-pages/man7/signal.7.html) for a list of available signals. This function + * returns `true` if [`kill(2)`](http://man7.org/linux/man-pages/man2/kill.2.html) succeeds, and `false` otherwise. + * + * ```js + * const { spawn } = require('child_process'); + * const grep = spawn('grep', ['ssh']); + * + * grep.on('close', (code, signal) => { + * console.log( + * `child process terminated due to receipt of signal ${signal}`); + * }); + * + * // Send SIGHUP to process. + * grep.kill('SIGHUP'); + * ``` + * + * The `ChildProcess` object may emit an `'error'` event if the signal + * cannot be delivered. Sending a signal to a child process that has already exited + * is not an error but may have unforeseen consequences. Specifically, if the + * process identifier (PID) has been reassigned to another process, the signal will + * be delivered to that process instead which can have unexpected results. + * + * While the function is called `kill`, the signal delivered to the child process + * may not actually terminate the process. + * + * See [`kill(2)`](http://man7.org/linux/man-pages/man2/kill.2.html) for reference. + * + * On Windows, where POSIX signals do not exist, the `signal` argument will be + * ignored, and the process will be killed forcefully and abruptly (similar to`'SIGKILL'`). + * See `Signal Events` for more details. + * + * On Linux, child processes of child processes will not be terminated + * when attempting to kill their parent. 
This is likely to happen when running a + * new process in a shell or with the use of the `shell` option of `ChildProcess`: + * + * ```js + * 'use strict'; + * const { spawn } = require('child_process'); + * + * const subprocess = spawn( + * 'sh', + * [ + * '-c', + * `node -e "setInterval(() => { + * console.log(process.pid, 'is alive') + * }, 500);"`, + * ], { + * stdio: ['inherit', 'inherit', 'inherit'] + * } + * ); + * + * setTimeout(() => { + * subprocess.kill(); // Does not terminate the Node.js process in the shell. + * }, 2000); + * ``` + * @since v0.1.90 + */ + kill(signal?: NodeJS.Signals | number): boolean; + /** + * When an IPC channel has been established between the parent and child ( + * i.e. when using {@link fork}), the `subprocess.send()` method can + * be used to send messages to the child process. When the child process is a + * Node.js instance, these messages can be received via the `'message'` event. + * + * The message goes through serialization and parsing. The resulting + * message might not be the same as what is originally sent. + * + * For example, in the parent script: + * + * ```js + * const cp = require('child_process'); + * const n = cp.fork(`${__dirname}/sub.js`); + * + * n.on('message', (m) => { + * console.log('PARENT got message:', m); + * }); + * + * // Causes the child to print: CHILD got message: { hello: 'world' } + * n.send({ hello: 'world' }); + * ``` + * + * And then the child script, `'sub.js'` might look like this: + * + * ```js + * process.on('message', (m) => { + * console.log('CHILD got message:', m); + * }); + * + * // Causes the parent to print: PARENT got message: { foo: 'bar', baz: null } + * process.send({ foo: 'bar', baz: NaN }); + * ``` + * + * Child Node.js processes will have a `process.send()` method of their own + * that allows the child to send messages back to the parent. + * + * There is a special case when sending a `{cmd: 'NODE_foo'}` message. Messages + * containing a `NODE_` prefix in the `cmd` property are reserved for use within + * Node.js core and will not be emitted in the child's `'message'` event. Rather, such messages are emitted using the`'internalMessage'` event and are consumed internally by Node.js. + * Applications should avoid using such messages or listening for`'internalMessage'` events as it is subject to change without notice. + * + * The optional `sendHandle` argument that may be passed to `subprocess.send()` is + * for passing a TCP server or socket object to the child process. The child will + * receive the object as the second argument passed to the callback function + * registered on the `'message'` event. Any data that is received + * and buffered in the socket will not be sent to the child. + * + * The optional `callback` is a function that is invoked after the message is + * sent but before the child may have received it. The function is called with a + * single argument: `null` on success, or an `Error` object on failure. + * + * If no `callback` function is provided and the message cannot be sent, an`'error'` event will be emitted by the `ChildProcess` object. This can + * happen, for instance, when the child process has already exited. + * + * `subprocess.send()` will return `false` if the channel has closed or when the + * backlog of unsent messages exceeds a threshold that makes it unwise to send + * more. Otherwise, the method returns `true`. The `callback` function can be + * used to implement flow control. 
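+ *
+ * A minimal sketch (not from the upstream docs) of using the `callback` for simple flow
+ * control, reusing the hypothetical `sub.js` child from the example above:
+ *
+ * ```js
+ * const { fork } = require('child_process');
+ * const n = fork(`${__dirname}/sub.js`);
+ *
+ * n.send({ chunk: 1 }, (err) => {
+ *   if (err) {
+ *     console.error('send failed:', err);
+ *     return;
+ *   }
+ *   // The previous message has been handed off, so it is safe to queue the next one.
+ *   n.send({ chunk: 2 });
+ * });
+ * ```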
+ * + * #### Example: sending a server object + * + * The `sendHandle` argument can be used, for instance, to pass the handle of + * a TCP server object to the child process as illustrated in the example below: + * + * ```js + * const subprocess = require('child_process').fork('subprocess.js'); + * + * // Open up the server object and send the handle. + * const server = require('net').createServer(); + * server.on('connection', (socket) => { + * socket.end('handled by parent'); + * }); + * server.listen(1337, () => { + * subprocess.send('server', server); + * }); + * ``` + * + * The child would then receive the server object as: + * + * ```js + * process.on('message', (m, server) => { + * if (m === 'server') { + * server.on('connection', (socket) => { + * socket.end('handled by child'); + * }); + * } + * }); + * ``` + * + * Once the server is now shared between the parent and child, some connections + * can be handled by the parent and some by the child. + * + * While the example above uses a server created using the `net` module, `dgram`module servers use exactly the same workflow with the exceptions of listening on + * a `'message'` event instead of `'connection'` and using `server.bind()` instead + * of `server.listen()`. This is, however, currently only supported on Unix + * platforms. + * + * #### Example: sending a socket object + * + * Similarly, the `sendHandler` argument can be used to pass the handle of a + * socket to the child process. The example below spawns two children that each + * handle connections with "normal" or "special" priority: + * + * ```js + * const { fork } = require('child_process'); + * const normal = fork('subprocess.js', ['normal']); + * const special = fork('subprocess.js', ['special']); + * + * // Open up the server and send sockets to child. Use pauseOnConnect to prevent + * // the sockets from being read before they are sent to the child process. + * const server = require('net').createServer({ pauseOnConnect: true }); + * server.on('connection', (socket) => { + * + * // If this is special priority... + * if (socket.remoteAddress === '74.125.127.100') { + * special.send('socket', socket); + * return; + * } + * // This is normal priority. + * normal.send('socket', socket); + * }); + * server.listen(1337); + * ``` + * + * The `subprocess.js` would receive the socket handle as the second argument + * passed to the event callback function: + * + * ```js + * process.on('message', (m, socket) => { + * if (m === 'socket') { + * if (socket) { + * // Check that the client socket exists. + * // It is possible for the socket to be closed between the time it is + * // sent and the time it is received in the child process. + * socket.end(`Request handled with ${process.argv[2]} priority`); + * } + * } + * }); + * ``` + * + * Do not use `.maxConnections` on a socket that has been passed to a subprocess. + * The parent cannot track when the socket is destroyed. + * + * Any `'message'` handlers in the subprocess should verify that `socket` exists, + * as the connection may have been closed during the time it takes to send the + * connection to the child. + * @since v0.5.9 + * @param options The `options` argument, if present, is an object used to parameterize the sending of certain types of handles. 
`options` supports the following properties: + */ + send(message: Serializable, callback?: (error: Error | null) => void): boolean; + send(message: Serializable, sendHandle?: SendHandle, callback?: (error: Error | null) => void): boolean; + send(message: Serializable, sendHandle?: SendHandle, options?: MessageOptions, callback?: (error: Error | null) => void): boolean; + /** + * Closes the IPC channel between parent and child, allowing the child to exit + * gracefully once there are no other connections keeping it alive. After calling + * this method the `subprocess.connected` and `process.connected` properties in + * both the parent and child (respectively) will be set to `false`, and it will be + * no longer possible to pass messages between the processes. + * + * The `'disconnect'` event will be emitted when there are no messages in the + * process of being received. This will most often be triggered immediately after + * calling `subprocess.disconnect()`. + * + * When the child process is a Node.js instance (e.g. spawned using {@link fork}), the `process.disconnect()` method can be invoked + * within the child process to close the IPC channel as well. + * @since v0.7.2 + */ + disconnect(): void; + /** + * By default, the parent will wait for the detached child to exit. To prevent the + * parent from waiting for a given `subprocess` to exit, use the`subprocess.unref()` method. Doing so will cause the parent's event loop to not + * include the child in its reference count, allowing the parent to exit + * independently of the child, unless there is an established IPC channel between + * the child and the parent. + * + * ```js + * const { spawn } = require('child_process'); + * + * const subprocess = spawn(process.argv[0], ['child_program.js'], { + * detached: true, + * stdio: 'ignore' + * }); + * + * subprocess.unref(); + * ``` + * @since v0.7.10 + */ + unref(): void; + /** + * Calling `subprocess.ref()` after making a call to `subprocess.unref()` will + * restore the removed reference count for the child process, forcing the parent + * to wait for the child to exit before exiting itself. + * + * ```js + * const { spawn } = require('child_process'); + * + * const subprocess = spawn(process.argv[0], ['child_program.js'], { + * detached: true, + * stdio: 'ignore' + * }); + * + * subprocess.unref(); + * subprocess.ref(); + * ``` + * @since v0.7.10 + */ + ref(): void; + /** + * events.EventEmitter + * 1. close + * 2. disconnect + * 3. error + * 4. exit + * 5. message + * 6. 
spawn + */ + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: 'close', listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + addListener(event: 'disconnect', listener: () => void): this; + addListener(event: 'error', listener: (err: Error) => void): this; + addListener(event: 'exit', listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + addListener(event: 'message', listener: (message: Serializable, sendHandle: SendHandle) => void): this; + addListener(event: 'spawn', listener: () => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: 'close', code: number | null, signal: NodeJS.Signals | null): boolean; + emit(event: 'disconnect'): boolean; + emit(event: 'error', err: Error): boolean; + emit(event: 'exit', code: number | null, signal: NodeJS.Signals | null): boolean; + emit(event: 'message', message: Serializable, sendHandle: SendHandle): boolean; + emit(event: 'spawn', listener: () => void): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: 'close', listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + on(event: 'disconnect', listener: () => void): this; + on(event: 'error', listener: (err: Error) => void): this; + on(event: 'exit', listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + on(event: 'message', listener: (message: Serializable, sendHandle: SendHandle) => void): this; + on(event: 'spawn', listener: () => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: 'close', listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + once(event: 'disconnect', listener: () => void): this; + once(event: 'error', listener: (err: Error) => void): this; + once(event: 'exit', listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + once(event: 'message', listener: (message: Serializable, sendHandle: SendHandle) => void): this; + once(event: 'spawn', listener: () => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: 'close', listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + prependListener(event: 'disconnect', listener: () => void): this; + prependListener(event: 'error', listener: (err: Error) => void): this; + prependListener(event: 'exit', listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + prependListener(event: 'message', listener: (message: Serializable, sendHandle: SendHandle) => void): this; + prependListener(event: 'spawn', listener: () => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: 'close', listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + prependOnceListener(event: 'disconnect', listener: () => void): this; + prependOnceListener(event: 'error', listener: (err: Error) => void): this; + prependOnceListener(event: 'exit', listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + prependOnceListener(event: 'message', listener: (message: Serializable, sendHandle: SendHandle) => void): this; + prependOnceListener(event: 'spawn', listener: () => void): this; + } + // return this object when stdio option is undefined or not specified + interface ChildProcessWithoutNullStreams extends ChildProcess { + stdin: Writable; + stdout: Readable; + stderr: 
Readable; + readonly stdio: [ + Writable, + Readable, + Readable, + // stderr + Readable | Writable | null | undefined, + // extra, no modification + Readable | Writable | null | undefined // extra, no modification + ]; + } + // return this object when stdio option is a tuple of 3 + interface ChildProcessByStdio extends ChildProcess { + stdin: I; + stdout: O; + stderr: E; + readonly stdio: [ + I, + O, + E, + Readable | Writable | null | undefined, + // extra, no modification + Readable | Writable | null | undefined // extra, no modification + ]; + } + interface MessageOptions { + keepOpen?: boolean | undefined; + } + type IOType = 'overlapped' | 'pipe' | 'ignore' | 'inherit'; + type StdioOptions = IOType | Array; + type SerializationType = 'json' | 'advanced'; + interface MessagingOptions extends Abortable { + /** + * Specify the kind of serialization used for sending messages between processes. + * @default 'json' + */ + serialization?: SerializationType | undefined; + /** + * The signal value to be used when the spawned process will be killed by the abort signal. + * @default 'SIGTERM' + */ + killSignal?: NodeJS.Signals | number | undefined; + /** + * In milliseconds the maximum amount of time the process is allowed to run. + */ + timeout?: number | undefined; + } + interface ProcessEnvOptions { + uid?: number | undefined; + gid?: number | undefined; + cwd?: string | URL | undefined; + env?: NodeJS.ProcessEnv | undefined; + } + interface CommonOptions extends ProcessEnvOptions { + /** + * @default true + */ + windowsHide?: boolean | undefined; + /** + * @default 0 + */ + timeout?: number | undefined; + } + interface CommonSpawnOptions extends CommonOptions, MessagingOptions, Abortable { + argv0?: string | undefined; + stdio?: StdioOptions | undefined; + shell?: boolean | string | undefined; + windowsVerbatimArguments?: boolean | undefined; + } + interface SpawnOptions extends CommonSpawnOptions { + detached?: boolean | undefined; + } + interface SpawnOptionsWithoutStdio extends SpawnOptions { + stdio?: StdioPipeNamed | StdioPipe[] | undefined; + } + type StdioNull = 'inherit' | 'ignore' | Stream; + type StdioPipeNamed = 'pipe' | 'overlapped'; + type StdioPipe = undefined | null | StdioPipeNamed; + interface SpawnOptionsWithStdioTuple extends SpawnOptions { + stdio: [Stdin, Stdout, Stderr]; + } + /** + * The `child_process.spawn()` method spawns a new process using the given`command`, with command-line arguments in `args`. If omitted, `args` defaults + * to an empty array. + * + * **If the `shell` option is enabled, do not pass unsanitized user input to this** + * **function. Any input containing shell metacharacters may be used to trigger** + * **arbitrary command execution.** + * + * A third argument may be used to specify additional options, with these defaults: + * + * ```js + * const defaults = { + * cwd: undefined, + * env: process.env + * }; + * ``` + * + * Use `cwd` to specify the working directory from which the process is spawned. + * If not given, the default is to inherit the current working directory. If given, + * but the path does not exist, the child process emits an `ENOENT` error + * and exits immediately. `ENOENT` is also emitted when the command + * does not exist. + * + * Use `env` to specify environment variables that will be visible to the new + * process, the default is `process.env`. + * + * `undefined` values in `env` will be ignored. 
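+ *
+ * A short illustrative sketch (not from the upstream docs) of passing `cwd` and `env`;
+ * it assumes a Unix-like system where `printenv` is available:
+ *
+ * ```js
+ * const { spawn } = require('child_process');
+ *
+ * const child = spawn('printenv', ['GREETING'], {
+ *   cwd: '/tmp',
+ *   // Spread process.env so PATH and friends stay visible to the child.
+ *   env: { ...process.env, GREETING: 'hello' },
+ * });
+ *
+ * child.stdout.on('data', (data) => {
+ *   console.log(`stdout: ${data}`);
+ * });
+ * ```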
+ * + * Example of running `ls -lh /usr`, capturing `stdout`, `stderr`, and the + * exit code: + * + * ```js + * const { spawn } = require('child_process'); + * const ls = spawn('ls', ['-lh', '/usr']); + * + * ls.stdout.on('data', (data) => { + * console.log(`stdout: ${data}`); + * }); + * + * ls.stderr.on('data', (data) => { + * console.error(`stderr: ${data}`); + * }); + * + * ls.on('close', (code) => { + * console.log(`child process exited with code ${code}`); + * }); + * ``` + * + * Example: A very elaborate way to run `ps ax | grep ssh` + * + * ```js + * const { spawn } = require('child_process'); + * const ps = spawn('ps', ['ax']); + * const grep = spawn('grep', ['ssh']); + * + * ps.stdout.on('data', (data) => { + * grep.stdin.write(data); + * }); + * + * ps.stderr.on('data', (data) => { + * console.error(`ps stderr: ${data}`); + * }); + * + * ps.on('close', (code) => { + * if (code !== 0) { + * console.log(`ps process exited with code ${code}`); + * } + * grep.stdin.end(); + * }); + * + * grep.stdout.on('data', (data) => { + * console.log(data.toString()); + * }); + * + * grep.stderr.on('data', (data) => { + * console.error(`grep stderr: ${data}`); + * }); + * + * grep.on('close', (code) => { + * if (code !== 0) { + * console.log(`grep process exited with code ${code}`); + * } + * }); + * ``` + * + * Example of checking for failed `spawn`: + * + * ```js + * const { spawn } = require('child_process'); + * const subprocess = spawn('bad_command'); + * + * subprocess.on('error', (err) => { + * console.error('Failed to start subprocess.'); + * }); + * ``` + * + * Certain platforms (macOS, Linux) will use the value of `argv[0]` for the process + * title while others (Windows, SunOS) will use `command`. + * + * Node.js currently overwrites `argv[0]` with `process.execPath` on startup, so`process.argv[0]` in a Node.js child process will not match the `argv0`parameter passed to `spawn` from the parent, + * retrieve it with the`process.argv0` property instead. + * + * If the `signal` option is enabled, calling `.abort()` on the corresponding`AbortController` is similar to calling `.kill()` on the child process except + * the error passed to the callback will be an `AbortError`: + * + * ```js + * const { spawn } = require('child_process'); + * const controller = new AbortController(); + * const { signal } = controller; + * const grep = spawn('grep', ['ssh'], { signal }); + * grep.on('error', (err) => { + * // This will be called with err being an AbortError if the controller aborts + * }); + * controller.abort(); // Stops the child process + * ``` + * @since v0.1.90 + * @param command The command to run. + * @param args List of string arguments. 
+ */ + function spawn(command: string, options?: SpawnOptionsWithoutStdio): ChildProcessWithoutNullStreams; + function spawn(command: string, options: SpawnOptionsWithStdioTuple): ChildProcessByStdio; + function spawn(command: string, options: SpawnOptionsWithStdioTuple): ChildProcessByStdio; + function spawn(command: string, options: SpawnOptionsWithStdioTuple): ChildProcessByStdio; + function spawn(command: string, options: SpawnOptionsWithStdioTuple): ChildProcessByStdio; + function spawn(command: string, options: SpawnOptionsWithStdioTuple): ChildProcessByStdio; + function spawn(command: string, options: SpawnOptionsWithStdioTuple): ChildProcessByStdio; + function spawn(command: string, options: SpawnOptionsWithStdioTuple): ChildProcessByStdio; + function spawn(command: string, options: SpawnOptionsWithStdioTuple): ChildProcessByStdio; + function spawn(command: string, options: SpawnOptions): ChildProcess; + // overloads of spawn with 'args' + function spawn(command: string, args?: ReadonlyArray, options?: SpawnOptionsWithoutStdio): ChildProcessWithoutNullStreams; + function spawn(command: string, args: ReadonlyArray, options: SpawnOptionsWithStdioTuple): ChildProcessByStdio; + function spawn(command: string, args: ReadonlyArray, options: SpawnOptionsWithStdioTuple): ChildProcessByStdio; + function spawn(command: string, args: ReadonlyArray, options: SpawnOptionsWithStdioTuple): ChildProcessByStdio; + function spawn(command: string, args: ReadonlyArray, options: SpawnOptionsWithStdioTuple): ChildProcessByStdio; + function spawn(command: string, args: ReadonlyArray, options: SpawnOptionsWithStdioTuple): ChildProcessByStdio; + function spawn(command: string, args: ReadonlyArray, options: SpawnOptionsWithStdioTuple): ChildProcessByStdio; + function spawn(command: string, args: ReadonlyArray, options: SpawnOptionsWithStdioTuple): ChildProcessByStdio; + function spawn(command: string, args: ReadonlyArray, options: SpawnOptionsWithStdioTuple): ChildProcessByStdio; + function spawn(command: string, args: ReadonlyArray, options: SpawnOptions): ChildProcess; + interface ExecOptions extends CommonOptions { + shell?: string | undefined; + signal?: AbortSignal | undefined; + maxBuffer?: number | undefined; + killSignal?: NodeJS.Signals | number | undefined; + } + interface ExecOptionsWithStringEncoding extends ExecOptions { + encoding: BufferEncoding; + } + interface ExecOptionsWithBufferEncoding extends ExecOptions { + encoding: BufferEncoding | null; // specify `null`. + } + interface ExecException extends Error { + cmd?: string | undefined; + killed?: boolean | undefined; + code?: number | undefined; + signal?: NodeJS.Signals | undefined; + } + /** + * Spawns a shell then executes the `command` within that shell, buffering any + * generated output. The `command` string passed to the exec function is processed + * directly by the shell and special characters (vary based on [shell](https://en.wikipedia.org/wiki/List_of_command-line_interpreters)) + * need to be dealt with accordingly: + * + * ```js + * const { exec } = require('child_process'); + * + * exec('"/path/to/test file/test.sh" arg1 arg2'); + * // Double quotes are used so that the space in the path is not interpreted as + * // a delimiter of multiple arguments. + * + * exec('echo "The \\$HOME variable is $HOME"'); + * // The $HOME variable is escaped in the first instance, but not in the second. + * ``` + * + * **Never pass unsanitized user input to this function. 
Any input containing shell** + * **metacharacters may be used to trigger arbitrary command execution.** + * + * If a `callback` function is provided, it is called with the arguments`(error, stdout, stderr)`. On success, `error` will be `null`. On error,`error` will be an instance of `Error`. The + * `error.code` property will be + * the exit code of the process. By convention, any exit code other than `0`indicates an error. `error.signal` will be the signal that terminated the + * process. + * + * The `stdout` and `stderr` arguments passed to the callback will contain the + * stdout and stderr output of the child process. By default, Node.js will decode + * the output as UTF-8 and pass strings to the callback. The `encoding` option + * can be used to specify the character encoding used to decode the stdout and + * stderr output. If `encoding` is `'buffer'`, or an unrecognized character + * encoding, `Buffer` objects will be passed to the callback instead. + * + * ```js + * const { exec } = require('child_process'); + * exec('cat *.js missing_file | wc -l', (error, stdout, stderr) => { + * if (error) { + * console.error(`exec error: ${error}`); + * return; + * } + * console.log(`stdout: ${stdout}`); + * console.error(`stderr: ${stderr}`); + * }); + * ``` + * + * If `timeout` is greater than `0`, the parent will send the signal + * identified by the `killSignal` property (the default is `'SIGTERM'`) if the + * child runs longer than `timeout` milliseconds. + * + * Unlike the [`exec(3)`](http://man7.org/linux/man-pages/man3/exec.3.html) POSIX system call, `child_process.exec()` does not replace + * the existing process and uses a shell to execute the command. + * + * If this method is invoked as its `util.promisify()` ed version, it returns + * a `Promise` for an `Object` with `stdout` and `stderr` properties. The returned`ChildProcess` instance is attached to the `Promise` as a `child` property. In + * case of an error (including any error resulting in an exit code other than 0), a + * rejected promise is returned, with the same `error` object given in the + * callback, but with two additional properties `stdout` and `stderr`. + * + * ```js + * const util = require('util'); + * const exec = util.promisify(require('child_process').exec); + * + * async function lsExample() { + * const { stdout, stderr } = await exec('ls'); + * console.log('stdout:', stdout); + * console.error('stderr:', stderr); + * } + * lsExample(); + * ``` + * + * If the `signal` option is enabled, calling `.abort()` on the corresponding`AbortController` is similar to calling `.kill()` on the child process except + * the error passed to the callback will be an `AbortError`: + * + * ```js + * const { exec } = require('child_process'); + * const controller = new AbortController(); + * const { signal } = controller; + * const child = exec('grep ssh', { signal }, (error) => { + * console.log(error); // an AbortError + * }); + * controller.abort(); + * ``` + * @since v0.1.90 + * @param command The command to run, with space-separated arguments. + * @param callback called with the output when process terminates. + */ + function exec(command: string, callback?: (error: ExecException | null, stdout: string, stderr: string) => void): ChildProcess; + // `options` with `"buffer"` or `null` for `encoding` means stdout/stderr are definitely `Buffer`. 
+ function exec( + command: string, + options: { + encoding: 'buffer' | null; + } & ExecOptions, + callback?: (error: ExecException | null, stdout: Buffer, stderr: Buffer) => void + ): ChildProcess; + // `options` with well known `encoding` means stdout/stderr are definitely `string`. + function exec( + command: string, + options: { + encoding: BufferEncoding; + } & ExecOptions, + callback?: (error: ExecException | null, stdout: string, stderr: string) => void + ): ChildProcess; + // `options` with an `encoding` whose type is `string` means stdout/stderr could either be `Buffer` or `string`. + // There is no guarantee the `encoding` is unknown as `string` is a superset of `BufferEncoding`. + function exec( + command: string, + options: { + encoding: BufferEncoding; + } & ExecOptions, + callback?: (error: ExecException | null, stdout: string | Buffer, stderr: string | Buffer) => void + ): ChildProcess; + // `options` without an `encoding` means stdout/stderr are definitely `string`. + function exec(command: string, options: ExecOptions, callback?: (error: ExecException | null, stdout: string, stderr: string) => void): ChildProcess; + // fallback if nothing else matches. Worst case is always `string | Buffer`. + function exec( + command: string, + options: (ObjectEncodingOptions & ExecOptions) | undefined | null, + callback?: (error: ExecException | null, stdout: string | Buffer, stderr: string | Buffer) => void + ): ChildProcess; + interface PromiseWithChild extends Promise { + child: ChildProcess; + } + namespace exec { + function __promisify__(command: string): PromiseWithChild<{ + stdout: string; + stderr: string; + }>; + function __promisify__( + command: string, + options: { + encoding: 'buffer' | null; + } & ExecOptions + ): PromiseWithChild<{ + stdout: Buffer; + stderr: Buffer; + }>; + function __promisify__( + command: string, + options: { + encoding: BufferEncoding; + } & ExecOptions + ): PromiseWithChild<{ + stdout: string; + stderr: string; + }>; + function __promisify__( + command: string, + options: ExecOptions + ): PromiseWithChild<{ + stdout: string; + stderr: string; + }>; + function __promisify__( + command: string, + options?: (ObjectEncodingOptions & ExecOptions) | null + ): PromiseWithChild<{ + stdout: string | Buffer; + stderr: string | Buffer; + }>; + } + interface ExecFileOptions extends CommonOptions, Abortable { + maxBuffer?: number | undefined; + killSignal?: NodeJS.Signals | number | undefined; + windowsVerbatimArguments?: boolean | undefined; + shell?: boolean | string | undefined; + signal?: AbortSignal | undefined; + } + interface ExecFileOptionsWithStringEncoding extends ExecFileOptions { + encoding: BufferEncoding; + } + interface ExecFileOptionsWithBufferEncoding extends ExecFileOptions { + encoding: 'buffer' | null; + } + interface ExecFileOptionsWithOtherEncoding extends ExecFileOptions { + encoding: BufferEncoding; + } + type ExecFileException = ExecException & NodeJS.ErrnoException; + /** + * The `child_process.execFile()` function is similar to {@link exec} except that it does not spawn a shell by default. Rather, the specified + * executable `file` is spawned directly as a new process making it slightly more + * efficient than {@link exec}. + * + * The same options as {@link exec} are supported. Since a shell is + * not spawned, behaviors such as I/O redirection and file globbing are not + * supported. 
+ * + * ```js + * const { execFile } = require('child_process'); + * const child = execFile('node', ['--version'], (error, stdout, stderr) => { + * if (error) { + * throw error; + * } + * console.log(stdout); + * }); + * ``` + * + * The `stdout` and `stderr` arguments passed to the callback will contain the + * stdout and stderr output of the child process. By default, Node.js will decode + * the output as UTF-8 and pass strings to the callback. The `encoding` option + * can be used to specify the character encoding used to decode the stdout and + * stderr output. If `encoding` is `'buffer'`, or an unrecognized character + * encoding, `Buffer` objects will be passed to the callback instead. + * + * If this method is invoked as its `util.promisify()` ed version, it returns + * a `Promise` for an `Object` with `stdout` and `stderr` properties. The returned`ChildProcess` instance is attached to the `Promise` as a `child` property. In + * case of an error (including any error resulting in an exit code other than 0), a + * rejected promise is returned, with the same `error` object given in the + * callback, but with two additional properties `stdout` and `stderr`. + * + * ```js + * const util = require('util'); + * const execFile = util.promisify(require('child_process').execFile); + * async function getVersion() { + * const { stdout } = await execFile('node', ['--version']); + * console.log(stdout); + * } + * getVersion(); + * ``` + * + * **If the `shell` option is enabled, do not pass unsanitized user input to this** + * **function. Any input containing shell metacharacters may be used to trigger** + * **arbitrary command execution.** + * + * If the `signal` option is enabled, calling `.abort()` on the corresponding`AbortController` is similar to calling `.kill()` on the child process except + * the error passed to the callback will be an `AbortError`: + * + * ```js + * const { execFile } = require('child_process'); + * const controller = new AbortController(); + * const { signal } = controller; + * const child = execFile('node', ['--version'], { signal }, (error) => { + * console.log(error); // an AbortError + * }); + * controller.abort(); + * ``` + * @since v0.1.91 + * @param file The name or path of the executable file to run. + * @param args List of string arguments. + * @param callback Called with the output when process terminates. + */ + function execFile(file: string): ChildProcess; + function execFile(file: string, options: (ObjectEncodingOptions & ExecFileOptions) | undefined | null): ChildProcess; + function execFile(file: string, args?: ReadonlyArray | null): ChildProcess; + function execFile(file: string, args: ReadonlyArray | undefined | null, options: (ObjectEncodingOptions & ExecFileOptions) | undefined | null): ChildProcess; + // no `options` definitely means stdout/stderr are `string`. + function execFile(file: string, callback: (error: ExecFileException | null, stdout: string, stderr: string) => void): ChildProcess; + function execFile(file: string, args: ReadonlyArray | undefined | null, callback: (error: ExecFileException | null, stdout: string, stderr: string) => void): ChildProcess; + // `options` with `"buffer"` or `null` for `encoding` means stdout/stderr are definitely `Buffer`. 
+ function execFile(file: string, options: ExecFileOptionsWithBufferEncoding, callback: (error: ExecFileException | null, stdout: Buffer, stderr: Buffer) => void): ChildProcess; + function execFile( + file: string, + args: ReadonlyArray | undefined | null, + options: ExecFileOptionsWithBufferEncoding, + callback: (error: ExecFileException | null, stdout: Buffer, stderr: Buffer) => void + ): ChildProcess; + // `options` with well known `encoding` means stdout/stderr are definitely `string`. + function execFile(file: string, options: ExecFileOptionsWithStringEncoding, callback: (error: ExecFileException | null, stdout: string, stderr: string) => void): ChildProcess; + function execFile( + file: string, + args: ReadonlyArray | undefined | null, + options: ExecFileOptionsWithStringEncoding, + callback: (error: ExecFileException | null, stdout: string, stderr: string) => void + ): ChildProcess; + // `options` with an `encoding` whose type is `string` means stdout/stderr could either be `Buffer` or `string`. + // There is no guarantee the `encoding` is unknown as `string` is a superset of `BufferEncoding`. + function execFile(file: string, options: ExecFileOptionsWithOtherEncoding, callback: (error: ExecFileException | null, stdout: string | Buffer, stderr: string | Buffer) => void): ChildProcess; + function execFile( + file: string, + args: ReadonlyArray | undefined | null, + options: ExecFileOptionsWithOtherEncoding, + callback: (error: ExecFileException | null, stdout: string | Buffer, stderr: string | Buffer) => void + ): ChildProcess; + // `options` without an `encoding` means stdout/stderr are definitely `string`. + function execFile(file: string, options: ExecFileOptions, callback: (error: ExecFileException | null, stdout: string, stderr: string) => void): ChildProcess; + function execFile( + file: string, + args: ReadonlyArray | undefined | null, + options: ExecFileOptions, + callback: (error: ExecFileException | null, stdout: string, stderr: string) => void + ): ChildProcess; + // fallback if nothing else matches. Worst case is always `string | Buffer`. 
+ function execFile( + file: string, + options: (ObjectEncodingOptions & ExecFileOptions) | undefined | null, + callback: ((error: ExecFileException | null, stdout: string | Buffer, stderr: string | Buffer) => void) | undefined | null + ): ChildProcess; + function execFile( + file: string, + args: ReadonlyArray | undefined | null, + options: (ObjectEncodingOptions & ExecFileOptions) | undefined | null, + callback: ((error: ExecFileException | null, stdout: string | Buffer, stderr: string | Buffer) => void) | undefined | null + ): ChildProcess; + namespace execFile { + function __promisify__(file: string): PromiseWithChild<{ + stdout: string; + stderr: string; + }>; + function __promisify__( + file: string, + args: ReadonlyArray | undefined | null + ): PromiseWithChild<{ + stdout: string; + stderr: string; + }>; + function __promisify__( + file: string, + options: ExecFileOptionsWithBufferEncoding + ): PromiseWithChild<{ + stdout: Buffer; + stderr: Buffer; + }>; + function __promisify__( + file: string, + args: ReadonlyArray | undefined | null, + options: ExecFileOptionsWithBufferEncoding + ): PromiseWithChild<{ + stdout: Buffer; + stderr: Buffer; + }>; + function __promisify__( + file: string, + options: ExecFileOptionsWithStringEncoding + ): PromiseWithChild<{ + stdout: string; + stderr: string; + }>; + function __promisify__( + file: string, + args: ReadonlyArray | undefined | null, + options: ExecFileOptionsWithStringEncoding + ): PromiseWithChild<{ + stdout: string; + stderr: string; + }>; + function __promisify__( + file: string, + options: ExecFileOptionsWithOtherEncoding + ): PromiseWithChild<{ + stdout: string | Buffer; + stderr: string | Buffer; + }>; + function __promisify__( + file: string, + args: ReadonlyArray | undefined | null, + options: ExecFileOptionsWithOtherEncoding + ): PromiseWithChild<{ + stdout: string | Buffer; + stderr: string | Buffer; + }>; + function __promisify__( + file: string, + options: ExecFileOptions + ): PromiseWithChild<{ + stdout: string; + stderr: string; + }>; + function __promisify__( + file: string, + args: ReadonlyArray | undefined | null, + options: ExecFileOptions + ): PromiseWithChild<{ + stdout: string; + stderr: string; + }>; + function __promisify__( + file: string, + options: (ObjectEncodingOptions & ExecFileOptions) | undefined | null + ): PromiseWithChild<{ + stdout: string | Buffer; + stderr: string | Buffer; + }>; + function __promisify__( + file: string, + args: ReadonlyArray | undefined | null, + options: (ObjectEncodingOptions & ExecFileOptions) | undefined | null + ): PromiseWithChild<{ + stdout: string | Buffer; + stderr: string | Buffer; + }>; + } + interface ForkOptions extends ProcessEnvOptions, MessagingOptions, Abortable { + execPath?: string | undefined; + execArgv?: string[] | undefined; + silent?: boolean | undefined; + stdio?: StdioOptions | undefined; + detached?: boolean | undefined; + windowsVerbatimArguments?: boolean | undefined; + } + /** + * The `child_process.fork()` method is a special case of {@link spawn} used specifically to spawn new Node.js processes. + * Like {@link spawn}, a `ChildProcess` object is returned. The + * returned `ChildProcess` will have an additional communication channel + * built-in that allows messages to be passed back and forth between the parent and + * child. See `subprocess.send()` for details. + * + * Keep in mind that spawned Node.js child processes are + * independent of the parent with exception of the IPC communication channel + * that is established between the two. 
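Taken together, the `execFile` overloads above select `string` or `Buffer` output from the `encoding` option, and the `__promisify__` declarations describe what `util.promisify()` yields. A minimal TypeScript sketch of both behaviours, assuming a standard `tsconfig`; the `node --version` invocation is only an illustrative command, not part of these typings:

```ts
import { execFile } from 'child_process';
import { promisify } from 'util';

// No encoding option: stdout/stderr are decoded as UTF-8 strings.
execFile('node', ['--version'], (error, stdout, stderr) => {
    if (error) throw error;
    const version: string = stdout; // string overload
    console.log(version.trim(), stderr.length);
});

// encoding: 'buffer' selects the Buffer overload instead.
execFile('node', ['--version'], { encoding: 'buffer' }, (error, stdout) => {
    if (error) throw error;
    const raw: Buffer = stdout; // Buffer overload
    console.log(raw.byteLength);
});

// The promisified form resolves to { stdout, stderr }.
const execFileAsync = promisify(execFile);
async function printVersion(): Promise<void> {
    const { stdout } = await execFileAsync('node', ['--version']);
    console.log(stdout.trim());
}
printVersion();
```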
Each process has its own memory, with + * their own V8 instances. Because of the additional resource allocations + * required, spawning a large number of child Node.js processes is not + * recommended. + * + * By default, `child_process.fork()` will spawn new Node.js instances using the `process.execPath` of the parent process. The `execPath` property in the`options` object allows for an alternative + * execution path to be used. + * + * Node.js processes launched with a custom `execPath` will communicate with the + * parent process using the file descriptor (fd) identified using the + * environment variable `NODE_CHANNEL_FD` on the child process. + * + * Unlike the [`fork(2)`](http://man7.org/linux/man-pages/man2/fork.2.html) POSIX system call, `child_process.fork()` does not clone the + * current process. + * + * The `shell` option available in {@link spawn} is not supported by`child_process.fork()` and will be ignored if set. + * + * If the `signal` option is enabled, calling `.abort()` on the corresponding`AbortController` is similar to calling `.kill()` on the child process except + * the error passed to the callback will be an `AbortError`: + * + * ```js + * if (process.argv[2] === 'child') { + * setTimeout(() => { + * console.log(`Hello from ${process.argv[2]}!`); + * }, 1_000); + * } else { + * const { fork } = require('child_process'); + * const controller = new AbortController(); + * const { signal } = controller; + * const child = fork(__filename, ['child'], { signal }); + * child.on('error', (err) => { + * // This will be called with err being an AbortError if the controller aborts + * }); + * controller.abort(); // Stops the child process + * } + * ``` + * @since v0.5.0 + * @param modulePath The module to run in the child. + * @param args List of string arguments. + */ + function fork(modulePath: string, options?: ForkOptions): ChildProcess; + function fork(modulePath: string, args?: ReadonlyArray, options?: ForkOptions): ChildProcess; + interface SpawnSyncOptions extends CommonSpawnOptions { + input?: string | NodeJS.ArrayBufferView | undefined; + maxBuffer?: number | undefined; + encoding?: BufferEncoding | 'buffer' | null | undefined; + } + interface SpawnSyncOptionsWithStringEncoding extends SpawnSyncOptions { + encoding: BufferEncoding; + } + interface SpawnSyncOptionsWithBufferEncoding extends SpawnSyncOptions { + encoding?: 'buffer' | null | undefined; + } + interface SpawnSyncReturns { + pid: number; + output: Array; + stdout: T; + stderr: T; + status: number | null; + signal: NodeJS.Signals | null; + error?: Error | undefined; + } + /** + * The `child_process.spawnSync()` method is generally identical to {@link spawn} with the exception that the function will not return + * until the child process has fully closed. When a timeout has been encountered + * and `killSignal` is sent, the method won't return until the process has + * completely exited. If the process intercepts and handles the `SIGTERM` signal + * and doesn't exit, the parent process will wait until the child process has + * exited. + * + * **If the `shell` option is enabled, do not pass unsanitized user input to this** + * **function. Any input containing shell metacharacters may be used to trigger** + * **arbitrary command execution.** + * @since v0.11.12 + * @param command The command to run. + * @param args List of string arguments. 
+ */ + function spawnSync(command: string): SpawnSyncReturns; + function spawnSync(command: string, options: SpawnSyncOptionsWithStringEncoding): SpawnSyncReturns; + function spawnSync(command: string, options: SpawnSyncOptionsWithBufferEncoding): SpawnSyncReturns; + function spawnSync(command: string, options?: SpawnSyncOptions): SpawnSyncReturns; + function spawnSync(command: string, args: ReadonlyArray): SpawnSyncReturns; + function spawnSync(command: string, args: ReadonlyArray, options: SpawnSyncOptionsWithStringEncoding): SpawnSyncReturns; + function spawnSync(command: string, args: ReadonlyArray, options: SpawnSyncOptionsWithBufferEncoding): SpawnSyncReturns; + function spawnSync(command: string, args?: ReadonlyArray, options?: SpawnSyncOptions): SpawnSyncReturns; + interface CommonExecOptions extends CommonOptions { + input?: string | NodeJS.ArrayBufferView | undefined; + stdio?: StdioOptions | undefined; + killSignal?: NodeJS.Signals | number | undefined; + maxBuffer?: number | undefined; + encoding?: BufferEncoding | 'buffer' | null | undefined; + } + interface ExecSyncOptions extends CommonExecOptions { + shell?: string | undefined; + } + interface ExecSyncOptionsWithStringEncoding extends ExecSyncOptions { + encoding: BufferEncoding; + } + interface ExecSyncOptionsWithBufferEncoding extends ExecSyncOptions { + encoding?: 'buffer' | null | undefined; + } + /** + * The `child_process.execSync()` method is generally identical to {@link exec} with the exception that the method will not return + * until the child process has fully closed. When a timeout has been encountered + * and `killSignal` is sent, the method won't return until the process has + * completely exited. If the child process intercepts and handles the `SIGTERM`signal and doesn't exit, the parent process will wait until the child process + * has exited. + * + * If the process times out or has a non-zero exit code, this method will throw. + * The `Error` object will contain the entire result from {@link spawnSync}. + * + * **Never pass unsanitized user input to this function. Any input containing shell** + * **metacharacters may be used to trigger arbitrary command execution.** + * @since v0.11.12 + * @param command The command to run. + * @return The stdout from the command. + */ + function execSync(command: string): Buffer; + function execSync(command: string, options: ExecSyncOptionsWithStringEncoding): string; + function execSync(command: string, options: ExecSyncOptionsWithBufferEncoding): Buffer; + function execSync(command: string, options?: ExecSyncOptions): string | Buffer; + interface ExecFileSyncOptions extends CommonExecOptions { + shell?: boolean | string | undefined; + } + interface ExecFileSyncOptionsWithStringEncoding extends ExecFileSyncOptions { + encoding: BufferEncoding; + } + interface ExecFileSyncOptionsWithBufferEncoding extends ExecFileSyncOptions { + encoding?: 'buffer' | null; // specify `null`. + } + /** + * The `child_process.execFileSync()` method is generally identical to {@link execFile} with the exception that the method will not + * return until the child process has fully closed. When a timeout has been + * encountered and `killSignal` is sent, the method won't return until the process + * has completely exited. + * + * If the child process intercepts and handles the `SIGTERM` signal and + * does not exit, the parent process will still wait until the child process has + * exited. 
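To make the synchronous variants above concrete, here is a short hedged TypeScript sketch of how the `encoding` option drives which overload is picked; `node --version` is again just an example command:

```ts
import { execSync, spawnSync } from 'child_process';

// execSync() returns a Buffer unless a BufferEncoding is supplied.
const rawOut: Buffer = execSync('node --version');
const textOut: string = execSync('node --version', { encoding: 'utf8' });
console.log(textOut.trim(), rawOut.byteLength);

// spawnSync() follows the same pattern: stdout/stderr are Buffers by default
// and strings once an encoding such as 'utf8' is given.
const result = spawnSync('node', ['--version'], { encoding: 'utf8' });
if (result.error) throw result.error;
console.log(result.status, result.stdout.trim());
```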
+ * + * If the process times out or has a non-zero exit code, this method will throw an `Error` that will include the full result of the underlying {@link spawnSync}. + * + * **If the `shell` option is enabled, do not pass unsanitized user input to this** + * **function. Any input containing shell metacharacters may be used to trigger** + * **arbitrary command execution.** + * @since v0.11.12 + * @param file The name or path of the executable file to run. + * @param args List of string arguments. + * @return The stdout from the command. + */ + function execFileSync(file: string): Buffer; + function execFileSync(file: string, options: ExecFileSyncOptionsWithStringEncoding): string; + function execFileSync(file: string, options: ExecFileSyncOptionsWithBufferEncoding): Buffer; + function execFileSync(file: string, options?: ExecFileSyncOptions): string | Buffer; + function execFileSync(file: string, args: ReadonlyArray): Buffer; + function execFileSync(file: string, args: ReadonlyArray, options: ExecFileSyncOptionsWithStringEncoding): string; + function execFileSync(file: string, args: ReadonlyArray, options: ExecFileSyncOptionsWithBufferEncoding): Buffer; + function execFileSync(file: string, args?: ReadonlyArray, options?: ExecFileSyncOptions): string | Buffer; +} +declare module 'node:child_process' { + export * from 'child_process'; +} diff --git a/node_modules/@types/node/cluster.d.ts b/node_modules/@types/node/cluster.d.ts new file mode 100755 index 0000000..37dbc57 --- /dev/null +++ b/node_modules/@types/node/cluster.d.ts @@ -0,0 +1,410 @@ +/** + * Clusters of Node.js processes can be used to run multiple instances of Node.js + * that can distribute workloads among their application threads. When process + * isolation is not needed, use the `worker_threads` module instead, which + * allows running multiple application threads within a single Node.js instance. + * + * The cluster module allows easy creation of child processes that all share + * server ports. + * + * ```js + * import cluster from 'cluster'; + * import http from 'http'; + * import { cpus } from 'os'; + * import process from 'process'; + * + * const numCPUs = cpus().length; + * + * if (cluster.isPrimary) { + * console.log(`Primary ${process.pid} is running`); + * + * // Fork workers. + * for (let i = 0; i < numCPUs; i++) { + * cluster.fork(); + * } + * + * cluster.on('exit', (worker, code, signal) => { + * console.log(`worker ${worker.process.pid} died`); + * }); + * } else { + * // Workers can share any TCP connection + * // In this case it is an HTTP server + * http.createServer((req, res) => { + * res.writeHead(200); + * res.end('hello world\n'); + * }).listen(8000); + * + * console.log(`Worker ${process.pid} started`); + * } + * ``` + * + * Running Node.js will now share port 8000 between the workers: + * + * ```console + * $ node server.js + * Primary 3596 is running + * Worker 4324 started + * Worker 4520 started + * Worker 6056 started + * Worker 5644 started + * ``` + * + * On Windows, it is not yet possible to set up a named pipe server in a worker. 
+ * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/cluster.js) + */ +declare module 'cluster' { + import * as child from 'node:child_process'; + import EventEmitter = require('node:events'); + import * as net from 'node:net'; + export interface ClusterSettings { + execArgv?: string[] | undefined; // default: process.execArgv + exec?: string | undefined; + args?: string[] | undefined; + silent?: boolean | undefined; + stdio?: any[] | undefined; + uid?: number | undefined; + gid?: number | undefined; + inspectPort?: number | (() => number) | undefined; + } + export interface Address { + address: string; + port: number; + addressType: number | 'udp4' | 'udp6'; // 4, 6, -1, "udp4", "udp6" + } + /** + * A `Worker` object contains all public information and method about a worker. + * In the primary it can be obtained using `cluster.workers`. In a worker + * it can be obtained using `cluster.worker`. + * @since v0.7.0 + */ + export class Worker extends EventEmitter { + /** + * Each new worker is given its own unique id, this id is stored in the`id`. + * + * While a worker is alive, this is the key that indexes it in`cluster.workers`. + * @since v0.8.0 + */ + id: number; + /** + * All workers are created using `child_process.fork()`, the returned object + * from this function is stored as `.process`. In a worker, the global `process`is stored. + * + * See: `Child Process module`. + * + * Workers will call `process.exit(0)` if the `'disconnect'` event occurs + * on `process` and `.exitedAfterDisconnect` is not `true`. This protects against + * accidental disconnection. + * @since v0.7.0 + */ + process: child.ChildProcess; + /** + * Send a message to a worker or primary, optionally with a handle. + * + * In the primary, this sends a message to a specific worker. It is identical to `ChildProcess.send()`. + * + * In a worker, this sends a message to the primary. It is identical to`process.send()`. + * + * This example will echo back all messages from the primary: + * + * ```js + * if (cluster.isPrimary) { + * const worker = cluster.fork(); + * worker.send('hi there'); + * + * } else if (cluster.isWorker) { + * process.on('message', (msg) => { + * process.send(msg); + * }); + * } + * ``` + * @since v0.7.0 + * @param options The `options` argument, if present, is an object used to parameterize the sending of certain types of handles. `options` supports the following properties: + */ + send(message: child.Serializable, callback?: (error: Error | null) => void): boolean; + send(message: child.Serializable, sendHandle: child.SendHandle, callback?: (error: Error | null) => void): boolean; + send(message: child.Serializable, sendHandle: child.SendHandle, options?: child.MessageOptions, callback?: (error: Error | null) => void): boolean; + /** + * This function will kill the worker. In the primary worker, it does this by + * disconnecting the `worker.process`, and once disconnected, killing with`signal`. In the worker, it does it by killing the process with `signal`. + * + * The `kill()` function kills the worker process without waiting for a graceful + * disconnect, it has the same behavior as `worker.process.kill()`. + * + * This method is aliased as `worker.destroy()` for backwards compatibility. + * + * In a worker, `process.kill()` exists, but it is not this function; + * it is `kill()`. + * @since v0.9.12 + * @param [signal='SIGTERM'] Name of the kill signal to send to the worker process. 
+ */ + kill(signal?: string): void; + destroy(signal?: string): void; + /** + * In a worker, this function will close all servers, wait for the `'close'` event + * on those servers, and then disconnect the IPC channel. + * + * In the primary, an internal message is sent to the worker causing it to call`.disconnect()` on itself. + * + * Causes `.exitedAfterDisconnect` to be set. + * + * After a server is closed, it will no longer accept new connections, + * but connections may be accepted by any other listening worker. Existing + * connections will be allowed to close as usual. When no more connections exist, + * see `server.close()`, the IPC channel to the worker will close allowing it + * to die gracefully. + * + * The above applies _only_ to server connections, client connections are not + * automatically closed by workers, and disconnect does not wait for them to close + * before exiting. + * + * In a worker, `process.disconnect` exists, but it is not this function; + * it is `disconnect()`. + * + * Because long living server connections may block workers from disconnecting, it + * may be useful to send a message, so application specific actions may be taken to + * close them. It also may be useful to implement a timeout, killing a worker if + * the `'disconnect'` event has not been emitted after some time. + * + * ```js + * if (cluster.isPrimary) { + * const worker = cluster.fork(); + * let timeout; + * + * worker.on('listening', (address) => { + * worker.send('shutdown'); + * worker.disconnect(); + * timeout = setTimeout(() => { + * worker.kill(); + * }, 2000); + * }); + * + * worker.on('disconnect', () => { + * clearTimeout(timeout); + * }); + * + * } else if (cluster.isWorker) { + * const net = require('net'); + * const server = net.createServer((socket) => { + * // Connections never end + * }); + * + * server.listen(8000); + * + * process.on('message', (msg) => { + * if (msg === 'shutdown') { + * // Initiate graceful close of any connections to server + * } + * }); + * } + * ``` + * @since v0.7.7 + * @return A reference to `worker`. + */ + disconnect(): void; + /** + * This function returns `true` if the worker is connected to its primary via its + * IPC channel, `false` otherwise. A worker is connected to its primary after it + * has been created. It is disconnected after the `'disconnect'` event is emitted. + * @since v0.11.14 + */ + isConnected(): boolean; + /** + * This function returns `true` if the worker's process has terminated (either + * because of exiting or being signaled). Otherwise, it returns `false`. + * + * ```js + * import cluster from 'cluster'; + * import http from 'http'; + * import { cpus } from 'os'; + * import process from 'process'; + * + * const numCPUs = cpus().length; + * + * if (cluster.isPrimary) { + * console.log(`Primary ${process.pid} is running`); + * + * // Fork workers. + * for (let i = 0; i < numCPUs; i++) { + * cluster.fork(); + * } + * + * cluster.on('fork', (worker) => { + * console.log('worker is dead:', worker.isDead()); + * }); + * + * cluster.on('exit', (worker, code, signal) => { + * console.log('worker is dead:', worker.isDead()); + * }); + * } else { + * // Workers can share any TCP connection. In this case, it is an HTTP server. + * http.createServer((req, res) => { + * res.writeHead(200); + * res.end(`Current process\n ${process.pid}`); + * process.kill(process.pid); + * }).listen(8000); + * } + * ``` + * @since v0.11.14 + */ + isDead(): boolean; + /** + * This property is `true` if the worker exited due to `.disconnect()`. 
+ * If the worker exited any other way, it is `false`. If the + * worker has not exited, it is `undefined`. + * + * The boolean `worker.exitedAfterDisconnect` allows distinguishing between + * voluntary and accidental exit, the primary may choose not to respawn a worker + * based on this value. + * + * ```js + * cluster.on('exit', (worker, code, signal) => { + * if (worker.exitedAfterDisconnect === true) { + * console.log('Oh, it was just voluntary – no need to worry'); + * } + * }); + * + * // kill worker + * worker.kill(); + * ``` + * @since v6.0.0 + */ + exitedAfterDisconnect: boolean; + /** + * events.EventEmitter + * 1. disconnect + * 2. error + * 3. exit + * 4. listening + * 5. message + * 6. online + */ + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: 'disconnect', listener: () => void): this; + addListener(event: 'error', listener: (error: Error) => void): this; + addListener(event: 'exit', listener: (code: number, signal: string) => void): this; + addListener(event: 'listening', listener: (address: Address) => void): this; + addListener(event: 'message', listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined. + addListener(event: 'online', listener: () => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: 'disconnect'): boolean; + emit(event: 'error', error: Error): boolean; + emit(event: 'exit', code: number, signal: string): boolean; + emit(event: 'listening', address: Address): boolean; + emit(event: 'message', message: any, handle: net.Socket | net.Server): boolean; + emit(event: 'online'): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: 'disconnect', listener: () => void): this; + on(event: 'error', listener: (error: Error) => void): this; + on(event: 'exit', listener: (code: number, signal: string) => void): this; + on(event: 'listening', listener: (address: Address) => void): this; + on(event: 'message', listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined. + on(event: 'online', listener: () => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: 'disconnect', listener: () => void): this; + once(event: 'error', listener: (error: Error) => void): this; + once(event: 'exit', listener: (code: number, signal: string) => void): this; + once(event: 'listening', listener: (address: Address) => void): this; + once(event: 'message', listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined. + once(event: 'online', listener: () => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: 'disconnect', listener: () => void): this; + prependListener(event: 'error', listener: (error: Error) => void): this; + prependListener(event: 'exit', listener: (code: number, signal: string) => void): this; + prependListener(event: 'listening', listener: (address: Address) => void): this; + prependListener(event: 'message', listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined. 
+ prependListener(event: 'online', listener: () => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: 'disconnect', listener: () => void): this; + prependOnceListener(event: 'error', listener: (error: Error) => void): this; + prependOnceListener(event: 'exit', listener: (code: number, signal: string) => void): this; + prependOnceListener(event: 'listening', listener: (address: Address) => void): this; + prependOnceListener(event: 'message', listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined. + prependOnceListener(event: 'online', listener: () => void): this; + } + export interface Cluster extends EventEmitter { + disconnect(callback?: () => void): void; + fork(env?: any): Worker; + /** @deprecated since v16.0.0 - use isPrimary. */ + readonly isMaster: boolean; + readonly isPrimary: boolean; + readonly isWorker: boolean; + schedulingPolicy: number; + readonly settings: ClusterSettings; + /** @deprecated since v16.0.0 - use setupPrimary. */ + setupMaster(settings?: ClusterSettings): void; + /** + * `setupPrimary` is used to change the default 'fork' behavior. Once called, the settings will be present in cluster.settings. + */ + setupPrimary(settings?: ClusterSettings): void; + readonly worker?: Worker | undefined; + readonly workers?: NodeJS.Dict | undefined; + readonly SCHED_NONE: number; + readonly SCHED_RR: number; + /** + * events.EventEmitter + * 1. disconnect + * 2. exit + * 3. fork + * 4. listening + * 5. message + * 6. online + * 7. setup + */ + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: 'disconnect', listener: (worker: Worker) => void): this; + addListener(event: 'exit', listener: (worker: Worker, code: number, signal: string) => void): this; + addListener(event: 'fork', listener: (worker: Worker) => void): this; + addListener(event: 'listening', listener: (worker: Worker, address: Address) => void): this; + addListener(event: 'message', listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined. + addListener(event: 'online', listener: (worker: Worker) => void): this; + addListener(event: 'setup', listener: (settings: ClusterSettings) => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: 'disconnect', worker: Worker): boolean; + emit(event: 'exit', worker: Worker, code: number, signal: string): boolean; + emit(event: 'fork', worker: Worker): boolean; + emit(event: 'listening', worker: Worker, address: Address): boolean; + emit(event: 'message', worker: Worker, message: any, handle: net.Socket | net.Server): boolean; + emit(event: 'online', worker: Worker): boolean; + emit(event: 'setup', settings: ClusterSettings): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: 'disconnect', listener: (worker: Worker) => void): this; + on(event: 'exit', listener: (worker: Worker, code: number, signal: string) => void): this; + on(event: 'fork', listener: (worker: Worker) => void): this; + on(event: 'listening', listener: (worker: Worker, address: Address) => void): this; + on(event: 'message', listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined. 
+ on(event: 'online', listener: (worker: Worker) => void): this; + on(event: 'setup', listener: (settings: ClusterSettings) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: 'disconnect', listener: (worker: Worker) => void): this; + once(event: 'exit', listener: (worker: Worker, code: number, signal: string) => void): this; + once(event: 'fork', listener: (worker: Worker) => void): this; + once(event: 'listening', listener: (worker: Worker, address: Address) => void): this; + once(event: 'message', listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined. + once(event: 'online', listener: (worker: Worker) => void): this; + once(event: 'setup', listener: (settings: ClusterSettings) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: 'disconnect', listener: (worker: Worker) => void): this; + prependListener(event: 'exit', listener: (worker: Worker, code: number, signal: string) => void): this; + prependListener(event: 'fork', listener: (worker: Worker) => void): this; + prependListener(event: 'listening', listener: (worker: Worker, address: Address) => void): this; + // the handle is a net.Socket or net.Server object, or undefined. + prependListener(event: 'message', listener: (worker: Worker, message: any, handle?: net.Socket | net.Server) => void): this; + prependListener(event: 'online', listener: (worker: Worker) => void): this; + prependListener(event: 'setup', listener: (settings: ClusterSettings) => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: 'disconnect', listener: (worker: Worker) => void): this; + prependOnceListener(event: 'exit', listener: (worker: Worker, code: number, signal: string) => void): this; + prependOnceListener(event: 'fork', listener: (worker: Worker) => void): this; + prependOnceListener(event: 'listening', listener: (worker: Worker, address: Address) => void): this; + // the handle is a net.Socket or net.Server object, or undefined. + prependOnceListener(event: 'message', listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): this; + prependOnceListener(event: 'online', listener: (worker: Worker) => void): this; + prependOnceListener(event: 'setup', listener: (settings: ClusterSettings) => void): this; + } + const cluster: Cluster; + export default cluster; +} +declare module 'node:cluster' { + export * from 'cluster'; + export { default as default } from 'cluster'; +} diff --git a/node_modules/@types/node/console.d.ts b/node_modules/@types/node/console.d.ts new file mode 100755 index 0000000..16c9137 --- /dev/null +++ b/node_modules/@types/node/console.d.ts @@ -0,0 +1,412 @@ +/** + * The `console` module provides a simple debugging console that is similar to the + * JavaScript console mechanism provided by web browsers. + * + * The module exports two specific components: + * + * * A `Console` class with methods such as `console.log()`, `console.error()` and`console.warn()` that can be used to write to any Node.js stream. + * * A global `console` instance configured to write to `process.stdout` and `process.stderr`. The global `console` can be used without calling`require('console')`. 
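Referring back to the `cluster` typings added just above: the `Worker` class and the `Cluster` interface type both sides of the IPC channel. A small illustrative TypeScript sketch, assuming `esModuleInterop`; the message shapes are arbitrary placeholders:

```ts
import cluster, { Worker } from 'cluster';
import process from 'process';

if (cluster.isPrimary) {
    const worker: Worker = cluster.fork();

    worker.on('online', () => worker.send({ cmd: 'start' }));
    worker.on('message', (msg) => console.log('from worker:', msg));
    worker.on('exit', (code, signal) => {
        // exitedAfterDisconnect distinguishes voluntary from accidental exits.
        console.log('worker exited', { code, signal, voluntary: worker.exitedAfterDisconnect });
    });
} else {
    process.on('message', (msg) => {
        process.send?.({ echo: msg });
        cluster.worker?.disconnect();
    });
}
```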
+ * + * _**Warning**_: The global console object's methods are neither consistently + * synchronous like the browser APIs they resemble, nor are they consistently + * asynchronous like all other Node.js streams. See the `note on process I/O` for + * more information. + * + * Example using the global `console`: + * + * ```js + * console.log('hello world'); + * // Prints: hello world, to stdout + * console.log('hello %s', 'world'); + * // Prints: hello world, to stdout + * console.error(new Error('Whoops, something bad happened')); + * // Prints error message and stack trace to stderr: + * // Error: Whoops, something bad happened + * // at [eval]:5:15 + * // at Script.runInThisContext (node:vm:132:18) + * // at Object.runInThisContext (node:vm:309:38) + * // at node:internal/process/execution:77:19 + * // at [eval]-wrapper:6:22 + * // at evalScript (node:internal/process/execution:76:60) + * // at node:internal/main/eval_string:23:3 + * + * const name = 'Will Robinson'; + * console.warn(`Danger ${name}! Danger!`); + * // Prints: Danger Will Robinson! Danger!, to stderr + * ``` + * + * Example using the `Console` class: + * + * ```js + * const out = getStreamSomehow(); + * const err = getStreamSomehow(); + * const myConsole = new console.Console(out, err); + * + * myConsole.log('hello world'); + * // Prints: hello world, to out + * myConsole.log('hello %s', 'world'); + * // Prints: hello world, to out + * myConsole.error(new Error('Whoops, something bad happened')); + * // Prints: [Error: Whoops, something bad happened], to err + * + * const name = 'Will Robinson'; + * myConsole.warn(`Danger ${name}! Danger!`); + * // Prints: Danger Will Robinson! Danger!, to err + * ``` + * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/console.js) + */ +declare module 'console' { + import console = require('node:console'); + export = console; +} +declare module 'node:console' { + import { InspectOptions } from 'node:util'; + global { + // This needs to be global to avoid TS2403 in case lib.dom.d.ts is present in the same build + interface Console { + Console: console.ConsoleConstructor; + /** + * `console.assert()` writes a message if `value` is [falsy](https://developer.mozilla.org/en-US/docs/Glossary/Falsy) or omitted. It only + * writes a message and does not otherwise affect execution. The output always + * starts with `"Assertion failed"`. If provided, `message` is formatted using `util.format()`. + * + * If `value` is [truthy](https://developer.mozilla.org/en-US/docs/Glossary/Truthy), nothing happens. + * + * ```js + * console.assert(true, 'does nothing'); + * + * console.assert(false, 'Whoops %s work', 'didn\'t'); + * // Assertion failed: Whoops didn't work + * + * console.assert(); + * // Assertion failed + * ``` + * @since v0.1.101 + * @param value The value tested for being truthy. + * @param message All arguments besides `value` are used as error message. + */ + assert(value: any, message?: string, ...optionalParams: any[]): void; + /** + * When `stdout` is a TTY, calling `console.clear()` will attempt to clear the + * TTY. When `stdout` is not a TTY, this method does nothing. + * + * The specific operation of `console.clear()` can vary across operating systems + * and terminal types. For most Linux operating systems, `console.clear()`operates similarly to the `clear` shell command. On Windows, `console.clear()`will clear only the output in the + * current terminal viewport for the Node.js + * binary. 
+ * @since v8.3.0 + */ + clear(): void; + /** + * Maintains an internal counter specific to `label` and outputs to `stdout` the + * number of times `console.count()` has been called with the given `label`. + * + * ```js + * > console.count() + * default: 1 + * undefined + * > console.count('default') + * default: 2 + * undefined + * > console.count('abc') + * abc: 1 + * undefined + * > console.count('xyz') + * xyz: 1 + * undefined + * > console.count('abc') + * abc: 2 + * undefined + * > console.count() + * default: 3 + * undefined + * > + * ``` + * @since v8.3.0 + * @param label The display label for the counter. + */ + count(label?: string): void; + /** + * Resets the internal counter specific to `label`. + * + * ```js + * > console.count('abc'); + * abc: 1 + * undefined + * > console.countReset('abc'); + * undefined + * > console.count('abc'); + * abc: 1 + * undefined + * > + * ``` + * @since v8.3.0 + * @param label The display label for the counter. + */ + countReset(label?: string): void; + /** + * The `console.debug()` function is an alias for {@link log}. + * @since v8.0.0 + */ + debug(message?: any, ...optionalParams: any[]): void; + /** + * Uses `util.inspect()` on `obj` and prints the resulting string to `stdout`. + * This function bypasses any custom `inspect()` function defined on `obj`. + * @since v0.1.101 + */ + dir(obj: any, options?: InspectOptions): void; + /** + * This method calls `console.log()` passing it the arguments received. + * This method does not produce any XML formatting. + * @since v8.0.0 + */ + dirxml(...data: any[]): void; + /** + * Prints to `stderr` with newline. Multiple arguments can be passed, with the + * first used as the primary message and all additional used as substitution + * values similar to [`printf(3)`](http://man7.org/linux/man-pages/man3/printf.3.html) (the arguments are all passed to `util.format()`). + * + * ```js + * const code = 5; + * console.error('error #%d', code); + * // Prints: error #5, to stderr + * console.error('error', code); + * // Prints: error 5, to stderr + * ``` + * + * If formatting elements (e.g. `%d`) are not found in the first string then `util.inspect()` is called on each argument and the resulting string + * values are concatenated. See `util.format()` for more information. + * @since v0.1.100 + */ + error(message?: any, ...optionalParams: any[]): void; + /** + * Increases indentation of subsequent lines by spaces for `groupIndentation`length. + * + * If one or more `label`s are provided, those are printed first without the + * additional indentation. + * @since v8.5.0 + */ + group(...label: any[]): void; + /** + * An alias for {@link group}. + * @since v8.5.0 + */ + groupCollapsed(...label: any[]): void; + /** + * Decreases indentation of subsequent lines by spaces for `groupIndentation`length. + * @since v8.5.0 + */ + groupEnd(): void; + /** + * The `console.info()` function is an alias for {@link log}. + * @since v0.1.100 + */ + info(message?: any, ...optionalParams: any[]): void; + /** + * Prints to `stdout` with newline. Multiple arguments can be passed, with the + * first used as the primary message and all additional used as substitution + * values similar to [`printf(3)`](http://man7.org/linux/man-pages/man3/printf.3.html) (the arguments are all passed to `util.format()`). + * + * ```js + * const count = 5; + * console.log('count: %d', count); + * // Prints: count: 5, to stdout + * console.log('count:', count); + * // Prints: count: 5, to stdout + * ``` + * + * See `util.format()` for more information. 
+ * @since v0.1.100 + */ + log(message?: any, ...optionalParams: any[]): void; + /** + * Try to construct a table with the columns of the properties of `tabularData`(or use `properties`) and rows of `tabularData` and log it. Falls back to just + * logging the argument if it can’t be parsed as tabular. + * + * ```js + * // These can't be parsed as tabular data + * console.table(Symbol()); + * // Symbol() + * + * console.table(undefined); + * // undefined + * + * console.table([{ a: 1, b: 'Y' }, { a: 'Z', b: 2 }]); + * // ┌─────────┬─────┬─────┐ + * // │ (index) │ a │ b │ + * // ├─────────┼─────┼─────┤ + * // │ 0 │ 1 │ 'Y' │ + * // │ 1 │ 'Z' │ 2 │ + * // └─────────┴─────┴─────┘ + * + * console.table([{ a: 1, b: 'Y' }, { a: 'Z', b: 2 }], ['a']); + * // ┌─────────┬─────┐ + * // │ (index) │ a │ + * // ├─────────┼─────┤ + * // │ 0 │ 1 │ + * // │ 1 │ 'Z' │ + * // └─────────┴─────┘ + * ``` + * @since v10.0.0 + * @param properties Alternate properties for constructing the table. + */ + table(tabularData: any, properties?: ReadonlyArray): void; + /** + * Starts a timer that can be used to compute the duration of an operation. Timers + * are identified by a unique `label`. Use the same `label` when calling {@link timeEnd} to stop the timer and output the elapsed time in + * suitable time units to `stdout`. For example, if the elapsed + * time is 3869ms, `console.timeEnd()` displays "3.869s". + * @since v0.1.104 + */ + time(label?: string): void; + /** + * Stops a timer that was previously started by calling {@link time} and + * prints the result to `stdout`: + * + * ```js + * console.time('100-elements'); + * for (let i = 0; i < 100; i++) {} + * console.timeEnd('100-elements'); + * // prints 100-elements: 225.438ms + * ``` + * @since v0.1.104 + */ + timeEnd(label?: string): void; + /** + * For a timer that was previously started by calling {@link time}, prints + * the elapsed time and other `data` arguments to `stdout`: + * + * ```js + * console.time('process'); + * const value = expensiveProcess1(); // Returns 42 + * console.timeLog('process', value); + * // Prints "process: 365.227ms 42". + * doExpensiveProcess2(value); + * console.timeEnd('process'); + * ``` + * @since v10.7.0 + */ + timeLog(label?: string, ...data: any[]): void; + /** + * Prints to `stderr` the string `'Trace: '`, followed by the `util.format()` formatted message and stack trace to the current position in the code. + * + * ```js + * console.trace('Show me'); + * // Prints: (stack trace will vary based on where trace is called) + * // Trace: Show me + * // at repl:2:9 + * // at REPLServer.defaultEval (repl.js:248:27) + * // at bound (domain.js:287:14) + * // at REPLServer.runBound [as eval] (domain.js:300:12) + * // at REPLServer. (repl.js:412:12) + * // at emitOne (events.js:82:20) + * // at REPLServer.emit (events.js:169:7) + * // at REPLServer.Interface._onLine (readline.js:210:10) + * // at REPLServer.Interface._line (readline.js:549:8) + * // at REPLServer.Interface._ttyWrite (readline.js:826:14) + * ``` + * @since v0.1.104 + */ + trace(message?: any, ...optionalParams: any[]): void; + /** + * The `console.warn()` function is an alias for {@link error}. + * @since v0.1.100 + */ + warn(message?: any, ...optionalParams: any[]): void; + // --- Inspector mode only --- + /** + * This method does not display anything unless used in the inspector. + * Starts a JavaScript CPU profile with an optional label. + */ + profile(label?: string): void; + /** + * This method does not display anything unless used in the inspector. 
+ * Stops the current JavaScript CPU profiling session if one has been started and prints the report to the Profiles panel of the inspector. + */ + profileEnd(label?: string): void; + /** + * This method does not display anything unless used in the inspector. + * Adds an event with the label `label` to the Timeline panel of the inspector. + */ + timeStamp(label?: string): void; + } + /** + * The `console` module provides a simple debugging console that is similar to the + * JavaScript console mechanism provided by web browsers. + * + * The module exports two specific components: + * + * * A `Console` class with methods such as `console.log()`, `console.error()` and`console.warn()` that can be used to write to any Node.js stream. + * * A global `console` instance configured to write to `process.stdout` and `process.stderr`. The global `console` can be used without calling`require('console')`. + * + * _**Warning**_: The global console object's methods are neither consistently + * synchronous like the browser APIs they resemble, nor are they consistently + * asynchronous like all other Node.js streams. See the `note on process I/O` for + * more information. + * + * Example using the global `console`: + * + * ```js + * console.log('hello world'); + * // Prints: hello world, to stdout + * console.log('hello %s', 'world'); + * // Prints: hello world, to stdout + * console.error(new Error('Whoops, something bad happened')); + * // Prints error message and stack trace to stderr: + * // Error: Whoops, something bad happened + * // at [eval]:5:15 + * // at Script.runInThisContext (node:vm:132:18) + * // at Object.runInThisContext (node:vm:309:38) + * // at node:internal/process/execution:77:19 + * // at [eval]-wrapper:6:22 + * // at evalScript (node:internal/process/execution:76:60) + * // at node:internal/main/eval_string:23:3 + * + * const name = 'Will Robinson'; + * console.warn(`Danger ${name}! Danger!`); + * // Prints: Danger Will Robinson! Danger!, to stderr + * ``` + * + * Example using the `Console` class: + * + * ```js + * const out = getStreamSomehow(); + * const err = getStreamSomehow(); + * const myConsole = new console.Console(out, err); + * + * myConsole.log('hello world'); + * // Prints: hello world, to out + * myConsole.log('hello %s', 'world'); + * // Prints: hello world, to out + * myConsole.error(new Error('Whoops, something bad happened')); + * // Prints: [Error: Whoops, something bad happened], to err + * + * const name = 'Will Robinson'; + * myConsole.warn(`Danger ${name}! Danger!`); + * // Prints: Danger Will Robinson! 
Danger!, to err + * ``` + * @see [source](https://github.com/nodejs/node/blob/v16.4.2/lib/console.js) + */ + namespace console { + interface ConsoleConstructorOptions { + stdout: NodeJS.WritableStream; + stderr?: NodeJS.WritableStream | undefined; + ignoreErrors?: boolean | undefined; + colorMode?: boolean | 'auto' | undefined; + inspectOptions?: InspectOptions | undefined; + /** + * Set group indentation + * @default 2 + */ + groupIndentation?: number | undefined; + } + interface ConsoleConstructor { + prototype: Console; + new (stdout: NodeJS.WritableStream, stderr?: NodeJS.WritableStream, ignoreErrors?: boolean): Console; + new (options: ConsoleConstructorOptions): Console; + } + } + var console: Console; + } + export = globalThis.console; +} diff --git a/node_modules/@types/node/constants.d.ts b/node_modules/@types/node/constants.d.ts new file mode 100755 index 0000000..208020d --- /dev/null +++ b/node_modules/@types/node/constants.d.ts @@ -0,0 +1,18 @@ +/** @deprecated since v6.3.0 - use constants property exposed by the relevant module instead. */ +declare module 'constants' { + import { constants as osConstants, SignalConstants } from 'node:os'; + import { constants as cryptoConstants } from 'node:crypto'; + import { constants as fsConstants } from 'node:fs'; + + const exp: typeof osConstants.errno & + typeof osConstants.priority & + SignalConstants & + typeof cryptoConstants & + typeof fsConstants; + export = exp; +} + +declare module 'node:constants' { + import constants = require('constants'); + export = constants; +} diff --git a/node_modules/@types/node/crypto.d.ts b/node_modules/@types/node/crypto.d.ts new file mode 100755 index 0000000..4f61974 --- /dev/null +++ b/node_modules/@types/node/crypto.d.ts @@ -0,0 +1,3964 @@ +/** + * The `crypto` module provides cryptographic functionality that includes a set of + * wrappers for OpenSSL's hash, HMAC, cipher, decipher, sign, and verify functions. + * + * ```js + * const { createHmac } = await import('crypto'); + * + * const secret = 'abcdefg'; + * const hash = createHmac('sha256', secret) + * .update('I love cupcakes') + * .digest('hex'); + * console.log(hash); + * // Prints: + * // c0fa1bc00531bd78ef38c628449c5102aeabd49b5dc3a2a516ea6ea959d6658e + * ``` + * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/crypto.js) + */ +declare module 'crypto' { + import * as stream from 'node:stream'; + import { PeerCertificate } from 'node:tls'; + /** + * SPKAC is a Certificate Signing Request mechanism originally implemented by + * Netscape and was specified formally as part of [HTML5's `keygen` element](https://developer.mozilla.org/en-US/docs/Web/HTML/Element/keygen). + * + * `` is deprecated since [HTML 5.2](https://www.w3.org/TR/html52/changes.html#features-removed) and new projects + * should not use this element anymore. + * + * The `crypto` module provides the `Certificate` class for working with SPKAC + * data. The most common usage is handling output generated by the HTML5`` element. Node.js uses [OpenSSL's SPKAC + * implementation](https://www.openssl.org/docs/man1.1.0/apps/openssl-spkac.html) internally. + * @since v0.11.8 + */ + class Certificate { + /** + * ```js + * const { Certificate } = await import('crypto'); + * const spkac = getSpkacSomehow(); + * const challenge = Certificate.exportChallenge(spkac); + * console.log(challenge.toString('utf8')); + * // Prints: the challenge as a UTF8 string + * ``` + * @since v9.0.0 + * @param encoding The `encoding` of the `spkac` string. 
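For the `ConsoleConstructor` options declared above, a common concrete use is routing output to dedicated streams rather than `process.stdout`/`process.stderr`. A hedged TypeScript sketch; the log file paths are placeholders:

```ts
import { createWriteStream } from 'fs';

// Route log/error output to files instead of process.stdout/stderr.
const output = createWriteStream('./stdout.log');
const errorOutput = createWriteStream('./stderr.log');
const logger = new console.Console({ stdout: output, stderr: errorOutput, groupIndentation: 4 });

logger.log('written to stdout.log');
logger.error('written to stderr.log');
```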
+ * @return The challenge component of the `spkac` data structure, which includes a public key and a challenge. + */ + static exportChallenge(spkac: BinaryLike): Buffer; + /** + * ```js + * const { Certificate } = await import('crypto'); + * const spkac = getSpkacSomehow(); + * const publicKey = Certificate.exportPublicKey(spkac); + * console.log(publicKey); + * // Prints: the public key as + * ``` + * @since v9.0.0 + * @param encoding The `encoding` of the `spkac` string. + * @return The public key component of the `spkac` data structure, which includes a public key and a challenge. + */ + static exportPublicKey(spkac: BinaryLike, encoding?: string): Buffer; + /** + * ```js + * import { Buffer } from 'buffer'; + * const { Certificate } = await import('crypto'); + * + * const spkac = getSpkacSomehow(); + * console.log(Certificate.verifySpkac(Buffer.from(spkac))); + * // Prints: true or false + * ``` + * @since v9.0.0 + * @param encoding The `encoding` of the `spkac` string. + * @return `true` if the given `spkac` data structure is valid, `false` otherwise. + */ + static verifySpkac(spkac: NodeJS.ArrayBufferView): boolean; + /** + * @deprecated + * @param spkac + * @returns The challenge component of the `spkac` data structure, + * which includes a public key and a challenge. + */ + exportChallenge(spkac: BinaryLike): Buffer; + /** + * @deprecated + * @param spkac + * @param encoding The encoding of the spkac string. + * @returns The public key component of the `spkac` data structure, + * which includes a public key and a challenge. + */ + exportPublicKey(spkac: BinaryLike, encoding?: string): Buffer; + /** + * @deprecated + * @param spkac + * @returns `true` if the given `spkac` data structure is valid, + * `false` otherwise. + */ + verifySpkac(spkac: NodeJS.ArrayBufferView): boolean; + } + namespace constants { + // https://nodejs.org/dist/latest-v10.x/docs/api/crypto.html#crypto_crypto_constants + const OPENSSL_VERSION_NUMBER: number; + /** Applies multiple bug workarounds within OpenSSL. See https://www.openssl.org/docs/man1.0.2/ssl/SSL_CTX_set_options.html for detail. */ + const SSL_OP_ALL: number; + /** Allows legacy insecure renegotiation between OpenSSL and unpatched clients or servers. See https://www.openssl.org/docs/man1.0.2/ssl/SSL_CTX_set_options.html. */ + const SSL_OP_ALLOW_UNSAFE_LEGACY_RENEGOTIATION: number; + /** Attempts to use the server's preferences instead of the client's when selecting a cipher. See https://www.openssl.org/docs/man1.0.2/ssl/SSL_CTX_set_options.html. */ + const SSL_OP_CIPHER_SERVER_PREFERENCE: number; + /** Instructs OpenSSL to use Cisco's "speshul" version of DTLS_BAD_VER. */ + const SSL_OP_CISCO_ANYCONNECT: number; + /** Instructs OpenSSL to turn on cookie exchange. */ + const SSL_OP_COOKIE_EXCHANGE: number; + /** Instructs OpenSSL to add server-hello extension from an early version of the cryptopro draft. */ + const SSL_OP_CRYPTOPRO_TLSEXT_BUG: number; + /** Instructs OpenSSL to disable a SSL 3.0/TLS 1.0 vulnerability workaround added in OpenSSL 0.9.6d. */ + const SSL_OP_DONT_INSERT_EMPTY_FRAGMENTS: number; + /** Instructs OpenSSL to always use the tmp_rsa key when performing RSA operations. */ + const SSL_OP_EPHEMERAL_RSA: number; + /** Allows initial connection to servers that do not support RI. 
*/ + const SSL_OP_LEGACY_SERVER_CONNECT: number; + const SSL_OP_MICROSOFT_BIG_SSLV3_BUFFER: number; + const SSL_OP_MICROSOFT_SESS_ID_BUG: number; + /** Instructs OpenSSL to disable the workaround for a man-in-the-middle protocol-version vulnerability in the SSL 2.0 server implementation. */ + const SSL_OP_MSIE_SSLV2_RSA_PADDING: number; + const SSL_OP_NETSCAPE_CA_DN_BUG: number; + const SSL_OP_NETSCAPE_CHALLENGE_BUG: number; + const SSL_OP_NETSCAPE_DEMO_CIPHER_CHANGE_BUG: number; + const SSL_OP_NETSCAPE_REUSE_CIPHER_CHANGE_BUG: number; + /** Instructs OpenSSL to disable support for SSL/TLS compression. */ + const SSL_OP_NO_COMPRESSION: number; + const SSL_OP_NO_QUERY_MTU: number; + /** Instructs OpenSSL to always start a new session when performing renegotiation. */ + const SSL_OP_NO_SESSION_RESUMPTION_ON_RENEGOTIATION: number; + const SSL_OP_NO_SSLv2: number; + const SSL_OP_NO_SSLv3: number; + const SSL_OP_NO_TICKET: number; + const SSL_OP_NO_TLSv1: number; + const SSL_OP_NO_TLSv1_1: number; + const SSL_OP_NO_TLSv1_2: number; + const SSL_OP_PKCS1_CHECK_1: number; + const SSL_OP_PKCS1_CHECK_2: number; + /** Instructs OpenSSL to always create a new key when using temporary/ephemeral DH parameters. */ + const SSL_OP_SINGLE_DH_USE: number; + /** Instructs OpenSSL to always create a new key when using temporary/ephemeral ECDH parameters. */ + const SSL_OP_SINGLE_ECDH_USE: number; + const SSL_OP_SSLEAY_080_CLIENT_DH_BUG: number; + const SSL_OP_SSLREF2_REUSE_CERT_TYPE_BUG: number; + const SSL_OP_TLS_BLOCK_PADDING_BUG: number; + const SSL_OP_TLS_D5_BUG: number; + /** Instructs OpenSSL to disable version rollback attack detection. */ + const SSL_OP_TLS_ROLLBACK_BUG: number; + const ENGINE_METHOD_RSA: number; + const ENGINE_METHOD_DSA: number; + const ENGINE_METHOD_DH: number; + const ENGINE_METHOD_RAND: number; + const ENGINE_METHOD_EC: number; + const ENGINE_METHOD_CIPHERS: number; + const ENGINE_METHOD_DIGESTS: number; + const ENGINE_METHOD_PKEY_METHS: number; + const ENGINE_METHOD_PKEY_ASN1_METHS: number; + const ENGINE_METHOD_ALL: number; + const ENGINE_METHOD_NONE: number; + const DH_CHECK_P_NOT_SAFE_PRIME: number; + const DH_CHECK_P_NOT_PRIME: number; + const DH_UNABLE_TO_CHECK_GENERATOR: number; + const DH_NOT_SUITABLE_GENERATOR: number; + const ALPN_ENABLED: number; + const RSA_PKCS1_PADDING: number; + const RSA_SSLV23_PADDING: number; + const RSA_NO_PADDING: number; + const RSA_PKCS1_OAEP_PADDING: number; + const RSA_X931_PADDING: number; + const RSA_PKCS1_PSS_PADDING: number; + /** Sets the salt length for RSA_PKCS1_PSS_PADDING to the digest size when signing or verifying. */ + const RSA_PSS_SALTLEN_DIGEST: number; + /** Sets the salt length for RSA_PKCS1_PSS_PADDING to the maximum permissible value when signing data. */ + const RSA_PSS_SALTLEN_MAX_SIGN: number; + /** Causes the salt length for RSA_PKCS1_PSS_PADDING to be determined automatically when verifying a signature. */ + const RSA_PSS_SALTLEN_AUTO: number; + const POINT_CONVERSION_COMPRESSED: number; + const POINT_CONVERSION_UNCOMPRESSED: number; + const POINT_CONVERSION_HYBRID: number; + /** Specifies the built-in default cipher list used by Node.js (colon-separated values). */ + const defaultCoreCipherList: string; + /** Specifies the active default cipher list used by the current Node.js process (colon-separated values). 
*/ + const defaultCipherList: string; + } + interface HashOptions extends stream.TransformOptions { + /** + * For XOF hash functions such as `shake256`, the + * outputLength option can be used to specify the desired output length in bytes. + */ + outputLength?: number | undefined; + } + /** @deprecated since v10.0.0 */ + const fips: boolean; + /** + * Creates and returns a `Hash` object that can be used to generate hash digests + * using the given `algorithm`. Optional `options` argument controls stream + * behavior. For XOF hash functions such as `'shake256'`, the `outputLength` option + * can be used to specify the desired output length in bytes. + * + * The `algorithm` is dependent on the available algorithms supported by the + * version of OpenSSL on the platform. Examples are `'sha256'`, `'sha512'`, etc. + * On recent releases of OpenSSL, `openssl list -digest-algorithms` will + * display the available digest algorithms. + * + * Example: generating the sha256 sum of a file + * + * ```js + * import { + * createReadStream + * } from 'fs'; + * import { argv } from 'process'; + * const { + * createHash + * } = await import('crypto'); + * + * const filename = argv[2]; + * + * const hash = createHash('sha256'); + * + * const input = createReadStream(filename); + * input.on('readable', () => { + * // Only one element is going to be produced by the + * // hash stream. + * const data = input.read(); + * if (data) + * hash.update(data); + * else { + * console.log(`${hash.digest('hex')} ${filename}`); + * } + * }); + * ``` + * @since v0.1.92 + * @param options `stream.transform` options + */ + function createHash(algorithm: string, options?: HashOptions): Hash; + /** + * Creates and returns an `Hmac` object that uses the given `algorithm` and `key`. + * Optional `options` argument controls stream behavior. + * + * The `algorithm` is dependent on the available algorithms supported by the + * version of OpenSSL on the platform. Examples are `'sha256'`, `'sha512'`, etc. + * On recent releases of OpenSSL, `openssl list -digest-algorithms` will + * display the available digest algorithms. + * + * The `key` is the HMAC key used to generate the cryptographic HMAC hash. If it is + * a `KeyObject`, its type must be `secret`. + * + * Example: generating the sha256 HMAC of a file + * + * ```js + * import { + * createReadStream + * } from 'fs'; + * import { argv } from 'process'; + * const { + * createHmac + * } = await import('crypto'); + * + * const filename = argv[2]; + * + * const hmac = createHmac('sha256', 'a secret'); + * + * const input = createReadStream(filename); + * input.on('readable', () => { + * // Only one element is going to be produced by the + * // hash stream. 
+ * const data = input.read(); + * if (data) + * hmac.update(data); + * else { + * console.log(`${hmac.digest('hex')} ${filename}`); + * } + * }); + * ``` + * @since v0.1.94 + * @param options `stream.transform` options + */ + function createHmac(algorithm: string, key: BinaryLike | KeyObject, options?: stream.TransformOptions): Hmac; + // https://nodejs.org/api/buffer.html#buffer_buffers_and_character_encodings + type BinaryToTextEncoding = 'base64' | 'base64url' | 'hex' | 'binary'; + type CharacterEncoding = 'utf8' | 'utf-8' | 'utf16le' | 'latin1'; + type LegacyCharacterEncoding = 'ascii' | 'binary' | 'ucs2' | 'ucs-2'; + type Encoding = BinaryToTextEncoding | CharacterEncoding | LegacyCharacterEncoding; + type ECDHKeyFormat = 'compressed' | 'uncompressed' | 'hybrid'; + /** + * The `Hash` class is a utility for creating hash digests of data. It can be + * used in one of two ways: + * + * * As a `stream` that is both readable and writable, where data is written + * to produce a computed hash digest on the readable side, or + * * Using the `hash.update()` and `hash.digest()` methods to produce the + * computed hash. + * + * The {@link createHash} method is used to create `Hash` instances. `Hash`objects are not to be created directly using the `new` keyword. + * + * Example: Using `Hash` objects as streams: + * + * ```js + * const { + * createHash + * } = await import('crypto'); + * + * const hash = createHash('sha256'); + * + * hash.on('readable', () => { + * // Only one element is going to be produced by the + * // hash stream. + * const data = hash.read(); + * if (data) { + * console.log(data.toString('hex')); + * // Prints: + * // 6a2da20943931e9834fc12cfe5bb47bbd9ae43489a30726962b576f4e3993e50 + * } + * }); + * + * hash.write('some data to hash'); + * hash.end(); + * ``` + * + * Example: Using `Hash` and piped streams: + * + * ```js + * import { createReadStream } from 'fs'; + * import { stdout } from 'process'; + * const { createHash } = await import('crypto'); + * + * const hash = createHash('sha256'); + * + * const input = createReadStream('test.js'); + * input.pipe(hash).setEncoding('hex').pipe(stdout); + * ``` + * + * Example: Using the `hash.update()` and `hash.digest()` methods: + * + * ```js + * const { + * createHash + * } = await import('crypto'); + * + * const hash = createHash('sha256'); + * + * hash.update('some data to hash'); + * console.log(hash.digest('hex')); + * // Prints: + * // 6a2da20943931e9834fc12cfe5bb47bbd9ae43489a30726962b576f4e3993e50 + * ``` + * @since v0.1.92 + */ + class Hash extends stream.Transform { + private constructor(); + /** + * Creates a new `Hash` object that contains a deep copy of the internal state + * of the current `Hash` object. + * + * The optional `options` argument controls stream behavior. For XOF hash + * functions such as `'shake256'`, the `outputLength` option can be used to + * specify the desired output length in bytes. + * + * An error is thrown when an attempt is made to copy the `Hash` object after + * its `hash.digest()` method has been called. + * + * ```js + * // Calculate a rolling hash. + * const { + * createHash + * } = await import('crypto'); + * + * const hash = createHash('sha256'); + * + * hash.update('one'); + * console.log(hash.copy().digest('hex')); + * + * hash.update('two'); + * console.log(hash.copy().digest('hex')); + * + * hash.update('three'); + * console.log(hash.copy().digest('hex')); + * + * // Etc. 
+ * ``` + * @since v13.1.0 + * @param options `stream.transform` options + */ + copy(options?: stream.TransformOptions): Hash; + /** + * Updates the hash content with the given `data`, the encoding of which + * is given in `inputEncoding`. + * If `encoding` is not provided, and the `data` is a string, an + * encoding of `'utf8'` is enforced. If `data` is a `Buffer`, `TypedArray`, or`DataView`, then `inputEncoding` is ignored. + * + * This can be called many times with new data as it is streamed. + * @since v0.1.92 + * @param inputEncoding The `encoding` of the `data` string. + */ + update(data: BinaryLike): Hash; + update(data: string, inputEncoding: Encoding): Hash; + /** + * Calculates the digest of all of the data passed to be hashed (using the `hash.update()` method). + * If `encoding` is provided a string will be returned; otherwise + * a `Buffer` is returned. + * + * The `Hash` object can not be used again after `hash.digest()` method has been + * called. Multiple calls will cause an error to be thrown. + * @since v0.1.92 + * @param encoding The `encoding` of the return value. + */ + digest(): Buffer; + digest(encoding: BinaryToTextEncoding): string; + } + /** + * The `Hmac` class is a utility for creating cryptographic HMAC digests. It can + * be used in one of two ways: + * + * * As a `stream` that is both readable and writable, where data is written + * to produce a computed HMAC digest on the readable side, or + * * Using the `hmac.update()` and `hmac.digest()` methods to produce the + * computed HMAC digest. + * + * The {@link createHmac} method is used to create `Hmac` instances. `Hmac`objects are not to be created directly using the `new` keyword. + * + * Example: Using `Hmac` objects as streams: + * + * ```js + * const { + * createHmac + * } = await import('crypto'); + * + * const hmac = createHmac('sha256', 'a secret'); + * + * hmac.on('readable', () => { + * // Only one element is going to be produced by the + * // hash stream. + * const data = hmac.read(); + * if (data) { + * console.log(data.toString('hex')); + * // Prints: + * // 7fd04df92f636fd450bc841c9418e5825c17f33ad9c87c518115a45971f7f77e + * } + * }); + * + * hmac.write('some data to hash'); + * hmac.end(); + * ``` + * + * Example: Using `Hmac` and piped streams: + * + * ```js + * import { createReadStream } from 'fs'; + * import { stdout } from 'process'; + * const { + * createHmac + * } = await import('crypto'); + * + * const hmac = createHmac('sha256', 'a secret'); + * + * const input = createReadStream('test.js'); + * input.pipe(hmac).pipe(stdout); + * ``` + * + * Example: Using the `hmac.update()` and `hmac.digest()` methods: + * + * ```js + * const { + * createHmac + * } = await import('crypto'); + * + * const hmac = createHmac('sha256', 'a secret'); + * + * hmac.update('some data to hash'); + * console.log(hmac.digest('hex')); + * // Prints: + * // 7fd04df92f636fd450bc841c9418e5825c17f33ad9c87c518115a45971f7f77e + * ``` + * @since v0.1.94 + */ + class Hmac extends stream.Transform { + private constructor(); + /** + * Updates the `Hmac` content with the given `data`, the encoding of which + * is given in `inputEncoding`. + * If `encoding` is not provided, and the `data` is a string, an + * encoding of `'utf8'` is enforced. If `data` is a `Buffer`, `TypedArray`, or`DataView`, then `inputEncoding` is ignored. + * + * This can be called many times with new data as it is streamed. + * @since v0.1.94 + * @param inputEncoding The `encoding` of the `data` string. 
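The `Hmac` examples above only show producing a digest. A sketch of the verifying side, assuming `timingSafeEqual` (part of the same module, not shown in this excerpt) for a constant-time comparison:

```js
import { Buffer } from 'buffer';
const { createHmac, timingSafeEqual } = await import('crypto');

// Recompute the MAC and compare it to the received value without leaking
// timing information. Lengths must match or timingSafeEqual() throws.
function verifyHmac(message, receivedHex, secret) {
  const expected = createHmac('sha256', secret).update(message).digest();
  const received = Buffer.from(receivedHex, 'hex');
  return received.length === expected.length && timingSafeEqual(received, expected);
}
```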
+ */ + update(data: BinaryLike): Hmac; + update(data: string, inputEncoding: Encoding): Hmac; + /** + * Calculates the HMAC digest of all of the data passed using `hmac.update()`. + * If `encoding` is + * provided a string is returned; otherwise a `Buffer` is returned; + * + * The `Hmac` object can not be used again after `hmac.digest()` has been + * called. Multiple calls to `hmac.digest()` will result in an error being thrown. + * @since v0.1.94 + * @param encoding The `encoding` of the return value. + */ + digest(): Buffer; + digest(encoding: BinaryToTextEncoding): string; + } + type KeyObjectType = 'secret' | 'public' | 'private'; + interface KeyExportOptions { + type: 'pkcs1' | 'spki' | 'pkcs8' | 'sec1'; + format: T; + cipher?: string | undefined; + passphrase?: string | Buffer | undefined; + } + interface JwkKeyExportOptions { + format: 'jwk'; + } + interface JsonWebKey { + crv?: string | undefined; + d?: string | undefined; + dp?: string | undefined; + dq?: string | undefined; + e?: string | undefined; + k?: string | undefined; + kty?: string | undefined; + n?: string | undefined; + p?: string | undefined; + q?: string | undefined; + qi?: string | undefined; + x?: string | undefined; + y?: string | undefined; + [key: string]: unknown; + } + interface AsymmetricKeyDetails { + /** + * Key size in bits (RSA, DSA). + */ + modulusLength?: number | undefined; + /** + * Public exponent (RSA). + */ + publicExponent?: bigint | undefined; + /** + * Name of the message digest (RSA-PSS). + */ + hashAlgorithm?: string | undefined; + /** + * Name of the message digest used by MGF1 (RSA-PSS). + */ + mgf1HashAlgorithm?: string | undefined; + /** + * Minimal salt length in bytes (RSA-PSS). + */ + saltLength?: number | undefined; + /** + * Size of q in bits (DSA). + */ + divisorLength?: number | undefined; + /** + * Name of the curve (EC). + */ + namedCurve?: string | undefined; + } + /** + * Node.js uses a `KeyObject` class to represent a symmetric or asymmetric key, + * and each kind of key exposes different functions. The {@link createSecretKey}, {@link createPublicKey} and {@link createPrivateKey} methods are used to create `KeyObject`instances. `KeyObject` + * objects are not to be created directly using the `new`keyword. + * + * Most applications should consider using the new `KeyObject` API instead of + * passing keys as strings or `Buffer`s due to improved security features. + * + * `KeyObject` instances can be passed to other threads via `postMessage()`. + * The receiver obtains a cloned `KeyObject`, and the `KeyObject` does not need to + * be listed in the `transferList` argument. + * @since v11.6.0 + */ + class KeyObject { + private constructor(); + /** + * Example: Converting a `CryptoKey` instance to a `KeyObject`: + * + * ```js + * const { webcrypto, KeyObject } = await import('crypto'); + * const { subtle } = webcrypto; + * + * const key = await subtle.generateKey({ + * name: 'HMAC', + * hash: 'SHA-256', + * length: 256 + * }, true, ['sign', 'verify']); + * + * const keyObject = KeyObject.from(key); + * console.log(keyObject.symmetricKeySize); + * // Prints: 32 (symmetric key size in bytes) + * ``` + * @since v15.0.0 + */ + static from(key: webcrypto.CryptoKey): KeyObject; + /** + * For asymmetric keys, this property represents the type of the key. 
Supported key + * types are: + * + * * `'rsa'` (OID 1.2.840.113549.1.1.1) + * * `'rsa-pss'` (OID 1.2.840.113549.1.1.10) + * * `'dsa'` (OID 1.2.840.10040.4.1) + * * `'ec'` (OID 1.2.840.10045.2.1) + * * `'x25519'` (OID 1.3.101.110) + * * `'x448'` (OID 1.3.101.111) + * * `'ed25519'` (OID 1.3.101.112) + * * `'ed448'` (OID 1.3.101.113) + * * `'dh'` (OID 1.2.840.113549.1.3.1) + * + * This property is `undefined` for unrecognized `KeyObject` types and symmetric + * keys. + * @since v11.6.0 + */ + asymmetricKeyType?: KeyType | undefined; + /** + * For asymmetric keys, this property represents the size of the embedded key in + * bytes. This property is `undefined` for symmetric keys. + */ + asymmetricKeySize?: number | undefined; + /** + * This property exists only on asymmetric keys. Depending on the type of the key, + * this object contains information about the key. None of the information obtained + * through this property can be used to uniquely identify a key or to compromise + * the security of the key. + * + * For RSA-PSS keys, if the key material contains a `RSASSA-PSS-params` sequence, + * the `hashAlgorithm`, `mgf1HashAlgorithm`, and `saltLength` properties will be + * set. + * + * Other key details might be exposed via this API using additional attributes. + * @since v15.7.0 + */ + asymmetricKeyDetails?: AsymmetricKeyDetails | undefined; + /** + * For symmetric keys, the following encoding options can be used: + * + * For public keys, the following encoding options can be used: + * + * For private keys, the following encoding options can be used: + * + * The result type depends on the selected encoding format, when PEM the + * result is a string, when DER it will be a buffer containing the data + * encoded as DER, when [JWK](https://tools.ietf.org/html/rfc7517) it will be an object. + * + * When [JWK](https://tools.ietf.org/html/rfc7517) encoding format was selected, all other encoding options are + * ignored. + * + * PKCS#1, SEC1, and PKCS#8 type keys can be encrypted by using a combination of + * the `cipher` and `format` options. The PKCS#8 `type` can be used with any`format` to encrypt any key algorithm (RSA, EC, or DH) by specifying a`cipher`. PKCS#1 and SEC1 can only be + * encrypted by specifying a `cipher`when the PEM `format` is used. For maximum compatibility, use PKCS#8 for + * encrypted private keys. Since PKCS#8 defines its own + * encryption mechanism, PEM-level encryption is not supported when encrypting + * a PKCS#8 key. See [RFC 5208](https://www.rfc-editor.org/rfc/rfc5208.txt) for PKCS#8 encryption and [RFC 1421](https://www.rfc-editor.org/rfc/rfc1421.txt) for + * PKCS#1 and SEC1 encryption. + * @since v11.6.0 + */ + export(options: KeyExportOptions<'pem'>): string | Buffer; + export(options?: KeyExportOptions<'der'>): Buffer; + export(options?: JwkKeyExportOptions): JsonWebKey; + /** + * For secret keys, this property represents the size of the key in bytes. This + * property is `undefined` for asymmetric keys. + * @since v11.6.0 + */ + symmetricKeySize?: number | undefined; + /** + * Depending on the type of this `KeyObject`, this property is either`'secret'` for secret (symmetric) keys, `'public'` for public (asymmetric) keys + * or `'private'` for private (asymmetric) keys. 
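The `export()` options and `asymmetricKeyDetails` property documented above are not exercised by an example in this excerpt. A sketch, assuming `generateKeyPairSync` from the same module (documented elsewhere) to obtain `KeyObject` instances:

```js
const { generateKeyPairSync } = await import('crypto');

const { publicKey, privateKey } = generateKeyPairSync('ec', { namedCurve: 'prime256v1' });

console.log(publicKey.asymmetricKeyType);    // 'ec'
console.log(publicKey.asymmetricKeyDetails); // { namedCurve: 'prime256v1' }

console.log(publicKey.export({ type: 'spki', format: 'pem' }));   // PEM string
console.log(privateKey.export({ type: 'pkcs8', format: 'der' })); // Buffer with DER bytes
console.log(privateKey.export({ format: 'jwk' }));                // plain JavaScript JWK object
```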
+ * @since v11.6.0 + */ + type: KeyObjectType; + } + type CipherCCMTypes = 'aes-128-ccm' | 'aes-192-ccm' | 'aes-256-ccm' | 'chacha20-poly1305'; + type CipherGCMTypes = 'aes-128-gcm' | 'aes-192-gcm' | 'aes-256-gcm'; + type CipherOCBTypes = 'aes-128-ocb' | 'aes-192-ocb' | 'aes-256-ocb'; + type BinaryLike = string | NodeJS.ArrayBufferView; + type CipherKey = BinaryLike | KeyObject; + interface CipherCCMOptions extends stream.TransformOptions { + authTagLength: number; + } + interface CipherGCMOptions extends stream.TransformOptions { + authTagLength?: number | undefined; + } + interface CipherOCBOptions extends stream.TransformOptions { + authTagLength: number; + } + /** + * Creates and returns a `Cipher` object that uses the given `algorithm` and`password`. + * + * The `options` argument controls stream behavior and is optional except when a + * cipher in CCM or OCB mode (e.g. `'aes-128-ccm'`) is used. In that case, the`authTagLength` option is required and specifies the length of the + * authentication tag in bytes, see `CCM mode`. In GCM mode, the `authTagLength`option is not required but can be used to set the length of the authentication + * tag that will be returned by `getAuthTag()` and defaults to 16 bytes. + * For `chacha20-poly1305`, the `authTagLength` option defaults to 16 bytes. + * + * The `algorithm` is dependent on OpenSSL, examples are `'aes192'`, etc. On + * recent OpenSSL releases, `openssl list -cipher-algorithms` will + * display the available cipher algorithms. + * + * The `password` is used to derive the cipher key and initialization vector (IV). + * The value must be either a `'latin1'` encoded string, a `Buffer`, a`TypedArray`, or a `DataView`. + * + * The implementation of `crypto.createCipher()` derives keys using the OpenSSL + * function [`EVP_BytesToKey`](https://www.openssl.org/docs/man1.1.0/crypto/EVP_BytesToKey.html) with the digest algorithm set to MD5, one + * iteration, and no salt. The lack of salt allows dictionary attacks as the same + * password always creates the same key. The low iteration count and + * non-cryptographically secure hash algorithm allow passwords to be tested very + * rapidly. + * + * In line with OpenSSL's recommendation to use a more modern algorithm instead of [`EVP_BytesToKey`](https://www.openssl.org/docs/man1.1.0/crypto/EVP_BytesToKey.html) it is recommended that + * developers derive a key and IV on + * their own using {@link scrypt} and to use {@link createCipheriv} to create the `Cipher` object. Users should not use ciphers with counter mode + * (e.g. CTR, GCM, or CCM) in `crypto.createCipher()`. A warning is emitted when + * they are used in order to avoid the risk of IV reuse that causes + * vulnerabilities. For the case when IV is reused in GCM, see [Nonce-Disrespecting Adversaries](https://github.com/nonce-disrespect/nonce-disrespect) for details. + * @since v0.1.94 + * @deprecated Since v10.0.0 - Use {@link createCipheriv} instead. 
+ * @param options `stream.transform` options + */ + function createCipher(algorithm: CipherCCMTypes, password: BinaryLike, options: CipherCCMOptions): CipherCCM; + /** @deprecated since v10.0.0 use `createCipheriv()` */ + function createCipher(algorithm: CipherGCMTypes, password: BinaryLike, options?: CipherGCMOptions): CipherGCM; + /** @deprecated since v10.0.0 use `createCipheriv()` */ + function createCipher(algorithm: string, password: BinaryLike, options?: stream.TransformOptions): Cipher; + /** + * Creates and returns a `Cipher` object, with the given `algorithm`, `key` and + * initialization vector (`iv`). + * + * The `options` argument controls stream behavior and is optional except when a + * cipher in CCM or OCB mode (e.g. `'aes-128-ccm'`) is used. In that case, the`authTagLength` option is required and specifies the length of the + * authentication tag in bytes, see `CCM mode`. In GCM mode, the `authTagLength`option is not required but can be used to set the length of the authentication + * tag that will be returned by `getAuthTag()` and defaults to 16 bytes. + * For `chacha20-poly1305`, the `authTagLength` option defaults to 16 bytes. + * + * The `algorithm` is dependent on OpenSSL, examples are `'aes192'`, etc. On + * recent OpenSSL releases, `openssl list -cipher-algorithms` will + * display the available cipher algorithms. + * + * The `key` is the raw key used by the `algorithm` and `iv` is an [initialization vector](https://en.wikipedia.org/wiki/Initialization_vector). Both arguments must be `'utf8'` encoded + * strings,`Buffers`, `TypedArray`, or `DataView`s. The `key` may optionally be + * a `KeyObject` of type `secret`. If the cipher does not need + * an initialization vector, `iv` may be `null`. + * + * When passing strings for `key` or `iv`, please consider `caveats when using strings as inputs to cryptographic APIs`. + * + * Initialization vectors should be unpredictable and unique; ideally, they will be + * cryptographically random. They do not have to be secret: IVs are typically just + * added to ciphertext messages unencrypted. It may sound contradictory that + * something has to be unpredictable and unique, but does not have to be secret; + * remember that an attacker must not be able to predict ahead of time what a + * given IV will be. + * @since v0.1.94 + * @param options `stream.transform` options + */ + function createCipheriv(algorithm: CipherCCMTypes, key: CipherKey, iv: BinaryLike, options: CipherCCMOptions): CipherCCM; + function createCipheriv(algorithm: CipherOCBTypes, key: CipherKey, iv: BinaryLike, options: CipherOCBOptions): CipherOCB; + function createCipheriv(algorithm: CipherGCMTypes, key: CipherKey, iv: BinaryLike, options?: CipherGCMOptions): CipherGCM; + function createCipheriv(algorithm: string, key: CipherKey, iv: BinaryLike | null, options?: stream.TransformOptions): Cipher; + /** + * Instances of the `Cipher` class are used to encrypt data. The class can be + * used in one of two ways: + * + * * As a `stream` that is both readable and writable, where plain unencrypted + * data is written to produce encrypted data on the readable side, or + * * Using the `cipher.update()` and `cipher.final()` methods to produce + * the encrypted data. + * + * The {@link createCipher} or {@link createCipheriv} methods are + * used to create `Cipher` instances. `Cipher` objects are not to be created + * directly using the `new` keyword. 
+ * + * Example: Using `Cipher` objects as streams: + * + * ```js + * const { + * scrypt, + * randomFill, + * createCipheriv + * } = await import('crypto'); + * + * const algorithm = 'aes-192-cbc'; + * const password = 'Password used to generate key'; + * + * // First, we'll generate the key. The key length is dependent on the algorithm. + * // In this case for aes192, it is 24 bytes (192 bits). + * scrypt(password, 'salt', 24, (err, key) => { + * if (err) throw err; + * // Then, we'll generate a random initialization vector + * randomFill(new Uint8Array(16), (err, iv) => { + * if (err) throw err; + * + * // Once we have the key and iv, we can create and use the cipher... + * const cipher = createCipheriv(algorithm, key, iv); + * + * let encrypted = ''; + * cipher.setEncoding('hex'); + * + * cipher.on('data', (chunk) => encrypted += chunk); + * cipher.on('end', () => console.log(encrypted)); + * + * cipher.write('some clear text data'); + * cipher.end(); + * }); + * }); + * ``` + * + * Example: Using `Cipher` and piped streams: + * + * ```js + * import { + * createReadStream, + * createWriteStream, + * } from 'fs'; + * + * import { + * pipeline + * } from 'stream'; + * + * const { + * scrypt, + * randomFill, + * createCipheriv + * } = await import('crypto'); + * + * const algorithm = 'aes-192-cbc'; + * const password = 'Password used to generate key'; + * + * // First, we'll generate the key. The key length is dependent on the algorithm. + * // In this case for aes192, it is 24 bytes (192 bits). + * scrypt(password, 'salt', 24, (err, key) => { + * if (err) throw err; + * // Then, we'll generate a random initialization vector + * randomFill(new Uint8Array(16), (err, iv) => { + * if (err) throw err; + * + * const cipher = createCipheriv(algorithm, key, iv); + * + * const input = createReadStream('test.js'); + * const output = createWriteStream('test.enc'); + * + * pipeline(input, cipher, output, (err) => { + * if (err) throw err; + * }); + * }); + * }); + * ``` + * + * Example: Using the `cipher.update()` and `cipher.final()` methods: + * + * ```js + * const { + * scrypt, + * randomFill, + * createCipheriv + * } = await import('crypto'); + * + * const algorithm = 'aes-192-cbc'; + * const password = 'Password used to generate key'; + * + * // First, we'll generate the key. The key length is dependent on the algorithm. + * // In this case for aes192, it is 24 bytes (192 bits). + * scrypt(password, 'salt', 24, (err, key) => { + * if (err) throw err; + * // Then, we'll generate a random initialization vector + * randomFill(new Uint8Array(16), (err, iv) => { + * if (err) throw err; + * + * const cipher = createCipheriv(algorithm, key, iv); + * + * let encrypted = cipher.update('some clear text data', 'utf8', 'hex'); + * encrypted += cipher.final('hex'); + * console.log(encrypted); + * }); + * }); + * ``` + * @since v0.1.94 + */ + class Cipher extends stream.Transform { + private constructor(); + /** + * Updates the cipher with `data`. If the `inputEncoding` argument is given, + * the `data`argument is a string using the specified encoding. If the `inputEncoding`argument is not given, `data` must be a `Buffer`, `TypedArray`, or`DataView`. If `data` is a `Buffer`, + * `TypedArray`, or `DataView`, then`inputEncoding` is ignored. + * + * The `outputEncoding` specifies the output format of the enciphered + * data. If the `outputEncoding`is specified, a string using the specified encoding is returned. If no`outputEncoding` is provided, a `Buffer` is returned. 
+ * + * The `cipher.update()` method can be called multiple times with new data until `cipher.final()` is called. Calling `cipher.update()` after `cipher.final()` will result in an error being + * thrown. + * @since v0.1.94 + * @param inputEncoding The `encoding` of the data. + * @param outputEncoding The `encoding` of the return value. + */ + update(data: BinaryLike): Buffer; + update(data: string, inputEncoding: Encoding): Buffer; + update(data: NodeJS.ArrayBufferView, inputEncoding: undefined, outputEncoding: Encoding): string; + update(data: string, inputEncoding: Encoding | undefined, outputEncoding: Encoding): string; + /** + * Once the `cipher.final()` method has been called, the `Cipher` object can no + * longer be used to encrypt data. Attempts to call `cipher.final()` more than + * once will result in an error being thrown. + * @since v0.1.94 + * @param outputEncoding The `encoding` of the return value. + * @return Any remaining enciphered contents. If `outputEncoding` is specified, a string is returned. If an `outputEncoding` is not provided, a {@link Buffer} is returned. + */ + final(): Buffer; + final(outputEncoding: BufferEncoding): string; + /** + * When using block encryption algorithms, the `Cipher` class will automatically + * add padding to the input data to the appropriate block size. To disable the + * default padding call `cipher.setAutoPadding(false)`. + * + * When `autoPadding` is `false`, the length of the entire input data must be a + * multiple of the cipher's block size or `cipher.final()` will throw an error. + * Disabling automatic padding is useful for non-standard padding, for instance + * using `0x0` instead of PKCS padding. + * + * The `cipher.setAutoPadding()` method must be called before `cipher.final()`. + * @since v0.7.1 + * @param [autoPadding=true] + * @return for method chaining. + */ + setAutoPadding(autoPadding?: boolean): this; + } + interface CipherCCM extends Cipher { + setAAD( + buffer: NodeJS.ArrayBufferView, + options: { + plaintextLength: number; + } + ): this; + getAuthTag(): Buffer; + } + interface CipherGCM extends Cipher { + setAAD( + buffer: NodeJS.ArrayBufferView, + options?: { + plaintextLength: number; + } + ): this; + getAuthTag(): Buffer; + } + interface CipherOCB extends Cipher { + setAAD( + buffer: NodeJS.ArrayBufferView, + options?: { + plaintextLength: number; + } + ): this; + getAuthTag(): Buffer; + } + /** + * Creates and returns a `Decipher` object that uses the given `algorithm` and`password` (key). + * + * The `options` argument controls stream behavior and is optional except when a + * cipher in CCM or OCB mode (e.g. `'aes-128-ccm'`) is used. In that case, the`authTagLength` option is required and specifies the length of the + * authentication tag in bytes, see `CCM mode`. + * For `chacha20-poly1305`, the `authTagLength` option defaults to 16 bytes. + * + * The implementation of `crypto.createDecipher()` derives keys using the OpenSSL + * function [`EVP_BytesToKey`](https://www.openssl.org/docs/man1.1.0/crypto/EVP_BytesToKey.html) with the digest algorithm set to MD5, one + * iteration, and no salt. The lack of salt allows dictionary attacks as the same + * password always creates the same key. The low iteration count and + * non-cryptographically secure hash algorithm allow passwords to be tested very + * rapidly. 
+ * + * In line with OpenSSL's recommendation to use a more modern algorithm instead of [`EVP_BytesToKey`](https://www.openssl.org/docs/man1.1.0/crypto/EVP_BytesToKey.html) it is recommended that + * developers derive a key and IV on + * their own using {@link scrypt} and to use {@link createDecipheriv} to create the `Decipher` object. + * @since v0.1.94 + * @deprecated Since v10.0.0 - Use {@link createDecipheriv} instead. + * @param options `stream.transform` options + */ + function createDecipher(algorithm: CipherCCMTypes, password: BinaryLike, options: CipherCCMOptions): DecipherCCM; + /** @deprecated since v10.0.0 use `createDecipheriv()` */ + function createDecipher(algorithm: CipherGCMTypes, password: BinaryLike, options?: CipherGCMOptions): DecipherGCM; + /** @deprecated since v10.0.0 use `createDecipheriv()` */ + function createDecipher(algorithm: string, password: BinaryLike, options?: stream.TransformOptions): Decipher; + /** + * Creates and returns a `Decipher` object that uses the given `algorithm`, `key`and initialization vector (`iv`). + * + * The `options` argument controls stream behavior and is optional except when a + * cipher in CCM or OCB mode (e.g. `'aes-128-ccm'`) is used. In that case, the`authTagLength` option is required and specifies the length of the + * authentication tag in bytes, see `CCM mode`. In GCM mode, the `authTagLength`option is not required but can be used to restrict accepted authentication tags + * to those with the specified length. + * For `chacha20-poly1305`, the `authTagLength` option defaults to 16 bytes. + * + * The `algorithm` is dependent on OpenSSL, examples are `'aes192'`, etc. On + * recent OpenSSL releases, `openssl list -cipher-algorithms` will + * display the available cipher algorithms. + * + * The `key` is the raw key used by the `algorithm` and `iv` is an [initialization vector](https://en.wikipedia.org/wiki/Initialization_vector). Both arguments must be `'utf8'` encoded + * strings,`Buffers`, `TypedArray`, or `DataView`s. The `key` may optionally be + * a `KeyObject` of type `secret`. If the cipher does not need + * an initialization vector, `iv` may be `null`. + * + * When passing strings for `key` or `iv`, please consider `caveats when using strings as inputs to cryptographic APIs`. + * + * Initialization vectors should be unpredictable and unique; ideally, they will be + * cryptographically random. They do not have to be secret: IVs are typically just + * added to ciphertext messages unencrypted. It may sound contradictory that + * something has to be unpredictable and unique, but does not have to be secret; + * remember that an attacker must not be able to predict ahead of time what a given + * IV will be. + * @since v0.1.94 + * @param options `stream.transform` options + */ + function createDecipheriv(algorithm: CipherCCMTypes, key: CipherKey, iv: BinaryLike, options: CipherCCMOptions): DecipherCCM; + function createDecipheriv(algorithm: CipherOCBTypes, key: CipherKey, iv: BinaryLike, options: CipherOCBOptions): DecipherOCB; + function createDecipheriv(algorithm: CipherGCMTypes, key: CipherKey, iv: BinaryLike, options?: CipherGCMOptions): DecipherGCM; + function createDecipheriv(algorithm: string, key: CipherKey, iv: BinaryLike | null, options?: stream.TransformOptions): Decipher; + /** + * Instances of the `Decipher` class are used to decrypt data. 
The class can be + * used in one of two ways: + * + * * As a `stream` that is both readable and writable, where plain encrypted + * data is written to produce unencrypted data on the readable side, or + * * Using the `decipher.update()` and `decipher.final()` methods to + * produce the unencrypted data. + * + * The {@link createDecipher} or {@link createDecipheriv} methods are + * used to create `Decipher` instances. `Decipher` objects are not to be created + * directly using the `new` keyword. + * + * Example: Using `Decipher` objects as streams: + * + * ```js + * import { Buffer } from 'buffer'; + * const { + * scryptSync, + * createDecipheriv + * } = await import('crypto'); + * + * const algorithm = 'aes-192-cbc'; + * const password = 'Password used to generate key'; + * // Key length is dependent on the algorithm. In this case for aes192, it is + * // 24 bytes (192 bits). + * // Use the async `crypto.scrypt()` instead. + * const key = scryptSync(password, 'salt', 24); + * // The IV is usually passed along with the ciphertext. + * const iv = Buffer.alloc(16, 0); // Initialization vector. + * + * const decipher = createDecipheriv(algorithm, key, iv); + * + * let decrypted = ''; + * decipher.on('readable', () => { + * while (null !== (chunk = decipher.read())) { + * decrypted += chunk.toString('utf8'); + * } + * }); + * decipher.on('end', () => { + * console.log(decrypted); + * // Prints: some clear text data + * }); + * + * // Encrypted with same algorithm, key and iv. + * const encrypted = + * 'e5f79c5915c02171eec6b212d5520d44480993d7d622a7c4c2da32f6efda0ffa'; + * decipher.write(encrypted, 'hex'); + * decipher.end(); + * ``` + * + * Example: Using `Decipher` and piped streams: + * + * ```js + * import { + * createReadStream, + * createWriteStream, + * } from 'fs'; + * import { Buffer } from 'buffer'; + * const { + * scryptSync, + * createDecipheriv + * } = await import('crypto'); + * + * const algorithm = 'aes-192-cbc'; + * const password = 'Password used to generate key'; + * // Use the async `crypto.scrypt()` instead. + * const key = scryptSync(password, 'salt', 24); + * // The IV is usually passed along with the ciphertext. + * const iv = Buffer.alloc(16, 0); // Initialization vector. + * + * const decipher = createDecipheriv(algorithm, key, iv); + * + * const input = createReadStream('test.enc'); + * const output = createWriteStream('test.js'); + * + * input.pipe(decipher).pipe(output); + * ``` + * + * Example: Using the `decipher.update()` and `decipher.final()` methods: + * + * ```js + * import { Buffer } from 'buffer'; + * const { + * scryptSync, + * createDecipheriv + * } = await import('crypto'); + * + * const algorithm = 'aes-192-cbc'; + * const password = 'Password used to generate key'; + * // Use the async `crypto.scrypt()` instead. + * const key = scryptSync(password, 'salt', 24); + * // The IV is usually passed along with the ciphertext. + * const iv = Buffer.alloc(16, 0); // Initialization vector. + * + * const decipher = createDecipheriv(algorithm, key, iv); + * + * // Encrypted using same algorithm, key and iv. + * const encrypted = + * 'e5f79c5915c02171eec6b212d5520d44480993d7d622a7c4c2da32f6efda0ffa'; + * let decrypted = decipher.update(encrypted, 'hex', 'utf8'); + * decrypted += decipher.final('utf8'); + * console.log(decrypted); + * // Prints: some clear text data + * ``` + * @since v0.1.94 + */ + class Decipher extends stream.Transform { + private constructor(); + /** + * Updates the decipher with `data`. 
If the `inputEncoding` argument is given, + * the `data`argument is a string using the specified encoding. If the `inputEncoding`argument is not given, `data` must be a `Buffer`. If `data` is a `Buffer` then `inputEncoding` is + * ignored. + * + * The `outputEncoding` specifies the output format of the enciphered + * data. If the `outputEncoding`is specified, a string using the specified encoding is returned. If no`outputEncoding` is provided, a `Buffer` is returned. + * + * The `decipher.update()` method can be called multiple times with new data until `decipher.final()` is called. Calling `decipher.update()` after `decipher.final()` will result in an error + * being thrown. + * @since v0.1.94 + * @param inputEncoding The `encoding` of the `data` string. + * @param outputEncoding The `encoding` of the return value. + */ + update(data: NodeJS.ArrayBufferView): Buffer; + update(data: string, inputEncoding: Encoding): Buffer; + update(data: NodeJS.ArrayBufferView, inputEncoding: undefined, outputEncoding: Encoding): string; + update(data: string, inputEncoding: Encoding | undefined, outputEncoding: Encoding): string; + /** + * Once the `decipher.final()` method has been called, the `Decipher` object can + * no longer be used to decrypt data. Attempts to call `decipher.final()` more + * than once will result in an error being thrown. + * @since v0.1.94 + * @param outputEncoding The `encoding` of the return value. + * @return Any remaining deciphered contents. If `outputEncoding` is specified, a string is returned. If an `outputEncoding` is not provided, a {@link Buffer} is returned. + */ + final(): Buffer; + final(outputEncoding: BufferEncoding): string; + /** + * When data has been encrypted without standard block padding, calling`decipher.setAutoPadding(false)` will disable automatic padding to prevent `decipher.final()` from checking for and + * removing padding. + * + * Turning auto padding off will only work if the input data's length is a + * multiple of the ciphers block size. + * + * The `decipher.setAutoPadding()` method must be called before `decipher.final()`. + * @since v0.7.1 + * @param [autoPadding=true] + * @return for method chaining. + */ + setAutoPadding(auto_padding?: boolean): this; + } + interface DecipherCCM extends Decipher { + setAuthTag(buffer: NodeJS.ArrayBufferView): this; + setAAD( + buffer: NodeJS.ArrayBufferView, + options: { + plaintextLength: number; + } + ): this; + } + interface DecipherGCM extends Decipher { + setAuthTag(buffer: NodeJS.ArrayBufferView): this; + setAAD( + buffer: NodeJS.ArrayBufferView, + options?: { + plaintextLength: number; + } + ): this; + } + interface DecipherOCB extends Decipher { + setAuthTag(buffer: NodeJS.ArrayBufferView): this; + setAAD( + buffer: NodeJS.ArrayBufferView, + options?: { + plaintextLength: number; + } + ): this; + } + interface PrivateKeyInput { + key: string | Buffer; + format?: KeyFormat | undefined; + type?: 'pkcs1' | 'pkcs8' | 'sec1' | undefined; + passphrase?: string | Buffer | undefined; + } + interface PublicKeyInput { + key: string | Buffer; + format?: KeyFormat | undefined; + type?: 'pkcs1' | 'spki' | undefined; + } + /** + * Asynchronously generates a new random secret key of the given `length`. The`type` will determine which validations will be performed on the `length`. 
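The `CipherGCM`/`DecipherGCM` interfaces above add `setAAD()`, `getAuthTag()` and `setAuthTag()`, which none of the embedded examples use. A round-trip sketch with AES-256-GCM; the 32-byte key and 12-byte IV are conventional choices, not requirements stated in this excerpt:

```js
import { Buffer } from 'buffer';
const { createCipheriv, createDecipheriv, randomBytes } = await import('crypto');

const key = randomBytes(32);
const iv = randomBytes(12);
const aad = Buffer.from('header'); // authenticated but not encrypted

const cipher = createCipheriv('aes-256-gcm', key, iv);
cipher.setAAD(aad);
const ciphertext = Buffer.concat([cipher.update('some clear text data', 'utf8'), cipher.final()]);
const tag = cipher.getAuthTag(); // 16 bytes by default in GCM mode

const decipher = createDecipheriv('aes-256-gcm', key, iv);
decipher.setAAD(aad);
decipher.setAuthTag(tag);
const plaintext = Buffer.concat([decipher.update(ciphertext), decipher.final()]).toString('utf8');
console.log(plaintext); // Prints: some clear text data
```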
+ * + * ```js + * const { + * generateKey + * } = await import('crypto'); + * + * generateKey('hmac', { length: 64 }, (err, key) => { + * if (err) throw err; + * console.log(key.export().toString('hex')); // 46e..........620 + * }); + * ``` + * @since v15.0.0 + * @param type The intended use of the generated secret key. Currently accepted values are `'hmac'` and `'aes'`. + */ + function generateKey( + type: 'hmac' | 'aes', + options: { + length: number; + }, + callback: (err: Error | null, key: KeyObject) => void + ): void; + /** + * Synchronously generates a new random secret key of the given `length`. The`type` will determine which validations will be performed on the `length`. + * + * ```js + * const { + * generateKeySync + * } = await import('crypto'); + * + * const key = generateKeySync('hmac', { length: 64 }); + * console.log(key.export().toString('hex')); // e89..........41e + * ``` + * @since v15.0.0 + * @param type The intended use of the generated secret key. Currently accepted values are `'hmac'` and `'aes'`. + */ + function generateKeySync( + type: 'hmac' | 'aes', + options: { + length: number; + } + ): KeyObject; + interface JsonWebKeyInput { + key: JsonWebKey; + format: 'jwk'; + } + /** + * Creates and returns a new key object containing a private key. If `key` is a + * string or `Buffer`, `format` is assumed to be `'pem'`; otherwise, `key`must be an object with the properties described above. + * + * If the private key is encrypted, a `passphrase` must be specified. The length + * of the passphrase is limited to 1024 bytes. + * @since v11.6.0 + */ + function createPrivateKey(key: PrivateKeyInput | string | Buffer | JsonWebKeyInput): KeyObject; + /** + * Creates and returns a new key object containing a public key. If `key` is a + * string or `Buffer`, `format` is assumed to be `'pem'`; if `key` is a `KeyObject`with type `'private'`, the public key is derived from the given private key; + * otherwise, `key` must be an object with the properties described above. + * + * If the format is `'pem'`, the `'key'` may also be an X.509 certificate. + * + * Because public keys can be derived from private keys, a private key may be + * passed instead of a public key. In that case, this function behaves as if {@link createPrivateKey} had been called, except that the type of the + * returned `KeyObject` will be `'public'` and that the private key cannot be + * extracted from the returned `KeyObject`. Similarly, if a `KeyObject` with type`'private'` is given, a new `KeyObject` with type `'public'` will be returned + * and it will be impossible to extract the private key from the returned object. + * @since v11.6.0 + */ + function createPublicKey(key: PublicKeyInput | string | Buffer | KeyObject | JsonWebKeyInput): KeyObject; + /** + * Creates and returns a new key object containing a secret key for symmetric + * encryption or `Hmac`. + * @since v11.6.0 + * @param encoding The string encoding when `key` is a string. + */ + function createSecretKey(key: NodeJS.ArrayBufferView): KeyObject; + function createSecretKey(key: string, encoding: BufferEncoding): KeyObject; + /** + * Creates and returns a `Sign` object that uses the given `algorithm`. Use {@link getHashes} to obtain the names of the available digest algorithms. + * Optional `options` argument controls the `stream.Writable` behavior. + * + * In some cases, a `Sign` instance can be created using the name of a signature + * algorithm, such as `'RSA-SHA256'`, instead of a digest algorithm. 
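`createSecretKey()` and the derive-public-from-private behavior of `createPublicKey()` described above have no examples in this excerpt. A sketch, again assuming `generateKeyPairSync` from the same module:

```js
const { createSecretKey, createPublicKey, createHmac, generateKeyPairSync, randomBytes } = await import('crypto');

// A symmetric KeyObject is accepted wherever a raw key is, e.g. by createHmac().
const secret = createSecretKey(randomBytes(32));
console.log(secret.type, secret.symmetricKeySize); // 'secret' 32
console.log(createHmac('sha256', secret).update('some data').digest('hex'));

// Passing a private KeyObject returns the corresponding public KeyObject.
const { privateKey } = generateKeyPairSync('ec', { namedCurve: 'prime256v1' });
const publicKey = createPublicKey(privateKey);
console.log(publicKey.type, publicKey.asymmetricKeyType); // 'public' 'ec'
```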
This will use + * the corresponding digest algorithm. This does not work for all signature + * algorithms, such as `'ecdsa-with-SHA256'`, so it is best to always use digest + * algorithm names. + * @since v0.1.92 + * @param options `stream.Writable` options + */ + function createSign(algorithm: string, options?: stream.WritableOptions): Sign; + type DSAEncoding = 'der' | 'ieee-p1363'; + interface SigningOptions { + /** + * @See crypto.constants.RSA_PKCS1_PADDING + */ + padding?: number | undefined; + saltLength?: number | undefined; + dsaEncoding?: DSAEncoding | undefined; + } + interface SignPrivateKeyInput extends PrivateKeyInput, SigningOptions {} + interface SignKeyObjectInput extends SigningOptions { + key: KeyObject; + } + interface VerifyPublicKeyInput extends PublicKeyInput, SigningOptions {} + interface VerifyKeyObjectInput extends SigningOptions { + key: KeyObject; + } + type KeyLike = string | Buffer | KeyObject; + /** + * The `Sign` class is a utility for generating signatures. It can be used in one + * of two ways: + * + * * As a writable `stream`, where data to be signed is written and the `sign.sign()` method is used to generate and return the signature, or + * * Using the `sign.update()` and `sign.sign()` methods to produce the + * signature. + * + * The {@link createSign} method is used to create `Sign` instances. The + * argument is the string name of the hash function to use. `Sign` objects are not + * to be created directly using the `new` keyword. + * + * Example: Using `Sign` and `Verify` objects as streams: + * + * ```js + * const { + * generateKeyPairSync, + * createSign, + * createVerify + * } = await import('crypto'); + * + * const { privateKey, publicKey } = generateKeyPairSync('ec', { + * namedCurve: 'sect239k1' + * }); + * + * const sign = createSign('SHA256'); + * sign.write('some data to sign'); + * sign.end(); + * const signature = sign.sign(privateKey, 'hex'); + * + * const verify = createVerify('SHA256'); + * verify.write('some data to sign'); + * verify.end(); + * console.log(verify.verify(publicKey, signature, 'hex')); + * // Prints: true + * ``` + * + * Example: Using the `sign.update()` and `verify.update()` methods: + * + * ```js + * const { + * generateKeyPairSync, + * createSign, + * createVerify + * } = await import('crypto'); + * + * const { privateKey, publicKey } = generateKeyPairSync('rsa', { + * modulusLength: 2048, + * }); + * + * const sign = createSign('SHA256'); + * sign.update('some data to sign'); + * sign.end(); + * const signature = sign.sign(privateKey); + * + * const verify = createVerify('SHA256'); + * verify.update('some data to sign'); + * verify.end(); + * console.log(verify.verify(publicKey, signature)); + * // Prints: true + * ``` + * @since v0.1.92 + */ + class Sign extends stream.Writable { + private constructor(); + /** + * Updates the `Sign` content with the given `data`, the encoding of which + * is given in `inputEncoding`. + * If `encoding` is not provided, and the `data` is a string, an + * encoding of `'utf8'` is enforced. If `data` is a `Buffer`, `TypedArray`, or`DataView`, then `inputEncoding` is ignored. + * + * This can be called many times with new data as it is streamed. + * @since v0.1.92 + * @param inputEncoding The `encoding` of the `data` string. + */ + update(data: BinaryLike): this; + update(data: string, inputEncoding: Encoding): this; + /** + * Calculates the signature on all the data passed through using either `sign.update()` or `sign.write()`. 
+ * + * If `privateKey` is not a `KeyObject`, this function behaves as if`privateKey` had been passed to {@link createPrivateKey}. If it is an + * object, the following additional properties can be passed: + * + * If `outputEncoding` is provided a string is returned; otherwise a `Buffer` is returned. + * + * The `Sign` object can not be again used after `sign.sign()` method has been + * called. Multiple calls to `sign.sign()` will result in an error being thrown. + * @since v0.1.92 + */ + sign(privateKey: KeyLike | SignKeyObjectInput | SignPrivateKeyInput): Buffer; + sign(privateKey: KeyLike | SignKeyObjectInput | SignPrivateKeyInput, outputFormat: BinaryToTextEncoding): string; + } + /** + * Creates and returns a `Verify` object that uses the given algorithm. + * Use {@link getHashes} to obtain an array of names of the available + * signing algorithms. Optional `options` argument controls the`stream.Writable` behavior. + * + * In some cases, a `Verify` instance can be created using the name of a signature + * algorithm, such as `'RSA-SHA256'`, instead of a digest algorithm. This will use + * the corresponding digest algorithm. This does not work for all signature + * algorithms, such as `'ecdsa-with-SHA256'`, so it is best to always use digest + * algorithm names. + * @since v0.1.92 + * @param options `stream.Writable` options + */ + function createVerify(algorithm: string, options?: stream.WritableOptions): Verify; + /** + * The `Verify` class is a utility for verifying signatures. It can be used in one + * of two ways: + * + * * As a writable `stream` where written data is used to validate against the + * supplied signature, or + * * Using the `verify.update()` and `verify.verify()` methods to verify + * the signature. + * + * The {@link createVerify} method is used to create `Verify` instances.`Verify` objects are not to be created directly using the `new` keyword. + * + * See `Sign` for examples. + * @since v0.1.92 + */ + class Verify extends stream.Writable { + private constructor(); + /** + * Updates the `Verify` content with the given `data`, the encoding of which + * is given in `inputEncoding`. + * If `inputEncoding` is not provided, and the `data` is a string, an + * encoding of `'utf8'` is enforced. If `data` is a `Buffer`, `TypedArray`, or`DataView`, then `inputEncoding` is ignored. + * + * This can be called many times with new data as it is streamed. + * @since v0.1.92 + * @param inputEncoding The `encoding` of the `data` string. + */ + update(data: BinaryLike): Verify; + update(data: string, inputEncoding: Encoding): Verify; + /** + * Verifies the provided data using the given `object` and `signature`. + * + * If `object` is not a `KeyObject`, this function behaves as if`object` had been passed to {@link createPublicKey}. If it is an + * object, the following additional properties can be passed: + * + * The `signature` argument is the previously calculated signature for the data, in + * the `signatureEncoding`. + * If a `signatureEncoding` is specified, the `signature` is expected to be a + * string; otherwise `signature` is expected to be a `Buffer`,`TypedArray`, or `DataView`. + * + * The `verify` object can not be used again after `verify.verify()` has been + * called. Multiple calls to `verify.verify()` will result in an error being + * thrown. + * + * Because public keys can be derived from private keys, a private key may + * be passed instead of a public key. 
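`sign()` and `verify()` accept the `SigningOptions` fields (`padding`, `saltLength`, `dsaEncoding`) declared above, but the embedded examples only pass bare keys. A sketch using RSA-PSS padding; the `constants` values come from the same module and are not defined in this excerpt:

```js
const { generateKeyPairSync, createSign, createVerify, constants } = await import('crypto');

const { privateKey, publicKey } = generateKeyPairSync('rsa', { modulusLength: 2048 });

const sign = createSign('SHA256');
sign.update('some data to sign');
sign.end();
const signature = sign.sign({
  key: privateKey,
  padding: constants.RSA_PKCS1_PSS_PADDING,
  saltLength: constants.RSA_PSS_SALTLEN_DIGEST,
});

const verify = createVerify('SHA256');
verify.update('some data to sign');
verify.end();
console.log(verify.verify({
  key: publicKey,
  padding: constants.RSA_PKCS1_PSS_PADDING,
  saltLength: constants.RSA_PSS_SALTLEN_DIGEST,
}, signature)); // Prints: true
```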
+ * @since v0.1.92 + */ + verify(object: KeyLike | VerifyKeyObjectInput | VerifyPublicKeyInput, signature: NodeJS.ArrayBufferView): boolean; + verify(object: KeyLike | VerifyKeyObjectInput | VerifyPublicKeyInput, signature: string, signature_format?: BinaryToTextEncoding): boolean; + } + /** + * Creates a `DiffieHellman` key exchange object using the supplied `prime` and an + * optional specific `generator`. + * + * The `generator` argument can be a number, string, or `Buffer`. If`generator` is not specified, the value `2` is used. + * + * If `primeEncoding` is specified, `prime` is expected to be a string; otherwise + * a `Buffer`, `TypedArray`, or `DataView` is expected. + * + * If `generatorEncoding` is specified, `generator` is expected to be a string; + * otherwise a number, `Buffer`, `TypedArray`, or `DataView` is expected. + * @since v0.11.12 + * @param primeEncoding The `encoding` of the `prime` string. + * @param [generator=2] + * @param generatorEncoding The `encoding` of the `generator` string. + */ + function createDiffieHellman(primeLength: number, generator?: number): DiffieHellman; + function createDiffieHellman(prime: ArrayBuffer | NodeJS.ArrayBufferView, generator?: number | ArrayBuffer | NodeJS.ArrayBufferView): DiffieHellman; + function createDiffieHellman(prime: ArrayBuffer | NodeJS.ArrayBufferView, generator: string, generatorEncoding: BinaryToTextEncoding): DiffieHellman; + function createDiffieHellman(prime: string, primeEncoding: BinaryToTextEncoding, generator?: number | ArrayBuffer | NodeJS.ArrayBufferView): DiffieHellman; + function createDiffieHellman(prime: string, primeEncoding: BinaryToTextEncoding, generator: string, generatorEncoding: BinaryToTextEncoding): DiffieHellman; + /** + * The `DiffieHellman` class is a utility for creating Diffie-Hellman key + * exchanges. + * + * Instances of the `DiffieHellman` class can be created using the {@link createDiffieHellman} function. + * + * ```js + * import assert from 'assert'; + * + * const { + * createDiffieHellman + * } = await import('crypto'); + * + * // Generate Alice's keys... + * const alice = createDiffieHellman(2048); + * const aliceKey = alice.generateKeys(); + * + * // Generate Bob's keys... + * const bob = createDiffieHellman(alice.getPrime(), alice.getGenerator()); + * const bobKey = bob.generateKeys(); + * + * // Exchange and generate the secret... + * const aliceSecret = alice.computeSecret(bobKey); + * const bobSecret = bob.computeSecret(aliceKey); + * + * // OK + * assert.strictEqual(aliceSecret.toString('hex'), bobSecret.toString('hex')); + * ``` + * @since v0.5.0 + */ + class DiffieHellman { + private constructor(); + /** + * Generates private and public Diffie-Hellman key values, and returns + * the public key in the specified `encoding`. This key should be + * transferred to the other party. + * If `encoding` is provided a string is returned; otherwise a `Buffer` is returned. + * @since v0.5.0 + * @param encoding The `encoding` of the return value. + */ + generateKeys(): Buffer; + generateKeys(encoding: BinaryToTextEncoding): string; + /** + * Computes the shared secret using `otherPublicKey` as the other + * party's public key and returns the computed shared secret. The supplied + * key is interpreted using the specified `inputEncoding`, and secret is + * encoded using specified `outputEncoding`. + * If the `inputEncoding` is not + * provided, `otherPublicKey` is expected to be a `Buffer`,`TypedArray`, or `DataView`. 
+ * + * If `outputEncoding` is given a string is returned; otherwise, a `Buffer` is returned. + * @since v0.5.0 + * @param inputEncoding The `encoding` of an `otherPublicKey` string. + * @param outputEncoding The `encoding` of the return value. + */ + computeSecret(otherPublicKey: NodeJS.ArrayBufferView, inputEncoding?: null, outputEncoding?: null): Buffer; + computeSecret(otherPublicKey: string, inputEncoding: BinaryToTextEncoding, outputEncoding?: null): Buffer; + computeSecret(otherPublicKey: NodeJS.ArrayBufferView, inputEncoding: null, outputEncoding: BinaryToTextEncoding): string; + computeSecret(otherPublicKey: string, inputEncoding: BinaryToTextEncoding, outputEncoding: BinaryToTextEncoding): string; + /** + * Returns the Diffie-Hellman prime in the specified `encoding`. + * If `encoding` is provided a string is + * returned; otherwise a `Buffer` is returned. + * @since v0.5.0 + * @param encoding The `encoding` of the return value. + */ + getPrime(): Buffer; + getPrime(encoding: BinaryToTextEncoding): string; + /** + * Returns the Diffie-Hellman generator in the specified `encoding`. + * If `encoding` is provided a string is + * returned; otherwise a `Buffer` is returned. + * @since v0.5.0 + * @param encoding The `encoding` of the return value. + */ + getGenerator(): Buffer; + getGenerator(encoding: BinaryToTextEncoding): string; + /** + * Returns the Diffie-Hellman public key in the specified `encoding`. + * If `encoding` is provided a + * string is returned; otherwise a `Buffer` is returned. + * @since v0.5.0 + * @param encoding The `encoding` of the return value. + */ + getPublicKey(): Buffer; + getPublicKey(encoding: BinaryToTextEncoding): string; + /** + * Returns the Diffie-Hellman private key in the specified `encoding`. + * If `encoding` is provided a + * string is returned; otherwise a `Buffer` is returned. + * @since v0.5.0 + * @param encoding The `encoding` of the return value. + */ + getPrivateKey(): Buffer; + getPrivateKey(encoding: BinaryToTextEncoding): string; + /** + * Sets the Diffie-Hellman public key. If the `encoding` argument is provided,`publicKey` is expected + * to be a string. If no `encoding` is provided, `publicKey` is expected + * to be a `Buffer`, `TypedArray`, or `DataView`. + * @since v0.5.0 + * @param encoding The `encoding` of the `publicKey` string. + */ + setPublicKey(publicKey: NodeJS.ArrayBufferView): void; + setPublicKey(publicKey: string, encoding: BufferEncoding): void; + /** + * Sets the Diffie-Hellman private key. If the `encoding` argument is provided,`privateKey` is expected + * to be a string. If no `encoding` is provided, `privateKey` is expected + * to be a `Buffer`, `TypedArray`, or `DataView`. + * @since v0.5.0 + * @param encoding The `encoding` of the `privateKey` string. + */ + setPrivateKey(privateKey: NodeJS.ArrayBufferView): void; + setPrivateKey(privateKey: string, encoding: BufferEncoding): void; + /** + * A bit field containing any warnings and/or errors resulting from a check + * performed during initialization of the `DiffieHellman` object. + * + * The following values are valid for this property (as defined in `constants`module): + * + * * `DH_CHECK_P_NOT_SAFE_PRIME` + * * `DH_CHECK_P_NOT_PRIME` + * * `DH_UNABLE_TO_CHECK_GENERATOR` + * * `DH_NOT_SUITABLE_GENERATOR` + * @since v0.11.12 + */ + verifyError: number; + } + /** + * The `DiffieHellmanGroup` class takes a well-known modp group as its argument. + * It works the same as `DiffieHellman`, except that it does not allow changing its keys after creation. 
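The `verifyError` bit field documented above can be checked against the `DH_*` constants it lists; those constants live on `crypto.constants`, which is not defined in this excerpt. A sketch (the 512-bit prime is for illustration only and far too small for real use):

```js
const { createDiffieHellman, constants } = await import('crypto');

const dh = createDiffieHellman(512); // illustration only; use a much larger prime in practice

if (dh.verifyError & constants.DH_CHECK_P_NOT_SAFE_PRIME) {
  console.log('p is not a safe prime');
}
if (dh.verifyError & constants.DH_NOT_SUITABLE_GENERATOR) {
  console.log('the generator is not suitable for p');
}
console.log(dh.verifyError === 0 ? 'no warnings' : `verifyError: ${dh.verifyError}`);
```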
+ * In other words, it does not implement `setPublicKey()` or `setPrivateKey()` methods. + * + * ```js + * const { createDiffieHellmanGroup } = await import('node:crypto'); + * const dh = createDiffieHellmanGroup('modp1'); + * ``` + * The name (e.g. `'modp1'`) is taken from [RFC 2412](https://www.rfc-editor.org/rfc/rfc2412.txt) (modp1 and 2) and [RFC 3526](https://www.rfc-editor.org/rfc/rfc3526.txt): + * ```bash + * $ perl -ne 'print "$1\n" if /"(modp\d+)"/' src/node_crypto_groups.h + * modp1 # 768 bits + * modp2 # 1024 bits + * modp5 # 1536 bits + * modp14 # 2048 bits + * modp15 # etc. + * modp16 + * modp17 + * modp18 + * ``` + * @since v0.7.5 + */ + const DiffieHellmanGroup: DiffieHellmanGroupConstructor; + interface DiffieHellmanGroupConstructor { + new(name: string): DiffieHellmanGroup; + (name: string): DiffieHellmanGroup; + readonly prototype: DiffieHellmanGroup; + } + type DiffieHellmanGroup = Omit; + /** + * Creates a predefined `DiffieHellmanGroup` key exchange object. The + * supported groups are: `'modp1'`, `'modp2'`, `'modp5'` (defined in [RFC 2412](https://www.rfc-editor.org/rfc/rfc2412.txt), but see `Caveats`) and `'modp14'`, `'modp15'`,`'modp16'`, `'modp17'`, + * `'modp18'` (defined in [RFC 3526](https://www.rfc-editor.org/rfc/rfc3526.txt)). The + * returned object mimics the interface of objects created by {@link createDiffieHellman}, but will not allow changing + * the keys (with `diffieHellman.setPublicKey()`, for example). The + * advantage of using this method is that the parties do not have to + * generate nor exchange a group modulus beforehand, saving both processor + * and communication time. + * + * Example (obtaining a shared secret): + * + * ```js + * const { + * getDiffieHellman + * } = await import('crypto'); + * const alice = getDiffieHellman('modp14'); + * const bob = getDiffieHellman('modp14'); + * + * alice.generateKeys(); + * bob.generateKeys(); + * + * const aliceSecret = alice.computeSecret(bob.getPublicKey(), null, 'hex'); + * const bobSecret = bob.computeSecret(alice.getPublicKey(), null, 'hex'); + * + * // aliceSecret and bobSecret should be the same + * console.log(aliceSecret === bobSecret); + * ``` + * @since v0.7.5 + */ + function getDiffieHellman(groupName: string): DiffieHellmanGroup; + /** + * An alias for {@link getDiffieHellman} + * @since v0.9.3 + */ + function createDiffieHellmanGroup(name: string): DiffieHellmanGroup; + /** + * Provides an asynchronous Password-Based Key Derivation Function 2 (PBKDF2) + * implementation. A selected HMAC digest algorithm specified by `digest` is + * applied to derive a key of the requested byte length (`keylen`) from the`password`, `salt` and `iterations`. + * + * The supplied `callback` function is called with two arguments: `err` and`derivedKey`. If an error occurs while deriving the key, `err` will be set; + * otherwise `err` will be `null`. By default, the successfully generated`derivedKey` will be passed to the callback as a `Buffer`. An error will be + * thrown if any of the input arguments specify invalid values or types. + * + * If `digest` is `null`, `'sha1'` will be used. This behavior is deprecated, + * please specify a `digest` explicitly. + * + * The `iterations` argument must be a number set as high as possible. The + * higher the number of iterations, the more secure the derived key will be, + * but will take a longer amount of time to complete. + * + * The `salt` should be as unique as possible. It is recommended that a salt is + * random and at least 16 bytes long. 
See [NIST SP 800-132](https://nvlpubs.nist.gov/nistpubs/Legacy/SP/nistspecialpublication800-132.pdf) for details. + * + * When passing strings for `password` or `salt`, please consider `caveats when using strings as inputs to cryptographic APIs`. + * + * ```js + * const { + * pbkdf2 + * } = await import('crypto'); + * + * pbkdf2('secret', 'salt', 100000, 64, 'sha512', (err, derivedKey) => { + * if (err) throw err; + * console.log(derivedKey.toString('hex')); // '3745e48...08d59ae' + * }); + * ``` + * + * The `crypto.DEFAULT_ENCODING` property can be used to change the way the`derivedKey` is passed to the callback. This property, however, has been + * deprecated and use should be avoided. + * + * ```js + * import crypto from 'crypto'; + * crypto.DEFAULT_ENCODING = 'hex'; + * crypto.pbkdf2('secret', 'salt', 100000, 512, 'sha512', (err, derivedKey) => { + * if (err) throw err; + * console.log(derivedKey); // '3745e48...aa39b34' + * }); + * ``` + * + * An array of supported digest functions can be retrieved using {@link getHashes}. + * + * This API uses libuv's threadpool, which can have surprising and + * negative performance implications for some applications; see the `UV_THREADPOOL_SIZE` documentation for more information. + * @since v0.5.5 + */ + function pbkdf2(password: BinaryLike, salt: BinaryLike, iterations: number, keylen: number, digest: string, callback: (err: Error | null, derivedKey: Buffer) => void): void; + /** + * Provides a synchronous Password-Based Key Derivation Function 2 (PBKDF2) + * implementation. A selected HMAC digest algorithm specified by `digest` is + * applied to derive a key of the requested byte length (`keylen`) from the`password`, `salt` and `iterations`. + * + * If an error occurs an `Error` will be thrown, otherwise the derived key will be + * returned as a `Buffer`. + * + * If `digest` is `null`, `'sha1'` will be used. This behavior is deprecated, + * please specify a `digest` explicitly. + * + * The `iterations` argument must be a number set as high as possible. The + * higher the number of iterations, the more secure the derived key will be, + * but will take a longer amount of time to complete. + * + * The `salt` should be as unique as possible. It is recommended that a salt is + * random and at least 16 bytes long. See [NIST SP 800-132](https://nvlpubs.nist.gov/nistpubs/Legacy/SP/nistspecialpublication800-132.pdf) for details. + * + * When passing strings for `password` or `salt`, please consider `caveats when using strings as inputs to cryptographic APIs`. + * + * ```js + * const { + * pbkdf2Sync + * } = await import('crypto'); + * + * const key = pbkdf2Sync('secret', 'salt', 100000, 64, 'sha512'); + * console.log(key.toString('hex')); // '3745e48...08d59ae' + * ``` + * + * The `crypto.DEFAULT_ENCODING` property may be used to change the way the`derivedKey` is returned. This property, however, is deprecated and use + * should be avoided. + * + * ```js + * import crypto from 'crypto'; + * crypto.DEFAULT_ENCODING = 'hex'; + * const key = crypto.pbkdf2Sync('secret', 'salt', 100000, 512, 'sha512'); + * console.log(key); // '3745e48...aa39b34' + * ``` + * + * An array of supported digest functions can be retrieved using {@link getHashes}. + * @since v0.9.3 + */ + function pbkdf2Sync(password: BinaryLike, salt: BinaryLike, iterations: number, keylen: number, digest: string): Buffer; + /** + * Generates cryptographically strong pseudorandom data. The `size` argument + * is a number indicating the number of bytes to generate. 
+ * + * If a `callback` function is provided, the bytes are generated asynchronously + * and the `callback` function is invoked with two arguments: `err` and `buf`. + * If an error occurs, `err` will be an `Error` object; otherwise it is `null`. The`buf` argument is a `Buffer` containing the generated bytes. + * + * ```js + * // Asynchronous + * const { + * randomBytes + * } = await import('crypto'); + * + * randomBytes(256, (err, buf) => { + * if (err) throw err; + * console.log(`${buf.length} bytes of random data: ${buf.toString('hex')}`); + * }); + * ``` + * + * If the `callback` function is not provided, the random bytes are generated + * synchronously and returned as a `Buffer`. An error will be thrown if + * there is a problem generating the bytes. + * + * ```js + * // Synchronous + * const { + * randomBytes + * } = await import('crypto'); + * + * const buf = randomBytes(256); + * console.log( + * `${buf.length} bytes of random data: ${buf.toString('hex')}`); + * ``` + * + * The `crypto.randomBytes()` method will not complete until there is + * sufficient entropy available. + * This should normally never take longer than a few milliseconds. The only time + * when generating the random bytes may conceivably block for a longer period of + * time is right after boot, when the whole system is still low on entropy. + * + * This API uses libuv's threadpool, which can have surprising and + * negative performance implications for some applications; see the `UV_THREADPOOL_SIZE` documentation for more information. + * + * The asynchronous version of `crypto.randomBytes()` is carried out in a single + * threadpool request. To minimize threadpool task length variation, partition + * large `randomBytes` requests when doing so as part of fulfilling a client + * request. + * @since v0.5.8 + * @param size The number of bytes to generate. The `size` must not be larger than `2**31 - 1`. + * @return if the `callback` function is not provided. + */ + function randomBytes(size: number): Buffer; + function randomBytes(size: number, callback: (err: Error | null, buf: Buffer) => void): void; + function pseudoRandomBytes(size: number): Buffer; + function pseudoRandomBytes(size: number, callback: (err: Error | null, buf: Buffer) => void): void; + /** + * Return a random integer `n` such that `min <= n < max`. This + * implementation avoids [modulo bias](https://en.wikipedia.org/wiki/Fisher%E2%80%93Yates_shuffle#Modulo_bias). + * + * The range (`max - min`) must be less than 2^48. `min` and `max` must + * be [safe integers](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/isSafeInteger). + * + * If the `callback` function is not provided, the random integer is + * generated synchronously. + * + * ```js + * // Asynchronous + * const { + * randomInt + * } = await import('crypto'); + * + * randomInt(3, (err, n) => { + * if (err) throw err; + * console.log(`Random number chosen from (0, 1, 2): ${n}`); + * }); + * ``` + * + * ```js + * // Synchronous + * const { + * randomInt + * } = await import('crypto'); + * + * const n = randomInt(3); + * console.log(`Random number chosen from (0, 1, 2): ${n}`); + * ``` + * + * ```js + * // With `min` argument + * const { + * randomInt + * } = await import('crypto'); + * + * const n = randomInt(1, 7); + * console.log(`The dice rolled: ${n}`); + * ``` + * @since v14.10.0, v12.19.0 + * @param [min=0] Start of random range (inclusive). + * @param max End of random range (exclusive). + * @param callback `function(err, n) {}`. 
+ */ + function randomInt(max: number): number; + function randomInt(min: number, max: number): number; + function randomInt(max: number, callback: (err: Error | null, value: number) => void): void; + function randomInt(min: number, max: number, callback: (err: Error | null, value: number) => void): void; + /** + * Synchronous version of {@link randomFill}. + * + * ```js + * import { Buffer } from 'buffer'; + * const { randomFillSync } = await import('crypto'); + * + * const buf = Buffer.alloc(10); + * console.log(randomFillSync(buf).toString('hex')); + * + * randomFillSync(buf, 5); + * console.log(buf.toString('hex')); + * + * // The above is equivalent to the following: + * randomFillSync(buf, 5, 5); + * console.log(buf.toString('hex')); + * ``` + * + * Any `ArrayBuffer`, `TypedArray` or `DataView` instance may be passed as`buffer`. + * + * ```js + * import { Buffer } from 'buffer'; + * const { randomFillSync } = await import('crypto'); + * + * const a = new Uint32Array(10); + * console.log(Buffer.from(randomFillSync(a).buffer, + * a.byteOffset, a.byteLength).toString('hex')); + * + * const b = new DataView(new ArrayBuffer(10)); + * console.log(Buffer.from(randomFillSync(b).buffer, + * b.byteOffset, b.byteLength).toString('hex')); + * + * const c = new ArrayBuffer(10); + * console.log(Buffer.from(randomFillSync(c)).toString('hex')); + * ``` + * @since v7.10.0, v6.13.0 + * @param buffer Must be supplied. The size of the provided `buffer` must not be larger than `2**31 - 1`. + * @param [offset=0] + * @param [size=buffer.length - offset] + * @return The object passed as `buffer` argument. + */ + function randomFillSync(buffer: T, offset?: number, size?: number): T; + /** + * This function is similar to {@link randomBytes} but requires the first + * argument to be a `Buffer` that will be filled. It also + * requires that a callback is passed in. + * + * If the `callback` function is not provided, an error will be thrown. + * + * ```js + * import { Buffer } from 'buffer'; + * const { randomFill } = await import('crypto'); + * + * const buf = Buffer.alloc(10); + * randomFill(buf, (err, buf) => { + * if (err) throw err; + * console.log(buf.toString('hex')); + * }); + * + * randomFill(buf, 5, (err, buf) => { + * if (err) throw err; + * console.log(buf.toString('hex')); + * }); + * + * // The above is equivalent to the following: + * randomFill(buf, 5, 5, (err, buf) => { + * if (err) throw err; + * console.log(buf.toString('hex')); + * }); + * ``` + * + * Any `ArrayBuffer`, `TypedArray`, or `DataView` instance may be passed as`buffer`. + * + * While this includes instances of `Float32Array` and `Float64Array`, this + * function should not be used to generate random floating-point numbers. The + * result may contain `+Infinity`, `-Infinity`, and `NaN`, and even if the array + * contains finite numbers only, they are not drawn from a uniform random + * distribution and have no meaningful lower or upper bounds. 
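To complement the warning above about floating-point typed arrays, here is a minimal sketch of one common workaround: fill an integer typed array and scale the values. The array size is arbitrary, and the resulting doubles carry only 32 bits of randomness.

```js
// Sketch: derive uniform doubles in [0, 1) from random 32-bit integers
// instead of filling a Float64Array directly.
const { randomFillSync } = await import('crypto');

const ints = new Uint32Array(10);
randomFillSync(ints);

// Each value is uniform over [0, 2**32), so dividing by 2**32 yields [0, 1).
const floats = Array.from(ints, (n) => n / 2 ** 32);
console.log(floats);
```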
+ * + * ```js + * import { Buffer } from 'buffer'; + * const { randomFill } = await import('crypto'); + * + * const a = new Uint32Array(10); + * randomFill(a, (err, buf) => { + * if (err) throw err; + * console.log(Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength) + * .toString('hex')); + * }); + * + * const b = new DataView(new ArrayBuffer(10)); + * randomFill(b, (err, buf) => { + * if (err) throw err; + * console.log(Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength) + * .toString('hex')); + * }); + * + * const c = new ArrayBuffer(10); + * randomFill(c, (err, buf) => { + * if (err) throw err; + * console.log(Buffer.from(buf).toString('hex')); + * }); + * ``` + * + * This API uses libuv's threadpool, which can have surprising and + * negative performance implications for some applications; see the `UV_THREADPOOL_SIZE` documentation for more information. + * + * The asynchronous version of `crypto.randomFill()` is carried out in a single + * threadpool request. To minimize threadpool task length variation, partition + * large `randomFill` requests when doing so as part of fulfilling a client + * request. + * @since v7.10.0, v6.13.0 + * @param buffer Must be supplied. The size of the provided `buffer` must not be larger than `2**31 - 1`. + * @param [offset=0] + * @param [size=buffer.length - offset] + * @param callback `function(err, buf) {}`. + */ + function randomFill(buffer: T, callback: (err: Error | null, buf: T) => void): void; + function randomFill(buffer: T, offset: number, callback: (err: Error | null, buf: T) => void): void; + function randomFill(buffer: T, offset: number, size: number, callback: (err: Error | null, buf: T) => void): void; + interface ScryptOptions { + cost?: number | undefined; + blockSize?: number | undefined; + parallelization?: number | undefined; + N?: number | undefined; + r?: number | undefined; + p?: number | undefined; + maxmem?: number | undefined; + } + /** + * Provides an asynchronous [scrypt](https://en.wikipedia.org/wiki/Scrypt) implementation. Scrypt is a password-based + * key derivation function that is designed to be expensive computationally and + * memory-wise in order to make brute-force attacks unrewarding. + * + * The `salt` should be as unique as possible. It is recommended that a salt is + * random and at least 16 bytes long. See [NIST SP 800-132](https://nvlpubs.nist.gov/nistpubs/Legacy/SP/nistspecialpublication800-132.pdf) for details. + * + * When passing strings for `password` or `salt`, please consider `caveats when using strings as inputs to cryptographic APIs`. + * + * The `callback` function is called with two arguments: `err` and `derivedKey`.`err` is an exception object when key derivation fails, otherwise `err` is`null`. `derivedKey` is passed to the + * callback as a `Buffer`. + * + * An exception is thrown when any of the input arguments specify invalid values + * or types. + * + * ```js + * const { + * scrypt + * } = await import('crypto'); + * + * // Using the factory defaults. + * scrypt('password', 'salt', 64, (err, derivedKey) => { + * if (err) throw err; + * console.log(derivedKey.toString('hex')); // '3745e48...08d59ae' + * }); + * // Using a custom N parameter. Must be a power of two. 
+ * scrypt('password', 'salt', 64, { N: 1024 }, (err, derivedKey) => { + * if (err) throw err; + * console.log(derivedKey.toString('hex')); // '3745e48...aa39b34' + * }); + * ``` + * @since v10.5.0 + */ + function scrypt(password: BinaryLike, salt: BinaryLike, keylen: number, callback: (err: Error | null, derivedKey: Buffer) => void): void; + function scrypt(password: BinaryLike, salt: BinaryLike, keylen: number, options: ScryptOptions, callback: (err: Error | null, derivedKey: Buffer) => void): void; + /** + * Provides a synchronous [scrypt](https://en.wikipedia.org/wiki/Scrypt) implementation. Scrypt is a password-based + * key derivation function that is designed to be expensive computationally and + * memory-wise in order to make brute-force attacks unrewarding. + * + * The `salt` should be as unique as possible. It is recommended that a salt is + * random and at least 16 bytes long. See [NIST SP 800-132](https://nvlpubs.nist.gov/nistpubs/Legacy/SP/nistspecialpublication800-132.pdf) for details. + * + * When passing strings for `password` or `salt`, please consider `caveats when using strings as inputs to cryptographic APIs`. + * + * An exception is thrown when key derivation fails, otherwise the derived key is + * returned as a `Buffer`. + * + * An exception is thrown when any of the input arguments specify invalid values + * or types. + * + * ```js + * const { + * scryptSync + * } = await import('crypto'); + * // Using the factory defaults. + * + * const key1 = scryptSync('password', 'salt', 64); + * console.log(key1.toString('hex')); // '3745e48...08d59ae' + * // Using a custom N parameter. Must be a power of two. + * const key2 = scryptSync('password', 'salt', 64, { N: 1024 }); + * console.log(key2.toString('hex')); // '3745e48...aa39b34' + * ``` + * @since v10.5.0 + */ + function scryptSync(password: BinaryLike, salt: BinaryLike, keylen: number, options?: ScryptOptions): Buffer; + interface RsaPublicKey { + key: KeyLike; + padding?: number | undefined; + } + interface RsaPrivateKey { + key: KeyLike; + passphrase?: string | undefined; + /** + * @default 'sha1' + */ + oaepHash?: string | undefined; + oaepLabel?: NodeJS.TypedArray | undefined; + padding?: number | undefined; + } + /** + * Encrypts the content of `buffer` with `key` and returns a new `Buffer` with encrypted content. The returned data can be decrypted using + * the corresponding private key, for example using {@link privateDecrypt}. + * + * If `key` is not a `KeyObject`, this function behaves as if`key` had been passed to {@link createPublicKey}. If it is an + * object, the `padding` property can be passed. Otherwise, this function uses`RSA_PKCS1_OAEP_PADDING`. + * + * Because RSA public keys can be derived from private keys, a private key may + * be passed instead of a public key. + * @since v0.11.14 + */ + function publicEncrypt(key: RsaPublicKey | RsaPrivateKey | KeyLike, buffer: NodeJS.ArrayBufferView): Buffer; + /** + * Decrypts `buffer` with `key`.`buffer` was previously encrypted using + * the corresponding private key, for example using {@link privateEncrypt}. + * + * If `key` is not a `KeyObject`, this function behaves as if`key` had been passed to {@link createPublicKey}. If it is an + * object, the `padding` property can be passed. Otherwise, this function uses`RSA_PKCS1_PADDING`. + * + * Because RSA public keys can be derived from private keys, a private key may + * be passed instead of a public key. 
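The RSA encrypt/decrypt helpers in this block carry no inline example, so here is a minimal `publicEncrypt`/`privateDecrypt` round trip; the 2048-bit modulus and the plaintext are arbitrary values chosen only for illustration.

```js
// Minimal RSA-OAEP round trip; a 2048-bit key is used only to keep the example fast.
import { Buffer } from 'buffer';
const { generateKeyPairSync, publicEncrypt, privateDecrypt } = await import('crypto');

const { publicKey, privateKey } = generateKeyPairSync('rsa', { modulusLength: 2048 });

// publicEncrypt defaults to RSA_PKCS1_OAEP_PADDING, which privateDecrypt also uses by default.
const ciphertext = publicEncrypt(publicKey, Buffer.from('hello'));
const plaintext = privateDecrypt(privateKey, ciphertext);

console.log(plaintext.toString()); // 'hello'
```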
+ * @since v1.1.0 + */ + function publicDecrypt(key: RsaPublicKey | RsaPrivateKey | KeyLike, buffer: NodeJS.ArrayBufferView): Buffer; + /** + * Decrypts `buffer` with `privateKey`. `buffer` was previously encrypted using + * the corresponding public key, for example using {@link publicEncrypt}. + * + * If `privateKey` is not a `KeyObject`, this function behaves as if`privateKey` had been passed to {@link createPrivateKey}. If it is an + * object, the `padding` property can be passed. Otherwise, this function uses`RSA_PKCS1_OAEP_PADDING`. + * @since v0.11.14 + */ + function privateDecrypt(privateKey: RsaPrivateKey | KeyLike, buffer: NodeJS.ArrayBufferView): Buffer; + /** + * Encrypts `buffer` with `privateKey`. The returned data can be decrypted using + * the corresponding public key, for example using {@link publicDecrypt}. + * + * If `privateKey` is not a `KeyObject`, this function behaves as if`privateKey` had been passed to {@link createPrivateKey}. If it is an + * object, the `padding` property can be passed. Otherwise, this function uses`RSA_PKCS1_PADDING`. + * @since v1.1.0 + */ + function privateEncrypt(privateKey: RsaPrivateKey | KeyLike, buffer: NodeJS.ArrayBufferView): Buffer; + /** + * ```js + * const { + * getCiphers + * } = await import('crypto'); + * + * console.log(getCiphers()); // ['aes-128-cbc', 'aes-128-ccm', ...] + * ``` + * @since v0.9.3 + * @return An array with the names of the supported cipher algorithms. + */ + function getCiphers(): string[]; + /** + * ```js + * const { + * getCurves + * } = await import('crypto'); + * + * console.log(getCurves()); // ['Oakley-EC2N-3', 'Oakley-EC2N-4', ...] + * ``` + * @since v2.3.0 + * @return An array with the names of the supported elliptic curves. + */ + function getCurves(): string[]; + /** + * @since v10.0.0 + * @return `1` if and only if a FIPS compliant crypto provider is currently in use, `0` otherwise. A future semver-major release may change the return type of this API to a {boolean}. + */ + function getFips(): 1 | 0; + /** + * Enables the FIPS compliant crypto provider in a FIPS-enabled Node.js build. Throws an error if FIPS mode is not available. + * @since v10.0.0 + * @param bool `true` to enable FIPS mode. + */ + function setFips(bool: boolean): void; + /** + * ```js + * const { + * getHashes + * } = await import('crypto'); + * + * console.log(getHashes()); // ['DSA', 'DSA-SHA', 'DSA-SHA1', ...] + * ``` + * @since v0.9.3 + * @return An array of the names of the supported hash algorithms, such as `'RSA-SHA256'`. Hash algorithms are also called "digest" algorithms. + */ + function getHashes(): string[]; + /** + * The `ECDH` class is a utility for creating Elliptic Curve Diffie-Hellman (ECDH) + * key exchanges. + * + * Instances of the `ECDH` class can be created using the {@link createECDH} function. + * + * ```js + * import assert from 'assert'; + * + * const { + * createECDH + * } = await import('crypto'); + * + * // Generate Alice's keys... + * const alice = createECDH('secp521r1'); + * const aliceKey = alice.generateKeys(); + * + * // Generate Bob's keys... + * const bob = createECDH('secp521r1'); + * const bobKey = bob.generateKeys(); + * + * // Exchange and generate the secret... 
+ * const aliceSecret = alice.computeSecret(bobKey); + * const bobSecret = bob.computeSecret(aliceKey); + * + * assert.strictEqual(aliceSecret.toString('hex'), bobSecret.toString('hex')); + * // OK + * ``` + * @since v0.11.14 + */ + class ECDH { + private constructor(); + /** + * Converts the EC Diffie-Hellman public key specified by `key` and `curve` to the + * format specified by `format`. The `format` argument specifies point encoding + * and can be `'compressed'`, `'uncompressed'` or `'hybrid'`. The supplied key is + * interpreted using the specified `inputEncoding`, and the returned key is encoded + * using the specified `outputEncoding`. + * + * Use {@link getCurves} to obtain a list of available curve names. + * On recent OpenSSL releases, `openssl ecparam -list_curves` will also display + * the name and description of each available elliptic curve. + * + * If `format` is not specified the point will be returned in `'uncompressed'`format. + * + * If the `inputEncoding` is not provided, `key` is expected to be a `Buffer`,`TypedArray`, or `DataView`. + * + * Example (uncompressing a key): + * + * ```js + * const { + * createECDH, + * ECDH + * } = await import('crypto'); + * + * const ecdh = createECDH('secp256k1'); + * ecdh.generateKeys(); + * + * const compressedKey = ecdh.getPublicKey('hex', 'compressed'); + * + * const uncompressedKey = ECDH.convertKey(compressedKey, + * 'secp256k1', + * 'hex', + * 'hex', + * 'uncompressed'); + * + * // The converted key and the uncompressed public key should be the same + * console.log(uncompressedKey === ecdh.getPublicKey('hex')); + * ``` + * @since v10.0.0 + * @param inputEncoding The `encoding` of the `key` string. + * @param outputEncoding The `encoding` of the return value. + * @param [format='uncompressed'] + */ + static convertKey( + key: BinaryLike, + curve: string, + inputEncoding?: BinaryToTextEncoding, + outputEncoding?: 'latin1' | 'hex' | 'base64' | 'base64url', + format?: 'uncompressed' | 'compressed' | 'hybrid' + ): Buffer | string; + /** + * Generates private and public EC Diffie-Hellman key values, and returns + * the public key in the specified `format` and `encoding`. This key should be + * transferred to the other party. + * + * The `format` argument specifies point encoding and can be `'compressed'` or`'uncompressed'`. If `format` is not specified, the point will be returned in`'uncompressed'` format. + * + * If `encoding` is provided a string is returned; otherwise a `Buffer` is returned. + * @since v0.11.14 + * @param encoding The `encoding` of the return value. + * @param [format='uncompressed'] + */ + generateKeys(): Buffer; + generateKeys(encoding: BinaryToTextEncoding, format?: ECDHKeyFormat): string; + /** + * Computes the shared secret using `otherPublicKey` as the other + * party's public key and returns the computed shared secret. The supplied + * key is interpreted using specified `inputEncoding`, and the returned secret + * is encoded using the specified `outputEncoding`. + * If the `inputEncoding` is not + * provided, `otherPublicKey` is expected to be a `Buffer`, `TypedArray`, or`DataView`. + * + * If `outputEncoding` is given a string will be returned; otherwise a `Buffer` is returned. + * + * `ecdh.computeSecret` will throw an`ERR_CRYPTO_ECDH_INVALID_PUBLIC_KEY` error when `otherPublicKey`lies outside of the elliptic curve. Since `otherPublicKey` is + * usually supplied from a remote user over an insecure network, + * be sure to handle this exception accordingly. 
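Since `otherPublicKey` typically arrives from an untrusted peer, a minimal sketch of handling the `ERR_CRYPTO_ECDH_INVALID_PUBLIC_KEY` error mentioned above might look like the following; the curve name and hex-encoded input are assumptions made for illustration.

```js
// Sketch: treat the remote public key as untrusted input and catch the
// ERR_CRYPTO_ECDH_INVALID_PUBLIC_KEY error described above.
const { createECDH } = await import('crypto');

const ecdh = createECDH('secp256k1');
ecdh.generateKeys();

function computeSharedSecret(remotePublicKeyHex) {
  try {
    return ecdh.computeSecret(remotePublicKeyHex, 'hex');
  } catch (err) {
    if (err.code === 'ERR_CRYPTO_ECDH_INVALID_PUBLIC_KEY') {
      return null; // reject the handshake instead of crashing
    }
    throw err;
  }
}
```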
+ * @since v0.11.14 + * @param inputEncoding The `encoding` of the `otherPublicKey` string. + * @param outputEncoding The `encoding` of the return value. + */ + computeSecret(otherPublicKey: NodeJS.ArrayBufferView): Buffer; + computeSecret(otherPublicKey: string, inputEncoding: BinaryToTextEncoding): Buffer; + computeSecret(otherPublicKey: NodeJS.ArrayBufferView, outputEncoding: BinaryToTextEncoding): string; + computeSecret(otherPublicKey: string, inputEncoding: BinaryToTextEncoding, outputEncoding: BinaryToTextEncoding): string; + /** + * If `encoding` is specified, a string is returned; otherwise a `Buffer` is + * returned. + * @since v0.11.14 + * @param encoding The `encoding` of the return value. + * @return The EC Diffie-Hellman in the specified `encoding`. + */ + getPrivateKey(): Buffer; + getPrivateKey(encoding: BinaryToTextEncoding): string; + /** + * The `format` argument specifies point encoding and can be `'compressed'` or`'uncompressed'`. If `format` is not specified the point will be returned in`'uncompressed'` format. + * + * If `encoding` is specified, a string is returned; otherwise a `Buffer` is + * returned. + * @since v0.11.14 + * @param [encoding] The `encoding` of the return value. + * @param [format='uncompressed'] + * @return The EC Diffie-Hellman public key in the specified `encoding` and `format`. + */ + getPublicKey(encoding?: null, format?: ECDHKeyFormat): Buffer; + getPublicKey(encoding: BinaryToTextEncoding, format?: ECDHKeyFormat): string; + /** + * Sets the EC Diffie-Hellman private key. + * If `encoding` is provided, `privateKey` is expected + * to be a string; otherwise `privateKey` is expected to be a `Buffer`,`TypedArray`, or `DataView`. + * + * If `privateKey` is not valid for the curve specified when the `ECDH` object was + * created, an error is thrown. Upon setting the private key, the associated + * public point (key) is also generated and set in the `ECDH` object. + * @since v0.11.14 + * @param encoding The `encoding` of the `privateKey` string. + */ + setPrivateKey(privateKey: NodeJS.ArrayBufferView): void; + setPrivateKey(privateKey: string, encoding: BinaryToTextEncoding): void; + } + /** + * Creates an Elliptic Curve Diffie-Hellman (`ECDH`) key exchange object using a + * predefined curve specified by the `curveName` string. Use {@link getCurves} to obtain a list of available curve names. On recent + * OpenSSL releases, `openssl ecparam -list_curves` will also display the name + * and description of each available elliptic curve. + * @since v0.11.14 + */ + function createECDH(curveName: string): ECDH; + /** + * This function is based on a constant-time algorithm. + * Returns true if `a` is equal to `b`, without leaking timing information that + * would allow an attacker to guess one of the values. This is suitable for + * comparing HMAC digests or secret values like authentication cookies or [capability urls](https://www.w3.org/TR/capability-urls/). + * + * `a` and `b` must both be `Buffer`s, `TypedArray`s, or `DataView`s, and they + * must have the same byte length. An error is thrown if `a` and `b` have + * different byte lengths. + * + * If at least one of `a` and `b` is a `TypedArray` with more than one byte per + * entry, such as `Uint16Array`, the result will be computed using the platform + * byte order. + * + * Use of `crypto.timingSafeEqual` does not guarantee that the _surrounding_ code + * is timing-safe. Care should be taken to ensure that the surrounding code does + * not introduce timing vulnerabilities. 
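A minimal sketch of the intended use of `timingSafeEqual`, comparing two HMAC tags of equal length; the secret and message are placeholders.

```js
// Sketch: constant-time comparison of an expected and a received HMAC tag.
const { createHmac, timingSafeEqual } = await import('crypto');

const secret = 'shared-secret';
const expected = createHmac('sha256', secret).update('message body').digest();
const received = createHmac('sha256', secret).update('message body').digest();

// timingSafeEqual throws if the lengths differ, so check that first.
const ok = expected.length === received.length && timingSafeEqual(expected, received);
console.log(ok); // true
```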
+ * @since v6.6.0 + */ + function timingSafeEqual(a: NodeJS.ArrayBufferView, b: NodeJS.ArrayBufferView): boolean; + /** @deprecated since v10.0.0 */ + const DEFAULT_ENCODING: BufferEncoding; + type KeyType = 'rsa' | 'rsa-pss' | 'dsa' | 'ec' | 'ed25519' | 'ed448' | 'x25519' | 'x448'; + type KeyFormat = 'pem' | 'der'; + interface BasePrivateKeyEncodingOptions { + format: T; + cipher?: string | undefined; + passphrase?: string | undefined; + } + interface KeyPairKeyObjectResult { + publicKey: KeyObject; + privateKey: KeyObject; + } + interface ED25519KeyPairKeyObjectOptions {} + interface ED448KeyPairKeyObjectOptions {} + interface X25519KeyPairKeyObjectOptions {} + interface X448KeyPairKeyObjectOptions {} + interface ECKeyPairKeyObjectOptions { + /** + * Name of the curve to use + */ + namedCurve: string; + } + interface RSAKeyPairKeyObjectOptions { + /** + * Key size in bits + */ + modulusLength: number; + /** + * Public exponent + * @default 0x10001 + */ + publicExponent?: number | undefined; + } + interface RSAPSSKeyPairKeyObjectOptions { + /** + * Key size in bits + */ + modulusLength: number; + /** + * Public exponent + * @default 0x10001 + */ + publicExponent?: number | undefined; + /** + * Name of the message digest + */ + hashAlgorithm?: string; + /** + * Name of the message digest used by MGF1 + */ + mgf1HashAlgorithm?: string; + /** + * Minimal salt length in bytes + */ + saltLength?: string; + } + interface DSAKeyPairKeyObjectOptions { + /** + * Key size in bits + */ + modulusLength: number; + /** + * Size of q in bits + */ + divisorLength: number; + } + interface RSAKeyPairOptions { + /** + * Key size in bits + */ + modulusLength: number; + /** + * Public exponent + * @default 0x10001 + */ + publicExponent?: number | undefined; + publicKeyEncoding: { + type: 'pkcs1' | 'spki'; + format: PubF; + }; + privateKeyEncoding: BasePrivateKeyEncodingOptions & { + type: 'pkcs1' | 'pkcs8'; + }; + } + interface RSAPSSKeyPairOptions { + /** + * Key size in bits + */ + modulusLength: number; + /** + * Public exponent + * @default 0x10001 + */ + publicExponent?: number | undefined; + /** + * Name of the message digest + */ + hashAlgorithm?: string; + /** + * Name of the message digest used by MGF1 + */ + mgf1HashAlgorithm?: string; + /** + * Minimal salt length in bytes + */ + saltLength?: string; + publicKeyEncoding: { + type: 'spki'; + format: PubF; + }; + privateKeyEncoding: BasePrivateKeyEncodingOptions & { + type: 'pkcs8'; + }; + } + interface DSAKeyPairOptions { + /** + * Key size in bits + */ + modulusLength: number; + /** + * Size of q in bits + */ + divisorLength: number; + publicKeyEncoding: { + type: 'spki'; + format: PubF; + }; + privateKeyEncoding: BasePrivateKeyEncodingOptions & { + type: 'pkcs8'; + }; + } + interface ECKeyPairOptions { + /** + * Name of the curve to use. 
+     */
+        namedCurve: string;
+        publicKeyEncoding: {
+            type: 'pkcs1' | 'spki';
+            format: PubF;
+        };
+        privateKeyEncoding: BasePrivateKeyEncodingOptions<PrivF> & {
+            type: 'sec1' | 'pkcs8';
+        };
+    }
+    interface ED25519KeyPairOptions<PubF extends KeyFormat, PrivF extends KeyFormat> {
+        publicKeyEncoding: {
+            type: 'spki';
+            format: PubF;
+        };
+        privateKeyEncoding: BasePrivateKeyEncodingOptions<PrivF> & {
+            type: 'pkcs8';
+        };
+    }
+    interface ED448KeyPairOptions<PubF extends KeyFormat, PrivF extends KeyFormat> {
+        publicKeyEncoding: {
+            type: 'spki';
+            format: PubF;
+        };
+        privateKeyEncoding: BasePrivateKeyEncodingOptions<PrivF> & {
+            type: 'pkcs8';
+        };
+    }
+    interface X25519KeyPairOptions<PubF extends KeyFormat, PrivF extends KeyFormat> {
+        publicKeyEncoding: {
+            type: 'spki';
+            format: PubF;
+        };
+        privateKeyEncoding: BasePrivateKeyEncodingOptions<PrivF> & {
+            type: 'pkcs8';
+        };
+    }
+    interface X448KeyPairOptions<PubF extends KeyFormat, PrivF extends KeyFormat> {
+        publicKeyEncoding: {
+            type: 'spki';
+            format: PubF;
+        };
+        privateKeyEncoding: BasePrivateKeyEncodingOptions<PrivF> & {
+            type: 'pkcs8';
+        };
+    }
+    interface KeyPairSyncResult<T1 extends string | Buffer, T2 extends string | Buffer> {
+        publicKey: T1;
+        privateKey: T2;
+    }
+    /**
+     * Generates a new asymmetric key pair of the given `type`. RSA, RSA-PSS, DSA, EC,
+     * Ed25519, Ed448, X25519, X448, and DH are currently supported.
+     *
+     * If a `publicKeyEncoding` or `privateKeyEncoding` was specified, this function
+     * behaves as if `keyObject.export()` had been called on its result. Otherwise,
+     * the respective part of the key is returned as a `KeyObject`.
+     *
+     * When encoding public keys, it is recommended to use `'spki'`. When encoding
+     * private keys, it is recommended to use `'pkcs8'` with a strong passphrase,
+     * and to keep the passphrase confidential.
+     *
+     * ```js
+     * const {
+     *   generateKeyPairSync
+     * } = await import('crypto');
+     *
+     * const {
+     *   publicKey,
+     *   privateKey,
+     * } = generateKeyPairSync('rsa', {
+     *   modulusLength: 4096,
+     *   publicKeyEncoding: {
+     *     type: 'spki',
+     *     format: 'pem'
+     *   },
+     *   privateKeyEncoding: {
+     *     type: 'pkcs8',
+     *     format: 'pem',
+     *     cipher: 'aes-256-cbc',
+     *     passphrase: 'top secret'
+     *   }
+     * });
+     * ```
+     *
+     * The return value `{ publicKey, privateKey }` represents the generated key pair.
+     * When PEM encoding was selected, the respective key will be a string, otherwise
+     * it will be a buffer containing the data encoded as DER.
+     * @since v10.12.0
+     * @param type Must be `'rsa'`, `'rsa-pss'`, `'dsa'`, `'ec'`, `'ed25519'`, `'ed448'`, `'x25519'`, `'x448'`, or `'dh'`.
+ */ + function generateKeyPairSync(type: 'rsa', options: RSAKeyPairOptions<'pem', 'pem'>): KeyPairSyncResult; + function generateKeyPairSync(type: 'rsa', options: RSAKeyPairOptions<'pem', 'der'>): KeyPairSyncResult; + function generateKeyPairSync(type: 'rsa', options: RSAKeyPairOptions<'der', 'pem'>): KeyPairSyncResult; + function generateKeyPairSync(type: 'rsa', options: RSAKeyPairOptions<'der', 'der'>): KeyPairSyncResult; + function generateKeyPairSync(type: 'rsa', options: RSAKeyPairKeyObjectOptions): KeyPairKeyObjectResult; + function generateKeyPairSync(type: 'rsa-pss', options: RSAPSSKeyPairOptions<'pem', 'pem'>): KeyPairSyncResult; + function generateKeyPairSync(type: 'rsa-pss', options: RSAPSSKeyPairOptions<'pem', 'der'>): KeyPairSyncResult; + function generateKeyPairSync(type: 'rsa-pss', options: RSAPSSKeyPairOptions<'der', 'pem'>): KeyPairSyncResult; + function generateKeyPairSync(type: 'rsa-pss', options: RSAPSSKeyPairOptions<'der', 'der'>): KeyPairSyncResult; + function generateKeyPairSync(type: 'rsa-pss', options: RSAPSSKeyPairKeyObjectOptions): KeyPairKeyObjectResult; + function generateKeyPairSync(type: 'dsa', options: DSAKeyPairOptions<'pem', 'pem'>): KeyPairSyncResult; + function generateKeyPairSync(type: 'dsa', options: DSAKeyPairOptions<'pem', 'der'>): KeyPairSyncResult; + function generateKeyPairSync(type: 'dsa', options: DSAKeyPairOptions<'der', 'pem'>): KeyPairSyncResult; + function generateKeyPairSync(type: 'dsa', options: DSAKeyPairOptions<'der', 'der'>): KeyPairSyncResult; + function generateKeyPairSync(type: 'dsa', options: DSAKeyPairKeyObjectOptions): KeyPairKeyObjectResult; + function generateKeyPairSync(type: 'ec', options: ECKeyPairOptions<'pem', 'pem'>): KeyPairSyncResult; + function generateKeyPairSync(type: 'ec', options: ECKeyPairOptions<'pem', 'der'>): KeyPairSyncResult; + function generateKeyPairSync(type: 'ec', options: ECKeyPairOptions<'der', 'pem'>): KeyPairSyncResult; + function generateKeyPairSync(type: 'ec', options: ECKeyPairOptions<'der', 'der'>): KeyPairSyncResult; + function generateKeyPairSync(type: 'ec', options: ECKeyPairKeyObjectOptions): KeyPairKeyObjectResult; + function generateKeyPairSync(type: 'ed25519', options: ED25519KeyPairOptions<'pem', 'pem'>): KeyPairSyncResult; + function generateKeyPairSync(type: 'ed25519', options: ED25519KeyPairOptions<'pem', 'der'>): KeyPairSyncResult; + function generateKeyPairSync(type: 'ed25519', options: ED25519KeyPairOptions<'der', 'pem'>): KeyPairSyncResult; + function generateKeyPairSync(type: 'ed25519', options: ED25519KeyPairOptions<'der', 'der'>): KeyPairSyncResult; + function generateKeyPairSync(type: 'ed25519', options?: ED25519KeyPairKeyObjectOptions): KeyPairKeyObjectResult; + function generateKeyPairSync(type: 'ed448', options: ED448KeyPairOptions<'pem', 'pem'>): KeyPairSyncResult; + function generateKeyPairSync(type: 'ed448', options: ED448KeyPairOptions<'pem', 'der'>): KeyPairSyncResult; + function generateKeyPairSync(type: 'ed448', options: ED448KeyPairOptions<'der', 'pem'>): KeyPairSyncResult; + function generateKeyPairSync(type: 'ed448', options: ED448KeyPairOptions<'der', 'der'>): KeyPairSyncResult; + function generateKeyPairSync(type: 'ed448', options?: ED448KeyPairKeyObjectOptions): KeyPairKeyObjectResult; + function generateKeyPairSync(type: 'x25519', options: X25519KeyPairOptions<'pem', 'pem'>): KeyPairSyncResult; + function generateKeyPairSync(type: 'x25519', options: X25519KeyPairOptions<'pem', 'der'>): KeyPairSyncResult; + function generateKeyPairSync(type: 'x25519', options: 
X25519KeyPairOptions<'der', 'pem'>): KeyPairSyncResult; + function generateKeyPairSync(type: 'x25519', options: X25519KeyPairOptions<'der', 'der'>): KeyPairSyncResult; + function generateKeyPairSync(type: 'x25519', options?: X25519KeyPairKeyObjectOptions): KeyPairKeyObjectResult; + function generateKeyPairSync(type: 'x448', options: X448KeyPairOptions<'pem', 'pem'>): KeyPairSyncResult; + function generateKeyPairSync(type: 'x448', options: X448KeyPairOptions<'pem', 'der'>): KeyPairSyncResult; + function generateKeyPairSync(type: 'x448', options: X448KeyPairOptions<'der', 'pem'>): KeyPairSyncResult; + function generateKeyPairSync(type: 'x448', options: X448KeyPairOptions<'der', 'der'>): KeyPairSyncResult; + function generateKeyPairSync(type: 'x448', options?: X448KeyPairKeyObjectOptions): KeyPairKeyObjectResult; + /** + * Generates a new asymmetric key pair of the given `type`. RSA, RSA-PSS, DSA, EC, + * Ed25519, Ed448, X25519, X448, and DH are currently supported. + * + * If a `publicKeyEncoding` or `privateKeyEncoding` was specified, this function + * behaves as if `keyObject.export()` had been called on its result. Otherwise, + * the respective part of the key is returned as a `KeyObject`. + * + * It is recommended to encode public keys as `'spki'` and private keys as`'pkcs8'` with encryption for long-term storage: + * + * ```js + * const { + * generateKeyPair + * } = await import('crypto'); + * + * generateKeyPair('rsa', { + * modulusLength: 4096, + * publicKeyEncoding: { + * type: 'spki', + * format: 'pem' + * }, + * privateKeyEncoding: { + * type: 'pkcs8', + * format: 'pem', + * cipher: 'aes-256-cbc', + * passphrase: 'top secret' + * } + * }, (err, publicKey, privateKey) => { + * // Handle errors and use the generated key pair. + * }); + * ``` + * + * On completion, `callback` will be called with `err` set to `undefined` and`publicKey` / `privateKey` representing the generated key pair. + * + * If this method is invoked as its `util.promisify()` ed version, it returns + * a `Promise` for an `Object` with `publicKey` and `privateKey` properties. + * @since v10.12.0 + * @param type Must be `'rsa'`, `'rsa-pss'`, `'dsa'`, `'ec'`, `'ed25519'`, `'ed448'`, `'x25519'`, `'x448'`, or `'dh'`. 
+ */ + function generateKeyPair(type: 'rsa', options: RSAKeyPairOptions<'pem', 'pem'>, callback: (err: Error | null, publicKey: string, privateKey: string) => void): void; + function generateKeyPair(type: 'rsa', options: RSAKeyPairOptions<'pem', 'der'>, callback: (err: Error | null, publicKey: string, privateKey: Buffer) => void): void; + function generateKeyPair(type: 'rsa', options: RSAKeyPairOptions<'der', 'pem'>, callback: (err: Error | null, publicKey: Buffer, privateKey: string) => void): void; + function generateKeyPair(type: 'rsa', options: RSAKeyPairOptions<'der', 'der'>, callback: (err: Error | null, publicKey: Buffer, privateKey: Buffer) => void): void; + function generateKeyPair(type: 'rsa', options: RSAKeyPairKeyObjectOptions, callback: (err: Error | null, publicKey: KeyObject, privateKey: KeyObject) => void): void; + function generateKeyPair(type: 'rsa-pss', options: RSAPSSKeyPairOptions<'pem', 'pem'>, callback: (err: Error | null, publicKey: string, privateKey: string) => void): void; + function generateKeyPair(type: 'rsa-pss', options: RSAPSSKeyPairOptions<'pem', 'der'>, callback: (err: Error | null, publicKey: string, privateKey: Buffer) => void): void; + function generateKeyPair(type: 'rsa-pss', options: RSAPSSKeyPairOptions<'der', 'pem'>, callback: (err: Error | null, publicKey: Buffer, privateKey: string) => void): void; + function generateKeyPair(type: 'rsa-pss', options: RSAPSSKeyPairOptions<'der', 'der'>, callback: (err: Error | null, publicKey: Buffer, privateKey: Buffer) => void): void; + function generateKeyPair(type: 'rsa-pss', options: RSAPSSKeyPairKeyObjectOptions, callback: (err: Error | null, publicKey: KeyObject, privateKey: KeyObject) => void): void; + function generateKeyPair(type: 'dsa', options: DSAKeyPairOptions<'pem', 'pem'>, callback: (err: Error | null, publicKey: string, privateKey: string) => void): void; + function generateKeyPair(type: 'dsa', options: DSAKeyPairOptions<'pem', 'der'>, callback: (err: Error | null, publicKey: string, privateKey: Buffer) => void): void; + function generateKeyPair(type: 'dsa', options: DSAKeyPairOptions<'der', 'pem'>, callback: (err: Error | null, publicKey: Buffer, privateKey: string) => void): void; + function generateKeyPair(type: 'dsa', options: DSAKeyPairOptions<'der', 'der'>, callback: (err: Error | null, publicKey: Buffer, privateKey: Buffer) => void): void; + function generateKeyPair(type: 'dsa', options: DSAKeyPairKeyObjectOptions, callback: (err: Error | null, publicKey: KeyObject, privateKey: KeyObject) => void): void; + function generateKeyPair(type: 'ec', options: ECKeyPairOptions<'pem', 'pem'>, callback: (err: Error | null, publicKey: string, privateKey: string) => void): void; + function generateKeyPair(type: 'ec', options: ECKeyPairOptions<'pem', 'der'>, callback: (err: Error | null, publicKey: string, privateKey: Buffer) => void): void; + function generateKeyPair(type: 'ec', options: ECKeyPairOptions<'der', 'pem'>, callback: (err: Error | null, publicKey: Buffer, privateKey: string) => void): void; + function generateKeyPair(type: 'ec', options: ECKeyPairOptions<'der', 'der'>, callback: (err: Error | null, publicKey: Buffer, privateKey: Buffer) => void): void; + function generateKeyPair(type: 'ec', options: ECKeyPairKeyObjectOptions, callback: (err: Error | null, publicKey: KeyObject, privateKey: KeyObject) => void): void; + function generateKeyPair(type: 'ed25519', options: ED25519KeyPairOptions<'pem', 'pem'>, callback: (err: Error | null, publicKey: string, privateKey: string) => void): void; + 
function generateKeyPair(type: 'ed25519', options: ED25519KeyPairOptions<'pem', 'der'>, callback: (err: Error | null, publicKey: string, privateKey: Buffer) => void): void; + function generateKeyPair(type: 'ed25519', options: ED25519KeyPairOptions<'der', 'pem'>, callback: (err: Error | null, publicKey: Buffer, privateKey: string) => void): void; + function generateKeyPair(type: 'ed25519', options: ED25519KeyPairOptions<'der', 'der'>, callback: (err: Error | null, publicKey: Buffer, privateKey: Buffer) => void): void; + function generateKeyPair(type: 'ed25519', options: ED25519KeyPairKeyObjectOptions | undefined, callback: (err: Error | null, publicKey: KeyObject, privateKey: KeyObject) => void): void; + function generateKeyPair(type: 'ed448', options: ED448KeyPairOptions<'pem', 'pem'>, callback: (err: Error | null, publicKey: string, privateKey: string) => void): void; + function generateKeyPair(type: 'ed448', options: ED448KeyPairOptions<'pem', 'der'>, callback: (err: Error | null, publicKey: string, privateKey: Buffer) => void): void; + function generateKeyPair(type: 'ed448', options: ED448KeyPairOptions<'der', 'pem'>, callback: (err: Error | null, publicKey: Buffer, privateKey: string) => void): void; + function generateKeyPair(type: 'ed448', options: ED448KeyPairOptions<'der', 'der'>, callback: (err: Error | null, publicKey: Buffer, privateKey: Buffer) => void): void; + function generateKeyPair(type: 'ed448', options: ED448KeyPairKeyObjectOptions | undefined, callback: (err: Error | null, publicKey: KeyObject, privateKey: KeyObject) => void): void; + function generateKeyPair(type: 'x25519', options: X25519KeyPairOptions<'pem', 'pem'>, callback: (err: Error | null, publicKey: string, privateKey: string) => void): void; + function generateKeyPair(type: 'x25519', options: X25519KeyPairOptions<'pem', 'der'>, callback: (err: Error | null, publicKey: string, privateKey: Buffer) => void): void; + function generateKeyPair(type: 'x25519', options: X25519KeyPairOptions<'der', 'pem'>, callback: (err: Error | null, publicKey: Buffer, privateKey: string) => void): void; + function generateKeyPair(type: 'x25519', options: X25519KeyPairOptions<'der', 'der'>, callback: (err: Error | null, publicKey: Buffer, privateKey: Buffer) => void): void; + function generateKeyPair(type: 'x25519', options: X25519KeyPairKeyObjectOptions | undefined, callback: (err: Error | null, publicKey: KeyObject, privateKey: KeyObject) => void): void; + function generateKeyPair(type: 'x448', options: X448KeyPairOptions<'pem', 'pem'>, callback: (err: Error | null, publicKey: string, privateKey: string) => void): void; + function generateKeyPair(type: 'x448', options: X448KeyPairOptions<'pem', 'der'>, callback: (err: Error | null, publicKey: string, privateKey: Buffer) => void): void; + function generateKeyPair(type: 'x448', options: X448KeyPairOptions<'der', 'pem'>, callback: (err: Error | null, publicKey: Buffer, privateKey: string) => void): void; + function generateKeyPair(type: 'x448', options: X448KeyPairOptions<'der', 'der'>, callback: (err: Error | null, publicKey: Buffer, privateKey: Buffer) => void): void; + function generateKeyPair(type: 'x448', options: X448KeyPairKeyObjectOptions | undefined, callback: (err: Error | null, publicKey: KeyObject, privateKey: KeyObject) => void): void; + namespace generateKeyPair { + function __promisify__( + type: 'rsa', + options: RSAKeyPairOptions<'pem', 'pem'> + ): Promise<{ + publicKey: string; + privateKey: string; + }>; + function __promisify__( + type: 'rsa', + options: 
RSAKeyPairOptions<'pem', 'der'> + ): Promise<{ + publicKey: string; + privateKey: Buffer; + }>; + function __promisify__( + type: 'rsa', + options: RSAKeyPairOptions<'der', 'pem'> + ): Promise<{ + publicKey: Buffer; + privateKey: string; + }>; + function __promisify__( + type: 'rsa', + options: RSAKeyPairOptions<'der', 'der'> + ): Promise<{ + publicKey: Buffer; + privateKey: Buffer; + }>; + function __promisify__(type: 'rsa', options: RSAKeyPairKeyObjectOptions): Promise; + function __promisify__( + type: 'rsa-pss', + options: RSAPSSKeyPairOptions<'pem', 'pem'> + ): Promise<{ + publicKey: string; + privateKey: string; + }>; + function __promisify__( + type: 'rsa-pss', + options: RSAPSSKeyPairOptions<'pem', 'der'> + ): Promise<{ + publicKey: string; + privateKey: Buffer; + }>; + function __promisify__( + type: 'rsa-pss', + options: RSAPSSKeyPairOptions<'der', 'pem'> + ): Promise<{ + publicKey: Buffer; + privateKey: string; + }>; + function __promisify__( + type: 'rsa-pss', + options: RSAPSSKeyPairOptions<'der', 'der'> + ): Promise<{ + publicKey: Buffer; + privateKey: Buffer; + }>; + function __promisify__(type: 'rsa-pss', options: RSAPSSKeyPairKeyObjectOptions): Promise; + function __promisify__( + type: 'dsa', + options: DSAKeyPairOptions<'pem', 'pem'> + ): Promise<{ + publicKey: string; + privateKey: string; + }>; + function __promisify__( + type: 'dsa', + options: DSAKeyPairOptions<'pem', 'der'> + ): Promise<{ + publicKey: string; + privateKey: Buffer; + }>; + function __promisify__( + type: 'dsa', + options: DSAKeyPairOptions<'der', 'pem'> + ): Promise<{ + publicKey: Buffer; + privateKey: string; + }>; + function __promisify__( + type: 'dsa', + options: DSAKeyPairOptions<'der', 'der'> + ): Promise<{ + publicKey: Buffer; + privateKey: Buffer; + }>; + function __promisify__(type: 'dsa', options: DSAKeyPairKeyObjectOptions): Promise; + function __promisify__( + type: 'ec', + options: ECKeyPairOptions<'pem', 'pem'> + ): Promise<{ + publicKey: string; + privateKey: string; + }>; + function __promisify__( + type: 'ec', + options: ECKeyPairOptions<'pem', 'der'> + ): Promise<{ + publicKey: string; + privateKey: Buffer; + }>; + function __promisify__( + type: 'ec', + options: ECKeyPairOptions<'der', 'pem'> + ): Promise<{ + publicKey: Buffer; + privateKey: string; + }>; + function __promisify__( + type: 'ec', + options: ECKeyPairOptions<'der', 'der'> + ): Promise<{ + publicKey: Buffer; + privateKey: Buffer; + }>; + function __promisify__(type: 'ec', options: ECKeyPairKeyObjectOptions): Promise; + function __promisify__( + type: 'ed25519', + options: ED25519KeyPairOptions<'pem', 'pem'> + ): Promise<{ + publicKey: string; + privateKey: string; + }>; + function __promisify__( + type: 'ed25519', + options: ED25519KeyPairOptions<'pem', 'der'> + ): Promise<{ + publicKey: string; + privateKey: Buffer; + }>; + function __promisify__( + type: 'ed25519', + options: ED25519KeyPairOptions<'der', 'pem'> + ): Promise<{ + publicKey: Buffer; + privateKey: string; + }>; + function __promisify__( + type: 'ed25519', + options: ED25519KeyPairOptions<'der', 'der'> + ): Promise<{ + publicKey: Buffer; + privateKey: Buffer; + }>; + function __promisify__(type: 'ed25519', options?: ED25519KeyPairKeyObjectOptions): Promise; + function __promisify__( + type: 'ed448', + options: ED448KeyPairOptions<'pem', 'pem'> + ): Promise<{ + publicKey: string; + privateKey: string; + }>; + function __promisify__( + type: 'ed448', + options: ED448KeyPairOptions<'pem', 'der'> + ): Promise<{ + publicKey: string; + privateKey: Buffer; + }>; 
+ function __promisify__( + type: 'ed448', + options: ED448KeyPairOptions<'der', 'pem'> + ): Promise<{ + publicKey: Buffer; + privateKey: string; + }>; + function __promisify__( + type: 'ed448', + options: ED448KeyPairOptions<'der', 'der'> + ): Promise<{ + publicKey: Buffer; + privateKey: Buffer; + }>; + function __promisify__(type: 'ed448', options?: ED448KeyPairKeyObjectOptions): Promise; + function __promisify__( + type: 'x25519', + options: X25519KeyPairOptions<'pem', 'pem'> + ): Promise<{ + publicKey: string; + privateKey: string; + }>; + function __promisify__( + type: 'x25519', + options: X25519KeyPairOptions<'pem', 'der'> + ): Promise<{ + publicKey: string; + privateKey: Buffer; + }>; + function __promisify__( + type: 'x25519', + options: X25519KeyPairOptions<'der', 'pem'> + ): Promise<{ + publicKey: Buffer; + privateKey: string; + }>; + function __promisify__( + type: 'x25519', + options: X25519KeyPairOptions<'der', 'der'> + ): Promise<{ + publicKey: Buffer; + privateKey: Buffer; + }>; + function __promisify__(type: 'x25519', options?: X25519KeyPairKeyObjectOptions): Promise; + function __promisify__( + type: 'x448', + options: X448KeyPairOptions<'pem', 'pem'> + ): Promise<{ + publicKey: string; + privateKey: string; + }>; + function __promisify__( + type: 'x448', + options: X448KeyPairOptions<'pem', 'der'> + ): Promise<{ + publicKey: string; + privateKey: Buffer; + }>; + function __promisify__( + type: 'x448', + options: X448KeyPairOptions<'der', 'pem'> + ): Promise<{ + publicKey: Buffer; + privateKey: string; + }>; + function __promisify__( + type: 'x448', + options: X448KeyPairOptions<'der', 'der'> + ): Promise<{ + publicKey: Buffer; + privateKey: Buffer; + }>; + function __promisify__(type: 'x448', options?: X448KeyPairKeyObjectOptions): Promise; + } + /** + * Calculates and returns the signature for `data` using the given private key and + * algorithm. If `algorithm` is `null` or `undefined`, then the algorithm is + * dependent upon the key type (especially Ed25519 and Ed448). + * + * If `key` is not a `KeyObject`, this function behaves as if `key` had been + * passed to {@link createPrivateKey}. If it is an object, the following + * additional properties can be passed: + * + * If the `callback` function is provided this function uses libuv's threadpool. + * @since v12.0.0 + */ + function sign(algorithm: string | null | undefined, data: NodeJS.ArrayBufferView, key: KeyLike | SignKeyObjectInput | SignPrivateKeyInput): Buffer; + function sign( + algorithm: string | null | undefined, + data: NodeJS.ArrayBufferView, + key: KeyLike | SignKeyObjectInput | SignPrivateKeyInput, + callback: (error: Error | null, data: Buffer) => void + ): void; + /** + * Verifies the given signature for `data` using the given key and algorithm. If`algorithm` is `null` or `undefined`, then the algorithm is dependent upon the + * key type (especially Ed25519 and Ed448). + * + * If `key` is not a `KeyObject`, this function behaves as if `key` had been + * passed to {@link createPublicKey}. If it is an object, the following + * additional properties can be passed: + * + * The `signature` argument is the previously calculated signature for the `data`. + * + * Because public keys can be derived from private keys, a private key or a public + * key may be passed for `key`. + * + * If the `callback` function is provided this function uses libuv's threadpool. 
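Neither one-shot helper described here includes an inline example, so below is a minimal Ed25519 sign/verify round trip; the payload is a placeholder, and `algorithm` is passed as `null` because Ed25519 determines its own digest.

```js
// Sketch: one-shot sign/verify with an Ed25519 key pair.
import { Buffer } from 'buffer';
const { generateKeyPairSync, sign, verify } = await import('crypto');

const { publicKey, privateKey } = generateKeyPairSync('ed25519');
const data = Buffer.from('payload to authenticate');

const signature = sign(null, data, privateKey);
console.log(verify(null, data, publicKey, signature)); // true
```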
+ * @since v12.0.0 + */ + function verify(algorithm: string | null | undefined, data: NodeJS.ArrayBufferView, key: KeyLike | VerifyKeyObjectInput | VerifyPublicKeyInput, signature: NodeJS.ArrayBufferView): boolean; + function verify( + algorithm: string | null | undefined, + data: NodeJS.ArrayBufferView, + key: KeyLike | VerifyKeyObjectInput | VerifyPublicKeyInput, + signature: NodeJS.ArrayBufferView, + callback: (error: Error | null, result: boolean) => void + ): void; + /** + * Computes the Diffie-Hellman secret based on a `privateKey` and a `publicKey`. + * Both keys must have the same `asymmetricKeyType`, which must be one of `'dh'`(for Diffie-Hellman), `'ec'` (for ECDH), `'x448'`, or `'x25519'` (for ECDH-ES). + * @since v13.9.0, v12.17.0 + */ + function diffieHellman(options: { privateKey: KeyObject; publicKey: KeyObject }): Buffer; + type CipherMode = 'cbc' | 'ccm' | 'cfb' | 'ctr' | 'ecb' | 'gcm' | 'ocb' | 'ofb' | 'stream' | 'wrap' | 'xts'; + interface CipherInfoOptions { + /** + * A test key length. + */ + keyLength?: number | undefined; + /** + * A test IV length. + */ + ivLength?: number | undefined; + } + interface CipherInfo { + /** + * The name of the cipher. + */ + name: string; + /** + * The nid of the cipher. + */ + nid: number; + /** + * The block size of the cipher in bytes. + * This property is omitted when mode is 'stream'. + */ + blockSize?: number | undefined; + /** + * The expected or default initialization vector length in bytes. + * This property is omitted if the cipher does not use an initialization vector. + */ + ivLength?: number | undefined; + /** + * The expected or default key length in bytes. + */ + keyLength: number; + /** + * The cipher mode. + */ + mode: CipherMode; + } + /** + * Returns information about a given cipher. + * + * Some ciphers accept variable length keys and initialization vectors. By default, + * the `crypto.getCipherInfo()` method will return the default values for these + * ciphers. To test if a given key length or iv length is acceptable for given + * cipher, use the `keyLength` and `ivLength` options. If the given values are + * unacceptable, `undefined` will be returned. + * @since v15.0.0 + * @param nameOrNid The name or nid of the cipher to query. + */ + function getCipherInfo(nameOrNid: string | number, options?: CipherInfoOptions): CipherInfo | undefined; + /** + * HKDF is a simple key derivation function defined in RFC 5869\. The given `ikm`,`salt` and `info` are used with the `digest` to derive a key of `keylen` bytes. + * + * The supplied `callback` function is called with two arguments: `err` and`derivedKey`. If an errors occurs while deriving the key, `err` will be set; + * otherwise `err` will be `null`. The successfully generated `derivedKey` will + * be passed to the callback as an [ArrayBuffer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer). An error will be thrown if any + * of the input arguments specify invalid values or types. + * + * ```js + * import { Buffer } from 'buffer'; + * const { + * hkdf + * } = await import('crypto'); + * + * hkdf('sha512', 'key', 'salt', 'info', 64, (err, derivedKey) => { + * if (err) throw err; + * console.log(Buffer.from(derivedKey).toString('hex')); // '24156e2...5391653' + * }); + * ``` + * @since v15.0.0 + * @param digest The digest algorithm to use. + * @param ikm The input keying material. It must be at least one byte in length. + * @param salt The salt value. Must be provided but can be zero-length. 
+ * @param info Additional info value. Must be provided but can be zero-length, and cannot be more than 1024 bytes. + * @param keylen The length of the key to generate. Must be greater than 0. The maximum allowable value is `255` times the number of bytes produced by the selected digest function (e.g. `sha512` + * generates 64-byte hashes, making the maximum HKDF output 16320 bytes). + */ + function hkdf(digest: string, irm: BinaryLike | KeyObject, salt: BinaryLike, info: BinaryLike, keylen: number, callback: (err: Error | null, derivedKey: ArrayBuffer) => void): void; + /** + * Provides a synchronous HKDF key derivation function as defined in RFC 5869\. The + * given `ikm`, `salt` and `info` are used with the `digest` to derive a key of`keylen` bytes. + * + * The successfully generated `derivedKey` will be returned as an [ArrayBuffer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer). + * + * An error will be thrown if any of the input arguments specify invalid values or + * types, or if the derived key cannot be generated. + * + * ```js + * import { Buffer } from 'buffer'; + * const { + * hkdfSync + * } = await import('crypto'); + * + * const derivedKey = hkdfSync('sha512', 'key', 'salt', 'info', 64); + * console.log(Buffer.from(derivedKey).toString('hex')); // '24156e2...5391653' + * ``` + * @since v15.0.0 + * @param digest The digest algorithm to use. + * @param ikm The input keying material. It must be at least one byte in length. + * @param salt The salt value. Must be provided but can be zero-length. + * @param info Additional info value. Must be provided but can be zero-length, and cannot be more than 1024 bytes. + * @param keylen The length of the key to generate. Must be greater than 0. The maximum allowable value is `255` times the number of bytes produced by the selected digest function (e.g. `sha512` + * generates 64-byte hashes, making the maximum HKDF output 16320 bytes). + */ + function hkdfSync(digest: string, ikm: BinaryLike | KeyObject, salt: BinaryLike, info: BinaryLike, keylen: number): ArrayBuffer; + interface SecureHeapUsage { + /** + * The total allocated secure heap size as specified using the `--secure-heap=n` command-line flag. + */ + total: number; + /** + * The minimum allocation from the secure heap as specified using the `--secure-heap-min` command-line flag. + */ + min: number; + /** + * The total number of bytes currently allocated from the secure heap. + */ + used: number; + /** + * The calculated ratio of `used` to `total` allocated bytes. + */ + utilization: number; + } + /** + * @since v15.6.0 + */ + function secureHeapUsed(): SecureHeapUsage; + interface RandomUUIDOptions { + /** + * By default, to improve performance, + * Node.js will pre-emptively generate and persistently cache enough + * random data to generate up to 128 random UUIDs. To generate a UUID + * without using the cache, set `disableEntropyCache` to `true`. + * + * @default `false` + */ + disableEntropyCache?: boolean | undefined; + } + /** + * Generates a random [RFC 4122](https://www.rfc-editor.org/rfc/rfc4122.txt) version 4 UUID. The UUID is generated using a + * cryptographic pseudorandom number generator. 
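A minimal usage sketch of `randomUUID`; the second call only illustrates the `disableEntropyCache` option described above.

```js
const { randomUUID } = await import('crypto');

console.log(randomUUID());                              // e.g. '36b8f84d-df4e-4d49-b662-bcde71a8764f'
console.log(randomUUID({ disableEntropyCache: true })); // bypasses the internal entropy cache
```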
+ * @since v15.6.0, v14.17.0 + */ + function randomUUID(options?: RandomUUIDOptions): string; + interface X509CheckOptions { + /** + * @default 'always' + */ + subject: 'always' | 'never'; + /** + * @default true + */ + wildcards: boolean; + /** + * @default true + */ + partialWildcards: boolean; + /** + * @default false + */ + multiLabelWildcards: boolean; + /** + * @default false + */ + singleLabelSubdomains: boolean; + } + /** + * Encapsulates an X509 certificate and provides read-only access to + * its information. + * + * ```js + * const { X509Certificate } = await import('crypto'); + * + * const x509 = new X509Certificate('{... pem encoded cert ...}'); + * + * console.log(x509.subject); + * ``` + * @since v15.6.0 + */ + class X509Certificate { + /** + * Will be \`true\` if this is a Certificate Authority (CA) certificate. + * @since v15.6.0 + */ + readonly ca: boolean; + /** + * The SHA-1 fingerprint of this certificate. + * + * Because SHA-1 is cryptographically broken and because the security of SHA-1 is + * significantly worse than that of algorithms that are commonly used to sign + * certificates, consider using `x509.fingerprint256` instead. + * @since v15.6.0 + */ + readonly fingerprint: string; + /** + * The SHA-256 fingerprint of this certificate. + * @since v15.6.0 + */ + readonly fingerprint256: string; + /** + * The SHA-512 fingerprint of this certificate. + * @since v16.14.0 + */ + readonly fingerprint512: string; + /** + * The complete subject of this certificate. + * @since v15.6.0 + */ + readonly subject: string; + /** + * The subject alternative name specified for this certificate or `undefined` + * if not available. + * @since v15.6.0 + */ + readonly subjectAltName: string | undefined; + /** + * The information access content of this certificate or `undefined` if not + * available. + * @since v15.6.0 + */ + readonly infoAccess: string | undefined; + /** + * An array detailing the key usages for this certificate. + * @since v15.6.0 + */ + readonly keyUsage: string[]; + /** + * The issuer identification included in this certificate. + * @since v15.6.0 + */ + readonly issuer: string; + /** + * The issuer certificate or `undefined` if the issuer certificate is not + * available. + * @since v15.9.0 + */ + readonly issuerCertificate?: X509Certificate | undefined; + /** + * The public key `KeyObject` for this certificate. + * @since v15.6.0 + */ + readonly publicKey: KeyObject; + /** + * A `Buffer` containing the DER encoding of this certificate. + * @since v15.6.0 + */ + readonly raw: Buffer; + /** + * The serial number of this certificate. + * + * Serial numbers are assigned by certificate authorities and do not uniquely + * identify certificates. Consider using `x509.fingerprint256` as a unique + * identifier instead. + * @since v15.6.0 + */ + readonly serialNumber: string; + /** + * The date/time from which this certificate is considered valid. + * @since v15.6.0 + */ + readonly validFrom: string; + /** + * The date/time until which this certificate is considered valid. + * @since v15.6.0 + */ + readonly validTo: string; + constructor(buffer: BinaryLike); + /** + * Checks whether the certificate matches the given email address. + * + * If the `'subject'` option is undefined or set to `'default'`, the certificate + * subject is only considered if the subject alternative name extension either does + * not exist or does not contain any email addresses. 
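A minimal sketch of the name-checking methods in practice; `cert.pem` is a placeholder path, and the results depend entirely on the certificate's contents.

```js
// Sketch: loading a certificate and checking names against it.
const { X509Certificate } = await import('crypto');
const { readFileSync } = await import('fs');

const x509 = new X509Certificate(readFileSync('cert.pem'));

// Each check returns undefined when no subject alternative name
// (or, per the rules above, the subject) matches.
console.log(x509.checkEmail('admin@example.com'));
console.log(x509.checkHost('www.example.com', { subject: 'never' }));
console.log(x509.checkIP('192.0.2.1'));
```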
+ * + * If the `'subject'` option is set to `'always'` and if the subject alternative + * name extension either does not exist or does not contain a matching email + * address, the certificate subject is considered. + * + * If the `'subject'` option is set to `'never'`, the certificate subject is never + * considered, even if the certificate contains no subject alternative names. + * @since v15.6.0 + * @return Returns `email` if the certificate matches, `undefined` if it does not. + */ + checkEmail(email: string, options?: Pick): string | undefined; + /** + * Checks whether the certificate matches the given host name. + * + * If the certificate matches the given host name, the matching subject name is + * returned. The returned name might be an exact match (e.g., `foo.example.com`) + * or it might contain wildcards (e.g., `*.example.com`). Because host name + * comparisons are case-insensitive, the returned subject name might also differ + * from the given `name` in capitalization. + * + * If the `'subject'` option is undefined or set to `'default'`, the certificate + * subject is only considered if the subject alternative name extension either does + * not exist or does not contain any DNS names. This behavior is consistent with [RFC 2818](https://www.rfc-editor.org/rfc/rfc2818.txt) ("HTTP Over TLS"). + * + * If the `'subject'` option is set to `'always'` and if the subject alternative + * name extension either does not exist or does not contain a matching DNS name, + * the certificate subject is considered. + * + * If the `'subject'` option is set to `'never'`, the certificate subject is never + * considered, even if the certificate contains no subject alternative names. + * @since v15.6.0 + * @return Returns a subject name that matches `name`, or `undefined` if no subject name matches `name`. + */ + checkHost(name: string, options?: X509CheckOptions): string | undefined; + /** + * Checks whether the certificate matches the given IP address (IPv4 or IPv6). + * + * Only [RFC 5280](https://www.rfc-editor.org/rfc/rfc5280.txt) `iPAddress` subject alternative names are considered, and they + * must match the given `ip` address exactly. Other subject alternative names as + * well as the subject field of the certificate are ignored. + * @since v15.6.0 + * @return Returns `ip` if the certificate matches, `undefined` if it does not. + */ + checkIP(ip: string): string | undefined; + /** + * Checks whether this certificate was issued by the given `otherCert`. + * @since v15.6.0 + */ + checkIssued(otherCert: X509Certificate): boolean; + /** + * Checks whether the public key for this certificate is consistent with + * the given private key. + * @since v15.6.0 + * @param privateKey A private key. + */ + checkPrivateKey(privateKey: KeyObject): boolean; + /** + * There is no standard JSON encoding for X509 certificates. The`toJSON()` method returns a string containing the PEM encoded + * certificate. + * @since v15.6.0 + */ + toJSON(): string; + /** + * Returns information about this certificate using the legacy `certificate object` encoding. + * @since v15.6.0 + */ + toLegacyObject(): PeerCertificate; + /** + * Returns the PEM-encoded certificate. + * @since v15.6.0 + */ + toString(): string; + /** + * Verifies that this certificate was signed by the given public key. + * Does not perform any other validation checks on the certificate. + * @since v15.6.0 + * @param publicKey A public key. 
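A small usage sketch for the matching helpers above, assuming `x509` is an `X509Certificate` instance like the one constructed earlier and that the host name and IP are purely illustrative:

```js
// Returns the matching subject name (possibly a wildcard) or undefined.
const match = x509.checkHost('www.example.com');
if (match !== undefined) {
  console.log(`certificate covers ${match}`);
}

// IP checks only consider iPAddress subject alternative names.
console.log(x509.checkIP('93.184.216.34'));
```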
+ */ + verify(publicKey: KeyObject): boolean; + } + type LargeNumberLike = NodeJS.ArrayBufferView | SharedArrayBuffer | ArrayBuffer | bigint; + interface GeneratePrimeOptions { + add?: LargeNumberLike | undefined; + rem?: LargeNumberLike | undefined; + /** + * @default false + */ + safe?: boolean | undefined; + bigint?: boolean | undefined; + } + interface GeneratePrimeOptionsBigInt extends GeneratePrimeOptions { + bigint: true; + } + interface GeneratePrimeOptionsArrayBuffer extends GeneratePrimeOptions { + bigint?: false | undefined; + } + /** + * Generates a pseudorandom prime of `size` bits. + * + * If `options.safe` is `true`, the prime will be a safe prime -- that is,`(prime - 1) / 2` will also be a prime. + * + * The `options.add` and `options.rem` parameters can be used to enforce additional + * requirements, e.g., for Diffie-Hellman: + * + * * If `options.add` and `options.rem` are both set, the prime will satisfy the + * condition that `prime % add = rem`. + * * If only `options.add` is set and `options.safe` is not `true`, the prime will + * satisfy the condition that `prime % add = 1`. + * * If only `options.add` is set and `options.safe` is set to `true`, the prime + * will instead satisfy the condition that `prime % add = 3`. This is necessary + * because `prime % add = 1` for `options.add > 2` would contradict the condition + * enforced by `options.safe`. + * * `options.rem` is ignored if `options.add` is not given. + * + * Both `options.add` and `options.rem` must be encoded as big-endian sequences + * if given as an `ArrayBuffer`, `SharedArrayBuffer`, `TypedArray`, `Buffer`, or`DataView`. + * + * By default, the prime is encoded as a big-endian sequence of octets + * in an [ArrayBuffer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer). If the `bigint` option is `true`, then a + * [bigint](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/BigInt) is provided. + * @since v15.8.0 + * @param size The size (in bits) of the prime to generate. + */ + function generatePrime(size: number, callback: (err: Error | null, prime: ArrayBuffer) => void): void; + function generatePrime(size: number, options: GeneratePrimeOptionsBigInt, callback: (err: Error | null, prime: bigint) => void): void; + function generatePrime(size: number, options: GeneratePrimeOptionsArrayBuffer, callback: (err: Error | null, prime: ArrayBuffer) => void): void; + function generatePrime(size: number, options: GeneratePrimeOptions, callback: (err: Error | null, prime: ArrayBuffer | bigint) => void): void; + /** + * Generates a pseudorandom prime of `size` bits. + * + * If `options.safe` is `true`, the prime will be a safe prime -- that is,`(prime - 1) / 2` will also be a prime. + * + * The `options.add` and `options.rem` parameters can be used to enforce additional + * requirements, e.g., for Diffie-Hellman: + * + * * If `options.add` and `options.rem` are both set, the prime will satisfy the + * condition that `prime % add = rem`. + * * If only `options.add` is set and `options.safe` is not `true`, the prime will + * satisfy the condition that `prime % add = 1`. + * * If only `options.add` is set and `options.safe` is set to `true`, the prime + * will instead satisfy the condition that `prime % add = 3`. This is necessary + * because `prime % add = 1` for `options.add > 2` would contradict the condition + * enforced by `options.safe`. + * * `options.rem` is ignored if `options.add` is not given. 
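A minimal sketch of both prime-generation forms described above (bit sizes chosen only to keep the example fast):

```js
import { Buffer } from 'buffer';
const { generatePrime, generatePrimeSync } = await import('crypto');

// Callback form: the prime arrives as a big-endian ArrayBuffer by default.
generatePrime(128, { safe: true }, (err, prime) => {
  if (err) throw err;
  console.log(Buffer.from(prime).toString('hex'));
});

// Synchronous form with the `bigint` option.
const p = generatePrimeSync(64, { bigint: true });
console.log(typeof p); // 'bigint'
```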
+ * + * Both `options.add` and `options.rem` must be encoded as big-endian sequences + * if given as an `ArrayBuffer`, `SharedArrayBuffer`, `TypedArray`, `Buffer`, or`DataView`. + * + * By default, the prime is encoded as a big-endian sequence of octets + * in an [ArrayBuffer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer). If the `bigint` option is `true`, then a + * [bigint](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/BigInt) is provided. + * @since v15.8.0 + * @param size The size (in bits) of the prime to generate. + */ + function generatePrimeSync(size: number): ArrayBuffer; + function generatePrimeSync(size: number, options: GeneratePrimeOptionsBigInt): bigint; + function generatePrimeSync(size: number, options: GeneratePrimeOptionsArrayBuffer): ArrayBuffer; + function generatePrimeSync(size: number, options: GeneratePrimeOptions): ArrayBuffer | bigint; + interface CheckPrimeOptions { + /** + * The number of Miller-Rabin probabilistic primality iterations to perform. + * When the value is 0 (zero), a number of checks is used that yields a false positive rate of at most 2-64 for random input. + * Care must be used when selecting a number of checks. + * Refer to the OpenSSL documentation for the BN_is_prime_ex function nchecks options for more details. + * + * @default 0 + */ + checks?: number | undefined; + } + /** + * Checks the primality of the `candidate`. + * @since v15.8.0 + * @param candidate A possible prime encoded as a sequence of big endian octets of arbitrary length. + */ + function checkPrime(value: LargeNumberLike, callback: (err: Error | null, result: boolean) => void): void; + function checkPrime(value: LargeNumberLike, options: CheckPrimeOptions, callback: (err: Error | null, result: boolean) => void): void; + /** + * Checks the primality of the `candidate`. + * @since v15.8.0 + * @param candidate A possible prime encoded as a sequence of big endian octets of arbitrary length. + * @return `true` if the candidate is a prime with an error probability less than `0.25 ** options.checks`. + */ + function checkPrimeSync(candidate: LargeNumberLike, options?: CheckPrimeOptions): boolean; + /** + * Load and set the `engine` for some or all OpenSSL functions (selected by flags). + * + * `engine` could be either an id or a path to the engine's shared library. + * + * The optional `flags` argument uses `ENGINE_METHOD_ALL` by default. + * The `flags` is a bit field taking one of or a mix of the following flags (defined in `crypto.constants`): + * + * - `crypto.constants.ENGINE_METHOD_RSA` + * - `crypto.constants.ENGINE_METHOD_DSA` + * - `crypto.constants.ENGINE_METHOD_DH` + * - `crypto.constants.ENGINE_METHOD_RAND` + * - `crypto.constants.ENGINE_METHOD_EC` + * - `crypto.constants.ENGINE_METHOD_CIPHERS` + * - `crypto.constants.ENGINE_METHOD_DIGESTS` + * - `crypto.constants.ENGINE_METHOD_PKEY_METHS` + * - `crypto.constants.ENGINE_METHOD_PKEY_ASN1_METHS` + * - `crypto.constants.ENGINE_METHOD_ALL` + * - `crypto.constants.ENGINE_METHOD_NONE` + * + * The flags below are deprecated in OpenSSL-1.1.0. + * + * - `crypto.constants.ENGINE_METHOD_ECDH` + * - `crypto.constants.ENGINE_METHOD_ECDSA` + * - `crypto.constants.ENGINE_METHOD_STORE` + * @since v0.11.11 + * @param [flags=crypto.constants.ENGINE_METHOD_ALL] + */ + function setEngine(engine: string, flags?: number): void; + /** + * A convenient alias for `crypto.webcrypto.getRandomValues()`. 
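And a sketch of the primality checks, reusing `generatePrimeSync()` so the candidate is known to be prime:

```js
const { generatePrimeSync, checkPrimeSync, checkPrime } = await import('crypto');

const candidate = generatePrimeSync(64, { bigint: true });
console.log(checkPrimeSync(candidate)); // true (probabilistically)
console.log(checkPrimeSync(16n));       // false

// Asynchronous variant with an explicit number of Miller-Rabin checks.
checkPrime(candidate, { checks: 10 }, (err, result) => {
  if (err) throw err;
  console.log(result); // true
});
```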
+ * This implementation is not compliant with the Web Crypto spec, + * to write web-compatible code use `crypto.webcrypto.getRandomValues()` instead. + * @since v17.4.0 + * @returns Returns `typedArray`. + */ + function getRandomValues(typedArray: T): T; + /** + * A convenient alias for `crypto.webcrypto.subtle`. + * @since v17.4.0 + */ + const subtle: webcrypto.SubtleCrypto; + /** + * An implementation of the Web Crypto API standard. + * + * See the {@link https://nodejs.org/docs/latest/api/webcrypto.html Web Crypto API documentation} for details. + * @since v15.0.0 + */ + const webcrypto: webcrypto.Crypto; + namespace webcrypto { + type BufferSource = ArrayBufferView | ArrayBuffer; + type KeyFormat = 'jwk' | 'pkcs8' | 'raw' | 'spki'; + type KeyType = 'private' | 'public' | 'secret'; + type KeyUsage = 'decrypt' | 'deriveBits' | 'deriveKey' | 'encrypt' | 'sign' | 'unwrapKey' | 'verify' | 'wrapKey'; + type AlgorithmIdentifier = Algorithm | string; + type HashAlgorithmIdentifier = AlgorithmIdentifier; + type NamedCurve = string; + type BigInteger = Uint8Array; + interface AesCbcParams extends Algorithm { + iv: BufferSource; + } + interface AesCtrParams extends Algorithm { + counter: BufferSource; + length: number; + } + interface AesDerivedKeyParams extends Algorithm { + length: number; + } + interface AesGcmParams extends Algorithm { + additionalData?: BufferSource; + iv: BufferSource; + tagLength?: number; + } + interface AesKeyAlgorithm extends KeyAlgorithm { + length: number; + } + interface AesKeyGenParams extends Algorithm { + length: number; + } + interface Algorithm { + name: string; + } + interface EcKeyAlgorithm extends KeyAlgorithm { + namedCurve: NamedCurve; + } + interface EcKeyGenParams extends Algorithm { + namedCurve: NamedCurve; + } + interface EcKeyImportParams extends Algorithm { + namedCurve: NamedCurve; + } + interface EcdhKeyDeriveParams extends Algorithm { + public: CryptoKey; + } + interface EcdsaParams extends Algorithm { + hash: HashAlgorithmIdentifier; + } + interface Ed448Params extends Algorithm { + context?: BufferSource; + } + interface HkdfParams extends Algorithm { + hash: HashAlgorithmIdentifier; + info: BufferSource; + salt: BufferSource; + } + interface HmacImportParams extends Algorithm { + hash: HashAlgorithmIdentifier; + length?: number; + } + interface HmacKeyAlgorithm extends KeyAlgorithm { + hash: KeyAlgorithm; + length: number; + } + interface HmacKeyGenParams extends Algorithm { + hash: HashAlgorithmIdentifier; + length?: number; + } + interface JsonWebKey { + alg?: string; + crv?: string; + d?: string; + dp?: string; + dq?: string; + e?: string; + ext?: boolean; + k?: string; + key_ops?: string[]; + kty?: string; + n?: string; + oth?: RsaOtherPrimesInfo[]; + p?: string; + q?: string; + qi?: string; + use?: string; + x?: string; + y?: string; + } + interface KeyAlgorithm { + name: string; + } + interface Pbkdf2Params extends Algorithm { + hash: HashAlgorithmIdentifier; + iterations: number; + salt: BufferSource; + } + interface RsaHashedImportParams extends Algorithm { + hash: HashAlgorithmIdentifier; + } + interface RsaHashedKeyAlgorithm extends RsaKeyAlgorithm { + hash: KeyAlgorithm; + } + interface RsaHashedKeyGenParams extends RsaKeyGenParams { + hash: HashAlgorithmIdentifier; + } + interface RsaKeyAlgorithm extends KeyAlgorithm { + modulusLength: number; + publicExponent: BigInteger; + } + interface RsaKeyGenParams extends Algorithm { + modulusLength: number; + publicExponent: BigInteger; + } + interface RsaOaepParams extends Algorithm { + 
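A short sketch of the `getRandomValues()` alias described above, next to the equivalent call through `crypto.webcrypto`:

```js
const crypto = await import('crypto');

// Top-level alias: fills the typed array in place and returns it.
const iv = crypto.getRandomValues(new Uint8Array(12));
console.log(iv.byteLength); // 12

// The same operation through the Web Crypto instance.
crypto.webcrypto.getRandomValues(new Uint32Array(4));
```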
label?: BufferSource; + } + interface RsaOtherPrimesInfo { + d?: string; + r?: string; + t?: string; + } + interface RsaPssParams extends Algorithm { + saltLength: number; + } + /** + * Calling `require('node:crypto').webcrypto` returns an instance of the `Crypto` class. + * `Crypto` is a singleton that provides access to the remainder of the crypto API. + * @since v15.0.0 + */ + interface Crypto { + /** + * Provides access to the `SubtleCrypto` API. + * @since v15.0.0 + */ + readonly subtle: SubtleCrypto; + /** + * Generates cryptographically strong random values. + * The given `typedArray` is filled with random values, and a reference to `typedArray` is returned. + * + * The given `typedArray` must be an integer-based instance of {@link NodeJS.TypedArray}, i.e. `Float32Array` and `Float64Array` are not accepted. + * + * An error will be thrown if the given `typedArray` is larger than 65,536 bytes. + * @since v15.0.0 + */ + getRandomValues>(typedArray: T): T; + /** + * Generates a random {@link https://www.rfc-editor.org/rfc/rfc4122.txt RFC 4122} version 4 UUID. + * The UUID is generated using a cryptographic pseudorandom number generator. + * @since v16.7.0 + */ + randomUUID(): string; + CryptoKey: CryptoKeyConstructor; + } + // This constructor throws ILLEGAL_CONSTRUCTOR so it should not be newable. + interface CryptoKeyConstructor { + /** Illegal constructor */ + (_: { readonly _: unique symbol }): never; // Allows instanceof to work but not be callable by the user. + readonly length: 0; + readonly name: 'CryptoKey'; + readonly prototype: CryptoKey; + } + /** + * @since v15.0.0 + */ + interface CryptoKey { + /** + * An object detailing the algorithm for which the key can be used along with additional algorithm-specific parameters. + * @since v15.0.0 + */ + readonly algorithm: KeyAlgorithm; + /** + * When `true`, the {@link CryptoKey} can be extracted using either `subtleCrypto.exportKey()` or `subtleCrypto.wrapKey()`. + * @since v15.0.0 + */ + readonly extractable: boolean; + /** + * A string identifying whether the key is a symmetric (`'secret'`) or asymmetric (`'private'` or `'public'`) key. + * @since v15.0.0 + */ + readonly type: KeyType; + /** + * An array of strings identifying the operations for which the key may be used. + * + * The possible usages are: + * - `'encrypt'` - The key may be used to encrypt data. + * - `'decrypt'` - The key may be used to decrypt data. + * - `'sign'` - The key may be used to generate digital signatures. + * - `'verify'` - The key may be used to verify digital signatures. + * - `'deriveKey'` - The key may be used to derive a new key. + * - `'deriveBits'` - The key may be used to derive bits. + * - `'wrapKey'` - The key may be used to wrap another key. + * - `'unwrapKey'` - The key may be used to unwrap another key. + * + * Valid key usages depend on the key algorithm (identified by `cryptokey.algorithm.name`). + * @since v15.0.0 + */ + readonly usages: KeyUsage[]; + } + /** + * The `CryptoKeyPair` is a simple dictionary object with `publicKey` and `privateKey` properties, representing an asymmetric key pair. + * @since v15.0.0 + */ + interface CryptoKeyPair { + /** + * A {@link CryptoKey} whose type will be `'private'`. + * @since v15.0.0 + */ + privateKey: CryptoKey; + /** + * A {@link CryptoKey} whose type will be `'public'`. 
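To illustrate the `CryptoKey`/`CryptoKeyPair` shapes described above, a sketch that generates an ECDSA key pair and inspects the read-only properties (the curve and usages are illustrative choices):

```js
const { webcrypto } = await import('crypto');

const { publicKey, privateKey } = await webcrypto.subtle.generateKey(
  { name: 'ECDSA', namedCurve: 'P-256' },
  true,
  ['sign', 'verify'],
);

console.log(publicKey.type, publicKey.usages);        // 'public' [ 'verify' ]
console.log(privateKey.type, privateKey.extractable); // 'private' true
console.log(privateKey.algorithm.name);               // 'ECDSA'
```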
+ * @since v15.0.0 + */ + publicKey: CryptoKey; + } + /** + * @since v15.0.0 + */ + interface SubtleCrypto { + /** + * Using the method and parameters specified in `algorithm` and the keying material provided by `key`, + * `subtle.decrypt()` attempts to decipher the provided `data`. If successful, + * the returned promise will be resolved with an `` containing the plaintext result. + * + * The algorithms currently supported include: + * + * - `'RSA-OAEP'` + * - `'AES-CTR'` + * - `'AES-CBC'` + * - `'AES-GCM'` + * @since v15.0.0 + */ + decrypt(algorithm: AlgorithmIdentifier | RsaOaepParams | AesCtrParams | AesCbcParams | AesGcmParams, key: CryptoKey, data: BufferSource): Promise; + /** + * Using the method and parameters specified in `algorithm` and the keying material provided by `baseKey`, + * `subtle.deriveBits()` attempts to generate `length` bits. + * The Node.js implementation requires that when `length` is a number it must be multiple of `8`. + * When `length` is `null` the maximum number of bits for a given algorithm is generated. This is allowed + * for the `'ECDH'`, `'X25519'`, and `'X448'` algorithms. + * If successful, the returned promise will be resolved with an `` containing the generated data. + * + * The algorithms currently supported include: + * + * - `'ECDH'` + * - `'X25519'` + * - `'X448'` + * - `'HKDF'` + * - `'PBKDF2'` + * @since v15.0.0 + */ + deriveBits(algorithm: EcdhKeyDeriveParams, baseKey: CryptoKey, length: number | null): Promise; + deriveBits(algorithm: AlgorithmIdentifier | HkdfParams | Pbkdf2Params, baseKey: CryptoKey, length: number): Promise; + /** + * Using the method and parameters specified in `algorithm`, and the keying material provided by `baseKey`, + * `subtle.deriveKey()` attempts to generate a new ` based on the method and parameters in `derivedKeyAlgorithm`. + * + * Calling `subtle.deriveKey()` is equivalent to calling `subtle.deriveBits()` to generate raw keying material, + * then passing the result into the `subtle.importKey()` method using the `deriveKeyAlgorithm`, `extractable`, and `keyUsages` parameters as input. + * + * The algorithms currently supported include: + * + * - `'ECDH'` + * - `'X25519'` + * - `'X448'` + * - `'HKDF'` + * - `'PBKDF2'` + * @param keyUsages See {@link https://nodejs.org/docs/latest/api/webcrypto.html#cryptokeyusages Key usages}. + * @since v15.0.0 + */ + deriveKey( + algorithm: AlgorithmIdentifier | EcdhKeyDeriveParams | HkdfParams | Pbkdf2Params, + baseKey: CryptoKey, + derivedKeyAlgorithm: AlgorithmIdentifier | AesDerivedKeyParams | HmacImportParams | HkdfParams | Pbkdf2Params, + extractable: boolean, + keyUsages: ReadonlyArray + ): Promise; + /** + * Using the method identified by `algorithm`, `subtle.digest()` attempts to generate a digest of `data`. + * If successful, the returned promise is resolved with an `` containing the computed digest. + * + * If `algorithm` is provided as a ``, it must be one of: + * + * - `'SHA-1'` + * - `'SHA-256'` + * - `'SHA-384'` + * - `'SHA-512'` + * + * If `algorithm` is provided as an ``, it must have a `name` property whose value is one of the above. + * @since v15.0.0 + */ + digest(algorithm: AlgorithmIdentifier, data: BufferSource): Promise; + /** + * Using the method and parameters specified by `algorithm` and the keying material provided by `key`, + * `subtle.encrypt()` attempts to encipher `data`. If successful, + * the returned promise is resolved with an `` containing the encrypted result. 
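A minimal AES-GCM round trip through `subtle.encrypt()`/`subtle.decrypt()`, assuming the usual 12-byte IV; a sketch, not a hardened recipe:

```js
const { webcrypto } = await import('crypto');
const { subtle } = webcrypto;

const key = await subtle.generateKey({ name: 'AES-GCM', length: 256 }, true, ['encrypt', 'decrypt']);
const iv = webcrypto.getRandomValues(new Uint8Array(12));
const data = new TextEncoder().encode('hello');

const ciphertext = await subtle.encrypt({ name: 'AES-GCM', iv }, key, data);
const plaintext = await subtle.decrypt({ name: 'AES-GCM', iv }, key, ciphertext);
console.log(new TextDecoder().decode(plaintext)); // 'hello'
```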
+ * + * The algorithms currently supported include: + * + * - `'RSA-OAEP'` + * - `'AES-CTR'` + * - `'AES-CBC'` + * - `'AES-GCM'` + * @since v15.0.0 + */ + encrypt(algorithm: AlgorithmIdentifier | RsaOaepParams | AesCtrParams | AesCbcParams | AesGcmParams, key: CryptoKey, data: BufferSource): Promise; + /** + * Exports the given key into the specified format, if supported. + * + * If the `` is not extractable, the returned promise will reject. + * + * When `format` is either `'pkcs8'` or `'spki'` and the export is successful, + * the returned promise will be resolved with an `` containing the exported key data. + * + * When `format` is `'jwk'` and the export is successful, the returned promise will be resolved with a + * JavaScript object conforming to the {@link https://tools.ietf.org/html/rfc7517 JSON Web Key} specification. + * @param format Must be one of `'raw'`, `'pkcs8'`, `'spki'`, or `'jwk'`. + * @returns `` containing ``. + * @since v15.0.0 + */ + exportKey(format: 'jwk', key: CryptoKey): Promise; + exportKey(format: Exclude, key: CryptoKey): Promise; + /** + * Using the method and parameters provided in `algorithm`, + * `subtle.generateKey()` attempts to generate new keying material. + * Depending the method used, the method may generate either a single `` or a ``. + * + * The `` (public and private key) generating algorithms supported include: + * + * - `'RSASSA-PKCS1-v1_5'` + * - `'RSA-PSS'` + * - `'RSA-OAEP'` + * - `'ECDSA'` + * - `'Ed25519'` + * - `'Ed448'` + * - `'ECDH'` + * - `'X25519'` + * - `'X448'` + * The `` (secret key) generating algorithms supported include: + * + * - `'HMAC'` + * - `'AES-CTR'` + * - `'AES-CBC'` + * - `'AES-GCM'` + * - `'AES-KW'` + * @param keyUsages See {@link https://nodejs.org/docs/latest/api/webcrypto.html#cryptokeyusages Key usages}. + * @since v15.0.0 + */ + generateKey(algorithm: RsaHashedKeyGenParams | EcKeyGenParams, extractable: boolean, keyUsages: ReadonlyArray): Promise; + generateKey(algorithm: AesKeyGenParams | HmacKeyGenParams | Pbkdf2Params, extractable: boolean, keyUsages: ReadonlyArray): Promise; + generateKey(algorithm: AlgorithmIdentifier, extractable: boolean, keyUsages: KeyUsage[]): Promise; + /** + * The `subtle.importKey()` method attempts to interpret the provided `keyData` as the given `format` + * to create a `` instance using the provided `algorithm`, `extractable`, and `keyUsages` arguments. + * If the import is successful, the returned promise will be resolved with the created ``. + * + * If importing a `'PBKDF2'` key, `extractable` must be `false`. + * @param format Must be one of `'raw'`, `'pkcs8'`, `'spki'`, or `'jwk'`. + * @param keyUsages See {@link https://nodejs.org/docs/latest/api/webcrypto.html#cryptokeyusages Key usages}. + * @since v15.0.0 + */ + importKey( + format: 'jwk', + keyData: JsonWebKey, + algorithm: AlgorithmIdentifier | RsaHashedImportParams | EcKeyImportParams | HmacImportParams | AesKeyAlgorithm, + extractable: boolean, + keyUsages: ReadonlyArray + ): Promise; + importKey( + format: Exclude, + keyData: BufferSource, + algorithm: AlgorithmIdentifier | RsaHashedImportParams | EcKeyImportParams | HmacImportParams | AesKeyAlgorithm, + extractable: boolean, + keyUsages: KeyUsage[] + ): Promise; + /** + * Using the method and parameters given by `algorithm` and the keying material provided by `key`, + * `subtle.sign()` attempts to generate a cryptographic signature of `data`. If successful, + * the returned promise is resolved with an `` containing the generated signature. 
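A sketch of `subtle.sign()` and `subtle.verify()` using HMAC, one of the supported algorithms listed above:

```js
const { webcrypto: { subtle } } = await import('crypto');

const key = await subtle.generateKey({ name: 'HMAC', hash: 'SHA-256' }, false, ['sign', 'verify']);
const data = new TextEncoder().encode('important message');

const signature = await subtle.sign('HMAC', key, data);
console.log(await subtle.verify('HMAC', key, signature, data)); // true
```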
+ * + * The algorithms currently supported include: + * + * - `'RSASSA-PKCS1-v1_5'` + * - `'RSA-PSS'` + * - `'ECDSA'` + * - `'Ed25519'` + * - `'Ed448'` + * - `'HMAC'` + * @since v15.0.0 + */ + sign(algorithm: AlgorithmIdentifier | RsaPssParams | EcdsaParams | Ed448Params, key: CryptoKey, data: BufferSource): Promise; + /** + * In cryptography, "wrapping a key" refers to exporting and then encrypting the keying material. + * The `subtle.unwrapKey()` method attempts to decrypt a wrapped key and create a `` instance. + * It is equivalent to calling `subtle.decrypt()` first on the encrypted key data (using the `wrappedKey`, `unwrapAlgo`, and `unwrappingKey` arguments as input) + * then passing the results in to the `subtle.importKey()` method using the `unwrappedKeyAlgo`, `extractable`, and `keyUsages` arguments as inputs. + * If successful, the returned promise is resolved with a `` object. + * + * The wrapping algorithms currently supported include: + * + * - `'RSA-OAEP'` + * - `'AES-CTR'` + * - `'AES-CBC'` + * - `'AES-GCM'` + * - `'AES-KW'` + * + * The unwrapped key algorithms supported include: + * + * - `'RSASSA-PKCS1-v1_5'` + * - `'RSA-PSS'` + * - `'RSA-OAEP'` + * - `'ECDSA'` + * - `'Ed25519'` + * - `'Ed448'` + * - `'ECDH'` + * - `'X25519'` + * - `'X448'` + * - `'HMAC'` + * - `'AES-CTR'` + * - `'AES-CBC'` + * - `'AES-GCM'` + * - `'AES-KW'` + * @param format Must be one of `'raw'`, `'pkcs8'`, `'spki'`, or `'jwk'`. + * @param keyUsages See {@link https://nodejs.org/docs/latest/api/webcrypto.html#cryptokeyusages Key usages}. + * @since v15.0.0 + */ + unwrapKey( + format: KeyFormat, + wrappedKey: BufferSource, + unwrappingKey: CryptoKey, + unwrapAlgorithm: AlgorithmIdentifier | RsaOaepParams | AesCtrParams | AesCbcParams | AesGcmParams, + unwrappedKeyAlgorithm: AlgorithmIdentifier | RsaHashedImportParams | EcKeyImportParams | HmacImportParams | AesKeyAlgorithm, + extractable: boolean, + keyUsages: KeyUsage[] + ): Promise; + /** + * Using the method and parameters given in `algorithm` and the keying material provided by `key`, + * `subtle.verify()` attempts to verify that `signature` is a valid cryptographic signature of `data`. + * The returned promise is resolved with either `true` or `false`. + * + * The algorithms currently supported include: + * + * - `'RSASSA-PKCS1-v1_5'` + * - `'RSA-PSS'` + * - `'ECDSA'` + * - `'Ed25519'` + * - `'Ed448'` + * - `'HMAC'` + * @since v15.0.0 + */ + verify(algorithm: AlgorithmIdentifier | RsaPssParams | EcdsaParams | Ed448Params, key: CryptoKey, signature: BufferSource, data: BufferSource): Promise; + /** + * In cryptography, "wrapping a key" refers to exporting and then encrypting the keying material. + * The `subtle.wrapKey()` method exports the keying material into the format identified by `format`, + * then encrypts it using the method and parameters specified by `wrapAlgo` and the keying material provided by `wrappingKey`. + * It is the equivalent to calling `subtle.exportKey()` using `format` and `key` as the arguments, + * then passing the result to the `subtle.encrypt()` method using `wrappingKey` and `wrapAlgo` as inputs. + * If successful, the returned promise will be resolved with an `` containing the encrypted key data. + * + * The wrapping algorithms currently supported include: + * + * - `'RSA-OAEP'` + * - `'AES-CTR'` + * - `'AES-CBC'` + * - `'AES-GCM'` + * - `'AES-KW'` + * @param format Must be one of `'raw'`, `'pkcs8'`, `'spki'`, or `'jwk'`. 
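A sketch of wrapping and unwrapping with AES-KW (the algorithms and key sizes are illustrative; the key being wrapped must be extractable so `wrapKey()` can export it):

```js
const { webcrypto: { subtle } } = await import('crypto');

const wrappingKey = await subtle.generateKey({ name: 'AES-KW', length: 256 }, false, ['wrapKey', 'unwrapKey']);
const key = await subtle.generateKey({ name: 'AES-GCM', length: 128 }, true, ['encrypt', 'decrypt']);

// wrapKey = exportKey('raw', key) followed by encrypt() with the wrapping key.
const wrapped = await subtle.wrapKey('raw', key, wrappingKey, 'AES-KW');

// unwrapKey = decrypt() followed by importKey() with the stated algorithm and usages.
const unwrapped = await subtle.unwrapKey('raw', wrapped, wrappingKey, 'AES-KW', 'AES-GCM', true, ['encrypt', 'decrypt']);
console.log(unwrapped.type); // 'secret'
```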
+ * @since v15.0.0 + */ + wrapKey(format: KeyFormat, key: CryptoKey, wrappingKey: CryptoKey, wrapAlgorithm: AlgorithmIdentifier | RsaOaepParams | AesCtrParams | AesCbcParams | AesGcmParams): Promise; + } + } +} +declare module 'node:crypto' { + export * from 'crypto'; +} diff --git a/node_modules/@types/node/dgram.d.ts b/node_modules/@types/node/dgram.d.ts new file mode 100755 index 0000000..247328d --- /dev/null +++ b/node_modules/@types/node/dgram.d.ts @@ -0,0 +1,545 @@ +/** + * The `dgram` module provides an implementation of UDP datagram sockets. + * + * ```js + * import dgram from 'dgram'; + * + * const server = dgram.createSocket('udp4'); + * + * server.on('error', (err) => { + * console.log(`server error:\n${err.stack}`); + * server.close(); + * }); + * + * server.on('message', (msg, rinfo) => { + * console.log(`server got: ${msg} from ${rinfo.address}:${rinfo.port}`); + * }); + * + * server.on('listening', () => { + * const address = server.address(); + * console.log(`server listening ${address.address}:${address.port}`); + * }); + * + * server.bind(41234); + * // Prints: server listening 0.0.0.0:41234 + * ``` + * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/dgram.js) + */ +declare module 'dgram' { + import { AddressInfo } from 'node:net'; + import * as dns from 'node:dns'; + import { EventEmitter, Abortable } from 'node:events'; + interface RemoteInfo { + address: string; + family: 'IPv4' | 'IPv6'; + port: number; + size: number; + } + interface BindOptions { + port?: number | undefined; + address?: string | undefined; + exclusive?: boolean | undefined; + fd?: number | undefined; + } + type SocketType = 'udp4' | 'udp6'; + interface SocketOptions extends Abortable { + type: SocketType; + reuseAddr?: boolean | undefined; + /** + * @default false + */ + ipv6Only?: boolean | undefined; + recvBufferSize?: number | undefined; + sendBufferSize?: number | undefined; + lookup?: ((hostname: string, options: dns.LookupOneOptions, callback: (err: NodeJS.ErrnoException | null, address: string, family: number) => void) => void) | undefined; + } + /** + * Creates a `dgram.Socket` object. Once the socket is created, calling `socket.bind()` will instruct the socket to begin listening for datagram + * messages. When `address` and `port` are not passed to `socket.bind()` the + * method will bind the socket to the "all interfaces" address on a random port + * (it does the right thing for both `udp4` and `udp6` sockets). The bound address + * and port can be retrieved using `socket.address().address` and `socket.address().port`. + * + * If the `signal` option is enabled, calling `.abort()` on the corresponding`AbortController` is similar to calling `.close()` on the socket: + * + * ```js + * const controller = new AbortController(); + * const { signal } = controller; + * const server = dgram.createSocket({ type: 'udp4', signal }); + * server.on('message', (msg, rinfo) => { + * console.log(`server got: ${msg} from ${rinfo.address}:${rinfo.port}`); + * }); + * // Later, when you want to close the server. + * controller.abort(); + * ``` + * @since v0.11.13 + * @param options Available options are: + * @param callback Attached as a listener for `'message'` events. Optional. + */ + function createSocket(type: SocketType, callback?: (msg: Buffer, rinfo: RemoteInfo) => void): Socket; + function createSocket(options: SocketOptions, callback?: (msg: Buffer, rinfo: RemoteInfo) => void): Socket; + /** + * Encapsulates the datagram functionality. 
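A small client to pair with the server example above (port `41234` matches that example and is otherwise arbitrary):

```js
import dgram from 'dgram';
import { Buffer } from 'buffer';

const client = dgram.createSocket('udp4');
client.send(Buffer.from('hello'), 41234, 'localhost', (err) => {
  if (err) console.error(err);
  client.close();
});
```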
+ * + * New instances of `dgram.Socket` are created using {@link createSocket}. + * The `new` keyword is not to be used to create `dgram.Socket` instances. + * @since v0.1.99 + */ + class Socket extends EventEmitter { + /** + * Tells the kernel to join a multicast group at the given `multicastAddress` and`multicastInterface` using the `IP_ADD_MEMBERSHIP` socket option. If the`multicastInterface` argument is not + * specified, the operating system will choose + * one interface and will add membership to it. To add membership to every + * available interface, call `addMembership` multiple times, once per interface. + * + * When called on an unbound socket, this method will implicitly bind to a random + * port, listening on all interfaces. + * + * When sharing a UDP socket across multiple `cluster` workers, the`socket.addMembership()` function must be called only once or an`EADDRINUSE` error will occur: + * + * ```js + * import cluster from 'cluster'; + * import dgram from 'dgram'; + * + * if (cluster.isPrimary) { + * cluster.fork(); // Works ok. + * cluster.fork(); // Fails with EADDRINUSE. + * } else { + * const s = dgram.createSocket('udp4'); + * s.bind(1234, () => { + * s.addMembership('224.0.0.114'); + * }); + * } + * ``` + * @since v0.6.9 + */ + addMembership(multicastAddress: string, multicastInterface?: string): void; + /** + * Returns an object containing the address information for a socket. + * For UDP sockets, this object will contain `address`, `family` and `port`properties. + * + * This method throws `EBADF` if called on an unbound socket. + * @since v0.1.99 + */ + address(): AddressInfo; + /** + * For UDP sockets, causes the `dgram.Socket` to listen for datagram + * messages on a named `port` and optional `address`. If `port` is not + * specified or is `0`, the operating system will attempt to bind to a + * random port. If `address` is not specified, the operating system will + * attempt to listen on all addresses. Once binding is complete, a`'listening'` event is emitted and the optional `callback` function is + * called. + * + * Specifying both a `'listening'` event listener and passing a`callback` to the `socket.bind()` method is not harmful but not very + * useful. + * + * A bound datagram socket keeps the Node.js process running to receive + * datagram messages. + * + * If binding fails, an `'error'` event is generated. In rare case (e.g. + * attempting to bind with a closed socket), an `Error` may be thrown. + * + * Example of a UDP server listening on port 41234: + * + * ```js + * import dgram from 'dgram'; + * + * const server = dgram.createSocket('udp4'); + * + * server.on('error', (err) => { + * console.log(`server error:\n${err.stack}`); + * server.close(); + * }); + * + * server.on('message', (msg, rinfo) => { + * console.log(`server got: ${msg} from ${rinfo.address}:${rinfo.port}`); + * }); + * + * server.on('listening', () => { + * const address = server.address(); + * console.log(`server listening ${address.address}:${address.port}`); + * }); + * + * server.bind(41234); + * // Prints: server listening 0.0.0.0:41234 + * ``` + * @since v0.1.99 + * @param callback with no parameters. Called when binding is complete. + */ + bind(port?: number, address?: string, callback?: () => void): this; + bind(port?: number, callback?: () => void): this; + bind(callback?: () => void): this; + bind(options: BindOptions, callback?: () => void): this; + /** + * Close the underlying socket and stop listening for data on it. 
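A sketch of binding to an ephemeral port and reading back the assignment with `socket.address()`:

```js
import dgram from 'dgram';

const socket = dgram.createSocket('udp4');
socket.bind(0, () => {
  // With port 0 the operating system picks a free port.
  const { address, port, family } = socket.address();
  console.log(`listening on ${address}:${port} (${family})`);
  socket.close();
});
```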
If a callback is + * provided, it is added as a listener for the `'close'` event. + * @since v0.1.99 + * @param callback Called when the socket has been closed. + */ + close(callback?: () => void): this; + /** + * Associates the `dgram.Socket` to a remote address and port. Every + * message sent by this handle is automatically sent to that destination. Also, + * the socket will only receive messages from that remote peer. + * Trying to call `connect()` on an already connected socket will result + * in an `ERR_SOCKET_DGRAM_IS_CONNECTED` exception. If `address` is not + * provided, `'127.0.0.1'` (for `udp4` sockets) or `'::1'` (for `udp6` sockets) + * will be used by default. Once the connection is complete, a `'connect'` event + * is emitted and the optional `callback` function is called. In case of failure, + * the `callback` is called or, failing this, an `'error'` event is emitted. + * @since v12.0.0 + * @param callback Called when the connection is completed or on error. + */ + connect(port: number, address?: string, callback?: () => void): void; + connect(port: number, callback: () => void): void; + /** + * A synchronous function that disassociates a connected `dgram.Socket` from + * its remote address. Trying to call `disconnect()` on an unbound or already + * disconnected socket will result in an `ERR_SOCKET_DGRAM_NOT_CONNECTED` exception. + * @since v12.0.0 + */ + disconnect(): void; + /** + * Instructs the kernel to leave a multicast group at `multicastAddress` using the`IP_DROP_MEMBERSHIP` socket option. This method is automatically called by the + * kernel when the socket is closed or the process terminates, so most apps will + * never have reason to call this. + * + * If `multicastInterface` is not specified, the operating system will attempt to + * drop membership on all valid interfaces. + * @since v0.6.9 + */ + dropMembership(multicastAddress: string, multicastInterface?: string): void; + /** + * This method throws `ERR_SOCKET_BUFFER_SIZE` if called on an unbound socket. + * @since v8.7.0 + * @return the `SO_RCVBUF` socket receive buffer size in bytes. + */ + getRecvBufferSize(): number; + /** + * This method throws `ERR_SOCKET_BUFFER_SIZE` if called on an unbound socket. + * @since v8.7.0 + * @return the `SO_SNDBUF` socket send buffer size in bytes. + */ + getSendBufferSize(): number; + /** + * By default, binding a socket will cause it to block the Node.js process from + * exiting as long as the socket is open. The `socket.unref()` method can be used + * to exclude the socket from the reference counting that keeps the Node.js + * process active. The `socket.ref()` method adds the socket back to the reference + * counting and restores the default behavior. + * + * Calling `socket.ref()` multiples times will have no additional effect. + * + * The `socket.ref()` method returns a reference to the socket so calls can be + * chained. + * @since v0.9.1 + */ + ref(): this; + /** + * Returns an object containing the `address`, `family`, and `port` of the remote + * endpoint. This method throws an `ERR_SOCKET_DGRAM_NOT_CONNECTED` exception + * if the socket is not connected. + * @since v12.0.0 + */ + remoteAddress(): AddressInfo; + /** + * Broadcasts a datagram on the socket. + * For connectionless sockets, the destination `port` and `address` must be + * specified. Connected sockets, on the other hand, will use their associated + * remote endpoint, so the `port` and `address` arguments must not be set. + * + * The `msg` argument contains the message to be sent. 
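A sketch of the connected-socket workflow described above (`connect()`, `remoteAddress()`, then `disconnect()`); port `41234` is again just an illustrative destination:

```js
import dgram from 'dgram';

const socket = dgram.createSocket('udp4');
socket.connect(41234, 'localhost', () => {
  console.log(socket.remoteAddress()); // { address: '127.0.0.1', family: 'IPv4', port: 41234 }
  // On a connected socket, send() takes no port/address arguments.
  socket.send('ping', (err) => {
    if (err) console.error(err);
    socket.disconnect();
    socket.close();
  });
});
```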
+ * Depending on its type, different behavior can apply. If `msg` is a `Buffer`, + * any `TypedArray` or a `DataView`, + * the `offset` and `length` specify the offset within the `Buffer` where the + * message begins and the number of bytes in the message, respectively. + * If `msg` is a `String`, then it is automatically converted to a `Buffer`with `'utf8'` encoding. With messages that + * contain multi-byte characters, `offset` and `length` will be calculated with + * respect to `byte length` and not the character position. + * If `msg` is an array, `offset` and `length` must not be specified. + * + * The `address` argument is a string. If the value of `address` is a host name, + * DNS will be used to resolve the address of the host. If `address` is not + * provided or otherwise nullish, `'127.0.0.1'` (for `udp4` sockets) or `'::1'`(for `udp6` sockets) will be used by default. + * + * If the socket has not been previously bound with a call to `bind`, the socket + * is assigned a random port number and is bound to the "all interfaces" address + * (`'0.0.0.0'` for `udp4` sockets, `'::0'` for `udp6` sockets.) + * + * An optional `callback` function may be specified to as a way of reporting + * DNS errors or for determining when it is safe to reuse the `buf` object. + * DNS lookups delay the time to send for at least one tick of the + * Node.js event loop. + * + * The only way to know for sure that the datagram has been sent is by using a`callback`. If an error occurs and a `callback` is given, the error will be + * passed as the first argument to the `callback`. If a `callback` is not given, + * the error is emitted as an `'error'` event on the `socket` object. + * + * Offset and length are optional but both _must_ be set if either are used. + * They are supported only when the first argument is a `Buffer`, a `TypedArray`, + * or a `DataView`. + * + * This method throws `ERR_SOCKET_BAD_PORT` if called on an unbound socket. + * + * Example of sending a UDP packet to a port on `localhost`; + * + * ```js + * import dgram from 'dgram'; + * import { Buffer } from 'buffer'; + * + * const message = Buffer.from('Some bytes'); + * const client = dgram.createSocket('udp4'); + * client.send(message, 41234, 'localhost', (err) => { + * client.close(); + * }); + * ``` + * + * Example of sending a UDP packet composed of multiple buffers to a port on`127.0.0.1`; + * + * ```js + * import dgram from 'dgram'; + * import { Buffer } from 'buffer'; + * + * const buf1 = Buffer.from('Some '); + * const buf2 = Buffer.from('bytes'); + * const client = dgram.createSocket('udp4'); + * client.send([buf1, buf2], 41234, (err) => { + * client.close(); + * }); + * ``` + * + * Sending multiple buffers might be faster or slower depending on the + * application and operating system. Run benchmarks to + * determine the optimal strategy on a case-by-case basis. Generally speaking, + * however, sending multiple buffers is faster. + * + * Example of sending a UDP packet using a socket connected to a port on`localhost`: + * + * ```js + * import dgram from 'dgram'; + * import { Buffer } from 'buffer'; + * + * const message = Buffer.from('Some bytes'); + * const client = dgram.createSocket('udp4'); + * client.connect(41234, 'localhost', (err) => { + * client.send(message, (err) => { + * client.close(); + * }); + * }); + * ``` + * @since v0.1.99 + * @param msg Message to be sent. + * @param offset Offset in the buffer where the message starts. + * @param length Number of bytes in the message. + * @param port Destination port. 
+ * @param address Destination host name or IP address. + * @param callback Called when the message has been sent. + */ + send(msg: string | Uint8Array | ReadonlyArray, port?: number, address?: string, callback?: (error: Error | null, bytes: number) => void): void; + send(msg: string | Uint8Array | ReadonlyArray, port?: number, callback?: (error: Error | null, bytes: number) => void): void; + send(msg: string | Uint8Array | ReadonlyArray, callback?: (error: Error | null, bytes: number) => void): void; + send(msg: string | Uint8Array, offset: number, length: number, port?: number, address?: string, callback?: (error: Error | null, bytes: number) => void): void; + send(msg: string | Uint8Array, offset: number, length: number, port?: number, callback?: (error: Error | null, bytes: number) => void): void; + send(msg: string | Uint8Array, offset: number, length: number, callback?: (error: Error | null, bytes: number) => void): void; + /** + * Sets or clears the `SO_BROADCAST` socket option. When set to `true`, UDP + * packets may be sent to a local interface's broadcast address. + * + * This method throws `EBADF` if called on an unbound socket. + * @since v0.6.9 + */ + setBroadcast(flag: boolean): void; + /** + * _All references to scope in this section are referring to [IPv6 Zone Indices](https://en.wikipedia.org/wiki/IPv6_address#Scoped_literal_IPv6_addresses), which are defined by [RFC + * 4007](https://tools.ietf.org/html/rfc4007). In string form, an IP_ + * _with a scope index is written as `'IP%scope'` where scope is an interface name_ + * _or interface number._ + * + * Sets the default outgoing multicast interface of the socket to a chosen + * interface or back to system interface selection. The `multicastInterface` must + * be a valid string representation of an IP from the socket's family. + * + * For IPv4 sockets, this should be the IP configured for the desired physical + * interface. All packets sent to multicast on the socket will be sent on the + * interface determined by the most recent successful use of this call. + * + * For IPv6 sockets, `multicastInterface` should include a scope to indicate the + * interface as in the examples that follow. In IPv6, individual `send` calls can + * also use explicit scope in addresses, so only packets sent to a multicast + * address without specifying an explicit scope are affected by the most recent + * successful use of this call. + * + * This method throws `EBADF` if called on an unbound socket. + * + * #### Example: IPv6 outgoing multicast interface + * + * On most systems, where scope format uses the interface name: + * + * ```js + * const socket = dgram.createSocket('udp6'); + * + * socket.bind(1234, () => { + * socket.setMulticastInterface('::%eth1'); + * }); + * ``` + * + * On Windows, where scope format uses an interface number: + * + * ```js + * const socket = dgram.createSocket('udp6'); + * + * socket.bind(1234, () => { + * socket.setMulticastInterface('::%2'); + * }); + * ``` + * + * #### Example: IPv4 outgoing multicast interface + * + * All systems use an IP of the host on the desired physical interface: + * + * ```js + * const socket = dgram.createSocket('udp4'); + * + * socket.bind(1234, () => { + * socket.setMulticastInterface('10.0.0.2'); + * }); + * ``` + * @since v8.6.0 + */ + setMulticastInterface(multicastInterface: string): void; + /** + * Sets or clears the `IP_MULTICAST_LOOP` socket option. When set to `true`, + * multicast packets will also be received on the local interface. 
+ * + * This method throws `EBADF` if called on an unbound socket. + * @since v0.3.8 + */ + setMulticastLoopback(flag: boolean): boolean; + /** + * Sets the `IP_MULTICAST_TTL` socket option. While TTL generally stands for + * "Time to Live", in this context it specifies the number of IP hops that a + * packet is allowed to travel through, specifically for multicast traffic. Each + * router or gateway that forwards a packet decrements the TTL. If the TTL is + * decremented to 0 by a router, it will not be forwarded. + * + * The `ttl` argument may be between 0 and 255\. The default on most systems is `1`. + * + * This method throws `EBADF` if called on an unbound socket. + * @since v0.3.8 + */ + setMulticastTTL(ttl: number): number; + /** + * Sets the `SO_RCVBUF` socket option. Sets the maximum socket receive buffer + * in bytes. + * + * This method throws `ERR_SOCKET_BUFFER_SIZE` if called on an unbound socket. + * @since v8.7.0 + */ + setRecvBufferSize(size: number): void; + /** + * Sets the `SO_SNDBUF` socket option. Sets the maximum socket send buffer + * in bytes. + * + * This method throws `ERR_SOCKET_BUFFER_SIZE` if called on an unbound socket. + * @since v8.7.0 + */ + setSendBufferSize(size: number): void; + /** + * Sets the `IP_TTL` socket option. While TTL generally stands for "Time to Live", + * in this context it specifies the number of IP hops that a packet is allowed to + * travel through. Each router or gateway that forwards a packet decrements the + * TTL. If the TTL is decremented to 0 by a router, it will not be forwarded. + * Changing TTL values is typically done for network probes or when multicasting. + * + * The `ttl` argument may be between 1 and 255\. The default on most systems + * is 64. + * + * This method throws `EBADF` if called on an unbound socket. + * @since v0.1.101 + */ + setTTL(ttl: number): number; + /** + * By default, binding a socket will cause it to block the Node.js process from + * exiting as long as the socket is open. The `socket.unref()` method can be used + * to exclude the socket from the reference counting that keeps the Node.js + * process active, allowing the process to exit even if the socket is still + * listening. + * + * Calling `socket.unref()` multiple times will have no addition effect. + * + * The `socket.unref()` method returns a reference to the socket so calls can be + * chained. + * @since v0.9.1 + */ + unref(): this; + /** + * Tells the kernel to join a source-specific multicast channel at the given`sourceAddress` and `groupAddress`, using the `multicastInterface` with the`IP_ADD_SOURCE_MEMBERSHIP` socket + * option. If the `multicastInterface` argument + * is not specified, the operating system will choose one interface and will add + * membership to it. To add membership to every available interface, call`socket.addSourceSpecificMembership()` multiple times, once per interface. + * + * When called on an unbound socket, this method will implicitly bind to a random + * port, listening on all interfaces. + * @since v13.1.0, v12.16.0 + */ + addSourceSpecificMembership(sourceAddress: string, groupAddress: string, multicastInterface?: string): void; + /** + * Instructs the kernel to leave a source-specific multicast channel at the given`sourceAddress` and `groupAddress` using the `IP_DROP_SOURCE_MEMBERSHIP`socket option. This method is + * automatically called by the kernel when the + * socket is closed or the process terminates, so most apps will never have + * reason to call this. 
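A sketch of the per-socket tuning calls above; they require a bound socket, and the buffer size reported back is platform-dependent:

```js
import dgram from 'dgram';

const socket = dgram.createSocket('udp4');
socket.bind(0, () => {
  socket.setTTL(64);
  socket.setRecvBufferSize(1024 * 1024);
  // The kernel may round or double the requested size.
  console.log(socket.getRecvBufferSize());
  socket.close();
});
```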
+ * + * If `multicastInterface` is not specified, the operating system will attempt to + * drop membership on all valid interfaces. + * @since v13.1.0, v12.16.0 + */ + dropSourceSpecificMembership(sourceAddress: string, groupAddress: string, multicastInterface?: string): void; + /** + * events.EventEmitter + * 1. close + * 2. connect + * 3. error + * 4. listening + * 5. message + */ + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: 'close', listener: () => void): this; + addListener(event: 'connect', listener: () => void): this; + addListener(event: 'error', listener: (err: Error) => void): this; + addListener(event: 'listening', listener: () => void): this; + addListener(event: 'message', listener: (msg: Buffer, rinfo: RemoteInfo) => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: 'close'): boolean; + emit(event: 'connect'): boolean; + emit(event: 'error', err: Error): boolean; + emit(event: 'listening'): boolean; + emit(event: 'message', msg: Buffer, rinfo: RemoteInfo): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: 'close', listener: () => void): this; + on(event: 'connect', listener: () => void): this; + on(event: 'error', listener: (err: Error) => void): this; + on(event: 'listening', listener: () => void): this; + on(event: 'message', listener: (msg: Buffer, rinfo: RemoteInfo) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: 'close', listener: () => void): this; + once(event: 'connect', listener: () => void): this; + once(event: 'error', listener: (err: Error) => void): this; + once(event: 'listening', listener: () => void): this; + once(event: 'message', listener: (msg: Buffer, rinfo: RemoteInfo) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: 'close', listener: () => void): this; + prependListener(event: 'connect', listener: () => void): this; + prependListener(event: 'error', listener: (err: Error) => void): this; + prependListener(event: 'listening', listener: () => void): this; + prependListener(event: 'message', listener: (msg: Buffer, rinfo: RemoteInfo) => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: 'close', listener: () => void): this; + prependOnceListener(event: 'connect', listener: () => void): this; + prependOnceListener(event: 'error', listener: (err: Error) => void): this; + prependOnceListener(event: 'listening', listener: () => void): this; + prependOnceListener(event: 'message', listener: (msg: Buffer, rinfo: RemoteInfo) => void): this; + } +} +declare module 'node:dgram' { + export * from 'dgram'; +} diff --git a/node_modules/@types/node/diagnostics_channel.d.ts b/node_modules/@types/node/diagnostics_channel.d.ts new file mode 100755 index 0000000..a87ba8c --- /dev/null +++ b/node_modules/@types/node/diagnostics_channel.d.ts @@ -0,0 +1,153 @@ +/** + * The `diagnostics_channel` module provides an API to create named channels + * to report arbitrary message data for diagnostics purposes. + * + * It can be accessed using: + * + * ```js + * import diagnostics_channel from 'diagnostics_channel'; + * ``` + * + * It is intended that a module writer wanting to report diagnostics messages + * will create one or many top-level channels to report messages through. 
+ * Channels may also be acquired at runtime but it is not encouraged + * due to the additional overhead of doing so. Channels may be exported for + * convenience, but as long as the name is known it can be acquired anywhere. + * + * If you intend for your module to produce diagnostics data for others to + * consume it is recommended that you include documentation of what named + * channels are used along with the shape of the message data. Channel names + * should generally include the module name to avoid collisions with data from + * other modules. + * @experimental + * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/diagnostics_channel.js) + */ +declare module 'diagnostics_channel' { + /** + * Check if there are active subscribers to the named channel. This is helpful if + * the message you want to send might be expensive to prepare. + * + * This API is optional but helpful when trying to publish messages from very + * performance-sensitive code. + * + * ```js + * import diagnostics_channel from 'diagnostics_channel'; + * + * if (diagnostics_channel.hasSubscribers('my-channel')) { + * // There are subscribers, prepare and publish message + * } + * ``` + * @since v15.1.0, v14.17.0 + * @param name The channel name + * @return If there are active subscribers + */ + function hasSubscribers(name: string): boolean; + /** + * This is the primary entry-point for anyone wanting to interact with a named + * channel. It produces a channel object which is optimized to reduce overhead at + * publish time as much as possible. + * + * ```js + * import diagnostics_channel from 'diagnostics_channel'; + * + * const channel = diagnostics_channel.channel('my-channel'); + * ``` + * @since v15.1.0, v14.17.0 + * @param name The channel name + * @return The named channel object + */ + function channel(name: string): Channel; + type ChannelListener = (message: unknown, name: string) => void; + /** + * The class `Channel` represents an individual named channel within the data + * pipeline. It is use to track subscribers and to publish messages when there + * are subscribers present. It exists as a separate object to avoid channel + * lookups at publish time, enabling very fast publish speeds and allowing + * for heavy use while incurring very minimal cost. Channels are created with {@link channel}, constructing a channel directly + * with `new Channel(name)` is not supported. + * @since v15.1.0, v14.17.0 + */ + class Channel { + readonly name: string; + /** + * Check if there are active subscribers to this channel. This is helpful if + * the message you want to send might be expensive to prepare. + * + * This API is optional but helpful when trying to publish messages from very + * performance-sensitive code. + * + * ```js + * import diagnostics_channel from 'diagnostics_channel'; + * + * const channel = diagnostics_channel.channel('my-channel'); + * + * if (channel.hasSubscribers) { + * // There are subscribers, prepare and publish message + * } + * ``` + * @since v15.1.0, v14.17.0 + */ + readonly hasSubscribers: boolean; + private constructor(name: string); + /** + * Publish a message to any subscribers to the channel. This will + * trigger message handlers synchronously so they will execute within + * the same context. 
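Putting the pieces above together, a sketch of one module publishing while another subscribes (the channel name `my-module:request` and the message shape are illustrative):

```js
import diagnostics_channel from 'diagnostics_channel';

const channel = diagnostics_channel.channel('my-module:request');

// Consumer side: handlers run synchronously when publish() is called.
channel.subscribe((message, name) => {
  console.log(`received on ${name}:`, message);
});

// Producer side: only build the message when someone is listening.
if (channel.hasSubscribers) {
  channel.publish({ url: '/users', durationMs: 12 });
}
```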
+ * + * ```js + * import diagnostics_channel from 'diagnostics_channel'; + * + * const channel = diagnostics_channel.channel('my-channel'); + * + * channel.publish({ + * some: 'message' + * }); + * ``` + * @since v15.1.0, v14.17.0 + * @param message The message to send to the channel subscribers + */ + publish(message: unknown): void; + /** + * Register a message handler to subscribe to this channel. This message handler + * will be run synchronously whenever a message is published to the channel. Any + * errors thrown in the message handler will trigger an `'uncaughtException'`. + * + * ```js + * import diagnostics_channel from 'diagnostics_channel'; + * + * const channel = diagnostics_channel.channel('my-channel'); + * + * channel.subscribe((message, name) => { + * // Received data + * }); + * ``` + * @since v15.1.0, v14.17.0 + * @param onMessage The handler to receive channel messages + */ + subscribe(onMessage: ChannelListener): void; + /** + * Remove a message handler previously registered to this channel with `channel.subscribe(onMessage)`. + * + * ```js + * import diagnostics_channel from 'diagnostics_channel'; + * + * const channel = diagnostics_channel.channel('my-channel'); + * + * function onMessage(message, name) { + * // Received data + * } + * + * channel.subscribe(onMessage); + * + * channel.unsubscribe(onMessage); + * ``` + * @since v15.1.0, v14.17.0 + * @param onMessage The previous subscribed handler to remove + * @return `true` if the handler was found, `false` otherwise. + */ + unsubscribe(onMessage: ChannelListener): void; + } +} +declare module 'node:diagnostics_channel' { + export * from 'diagnostics_channel'; +} diff --git a/node_modules/@types/node/dns.d.ts b/node_modules/@types/node/dns.d.ts new file mode 100755 index 0000000..305367b --- /dev/null +++ b/node_modules/@types/node/dns.d.ts @@ -0,0 +1,659 @@ +/** + * The `dns` module enables name resolution. For example, use it to look up IP + * addresses of host names. + * + * Although named for the [Domain Name System (DNS)](https://en.wikipedia.org/wiki/Domain_Name_System), it does not always use the + * DNS protocol for lookups. {@link lookup} uses the operating system + * facilities to perform name resolution. It may not need to perform any network + * communication. To perform name resolution the way other applications on the same + * system do, use {@link lookup}. + * + * ```js + * const dns = require('dns'); + * + * dns.lookup('example.org', (err, address, family) => { + * console.log('address: %j family: IPv%s', address, family); + * }); + * // address: "93.184.216.34" family: IPv4 + * ``` + * + * All other functions in the `dns` module connect to an actual DNS server to + * perform name resolution. They will always use the network to perform DNS + * queries. These functions do not use the same set of configuration files used by {@link lookup} (e.g. `/etc/hosts`). Use these functions to always perform + * DNS queries, bypassing other name-resolution facilities. + * + * ```js + * const dns = require('dns'); + * + * dns.resolve4('archive.org', (err, addresses) => { + * if (err) throw err; + * + * console.log(`addresses: ${JSON.stringify(addresses)}`); + * + * addresses.forEach((a) => { + * dns.reverse(a, (err, hostnames) => { + * if (err) { + * throw err; + * } + * console.log(`reverse for ${a}: ${JSON.stringify(hostnames)}`); + * }); + * }); + * }); + * ``` + * + * See the `Implementation considerations section` for more information. 
+ * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/dns.js) + */ +declare module 'dns' { + import * as dnsPromises from 'node:dns/promises'; + // Supported getaddrinfo flags. + export const ADDRCONFIG: number; + export const V4MAPPED: number; + /** + * If `dns.V4MAPPED` is specified, return resolved IPv6 addresses as + * well as IPv4 mapped IPv6 addresses. + */ + export const ALL: number; + export interface LookupOptions { + family?: number | undefined; + hints?: number | undefined; + all?: boolean | undefined; + /** + * @default true + */ + verbatim?: boolean | undefined; + } + export interface LookupOneOptions extends LookupOptions { + all?: false | undefined; + } + export interface LookupAllOptions extends LookupOptions { + all: true; + } + export interface LookupAddress { + address: string; + family: number; + } + /** + * Resolves a host name (e.g. `'nodejs.org'`) into the first found A (IPv4) or + * AAAA (IPv6) record. All `option` properties are optional. If `options` is an + * integer, then it must be `4` or `6` – if `options` is not provided, then IPv4 + * and IPv6 addresses are both returned if found. + * + * With the `all` option set to `true`, the arguments for `callback` change to`(err, addresses)`, with `addresses` being an array of objects with the + * properties `address` and `family`. + * + * On error, `err` is an `Error` object, where `err.code` is the error code. + * Keep in mind that `err.code` will be set to `'ENOTFOUND'` not only when + * the host name does not exist but also when the lookup fails in other ways + * such as no available file descriptors. + * + * `dns.lookup()` does not necessarily have anything to do with the DNS protocol. + * The implementation uses an operating system facility that can associate names + * with addresses, and vice versa. This implementation can have subtle but + * important consequences on the behavior of any Node.js program. Please take some + * time to consult the `Implementation considerations section` before using`dns.lookup()`. + * + * Example usage: + * + * ```js + * const dns = require('dns'); + * const options = { + * family: 6, + * hints: dns.ADDRCONFIG | dns.V4MAPPED, + * }; + * dns.lookup('example.com', options, (err, address, family) => + * console.log('address: %j family: IPv%s', address, family)); + * // address: "2606:2800:220:1:248:1893:25c8:1946" family: IPv6 + * + * // When options.all is true, the result will be an Array. + * options.all = true; + * dns.lookup('example.com', options, (err, addresses) => + * console.log('addresses: %j', addresses)); + * // addresses: [{"address":"2606:2800:220:1:248:1893:25c8:1946","family":6}] + * ``` + * + * If this method is invoked as its `util.promisify()` ed version, and `all`is not set to `true`, it returns a `Promise` for an `Object` with `address` and`family` properties. 
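+ *
+ * As a small illustrative sketch of that promisified form (the hostname is a
+ * placeholder, not part of the official example set):
+ *
+ * ```js
+ * const util = require('util');
+ * const dns = require('dns');
+ *
+ * const lookup = util.promisify(dns.lookup);
+ *
+ * lookup('example.com').then(({ address, family }) => {
+ *   console.log('address: %j family: IPv%s', address, family);
+ * });
+ * ```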
+ * @since v0.1.90
+ */
+ export function lookup(hostname: string, family: number, callback: (err: NodeJS.ErrnoException | null, address: string, family: number) => void): void;
+ export function lookup(hostname: string, options: LookupOneOptions, callback: (err: NodeJS.ErrnoException | null, address: string, family: number) => void): void;
+ export function lookup(hostname: string, options: LookupAllOptions, callback: (err: NodeJS.ErrnoException | null, addresses: LookupAddress[]) => void): void;
+ export function lookup(hostname: string, options: LookupOptions, callback: (err: NodeJS.ErrnoException | null, address: string | LookupAddress[], family: number) => void): void;
+ export function lookup(hostname: string, callback: (err: NodeJS.ErrnoException | null, address: string, family: number) => void): void;
+ export namespace lookup {
+ function __promisify__(hostname: string, options: LookupAllOptions): Promise<LookupAddress[]>;
+ function __promisify__(hostname: string, options?: LookupOneOptions | number): Promise<LookupAddress>;
+ function __promisify__(hostname: string, options: LookupOptions): Promise<LookupAddress | LookupAddress[]>;
+ }
+ /**
+ * Resolves the given `address` and `port` into a host name and service using
+ * the operating system's underlying `getnameinfo` implementation.
+ *
+ * If `address` is not a valid IP address, a `TypeError` will be thrown.
+ * The `port` will be coerced to a number. If it is not a legal port, a `TypeError`will be thrown.
+ *
+ * On an error, `err` is an `Error` object, where `err.code` is the error code.
+ *
+ * ```js
+ * const dns = require('dns');
+ * dns.lookupService('127.0.0.1', 22, (err, hostname, service) => {
+ *   console.log(hostname, service);
+ *   // Prints: localhost ssh
+ * });
+ * ```
+ *
+ * If this method is invoked as its `util.promisify()` ed version, it returns a`Promise` for an `Object` with `hostname` and `service` properties.
+ * @since v0.11.14
+ */
+ export function lookupService(address: string, port: number, callback: (err: NodeJS.ErrnoException | null, hostname: string, service: string) => void): void;
+ export namespace lookupService {
+ function __promisify__(
+ address: string,
+ port: number
+ ): Promise<{
+ hostname: string;
+ service: string;
+ }>;
+ }
+ export interface ResolveOptions {
+ ttl: boolean;
+ }
+ export interface ResolveWithTtlOptions extends ResolveOptions {
+ ttl: true;
+ }
+ export interface RecordWithTtl {
+ address: string;
+ ttl: number;
+ }
+ /** @deprecated Use `AnyARecord` or `AnyAaaaRecord` instead.
*/ + export type AnyRecordWithTtl = AnyARecord | AnyAaaaRecord; + export interface AnyARecord extends RecordWithTtl { + type: 'A'; + } + export interface AnyAaaaRecord extends RecordWithTtl { + type: 'AAAA'; + } + export interface CaaRecord { + critial: number; + issue?: string | undefined; + issuewild?: string | undefined; + iodef?: string | undefined; + contactemail?: string | undefined; + contactphone?: string | undefined; + } + export interface MxRecord { + priority: number; + exchange: string; + } + export interface AnyMxRecord extends MxRecord { + type: 'MX'; + } + export interface NaptrRecord { + flags: string; + service: string; + regexp: string; + replacement: string; + order: number; + preference: number; + } + export interface AnyNaptrRecord extends NaptrRecord { + type: 'NAPTR'; + } + export interface SoaRecord { + nsname: string; + hostmaster: string; + serial: number; + refresh: number; + retry: number; + expire: number; + minttl: number; + } + export interface AnySoaRecord extends SoaRecord { + type: 'SOA'; + } + export interface SrvRecord { + priority: number; + weight: number; + port: number; + name: string; + } + export interface AnySrvRecord extends SrvRecord { + type: 'SRV'; + } + export interface AnyTxtRecord { + type: 'TXT'; + entries: string[]; + } + export interface AnyNsRecord { + type: 'NS'; + value: string; + } + export interface AnyPtrRecord { + type: 'PTR'; + value: string; + } + export interface AnyCnameRecord { + type: 'CNAME'; + value: string; + } + export type AnyRecord = AnyARecord | AnyAaaaRecord | AnyCnameRecord | AnyMxRecord | AnyNaptrRecord | AnyNsRecord | AnyPtrRecord | AnySoaRecord | AnySrvRecord | AnyTxtRecord; + /** + * Uses the DNS protocol to resolve a host name (e.g. `'nodejs.org'`) into an array + * of the resource records. The `callback` function has arguments`(err, records)`. When successful, `records` will be an array of resource + * records. The type and structure of individual results varies based on `rrtype`: + * + * + * + * On error, `err` is an `Error` object, where `err.code` is one of the `DNS error codes`. + * @since v0.1.27 + * @param hostname Host name to resolve. + * @param [rrtype='A'] Resource record type. 
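+ *
+ * A minimal illustrative sketch (hostname and record type are placeholders):
+ *
+ * ```js
+ * const dns = require('dns');
+ *
+ * dns.resolve('example.com', 'MX', (err, addresses) => {
+ *   if (err) throw err;
+ *   // For 'MX' queries, addresses is an array of { priority, exchange } objects.
+ *   console.log(addresses);
+ * });
+ * ```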
+ */
+ export function resolve(hostname: string, callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void): void;
+ export function resolve(hostname: string, rrtype: 'A', callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void): void;
+ export function resolve(hostname: string, rrtype: 'AAAA', callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void): void;
+ export function resolve(hostname: string, rrtype: 'ANY', callback: (err: NodeJS.ErrnoException | null, addresses: AnyRecord[]) => void): void;
+ export function resolve(hostname: string, rrtype: 'CNAME', callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void): void;
+ export function resolve(hostname: string, rrtype: 'MX', callback: (err: NodeJS.ErrnoException | null, addresses: MxRecord[]) => void): void;
+ export function resolve(hostname: string, rrtype: 'NAPTR', callback: (err: NodeJS.ErrnoException | null, addresses: NaptrRecord[]) => void): void;
+ export function resolve(hostname: string, rrtype: 'NS', callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void): void;
+ export function resolve(hostname: string, rrtype: 'PTR', callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void): void;
+ export function resolve(hostname: string, rrtype: 'SOA', callback: (err: NodeJS.ErrnoException | null, addresses: SoaRecord) => void): void;
+ export function resolve(hostname: string, rrtype: 'SRV', callback: (err: NodeJS.ErrnoException | null, addresses: SrvRecord[]) => void): void;
+ export function resolve(hostname: string, rrtype: 'TXT', callback: (err: NodeJS.ErrnoException | null, addresses: string[][]) => void): void;
+ export function resolve(
+ hostname: string,
+ rrtype: string,
+ callback: (err: NodeJS.ErrnoException | null, addresses: string[] | MxRecord[] | NaptrRecord[] | SoaRecord | SrvRecord[] | string[][] | AnyRecord[]) => void
+ ): void;
+ export namespace resolve {
+ function __promisify__(hostname: string, rrtype?: 'A' | 'AAAA' | 'CNAME' | 'NS' | 'PTR'): Promise<string[]>;
+ function __promisify__(hostname: string, rrtype: 'ANY'): Promise<AnyRecord[]>;
+ function __promisify__(hostname: string, rrtype: 'MX'): Promise<MxRecord[]>;
+ function __promisify__(hostname: string, rrtype: 'NAPTR'): Promise<NaptrRecord[]>;
+ function __promisify__(hostname: string, rrtype: 'SOA'): Promise<SoaRecord>;
+ function __promisify__(hostname: string, rrtype: 'SRV'): Promise<SrvRecord[]>;
+ function __promisify__(hostname: string, rrtype: 'TXT'): Promise<string[][]>;
+ function __promisify__(hostname: string, rrtype: string): Promise<string[] | MxRecord[] | NaptrRecord[] | SoaRecord | SrvRecord[] | string[][] | AnyRecord[]>;
+ }
+ /**
+ * Uses the DNS protocol to resolve a IPv4 addresses (`A` records) for the`hostname`. The `addresses` argument passed to the `callback` function
+ * will contain an array of IPv4 addresses (e.g.`['74.125.79.104', '74.125.79.105', '74.125.79.106']`).
+ * @since v0.1.16
+ * @param hostname Host name to resolve.
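+ *
+ * A minimal illustrative sketch using the `ttl` option described by
+ * `ResolveWithTtlOptions` above (hostname is a placeholder):
+ *
+ * ```js
+ * const dns = require('dns');
+ *
+ * dns.resolve4('example.org', { ttl: true }, (err, records) => {
+ *   if (err) throw err;
+ *   // With { ttl: true }, records is an array of { address, ttl } objects.
+ *   console.log(records);
+ * });
+ * ```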
+ */
+ export function resolve4(hostname: string, callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void): void;
+ export function resolve4(hostname: string, options: ResolveWithTtlOptions, callback: (err: NodeJS.ErrnoException | null, addresses: RecordWithTtl[]) => void): void;
+ export function resolve4(hostname: string, options: ResolveOptions, callback: (err: NodeJS.ErrnoException | null, addresses: string[] | RecordWithTtl[]) => void): void;
+ export namespace resolve4 {
+ function __promisify__(hostname: string): Promise<string[]>;
+ function __promisify__(hostname: string, options: ResolveWithTtlOptions): Promise<RecordWithTtl[]>;
+ function __promisify__(hostname: string, options?: ResolveOptions): Promise<string[] | RecordWithTtl[]>;
+ }
+ /**
+ * Uses the DNS protocol to resolve a IPv6 addresses (`AAAA` records) for the`hostname`. The `addresses` argument passed to the `callback` function
+ * will contain an array of IPv6 addresses.
+ * @since v0.1.16
+ * @param hostname Host name to resolve.
+ */
+ export function resolve6(hostname: string, callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void): void;
+ export function resolve6(hostname: string, options: ResolveWithTtlOptions, callback: (err: NodeJS.ErrnoException | null, addresses: RecordWithTtl[]) => void): void;
+ export function resolve6(hostname: string, options: ResolveOptions, callback: (err: NodeJS.ErrnoException | null, addresses: string[] | RecordWithTtl[]) => void): void;
+ export namespace resolve6 {
+ function __promisify__(hostname: string): Promise<string[]>;
+ function __promisify__(hostname: string, options: ResolveWithTtlOptions): Promise<RecordWithTtl[]>;
+ function __promisify__(hostname: string, options?: ResolveOptions): Promise<string[] | RecordWithTtl[]>;
+ }
+ /**
+ * Uses the DNS protocol to resolve `CNAME` records for the `hostname`. The`addresses` argument passed to the `callback` function
+ * will contain an array of canonical name records available for the `hostname`(e.g. `['bar.example.com']`).
+ * @since v0.3.2
+ */
+ export function resolveCname(hostname: string, callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void): void;
+ export namespace resolveCname {
+ function __promisify__(hostname: string): Promise<string[]>;
+ }
+ /**
+ * Uses the DNS protocol to resolve `CAA` records for the `hostname`. The`addresses` argument passed to the `callback` function
+ * will contain an array of certification authority authorization records
+ * available for the `hostname` (e.g. `[{critical: 0, iodef: 'mailto:pki@example.com'}, {critical: 128, issue: 'pki.example.com'}]`).
+ * @since v15.0.0, v14.17.0
+ */
+ export function resolveCaa(hostname: string, callback: (err: NodeJS.ErrnoException | null, records: CaaRecord[]) => void): void;
+ export namespace resolveCaa {
+ function __promisify__(hostname: string): Promise<CaaRecord[]>;
+ }
+ /**
+ * Uses the DNS protocol to resolve mail exchange records (`MX` records) for the`hostname`. The `addresses` argument passed to the `callback` function will
+ * contain an array of objects containing both a `priority` and `exchange`property (e.g. `[{priority: 10, exchange: 'mx.example.com'}, ...]`).
+ * @since v0.1.27
+ */
+ export function resolveMx(hostname: string, callback: (err: NodeJS.ErrnoException | null, addresses: MxRecord[]) => void): void;
+ export namespace resolveMx {
+ function __promisify__(hostname: string): Promise<MxRecord[]>;
+ }
+ /**
+ * Uses the DNS protocol to resolve regular expression based records (`NAPTR`records) for the `hostname`.
The `addresses` argument passed to the `callback`function will contain an array of + * objects with the following properties: + * + * * `flags` + * * `service` + * * `regexp` + * * `replacement` + * * `order` + * * `preference` + * + * ```js + * { + * flags: 's', + * service: 'SIP+D2U', + * regexp: '', + * replacement: '_sip._udp.example.com', + * order: 30, + * preference: 100 + * } + * ``` + * @since v0.9.12 + */ + export function resolveNaptr(hostname: string, callback: (err: NodeJS.ErrnoException | null, addresses: NaptrRecord[]) => void): void; + export namespace resolveNaptr { + function __promisify__(hostname: string): Promise; + } + /** + * Uses the DNS protocol to resolve name server records (`NS` records) for the`hostname`. The `addresses` argument passed to the `callback` function will + * contain an array of name server records available for `hostname`(e.g. `['ns1.example.com', 'ns2.example.com']`). + * @since v0.1.90 + */ + export function resolveNs(hostname: string, callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void): void; + export namespace resolveNs { + function __promisify__(hostname: string): Promise; + } + /** + * Uses the DNS protocol to resolve pointer records (`PTR` records) for the`hostname`. The `addresses` argument passed to the `callback` function will + * be an array of strings containing the reply records. + * @since v6.0.0 + */ + export function resolvePtr(hostname: string, callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void): void; + export namespace resolvePtr { + function __promisify__(hostname: string): Promise; + } + /** + * Uses the DNS protocol to resolve a start of authority record (`SOA` record) for + * the `hostname`. The `address` argument passed to the `callback` function will + * be an object with the following properties: + * + * * `nsname` + * * `hostmaster` + * * `serial` + * * `refresh` + * * `retry` + * * `expire` + * * `minttl` + * + * ```js + * { + * nsname: 'ns.example.com', + * hostmaster: 'root.example.com', + * serial: 2013101809, + * refresh: 10000, + * retry: 2400, + * expire: 604800, + * minttl: 3600 + * } + * ``` + * @since v0.11.10 + */ + export function resolveSoa(hostname: string, callback: (err: NodeJS.ErrnoException | null, address: SoaRecord) => void): void; + export namespace resolveSoa { + function __promisify__(hostname: string): Promise; + } + /** + * Uses the DNS protocol to resolve service records (`SRV` records) for the`hostname`. The `addresses` argument passed to the `callback` function will + * be an array of objects with the following properties: + * + * * `priority` + * * `weight` + * * `port` + * * `name` + * + * ```js + * { + * priority: 10, + * weight: 5, + * port: 21223, + * name: 'service.example.com' + * } + * ``` + * @since v0.1.27 + */ + export function resolveSrv(hostname: string, callback: (err: NodeJS.ErrnoException | null, addresses: SrvRecord[]) => void): void; + export namespace resolveSrv { + function __promisify__(hostname: string): Promise; + } + /** + * Uses the DNS protocol to resolve text queries (`TXT` records) for the`hostname`. The `records` argument passed to the `callback` function is a + * two-dimensional array of the text records available for `hostname` (e.g.`[ ['v=spf1 ip4:0.0.0.0 ', '~all' ] ]`). Each sub-array contains TXT chunks of + * one record. Depending on the use case, these could be either joined together or + * treated separately. 
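+ *
+ * A minimal illustrative sketch of joining the chunks of each record back
+ * together (hostname is a placeholder):
+ *
+ * ```js
+ * const dns = require('dns');
+ *
+ * dns.resolveTxt('example.com', (err, records) => {
+ *   if (err) throw err;
+ *   // Each entry of records is an array of chunks belonging to one TXT record.
+ *   const joined = records.map((chunks) => chunks.join(''));
+ *   console.log(joined);
+ * });
+ * ```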
+ * @since v0.1.27 + */ + export function resolveTxt(hostname: string, callback: (err: NodeJS.ErrnoException | null, addresses: string[][]) => void): void; + export namespace resolveTxt { + function __promisify__(hostname: string): Promise; + } + /** + * Uses the DNS protocol to resolve all records (also known as `ANY` or `*` query). + * The `ret` argument passed to the `callback` function will be an array containing + * various types of records. Each object has a property `type` that indicates the + * type of the current record. And depending on the `type`, additional properties + * will be present on the object: + * + * + * + * Here is an example of the `ret` object passed to the callback: + * + * ```js + * [ { type: 'A', address: '127.0.0.1', ttl: 299 }, + * { type: 'CNAME', value: 'example.com' }, + * { type: 'MX', exchange: 'alt4.aspmx.l.example.com', priority: 50 }, + * { type: 'NS', value: 'ns1.example.com' }, + * { type: 'TXT', entries: [ 'v=spf1 include:_spf.example.com ~all' ] }, + * { type: 'SOA', + * nsname: 'ns1.example.com', + * hostmaster: 'admin.example.com', + * serial: 156696742, + * refresh: 900, + * retry: 900, + * expire: 1800, + * minttl: 60 } ] + * ``` + * + * DNS server operators may choose not to respond to `ANY`queries. It may be better to call individual methods like {@link resolve4},{@link resolveMx}, and so on. For more details, see [RFC + * 8482](https://tools.ietf.org/html/rfc8482). + */ + export function resolveAny(hostname: string, callback: (err: NodeJS.ErrnoException | null, addresses: AnyRecord[]) => void): void; + export namespace resolveAny { + function __promisify__(hostname: string): Promise; + } + /** + * Performs a reverse DNS query that resolves an IPv4 or IPv6 address to an + * array of host names. + * + * On error, `err` is an `Error` object, where `err.code` is + * one of the `DNS error codes`. + * @since v0.1.16 + */ + export function reverse(ip: string, callback: (err: NodeJS.ErrnoException | null, hostnames: string[]) => void): void; + /** + * Sets the IP address and port of servers to be used when performing DNS + * resolution. The `servers` argument is an array of [RFC 5952](https://tools.ietf.org/html/rfc5952#section-6) formatted + * addresses. If the port is the IANA default DNS port (53) it can be omitted. + * + * ```js + * dns.setServers([ + * '4.4.4.4', + * '[2001:4860:4860::8888]', + * '4.4.4.4:1053', + * '[2001:4860:4860::8888]:1053', + * ]); + * ``` + * + * An error will be thrown if an invalid address is provided. + * + * The `dns.setServers()` method must not be called while a DNS query is in + * progress. + * + * The {@link setServers} method affects only {@link resolve},`dns.resolve*()` and {@link reverse} (and specifically _not_ {@link lookup}). + * + * This method works much like [resolve.conf](https://man7.org/linux/man-pages/man5/resolv.conf.5.html). + * That is, if attempting to resolve with the first server provided results in a`NOTFOUND` error, the `resolve()` method will _not_ attempt to resolve with + * subsequent servers provided. Fallback DNS servers will only be used if the + * earlier ones time out or result in some other error. + * @since v0.11.3 + * @param servers array of `RFC 5952` formatted addresses + */ + export function setServers(servers: ReadonlyArray): void; + /** + * Returns an array of IP address strings, formatted according to [RFC 5952](https://tools.ietf.org/html/rfc5952#section-6), + * that are currently configured for DNS resolution. 
A string will include a port + * section if a custom port is used. + * + * ```js + * [ + * '4.4.4.4', + * '2001:4860:4860::8888', + * '4.4.4.4:1053', + * '[2001:4860:4860::8888]:1053', + * ] + * ``` + * @since v0.11.3 + */ + export function getServers(): string[]; + /** + * Set the default value of `verbatim` in {@link lookup} and `dnsPromises.lookup()`. The value could be: + * + * * `ipv4first`: sets default `verbatim` `false`. + * * `verbatim`: sets default `verbatim` `true`. + * + * The default is `ipv4first` and {@link setDefaultResultOrder} have higher + * priority than `--dns-result-order`. When using `worker threads`,{@link setDefaultResultOrder} from the main thread won't affect the default + * dns orders in workers. + * @since v16.4.0, v14.18.0 + * @param order must be `'ipv4first'` or `'verbatim'`. + */ + export function setDefaultResultOrder(order: 'ipv4first' | 'verbatim'): void; + // Error codes + export const NODATA: string; + export const FORMERR: string; + export const SERVFAIL: string; + export const NOTFOUND: string; + export const NOTIMP: string; + export const REFUSED: string; + export const BADQUERY: string; + export const BADNAME: string; + export const BADFAMILY: string; + export const BADRESP: string; + export const CONNREFUSED: string; + export const TIMEOUT: string; + export const EOF: string; + export const FILE: string; + export const NOMEM: string; + export const DESTRUCTION: string; + export const BADSTR: string; + export const BADFLAGS: string; + export const NONAME: string; + export const BADHINTS: string; + export const NOTINITIALIZED: string; + export const LOADIPHLPAPI: string; + export const ADDRGETNETWORKPARAMS: string; + export const CANCELLED: string; + export interface ResolverOptions { + timeout?: number | undefined; + /** + * @default 4 + */ + tries?: number; + } + /** + * An independent resolver for DNS requests. + * + * Creating a new resolver uses the default server settings. Setting + * the servers used for a resolver using `resolver.setServers()` does not affect + * other resolvers: + * + * ```js + * const { Resolver } = require('dns'); + * const resolver = new Resolver(); + * resolver.setServers(['4.4.4.4']); + * + * // This request will use the server at 4.4.4.4, independent of global settings. + * resolver.resolve4('example.org', (err, addresses) => { + * // ... + * }); + * ``` + * + * The following methods from the `dns` module are available: + * + * * `resolver.getServers()` + * * `resolver.resolve()` + * * `resolver.resolve4()` + * * `resolver.resolve6()` + * * `resolver.resolveAny()` + * * `resolver.resolveCaa()` + * * `resolver.resolveCname()` + * * `resolver.resolveMx()` + * * `resolver.resolveNaptr()` + * * `resolver.resolveNs()` + * * `resolver.resolvePtr()` + * * `resolver.resolveSoa()` + * * `resolver.resolveSrv()` + * * `resolver.resolveTxt()` + * * `resolver.reverse()` + * * `resolver.setServers()` + * @since v8.3.0 + */ + export class Resolver { + constructor(options?: ResolverOptions); + /** + * Cancel all outstanding DNS queries made by this resolver. The corresponding + * callbacks will be called with an error with code `ECANCELLED`. 
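+ *
+ * A minimal illustrative sketch (hostname is a placeholder):
+ *
+ * ```js
+ * const { Resolver } = require('dns');
+ * const resolver = new Resolver();
+ *
+ * resolver.resolve4('example.org', (err, addresses) => {
+ *   // If cancel() ran first, err.code is 'ECANCELLED'.
+ * });
+ *
+ * resolver.cancel();
+ * ```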
+ * @since v8.3.0 + */ + cancel(): void; + getServers: typeof getServers; + resolve: typeof resolve; + resolve4: typeof resolve4; + resolve6: typeof resolve6; + resolveAny: typeof resolveAny; + resolveCname: typeof resolveCname; + resolveMx: typeof resolveMx; + resolveNaptr: typeof resolveNaptr; + resolveNs: typeof resolveNs; + resolvePtr: typeof resolvePtr; + resolveSoa: typeof resolveSoa; + resolveSrv: typeof resolveSrv; + resolveTxt: typeof resolveTxt; + reverse: typeof reverse; + /** + * The resolver instance will send its requests from the specified IP address. + * This allows programs to specify outbound interfaces when used on multi-homed + * systems. + * + * If a v4 or v6 address is not specified, it is set to the default, and the + * operating system will choose a local address automatically. + * + * The resolver will use the v4 local address when making requests to IPv4 DNS + * servers, and the v6 local address when making requests to IPv6 DNS servers. + * The `rrtype` of resolution requests has no impact on the local address used. + * @since v15.1.0, v14.17.0 + * @param [ipv4='0.0.0.0'] A string representation of an IPv4 address. + * @param [ipv6='::0'] A string representation of an IPv6 address. + */ + setLocalAddress(ipv4?: string, ipv6?: string): void; + setServers: typeof setServers; + } + export { dnsPromises as promises }; +} +declare module 'node:dns' { + export * from 'dns'; +} diff --git a/node_modules/@types/node/dns/promises.d.ts b/node_modules/@types/node/dns/promises.d.ts new file mode 100755 index 0000000..77cd807 --- /dev/null +++ b/node_modules/@types/node/dns/promises.d.ts @@ -0,0 +1,370 @@ +/** + * The `dns.promises` API provides an alternative set of asynchronous DNS methods + * that return `Promise` objects rather than using callbacks. The API is accessible + * via `require('dns').promises` or `require('dns/promises')`. + * @since v10.6.0 + */ +declare module 'dns/promises' { + import { + LookupAddress, + LookupOneOptions, + LookupAllOptions, + LookupOptions, + AnyRecord, + CaaRecord, + MxRecord, + NaptrRecord, + SoaRecord, + SrvRecord, + ResolveWithTtlOptions, + RecordWithTtl, + ResolveOptions, + ResolverOptions, + } from 'node:dns'; + /** + * Returns an array of IP address strings, formatted according to [RFC 5952](https://tools.ietf.org/html/rfc5952#section-6), + * that are currently configured for DNS resolution. A string will include a port + * section if a custom port is used. + * + * ```js + * [ + * '4.4.4.4', + * '2001:4860:4860::8888', + * '4.4.4.4:1053', + * '[2001:4860:4860::8888]:1053', + * ] + * ``` + * @since v10.6.0 + */ + function getServers(): string[]; + /** + * Resolves a host name (e.g. `'nodejs.org'`) into the first found A (IPv4) or + * AAAA (IPv6) record. All `option` properties are optional. If `options` is an + * integer, then it must be `4` or `6` – if `options` is not provided, then IPv4 + * and IPv6 addresses are both returned if found. + * + * With the `all` option set to `true`, the `Promise` is resolved with `addresses`being an array of objects with the properties `address` and `family`. + * + * On error, the `Promise` is rejected with an `Error` object, where `err.code`is the error code. + * Keep in mind that `err.code` will be set to `'ENOTFOUND'` not only when + * the host name does not exist but also when the lookup fails in other ways + * such as no available file descriptors. + * + * `dnsPromises.lookup()` does not necessarily have anything to do with the DNS + * protocol. 
+ * The implementation uses an operating system facility that can
+ * associate names with addresses, and vice versa. This implementation can have
+ * subtle but important consequences on the behavior of any Node.js program. Please
+ * take some time to consult the `Implementation considerations section` before
+ * using `dnsPromises.lookup()`.
+ *
+ * Example usage:
+ *
+ * ```js
+ * const dns = require('dns');
+ * const dnsPromises = dns.promises;
+ * const options = {
+ *   family: 6,
+ *   hints: dns.ADDRCONFIG | dns.V4MAPPED,
+ * };
+ *
+ * dnsPromises.lookup('example.com', options).then((result) => {
+ *   console.log('address: %j family: IPv%s', result.address, result.family);
+ *   // address: "2606:2800:220:1:248:1893:25c8:1946" family: IPv6
+ * });
+ *
+ * // When options.all is true, the result will be an Array.
+ * options.all = true;
+ * dnsPromises.lookup('example.com', options).then((result) => {
+ *   console.log('addresses: %j', result);
+ *   // addresses: [{"address":"2606:2800:220:1:248:1893:25c8:1946","family":6}]
+ * });
+ * ```
+ * @since v10.6.0
+ */
+ function lookup(hostname: string, family: number): Promise<LookupAddress>;
+ function lookup(hostname: string, options: LookupOneOptions): Promise<LookupAddress>;
+ function lookup(hostname: string, options: LookupAllOptions): Promise<LookupAddress[]>;
+ function lookup(hostname: string, options: LookupOptions): Promise<LookupAddress | LookupAddress[]>;
+ function lookup(hostname: string): Promise<LookupAddress>;
+ /**
+ * Resolves the given `address` and `port` into a host name and service using
+ * the operating system's underlying `getnameinfo` implementation.
+ *
+ * If `address` is not a valid IP address, a `TypeError` will be thrown.
+ * The `port` will be coerced to a number. If it is not a legal port, a `TypeError`will be thrown.
+ *
+ * On error, the `Promise` is rejected with an `Error` object, where `err.code`is the error code.
+ *
+ * ```js
+ * const dnsPromises = require('dns').promises;
+ * dnsPromises.lookupService('127.0.0.1', 22).then((result) => {
+ *   console.log(result.hostname, result.service);
+ *   // Prints: localhost ssh
+ * });
+ * ```
+ * @since v10.6.0
+ */
+ function lookupService(
+ address: string,
+ port: number
+ ): Promise<{
+ hostname: string;
+ service: string;
+ }>;
+ /**
+ * Uses the DNS protocol to resolve a host name (e.g. `'nodejs.org'`) into an array
+ * of the resource records. When successful, the `Promise` is resolved with an
+ * array of resource records. The type and structure of individual results vary
+ * based on `rrtype`:
+ *
+ *
+ *
+ * On error, the `Promise` is rejected with an `Error` object, where `err.code`is one of the `DNS error codes`.
+ * @since v10.6.0
+ * @param hostname Host name to resolve.
+ * @param [rrtype='A'] Resource record type.
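+ *
+ * A minimal illustrative sketch (hostname and record type are placeholders;
+ * an async context is assumed for `await`):
+ *
+ * ```js
+ * const dnsPromises = require('dns').promises;
+ *
+ * const records = await dnsPromises.resolve('example.com', 'TXT');
+ * console.log(records); // A two-dimensional array of TXT chunks.
+ * ```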
+ */
+ function resolve(hostname: string): Promise<string[]>;
+ function resolve(hostname: string, rrtype: 'A'): Promise<string[]>;
+ function resolve(hostname: string, rrtype: 'AAAA'): Promise<string[]>;
+ function resolve(hostname: string, rrtype: 'ANY'): Promise<AnyRecord[]>;
+ function resolve(hostname: string, rrtype: 'CAA'): Promise<CaaRecord[]>;
+ function resolve(hostname: string, rrtype: 'CNAME'): Promise<string[]>;
+ function resolve(hostname: string, rrtype: 'MX'): Promise<MxRecord[]>;
+ function resolve(hostname: string, rrtype: 'NAPTR'): Promise<NaptrRecord[]>;
+ function resolve(hostname: string, rrtype: 'NS'): Promise<string[]>;
+ function resolve(hostname: string, rrtype: 'PTR'): Promise<string[]>;
+ function resolve(hostname: string, rrtype: 'SOA'): Promise<SoaRecord>;
+ function resolve(hostname: string, rrtype: 'SRV'): Promise<SrvRecord[]>;
+ function resolve(hostname: string, rrtype: 'TXT'): Promise<string[][]>;
+ function resolve(hostname: string, rrtype: string): Promise<string[] | MxRecord[] | NaptrRecord[] | SoaRecord | SrvRecord[] | string[][] | AnyRecord[]>;
+ /**
+ * Uses the DNS protocol to resolve IPv4 addresses (`A` records) for the`hostname`. On success, the `Promise` is resolved with an array of IPv4
+ * addresses (e.g. `['74.125.79.104', '74.125.79.105', '74.125.79.106']`).
+ * @since v10.6.0
+ * @param hostname Host name to resolve.
+ */
+ function resolve4(hostname: string): Promise<string[]>;
+ function resolve4(hostname: string, options: ResolveWithTtlOptions): Promise<RecordWithTtl[]>;
+ function resolve4(hostname: string, options: ResolveOptions): Promise<string[] | RecordWithTtl[]>;
+ /**
+ * Uses the DNS protocol to resolve IPv6 addresses (`AAAA` records) for the`hostname`. On success, the `Promise` is resolved with an array of IPv6
+ * addresses.
+ * @since v10.6.0
+ * @param hostname Host name to resolve.
+ */
+ function resolve6(hostname: string): Promise<string[]>;
+ function resolve6(hostname: string, options: ResolveWithTtlOptions): Promise<RecordWithTtl[]>;
+ function resolve6(hostname: string, options: ResolveOptions): Promise<string[] | RecordWithTtl[]>;
+ /**
+ * Uses the DNS protocol to resolve all records (also known as `ANY` or `*` query).
+ * On success, the `Promise` is resolved with an array containing various types of
+ * records. Each object has a property `type` that indicates the type of the
+ * current record. And depending on the `type`, additional properties will be
+ * present on the object:
+ *
+ *
+ *
+ * Here is an example of the result object:
+ *
+ * ```js
+ * [ { type: 'A', address: '127.0.0.1', ttl: 299 },
+ *   { type: 'CNAME', value: 'example.com' },
+ *   { type: 'MX', exchange: 'alt4.aspmx.l.example.com', priority: 50 },
+ *   { type: 'NS', value: 'ns1.example.com' },
+ *   { type: 'TXT', entries: [ 'v=spf1 include:_spf.example.com ~all' ] },
+ *   { type: 'SOA',
+ *     nsname: 'ns1.example.com',
+ *     hostmaster: 'admin.example.com',
+ *     serial: 156696742,
+ *     refresh: 900,
+ *     retry: 900,
+ *     expire: 1800,
+ *     minttl: 60 } ]
+ * ```
+ * @since v10.6.0
+ */
+ function resolveAny(hostname: string): Promise<AnyRecord[]>;
+ /**
+ * Uses the DNS protocol to resolve `CAA` records for the `hostname`. On success,
+ * the `Promise` is resolved with an array of objects containing available
+ * certification authority authorization records available for the `hostname`(e.g. `[{critical: 0, iodef: 'mailto:pki@example.com'},{critical: 128, issue: 'pki.example.com'}]`).
+ * @since v15.0.0, v14.17.0
+ */
+ function resolveCaa(hostname: string): Promise<CaaRecord[]>;
+ /**
+ * Uses the DNS protocol to resolve `CNAME` records for the `hostname`. On success,
+ * the `Promise` is resolved with an array of canonical name records available for
+ * the `hostname` (e.g. `['bar.example.com']`).
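+ *
+ * A minimal illustrative sketch (hostname is a placeholder):
+ *
+ * ```js
+ * const dnsPromises = require('dns').promises;
+ *
+ * dnsPromises.resolveCname('foo.example.com').then((addresses) => {
+ *   console.log(addresses); // e.g. ['bar.example.com']
+ * });
+ * ```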
+ * @since v10.6.0 + */ + function resolveCname(hostname: string): Promise; + /** + * Uses the DNS protocol to resolve mail exchange records (`MX` records) for the`hostname`. On success, the `Promise` is resolved with an array of objects + * containing both a `priority` and `exchange` property (e.g.`[{priority: 10, exchange: 'mx.example.com'}, ...]`). + * @since v10.6.0 + */ + function resolveMx(hostname: string): Promise; + /** + * Uses the DNS protocol to resolve regular expression based records (`NAPTR`records) for the `hostname`. On success, the `Promise` is resolved with an array + * of objects with the following properties: + * + * * `flags` + * * `service` + * * `regexp` + * * `replacement` + * * `order` + * * `preference` + * + * ```js + * { + * flags: 's', + * service: 'SIP+D2U', + * regexp: '', + * replacement: '_sip._udp.example.com', + * order: 30, + * preference: 100 + * } + * ``` + * @since v10.6.0 + */ + function resolveNaptr(hostname: string): Promise; + /** + * Uses the DNS protocol to resolve name server records (`NS` records) for the`hostname`. On success, the `Promise` is resolved with an array of name server + * records available for `hostname` (e.g.`['ns1.example.com', 'ns2.example.com']`). + * @since v10.6.0 + */ + function resolveNs(hostname: string): Promise; + /** + * Uses the DNS protocol to resolve pointer records (`PTR` records) for the`hostname`. On success, the `Promise` is resolved with an array of strings + * containing the reply records. + * @since v10.6.0 + */ + function resolvePtr(hostname: string): Promise; + /** + * Uses the DNS protocol to resolve a start of authority record (`SOA` record) for + * the `hostname`. On success, the `Promise` is resolved with an object with the + * following properties: + * + * * `nsname` + * * `hostmaster` + * * `serial` + * * `refresh` + * * `retry` + * * `expire` + * * `minttl` + * + * ```js + * { + * nsname: 'ns.example.com', + * hostmaster: 'root.example.com', + * serial: 2013101809, + * refresh: 10000, + * retry: 2400, + * expire: 604800, + * minttl: 3600 + * } + * ``` + * @since v10.6.0 + */ + function resolveSoa(hostname: string): Promise; + /** + * Uses the DNS protocol to resolve service records (`SRV` records) for the`hostname`. On success, the `Promise` is resolved with an array of objects with + * the following properties: + * + * * `priority` + * * `weight` + * * `port` + * * `name` + * + * ```js + * { + * priority: 10, + * weight: 5, + * port: 21223, + * name: 'service.example.com' + * } + * ``` + * @since v10.6.0 + */ + function resolveSrv(hostname: string): Promise; + /** + * Uses the DNS protocol to resolve text queries (`TXT` records) for the`hostname`. On success, the `Promise` is resolved with a two-dimensional array + * of the text records available for `hostname` (e.g.`[ ['v=spf1 ip4:0.0.0.0 ', '~all' ] ]`). Each sub-array contains TXT chunks of + * one record. Depending on the use case, these could be either joined together or + * treated separately. + * @since v10.6.0 + */ + function resolveTxt(hostname: string): Promise; + /** + * Performs a reverse DNS query that resolves an IPv4 or IPv6 address to an + * array of host names. + * + * On error, the `Promise` is rejected with an `Error` object, where `err.code`is one of the `DNS error codes`. + * @since v10.6.0 + */ + function reverse(ip: string): Promise; + /** + * Sets the IP address and port of servers to be used when performing DNS + * resolution. 
The `servers` argument is an array of [RFC 5952](https://tools.ietf.org/html/rfc5952#section-6) formatted + * addresses. If the port is the IANA default DNS port (53) it can be omitted. + * + * ```js + * dnsPromises.setServers([ + * '4.4.4.4', + * '[2001:4860:4860::8888]', + * '4.4.4.4:1053', + * '[2001:4860:4860::8888]:1053', + * ]); + * ``` + * + * An error will be thrown if an invalid address is provided. + * + * The `dnsPromises.setServers()` method must not be called while a DNS query is in + * progress. + * + * This method works much like [resolve.conf](https://man7.org/linux/man-pages/man5/resolv.conf.5.html). + * That is, if attempting to resolve with the first server provided results in a`NOTFOUND` error, the `resolve()` method will _not_ attempt to resolve with + * subsequent servers provided. Fallback DNS servers will only be used if the + * earlier ones time out or result in some other error. + * @since v10.6.0 + * @param servers array of `RFC 5952` formatted addresses + */ + function setServers(servers: ReadonlyArray): void; + /** + * Set the default value of `verbatim` in `dns.lookup()` and `dnsPromises.lookup()`. The value could be: + * + * * `ipv4first`: sets default `verbatim` `false`. + * * `verbatim`: sets default `verbatim` `true`. + * + * The default is `ipv4first` and `dnsPromises.setDefaultResultOrder()` have + * higher priority than `--dns-result-order`. When using `worker threads`,`dnsPromises.setDefaultResultOrder()` from the main thread won't affect the + * default dns orders in workers. + * @since v16.4.0, v14.18.0 + * @param order must be `'ipv4first'` or `'verbatim'`. + */ + function setDefaultResultOrder(order: 'ipv4first' | 'verbatim'): void; + class Resolver { + constructor(options?: ResolverOptions); + cancel(): void; + getServers: typeof getServers; + resolve: typeof resolve; + resolve4: typeof resolve4; + resolve6: typeof resolve6; + resolveAny: typeof resolveAny; + resolveCname: typeof resolveCname; + resolveMx: typeof resolveMx; + resolveNaptr: typeof resolveNaptr; + resolveNs: typeof resolveNs; + resolvePtr: typeof resolvePtr; + resolveSoa: typeof resolveSoa; + resolveSrv: typeof resolveSrv; + resolveTxt: typeof resolveTxt; + reverse: typeof reverse; + setLocalAddress(ipv4?: string, ipv6?: string): void; + setServers: typeof setServers; + } +} +declare module 'node:dns/promises' { + export * from 'dns/promises'; +} diff --git a/node_modules/@types/node/dom-events.d.ts b/node_modules/@types/node/dom-events.d.ts new file mode 100755 index 0000000..b9c1c3a --- /dev/null +++ b/node_modules/@types/node/dom-events.d.ts @@ -0,0 +1,126 @@ +export {}; // Don't export anything! + +//// DOM-like Events +// NB: The Event / EventTarget / EventListener implementations below were copied +// from lib.dom.d.ts, then edited to reflect Node's documentation at +// https://nodejs.org/api/events.html#class-eventtarget. +// Please read that link to understand important implementation differences. + +// This conditional type will be the existing global Event in a browser, or +// the copy below in a Node environment. +type __Event = typeof globalThis extends { onmessage: any, Event: any } +? {} +: { + /** This is not used in Node.js and is provided purely for completeness. */ + readonly bubbles: boolean; + /** Alias for event.stopPropagation(). This is not used in Node.js and is provided purely for completeness. 
*/ + cancelBubble: () => void; + /** True if the event was created with the cancelable option */ + readonly cancelable: boolean; + /** This is not used in Node.js and is provided purely for completeness. */ + readonly composed: boolean; + /** Returns an array containing the current EventTarget as the only entry or empty if the event is not being dispatched. This is not used in Node.js and is provided purely for completeness. */ + composedPath(): [EventTarget?] + /** Alias for event.target. */ + readonly currentTarget: EventTarget | null; + /** Is true if cancelable is true and event.preventDefault() has been called. */ + readonly defaultPrevented: boolean; + /** This is not used in Node.js and is provided purely for completeness. */ + readonly eventPhase: 0 | 2; + /** The `AbortSignal` "abort" event is emitted with `isTrusted` set to `true`. The value is `false` in all other cases. */ + readonly isTrusted: boolean; + /** Sets the `defaultPrevented` property to `true` if `cancelable` is `true`. */ + preventDefault(): void; + /** This is not used in Node.js and is provided purely for completeness. */ + returnValue: boolean; + /** Alias for event.target. */ + readonly srcElement: EventTarget | null; + /** Stops the invocation of event listeners after the current one completes. */ + stopImmediatePropagation(): void; + /** This is not used in Node.js and is provided purely for completeness. */ + stopPropagation(): void; + /** The `EventTarget` dispatching the event */ + readonly target: EventTarget | null; + /** The millisecond timestamp when the Event object was created. */ + readonly timeStamp: number; + /** Returns the type of event, e.g. "click", "hashchange", or "submit". */ + readonly type: string; +}; + +// See comment above explaining conditional type +type __EventTarget = typeof globalThis extends { onmessage: any, EventTarget: any } +? {} +: { + /** + * Adds a new handler for the `type` event. Any given `listener` is added only once per `type` and per `capture` option value. + * + * If the `once` option is true, the `listener` is removed after the next time a `type` event is dispatched. + * + * The `capture` option is not used by Node.js in any functional way other than tracking registered event listeners per the `EventTarget` specification. + * Specifically, the `capture` option is used as part of the key when registering a `listener`. + * Any individual `listener` may be added once with `capture = false`, and once with `capture = true`. + */ + addEventListener( + type: string, + listener: EventListener | EventListenerObject, + options?: AddEventListenerOptions | boolean, + ): void; + /** Dispatches a synthetic event event to target and returns true if either event's cancelable attribute value is false or its preventDefault() method was not invoked, and false otherwise. */ + dispatchEvent(event: Event): boolean; + /** Removes the event listener in target's event listener list with the same type, callback, and options. */ + removeEventListener( + type: string, + listener: EventListener | EventListenerObject, + options?: EventListenerOptions | boolean, + ): void; +}; + +interface EventInit { + bubbles?: boolean; + cancelable?: boolean; + composed?: boolean; +} + +interface EventListenerOptions { + /** Not directly used by Node.js. Added for API completeness. Default: `false`. */ + capture?: boolean; +} + +interface AddEventListenerOptions extends EventListenerOptions { + /** When `true`, the listener is automatically removed when it is first invoked. Default: `false`. 
*/ + once?: boolean; + /** When `true`, serves as a hint that the listener will not call the `Event` object's `preventDefault()` method. Default: false. */ + passive?: boolean; +} + +interface EventListener { + (evt: Event): void; +} + +interface EventListenerObject { + handleEvent(object: Event): void; +} + +import {} from 'events'; // Make this an ambient declaration +declare global { + /** An event which takes place in the DOM. */ + interface Event extends __Event {} + var Event: typeof globalThis extends { onmessage: any, Event: infer T } + ? T + : { + prototype: __Event; + new (type: string, eventInitDict?: EventInit): __Event; + }; + + /** + * EventTarget is a DOM interface implemented by objects that can + * receive events and may have listeners for them. + */ + interface EventTarget extends __EventTarget {} + var EventTarget: typeof globalThis extends { onmessage: any, EventTarget: infer T } + ? T + : { + prototype: __EventTarget; + new (): __EventTarget; + }; +} diff --git a/node_modules/@types/node/domain.d.ts b/node_modules/@types/node/domain.d.ts new file mode 100755 index 0000000..fafe68a --- /dev/null +++ b/node_modules/@types/node/domain.d.ts @@ -0,0 +1,170 @@ +/** + * **This module is pending deprecation.** Once a replacement API has been + * finalized, this module will be fully deprecated. Most developers should + * **not** have cause to use this module. Users who absolutely must have + * the functionality that domains provide may rely on it for the time being + * but should expect to have to migrate to a different solution + * in the future. + * + * Domains provide a way to handle multiple different IO operations as a + * single group. If any of the event emitters or callbacks registered to a + * domain emit an `'error'` event, or throw an error, then the domain object + * will be notified, rather than losing the context of the error in the`process.on('uncaughtException')` handler, or causing the program to + * exit immediately with an error code. + * @deprecated Since v1.4.2 - Deprecated + * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/domain.js) + */ +declare module 'domain' { + import EventEmitter = require('node:events'); + /** + * The `Domain` class encapsulates the functionality of routing errors and + * uncaught exceptions to the active `Domain` object. + * + * To handle the errors that it catches, listen to its `'error'` event. + */ + class Domain extends EventEmitter { + /** + * An array of timers and event emitters that have been explicitly added + * to the domain. + */ + members: Array; + /** + * The `enter()` method is plumbing used by the `run()`, `bind()`, and`intercept()` methods to set the active domain. It sets `domain.active` and`process.domain` to the domain, and implicitly + * pushes the domain onto the domain + * stack managed by the domain module (see {@link exit} for details on the + * domain stack). The call to `enter()` delimits the beginning of a chain of + * asynchronous calls and I/O operations bound to a domain. + * + * Calling `enter()` changes only the active domain, and does not alter the domain + * itself. `enter()` and `exit()` can be called an arbitrary number of times on a + * single domain. + */ + enter(): void; + /** + * The `exit()` method exits the current domain, popping it off the domain stack. + * Any time execution is going to switch to the context of a different chain of + * asynchronous calls, it's important to ensure that the current domain is exited. 
+ * The call to `exit()` delimits either the end of or an interruption to the chain + * of asynchronous calls and I/O operations bound to a domain. + * + * If there are multiple, nested domains bound to the current execution context,`exit()` will exit any domains nested within this domain. + * + * Calling `exit()` changes only the active domain, and does not alter the domain + * itself. `enter()` and `exit()` can be called an arbitrary number of times on a + * single domain. + */ + exit(): void; + /** + * Run the supplied function in the context of the domain, implicitly + * binding all event emitters, timers, and lowlevel requests that are + * created in that context. Optionally, arguments can be passed to + * the function. + * + * This is the most basic way to use a domain. + * + * ```js + * const domain = require('domain'); + * const fs = require('fs'); + * const d = domain.create(); + * d.on('error', (er) => { + * console.error('Caught error!', er); + * }); + * d.run(() => { + * process.nextTick(() => { + * setTimeout(() => { // Simulating some various async stuff + * fs.open('non-existent file', 'r', (er, fd) => { + * if (er) throw er; + * // proceed... + * }); + * }, 100); + * }); + * }); + * ``` + * + * In this example, the `d.on('error')` handler will be triggered, rather + * than crashing the program. + */ + run(fn: (...args: any[]) => T, ...args: any[]): T; + /** + * Explicitly adds an emitter to the domain. If any event handlers called by + * the emitter throw an error, or if the emitter emits an `'error'` event, it + * will be routed to the domain's `'error'` event, just like with implicit + * binding. + * + * This also works with timers that are returned from `setInterval()` and `setTimeout()`. If their callback function throws, it will be caught by + * the domain `'error'` handler. + * + * If the Timer or `EventEmitter` was already bound to a domain, it is removed + * from that one, and bound to this one instead. + * @param emitter emitter or timer to be added to the domain + */ + add(emitter: EventEmitter | NodeJS.Timer): void; + /** + * The opposite of {@link add}. Removes domain handling from the + * specified emitter. + * @param emitter emitter or timer to be removed from the domain + */ + remove(emitter: EventEmitter | NodeJS.Timer): void; + /** + * The returned function will be a wrapper around the supplied callback + * function. When the returned function is called, any errors that are + * thrown will be routed to the domain's `'error'` event. + * + * ```js + * const d = domain.create(); + * + * function readSomeFile(filename, cb) { + * fs.readFile(filename, 'utf8', d.bind((er, data) => { + * // If this throws, it will also be passed to the domain. + * return cb(er, data ? JSON.parse(data) : null); + * })); + * } + * + * d.on('error', (er) => { + * // An error occurred somewhere. If we throw it now, it will crash the program + * // with the normal line number and stack message. + * }); + * ``` + * @param callback The callback function + * @return The bound function + */ + bind(callback: T): T; + /** + * This method is almost identical to {@link bind}. However, in + * addition to catching thrown errors, it will also intercept `Error` objects sent as the first argument to the function. + * + * In this way, the common `if (err) return callback(err);` pattern can be replaced + * with a single error handler in a single place. 
+ * + * ```js + * const d = domain.create(); + * + * function readSomeFile(filename, cb) { + * fs.readFile(filename, 'utf8', d.intercept((data) => { + * // Note, the first argument is never passed to the + * // callback since it is assumed to be the 'Error' argument + * // and thus intercepted by the domain. + * + * // If this throws, it will also be passed to the domain + * // so the error-handling logic can be moved to the 'error' + * // event on the domain instead of being repeated throughout + * // the program. + * return cb(null, JSON.parse(data)); + * })); + * } + * + * d.on('error', (er) => { + * // An error occurred somewhere. If we throw it now, it will crash the program + * // with the normal line number and stack message. + * }); + * ``` + * @param callback The callback function + * @return The intercepted function + */ + intercept(callback: T): T; + } + function create(): Domain; +} +declare module 'node:domain' { + export * from 'domain'; +} diff --git a/node_modules/@types/node/events.d.ts b/node_modules/@types/node/events.d.ts new file mode 100755 index 0000000..4633df1 --- /dev/null +++ b/node_modules/@types/node/events.d.ts @@ -0,0 +1,678 @@ +/** + * Much of the Node.js core API is built around an idiomatic asynchronous + * event-driven architecture in which certain kinds of objects (called "emitters") + * emit named events that cause `Function` objects ("listeners") to be called. + * + * For instance: a `net.Server` object emits an event each time a peer + * connects to it; a `fs.ReadStream` emits an event when the file is opened; + * a `stream` emits an event whenever data is available to be read. + * + * All objects that emit events are instances of the `EventEmitter` class. These + * objects expose an `eventEmitter.on()` function that allows one or more + * functions to be attached to named events emitted by the object. Typically, + * event names are camel-cased strings but any valid JavaScript property key + * can be used. + * + * When the `EventEmitter` object emits an event, all of the functions attached + * to that specific event are called _synchronously_. Any values returned by the + * called listeners are _ignored_ and discarded. + * + * The following example shows a simple `EventEmitter` instance with a single + * listener. The `eventEmitter.on()` method is used to register listeners, while + * the `eventEmitter.emit()` method is used to trigger the event. + * + * ```js + * const EventEmitter = require('events'); + * + * class MyEmitter extends EventEmitter {} + * + * const myEmitter = new MyEmitter(); + * myEmitter.on('event', () => { + * console.log('an event occurred!'); + * }); + * myEmitter.emit('event'); + * ``` + * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/events.js) + */ +declare module 'events' { + // NOTE: This class is in the docs but is **not actually exported** by Node. + // If https://github.com/nodejs/node/issues/39903 gets resolved and Node + // actually starts exporting the class, uncomment below. + + // import { EventListener, EventListenerObject } from '__dom-events'; + // /** The NodeEventTarget is a Node.js-specific extension to EventTarget that emulates a subset of the EventEmitter API. */ + // interface NodeEventTarget extends EventTarget { + // /** + // * Node.js-specific extension to the `EventTarget` class that emulates the equivalent `EventEmitter` API. + // * The only difference between `addListener()` and `addEventListener()` is that addListener() will return a reference to the EventTarget. 
+ // */ + // addListener(type: string, listener: EventListener | EventListenerObject, options?: { once: boolean }): this; + // /** Node.js-specific extension to the `EventTarget` class that returns an array of event `type` names for which event listeners are registered. */ + // eventNames(): string[]; + // /** Node.js-specific extension to the `EventTarget` class that returns the number of event listeners registered for the `type`. */ + // listenerCount(type: string): number; + // /** Node.js-specific alias for `eventTarget.removeListener()`. */ + // off(type: string, listener: EventListener | EventListenerObject): this; + // /** Node.js-specific alias for `eventTarget.addListener()`. */ + // on(type: string, listener: EventListener | EventListenerObject, options?: { once: boolean }): this; + // /** Node.js-specific extension to the `EventTarget` class that adds a `once` listener for the given event `type`. This is equivalent to calling `on` with the `once` option set to `true`. */ + // once(type: string, listener: EventListener | EventListenerObject): this; + // /** + // * Node.js-specific extension to the `EventTarget` class. + // * If `type` is specified, removes all registered listeners for `type`, + // * otherwise removes all registered listeners. + // */ + // removeAllListeners(type: string): this; + // /** + // * Node.js-specific extension to the `EventTarget` class that removes the listener for the given `type`. + // * The only difference between `removeListener()` and `removeEventListener()` is that `removeListener()` will return a reference to the `EventTarget`. + // */ + // removeListener(type: string, listener: EventListener | EventListenerObject): this; + // } + + interface EventEmitterOptions { + /** + * Enables automatic capturing of promise rejection. + */ + captureRejections?: boolean | undefined; + } + // Any EventTarget with a Node-style `once` function + interface _NodeEventTarget { + once(eventName: string | symbol, listener: (...args: any[]) => void): this; + } + // Any EventTarget with a DOM-style `addEventListener` + interface _DOMEventTarget { + addEventListener( + eventName: string, + listener: (...args: any[]) => void, + opts?: { + once: boolean; + } + ): any; + } + interface StaticEventEmitterOptions { + signal?: AbortSignal | undefined; + } + interface EventEmitter extends NodeJS.EventEmitter {} + /** + * The `EventEmitter` class is defined and exposed by the `events` module: + * + * ```js + * const EventEmitter = require('events'); + * ``` + * + * All `EventEmitter`s emit the event `'newListener'` when new listeners are + * added and `'removeListener'` when existing listeners are removed. + * + * It supports the following option: + * @since v0.1.26 + */ + class EventEmitter { + constructor(options?: EventEmitterOptions); + /** + * Creates a `Promise` that is fulfilled when the `EventEmitter` emits the given + * event or that is rejected if the `EventEmitter` emits `'error'` while waiting. + * The `Promise` will resolve with an array of all the arguments emitted to the + * given event. + * + * This method is intentionally generic and works with the web platform [EventTarget](https://dom.spec.whatwg.org/#interface-eventtarget) interface, which has no special`'error'` event + * semantics and does not listen to the `'error'` event. 
+ * + * ```js + * const { once, EventEmitter } = require('events'); + * + * async function run() { + * const ee = new EventEmitter(); + * + * process.nextTick(() => { + * ee.emit('myevent', 42); + * }); + * + * const [value] = await once(ee, 'myevent'); + * console.log(value); + * + * const err = new Error('kaboom'); + * process.nextTick(() => { + * ee.emit('error', err); + * }); + * + * try { + * await once(ee, 'myevent'); + * } catch (err) { + * console.log('error happened', err); + * } + * } + * + * run(); + * ``` + * + * The special handling of the `'error'` event is only used when `events.once()`is used to wait for another event. If `events.once()` is used to wait for the + * '`error'` event itself, then it is treated as any other kind of event without + * special handling: + * + * ```js + * const { EventEmitter, once } = require('events'); + * + * const ee = new EventEmitter(); + * + * once(ee, 'error') + * .then(([err]) => console.log('ok', err.message)) + * .catch((err) => console.log('error', err.message)); + * + * ee.emit('error', new Error('boom')); + * + * // Prints: ok boom + * ``` + * + * An `AbortSignal` can be used to cancel waiting for the event: + * + * ```js + * const { EventEmitter, once } = require('events'); + * + * const ee = new EventEmitter(); + * const ac = new AbortController(); + * + * async function foo(emitter, event, signal) { + * try { + * await once(emitter, event, { signal }); + * console.log('event emitted!'); + * } catch (error) { + * if (error.name === 'AbortError') { + * console.error('Waiting for the event was canceled!'); + * } else { + * console.error('There was an error', error.message); + * } + * } + * } + * + * foo(ee, 'foo', ac.signal); + * ac.abort(); // Abort waiting for the event + * ee.emit('foo'); // Prints: Waiting for the event was canceled! + * ``` + * @since v11.13.0, v10.16.0 + */ + static once(emitter: _NodeEventTarget, eventName: string | symbol, options?: StaticEventEmitterOptions): Promise; + static once(emitter: _DOMEventTarget, eventName: string, options?: StaticEventEmitterOptions): Promise; + /** + * ```js + * const { on, EventEmitter } = require('events'); + * + * (async () => { + * const ee = new EventEmitter(); + * + * // Emit later on + * process.nextTick(() => { + * ee.emit('foo', 'bar'); + * ee.emit('foo', 42); + * }); + * + * for await (const event of on(ee, 'foo')) { + * // The execution of this inner block is synchronous and it + * // processes one event at a time (even with await). Do not use + * // if concurrent execution is required. + * console.log(event); // prints ['bar'] [42] + * } + * // Unreachable here + * })(); + * ``` + * + * Returns an `AsyncIterator` that iterates `eventName` events. It will throw + * if the `EventEmitter` emits `'error'`. It removes all listeners when + * exiting the loop. The `value` returned by each iteration is an array + * composed of the emitted event arguments. + * + * An `AbortSignal` can be used to cancel waiting on events: + * + * ```js + * const { on, EventEmitter } = require('events'); + * const ac = new AbortController(); + * + * (async () => { + * const ee = new EventEmitter(); + * + * // Emit later on + * process.nextTick(() => { + * ee.emit('foo', 'bar'); + * ee.emit('foo', 42); + * }); + * + * for await (const event of on(ee, 'foo', { signal: ac.signal })) { + * // The execution of this inner block is synchronous and it + * // processes one event at a time (even with await). Do not use + * // if concurrent execution is required. 
+ * console.log(event); // prints ['bar'] [42] + * } + * // Unreachable here + * })(); + * + * process.nextTick(() => ac.abort()); + * ``` + * @since v13.6.0, v12.16.0 + * @param eventName The name of the event being listened for + * @return that iterates `eventName` events emitted by the `emitter` + */ + static on(emitter: NodeJS.EventEmitter, eventName: string, options?: StaticEventEmitterOptions): AsyncIterableIterator; + /** + * A class method that returns the number of listeners for the given `eventName`registered on the given `emitter`. + * + * ```js + * const { EventEmitter, listenerCount } = require('events'); + * const myEmitter = new EventEmitter(); + * myEmitter.on('event', () => {}); + * myEmitter.on('event', () => {}); + * console.log(listenerCount(myEmitter, 'event')); + * // Prints: 2 + * ``` + * @since v0.9.12 + * @deprecated Since v3.2.0 - Use `listenerCount` instead. + * @param emitter The emitter to query + * @param eventName The event name + */ + static listenerCount(emitter: NodeJS.EventEmitter, eventName: string | symbol): number; + /** + * Returns a copy of the array of listeners for the event named `eventName`. + * + * For `EventEmitter`s this behaves exactly the same as calling `.listeners` on + * the emitter. + * + * For `EventTarget`s this is the only way to get the event listeners for the + * event target. This is useful for debugging and diagnostic purposes. + * + * ```js + * const { getEventListeners, EventEmitter } = require('events'); + * + * { + * const ee = new EventEmitter(); + * const listener = () => console.log('Events are fun'); + * ee.on('foo', listener); + * getEventListeners(ee, 'foo'); // [listener] + * } + * { + * const et = new EventTarget(); + * const listener = () => console.log('Events are fun'); + * et.addEventListener('foo', listener); + * getEventListeners(et, 'foo'); // [listener] + * } + * ``` + * @since v15.2.0, v14.17.0 + */ + static getEventListeners(emitter: _DOMEventTarget | NodeJS.EventEmitter, name: string | symbol): Function[]; + /** + * ```js + * const { + * setMaxListeners, + * EventEmitter + * } = require('events'); + * + * const target = new EventTarget(); + * const emitter = new EventEmitter(); + * + * setMaxListeners(5, target, emitter); + * ``` + * @since v15.4.0 + * @param n A non-negative number. The maximum number of listeners per `EventTarget` event. + * @param eventsTargets Zero or more {EventTarget} or {EventEmitter} instances. If none are specified, `n` is set as the default max for all newly created {EventTarget} and {EventEmitter} + * objects. + */ + static setMaxListeners(n?: number, ...eventTargets: Array<_DOMEventTarget | NodeJS.EventEmitter>): void; + /** + * This symbol shall be used to install a listener for only monitoring `'error'` + * events. Listeners installed using this symbol are called before the regular + * `'error'` listeners are called. + * + * Installing a listener using this symbol does not change the behavior once an + * `'error'` event is emitted, therefore the process will still crash if no + * regular `'error'` listener is installed. + */ + static readonly errorMonitor: unique symbol; + static readonly captureRejectionSymbol: unique symbol; + /** + * Sets or gets the default captureRejection value for all emitters. 
+ */ + // TODO: These should be described using static getter/setter pairs: + static captureRejections: boolean; + static defaultMaxListeners: number; + } + import internal = require('node:events'); + namespace EventEmitter { + // Should just be `export { EventEmitter }`, but that doesn't work in TypeScript 3.4 + export { internal as EventEmitter }; + export interface Abortable { + /** + * When provided the corresponding `AbortController` can be used to cancel an asynchronous action. + */ + signal?: AbortSignal | undefined; + } + } + global { + namespace NodeJS { + interface EventEmitter { + /** + * Alias for `emitter.on(eventName, listener)`. + * @since v0.1.26 + */ + addListener(eventName: string | symbol, listener: (...args: any[]) => void): this; + /** + * Adds the `listener` function to the end of the listeners array for the + * event named `eventName`. No checks are made to see if the `listener` has + * already been added. Multiple calls passing the same combination of `eventName`and `listener` will result in the `listener` being added, and called, multiple + * times. + * + * ```js + * server.on('connection', (stream) => { + * console.log('someone connected!'); + * }); + * ``` + * + * Returns a reference to the `EventEmitter`, so that calls can be chained. + * + * By default, event listeners are invoked in the order they are added. The`emitter.prependListener()` method can be used as an alternative to add the + * event listener to the beginning of the listeners array. + * + * ```js + * const myEE = new EventEmitter(); + * myEE.on('foo', () => console.log('a')); + * myEE.prependListener('foo', () => console.log('b')); + * myEE.emit('foo'); + * // Prints: + * // b + * // a + * ``` + * @since v0.1.101 + * @param eventName The name of the event. + * @param listener The callback function + */ + on(eventName: string | symbol, listener: (...args: any[]) => void): this; + /** + * Adds a **one-time**`listener` function for the event named `eventName`. The + * next time `eventName` is triggered, this listener is removed and then invoked. + * + * ```js + * server.once('connection', (stream) => { + * console.log('Ah, we have our first user!'); + * }); + * ``` + * + * Returns a reference to the `EventEmitter`, so that calls can be chained. + * + * By default, event listeners are invoked in the order they are added. The`emitter.prependOnceListener()` method can be used as an alternative to add the + * event listener to the beginning of the listeners array. + * + * ```js + * const myEE = new EventEmitter(); + * myEE.once('foo', () => console.log('a')); + * myEE.prependOnceListener('foo', () => console.log('b')); + * myEE.emit('foo'); + * // Prints: + * // b + * // a + * ``` + * @since v0.3.0 + * @param eventName The name of the event. + * @param listener The callback function + */ + once(eventName: string | symbol, listener: (...args: any[]) => void): this; + /** + * Removes the specified `listener` from the listener array for the event named`eventName`. + * + * ```js + * const callback = (stream) => { + * console.log('someone connected!'); + * }; + * server.on('connection', callback); + * // ... + * server.removeListener('connection', callback); + * ``` + * + * `removeListener()` will remove, at most, one instance of a listener from the + * listener array. If any single listener has been added multiple times to the + * listener array for the specified `eventName`, then `removeListener()` must be + * called multiple times to remove each instance. 
+ * + * Once an event is emitted, all listeners attached to it at the + * time of emitting are called in order. This implies that any`removeListener()` or `removeAllListeners()` calls _after_ emitting and _before_ the last listener finishes execution + * will not remove them from`emit()` in progress. Subsequent events behave as expected. + * + * ```js + * const myEmitter = new MyEmitter(); + * + * const callbackA = () => { + * console.log('A'); + * myEmitter.removeListener('event', callbackB); + * }; + * + * const callbackB = () => { + * console.log('B'); + * }; + * + * myEmitter.on('event', callbackA); + * + * myEmitter.on('event', callbackB); + * + * // callbackA removes listener callbackB but it will still be called. + * // Internal listener array at time of emit [callbackA, callbackB] + * myEmitter.emit('event'); + * // Prints: + * // A + * // B + * + * // callbackB is now removed. + * // Internal listener array [callbackA] + * myEmitter.emit('event'); + * // Prints: + * // A + * ``` + * + * Because listeners are managed using an internal array, calling this will + * change the position indices of any listener registered _after_ the listener + * being removed. This will not impact the order in which listeners are called, + * but it means that any copies of the listener array as returned by + * the `emitter.listeners()` method will need to be recreated. + * + * When a single function has been added as a handler multiple times for a single + * event (as in the example below), `removeListener()` will remove the most + * recently added instance. In the example the `once('ping')`listener is removed: + * + * ```js + * const ee = new EventEmitter(); + * + * function pong() { + * console.log('pong'); + * } + * + * ee.on('ping', pong); + * ee.once('ping', pong); + * ee.removeListener('ping', pong); + * + * ee.emit('ping'); + * ee.emit('ping'); + * ``` + * + * Returns a reference to the `EventEmitter`, so that calls can be chained. + * @since v0.1.26 + */ + removeListener(eventName: string | symbol, listener: (...args: any[]) => void): this; + /** + * Alias for `emitter.removeListener()`. + * @since v10.0.0 + */ + off(eventName: string | symbol, listener: (...args: any[]) => void): this; + /** + * Removes all listeners, or those of the specified `eventName`. + * + * It is bad practice to remove listeners added elsewhere in the code, + * particularly when the `EventEmitter` instance was created by some other + * component or module (e.g. sockets or file streams). + * + * Returns a reference to the `EventEmitter`, so that calls can be chained. + * @since v0.1.26 + */ + removeAllListeners(event?: string | symbol): this; + /** + * By default `EventEmitter`s will print a warning if more than `10` listeners are + * added for a particular event. This is a useful default that helps finding + * memory leaks. The `emitter.setMaxListeners()` method allows the limit to be + * modified for this specific `EventEmitter` instance. The value can be set to`Infinity` (or `0`) to indicate an unlimited number of listeners. + * + * Returns a reference to the `EventEmitter`, so that calls can be chained. + * @since v0.3.5 + */ + setMaxListeners(n: number): this; + /** + * Returns the current max listener value for the `EventEmitter` which is either + * set by `emitter.setMaxListeners(n)` or defaults to {@link defaultMaxListeners}. + * @since v1.0.0 + */ + getMaxListeners(): number; + /** + * Returns a copy of the array of listeners for the event named `eventName`. 
+ * + * ```js + * server.on('connection', (stream) => { + * console.log('someone connected!'); + * }); + * console.log(util.inspect(server.listeners('connection'))); + * // Prints: [ [Function] ] + * ``` + * @since v0.1.26 + */ + listeners(eventName: string | symbol): Function[]; + /** + * Returns a copy of the array of listeners for the event named `eventName`, + * including any wrappers (such as those created by `.once()`). + * + * ```js + * const emitter = new EventEmitter(); + * emitter.once('log', () => console.log('log once')); + * + * // Returns a new Array with a function `onceWrapper` which has a property + * // `listener` which contains the original listener bound above + * const listeners = emitter.rawListeners('log'); + * const logFnWrapper = listeners[0]; + * + * // Logs "log once" to the console and does not unbind the `once` event + * logFnWrapper.listener(); + * + * // Logs "log once" to the console and removes the listener + * logFnWrapper(); + * + * emitter.on('log', () => console.log('log persistently')); + * // Will return a new Array with a single function bound by `.on()` above + * const newListeners = emitter.rawListeners('log'); + * + * // Logs "log persistently" twice + * newListeners[0](); + * emitter.emit('log'); + * ``` + * @since v9.4.0 + */ + rawListeners(eventName: string | symbol): Function[]; + /** + * Synchronously calls each of the listeners registered for the event named`eventName`, in the order they were registered, passing the supplied arguments + * to each. + * + * Returns `true` if the event had listeners, `false` otherwise. + * + * ```js + * const EventEmitter = require('events'); + * const myEmitter = new EventEmitter(); + * + * // First listener + * myEmitter.on('event', function firstListener() { + * console.log('Helloooo! first listener'); + * }); + * // Second listener + * myEmitter.on('event', function secondListener(arg1, arg2) { + * console.log(`event with parameters ${arg1}, ${arg2} in second listener`); + * }); + * // Third listener + * myEmitter.on('event', function thirdListener(...args) { + * const parameters = args.join(', '); + * console.log(`event with parameters ${parameters} in third listener`); + * }); + * + * console.log(myEmitter.listeners('event')); + * + * myEmitter.emit('event', 1, 2, 3, 4, 5); + * + * // Prints: + * // [ + * // [Function: firstListener], + * // [Function: secondListener], + * // [Function: thirdListener] + * // ] + * // Helloooo! first listener + * // event with parameters 1, 2 in second listener + * // event with parameters 1, 2, 3, 4, 5 in third listener + * ``` + * @since v0.1.26 + */ + emit(eventName: string | symbol, ...args: any[]): boolean; + /** + * Returns the number of listeners listening to the event named `eventName`. + * @since v3.2.0 + * @param eventName The name of the event being listened for + */ + listenerCount(eventName: string | symbol): number; + /** + * Adds the `listener` function to the _beginning_ of the listeners array for the + * event named `eventName`. No checks are made to see if the `listener` has + * already been added. Multiple calls passing the same combination of `eventName`and `listener` will result in the `listener` being added, and called, multiple + * times. + * + * ```js + * server.prependListener('connection', (stream) => { + * console.log('someone connected!'); + * }); + * ``` + * + * Returns a reference to the `EventEmitter`, so that calls can be chained. + * @since v6.0.0 + * @param eventName The name of the event. 
+ * @param listener The callback function + */ + prependListener(eventName: string | symbol, listener: (...args: any[]) => void): this; + /** + * Adds a **one-time**`listener` function for the event named `eventName` to the _beginning_ of the listeners array. The next time `eventName` is triggered, this + * listener is removed, and then invoked. + * + * ```js + * server.prependOnceListener('connection', (stream) => { + * console.log('Ah, we have our first user!'); + * }); + * ``` + * + * Returns a reference to the `EventEmitter`, so that calls can be chained. + * @since v6.0.0 + * @param eventName The name of the event. + * @param listener The callback function + */ + prependOnceListener(eventName: string | symbol, listener: (...args: any[]) => void): this; + /** + * Returns an array listing the events for which the emitter has registered + * listeners. The values in the array are strings or `Symbol`s. + * + * ```js + * const EventEmitter = require('events'); + * const myEE = new EventEmitter(); + * myEE.on('foo', () => {}); + * myEE.on('bar', () => {}); + * + * const sym = Symbol('symbol'); + * myEE.on(sym, () => {}); + * + * console.log(myEE.eventNames()); + * // Prints: [ 'foo', 'bar', Symbol(symbol) ] + * ``` + * @since v6.0.0 + */ + eventNames(): Array; + } + } + } + export = EventEmitter; +} +declare module 'node:events' { + import events = require('events'); + export = events; +} diff --git a/node_modules/@types/node/fs.d.ts b/node_modules/@types/node/fs.d.ts new file mode 100755 index 0000000..75c53fb --- /dev/null +++ b/node_modules/@types/node/fs.d.ts @@ -0,0 +1,3872 @@ +/** + * The `fs` module enables interacting with the file system in a + * way modeled on standard POSIX functions. + * + * To use the promise-based APIs: + * + * ```js + * import * as fs from 'fs/promises'; + * ``` + * + * To use the callback and sync APIs: + * + * ```js + * import * as fs from 'fs'; + * ``` + * + * All file system operations have synchronous, callback, and promise-based + * forms, and are accessible using both CommonJS syntax and ES6 Modules (ESM). + * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/fs.js) + */ +declare module 'fs' { + import * as stream from 'node:stream'; + import { Abortable, EventEmitter } from 'node:events'; + import { URL } from 'node:url'; + import * as promises from 'node:fs/promises'; + export { promises }; + /** + * Valid types for path values in "fs". 
+ */
+ export type PathLike = string | Buffer | URL;
+ export type PathOrFileDescriptor = PathLike | number;
+ export type TimeLike = string | number | Date;
+ export type NoParamCallback = (err: NodeJS.ErrnoException | null) => void;
+ export type BufferEncodingOption =
+ | 'buffer'
+ | {
+ encoding: 'buffer';
+ };
+ export interface ObjectEncodingOptions {
+ encoding?: BufferEncoding | null | undefined;
+ }
+ export type EncodingOption = ObjectEncodingOptions | BufferEncoding | undefined | null;
+ export type OpenMode = number | string;
+ export type Mode = number | string;
+ export interface StatsBase<T> {
+ isFile(): boolean;
+ isDirectory(): boolean;
+ isBlockDevice(): boolean;
+ isCharacterDevice(): boolean;
+ isSymbolicLink(): boolean;
+ isFIFO(): boolean;
+ isSocket(): boolean;
+ dev: T;
+ ino: T;
+ mode: T;
+ nlink: T;
+ uid: T;
+ gid: T;
+ rdev: T;
+ size: T;
+ blksize: T;
+ blocks: T;
+ atimeMs: T;
+ mtimeMs: T;
+ ctimeMs: T;
+ birthtimeMs: T;
+ atime: Date;
+ mtime: Date;
+ ctime: Date;
+ birthtime: Date;
+ }
+ export interface Stats extends StatsBase<number> {}
+ /**
+ * A `fs.Stats` object provides information about a file.
+ *
+ * Objects returned from {@link stat}, {@link lstat} and {@link fstat} and
+ * their synchronous counterparts are of this type.
+ * If `bigint` in the `options` passed to those methods is true, the numeric values
+ * will be `bigint` instead of `number`, and the object will contain additional
+ * nanosecond-precision properties suffixed with `Ns`.
+ *
+ * ```console
+ * Stats {
+ * dev: 2114,
+ * ino: 48064969,
+ * mode: 33188,
+ * nlink: 1,
+ * uid: 85,
+ * gid: 100,
+ * rdev: 0,
+ * size: 527,
+ * blksize: 4096,
+ * blocks: 8,
+ * atimeMs: 1318289051000.1,
+ * mtimeMs: 1318289051000.1,
+ * ctimeMs: 1318289051000.1,
+ * birthtimeMs: 1318289051000.1,
+ * atime: Mon, 10 Oct 2011 23:24:11 GMT,
+ * mtime: Mon, 10 Oct 2011 23:24:11 GMT,
+ * ctime: Mon, 10 Oct 2011 23:24:11 GMT,
+ * birthtime: Mon, 10 Oct 2011 23:24:11 GMT }
+ * ```
+ *
+ * `bigint` version:
+ *
+ * ```console
+ * BigIntStats {
+ * dev: 2114n,
+ * ino: 48064969n,
+ * mode: 33188n,
+ * nlink: 1n,
+ * uid: 85n,
+ * gid: 100n,
+ * rdev: 0n,
+ * size: 527n,
+ * blksize: 4096n,
+ * blocks: 8n,
+ * atimeMs: 1318289051000n,
+ * mtimeMs: 1318289051000n,
+ * ctimeMs: 1318289051000n,
+ * birthtimeMs: 1318289051000n,
+ * atimeNs: 1318289051000000000n,
+ * mtimeNs: 1318289051000000000n,
+ * ctimeNs: 1318289051000000000n,
+ * birthtimeNs: 1318289051000000000n,
+ * atime: Mon, 10 Oct 2011 23:24:11 GMT,
+ * mtime: Mon, 10 Oct 2011 23:24:11 GMT,
+ * ctime: Mon, 10 Oct 2011 23:24:11 GMT,
+ * birthtime: Mon, 10 Oct 2011 23:24:11 GMT }
+ * ```
+ * @since v0.1.21
+ */
+ export class Stats {}
+ /**
+ * A representation of a directory entry, which can be a file or a subdirectory
+ * within the directory, as returned by reading from an `fs.Dir`. The
+ * directory entry is a combination of the file name and file type pairs.
+ *
+ * Additionally, when {@link readdir} or {@link readdirSync} is called with
+ * the `withFileTypes` option set to `true`, the resulting array is filled with `fs.Dirent` objects, rather than strings or `Buffer` s.
+ * @since v10.10.0
+ */
+ export class Dirent {
+ /**
+ * Returns `true` if the `fs.Dirent` object describes a regular file.
+ * @since v10.10.0
+ */
+ isFile(): boolean;
+ /**
+ * Returns `true` if the `fs.Dirent` object describes a file system
+ * directory.
+ * @since v10.10.0
+ */
+ isDirectory(): boolean;
+ /**
+ * Returns `true` if the `fs.Dirent` object describes a block device.
+ * @since v10.10.0
+ */
+ isBlockDevice(): boolean;
+ /**
+ * Returns `true` if the `fs.Dirent` object describes a character device.
+ * @since v10.10.0
+ */
+ isCharacterDevice(): boolean;
+ /**
+ * Returns `true` if the `fs.Dirent` object describes a symbolic link.
+ * @since v10.10.0
+ */
+ isSymbolicLink(): boolean;
+ /**
+ * Returns `true` if the `fs.Dirent` object describes a first-in-first-out
+ * (FIFO) pipe.
+ * @since v10.10.0
+ */
+ isFIFO(): boolean;
+ /**
+ * Returns `true` if the `fs.Dirent` object describes a socket.
+ * @since v10.10.0
+ */
+ isSocket(): boolean;
+ /**
+ * The file name that this `fs.Dirent` object refers to. The type of this
+ * value is determined by the `options.encoding` passed to {@link readdir} or {@link readdirSync}.
+ * @since v10.10.0
+ */
+ name: string;
+ }
+ /**
+ * A class representing a directory stream.
+ *
+ * Created by {@link opendir}, {@link opendirSync}, or `fsPromises.opendir()`.
+ *
+ * ```js
+ * import { opendir } from 'fs/promises';
+ *
+ * try {
+ * const dir = await opendir('./');
+ * for await (const dirent of dir)
+ * console.log(dirent.name);
+ * } catch (err) {
+ * console.error(err);
+ * }
+ * ```
+ *
+ * When using the async iterator, the `fs.Dir` object will be automatically
+ * closed after the iterator exits.
+ * @since v12.12.0
+ */
+ export class Dir implements AsyncIterable<Dirent> {
+ /**
+ * The read-only path of this directory as was provided to {@link opendir},{@link opendirSync}, or `fsPromises.opendir()`.
+ * @since v12.12.0
+ */
+ readonly path: string;
+ /**
+ * Asynchronously iterates over the directory via `readdir(3)` until all entries have been read.
+ */
+ [Symbol.asyncIterator](): AsyncIterableIterator<Dirent>;
+ /**
+ * Asynchronously close the directory's underlying resource handle.
+ * Subsequent reads will result in errors.
+ *
+ * A promise is returned that will be resolved after the resource has been
+ * closed.
+ * @since v12.12.0
+ */
+ close(): Promise<void>;
+ close(cb: NoParamCallback): void;
+ /**
+ * Synchronously close the directory's underlying resource handle.
+ * Subsequent reads will result in errors.
+ * @since v12.12.0
+ */
+ closeSync(): void;
+ /**
+ * Asynchronously read the next directory entry via [`readdir(3)`](http://man7.org/linux/man-pages/man3/readdir.3.html) as an `fs.Dirent`.
+ *
+ * A promise is returned that will be resolved with an `fs.Dirent`, or `null`if there are no more directory entries to read.
+ *
+ * Directory entries returned by this function are in no particular order as
+ * provided by the operating system's underlying directory mechanisms.
+ * Entries added or removed while iterating over the directory might not be
+ * included in the iteration results.
+ * @since v12.12.0
+ * @return containing {fs.Dirent|null}
+ */
+ read(): Promise<Dirent | null>;
+ read(cb: (err: NodeJS.ErrnoException | null, dirEnt: Dirent | null) => void): void;
+ /**
+ * Synchronously read the next directory entry as an `fs.Dirent`. See the
+ * POSIX [`readdir(3)`](http://man7.org/linux/man-pages/man3/readdir.3.html) documentation for more detail.
+ *
+ * If there are no more directory entries to read, `null` will be returned.
+ *
+ * Directory entries returned by this function are in no particular order as
+ * provided by the operating system's underlying directory mechanisms.
+ * Entries added or removed while iterating over the directory might not be
+ * included in the iteration results.
+ * @since v12.12.0 + */ + readSync(): Dirent | null; + } + /** + * Class: fs.StatWatcher + * @since v14.3.0, v12.20.0 + * Extends `EventEmitter` + * A successful call to {@link watchFile} method will return a new fs.StatWatcher object. + */ + export interface StatWatcher extends EventEmitter { + /** + * When called, requests that the Node.js event loop _not_ exit so long as the `fs.StatWatcher` is active. Calling `watcher.ref()` multiple times will have + * no effect. + * + * By default, all `fs.StatWatcher` objects are "ref'ed", making it normally + * unnecessary to call `watcher.ref()` unless `watcher.unref()` had been + * called previously. + * @since v14.3.0, v12.20.0 + */ + ref(): this; + /** + * When called, the active `fs.StatWatcher` object will not require the Node.js + * event loop to remain active. If there is no other activity keeping the + * event loop running, the process may exit before the `fs.StatWatcher` object's + * callback is invoked. Calling `watcher.unref()` multiple times will have + * no effect. + * @since v14.3.0, v12.20.0 + */ + unref(): this; + } + export interface FSWatcher extends EventEmitter { + /** + * Stop watching for changes on the given `fs.FSWatcher`. Once stopped, the `fs.FSWatcher` object is no longer usable. + * @since v0.5.8 + */ + close(): void; + /** + * events.EventEmitter + * 1. change + * 2. error + */ + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: 'change', listener: (eventType: string, filename: string | Buffer) => void): this; + addListener(event: 'error', listener: (error: Error) => void): this; + addListener(event: 'close', listener: () => void): this; + on(event: string, listener: (...args: any[]) => void): this; + on(event: 'change', listener: (eventType: string, filename: string | Buffer) => void): this; + on(event: 'error', listener: (error: Error) => void): this; + on(event: 'close', listener: () => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: 'change', listener: (eventType: string, filename: string | Buffer) => void): this; + once(event: 'error', listener: (error: Error) => void): this; + once(event: 'close', listener: () => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: 'change', listener: (eventType: string, filename: string | Buffer) => void): this; + prependListener(event: 'error', listener: (error: Error) => void): this; + prependListener(event: 'close', listener: () => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: 'change', listener: (eventType: string, filename: string | Buffer) => void): this; + prependOnceListener(event: 'error', listener: (error: Error) => void): this; + prependOnceListener(event: 'close', listener: () => void): this; + } + /** + * Instances of `fs.ReadStream` are created and returned using the {@link createReadStream} function. + * @since v0.1.93 + */ + export class ReadStream extends stream.Readable { + close(callback?: (err?: NodeJS.ErrnoException | null) => void): void; + /** + * The number of bytes that have been read so far. + * @since v6.4.0 + */ + bytesRead: number; + /** + * The path to the file the stream is reading from as specified in the first + * argument to `fs.createReadStream()`. If `path` is passed as a string, then`readStream.path` will be a string. If `path` is passed as a `Buffer`, then`readStream.path` will be a + * `Buffer`. 
If `fd` is specified, then`readStream.path` will be `undefined`. + * @since v0.1.93 + */ + path: string | Buffer; + /** + * This property is `true` if the underlying file has not been opened yet, + * i.e. before the `'ready'` event is emitted. + * @since v11.2.0, v10.16.0 + */ + pending: boolean; + /** + * events.EventEmitter + * 1. open + * 2. close + * 3. ready + */ + addListener(event: 'close', listener: () => void): this; + addListener(event: 'data', listener: (chunk: Buffer | string) => void): this; + addListener(event: 'end', listener: () => void): this; + addListener(event: 'error', listener: (err: Error) => void): this; + addListener(event: 'open', listener: (fd: number) => void): this; + addListener(event: 'pause', listener: () => void): this; + addListener(event: 'readable', listener: () => void): this; + addListener(event: 'ready', listener: () => void): this; + addListener(event: 'resume', listener: () => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + on(event: 'close', listener: () => void): this; + on(event: 'data', listener: (chunk: Buffer | string) => void): this; + on(event: 'end', listener: () => void): this; + on(event: 'error', listener: (err: Error) => void): this; + on(event: 'open', listener: (fd: number) => void): this; + on(event: 'pause', listener: () => void): this; + on(event: 'readable', listener: () => void): this; + on(event: 'ready', listener: () => void): this; + on(event: 'resume', listener: () => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: 'close', listener: () => void): this; + once(event: 'data', listener: (chunk: Buffer | string) => void): this; + once(event: 'end', listener: () => void): this; + once(event: 'error', listener: (err: Error) => void): this; + once(event: 'open', listener: (fd: number) => void): this; + once(event: 'pause', listener: () => void): this; + once(event: 'readable', listener: () => void): this; + once(event: 'ready', listener: () => void): this; + once(event: 'resume', listener: () => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: 'close', listener: () => void): this; + prependListener(event: 'data', listener: (chunk: Buffer | string) => void): this; + prependListener(event: 'end', listener: () => void): this; + prependListener(event: 'error', listener: (err: Error) => void): this; + prependListener(event: 'open', listener: (fd: number) => void): this; + prependListener(event: 'pause', listener: () => void): this; + prependListener(event: 'readable', listener: () => void): this; + prependListener(event: 'ready', listener: () => void): this; + prependListener(event: 'resume', listener: () => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener(event: 'close', listener: () => void): this; + prependOnceListener(event: 'data', listener: (chunk: Buffer | string) => void): this; + prependOnceListener(event: 'end', listener: () => void): this; + prependOnceListener(event: 'error', listener: (err: Error) => void): this; + prependOnceListener(event: 'open', listener: (fd: number) => void): this; + prependOnceListener(event: 'pause', listener: () => void): this; + prependOnceListener(event: 'readable', listener: () => void): this; + prependOnceListener(event: 'ready', listener: () => void): this; + prependOnceListener(event: 'resume', listener: () => void): this; + prependOnceListener(event: string | 
symbol, listener: (...args: any[]) => void): this; + } + /** + * * Extends `stream.Writable` + * + * Instances of `fs.WriteStream` are created and returned using the {@link createWriteStream} function. + * @since v0.1.93 + */ + export class WriteStream extends stream.Writable { + /** + * Closes `writeStream`. Optionally accepts a + * callback that will be executed once the `writeStream`is closed. + * @since v0.9.4 + */ + close(callback?: (err?: NodeJS.ErrnoException | null) => void): void; + /** + * The number of bytes written so far. Does not include data that is still queued + * for writing. + * @since v0.4.7 + */ + bytesWritten: number; + /** + * The path to the file the stream is writing to as specified in the first + * argument to {@link createWriteStream}. If `path` is passed as a string, then`writeStream.path` will be a string. If `path` is passed as a `Buffer`, then`writeStream.path` will be a + * `Buffer`. + * @since v0.1.93 + */ + path: string | Buffer; + /** + * This property is `true` if the underlying file has not been opened yet, + * i.e. before the `'ready'` event is emitted. + * @since v11.2.0 + */ + pending: boolean; + /** + * events.EventEmitter + * 1. open + * 2. close + * 3. ready + */ + addListener(event: 'close', listener: () => void): this; + addListener(event: 'drain', listener: () => void): this; + addListener(event: 'error', listener: (err: Error) => void): this; + addListener(event: 'finish', listener: () => void): this; + addListener(event: 'open', listener: (fd: number) => void): this; + addListener(event: 'pipe', listener: (src: stream.Readable) => void): this; + addListener(event: 'ready', listener: () => void): this; + addListener(event: 'unpipe', listener: (src: stream.Readable) => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + on(event: 'close', listener: () => void): this; + on(event: 'drain', listener: () => void): this; + on(event: 'error', listener: (err: Error) => void): this; + on(event: 'finish', listener: () => void): this; + on(event: 'open', listener: (fd: number) => void): this; + on(event: 'pipe', listener: (src: stream.Readable) => void): this; + on(event: 'ready', listener: () => void): this; + on(event: 'unpipe', listener: (src: stream.Readable) => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: 'close', listener: () => void): this; + once(event: 'drain', listener: () => void): this; + once(event: 'error', listener: (err: Error) => void): this; + once(event: 'finish', listener: () => void): this; + once(event: 'open', listener: (fd: number) => void): this; + once(event: 'pipe', listener: (src: stream.Readable) => void): this; + once(event: 'ready', listener: () => void): this; + once(event: 'unpipe', listener: (src: stream.Readable) => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: 'close', listener: () => void): this; + prependListener(event: 'drain', listener: () => void): this; + prependListener(event: 'error', listener: (err: Error) => void): this; + prependListener(event: 'finish', listener: () => void): this; + prependListener(event: 'open', listener: (fd: number) => void): this; + prependListener(event: 'pipe', listener: (src: stream.Readable) => void): this; + prependListener(event: 'ready', listener: () => void): this; + prependListener(event: 'unpipe', listener: (src: stream.Readable) => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) 
=> void): this; + prependOnceListener(event: 'close', listener: () => void): this; + prependOnceListener(event: 'drain', listener: () => void): this; + prependOnceListener(event: 'error', listener: (err: Error) => void): this; + prependOnceListener(event: 'finish', listener: () => void): this; + prependOnceListener(event: 'open', listener: (fd: number) => void): this; + prependOnceListener(event: 'pipe', listener: (src: stream.Readable) => void): this; + prependOnceListener(event: 'ready', listener: () => void): this; + prependOnceListener(event: 'unpipe', listener: (src: stream.Readable) => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + /** + * Asynchronously rename file at `oldPath` to the pathname provided + * as `newPath`. In the case that `newPath` already exists, it will + * be overwritten. If there is a directory at `newPath`, an error will + * be raised instead. No arguments other than a possible exception are + * given to the completion callback. + * + * See also: [`rename(2)`](http://man7.org/linux/man-pages/man2/rename.2.html). + * + * ```js + * import { rename } from 'fs'; + * + * rename('oldFile.txt', 'newFile.txt', (err) => { + * if (err) throw err; + * console.log('Rename complete!'); + * }); + * ``` + * @since v0.0.2 + */ + export function rename(oldPath: PathLike, newPath: PathLike, callback: NoParamCallback): void; + export namespace rename { + /** + * Asynchronous rename(2) - Change the name or location of a file or directory. + * @param oldPath A path to a file. If a URL is provided, it must use the `file:` protocol. + * URL support is _experimental_. + * @param newPath A path to a file. If a URL is provided, it must use the `file:` protocol. + * URL support is _experimental_. + */ + function __promisify__(oldPath: PathLike, newPath: PathLike): Promise; + } + /** + * Renames the file from `oldPath` to `newPath`. Returns `undefined`. + * + * See the POSIX [`rename(2)`](http://man7.org/linux/man-pages/man2/rename.2.html) documentation for more details. + * @since v0.1.21 + */ + export function renameSync(oldPath: PathLike, newPath: PathLike): void; + /** + * Truncates the file. No arguments other than a possible exception are + * given to the completion callback. A file descriptor can also be passed as the + * first argument. In this case, `fs.ftruncate()` is called. + * + * ```js + * import { truncate } from 'fs'; + * // Assuming that 'path/file.txt' is a regular file. + * truncate('path/file.txt', (err) => { + * if (err) throw err; + * console.log('path/file.txt was truncated'); + * }); + * ``` + * + * Passing a file descriptor is deprecated and may result in an error being thrown + * in the future. + * + * See the POSIX [`truncate(2)`](http://man7.org/linux/man-pages/man2/truncate.2.html) documentation for more details. + * @since v0.8.6 + * @param [len=0] + */ + export function truncate(path: PathLike, len: number | undefined | null, callback: NoParamCallback): void; + /** + * Asynchronous truncate(2) - Truncate a file to a specified length. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + export function truncate(path: PathLike, callback: NoParamCallback): void; + export namespace truncate { + /** + * Asynchronous truncate(2) - Truncate a file to a specified length. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param len If not specified, defaults to `0`. 
+ */ + function __promisify__(path: PathLike, len?: number | null): Promise; + } + /** + * Truncates the file. Returns `undefined`. A file descriptor can also be + * passed as the first argument. In this case, `fs.ftruncateSync()` is called. + * + * Passing a file descriptor is deprecated and may result in an error being thrown + * in the future. + * @since v0.8.6 + * @param [len=0] + */ + export function truncateSync(path: PathLike, len?: number | null): void; + /** + * Truncates the file descriptor. No arguments other than a possible exception are + * given to the completion callback. + * + * See the POSIX [`ftruncate(2)`](http://man7.org/linux/man-pages/man2/ftruncate.2.html) documentation for more detail. + * + * If the file referred to by the file descriptor was larger than `len` bytes, only + * the first `len` bytes will be retained in the file. + * + * For example, the following program retains only the first four bytes of the + * file: + * + * ```js + * import { open, close, ftruncate } from 'fs'; + * + * function closeFd(fd) { + * close(fd, (err) => { + * if (err) throw err; + * }); + * } + * + * open('temp.txt', 'r+', (err, fd) => { + * if (err) throw err; + * + * try { + * ftruncate(fd, 4, (err) => { + * closeFd(fd); + * if (err) throw err; + * }); + * } catch (err) { + * closeFd(fd); + * if (err) throw err; + * } + * }); + * ``` + * + * If the file previously was shorter than `len` bytes, it is extended, and the + * extended part is filled with null bytes (`'\0'`): + * + * If `len` is negative then `0` will be used. + * @since v0.8.6 + * @param [len=0] + */ + export function ftruncate(fd: number, len: number | undefined | null, callback: NoParamCallback): void; + /** + * Asynchronous ftruncate(2) - Truncate a file to a specified length. + * @param fd A file descriptor. + */ + export function ftruncate(fd: number, callback: NoParamCallback): void; + export namespace ftruncate { + /** + * Asynchronous ftruncate(2) - Truncate a file to a specified length. + * @param fd A file descriptor. + * @param len If not specified, defaults to `0`. + */ + function __promisify__(fd: number, len?: number | null): Promise; + } + /** + * Truncates the file descriptor. Returns `undefined`. + * + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link ftruncate}. + * @since v0.8.6 + * @param [len=0] + */ + export function ftruncateSync(fd: number, len?: number | null): void; + /** + * Asynchronously changes owner and group of a file. No arguments other than a + * possible exception are given to the completion callback. + * + * See the POSIX [`chown(2)`](http://man7.org/linux/man-pages/man2/chown.2.html) documentation for more detail. + * @since v0.1.97 + */ + export function chown(path: PathLike, uid: number, gid: number, callback: NoParamCallback): void; + export namespace chown { + /** + * Asynchronous chown(2) - Change ownership of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + function __promisify__(path: PathLike, uid: number, gid: number): Promise; + } + /** + * Synchronously changes owner and group of a file. Returns `undefined`. + * This is the synchronous version of {@link chown}. + * + * See the POSIX [`chown(2)`](http://man7.org/linux/man-pages/man2/chown.2.html) documentation for more detail. + * @since v0.1.97 + */ + export function chownSync(path: PathLike, uid: number, gid: number): void; + /** + * Sets the owner of the file. 
No arguments other than a possible exception are + * given to the completion callback. + * + * See the POSIX [`fchown(2)`](http://man7.org/linux/man-pages/man2/fchown.2.html) documentation for more detail. + * @since v0.4.7 + */ + export function fchown(fd: number, uid: number, gid: number, callback: NoParamCallback): void; + export namespace fchown { + /** + * Asynchronous fchown(2) - Change ownership of a file. + * @param fd A file descriptor. + */ + function __promisify__(fd: number, uid: number, gid: number): Promise; + } + /** + * Sets the owner of the file. Returns `undefined`. + * + * See the POSIX [`fchown(2)`](http://man7.org/linux/man-pages/man2/fchown.2.html) documentation for more detail. + * @since v0.4.7 + * @param uid The file's new owner's user id. + * @param gid The file's new group's group id. + */ + export function fchownSync(fd: number, uid: number, gid: number): void; + /** + * Set the owner of the symbolic link. No arguments other than a possible + * exception are given to the completion callback. + * + * See the POSIX [`lchown(2)`](http://man7.org/linux/man-pages/man2/lchown.2.html) documentation for more detail. + */ + export function lchown(path: PathLike, uid: number, gid: number, callback: NoParamCallback): void; + export namespace lchown { + /** + * Asynchronous lchown(2) - Change ownership of a file. Does not dereference symbolic links. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + function __promisify__(path: PathLike, uid: number, gid: number): Promise; + } + /** + * Set the owner for the path. Returns `undefined`. + * + * See the POSIX [`lchown(2)`](http://man7.org/linux/man-pages/man2/lchown.2.html) documentation for more details. + * @param uid The file's new owner's user id. + * @param gid The file's new group's group id. + */ + export function lchownSync(path: PathLike, uid: number, gid: number): void; + /** + * Changes the access and modification times of a file in the same way as {@link utimes}, with the difference that if the path refers to a symbolic + * link, then the link is not dereferenced: instead, the timestamps of the + * symbolic link itself are changed. + * + * No arguments other than a possible exception are given to the completion + * callback. + * @since v14.5.0, v12.19.0 + */ + export function lutimes(path: PathLike, atime: TimeLike, mtime: TimeLike, callback: NoParamCallback): void; + export namespace lutimes { + /** + * Changes the access and modification times of a file in the same way as `fsPromises.utimes()`, + * with the difference that if the path refers to a symbolic link, then the link is not + * dereferenced: instead, the timestamps of the symbolic link itself are changed. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param atime The last access time. If a string is provided, it will be coerced to number. + * @param mtime The last modified time. If a string is provided, it will be coerced to number. + */ + function __promisify__(path: PathLike, atime: TimeLike, mtime: TimeLike): Promise; + } + /** + * Change the file system timestamps of the symbolic link referenced by `path`. + * Returns `undefined`, or throws an exception when parameters are incorrect or + * the operation fails. This is the synchronous version of {@link lutimes}. + * @since v14.5.0, v12.19.0 + */ + export function lutimesSync(path: PathLike, atime: TimeLike, mtime: TimeLike): void; + /** + * Asynchronously changes the permissions of a file. 
No arguments other than a + * possible exception are given to the completion callback. + * + * See the POSIX [`chmod(2)`](http://man7.org/linux/man-pages/man2/chmod.2.html) documentation for more detail. + * + * ```js + * import { chmod } from 'fs'; + * + * chmod('my_file.txt', 0o775, (err) => { + * if (err) throw err; + * console.log('The permissions for file "my_file.txt" have been changed!'); + * }); + * ``` + * @since v0.1.30 + */ + export function chmod(path: PathLike, mode: Mode, callback: NoParamCallback): void; + export namespace chmod { + /** + * Asynchronous chmod(2) - Change permissions of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param mode A file mode. If a string is passed, it is parsed as an octal integer. + */ + function __promisify__(path: PathLike, mode: Mode): Promise; + } + /** + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link chmod}. + * + * See the POSIX [`chmod(2)`](http://man7.org/linux/man-pages/man2/chmod.2.html) documentation for more detail. + * @since v0.6.7 + */ + export function chmodSync(path: PathLike, mode: Mode): void; + /** + * Sets the permissions on the file. No arguments other than a possible exception + * are given to the completion callback. + * + * See the POSIX [`fchmod(2)`](http://man7.org/linux/man-pages/man2/fchmod.2.html) documentation for more detail. + * @since v0.4.7 + */ + export function fchmod(fd: number, mode: Mode, callback: NoParamCallback): void; + export namespace fchmod { + /** + * Asynchronous fchmod(2) - Change permissions of a file. + * @param fd A file descriptor. + * @param mode A file mode. If a string is passed, it is parsed as an octal integer. + */ + function __promisify__(fd: number, mode: Mode): Promise; + } + /** + * Sets the permissions on the file. Returns `undefined`. + * + * See the POSIX [`fchmod(2)`](http://man7.org/linux/man-pages/man2/fchmod.2.html) documentation for more detail. + * @since v0.4.7 + */ + export function fchmodSync(fd: number, mode: Mode): void; + /** + * Changes the permissions on a symbolic link. No arguments other than a possible + * exception are given to the completion callback. + * + * This method is only implemented on macOS. + * + * See the POSIX [`lchmod(2)`](https://www.freebsd.org/cgi/man.cgi?query=lchmod&sektion=2) documentation for more detail. + * @deprecated Since v0.4.7 + */ + export function lchmod(path: PathLike, mode: Mode, callback: NoParamCallback): void; + /** @deprecated */ + export namespace lchmod { + /** + * Asynchronous lchmod(2) - Change permissions of a file. Does not dereference symbolic links. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param mode A file mode. If a string is passed, it is parsed as an octal integer. + */ + function __promisify__(path: PathLike, mode: Mode): Promise; + } + /** + * Changes the permissions on a symbolic link. Returns `undefined`. + * + * This method is only implemented on macOS. + * + * See the POSIX [`lchmod(2)`](https://www.freebsd.org/cgi/man.cgi?query=lchmod&sektion=2) documentation for more detail. + * @deprecated Since v0.4.7 + */ + export function lchmodSync(path: PathLike, mode: Mode): void; + /** + * Asynchronous [`stat(2)`](http://man7.org/linux/man-pages/man2/stat.2.html). The callback gets two arguments `(err, stats)` where`stats` is an `fs.Stats` object. + * + * In case of an error, the `err.code` will be one of `Common System Errors`. 
+ * + * Using `fs.stat()` to check for the existence of a file before calling`fs.open()`, `fs.readFile()` or `fs.writeFile()` is not recommended. + * Instead, user code should open/read/write the file directly and handle the + * error raised if the file is not available. + * + * To check if a file exists without manipulating it afterwards, {@link access} is recommended. + * + * For example, given the following directory structure: + * + * ```text + * - txtDir + * -- file.txt + * - app.js + * ``` + * + * The next program will check for the stats of the given paths: + * + * ```js + * import { stat } from 'fs'; + * + * const pathsToCheck = ['./txtDir', './txtDir/file.txt']; + * + * for (let i = 0; i < pathsToCheck.length; i++) { + * stat(pathsToCheck[i], (err, stats) => { + * console.log(stats.isDirectory()); + * console.log(stats); + * }); + * } + * ``` + * + * The resulting output will resemble: + * + * ```console + * true + * Stats { + * dev: 16777220, + * mode: 16877, + * nlink: 3, + * uid: 501, + * gid: 20, + * rdev: 0, + * blksize: 4096, + * ino: 14214262, + * size: 96, + * blocks: 0, + * atimeMs: 1561174653071.963, + * mtimeMs: 1561174614583.3518, + * ctimeMs: 1561174626623.5366, + * birthtimeMs: 1561174126937.2893, + * atime: 2019-06-22T03:37:33.072Z, + * mtime: 2019-06-22T03:36:54.583Z, + * ctime: 2019-06-22T03:37:06.624Z, + * birthtime: 2019-06-22T03:28:46.937Z + * } + * false + * Stats { + * dev: 16777220, + * mode: 33188, + * nlink: 1, + * uid: 501, + * gid: 20, + * rdev: 0, + * blksize: 4096, + * ino: 14214074, + * size: 8, + * blocks: 8, + * atimeMs: 1561174616618.8555, + * mtimeMs: 1561174614584, + * ctimeMs: 1561174614583.8145, + * birthtimeMs: 1561174007710.7478, + * atime: 2019-06-22T03:36:56.619Z, + * mtime: 2019-06-22T03:36:54.584Z, + * ctime: 2019-06-22T03:36:54.584Z, + * birthtime: 2019-06-22T03:26:47.711Z + * } + * ``` + * @since v0.0.2 + */ + export function stat(path: PathLike, callback: (err: NodeJS.ErrnoException | null, stats: Stats) => void): void; + export function stat( + path: PathLike, + options: + | (StatOptions & { + bigint?: false | undefined; + }) + | undefined, + callback: (err: NodeJS.ErrnoException | null, stats: Stats) => void + ): void; + export function stat( + path: PathLike, + options: StatOptions & { + bigint: true; + }, + callback: (err: NodeJS.ErrnoException | null, stats: BigIntStats) => void + ): void; + export function stat(path: PathLike, options: StatOptions | undefined, callback: (err: NodeJS.ErrnoException | null, stats: Stats | BigIntStats) => void): void; + export namespace stat { + /** + * Asynchronous stat(2) - Get file status. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. 
+ */
+ function __promisify__(
+ path: PathLike,
+ options?: StatOptions & {
+ bigint?: false | undefined;
+ }
+ ): Promise<Stats>;
+ function __promisify__(
+ path: PathLike,
+ options: StatOptions & {
+ bigint: true;
+ }
+ ): Promise<BigIntStats>;
+ function __promisify__(path: PathLike, options?: StatOptions): Promise<Stats | BigIntStats>;
+ }
+ export interface StatSyncFn extends Function {
+ (path: PathLike, options?: undefined): Stats;
+ (
+ path: PathLike,
+ options?: StatSyncOptions & {
+ bigint?: false | undefined;
+ throwIfNoEntry: false;
+ }
+ ): Stats | undefined;
+ (
+ path: PathLike,
+ options: StatSyncOptions & {
+ bigint: true;
+ throwIfNoEntry: false;
+ }
+ ): BigIntStats | undefined;
+ (
+ path: PathLike,
+ options?: StatSyncOptions & {
+ bigint?: false | undefined;
+ }
+ ): Stats;
+ (
+ path: PathLike,
+ options: StatSyncOptions & {
+ bigint: true;
+ }
+ ): BigIntStats;
+ (
+ path: PathLike,
+ options: StatSyncOptions & {
+ bigint: boolean;
+ throwIfNoEntry?: false | undefined;
+ }
+ ): Stats | BigIntStats;
+ (path: PathLike, options?: StatSyncOptions): Stats | BigIntStats | undefined;
+ }
+ /**
+ * Synchronous stat(2) - Get file status.
+ * @param path A path to a file. If a URL is provided, it must use the `file:` protocol.
+ */
+ export const statSync: StatSyncFn;
+ /**
+ * Invokes the callback with the `fs.Stats` for the file descriptor.
+ *
+ * See the POSIX [`fstat(2)`](http://man7.org/linux/man-pages/man2/fstat.2.html) documentation for more detail.
+ * @since v0.1.95
+ */
+ export function fstat(fd: number, callback: (err: NodeJS.ErrnoException | null, stats: Stats) => void): void;
+ export function fstat(
+ fd: number,
+ options:
+ | (StatOptions & {
+ bigint?: false | undefined;
+ })
+ | undefined,
+ callback: (err: NodeJS.ErrnoException | null, stats: Stats) => void
+ ): void;
+ export function fstat(
+ fd: number,
+ options: StatOptions & {
+ bigint: true;
+ },
+ callback: (err: NodeJS.ErrnoException | null, stats: BigIntStats) => void
+ ): void;
+ export function fstat(fd: number, options: StatOptions | undefined, callback: (err: NodeJS.ErrnoException | null, stats: Stats | BigIntStats) => void): void;
+ export namespace fstat {
+ /**
+ * Asynchronous fstat(2) - Get file status.
+ * @param fd A file descriptor.
+ */
+ function __promisify__(
+ fd: number,
+ options?: StatOptions & {
+ bigint?: false | undefined;
+ }
+ ): Promise<Stats>;
+ function __promisify__(
+ fd: number,
+ options: StatOptions & {
+ bigint: true;
+ }
+ ): Promise<BigIntStats>;
+ function __promisify__(fd: number, options?: StatOptions): Promise<Stats | BigIntStats>;
+ }
+ /**
+ * Retrieves the `fs.Stats` for the file descriptor.
+ *
+ * See the POSIX [`fstat(2)`](http://man7.org/linux/man-pages/man2/fstat.2.html) documentation for more detail.
+ * @since v0.1.95
+ */
+ export function fstatSync(
+ fd: number,
+ options?: StatOptions & {
+ bigint?: false | undefined;
+ }
+ ): Stats;
+ export function fstatSync(
+ fd: number,
+ options: StatOptions & {
+ bigint: true;
+ }
+ ): BigIntStats;
+ export function fstatSync(fd: number, options?: StatOptions): Stats | BigIntStats;
+ /**
+ * Retrieves the `fs.Stats` for the symbolic link referred to by the path.
+ * The callback gets two arguments `(err, stats)` where `stats` is a `fs.Stats` object. `lstat()` is identical to `stat()`, except that if `path` is a symbolic
+ * link, then the link itself is stat-ed, not the file that it refers to.
+ *
+ * See the POSIX [`lstat(2)`](http://man7.org/linux/man-pages/man2/lstat.2.html) documentation for more details.
+ * @since v0.1.30 + */ + export function lstat(path: PathLike, callback: (err: NodeJS.ErrnoException | null, stats: Stats) => void): void; + export function lstat( + path: PathLike, + options: + | (StatOptions & { + bigint?: false | undefined; + }) + | undefined, + callback: (err: NodeJS.ErrnoException | null, stats: Stats) => void + ): void; + export function lstat( + path: PathLike, + options: StatOptions & { + bigint: true; + }, + callback: (err: NodeJS.ErrnoException | null, stats: BigIntStats) => void + ): void; + export function lstat(path: PathLike, options: StatOptions | undefined, callback: (err: NodeJS.ErrnoException | null, stats: Stats | BigIntStats) => void): void; + export namespace lstat { + /** + * Asynchronous lstat(2) - Get file status. Does not dereference symbolic links. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + function __promisify__( + path: PathLike, + options?: StatOptions & { + bigint?: false | undefined; + } + ): Promise; + function __promisify__( + path: PathLike, + options: StatOptions & { + bigint: true; + } + ): Promise; + function __promisify__(path: PathLike, options?: StatOptions): Promise; + } + /** + * Synchronous lstat(2) - Get file status. Does not dereference symbolic links. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + export const lstatSync: StatSyncFn; + /** + * Creates a new link from the `existingPath` to the `newPath`. See the POSIX [`link(2)`](http://man7.org/linux/man-pages/man2/link.2.html) documentation for more detail. No arguments other than + * a possible + * exception are given to the completion callback. + * @since v0.1.31 + */ + export function link(existingPath: PathLike, newPath: PathLike, callback: NoParamCallback): void; + export namespace link { + /** + * Asynchronous link(2) - Create a new link (also known as a hard link) to an existing file. + * @param existingPath A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param newPath A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + function __promisify__(existingPath: PathLike, newPath: PathLike): Promise; + } + /** + * Creates a new link from the `existingPath` to the `newPath`. See the POSIX [`link(2)`](http://man7.org/linux/man-pages/man2/link.2.html) documentation for more detail. Returns `undefined`. + * @since v0.1.31 + */ + export function linkSync(existingPath: PathLike, newPath: PathLike): void; + /** + * Creates the link called `path` pointing to `target`. No arguments other than a + * possible exception are given to the completion callback. + * + * See the POSIX [`symlink(2)`](http://man7.org/linux/man-pages/man2/symlink.2.html) documentation for more details. + * + * The `type` argument is only available on Windows and ignored on other platforms. + * It can be set to `'dir'`, `'file'`, or `'junction'`. If the `type` argument is + * not set, Node.js will autodetect `target` type and use `'file'` or `'dir'`. If + * the `target` does not exist, `'file'` will be used. Windows junction points + * require the destination path to be absolute. When using `'junction'`, the`target` argument will automatically be normalized to absolute path. + * + * Relative targets are relative to the link’s parent directory. 
+ * + * ```js + * import { symlink } from 'fs'; + * + * symlink('./mew', './mewtwo', callback); + * ``` + * + * The above example creates a symbolic link `mewtwo` which points to `mew` in the + * same directory: + * + * ```bash + * $ tree . + * . + * ├── mew + * └── mewtwo -> ./mew + * ``` + * @since v0.1.31 + */ + export function symlink(target: PathLike, path: PathLike, type: symlink.Type | undefined | null, callback: NoParamCallback): void; + /** + * Asynchronous symlink(2) - Create a new symbolic link to an existing file. + * @param target A path to an existing file. If a URL is provided, it must use the `file:` protocol. + * @param path A path to the new symlink. If a URL is provided, it must use the `file:` protocol. + */ + export function symlink(target: PathLike, path: PathLike, callback: NoParamCallback): void; + export namespace symlink { + /** + * Asynchronous symlink(2) - Create a new symbolic link to an existing file. + * @param target A path to an existing file. If a URL is provided, it must use the `file:` protocol. + * @param path A path to the new symlink. If a URL is provided, it must use the `file:` protocol. + * @param type May be set to `'dir'`, `'file'`, or `'junction'` (default is `'file'`) and is only available on Windows (ignored on other platforms). + * When using `'junction'`, the `target` argument will automatically be normalized to an absolute path. + */ + function __promisify__(target: PathLike, path: PathLike, type?: string | null): Promise; + type Type = 'dir' | 'file' | 'junction'; + } + /** + * Returns `undefined`. + * + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link symlink}. + * @since v0.1.31 + */ + export function symlinkSync(target: PathLike, path: PathLike, type?: symlink.Type | null): void; + /** + * Reads the contents of the symbolic link referred to by `path`. The callback gets + * two arguments `(err, linkString)`. + * + * See the POSIX [`readlink(2)`](http://man7.org/linux/man-pages/man2/readlink.2.html) documentation for more details. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use for + * the link path passed to the callback. If the `encoding` is set to `'buffer'`, + * the link path returned will be passed as a `Buffer` object. + * @since v0.1.31 + */ + export function readlink(path: PathLike, options: EncodingOption, callback: (err: NodeJS.ErrnoException | null, linkString: string) => void): void; + /** + * Asynchronous readlink(2) - read value of a symbolic link. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function readlink(path: PathLike, options: BufferEncodingOption, callback: (err: NodeJS.ErrnoException | null, linkString: Buffer) => void): void; + /** + * Asynchronous readlink(2) - read value of a symbolic link. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function readlink(path: PathLike, options: EncodingOption, callback: (err: NodeJS.ErrnoException | null, linkString: string | Buffer) => void): void; + /** + * Asynchronous readlink(2) - read value of a symbolic link. 
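+ *
+ * For example, assuming `./mewtwo` is the symbolic link created in the
+ * `symlink()` example above:
+ *
+ * ```js
+ * import { readlink } from 'fs';
+ *
+ * readlink('./mewtwo', (err, linkString) => {
+ *   if (err) throw err;
+ *   console.log(linkString); // Prints: ./mew
+ * });
+ * ```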
+ * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + export function readlink(path: PathLike, callback: (err: NodeJS.ErrnoException | null, linkString: string) => void): void; + export namespace readlink { + /** + * Asynchronous readlink(2) - read value of a symbolic link. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__(path: PathLike, options?: EncodingOption): Promise; + /** + * Asynchronous readlink(2) - read value of a symbolic link. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__(path: PathLike, options: BufferEncodingOption): Promise; + /** + * Asynchronous readlink(2) - read value of a symbolic link. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__(path: PathLike, options?: EncodingOption): Promise; + } + /** + * Returns the symbolic link's string value. + * + * See the POSIX [`readlink(2)`](http://man7.org/linux/man-pages/man2/readlink.2.html) documentation for more details. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use for + * the link path returned. If the `encoding` is set to `'buffer'`, + * the link path returned will be passed as a `Buffer` object. + * @since v0.1.31 + */ + export function readlinkSync(path: PathLike, options?: EncodingOption): string; + /** + * Synchronous readlink(2) - read value of a symbolic link. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function readlinkSync(path: PathLike, options: BufferEncodingOption): Buffer; + /** + * Synchronous readlink(2) - read value of a symbolic link. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function readlinkSync(path: PathLike, options?: EncodingOption): string | Buffer; + /** + * Asynchronously computes the canonical pathname by resolving `.`, `..` and + * symbolic links. + * + * A canonical pathname is not necessarily unique. Hard links and bind mounts can + * expose a file system entity through many pathnames. + * + * This function behaves like [`realpath(3)`](http://man7.org/linux/man-pages/man3/realpath.3.html), with some exceptions: + * + * 1. No case conversion is performed on case-insensitive file systems. + * 2. The maximum number of symbolic links is platform-independent and generally + * (much) higher than what the native [`realpath(3)`](http://man7.org/linux/man-pages/man3/realpath.3.html) implementation supports. + * + * The `callback` gets two arguments `(err, resolvedPath)`. 
May use `process.cwd`to resolve relative paths. + * + * Only paths that can be converted to UTF8 strings are supported. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use for + * the path passed to the callback. If the `encoding` is set to `'buffer'`, + * the path returned will be passed as a `Buffer` object. + * + * If `path` resolves to a socket or a pipe, the function will return a system + * dependent name for that object. + * @since v0.1.31 + */ + export function realpath(path: PathLike, options: EncodingOption, callback: (err: NodeJS.ErrnoException | null, resolvedPath: string) => void): void; + /** + * Asynchronous realpath(3) - return the canonicalized absolute pathname. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function realpath(path: PathLike, options: BufferEncodingOption, callback: (err: NodeJS.ErrnoException | null, resolvedPath: Buffer) => void): void; + /** + * Asynchronous realpath(3) - return the canonicalized absolute pathname. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function realpath(path: PathLike, options: EncodingOption, callback: (err: NodeJS.ErrnoException | null, resolvedPath: string | Buffer) => void): void; + /** + * Asynchronous realpath(3) - return the canonicalized absolute pathname. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + export function realpath(path: PathLike, callback: (err: NodeJS.ErrnoException | null, resolvedPath: string) => void): void; + export namespace realpath { + /** + * Asynchronous realpath(3) - return the canonicalized absolute pathname. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__(path: PathLike, options?: EncodingOption): Promise; + /** + * Asynchronous realpath(3) - return the canonicalized absolute pathname. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__(path: PathLike, options: BufferEncodingOption): Promise; + /** + * Asynchronous realpath(3) - return the canonicalized absolute pathname. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__(path: PathLike, options?: EncodingOption): Promise; + /** + * Asynchronous [`realpath(3)`](http://man7.org/linux/man-pages/man3/realpath.3.html). + * + * The `callback` gets two arguments `(err, resolvedPath)`. + * + * Only paths that can be converted to UTF8 strings are supported. 
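+ *
+ * A minimal sketch, assuming a hypothetical `/tmp` that is itself a symbolic
+ * link (as it is on some platforms):
+ *
+ * ```js
+ * import { realpath } from 'fs';
+ *
+ * realpath.native('/tmp', (err, resolvedPath) => {
+ *   if (err) throw err;
+ *   console.log(resolvedPath); // e.g. '/private/tmp' where /tmp is a symlink
+ * });
+ * ```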
+ * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use for + * the path passed to the callback. If the `encoding` is set to `'buffer'`, + * the path returned will be passed as a `Buffer` object. + * + * On Linux, when Node.js is linked against musl libc, the procfs file system must + * be mounted on `/proc` in order for this function to work. Glibc does not have + * this restriction. + * @since v9.2.0 + */ + function native(path: PathLike, options: EncodingOption, callback: (err: NodeJS.ErrnoException | null, resolvedPath: string) => void): void; + function native(path: PathLike, options: BufferEncodingOption, callback: (err: NodeJS.ErrnoException | null, resolvedPath: Buffer) => void): void; + function native(path: PathLike, options: EncodingOption, callback: (err: NodeJS.ErrnoException | null, resolvedPath: string | Buffer) => void): void; + function native(path: PathLike, callback: (err: NodeJS.ErrnoException | null, resolvedPath: string) => void): void; + } + /** + * Returns the resolved pathname. + * + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link realpath}. + * @since v0.1.31 + */ + export function realpathSync(path: PathLike, options?: EncodingOption): string; + /** + * Synchronous realpath(3) - return the canonicalized absolute pathname. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function realpathSync(path: PathLike, options: BufferEncodingOption): Buffer; + /** + * Synchronous realpath(3) - return the canonicalized absolute pathname. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function realpathSync(path: PathLike, options?: EncodingOption): string | Buffer; + export namespace realpathSync { + function native(path: PathLike, options?: EncodingOption): string; + function native(path: PathLike, options: BufferEncodingOption): Buffer; + function native(path: PathLike, options?: EncodingOption): string | Buffer; + } + /** + * Asynchronously removes a file or symbolic link. No arguments other than a + * possible exception are given to the completion callback. + * + * ```js + * import { unlink } from 'fs'; + * // Assuming that 'path/file.txt' is a regular file. + * unlink('path/file.txt', (err) => { + * if (err) throw err; + * console.log('path/file.txt was deleted'); + * }); + * ``` + * + * `fs.unlink()` will not work on a directory, empty or otherwise. To remove a + * directory, use {@link rmdir}. + * + * See the POSIX [`unlink(2)`](http://man7.org/linux/man-pages/man2/unlink.2.html) documentation for more details. + * @since v0.0.2 + */ + export function unlink(path: PathLike, callback: NoParamCallback): void; + export namespace unlink { + /** + * Asynchronous unlink(2) - delete a name and possibly the file it refers to. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + function __promisify__(path: PathLike): Promise; + } + /** + * Synchronous [`unlink(2)`](http://man7.org/linux/man-pages/man2/unlink.2.html). Returns `undefined`. 
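+ *
+ * A synchronous counterpart of the example above, assuming `path/file.txt`
+ * is a regular file:
+ *
+ * ```js
+ * import { unlinkSync } from 'fs';
+ *
+ * try {
+ *   unlinkSync('path/file.txt');
+ *   console.log('path/file.txt was deleted');
+ * } catch (err) {
+ *   console.error(err);
+ * }
+ * ```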
+ * @since v0.1.21 + */ + export function unlinkSync(path: PathLike): void; + export interface RmDirOptions { + /** + * If an `EBUSY`, `EMFILE`, `ENFILE`, `ENOTEMPTY`, or + * `EPERM` error is encountered, Node.js will retry the operation with a linear + * backoff wait of `retryDelay` ms longer on each try. This option represents the + * number of retries. This option is ignored if the `recursive` option is not + * `true`. + * @default 0 + */ + maxRetries?: number | undefined; + /** + * @deprecated since v14.14.0 In future versions of Node.js and will trigger a warning + * `fs.rmdir(path, { recursive: true })` will throw if `path` does not exist or is a file. + * Use `fs.rm(path, { recursive: true, force: true })` instead. + * + * If `true`, perform a recursive directory removal. In + * recursive mode, operations are retried on failure. + * @default false + */ + recursive?: boolean | undefined; + /** + * The amount of time in milliseconds to wait between retries. + * This option is ignored if the `recursive` option is not `true`. + * @default 100 + */ + retryDelay?: number | undefined; + } + /** + * Asynchronous [`rmdir(2)`](http://man7.org/linux/man-pages/man2/rmdir.2.html). No arguments other than a possible exception are given + * to the completion callback. + * + * Using `fs.rmdir()` on a file (not a directory) results in an `ENOENT` error on + * Windows and an `ENOTDIR` error on POSIX. + * + * To get a behavior similar to the `rm -rf` Unix command, use {@link rm} with options `{ recursive: true, force: true }`. + * @since v0.0.2 + */ + export function rmdir(path: PathLike, callback: NoParamCallback): void; + export function rmdir(path: PathLike, options: RmDirOptions, callback: NoParamCallback): void; + export namespace rmdir { + /** + * Asynchronous rmdir(2) - delete a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + function __promisify__(path: PathLike, options?: RmDirOptions): Promise; + } + /** + * Synchronous [`rmdir(2)`](http://man7.org/linux/man-pages/man2/rmdir.2.html). Returns `undefined`. + * + * Using `fs.rmdirSync()` on a file (not a directory) results in an `ENOENT` error + * on Windows and an `ENOTDIR` error on POSIX. + * + * To get a behavior similar to the `rm -rf` Unix command, use {@link rmSync} with options `{ recursive: true, force: true }`. + * @since v0.1.21 + */ + export function rmdirSync(path: PathLike, options?: RmDirOptions): void; + export interface RmOptions { + /** + * When `true`, exceptions will be ignored if `path` does not exist. + * @default false + */ + force?: boolean | undefined; + /** + * If an `EBUSY`, `EMFILE`, `ENFILE`, `ENOTEMPTY`, or + * `EPERM` error is encountered, Node.js will retry the operation with a linear + * backoff wait of `retryDelay` ms longer on each try. This option represents the + * number of retries. This option is ignored if the `recursive` option is not + * `true`. + * @default 0 + */ + maxRetries?: number | undefined; + /** + * If `true`, perform a recursive directory removal. In + * recursive mode, operations are retried on failure. + * @default false + */ + recursive?: boolean | undefined; + /** + * The amount of time in milliseconds to wait between retries. + * This option is ignored if the `recursive` option is not `true`. + * @default 100 + */ + retryDelay?: number | undefined; + } + /** + * Asynchronously removes files and directories (modeled on the standard POSIX `rm`utility). 
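+ *
+ * One possible way to approximate the `rm -rf` Unix command mentioned above
+ * (the target path here is hypothetical):
+ *
+ * ```js
+ * import { rm } from 'fs';
+ *
+ * rm('./some/dir', { recursive: true, force: true }, (err) => {
+ *   if (err) throw err;
+ *   console.log('./some/dir was removed');
+ * });
+ * ```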
No arguments other than a possible exception are given to the + * completion callback. + * @since v14.14.0 + */ + export function rm(path: PathLike, callback: NoParamCallback): void; + export function rm(path: PathLike, options: RmOptions, callback: NoParamCallback): void; + export namespace rm { + /** + * Asynchronously removes files and directories (modeled on the standard POSIX `rm` utility). + */ + function __promisify__(path: PathLike, options?: RmOptions): Promise; + } + /** + * Synchronously removes files and directories (modeled on the standard POSIX `rm`utility). Returns `undefined`. + * @since v14.14.0 + */ + export function rmSync(path: PathLike, options?: RmOptions): void; + export interface MakeDirectoryOptions { + /** + * Indicates whether parent folders should be created. + * If a folder was created, the path to the first created folder will be returned. + * @default false + */ + recursive?: boolean | undefined; + /** + * A file mode. If a string is passed, it is parsed as an octal integer. If not specified + * @default 0o777 + */ + mode?: Mode | undefined; + } + /** + * Asynchronously creates a directory. + * + * The callback is given a possible exception and, if `recursive` is `true`, the + * first directory path created, `(err[, path])`.`path` can still be `undefined` when `recursive` is `true`, if no directory was + * created. + * + * The optional `options` argument can be an integer specifying `mode` (permission + * and sticky bits), or an object with a `mode` property and a `recursive`property indicating whether parent directories should be created. Calling`fs.mkdir()` when `path` is a directory that + * exists results in an error only + * when `recursive` is false. + * + * ```js + * import { mkdir } from 'fs'; + * + * // Creates /tmp/a/apple, regardless of whether `/tmp` and /tmp/a exist. + * mkdir('/tmp/a/apple', { recursive: true }, (err) => { + * if (err) throw err; + * }); + * ``` + * + * On Windows, using `fs.mkdir()` on the root directory even with recursion will + * result in an error: + * + * ```js + * import { mkdir } from 'fs'; + * + * mkdir('/', { recursive: true }, (err) => { + * // => [Error: EPERM: operation not permitted, mkdir 'C:\'] + * }); + * ``` + * + * See the POSIX [`mkdir(2)`](http://man7.org/linux/man-pages/man2/mkdir.2.html) documentation for more details. + * @since v0.1.8 + */ + export function mkdir( + path: PathLike, + options: MakeDirectoryOptions & { + recursive: true; + }, + callback: (err: NodeJS.ErrnoException | null, path?: string) => void + ): void; + /** + * Asynchronous mkdir(2) - create a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options Either the file mode, or an object optionally specifying the file mode and whether parent folders + * should be created. If a string is passed, it is parsed as an octal integer. If not specified, defaults to `0o777`. + */ + export function mkdir( + path: PathLike, + options: + | Mode + | (MakeDirectoryOptions & { + recursive?: false | undefined; + }) + | null + | undefined, + callback: NoParamCallback + ): void; + /** + * Asynchronous mkdir(2) - create a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options Either the file mode, or an object optionally specifying the file mode and whether parent folders + * should be created. If a string is passed, it is parsed as an octal integer. If not specified, defaults to `0o777`. 
+ */ + export function mkdir(path: PathLike, options: Mode | MakeDirectoryOptions | null | undefined, callback: (err: NodeJS.ErrnoException | null, path?: string) => void): void; + /** + * Asynchronous mkdir(2) - create a directory with a mode of `0o777`. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + export function mkdir(path: PathLike, callback: NoParamCallback): void; + export namespace mkdir { + /** + * Asynchronous mkdir(2) - create a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options Either the file mode, or an object optionally specifying the file mode and whether parent folders + * should be created. If a string is passed, it is parsed as an octal integer. If not specified, defaults to `0o777`. + */ + function __promisify__( + path: PathLike, + options: MakeDirectoryOptions & { + recursive: true; + } + ): Promise; + /** + * Asynchronous mkdir(2) - create a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options Either the file mode, or an object optionally specifying the file mode and whether parent folders + * should be created. If a string is passed, it is parsed as an octal integer. If not specified, defaults to `0o777`. + */ + function __promisify__( + path: PathLike, + options?: + | Mode + | (MakeDirectoryOptions & { + recursive?: false | undefined; + }) + | null + ): Promise; + /** + * Asynchronous mkdir(2) - create a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options Either the file mode, or an object optionally specifying the file mode and whether parent folders + * should be created. If a string is passed, it is parsed as an octal integer. If not specified, defaults to `0o777`. + */ + function __promisify__(path: PathLike, options?: Mode | MakeDirectoryOptions | null): Promise; + } + /** + * Synchronously creates a directory. Returns `undefined`, or if `recursive` is`true`, the first directory path created. + * This is the synchronous version of {@link mkdir}. + * + * See the POSIX [`mkdir(2)`](http://man7.org/linux/man-pages/man2/mkdir.2.html) documentation for more details. + * @since v0.1.21 + */ + export function mkdirSync( + path: PathLike, + options: MakeDirectoryOptions & { + recursive: true; + } + ): string | undefined; + /** + * Synchronous mkdir(2) - create a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options Either the file mode, or an object optionally specifying the file mode and whether parent folders + * should be created. If a string is passed, it is parsed as an octal integer. If not specified, defaults to `0o777`. + */ + export function mkdirSync( + path: PathLike, + options?: + | Mode + | (MakeDirectoryOptions & { + recursive?: false | undefined; + }) + | null + ): void; + /** + * Synchronous mkdir(2) - create a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options Either the file mode, or an object optionally specifying the file mode and whether parent folders + * should be created. If a string is passed, it is parsed as an octal integer. If not specified, defaults to `0o777`. + */ + export function mkdirSync(path: PathLike, options?: Mode | MakeDirectoryOptions | null): string | undefined; + /** + * Creates a unique temporary directory. 
+ * + * Generates six random characters to be appended behind a required`prefix` to create a unique temporary directory. Due to platform + * inconsistencies, avoid trailing `X` characters in `prefix`. Some platforms, + * notably the BSDs, can return more than six random characters, and replace + * trailing `X` characters in `prefix` with random characters. + * + * The created directory path is passed as a string to the callback's second + * parameter. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use. + * + * ```js + * import { mkdtemp } from 'fs'; + * + * mkdtemp(path.join(os.tmpdir(), 'foo-'), (err, directory) => { + * if (err) throw err; + * console.log(directory); + * // Prints: /tmp/foo-itXde2 or C:\Users\...\AppData\Local\Temp\foo-itXde2 + * }); + * ``` + * + * The `fs.mkdtemp()` method will append the six randomly selected characters + * directly to the `prefix` string. For instance, given a directory `/tmp`, if the + * intention is to create a temporary directory _within_`/tmp`, the `prefix`must end with a trailing platform-specific path separator + * (`require('path').sep`). + * + * ```js + * import { tmpdir } from 'os'; + * import { mkdtemp } from 'fs'; + * + * // The parent directory for the new temporary directory + * const tmpDir = tmpdir(); + * + * // This method is *INCORRECT*: + * mkdtemp(tmpDir, (err, directory) => { + * if (err) throw err; + * console.log(directory); + * // Will print something similar to `/tmpabc123`. + * // A new temporary directory is created at the file system root + * // rather than *within* the /tmp directory. + * }); + * + * // This method is *CORRECT*: + * import { sep } from 'path'; + * mkdtemp(`${tmpDir}${sep}`, (err, directory) => { + * if (err) throw err; + * console.log(directory); + * // Will print something similar to `/tmp/abc123`. + * // A new temporary directory is created within + * // the /tmp directory. + * }); + * ``` + * @since v5.10.0 + */ + export function mkdtemp(prefix: string, options: EncodingOption, callback: (err: NodeJS.ErrnoException | null, folder: string) => void): void; + /** + * Asynchronously creates a unique temporary directory. + * Generates six random characters to be appended behind a required prefix to create a unique temporary directory. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function mkdtemp( + prefix: string, + options: + | 'buffer' + | { + encoding: 'buffer'; + }, + callback: (err: NodeJS.ErrnoException | null, folder: Buffer) => void + ): void; + /** + * Asynchronously creates a unique temporary directory. + * Generates six random characters to be appended behind a required prefix to create a unique temporary directory. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function mkdtemp(prefix: string, options: EncodingOption, callback: (err: NodeJS.ErrnoException | null, folder: string | Buffer) => void): void; + /** + * Asynchronously creates a unique temporary directory. + * Generates six random characters to be appended behind a required prefix to create a unique temporary directory. 
+ */ + export function mkdtemp(prefix: string, callback: (err: NodeJS.ErrnoException | null, folder: string) => void): void; + export namespace mkdtemp { + /** + * Asynchronously creates a unique temporary directory. + * Generates six random characters to be appended behind a required prefix to create a unique temporary directory. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__(prefix: string, options?: EncodingOption): Promise; + /** + * Asynchronously creates a unique temporary directory. + * Generates six random characters to be appended behind a required prefix to create a unique temporary directory. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__(prefix: string, options: BufferEncodingOption): Promise; + /** + * Asynchronously creates a unique temporary directory. + * Generates six random characters to be appended behind a required prefix to create a unique temporary directory. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__(prefix: string, options?: EncodingOption): Promise; + } + /** + * Returns the created directory path. + * + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link mkdtemp}. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use. + * @since v5.10.0 + */ + export function mkdtempSync(prefix: string, options?: EncodingOption): string; + /** + * Synchronously creates a unique temporary directory. + * Generates six random characters to be appended behind a required prefix to create a unique temporary directory. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function mkdtempSync(prefix: string, options: BufferEncodingOption): Buffer; + /** + * Synchronously creates a unique temporary directory. + * Generates six random characters to be appended behind a required prefix to create a unique temporary directory. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function mkdtempSync(prefix: string, options?: EncodingOption): string | Buffer; + /** + * Reads the contents of a directory. The callback gets two arguments `(err, files)`where `files` is an array of the names of the files in the directory excluding`'.'` and `'..'`. + * + * See the POSIX [`readdir(3)`](http://man7.org/linux/man-pages/man3/readdir.3.html) documentation for more details. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use for + * the filenames passed to the callback. If the `encoding` is set to `'buffer'`, + * the filenames returned will be passed as `Buffer` objects. + * + * If `options.withFileTypes` is set to `true`, the `files` array will contain `fs.Dirent` objects. 
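+ *
+ * For example, using the hypothetical `./txtDir` directory from the `stat()`
+ * example above with `withFileTypes: true`:
+ *
+ * ```js
+ * import { readdir } from 'fs';
+ *
+ * readdir('./txtDir', { withFileTypes: true }, (err, entries) => {
+ *   if (err) throw err;
+ *   for (const entry of entries) {
+ *     console.log(entry.name, entry.isDirectory()); // e.g. 'file.txt' false
+ *   }
+ * });
+ * ```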
+ * @since v0.1.8 + */ + export function readdir( + path: PathLike, + options: + | { + encoding: BufferEncoding | null; + withFileTypes?: false | undefined; + } + | BufferEncoding + | undefined + | null, + callback: (err: NodeJS.ErrnoException | null, files: string[]) => void + ): void; + /** + * Asynchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function readdir( + path: PathLike, + options: + | { + encoding: 'buffer'; + withFileTypes?: false | undefined; + } + | 'buffer', + callback: (err: NodeJS.ErrnoException | null, files: Buffer[]) => void + ): void; + /** + * Asynchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function readdir( + path: PathLike, + options: + | (ObjectEncodingOptions & { + withFileTypes?: false | undefined; + }) + | BufferEncoding + | undefined + | null, + callback: (err: NodeJS.ErrnoException | null, files: string[] | Buffer[]) => void + ): void; + /** + * Asynchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + export function readdir(path: PathLike, callback: (err: NodeJS.ErrnoException | null, files: string[]) => void): void; + /** + * Asynchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options If called with `withFileTypes: true` the result data will be an array of Dirent. + */ + export function readdir( + path: PathLike, + options: ObjectEncodingOptions & { + withFileTypes: true; + }, + callback: (err: NodeJS.ErrnoException | null, files: Dirent[]) => void + ): void; + export namespace readdir { + /** + * Asynchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__( + path: PathLike, + options?: + | { + encoding: BufferEncoding | null; + withFileTypes?: false | undefined; + } + | BufferEncoding + | null + ): Promise; + /** + * Asynchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__( + path: PathLike, + options: + | 'buffer' + | { + encoding: 'buffer'; + withFileTypes?: false | undefined; + } + ): Promise; + /** + * Asynchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__( + path: PathLike, + options?: + | (ObjectEncodingOptions & { + withFileTypes?: false | undefined; + }) + | BufferEncoding + | null + ): Promise; + /** + * Asynchronous readdir(3) - read a directory. 
+ * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options If called with `withFileTypes: true` the result data will be an array of Dirent + */ + function __promisify__( + path: PathLike, + options: ObjectEncodingOptions & { + withFileTypes: true; + } + ): Promise; + } + /** + * Reads the contents of the directory. + * + * See the POSIX [`readdir(3)`](http://man7.org/linux/man-pages/man3/readdir.3.html) documentation for more details. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use for + * the filenames returned. If the `encoding` is set to `'buffer'`, + * the filenames returned will be passed as `Buffer` objects. + * + * If `options.withFileTypes` is set to `true`, the result will contain `fs.Dirent` objects. + * @since v0.1.21 + */ + export function readdirSync( + path: PathLike, + options?: + | { + encoding: BufferEncoding | null; + withFileTypes?: false | undefined; + } + | BufferEncoding + | null + ): string[]; + /** + * Synchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function readdirSync( + path: PathLike, + options: + | { + encoding: 'buffer'; + withFileTypes?: false | undefined; + } + | 'buffer' + ): Buffer[]; + /** + * Synchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function readdirSync( + path: PathLike, + options?: + | (ObjectEncodingOptions & { + withFileTypes?: false | undefined; + }) + | BufferEncoding + | null + ): string[] | Buffer[]; + /** + * Synchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options If called with `withFileTypes: true` the result data will be an array of Dirent. + */ + export function readdirSync( + path: PathLike, + options: ObjectEncodingOptions & { + withFileTypes: true; + } + ): Dirent[]; + /** + * Closes the file descriptor. No arguments other than a possible exception are + * given to the completion callback. + * + * Calling `fs.close()` on any file descriptor (`fd`) that is currently in use + * through any other `fs` operation may lead to undefined behavior. + * + * See the POSIX [`close(2)`](http://man7.org/linux/man-pages/man2/close.2.html) documentation for more detail. + * @since v0.0.2 + */ + export function close(fd: number, callback?: NoParamCallback): void; + export namespace close { + /** + * Asynchronous close(2) - close a file descriptor. + * @param fd A file descriptor. + */ + function __promisify__(fd: number): Promise; + } + /** + * Closes the file descriptor. Returns `undefined`. + * + * Calling `fs.closeSync()` on any file descriptor (`fd`) that is currently in use + * through any other `fs` operation may lead to undefined behavior. + * + * See the POSIX [`close(2)`](http://man7.org/linux/man-pages/man2/close.2.html) documentation for more detail. + * @since v0.1.21 + */ + export function closeSync(fd: number): void; + /** + * Asynchronous file open. 
See the POSIX [`open(2)`](http://man7.org/linux/man-pages/man2/open.2.html) documentation for more details. + * + * `mode` sets the file mode (permission and sticky bits), but only if the file was + * created. On Windows, only the write permission can be manipulated; see {@link chmod}. + * + * The callback gets two arguments `(err, fd)`. + * + * Some characters (`< > : " / \ | ? *`) are reserved under Windows as documented + * by [Naming Files, Paths, and Namespaces](https://docs.microsoft.com/en-us/windows/desktop/FileIO/naming-a-file). Under NTFS, if the filename contains + * a colon, Node.js will open a file system stream, as described by [this MSDN page](https://docs.microsoft.com/en-us/windows/desktop/FileIO/using-streams). + * + * Functions based on `fs.open()` exhibit this behavior as well:`fs.writeFile()`, `fs.readFile()`, etc. + * @since v0.0.2 + * @param [flags='r'] See `support of file system `flags``. + * @param [mode=0o666] + */ + export function open(path: PathLike, flags: OpenMode | undefined, mode: Mode | undefined | null, callback: (err: NodeJS.ErrnoException | null, fd: number) => void): void; + /** + * Asynchronous open(2) - open and possibly create a file. If the file is created, its mode will be `0o666`. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param [flags='r'] See `support of file system `flags``. + */ + export function open(path: PathLike, flags: OpenMode | undefined, callback: (err: NodeJS.ErrnoException | null, fd: number) => void): void; + /** + * Asynchronous open(2) - open and possibly create a file. If the file is created, its mode will be `0o666`. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + export function open(path: PathLike, callback: (err: NodeJS.ErrnoException | null, fd: number) => void): void; + + export namespace open { + /** + * Asynchronous open(2) - open and possibly create a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param mode A file mode. If a string is passed, it is parsed as an octal integer. If not supplied, defaults to `0o666`. + */ + function __promisify__(path: PathLike, flags: OpenMode, mode?: Mode | null): Promise; + } + /** + * Returns an integer representing the file descriptor. + * + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link open}. + * @since v0.1.21 + * @param [flags='r'] + * @param [mode=0o666] + */ + export function openSync(path: PathLike, flags: OpenMode, mode?: Mode | null): number; + /** + * Change the file system timestamps of the object referenced by `path`. + * + * The `atime` and `mtime` arguments follow these rules: + * + * * Values can be either numbers representing Unix epoch time in seconds,`Date`s, or a numeric string like `'123456789.0'`. + * * If the value can not be converted to a number, or is `NaN`, `Infinity` or`-Infinity`, an `Error` will be thrown. + * @since v0.4.2 + */ + export function utimes(path: PathLike, atime: TimeLike, mtime: TimeLike, callback: NoParamCallback): void; + export namespace utimes { + /** + * Asynchronously change file timestamps of the file referenced by the supplied path. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param atime The last access time. If a string is provided, it will be coerced to number. + * @param mtime The last modified time. If a string is provided, it will be coerced to number. 
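+ *
+ * A minimal sketch of the `util.promisify()`-ed form, assuming the
+ * hypothetical `./txtDir/file.txt` path from the earlier examples:
+ *
+ * ```js
+ * import { utimes } from 'fs';
+ * import { promisify } from 'util';
+ *
+ * const utimesAsync = promisify(utimes);
+ *
+ * // Numbers (Unix epoch seconds) or Date objects are both accepted.
+ * utimesAsync('./txtDir/file.txt', new Date(), new Date())
+ *   .then(() => console.log('timestamps updated'))
+ *   .catch(console.error);
+ * ```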
+ */ + function __promisify__(path: PathLike, atime: TimeLike, mtime: TimeLike): Promise; + } + /** + * Returns `undefined`. + * + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link utimes}. + * @since v0.4.2 + */ + export function utimesSync(path: PathLike, atime: TimeLike, mtime: TimeLike): void; + /** + * Change the file system timestamps of the object referenced by the supplied file + * descriptor. See {@link utimes}. + * @since v0.4.2 + */ + export function futimes(fd: number, atime: TimeLike, mtime: TimeLike, callback: NoParamCallback): void; + export namespace futimes { + /** + * Asynchronously change file timestamps of the file referenced by the supplied file descriptor. + * @param fd A file descriptor. + * @param atime The last access time. If a string is provided, it will be coerced to number. + * @param mtime The last modified time. If a string is provided, it will be coerced to number. + */ + function __promisify__(fd: number, atime: TimeLike, mtime: TimeLike): Promise; + } + /** + * Synchronous version of {@link futimes}. Returns `undefined`. + * @since v0.4.2 + */ + export function futimesSync(fd: number, atime: TimeLike, mtime: TimeLike): void; + /** + * Request that all data for the open file descriptor is flushed to the storage + * device. The specific implementation is operating system and device specific. + * Refer to the POSIX [`fsync(2)`](http://man7.org/linux/man-pages/man2/fsync.2.html) documentation for more detail. No arguments other + * than a possible exception are given to the completion callback. + * @since v0.1.96 + */ + export function fsync(fd: number, callback: NoParamCallback): void; + export namespace fsync { + /** + * Asynchronous fsync(2) - synchronize a file's in-core state with the underlying storage device. + * @param fd A file descriptor. + */ + function __promisify__(fd: number): Promise; + } + /** + * Request that all data for the open file descriptor is flushed to the storage + * device. The specific implementation is operating system and device specific. + * Refer to the POSIX [`fsync(2)`](http://man7.org/linux/man-pages/man2/fsync.2.html) documentation for more detail. Returns `undefined`. + * @since v0.1.96 + */ + export function fsyncSync(fd: number): void; + /** + * Write `buffer` to the file specified by `fd`. + * + * `offset` determines the part of the buffer to be written, and `length` is + * an integer specifying the number of bytes to write. + * + * `position` refers to the offset from the beginning of the file where this data + * should be written. If `typeof position !== 'number'`, the data will be written + * at the current position. See [`pwrite(2)`](http://man7.org/linux/man-pages/man2/pwrite.2.html). + * + * The callback will be given three arguments `(err, bytesWritten, buffer)` where`bytesWritten` specifies how many _bytes_ were written from `buffer`. + * + * If this method is invoked as its `util.promisify()` ed version, it returns + * a promise for an `Object` with `bytesWritten` and `buffer` properties. + * + * It is unsafe to use `fs.write()` multiple times on the same file without waiting + * for the callback. For this scenario, {@link createWriteStream} is + * recommended. + * + * On Linux, positional writes don't work when the file is opened in append mode. + * The kernel ignores the position argument and always appends the data to + * the end of the file. 
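+ *
+ * A minimal sketch (the file name is hypothetical):
+ *
+ * ```js
+ * import { open, write, close } from 'fs';
+ *
+ * open('./message.txt', 'w', (err, fd) => {
+ *   if (err) throw err;
+ *   const buffer = Buffer.from('Node.js');
+ *   write(fd, buffer, 0, buffer.length, 0, (err, bytesWritten) => {
+ *     if (err) throw err;
+ *     console.log(`wrote ${bytesWritten} bytes`);
+ *     close(fd, (err) => {
+ *       if (err) throw err;
+ *     });
+ *   });
+ * });
+ * ```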
+ * @since v0.0.2 + */ + export function write( + fd: number, + buffer: TBuffer, + offset: number | undefined | null, + length: number | undefined | null, + position: number | undefined | null, + callback: (err: NodeJS.ErrnoException | null, written: number, buffer: TBuffer) => void + ): void; + /** + * Asynchronously writes `buffer` to the file referenced by the supplied file descriptor. + * @param fd A file descriptor. + * @param offset The part of the buffer to be written. If not supplied, defaults to `0`. + * @param length The number of bytes to write. If not supplied, defaults to `buffer.length - offset`. + */ + export function write( + fd: number, + buffer: TBuffer, + offset: number | undefined | null, + length: number | undefined | null, + callback: (err: NodeJS.ErrnoException | null, written: number, buffer: TBuffer) => void + ): void; + /** + * Asynchronously writes `buffer` to the file referenced by the supplied file descriptor. + * @param fd A file descriptor. + * @param offset The part of the buffer to be written. If not supplied, defaults to `0`. + */ + export function write( + fd: number, + buffer: TBuffer, + offset: number | undefined | null, + callback: (err: NodeJS.ErrnoException | null, written: number, buffer: TBuffer) => void + ): void; + /** + * Asynchronously writes `buffer` to the file referenced by the supplied file descriptor. + * @param fd A file descriptor. + */ + export function write(fd: number, buffer: TBuffer, callback: (err: NodeJS.ErrnoException | null, written: number, buffer: TBuffer) => void): void; + /** + * Asynchronously writes `string` to the file referenced by the supplied file descriptor. + * @param fd A file descriptor. + * @param string A string to write. + * @param position The offset from the beginning of the file where this data should be written. If not supplied, defaults to the current position. + * @param encoding The expected string encoding. + */ + export function write( + fd: number, + string: string, + position: number | undefined | null, + encoding: BufferEncoding | undefined | null, + callback: (err: NodeJS.ErrnoException | null, written: number, str: string) => void + ): void; + /** + * Asynchronously writes `string` to the file referenced by the supplied file descriptor. + * @param fd A file descriptor. + * @param string A string to write. + * @param position The offset from the beginning of the file where this data should be written. If not supplied, defaults to the current position. + */ + export function write(fd: number, string: string, position: number | undefined | null, callback: (err: NodeJS.ErrnoException | null, written: number, str: string) => void): void; + /** + * Asynchronously writes `string` to the file referenced by the supplied file descriptor. + * @param fd A file descriptor. + * @param string A string to write. + */ + export function write(fd: number, string: string, callback: (err: NodeJS.ErrnoException | null, written: number, str: string) => void): void; + export namespace write { + /** + * Asynchronously writes `buffer` to the file referenced by the supplied file descriptor. + * @param fd A file descriptor. + * @param offset The part of the buffer to be written. If not supplied, defaults to `0`. + * @param length The number of bytes to write. If not supplied, defaults to `buffer.length - offset`. + * @param position The offset from the beginning of the file where this data should be written. If not supplied, defaults to the current position. 
+ */ + function __promisify__( + fd: number, + buffer?: TBuffer, + offset?: number, + length?: number, + position?: number | null + ): Promise<{ + bytesWritten: number; + buffer: TBuffer; + }>; + /** + * Asynchronously writes `string` to the file referenced by the supplied file descriptor. + * @param fd A file descriptor. + * @param string A string to write. + * @param position The offset from the beginning of the file where this data should be written. If not supplied, defaults to the current position. + * @param encoding The expected string encoding. + */ + function __promisify__( + fd: number, + string: string, + position?: number | null, + encoding?: BufferEncoding | null + ): Promise<{ + bytesWritten: number; + buffer: string; + }>; + } + /** + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link write}. + * @since v0.1.21 + * @return The number of bytes written. + */ + export function writeSync(fd: number, buffer: NodeJS.ArrayBufferView, offset?: number | null, length?: number | null, position?: number | null): number; + /** + * Synchronously writes `string` to the file referenced by the supplied file descriptor, returning the number of bytes written. + * @param fd A file descriptor. + * @param string A string to write. + * @param position The offset from the beginning of the file where this data should be written. If not supplied, defaults to the current position. + * @param encoding The expected string encoding. + */ + export function writeSync(fd: number, string: string, position?: number | null, encoding?: BufferEncoding | null): number; + export type ReadPosition = number | bigint; + export interface ReadSyncOptions { + /** + * @default 0 + */ + offset?: number | undefined; + /** + * @default `length of buffer` + */ + length?: number | undefined; + /** + * @default null + */ + position?: ReadPosition | null | undefined; + } + export interface ReadAsyncOptions extends ReadSyncOptions { + buffer?: TBuffer; + } + /** + * Read data from the file specified by `fd`. + * + * The callback is given the three arguments, `(err, bytesRead, buffer)`. + * + * If the file is not modified concurrently, the end-of-file is reached when the + * number of bytes read is zero. + * + * If this method is invoked as its `util.promisify()` ed version, it returns + * a promise for an `Object` with `bytesRead` and `buffer` properties. + * @since v0.0.2 + * @param buffer The buffer that the data will be written to. + * @param offset The position in `buffer` to write the data to. + * @param length The number of bytes to read. + * @param position Specifies where to begin reading from in the file. If `position` is `null` or `-1 `, data will be read from the current file position, and the file position will be updated. If + * `position` is an integer, the file position will be unchanged. + */ + export function read( + fd: number, + buffer: TBuffer, + offset: number, + length: number, + position: ReadPosition | null, + callback: (err: NodeJS.ErrnoException | null, bytesRead: number, buffer: TBuffer) => void + ): void; + /** + * Similar to the above `fs.read` function, this version takes an optional `options` object. 
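+ *
+ * A minimal sketch of the options form (the file name and sizes are
+ * hypothetical):
+ *
+ * ```js
+ * import { open, read, close } from 'fs';
+ *
+ * open('./message.txt', 'r', (err, fd) => {
+ *   if (err) throw err;
+ *   read(fd, { buffer: Buffer.alloc(16), offset: 0, length: 16, position: 0 },
+ *     (err, bytesRead, buffer) => {
+ *       if (err) throw err;
+ *       console.log(bytesRead, buffer.toString('utf8', 0, bytesRead));
+ *       close(fd, (err) => {
+ *         if (err) throw err;
+ *       });
+ *     });
+ * });
+ * ```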
+ * If not otherwise specified in an `options` object, + * `buffer` defaults to `Buffer.alloc(16384)`, + * `offset` defaults to `0`, + * `length` defaults to `buffer.byteLength`, `- offset` as of Node 17.6.0 + * `position` defaults to `null` + * @since v12.17.0, 13.11.0 + */ + export function read( + fd: number, + options: ReadAsyncOptions, + callback: (err: NodeJS.ErrnoException | null, bytesRead: number, buffer: TBuffer) => void + ): void; + export function read(fd: number, callback: (err: NodeJS.ErrnoException | null, bytesRead: number, buffer: NodeJS.ArrayBufferView) => void): void; + export namespace read { + /** + * @param fd A file descriptor. + * @param buffer The buffer that the data will be written to. + * @param offset The offset in the buffer at which to start writing. + * @param length The number of bytes to read. + * @param position The offset from the beginning of the file from which data should be read. If `null`, data will be read from the current position. + */ + function __promisify__( + fd: number, + buffer: TBuffer, + offset: number, + length: number, + position: number | null + ): Promise<{ + bytesRead: number; + buffer: TBuffer; + }>; + function __promisify__( + fd: number, + options: ReadAsyncOptions + ): Promise<{ + bytesRead: number; + buffer: TBuffer; + }>; + function __promisify__(fd: number): Promise<{ + bytesRead: number; + buffer: NodeJS.ArrayBufferView; + }>; + } + /** + * Returns the number of `bytesRead`. + * + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link read}. + * @since v0.1.21 + */ + export function readSync(fd: number, buffer: NodeJS.ArrayBufferView, offset: number, length: number, position: ReadPosition | null): number; + /** + * Similar to the above `fs.readSync` function, this version takes an optional `options` object. + * If no `options` object is specified, it will default with the above values. + */ + export function readSync(fd: number, buffer: NodeJS.ArrayBufferView, opts?: ReadSyncOptions): number; + /** + * Asynchronously reads the entire contents of a file. + * + * ```js + * import { readFile } from 'fs'; + * + * readFile('/etc/passwd', (err, data) => { + * if (err) throw err; + * console.log(data); + * }); + * ``` + * + * The callback is passed two arguments `(err, data)`, where `data` is the + * contents of the file. + * + * If no encoding is specified, then the raw buffer is returned. + * + * If `options` is a string, then it specifies the encoding: + * + * ```js + * import { readFile } from 'fs'; + * + * readFile('/etc/passwd', 'utf8', callback); + * ``` + * + * When the path is a directory, the behavior of `fs.readFile()` and {@link readFileSync} is platform-specific. On macOS, Linux, and Windows, an + * error will be returned. On FreeBSD, a representation of the directory's contents + * will be returned. + * + * ```js + * import { readFile } from 'fs'; + * + * // macOS, Linux, and Windows + * readFile('', (err, data) => { + * // => [Error: EISDIR: illegal operation on a directory, read ] + * }); + * + * // FreeBSD + * readFile('', (err, data) => { + * // => null, + * }); + * ``` + * + * It is possible to abort an ongoing request using an `AbortSignal`. If a + * request is aborted the callback is called with an `AbortError`: + * + * ```js + * import { readFile } from 'fs'; + * + * const controller = new AbortController(); + * const signal = controller.signal; + * readFile(fileInfo[0].name, { signal }, (err, buf) => { + * // ... 
+ * }); + * // When you want to abort the request + * controller.abort(); + * ``` + * + * The `fs.readFile()` function buffers the entire file. To minimize memory costs, + * when possible prefer streaming via `fs.createReadStream()`. + * + * Aborting an ongoing request does not abort individual operating + * system requests but rather the internal buffering `fs.readFile` performs. + * @since v0.1.29 + * @param path filename or file descriptor + */ + export function readFile( + path: PathOrFileDescriptor, + options: + | ({ + encoding?: null | undefined; + flag?: string | undefined; + } & Abortable) + | undefined + | null, + callback: (err: NodeJS.ErrnoException | null, data: Buffer) => void + ): void; + /** + * Asynchronously reads the entire contents of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param options Either the encoding for the result, or an object that contains the encoding and an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + export function readFile( + path: PathOrFileDescriptor, + options: + | ({ + encoding: BufferEncoding; + flag?: string | undefined; + } & Abortable) + | BufferEncoding, + callback: (err: NodeJS.ErrnoException | null, data: string) => void + ): void; + /** + * Asynchronously reads the entire contents of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param options Either the encoding for the result, or an object that contains the encoding and an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + export function readFile( + path: PathOrFileDescriptor, + options: + | (ObjectEncodingOptions & { + flag?: string | undefined; + } & Abortable) + | BufferEncoding + | undefined + | null, + callback: (err: NodeJS.ErrnoException | null, data: string | Buffer) => void + ): void; + /** + * Asynchronously reads the entire contents of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + */ + export function readFile(path: PathOrFileDescriptor, callback: (err: NodeJS.ErrnoException | null, data: Buffer) => void): void; + export namespace readFile { + /** + * Asynchronously reads the entire contents of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param options An object that may contain an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + function __promisify__( + path: PathOrFileDescriptor, + options?: { + encoding?: null | undefined; + flag?: string | undefined; + } | null + ): Promise; + /** + * Asynchronously reads the entire contents of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * URL support is _experimental_. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param options Either the encoding for the result, or an object that contains the encoding and an optional flag. + * If a flag is not provided, it defaults to `'r'`. 
+ */ + function __promisify__( + path: PathOrFileDescriptor, + options: + | { + encoding: BufferEncoding; + flag?: string | undefined; + } + | BufferEncoding + ): Promise; + /** + * Asynchronously reads the entire contents of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * URL support is _experimental_. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param options Either the encoding for the result, or an object that contains the encoding and an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + function __promisify__( + path: PathOrFileDescriptor, + options?: + | (ObjectEncodingOptions & { + flag?: string | undefined; + }) + | BufferEncoding + | null + ): Promise; + } + /** + * Returns the contents of the `path`. + * + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link readFile}. + * + * If the `encoding` option is specified then this function returns a + * string. Otherwise it returns a buffer. + * + * Similar to {@link readFile}, when the path is a directory, the behavior of`fs.readFileSync()` is platform-specific. + * + * ```js + * import { readFileSync } from 'fs'; + * + * // macOS, Linux, and Windows + * readFileSync(''); + * // => [Error: EISDIR: illegal operation on a directory, read ] + * + * // FreeBSD + * readFileSync(''); // => + * ``` + * @since v0.1.8 + * @param path filename or file descriptor + */ + export function readFileSync( + path: PathOrFileDescriptor, + options?: { + encoding?: null | undefined; + flag?: string | undefined; + } | null + ): Buffer; + /** + * Synchronously reads the entire contents of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param options Either the encoding for the result, or an object that contains the encoding and an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + export function readFileSync( + path: PathOrFileDescriptor, + options: + | { + encoding: BufferEncoding; + flag?: string | undefined; + } + | BufferEncoding + ): string; + /** + * Synchronously reads the entire contents of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param options Either the encoding for the result, or an object that contains the encoding and an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + export function readFileSync( + path: PathOrFileDescriptor, + options?: + | (ObjectEncodingOptions & { + flag?: string | undefined; + }) + | BufferEncoding + | null + ): string | Buffer; + export type WriteFileOptions = + | (ObjectEncodingOptions & + Abortable & { + mode?: Mode | undefined; + flag?: string | undefined; + }) + | BufferEncoding + | null; + /** + * When `file` is a filename, asynchronously writes data to the file, replacing the + * file if it already exists. `data` can be a string or a buffer. + * + * When `file` is a file descriptor, the behavior is similar to calling`fs.write()` directly (which is recommended). See the notes below on using + * a file descriptor. + * + * The `encoding` option is ignored if `data` is a buffer. + * + * The `mode` option only affects the newly created file. See {@link open} for more details. 
+ * + * ```js + * import { writeFile } from 'fs'; + * import { Buffer } from 'buffer'; + * + * const data = new Uint8Array(Buffer.from('Hello Node.js')); + * writeFile('message.txt', data, (err) => { + * if (err) throw err; + * console.log('The file has been saved!'); + * }); + * ``` + * + * If `options` is a string, then it specifies the encoding: + * + * ```js + * import { writeFile } from 'fs'; + * + * writeFile('message.txt', 'Hello Node.js', 'utf8', callback); + * ``` + * + * It is unsafe to use `fs.writeFile()` multiple times on the same file without + * waiting for the callback. For this scenario, {@link createWriteStream} is + * recommended. + * + * Similarly to `fs.readFile` \- `fs.writeFile` is a convenience method that + * performs multiple `write` calls internally to write the buffer passed to it. + * For performance sensitive code consider using {@link createWriteStream}. + * + * It is possible to use an `AbortSignal` to cancel an `fs.writeFile()`. + * Cancelation is "best effort", and some amount of data is likely still + * to be written. + * + * ```js + * import { writeFile } from 'fs'; + * import { Buffer } from 'buffer'; + * + * const controller = new AbortController(); + * const { signal } = controller; + * const data = new Uint8Array(Buffer.from('Hello Node.js')); + * writeFile('message.txt', data, { signal }, (err) => { + * // When a request is aborted - the callback is called with an AbortError + * }); + * // When the request should be aborted + * controller.abort(); + * ``` + * + * Aborting an ongoing request does not abort individual operating + * system requests but rather the internal buffering `fs.writeFile` performs. + * @since v0.1.29 + * @param file filename or file descriptor + */ + export function writeFile(file: PathOrFileDescriptor, data: string | NodeJS.ArrayBufferView, options: WriteFileOptions, callback: NoParamCallback): void; + /** + * Asynchronously writes data to a file, replacing the file if it already exists. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param data The data to write. If something other than a Buffer or Uint8Array is provided, the value is coerced to a string. + */ + export function writeFile(path: PathOrFileDescriptor, data: string | NodeJS.ArrayBufferView, callback: NoParamCallback): void; + export namespace writeFile { + /** + * Asynchronously writes data to a file, replacing the file if it already exists. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * URL support is _experimental_. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param data The data to write. If something other than a Buffer or Uint8Array is provided, the value is coerced to a string. + * @param options Either the encoding for the file, or an object optionally specifying the encoding, file mode, and flag. + * If `encoding` is not supplied, the default of `'utf8'` is used. + * If `mode` is not supplied, the default of `0o666` is used. + * If `mode` is a string, it is parsed as an octal integer. + * If `flag` is not supplied, the default of `'w'` is used. + */ + function __promisify__(path: PathOrFileDescriptor, data: string | NodeJS.ArrayBufferView, options?: WriteFileOptions): Promise; + } + /** + * Returns `undefined`. + * + * The `mode` option only affects the newly created file. See {@link open} for more details. 
+ * + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link writeFile}. + * @since v0.1.29 + * @param file filename or file descriptor + */ + export function writeFileSync(file: PathOrFileDescriptor, data: string | NodeJS.ArrayBufferView, options?: WriteFileOptions): void; + /** + * Asynchronously append data to a file, creating the file if it does not yet + * exist. `data` can be a string or a `Buffer`. + * + * The `mode` option only affects the newly created file. See {@link open} for more details. + * + * ```js + * import { appendFile } from 'fs'; + * + * appendFile('message.txt', 'data to append', (err) => { + * if (err) throw err; + * console.log('The "data to append" was appended to file!'); + * }); + * ``` + * + * If `options` is a string, then it specifies the encoding: + * + * ```js + * import { appendFile } from 'fs'; + * + * appendFile('message.txt', 'data to append', 'utf8', callback); + * ``` + * + * The `path` may be specified as a numeric file descriptor that has been opened + * for appending (using `fs.open()` or `fs.openSync()`). The file descriptor will + * not be closed automatically. + * + * ```js + * import { open, close, appendFile } from 'fs'; + * + * function closeFd(fd) { + * close(fd, (err) => { + * if (err) throw err; + * }); + * } + * + * open('message.txt', 'a', (err, fd) => { + * if (err) throw err; + * + * try { + * appendFile(fd, 'data to append', 'utf8', (err) => { + * closeFd(fd); + * if (err) throw err; + * }); + * } catch (err) { + * closeFd(fd); + * throw err; + * } + * }); + * ``` + * @since v0.6.7 + * @param path filename or file descriptor + */ + export function appendFile(path: PathOrFileDescriptor, data: string | Uint8Array, options: WriteFileOptions, callback: NoParamCallback): void; + /** + * Asynchronously append data to a file, creating the file if it does not exist. + * @param file A path to a file. If a URL is provided, it must use the `file:` protocol. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param data The data to write. If something other than a Buffer or Uint8Array is provided, the value is coerced to a string. + */ + export function appendFile(file: PathOrFileDescriptor, data: string | Uint8Array, callback: NoParamCallback): void; + export namespace appendFile { + /** + * Asynchronously append data to a file, creating the file if it does not exist. + * @param file A path to a file. If a URL is provided, it must use the `file:` protocol. + * URL support is _experimental_. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param data The data to write. If something other than a Buffer or Uint8Array is provided, the value is coerced to a string. + * @param options Either the encoding for the file, or an object optionally specifying the encoding, file mode, and flag. + * If `encoding` is not supplied, the default of `'utf8'` is used. + * If `mode` is not supplied, the default of `0o666` is used. + * If `mode` is a string, it is parsed as an octal integer. + * If `flag` is not supplied, the default of `'a'` is used. + */ + function __promisify__(file: PathOrFileDescriptor, data: string | Uint8Array, options?: WriteFileOptions): Promise; + } + /** + * Synchronously append data to a file, creating the file if it does not yet + * exist. `data` can be a string or a `Buffer`. + * + * The `mode` option only affects the newly created file. See {@link open} for more details. 
+ * + * ```js + * import { appendFileSync } from 'fs'; + * + * try { + * appendFileSync('message.txt', 'data to append'); + * console.log('The "data to append" was appended to file!'); + * } catch (err) { + * // Handle the error + * } + * ``` + * + * If `options` is a string, then it specifies the encoding: + * + * ```js + * import { appendFileSync } from 'fs'; + * + * appendFileSync('message.txt', 'data to append', 'utf8'); + * ``` + * + * The `path` may be specified as a numeric file descriptor that has been opened + * for appending (using `fs.open()` or `fs.openSync()`). The file descriptor will + * not be closed automatically. + * + * ```js + * import { openSync, closeSync, appendFileSync } from 'fs'; + * + * let fd; + * + * try { + * fd = openSync('message.txt', 'a'); + * appendFileSync(fd, 'data to append', 'utf8'); + * } catch (err) { + * // Handle the error + * } finally { + * if (fd !== undefined) + * closeSync(fd); + * } + * ``` + * @since v0.6.7 + * @param path filename or file descriptor + */ + export function appendFileSync(path: PathOrFileDescriptor, data: string | Uint8Array, options?: WriteFileOptions): void; + /** + * Watch for changes on `filename`. The callback `listener` will be called each + * time the file is accessed. + * + * The `options` argument may be omitted. If provided, it should be an object. The`options` object may contain a boolean named `persistent` that indicates + * whether the process should continue to run as long as files are being watched. + * The `options` object may specify an `interval` property indicating how often the + * target should be polled in milliseconds. + * + * The `listener` gets two arguments the current stat object and the previous + * stat object: + * + * ```js + * import { watchFile } from 'fs'; + * + * watchFile('message.text', (curr, prev) => { + * console.log(`the current mtime is: ${curr.mtime}`); + * console.log(`the previous mtime was: ${prev.mtime}`); + * }); + * ``` + * + * These stat objects are instances of `fs.Stat`. If the `bigint` option is `true`, + * the numeric values in these objects are specified as `BigInt`s. + * + * To be notified when the file was modified, not just accessed, it is necessary + * to compare `curr.mtimeMs` and `prev.mtimeMs`. + * + * When an `fs.watchFile` operation results in an `ENOENT` error, it + * will invoke the listener once, with all the fields zeroed (or, for dates, the + * Unix Epoch). If the file is created later on, the listener will be called + * again, with the latest stat objects. This is a change in functionality since + * v0.10. + * + * Using {@link watch} is more efficient than `fs.watchFile` and`fs.unwatchFile`. `fs.watch` should be used instead of `fs.watchFile` and`fs.unwatchFile` when possible. + * + * When a file being watched by `fs.watchFile()` disappears and reappears, + * then the contents of `previous` in the second callback event (the file's + * reappearance) will be the same as the contents of `previous` in the first + * callback event (its disappearance). + * + * This happens when: + * + * * the file is deleted, followed by a restore + * * the file is renamed and then renamed a second time back to its original name + * @since v0.1.31 + */ + export interface WatchFileOptions { + bigint?: boolean | undefined; + persistent?: boolean | undefined; + interval?: number | undefined; + } + /** + * Watch for changes on `filename`. The callback `listener` will be called each + * time the file is accessed. + * + * The `options` argument may be omitted. 
If provided, it should be an object. The`options` object may contain a boolean named `persistent` that indicates + * whether the process should continue to run as long as files are being watched. + * The `options` object may specify an `interval` property indicating how often the + * target should be polled in milliseconds. + * + * The `listener` gets two arguments the current stat object and the previous + * stat object: + * + * ```js + * import { watchFile } from 'fs'; + * + * watchFile('message.text', (curr, prev) => { + * console.log(`the current mtime is: ${curr.mtime}`); + * console.log(`the previous mtime was: ${prev.mtime}`); + * }); + * ``` + * + * These stat objects are instances of `fs.Stat`. If the `bigint` option is `true`, + * the numeric values in these objects are specified as `BigInt`s. + * + * To be notified when the file was modified, not just accessed, it is necessary + * to compare `curr.mtimeMs` and `prev.mtimeMs`. + * + * When an `fs.watchFile` operation results in an `ENOENT` error, it + * will invoke the listener once, with all the fields zeroed (or, for dates, the + * Unix Epoch). If the file is created later on, the listener will be called + * again, with the latest stat objects. This is a change in functionality since + * v0.10. + * + * Using {@link watch} is more efficient than `fs.watchFile` and`fs.unwatchFile`. `fs.watch` should be used instead of `fs.watchFile` and`fs.unwatchFile` when possible. + * + * When a file being watched by `fs.watchFile()` disappears and reappears, + * then the contents of `previous` in the second callback event (the file's + * reappearance) will be the same as the contents of `previous` in the first + * callback event (its disappearance). + * + * This happens when: + * + * * the file is deleted, followed by a restore + * * the file is renamed and then renamed a second time back to its original name + * @since v0.1.31 + */ + export function watchFile( + filename: PathLike, + options: + | (WatchFileOptions & { + bigint?: false | undefined; + }) + | undefined, + listener: (curr: Stats, prev: Stats) => void + ): StatWatcher; + export function watchFile( + filename: PathLike, + options: + | (WatchFileOptions & { + bigint: true; + }) + | undefined, + listener: (curr: BigIntStats, prev: BigIntStats) => void + ): StatWatcher; + /** + * Watch for changes on `filename`. The callback `listener` will be called each time the file is accessed. + * @param filename A path to a file or directory. If a URL is provided, it must use the `file:` protocol. + */ + export function watchFile(filename: PathLike, listener: (curr: Stats, prev: Stats) => void): StatWatcher; + /** + * Stop watching for changes on `filename`. If `listener` is specified, only that + * particular listener is removed. Otherwise, _all_ listeners are removed, + * effectively stopping watching of `filename`. + * + * Calling `fs.unwatchFile()` with a filename that is not being watched is a + * no-op, not an error. + * + * Using {@link watch} is more efficient than `fs.watchFile()` and`fs.unwatchFile()`. `fs.watch()` should be used instead of `fs.watchFile()`and `fs.unwatchFile()` when possible. 
+ * @since v0.1.31 + * @param listener Optional, a listener previously attached using `fs.watchFile()` + */ + export function unwatchFile(filename: PathLike, listener?: (curr: Stats, prev: Stats) => void): void; + export interface WatchOptions extends Abortable { + encoding?: BufferEncoding | 'buffer' | undefined; + persistent?: boolean | undefined; + recursive?: boolean | undefined; + } + export type WatchEventType = 'rename' | 'change'; + export type WatchListener = (event: WatchEventType, filename: T) => void; + /** + * Watch for changes on `filename`, where `filename` is either a file or a + * directory. + * + * The second argument is optional. If `options` is provided as a string, it + * specifies the `encoding`. Otherwise `options` should be passed as an object. + * + * The listener callback gets two arguments `(eventType, filename)`. `eventType`is either `'rename'` or `'change'`, and `filename` is the name of the file + * which triggered the event. + * + * On most platforms, `'rename'` is emitted whenever a filename appears or + * disappears in the directory. + * + * The listener callback is attached to the `'change'` event fired by `fs.FSWatcher`, but it is not the same thing as the `'change'` value of`eventType`. + * + * If a `signal` is passed, aborting the corresponding AbortController will close + * the returned `fs.FSWatcher`. + * @since v0.5.10 + * @param listener + */ + export function watch( + filename: PathLike, + options: + | (WatchOptions & { + encoding: 'buffer'; + }) + | 'buffer', + listener?: WatchListener + ): FSWatcher; + /** + * Watch for changes on `filename`, where `filename` is either a file or a directory, returning an `FSWatcher`. + * @param filename A path to a file or directory. If a URL is provided, it must use the `file:` protocol. + * @param options Either the encoding for the filename provided to the listener, or an object optionally specifying encoding, persistent, and recursive options. + * If `encoding` is not supplied, the default of `'utf8'` is used. + * If `persistent` is not supplied, the default of `true` is used. + * If `recursive` is not supplied, the default of `false` is used. + */ + export function watch(filename: PathLike, options?: WatchOptions | BufferEncoding | null, listener?: WatchListener): FSWatcher; + /** + * Watch for changes on `filename`, where `filename` is either a file or a directory, returning an `FSWatcher`. + * @param filename A path to a file or directory. If a URL is provided, it must use the `file:` protocol. + * @param options Either the encoding for the filename provided to the listener, or an object optionally specifying encoding, persistent, and recursive options. + * If `encoding` is not supplied, the default of `'utf8'` is used. + * If `persistent` is not supplied, the default of `true` is used. + * If `recursive` is not supplied, the default of `false` is used. + */ + export function watch(filename: PathLike, options: WatchOptions | string, listener?: WatchListener): FSWatcher; + /** + * Watch for changes on `filename`, where `filename` is either a file or a directory, returning an `FSWatcher`. + * @param filename A path to a file or directory. If a URL is provided, it must use the `file:` protocol. + */ + export function watch(filename: PathLike, listener?: WatchListener): FSWatcher; + /** + * Test whether or not the given path exists by checking with the file system. 
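A minimal sketch of the `fs.watch` overloads declared above (the watched path is hypothetical):

```js
import { watch } from 'fs';

// Watch a (hypothetical) directory; eventType is 'rename' or 'change'.
const watcher = watch('some-directory', { persistent: true }, (eventType, filename) => {
  console.log(`${eventType}: ${filename}`);
});

// Stop watching after ten seconds.
setTimeout(() => watcher.close(), 10_000);
```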
+ * Then call the `callback` argument with either true or false: + * + * ```js + * import { exists } from 'fs'; + * + * exists('/etc/passwd', (e) => { + * console.log(e ? 'it exists' : 'no passwd!'); + * }); + * ``` + * + * **The parameters for this callback are not consistent with other Node.js** + * **callbacks.** Normally, the first parameter to a Node.js callback is an `err`parameter, optionally followed by other parameters. The `fs.exists()` callback + * has only one boolean parameter. This is one reason `fs.access()` is recommended + * instead of `fs.exists()`. + * + * Using `fs.exists()` to check for the existence of a file before calling`fs.open()`, `fs.readFile()` or `fs.writeFile()` is not recommended. Doing + * so introduces a race condition, since other processes may change the file's + * state between the two calls. Instead, user code should open/read/write the + * file directly and handle the error raised if the file does not exist. + * + * **write (NOT RECOMMENDED)** + * + * ```js + * import { exists, open, close } from 'fs'; + * + * exists('myfile', (e) => { + * if (e) { + * console.error('myfile already exists'); + * } else { + * open('myfile', 'wx', (err, fd) => { + * if (err) throw err; + * + * try { + * writeMyData(fd); + * } finally { + * close(fd, (err) => { + * if (err) throw err; + * }); + * } + * }); + * } + * }); + * ``` + * + * **write (RECOMMENDED)** + * + * ```js + * import { open, close } from 'fs'; + * open('myfile', 'wx', (err, fd) => { + * if (err) { + * if (err.code === 'EEXIST') { + * console.error('myfile already exists'); + * return; + * } + * + * throw err; + * } + * + * try { + * writeMyData(fd); + * } finally { + * close(fd, (err) => { + * if (err) throw err; + * }); + * } + * }); + * ``` + * + * **read (NOT RECOMMENDED)** + * + * ```js + * import { open, close, exists } from 'fs'; + * + * exists('myfile', (e) => { + * if (e) { + * open('myfile', 'r', (err, fd) => { + * if (err) throw err; + * + * try { + * readMyData(fd); + * } finally { + * close(fd, (err) => { + * if (err) throw err; + * }); + * } + * }); + * } else { + * console.error('myfile does not exist'); + * } + * }); + * ``` + * + * **read (RECOMMENDED)** + * + * ```js + * import { open, close } from 'fs'; + * + * open('myfile', 'r', (err, fd) => { + * if (err) { + * if (err.code === 'ENOENT') { + * console.error('myfile does not exist'); + * return; + * } + * + * throw err; + * } + * + * try { + * readMyData(fd); + * } finally { + * close(fd, (err) => { + * if (err) throw err; + * }); + * } + * }); + * ``` + * + * The "not recommended" examples above check for existence and then use the + * file; the "recommended" examples are better because they use the file directly + * and handle the error, if any. + * + * In general, check for the existence of a file only if the file won’t be + * used directly, for example when its existence is a signal from another + * process. + * @since v0.0.2 + * @deprecated Since v1.0.0 - Use {@link stat} or {@link access} instead. + */ + export function exists(path: PathLike, callback: (exists: boolean) => void): void; + /** @deprecated */ + export namespace exists { + /** + * @param path A path to a file or directory. If a URL is provided, it must use the `file:` protocol. + * URL support is _experimental_. + */ + function __promisify__(path: PathLike): Promise; + } + /** + * Returns `true` if the path exists, `false` otherwise. + * + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link exists}. 
+ * + * `fs.exists()` is deprecated, but `fs.existsSync()` is not. The `callback`parameter to `fs.exists()` accepts parameters that are inconsistent with other + * Node.js callbacks. `fs.existsSync()` does not use a callback. + * + * ```js + * import { existsSync } from 'fs'; + * + * if (existsSync('/etc/passwd')) + * console.log('The path exists.'); + * ``` + * @since v0.1.21 + */ + export function existsSync(path: PathLike): boolean; + export namespace constants { + // File Access Constants + /** Constant for fs.access(). File is visible to the calling process. */ + const F_OK: number; + /** Constant for fs.access(). File can be read by the calling process. */ + const R_OK: number; + /** Constant for fs.access(). File can be written by the calling process. */ + const W_OK: number; + /** Constant for fs.access(). File can be executed by the calling process. */ + const X_OK: number; + // File Copy Constants + /** Constant for fs.copyFile. Flag indicating the destination file should not be overwritten if it already exists. */ + const COPYFILE_EXCL: number; + /** + * Constant for fs.copyFile. copy operation will attempt to create a copy-on-write reflink. + * If the underlying platform does not support copy-on-write, then a fallback copy mechanism is used. + */ + const COPYFILE_FICLONE: number; + /** + * Constant for fs.copyFile. Copy operation will attempt to create a copy-on-write reflink. + * If the underlying platform does not support copy-on-write, then the operation will fail with an error. + */ + const COPYFILE_FICLONE_FORCE: number; + // File Open Constants + /** Constant for fs.open(). Flag indicating to open a file for read-only access. */ + const O_RDONLY: number; + /** Constant for fs.open(). Flag indicating to open a file for write-only access. */ + const O_WRONLY: number; + /** Constant for fs.open(). Flag indicating to open a file for read-write access. */ + const O_RDWR: number; + /** Constant for fs.open(). Flag indicating to create the file if it does not already exist. */ + const O_CREAT: number; + /** Constant for fs.open(). Flag indicating that opening a file should fail if the O_CREAT flag is set and the file already exists. */ + const O_EXCL: number; + /** + * Constant for fs.open(). Flag indicating that if path identifies a terminal device, + * opening the path shall not cause that terminal to become the controlling terminal for the process + * (if the process does not already have one). + */ + const O_NOCTTY: number; + /** Constant for fs.open(). Flag indicating that if the file exists and is a regular file, and the file is opened successfully for write access, its length shall be truncated to zero. */ + const O_TRUNC: number; + /** Constant for fs.open(). Flag indicating that data will be appended to the end of the file. */ + const O_APPEND: number; + /** Constant for fs.open(). Flag indicating that the open should fail if the path is not a directory. */ + const O_DIRECTORY: number; + /** + * constant for fs.open(). + * Flag indicating reading accesses to the file system will no longer result in + * an update to the atime information associated with the file. + * This flag is available on Linux operating systems only. + */ + const O_NOATIME: number; + /** Constant for fs.open(). Flag indicating that the open should fail if the path is a symbolic link. */ + const O_NOFOLLOW: number; + /** Constant for fs.open(). Flag indicating that the file is opened for synchronous I/O. */ + const O_SYNC: number; + /** Constant for fs.open(). 
Flag indicating that the file is opened for synchronous I/O with write operations waiting for data integrity. */ + const O_DSYNC: number; + /** Constant for fs.open(). Flag indicating to open the symbolic link itself rather than the resource it is pointing to. */ + const O_SYMLINK: number; + /** Constant for fs.open(). When set, an attempt will be made to minimize caching effects of file I/O. */ + const O_DIRECT: number; + /** Constant for fs.open(). Flag indicating to open the file in nonblocking mode when possible. */ + const O_NONBLOCK: number; + // File Type Constants + /** Constant for fs.Stats mode property for determining a file's type. Bit mask used to extract the file type code. */ + const S_IFMT: number; + /** Constant for fs.Stats mode property for determining a file's type. File type constant for a regular file. */ + const S_IFREG: number; + /** Constant for fs.Stats mode property for determining a file's type. File type constant for a directory. */ + const S_IFDIR: number; + /** Constant for fs.Stats mode property for determining a file's type. File type constant for a character-oriented device file. */ + const S_IFCHR: number; + /** Constant for fs.Stats mode property for determining a file's type. File type constant for a block-oriented device file. */ + const S_IFBLK: number; + /** Constant for fs.Stats mode property for determining a file's type. File type constant for a FIFO/pipe. */ + const S_IFIFO: number; + /** Constant for fs.Stats mode property for determining a file's type. File type constant for a symbolic link. */ + const S_IFLNK: number; + /** Constant for fs.Stats mode property for determining a file's type. File type constant for a socket. */ + const S_IFSOCK: number; + // File Mode Constants + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable, writable and executable by owner. */ + const S_IRWXU: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable by owner. */ + const S_IRUSR: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating writable by owner. */ + const S_IWUSR: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating executable by owner. */ + const S_IXUSR: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable, writable and executable by group. */ + const S_IRWXG: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable by group. */ + const S_IRGRP: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating writable by group. */ + const S_IWGRP: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating executable by group. */ + const S_IXGRP: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable, writable and executable by others. */ + const S_IRWXO: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable by others. */ + const S_IROTH: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating writable by others. 
*/ + const S_IWOTH: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating executable by others. */ + const S_IXOTH: number; + /** + * When set, a memory file mapping is used to access the file. This flag + * is available on Windows operating systems only. On other operating systems, + * this flag is ignored. + */ + const UV_FS_O_FILEMAP: number; + } + /** + * Tests a user's permissions for the file or directory specified by `path`. + * The `mode` argument is an optional integer that specifies the accessibility + * checks to be performed. `mode` should be either the value `fs.constants.F_OK`or a mask consisting of the bitwise OR of any of `fs.constants.R_OK`,`fs.constants.W_OK`, and `fs.constants.X_OK` + * (e.g.`fs.constants.W_OK | fs.constants.R_OK`). Check `File access constants` for + * possible values of `mode`. + * + * The final argument, `callback`, is a callback function that is invoked with + * a possible error argument. If any of the accessibility checks fail, the error + * argument will be an `Error` object. The following examples check if`package.json` exists, and if it is readable or writable. + * + * ```js + * import { access, constants } from 'fs'; + * + * const file = 'package.json'; + * + * // Check if the file exists in the current directory. + * access(file, constants.F_OK, (err) => { + * console.log(`${file} ${err ? 'does not exist' : 'exists'}`); + * }); + * + * // Check if the file is readable. + * access(file, constants.R_OK, (err) => { + * console.log(`${file} ${err ? 'is not readable' : 'is readable'}`); + * }); + * + * // Check if the file is writable. + * access(file, constants.W_OK, (err) => { + * console.log(`${file} ${err ? 'is not writable' : 'is writable'}`); + * }); + * + * // Check if the file is readable and writable. + * access(file, constants.R_OK | constants.W_OK, (err) => { + * console.log(`${file} ${err ? 'is not' : 'is'} readable and writable`); + * }); + * ``` + * + * Do not use `fs.access()` to check for the accessibility of a file before calling`fs.open()`, `fs.readFile()` or `fs.writeFile()`. Doing + * so introduces a race condition, since other processes may change the file's + * state between the two calls. Instead, user code should open/read/write the + * file directly and handle the error raised if the file is not accessible. 
+ * + * **write (NOT RECOMMENDED)** + * + * ```js + * import { access, open, close } from 'fs'; + * + * access('myfile', (err) => { + * if (!err) { + * console.error('myfile already exists'); + * return; + * } + * + * open('myfile', 'wx', (err, fd) => { + * if (err) throw err; + * + * try { + * writeMyData(fd); + * } finally { + * close(fd, (err) => { + * if (err) throw err; + * }); + * } + * }); + * }); + * ``` + * + * **write (RECOMMENDED)** + * + * ```js + * import { open, close } from 'fs'; + * + * open('myfile', 'wx', (err, fd) => { + * if (err) { + * if (err.code === 'EEXIST') { + * console.error('myfile already exists'); + * return; + * } + * + * throw err; + * } + * + * try { + * writeMyData(fd); + * } finally { + * close(fd, (err) => { + * if (err) throw err; + * }); + * } + * }); + * ``` + * + * **read (NOT RECOMMENDED)** + * + * ```js + * import { access, open, close } from 'fs'; + * access('myfile', (err) => { + * if (err) { + * if (err.code === 'ENOENT') { + * console.error('myfile does not exist'); + * return; + * } + * + * throw err; + * } + * + * open('myfile', 'r', (err, fd) => { + * if (err) throw err; + * + * try { + * readMyData(fd); + * } finally { + * close(fd, (err) => { + * if (err) throw err; + * }); + * } + * }); + * }); + * ``` + * + * **read (RECOMMENDED)** + * + * ```js + * import { open, close } from 'fs'; + * + * open('myfile', 'r', (err, fd) => { + * if (err) { + * if (err.code === 'ENOENT') { + * console.error('myfile does not exist'); + * return; + * } + * + * throw err; + * } + * + * try { + * readMyData(fd); + * } finally { + * close(fd, (err) => { + * if (err) throw err; + * }); + * } + * }); + * ``` + * + * The "not recommended" examples above check for accessibility and then use the + * file; the "recommended" examples are better because they use the file directly + * and handle the error, if any. + * + * In general, check for the accessibility of a file only if the file will not be + * used directly, for example when its accessibility is a signal from another + * process. + * + * On Windows, access-control policies (ACLs) on a directory may limit access to + * a file or directory. The `fs.access()` function, however, does not check the + * ACL and therefore may report that a path is accessible even if the ACL restricts + * the user from reading or writing to it. + * @since v0.11.15 + * @param [mode=fs.constants.F_OK] + */ + export function access(path: PathLike, mode: number | undefined, callback: NoParamCallback): void; + /** + * Asynchronously tests a user's permissions for the file specified by path. + * @param path A path to a file or directory. If a URL is provided, it must use the `file:` protocol. + */ + export function access(path: PathLike, callback: NoParamCallback): void; + export namespace access { + /** + * Asynchronously tests a user's permissions for the file specified by path. + * @param path A path to a file or directory. If a URL is provided, it must use the `file:` protocol. + * URL support is _experimental_. + */ + function __promisify__(path: PathLike, mode?: number): Promise; + } + /** + * Synchronously tests a user's permissions for the file or directory specified + * by `path`. The `mode` argument is an optional integer that specifies the + * accessibility checks to be performed. `mode` should be either the value`fs.constants.F_OK` or a mask consisting of the bitwise OR of any of`fs.constants.R_OK`, `fs.constants.W_OK`, and + * `fs.constants.X_OK` (e.g.`fs.constants.W_OK | fs.constants.R_OK`). 
Check `File access constants` for + * possible values of `mode`. + * + * If any of the accessibility checks fail, an `Error` will be thrown. Otherwise, + * the method will return `undefined`. + * + * ```js + * import { accessSync, constants } from 'fs'; + * + * try { + * accessSync('etc/passwd', constants.R_OK | constants.W_OK); + * console.log('can read/write'); + * } catch (err) { + * console.error('no access!'); + * } + * ``` + * @since v0.11.15 + * @param [mode=fs.constants.F_OK] + */ + export function accessSync(path: PathLike, mode?: number): void; + interface StreamOptions { + flags?: string | undefined; + encoding?: BufferEncoding | undefined; + fd?: number | promises.FileHandle | undefined; + mode?: number | undefined; + autoClose?: boolean | undefined; + /** + * @default false + */ + emitClose?: boolean | undefined; + start?: number | undefined; + highWaterMark?: number | undefined; + } + interface ReadStreamOptions extends StreamOptions { + end?: number | undefined; + } + /** + * Unlike the 16 kb default `highWaterMark` for a `stream.Readable`, the stream + * returned by this method has a default `highWaterMark` of 64 kb. + * + * `options` can include `start` and `end` values to read a range of bytes from + * the file instead of the entire file. Both `start` and `end` are inclusive and + * start counting at 0, allowed values are in the + * \[0, [`Number.MAX_SAFE_INTEGER`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/MAX_SAFE_INTEGER)\] range. If `fd` is specified and `start` is + * omitted or `undefined`, `fs.createReadStream()` reads sequentially from the + * current file position. The `encoding` can be any one of those accepted by `Buffer`. + * + * If `fd` is specified, `ReadStream` will ignore the `path` argument and will use + * the specified file descriptor. This means that no `'open'` event will be + * emitted. `fd` should be blocking; non-blocking `fd`s should be passed to `net.Socket`. + * + * If `fd` points to a character device that only supports blocking reads + * (such as keyboard or sound card), read operations do not finish until data is + * available. This can prevent the process from exiting and the stream from + * closing naturally. + * + * By default, the stream will emit a `'close'` event after it has been + * destroyed. Set the `emitClose` option to `false` to change this behavior. + * + * By providing the `fs` option, it is possible to override the corresponding `fs`implementations for `open`, `read`, and `close`. When providing the `fs` option, + * an override for `read` is required. If no `fd` is provided, an override for`open` is also required. If `autoClose` is `true`, an override for `close` is + * also required. + * + * ```js + * import { createReadStream } from 'fs'; + * + * // Create a stream from some character device. + * const stream = createReadStream('/dev/input/event0'); + * setTimeout(() => { + * stream.close(); // This may not close the stream. + * // Artificially marking end-of-stream, as if the underlying resource had + * // indicated end-of-file by itself, allows the stream to close. + * // This does not cancel pending read operations, and if there is such an + * // operation, the process may still not be able to exit successfully + * // until it finishes. + * stream.push(null); + * stream.read(0); + * }, 100); + * ``` + * + * If `autoClose` is false, then the file descriptor won't be closed, even if + * there's an error. 
It is the application's responsibility to close it and make + * sure there's no file descriptor leak. If `autoClose` is set to true (default + * behavior), on `'error'` or `'end'` the file descriptor will be closed + * automatically. + * + * `mode` sets the file mode (permission and sticky bits), but only if the + * file was created. + * + * An example to read the last 10 bytes of a file which is 100 bytes long: + * + * ```js + * import { createReadStream } from 'fs'; + * + * createReadStream('sample.txt', { start: 90, end: 99 }); + * ``` + * + * If `options` is a string, then it specifies the encoding. + * @since v0.1.31 + */ + export function createReadStream(path: PathLike, options?: BufferEncoding | ReadStreamOptions): ReadStream; + /** + * `options` may also include a `start` option to allow writing data at some + * position past the beginning of the file, allowed values are in the + * \[0, [`Number.MAX_SAFE_INTEGER`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/MAX_SAFE_INTEGER)\] range. Modifying a file rather than + * replacing it may require the `flags` option to be set to `r+` rather than the + * default `w`. The `encoding` can be any one of those accepted by `Buffer`. + * + * If `autoClose` is set to true (default behavior) on `'error'` or `'finish'`the file descriptor will be closed automatically. If `autoClose` is false, + * then the file descriptor won't be closed, even if there's an error. + * It is the application's responsibility to close it and make sure there's no + * file descriptor leak. + * + * By default, the stream will emit a `'close'` event after it has been + * destroyed. Set the `emitClose` option to `false` to change this behavior. + * + * By providing the `fs` option it is possible to override the corresponding `fs`implementations for `open`, `write`, `writev` and `close`. Overriding `write()`without `writev()` can reduce + * performance as some optimizations (`_writev()`) + * will be disabled. When providing the `fs` option, overrides for at least one of`write` and `writev` are required. If no `fd` option is supplied, an override + * for `open` is also required. If `autoClose` is `true`, an override for `close`is also required. + * + * Like `fs.ReadStream`, if `fd` is specified, `fs.WriteStream` will ignore the`path` argument and will use the specified file descriptor. This means that no`'open'` event will be + * emitted. `fd` should be blocking; non-blocking `fd`s + * should be passed to `net.Socket`. + * + * If `options` is a string, then it specifies the encoding. + * @since v0.1.31 + */ + export function createWriteStream(path: PathLike, options?: BufferEncoding | StreamOptions): WriteStream; + /** + * Forces all currently queued I/O operations associated with the file to the + * operating system's synchronized I/O completion state. Refer to the POSIX [`fdatasync(2)`](http://man7.org/linux/man-pages/man2/fdatasync.2.html) documentation for details. No arguments other + * than a possible + * exception are given to the completion callback. + * @since v0.1.96 + */ + export function fdatasync(fd: number, callback: NoParamCallback): void; + export namespace fdatasync { + /** + * Asynchronous fdatasync(2) - synchronize a file's in-core state with storage device. + * @param fd A file descriptor. + */ + function __promisify__(fd: number): Promise; + } + /** + * Forces all currently queued I/O operations associated with the file to the + * operating system's synchronized I/O completion state. 
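A minimal sketch combining `createWriteStream` and `fdatasync` as declared above (the file name is hypothetical):

```js
import { createWriteStream, fdatasync } from 'fs';

// Append a line, then force the data (not the metadata) to disk before closing.
const stream = createWriteStream('output.log', { flags: 'a' });
stream.on('open', (fd) => {
  stream.write('one line of data\n', (err) => {
    if (err) throw err;
    fdatasync(fd, (err) => {
      if (err) throw err;
      stream.end();
    });
  });
});
```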
Refer to the POSIX [`fdatasync(2)`](http://man7.org/linux/man-pages/man2/fdatasync.2.html) documentation for details. Returns `undefined`. + * @since v0.1.96 + */ + export function fdatasyncSync(fd: number): void; + /** + * Asynchronously copies `src` to `dest`. By default, `dest` is overwritten if it + * already exists. No arguments other than a possible exception are given to the + * callback function. Node.js makes no guarantees about the atomicity of the copy + * operation. If an error occurs after the destination file has been opened for + * writing, Node.js will attempt to remove the destination. + * + * `mode` is an optional integer that specifies the behavior + * of the copy operation. It is possible to create a mask consisting of the bitwise + * OR of two or more values (e.g.`fs.constants.COPYFILE_EXCL | fs.constants.COPYFILE_FICLONE`). + * + * * `fs.constants.COPYFILE_EXCL`: The copy operation will fail if `dest` already + * exists. + * * `fs.constants.COPYFILE_FICLONE`: The copy operation will attempt to create a + * copy-on-write reflink. If the platform does not support copy-on-write, then a + * fallback copy mechanism is used. + * * `fs.constants.COPYFILE_FICLONE_FORCE`: The copy operation will attempt to + * create a copy-on-write reflink. If the platform does not support + * copy-on-write, then the operation will fail. + * + * ```js + * import { copyFile, constants } from 'fs'; + * + * function callback(err) { + * if (err) throw err; + * console.log('source.txt was copied to destination.txt'); + * } + * + * // destination.txt will be created or overwritten by default. + * copyFile('source.txt', 'destination.txt', callback); + * + * // By using COPYFILE_EXCL, the operation will fail if destination.txt exists. + * copyFile('source.txt', 'destination.txt', constants.COPYFILE_EXCL, callback); + * ``` + * @since v8.5.0 + * @param src source filename to copy + * @param dest destination filename of the copy operation + * @param [mode=0] modifiers for copy operation. + */ + export function copyFile(src: PathLike, dest: PathLike, callback: NoParamCallback): void; + export function copyFile(src: PathLike, dest: PathLike, mode: number, callback: NoParamCallback): void; + export namespace copyFile { + function __promisify__(src: PathLike, dst: PathLike, mode?: number): Promise; + } + /** + * Synchronously copies `src` to `dest`. By default, `dest` is overwritten if it + * already exists. Returns `undefined`. Node.js makes no guarantees about the + * atomicity of the copy operation. If an error occurs after the destination file + * has been opened for writing, Node.js will attempt to remove the destination. + * + * `mode` is an optional integer that specifies the behavior + * of the copy operation. It is possible to create a mask consisting of the bitwise + * OR of two or more values (e.g.`fs.constants.COPYFILE_EXCL | fs.constants.COPYFILE_FICLONE`). + * + * * `fs.constants.COPYFILE_EXCL`: The copy operation will fail if `dest` already + * exists. + * * `fs.constants.COPYFILE_FICLONE`: The copy operation will attempt to create a + * copy-on-write reflink. If the platform does not support copy-on-write, then a + * fallback copy mechanism is used. + * * `fs.constants.COPYFILE_FICLONE_FORCE`: The copy operation will attempt to + * create a copy-on-write reflink. If the platform does not support + * copy-on-write, then the operation will fail. + * + * ```js + * import { copyFileSync, constants } from 'fs'; + * + * // destination.txt will be created or overwritten by default. 
+ * copyFileSync('source.txt', 'destination.txt'); + * console.log('source.txt was copied to destination.txt'); + * + * // By using COPYFILE_EXCL, the operation will fail if destination.txt exists. + * copyFileSync('source.txt', 'destination.txt', constants.COPYFILE_EXCL); + * ``` + * @since v8.5.0 + * @param src source filename to copy + * @param dest destination filename of the copy operation + * @param [mode=0] modifiers for copy operation. + */ + export function copyFileSync(src: PathLike, dest: PathLike, mode?: number): void; + /** + * Write an array of `ArrayBufferView`s to the file specified by `fd` using`writev()`. + * + * `position` is the offset from the beginning of the file where this data + * should be written. If `typeof position !== 'number'`, the data will be written + * at the current position. + * + * The callback will be given three arguments: `err`, `bytesWritten`, and`buffers`. `bytesWritten` is how many bytes were written from `buffers`. + * + * If this method is `util.promisify()` ed, it returns a promise for an`Object` with `bytesWritten` and `buffers` properties. + * + * It is unsafe to use `fs.writev()` multiple times on the same file without + * waiting for the callback. For this scenario, use {@link createWriteStream}. + * + * On Linux, positional writes don't work when the file is opened in append mode. + * The kernel ignores the position argument and always appends the data to + * the end of the file. + * @since v12.9.0 + */ + export function writev(fd: number, buffers: ReadonlyArray, cb: (err: NodeJS.ErrnoException | null, bytesWritten: number, buffers: NodeJS.ArrayBufferView[]) => void): void; + export function writev( + fd: number, + buffers: ReadonlyArray, + position: number, + cb: (err: NodeJS.ErrnoException | null, bytesWritten: number, buffers: NodeJS.ArrayBufferView[]) => void + ): void; + export interface WriteVResult { + bytesWritten: number; + buffers: NodeJS.ArrayBufferView[]; + } + export namespace writev { + function __promisify__(fd: number, buffers: ReadonlyArray, position?: number): Promise; + } + /** + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link writev}. + * @since v12.9.0 + * @return The number of bytes written. + */ + export function writevSync(fd: number, buffers: ReadonlyArray, position?: number): number; + /** + * Read from a file specified by `fd` and write to an array of `ArrayBufferView`s + * using `readv()`. + * + * `position` is the offset from the beginning of the file from where data + * should be read. If `typeof position !== 'number'`, the data will be read + * from the current position. + * + * The callback will be given three arguments: `err`, `bytesRead`, and`buffers`. `bytesRead` is how many bytes were read from the file. + * + * If this method is invoked as its `util.promisify()` ed version, it returns + * a promise for an `Object` with `bytesRead` and `buffers` properties. 
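A minimal sketch of `fs.writev` as declared above (the file name is hypothetical):

```js
import { open, writev, close } from 'fs';
import { Buffer } from 'buffer';

// Gather-write two buffers to a (hypothetical) file with a single call.
open('vectors.bin', 'w', (err, fd) => {
  if (err) throw err;
  const buffers = [Buffer.from('first chunk '), Buffer.from('second chunk')];
  writev(fd, buffers, (err, bytesWritten) => {
    if (err) throw err;
    console.log(`wrote ${bytesWritten} bytes`);
    close(fd, (err) => {
      if (err) throw err;
    });
  });
});
```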
+ * @since v13.13.0, v12.17.0 + */ + export function readv(fd: number, buffers: ReadonlyArray, cb: (err: NodeJS.ErrnoException | null, bytesRead: number, buffers: NodeJS.ArrayBufferView[]) => void): void; + export function readv( + fd: number, + buffers: ReadonlyArray, + position: number, + cb: (err: NodeJS.ErrnoException | null, bytesRead: number, buffers: NodeJS.ArrayBufferView[]) => void + ): void; + export interface ReadVResult { + bytesRead: number; + buffers: NodeJS.ArrayBufferView[]; + } + export namespace readv { + function __promisify__(fd: number, buffers: ReadonlyArray, position?: number): Promise; + } + /** + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link readv}. + * @since v13.13.0, v12.17.0 + * @return The number of bytes read. + */ + export function readvSync(fd: number, buffers: ReadonlyArray, position?: number): number; + export interface OpenDirOptions { + encoding?: BufferEncoding | undefined; + /** + * Number of directory entries that are buffered + * internally when reading from the directory. Higher values lead to better + * performance but higher memory usage. + * @default 32 + */ + bufferSize?: number | undefined; + } + /** + * Synchronously open a directory. See [`opendir(3)`](http://man7.org/linux/man-pages/man3/opendir.3.html). + * + * Creates an `fs.Dir`, which contains all further functions for reading from + * and cleaning up the directory. + * + * The `encoding` option sets the encoding for the `path` while opening the + * directory and subsequent read operations. + * @since v12.12.0 + */ + export function opendirSync(path: PathLike, options?: OpenDirOptions): Dir; + /** + * Asynchronously open a directory. See the POSIX [`opendir(3)`](http://man7.org/linux/man-pages/man3/opendir.3.html) documentation for + * more details. + * + * Creates an `fs.Dir`, which contains all further functions for reading from + * and cleaning up the directory. + * + * The `encoding` option sets the encoding for the `path` while opening the + * directory and subsequent read operations. + * @since v12.12.0 + */ + export function opendir(path: PathLike, cb: (err: NodeJS.ErrnoException | null, dir: Dir) => void): void; + export function opendir(path: PathLike, options: OpenDirOptions, cb: (err: NodeJS.ErrnoException | null, dir: Dir) => void): void; + export namespace opendir { + function __promisify__(path: PathLike, options?: OpenDirOptions): Promise; + } + export interface BigIntStats extends StatsBase { + atimeNs: bigint; + mtimeNs: bigint; + ctimeNs: bigint; + birthtimeNs: bigint; + } + export interface BigIntOptions { + bigint: true; + } + export interface StatOptions { + bigint?: boolean | undefined; + } + export interface StatSyncOptions extends StatOptions { + throwIfNoEntry?: boolean | undefined; + } + interface CopyOptionsBase { + /** + * Dereference symlinks + * @default false + */ + dereference?: boolean; + /** + * When `force` is `false`, and the destination + * exists, throw an error. + * @default false + */ + errorOnExist?: boolean; + /** + * Overwrite existing file or directory. _The copy + * operation will ignore errors if you set this to false and the destination + * exists. Use the `errorOnExist` option to change this behavior. + * @default true + */ + force?: boolean; + /** + * When `true` timestamps from `src` will + * be preserved. + * @default false + */ + preserveTimestamps?: boolean; + /** + * Copy directories recursively. 
+ * @default false + */ + recursive?: boolean; + /** + * When true, path resolution for symlinks will be skipped + * @default false + */ + verbatimSymlinks?: boolean; + } + export interface CopyOptions extends CopyOptionsBase { + /** + * Function to filter copied files/directories. Return + * `true` to copy the item, `false` to ignore it. + */ + filter?(source: string, destination: string): boolean | Promise; + } + export interface CopySyncOptions extends CopyOptionsBase { + /** + * Function to filter copied files/directories. Return + * `true` to copy the item, `false` to ignore it. + */ + filter?(source: string, destination: string): boolean; + } + /** + * Asynchronously copies the entire directory structure from `src` to `dest`, + * including subdirectories and files. + * + * When copying a directory to another directory, globs are not supported and + * behavior is similar to `cp dir1/ dir2/`. + * @since v16.7.0 + * @experimental + * @param src source path to copy. + * @param dest destination path to copy to. + */ + export function cp(source: string | URL, destination: string | URL, callback: (err: NodeJS.ErrnoException | null) => void): void; + export function cp(source: string | URL, destination: string | URL, opts: CopyOptions, callback: (err: NodeJS.ErrnoException | null) => void): void; + /** + * Synchronously copies the entire directory structure from `src` to `dest`, + * including subdirectories and files. + * + * When copying a directory to another directory, globs are not supported and + * behavior is similar to `cp dir1/ dir2/`. + * @since v16.7.0 + * @experimental + * @param src source path to copy. + * @param dest destination path to copy to. + */ + export function cpSync(source: string | URL, destination: string | URL, opts?: CopySyncOptions): void; +} +declare module 'node:fs' { + export * from 'fs'; +} diff --git a/node_modules/@types/node/fs/promises.d.ts b/node_modules/@types/node/fs/promises.d.ts new file mode 100755 index 0000000..aca2fd5 --- /dev/null +++ b/node_modules/@types/node/fs/promises.d.ts @@ -0,0 +1,1138 @@ +/** + * The `fs/promises` API provides asynchronous file system methods that return + * promises. + * + * The promise APIs use the underlying Node.js threadpool to perform file + * system operations off the event loop thread. These operations are not + * synchronized or threadsafe. Care must be taken when performing multiple + * concurrent modifications on the same file or data corruption may occur. 
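For the `cp`/`cpSync` declarations above, a minimal sketch of a recursive directory copy (the `./assets` and `./assets-backup` paths are hypothetical; the API is flagged `@experimental` in these typings):

```js
// Recursive directory copy with fs.cpSync, then the callback form with extra options.
// './assets' and './assets-backup' are hypothetical paths.
const { cp, cpSync } = require('fs');

cpSync('./assets', './assets-backup', { recursive: true });

cp('./assets', './assets-backup', { recursive: true, errorOnExist: true, force: false }, (err) => {
  if (err) console.error('copy failed:', err.message);
});
```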
+ * @since v10.0.0 + */ +declare module 'fs/promises' { + import { Abortable } from 'node:events'; + import { Stream } from 'node:stream'; + import { ReadableStream } from 'node:stream/web'; + import { + BigIntStats, + BufferEncodingOption, + constants as fsConstants, + CopyOptions, + Dir, + Dirent, + MakeDirectoryOptions, + Mode, + ObjectEncodingOptions, + OpenDirOptions, + OpenMode, + PathLike, + ReadStream, + ReadVResult, + RmDirOptions, + RmOptions, + StatOptions, + Stats, + TimeLike, + WatchEventType, + WatchOptions, + WriteStream, + WriteVResult, + } from 'node:fs'; + import { Interface as ReadlineInterface } from 'node:readline'; + + interface FileChangeInfo { + eventType: WatchEventType; + filename: T; + } + interface FlagAndOpenMode { + mode?: Mode | undefined; + flag?: OpenMode | undefined; + } + interface FileReadResult { + bytesRead: number; + buffer: T; + } + interface FileReadOptions { + /** + * @default `Buffer.alloc(0xffff)` + */ + buffer?: T; + /** + * @default 0 + */ + offset?: number | null; + /** + * @default `buffer.byteLength` + */ + length?: number | null; + position?: number | null; + } + interface CreateReadStreamOptions { + encoding?: BufferEncoding | null | undefined; + autoClose?: boolean | undefined; + emitClose?: boolean | undefined; + start?: number | undefined; + end?: number | undefined; + highWaterMark?: number | undefined; + } + interface CreateWriteStreamOptions { + encoding?: BufferEncoding | null | undefined; + autoClose?: boolean | undefined; + emitClose?: boolean | undefined; + start?: number | undefined; + } + // TODO: Add `EventEmitter` close + interface FileHandle { + /** + * The numeric file descriptor managed by the {FileHandle} object. + * @since v10.0.0 + */ + readonly fd: number; + /** + * Alias of `filehandle.writeFile()`. + * + * When operating on file handles, the mode cannot be changed from what it was set + * to with `fsPromises.open()`. Therefore, this is equivalent to `filehandle.writeFile()`. + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + appendFile(data: string | Uint8Array, options?: (ObjectEncodingOptions & FlagAndOpenMode) | BufferEncoding | null): Promise; + /** + * Changes the ownership of the file. A wrapper for [`chown(2)`](http://man7.org/linux/man-pages/man2/chown.2.html). + * @since v10.0.0 + * @param uid The file's new owner's user id. + * @param gid The file's new group's group id. + * @return Fulfills with `undefined` upon success. + */ + chown(uid: number, gid: number): Promise; + /** + * Modifies the permissions on the file. See [`chmod(2)`](http://man7.org/linux/man-pages/man2/chmod.2.html). + * @since v10.0.0 + * @param mode the file mode bit mask. + * @return Fulfills with `undefined` upon success. + */ + chmod(mode: Mode): Promise; + /** + * Unlike the 16 kb default `highWaterMark` for a `stream.Readable`, the stream + * returned by this method has a default `highWaterMark` of 64 kb. + * + * `options` can include `start` and `end` values to read a range of bytes from + * the file instead of the entire file. Both `start` and `end` are inclusive and + * start counting at 0, allowed values are in the + * \[0, [`Number.MAX_SAFE_INTEGER`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/MAX_SAFE_INTEGER)\] range. If `start` is + * omitted or `undefined`, `filehandle.createReadStream()` reads sequentially from + * the current file position. The `encoding` can be any one of those accepted by `Buffer`. 
+ * + * If the `FileHandle` points to a character device that only supports blocking + * reads (such as keyboard or sound card), read operations do not finish until data + * is available. This can prevent the process from exiting and the stream from + * closing naturally. + * + * By default, the stream will emit a `'close'` event after it has been + * destroyed. Set the `emitClose` option to `false` to change this behavior. + * + * ```js + * import { open } from 'fs/promises'; + * + * const fd = await open('/dev/input/event0'); + * // Create a stream from some character device. + * const stream = fd.createReadStream(); + * setTimeout(() => { + * stream.close(); // This may not close the stream. + * // Artificially marking end-of-stream, as if the underlying resource had + * // indicated end-of-file by itself, allows the stream to close. + * // This does not cancel pending read operations, and if there is such an + * // operation, the process may still not be able to exit successfully + * // until it finishes. + * stream.push(null); + * stream.read(0); + * }, 100); + * ``` + * + * If `autoClose` is false, then the file descriptor won't be closed, even if + * there's an error. It is the application's responsibility to close it and make + * sure there's no file descriptor leak. If `autoClose` is set to true (default + * behavior), on `'error'` or `'end'` the file descriptor will be closed + * automatically. + * + * An example to read the last 10 bytes of a file which is 100 bytes long: + * + * ```js + * import { open } from 'fs/promises'; + * + * const fd = await open('sample.txt'); + * fd.createReadStream({ start: 90, end: 99 }); + * ``` + * @since v16.11.0 + */ + createReadStream(options?: CreateReadStreamOptions): ReadStream; + /** + * `options` may also include a `start` option to allow writing data at some + * position past the beginning of the file, allowed values are in the + * \[0, [`Number.MAX_SAFE_INTEGER`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/MAX_SAFE_INTEGER)\] range. Modifying a file rather than + * replacing it may require the `flags` `open` option to be set to `r+` rather than + * the default `r`. The `encoding` can be any one of those accepted by `Buffer`. + * + * If `autoClose` is set to true (default behavior) on `'error'` or `'finish'`the file descriptor will be closed automatically. If `autoClose` is false, + * then the file descriptor won't be closed, even if there's an error. + * It is the application's responsibility to close it and make sure there's no + * file descriptor leak. + * + * By default, the stream will emit a `'close'` event after it has been + * destroyed. Set the `emitClose` option to `false` to change this behavior. + * @since v16.11.0 + */ + createWriteStream(options?: CreateWriteStreamOptions): WriteStream; + /** + * Forces all currently queued I/O operations associated with the file to the + * operating system's synchronized I/O completion state. Refer to the POSIX [`fdatasync(2)`](http://man7.org/linux/man-pages/man2/fdatasync.2.html) documentation for details. + * + * Unlike `filehandle.sync` this method does not flush modified metadata. + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + datasync(): Promise; + /** + * Request that all data for the open file descriptor is flushed to the storage + * device. The specific implementation is operating system and device specific. 
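A small sketch of `filehandle.createWriteStream()` with the `start` option described above (`notes.txt` is a hypothetical path that must already exist, since it is opened with `r+`):

```js
// Overwrite the beginning of an existing file through a FileHandle write stream.
// 'notes.txt' is a hypothetical path.
import { open } from 'node:fs/promises';

const handle = await open('notes.txt', 'r+');
const out = handle.createWriteStream({ start: 0 });
out.end('rewritten first line\n'); // default autoClose closes the descriptor on 'finish' or 'error'
```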
+ * Refer to the POSIX [`fsync(2)`](http://man7.org/linux/man-pages/man2/fsync.2.html) documentation for more detail. + * @since v10.0.0 + * @return Fufills with `undefined` upon success. + */ + sync(): Promise; + /** + * Reads data from the file and stores that in the given buffer. + * + * If the file is not modified concurrently, the end-of-file is reached when the + * number of bytes read is zero. + * @since v10.0.0 + * @param buffer A buffer that will be filled with the file data read. + * @param offset The location in the buffer at which to start filling. + * @param length The number of bytes to read. + * @param position The location where to begin reading data from the file. If `null`, data will be read from the current file position, and the position will be updated. If `position` is an + * integer, the current file position will remain unchanged. + * @return Fulfills upon success with an object with two properties: + */ + read(buffer: T, offset?: number | null, length?: number | null, position?: number | null): Promise>; + read(options?: FileReadOptions): Promise>; + /** + * Returns a `ReadableStream` that may be used to read the files data. + * + * An error will be thrown if this method is called more than once or is called after the `FileHandle` is closed + * or closing. + * + * ```js + * import { open } from 'node:fs/promises'; + * + * const file = await open('./some/file/to/read'); + * + * for await (const chunk of file.readableWebStream()) + * console.log(chunk); + * + * await file.close(); + * ``` + * + * While the `ReadableStream` will read the file to completion, it will not close the `FileHandle` automatically. User code must still call the `fileHandle.close()` method. + * + * @since v17.0.0 + * @experimental + */ + readableWebStream(): ReadableStream; + /** + * Asynchronously reads the entire contents of a file. + * + * If `options` is a string, then it specifies the `encoding`. + * + * The `FileHandle` has to support reading. + * + * If one or more `filehandle.read()` calls are made on a file handle and then a`filehandle.readFile()` call is made, the data will be read from the current + * position till the end of the file. It doesn't always read from the beginning + * of the file. + * @since v10.0.0 + * @return Fulfills upon a successful read with the contents of the file. If no encoding is specified (using `options.encoding`), the data is returned as a {Buffer} object. Otherwise, the + * data will be a string. + */ + readFile( + options?: { + encoding?: null | undefined; + flag?: OpenMode | undefined; + } | null + ): Promise; + /** + * Asynchronously reads the entire contents of a file. The underlying file will _not_ be closed automatically. + * The `FileHandle` must have been opened for reading. + * @param options An object that may contain an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + readFile( + options: + | { + encoding: BufferEncoding; + flag?: OpenMode | undefined; + } + | BufferEncoding + ): Promise; + /** + * Asynchronously reads the entire contents of a file. The underlying file will _not_ be closed automatically. + * The `FileHandle` must have been opened for reading. + * @param options An object that may contain an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + readFile( + options?: + | (ObjectEncodingOptions & { + flag?: OpenMode | undefined; + }) + | BufferEncoding + | null + ): Promise; + /** + * Convenience method to create a `readline` interface and stream over the file. 
For example: + * + * ```js + * import { open } from 'node:fs/promises'; + * + * const file = await open('./some/file/to/read'); + * + * for await (const line of file.readLines()) { + * console.log(line); + * } + * ``` + * + * @since v18.11.0 + * @param options See `filehandle.createReadStream()` for the options. + */ + readLines(options?: CreateReadStreamOptions): ReadlineInterface; + /** + * @since v10.0.0 + * @return Fulfills with an {fs.Stats} for the file. + */ + stat( + opts?: StatOptions & { + bigint?: false | undefined; + } + ): Promise; + stat( + opts: StatOptions & { + bigint: true; + } + ): Promise; + stat(opts?: StatOptions): Promise; + /** + * Truncates the file. + * + * If the file was larger than `len` bytes, only the first `len` bytes will be + * retained in the file. + * + * The following example retains only the first four bytes of the file: + * + * ```js + * import { open } from 'fs/promises'; + * + * let filehandle = null; + * try { + * filehandle = await open('temp.txt', 'r+'); + * await filehandle.truncate(4); + * } finally { + * await filehandle?.close(); + * } + * ``` + * + * If the file previously was shorter than `len` bytes, it is extended, and the + * extended part is filled with null bytes (`'\0'`): + * + * If `len` is negative then `0` will be used. + * @since v10.0.0 + * @param [len=0] + * @return Fulfills with `undefined` upon success. + */ + truncate(len?: number): Promise; + /** + * Change the file system timestamps of the object referenced by the `FileHandle` then resolves the promise with no arguments upon success. + * @since v10.0.0 + */ + utimes(atime: TimeLike, mtime: TimeLike): Promise; + /** + * Asynchronously writes data to a file, replacing the file if it already exists.`data` can be a string, a buffer, an + * [AsyncIterable](https://tc39.github.io/ecma262/#sec-asynciterable-interface) or + * [Iterable](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols#The_iterable_protocol) object. + * The promise is resolved with no arguments upon success. + * + * If `options` is a string, then it specifies the `encoding`. + * + * The `FileHandle` has to support writing. + * + * It is unsafe to use `filehandle.writeFile()` multiple times on the same file + * without waiting for the promise to be resolved (or rejected). + * + * If one or more `filehandle.write()` calls are made on a file handle and then a`filehandle.writeFile()` call is made, the data will be written from the + * current position till the end of the file. It doesn't always write from the + * beginning of the file. + * @since v10.0.0 + */ + writeFile(data: string | Uint8Array, options?: (ObjectEncodingOptions & FlagAndOpenMode & Abortable) | BufferEncoding | null): Promise; + /** + * Write `buffer` to the file. + * + * The promise is resolved with an object containing two properties: + * + * It is unsafe to use `filehandle.write()` multiple times on the same file + * without waiting for the promise to be resolved (or rejected). For this + * scenario, use `filehandle.createWriteStream()`. + * + * On Linux, positional writes do not work when the file is opened in append mode. + * The kernel ignores the position argument and always appends the data to + * the end of the file. + * @since v10.0.0 + * @param [offset=0] The start position from within `buffer` where the data to write begins. + * @param [length=buffer.byteLength - offset] The number of bytes from `buffer` to write. 
+ * @param position The offset from the beginning of the file where the data from `buffer` should be written. If `position` is not a `number`, the data will be written at the current position. + * See the POSIX pwrite(2) documentation for more detail. + */ + write( + buffer: TBuffer, + offset?: number | null, + length?: number | null, + position?: number | null + ): Promise<{ + bytesWritten: number; + buffer: TBuffer; + }>; + write( + data: string, + position?: number | null, + encoding?: BufferEncoding | null + ): Promise<{ + bytesWritten: number; + buffer: string; + }>; + /** + * Write an array of [ArrayBufferView](https://developer.mozilla.org/en-US/docs/Web/API/ArrayBufferView) s to the file. + * + * The promise is resolved with an object containing a two properties: + * + * It is unsafe to call `writev()` multiple times on the same file without waiting + * for the promise to be resolved (or rejected). + * + * On Linux, positional writes don't work when the file is opened in append mode. + * The kernel ignores the position argument and always appends the data to + * the end of the file. + * @since v12.9.0 + * @param position The offset from the beginning of the file where the data from `buffers` should be written. If `position` is not a `number`, the data will be written at the current + * position. + */ + writev(buffers: ReadonlyArray, position?: number): Promise; + /** + * Read from a file and write to an array of [ArrayBufferView](https://developer.mozilla.org/en-US/docs/Web/API/ArrayBufferView) s + * @since v13.13.0, v12.17.0 + * @param position The offset from the beginning of the file where the data should be read from. If `position` is not a `number`, the data will be read from the current position. + * @return Fulfills upon success an object containing two properties: + */ + readv(buffers: ReadonlyArray, position?: number): Promise; + /** + * Closes the file handle after waiting for any pending operation on the handle to + * complete. + * + * ```js + * import { open } from 'fs/promises'; + * + * let filehandle; + * try { + * filehandle = await open('thefile.txt', 'r'); + * } finally { + * await filehandle?.close(); + * } + * ``` + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + close(): Promise; + } + + const constants: typeof fsConstants; + + /** + * Tests a user's permissions for the file or directory specified by `path`. + * The `mode` argument is an optional integer that specifies the accessibility + * checks to be performed. `mode` should be either the value `fs.constants.F_OK`or a mask consisting of the bitwise OR of any of `fs.constants.R_OK`,`fs.constants.W_OK`, and `fs.constants.X_OK` + * (e.g.`fs.constants.W_OK | fs.constants.R_OK`). Check `File access constants` for + * possible values of `mode`. + * + * If the accessibility check is successful, the promise is resolved with no + * value. If any of the accessibility checks fail, the promise is rejected + * with an [Error](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Error) object. The following example checks if the file`/etc/passwd` can be read and + * written by the current process. 
+ * + * ```js + * import { access } from 'fs/promises'; + * import { constants } from 'fs'; + * + * try { + * await access('/etc/passwd', constants.R_OK | constants.W_OK); + * console.log('can access'); + * } catch { + * console.error('cannot access'); + * } + * ``` + * + * Using `fsPromises.access()` to check for the accessibility of a file before + * calling `fsPromises.open()` is not recommended. Doing so introduces a race + * condition, since other processes may change the file's state between the two + * calls. Instead, user code should open/read/write the file directly and handle + * the error raised if the file is not accessible. + * @since v10.0.0 + * @param [mode=fs.constants.F_OK] + * @return Fulfills with `undefined` upon success. + */ + function access(path: PathLike, mode?: number): Promise; + /** + * Asynchronously copies `src` to `dest`. By default, `dest` is overwritten if it + * already exists. + * + * No guarantees are made about the atomicity of the copy operation. If an + * error occurs after the destination file has been opened for writing, an attempt + * will be made to remove the destination. + * + * ```js + * import { constants } from 'fs'; + * import { copyFile } from 'fs/promises'; + * + * try { + * await copyFile('source.txt', 'destination.txt'); + * console.log('source.txt was copied to destination.txt'); + * } catch { + * console.log('The file could not be copied'); + * } + * + * // By using COPYFILE_EXCL, the operation will fail if destination.txt exists. + * try { + * await copyFile('source.txt', 'destination.txt', constants.COPYFILE_EXCL); + * console.log('source.txt was copied to destination.txt'); + * } catch { + * console.log('The file could not be copied'); + * } + * ``` + * @since v10.0.0 + * @param src source filename to copy + * @param dest destination filename of the copy operation + * @param [mode=0] Optional modifiers that specify the behavior of the copy operation. It is possible to create a mask consisting of the bitwise OR of two or more values (e.g. + * `fs.constants.COPYFILE_EXCL | fs.constants.COPYFILE_FICLONE`) + * @return Fulfills with `undefined` upon success. + */ + function copyFile(src: PathLike, dest: PathLike, mode?: number): Promise; + /** + * Opens a `FileHandle`. + * + * Refer to the POSIX [`open(2)`](http://man7.org/linux/man-pages/man2/open.2.html) documentation for more detail. + * + * Some characters (`< > : " / \ | ? *`) are reserved under Windows as documented + * by [Naming Files, Paths, and Namespaces](https://docs.microsoft.com/en-us/windows/desktop/FileIO/naming-a-file). Under NTFS, if the filename contains + * a colon, Node.js will open a file system stream, as described by [this MSDN page](https://docs.microsoft.com/en-us/windows/desktop/FileIO/using-streams). + * @since v10.0.0 + * @param [flags='r'] See `support of file system `flags``. + * @param [mode=0o666] Sets the file mode (permission and sticky bits) if the file is created. + * @return Fulfills with a {FileHandle} object. + */ + function open(path: PathLike, flags?: string | number, mode?: Mode): Promise; + /** + * Renames `oldPath` to `newPath`. + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + function rename(oldPath: PathLike, newPath: PathLike): Promise; + /** + * Truncates (shortens or extends the length) of the content at `path` to `len`bytes. + * @since v10.0.0 + * @param [len=0] + * @return Fulfills with `undefined` upon success. 
+ */ + function truncate(path: PathLike, len?: number): Promise; + /** + * Removes the directory identified by `path`. + * + * Using `fsPromises.rmdir()` on a file (not a directory) results in the + * promise being rejected with an `ENOENT` error on Windows and an `ENOTDIR`error on POSIX. + * + * To get a behavior similar to the `rm -rf` Unix command, use `fsPromises.rm()` with options `{ recursive: true, force: true }`. + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + function rmdir(path: PathLike, options?: RmDirOptions): Promise; + /** + * Removes files and directories (modeled on the standard POSIX `rm` utility). + * @since v14.14.0 + * @return Fulfills with `undefined` upon success. + */ + function rm(path: PathLike, options?: RmOptions): Promise; + /** + * Asynchronously creates a directory. + * + * The optional `options` argument can be an integer specifying `mode` (permission + * and sticky bits), or an object with a `mode` property and a `recursive`property indicating whether parent directories should be created. Calling`fsPromises.mkdir()` when `path` is a directory + * that exists results in a + * rejection only when `recursive` is false. + * @since v10.0.0 + * @return Upon success, fulfills with `undefined` if `recursive` is `false`, or the first directory path created if `recursive` is `true`. + */ + function mkdir( + path: PathLike, + options: MakeDirectoryOptions & { + recursive: true; + } + ): Promise; + /** + * Asynchronous mkdir(2) - create a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options Either the file mode, or an object optionally specifying the file mode and whether parent folders + * should be created. If a string is passed, it is parsed as an octal integer. If not specified, defaults to `0o777`. + */ + function mkdir( + path: PathLike, + options?: + | Mode + | (MakeDirectoryOptions & { + recursive?: false | undefined; + }) + | null + ): Promise; + /** + * Asynchronous mkdir(2) - create a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options Either the file mode, or an object optionally specifying the file mode and whether parent folders + * should be created. If a string is passed, it is parsed as an octal integer. If not specified, defaults to `0o777`. + */ + function mkdir(path: PathLike, options?: Mode | MakeDirectoryOptions | null): Promise; + /** + * Reads the contents of a directory. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use for + * the filenames. If the `encoding` is set to `'buffer'`, the filenames returned + * will be passed as `Buffer` objects. + * + * If `options.withFileTypes` is set to `true`, the resolved array will contain `fs.Dirent` objects. + * + * ```js + * import { readdir } from 'fs/promises'; + * + * try { + * const files = await readdir(path); + * for (const file of files) + * console.log(file); + * } catch (err) { + * console.error(err); + * } + * ``` + * @since v10.0.0 + * @return Fulfills with an array of the names of the files in the directory excluding `'.'` and `'..'`. + */ + function readdir( + path: PathLike, + options?: + | (ObjectEncodingOptions & { + withFileTypes?: false | undefined; + }) + | BufferEncoding + | null + ): Promise; + /** + * Asynchronous readdir(3) - read a directory. + * @param path A path to a file. 
If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function readdir( + path: PathLike, + options: + | { + encoding: 'buffer'; + withFileTypes?: false | undefined; + } + | 'buffer' + ): Promise; + /** + * Asynchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function readdir( + path: PathLike, + options?: + | (ObjectEncodingOptions & { + withFileTypes?: false | undefined; + }) + | BufferEncoding + | null + ): Promise; + /** + * Asynchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options If called with `withFileTypes: true` the result data will be an array of Dirent. + */ + function readdir( + path: PathLike, + options: ObjectEncodingOptions & { + withFileTypes: true; + } + ): Promise; + /** + * Reads the contents of the symbolic link referred to by `path`. See the POSIX [`readlink(2)`](http://man7.org/linux/man-pages/man2/readlink.2.html) documentation for more detail. The promise is + * resolved with the`linkString` upon success. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use for + * the link path returned. If the `encoding` is set to `'buffer'`, the link path + * returned will be passed as a `Buffer` object. + * @since v10.0.0 + * @return Fulfills with the `linkString` upon success. + */ + function readlink(path: PathLike, options?: ObjectEncodingOptions | BufferEncoding | null): Promise; + /** + * Asynchronous readlink(2) - read value of a symbolic link. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function readlink(path: PathLike, options: BufferEncodingOption): Promise; + /** + * Asynchronous readlink(2) - read value of a symbolic link. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function readlink(path: PathLike, options?: ObjectEncodingOptions | string | null): Promise; + /** + * Creates a symbolic link. + * + * The `type` argument is only used on Windows platforms and can be one of `'dir'`,`'file'`, or `'junction'`. Windows junction points require the destination path + * to be absolute. When using `'junction'`, the `target` argument will + * automatically be normalized to absolute path. + * @since v10.0.0 + * @param [type='file'] + * @return Fulfills with `undefined` upon success. + */ + function symlink(target: PathLike, path: PathLike, type?: string | null): Promise; + /** + * Equivalent to `fsPromises.stat()` unless `path` refers to a symbolic link, + * in which case the link itself is stat-ed, not the file that it refers to. + * Refer to the POSIX [`lstat(2)`](http://man7.org/linux/man-pages/man2/lstat.2.html) document for more detail. 
+ * @since v10.0.0 + * @return Fulfills with the {fs.Stats} object for the given symbolic link `path`. + */ + function lstat( + path: PathLike, + opts?: StatOptions & { + bigint?: false | undefined; + } + ): Promise; + function lstat( + path: PathLike, + opts: StatOptions & { + bigint: true; + } + ): Promise; + function lstat(path: PathLike, opts?: StatOptions): Promise; + /** + * @since v10.0.0 + * @return Fulfills with the {fs.Stats} object for the given `path`. + */ + function stat( + path: PathLike, + opts?: StatOptions & { + bigint?: false | undefined; + } + ): Promise; + function stat( + path: PathLike, + opts: StatOptions & { + bigint: true; + } + ): Promise; + function stat(path: PathLike, opts?: StatOptions): Promise; + /** + * Creates a new link from the `existingPath` to the `newPath`. See the POSIX [`link(2)`](http://man7.org/linux/man-pages/man2/link.2.html) documentation for more detail. + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + function link(existingPath: PathLike, newPath: PathLike): Promise; + /** + * If `path` refers to a symbolic link, then the link is removed without affecting + * the file or directory to which that link refers. If the `path` refers to a file + * path that is not a symbolic link, the file is deleted. See the POSIX [`unlink(2)`](http://man7.org/linux/man-pages/man2/unlink.2.html) documentation for more detail. + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + function unlink(path: PathLike): Promise; + /** + * Changes the permissions of a file. + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + function chmod(path: PathLike, mode: Mode): Promise; + /** + * Changes the permissions on a symbolic link. + * + * This method is only implemented on macOS. + * @deprecated Since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + function lchmod(path: PathLike, mode: Mode): Promise; + /** + * Changes the ownership on a symbolic link. + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + function lchown(path: PathLike, uid: number, gid: number): Promise; + /** + * Changes the access and modification times of a file in the same way as `fsPromises.utimes()`, with the difference that if the path refers to a + * symbolic link, then the link is not dereferenced: instead, the timestamps of + * the symbolic link itself are changed. + * @since v14.5.0, v12.19.0 + * @return Fulfills with `undefined` upon success. + */ + function lutimes(path: PathLike, atime: TimeLike, mtime: TimeLike): Promise; + /** + * Changes the ownership of a file. + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + function chown(path: PathLike, uid: number, gid: number): Promise; + /** + * Change the file system timestamps of the object referenced by `path`. + * + * The `atime` and `mtime` arguments follow these rules: + * + * * Values can be either numbers representing Unix epoch time, `Date`s, or a + * numeric string like `'123456789.0'`. + * * If the value can not be converted to a number, or is `NaN`, `Infinity` or`-Infinity`, an `Error` will be thrown. + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + function utimes(path: PathLike, atime: TimeLike, mtime: TimeLike): Promise; + /** + * Determines the actual location of `path` using the same semantics as the`fs.realpath.native()` function. + * + * Only paths that can be converted to UTF8 strings are supported. 
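The `bigint` overload pattern above can be exercised like this (`data.bin` is a hypothetical path):

```js
// Nanosecond-precision stats via the BigIntStats overload, then a timestamp update.
// 'data.bin' is a hypothetical path.
import { stat, utimes } from 'node:fs/promises';

const stats = await stat('data.bin', { bigint: true });
console.log(stats.mtimeNs); // bigint nanoseconds, per the BigIntStats overload

await utimes('data.bin', new Date(), new Date()); // TimeLike accepts Date, number, or numeric string
```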
+ * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use for + * the path. If the `encoding` is set to `'buffer'`, the path returned will be + * passed as a `Buffer` object. + * + * On Linux, when Node.js is linked against musl libc, the procfs file system must + * be mounted on `/proc` in order for this function to work. Glibc does not have + * this restriction. + * @since v10.0.0 + * @return Fulfills with the resolved path upon success. + */ + function realpath(path: PathLike, options?: ObjectEncodingOptions | BufferEncoding | null): Promise; + /** + * Asynchronous realpath(3) - return the canonicalized absolute pathname. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function realpath(path: PathLike, options: BufferEncodingOption): Promise; + /** + * Asynchronous realpath(3) - return the canonicalized absolute pathname. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function realpath(path: PathLike, options?: ObjectEncodingOptions | BufferEncoding | null): Promise; + /** + * Creates a unique temporary directory. A unique directory name is generated by + * appending six random characters to the end of the provided `prefix`. Due to + * platform inconsistencies, avoid trailing `X` characters in `prefix`. Some + * platforms, notably the BSDs, can return more than six random characters, and + * replace trailing `X` characters in `prefix` with random characters. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use. + * + * ```js + * import { mkdtemp } from 'fs/promises'; + * + * try { + * await mkdtemp(path.join(os.tmpdir(), 'foo-')); + * } catch (err) { + * console.error(err); + * } + * ``` + * + * The `fsPromises.mkdtemp()` method will append the six randomly selected + * characters directly to the `prefix` string. For instance, given a directory`/tmp`, if the intention is to create a temporary directory _within_`/tmp`, the`prefix` must end with a trailing + * platform-specific path separator + * (`require('path').sep`). + * @since v10.0.0 + * @return Fulfills with a string containing the filesystem path of the newly created temporary directory. + */ + function mkdtemp(prefix: string, options?: ObjectEncodingOptions | BufferEncoding | null): Promise; + /** + * Asynchronously creates a unique temporary directory. + * Generates six random characters to be appended behind a required `prefix` to create a unique temporary directory. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function mkdtemp(prefix: string, options: BufferEncodingOption): Promise; + /** + * Asynchronously creates a unique temporary directory. + * Generates six random characters to be appended behind a required `prefix` to create a unique temporary directory. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. 
+ */ + function mkdtemp(prefix: string, options?: ObjectEncodingOptions | BufferEncoding | null): Promise; + /** + * Asynchronously writes data to a file, replacing the file if it already exists.`data` can be a string, a buffer, an + * [AsyncIterable](https://tc39.github.io/ecma262/#sec-asynciterable-interface) or + * [Iterable](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols#The_iterable_protocol) object. + * + * The `encoding` option is ignored if `data` is a buffer. + * + * If `options` is a string, then it specifies the encoding. + * + * The `mode` option only affects the newly created file. See `fs.open()` for more details. + * + * Any specified `FileHandle` has to support writing. + * + * It is unsafe to use `fsPromises.writeFile()` multiple times on the same file + * without waiting for the promise to be settled. + * + * Similarly to `fsPromises.readFile` \- `fsPromises.writeFile` is a convenience + * method that performs multiple `write` calls internally to write the buffer + * passed to it. For performance sensitive code consider using `fs.createWriteStream()` or `filehandle.createWriteStream()`. + * + * It is possible to use an `AbortSignal` to cancel an `fsPromises.writeFile()`. + * Cancelation is "best effort", and some amount of data is likely still + * to be written. + * + * ```js + * import { writeFile } from 'fs/promises'; + * import { Buffer } from 'buffer'; + * + * try { + * const controller = new AbortController(); + * const { signal } = controller; + * const data = new Uint8Array(Buffer.from('Hello Node.js')); + * const promise = writeFile('message.txt', data, { signal }); + * + * // Abort the request before the promise settles. + * controller.abort(); + * + * await promise; + * } catch (err) { + * // When a request is aborted - err is an AbortError + * console.error(err); + * } + * ``` + * + * Aborting an ongoing request does not abort individual operating + * system requests but rather the internal buffering `fs.writeFile` performs. + * @since v10.0.0 + * @param file filename or `FileHandle` + * @return Fulfills with `undefined` upon success. + */ + function writeFile( + file: PathLike | FileHandle, + data: string | NodeJS.ArrayBufferView | Iterable | AsyncIterable | Stream, + options?: + | (ObjectEncodingOptions & { + mode?: Mode | undefined; + flag?: OpenMode | undefined; + } & Abortable) + | BufferEncoding + | null + ): Promise; + /** + * Asynchronously append data to a file, creating the file if it does not yet + * exist. `data` can be a string or a `Buffer`. + * + * If `options` is a string, then it specifies the `encoding`. + * + * The `mode` option only affects the newly created file. See `fs.open()` for more details. + * + * The `path` may be specified as a `FileHandle` that has been opened + * for appending (using `fsPromises.open()`). + * @since v10.0.0 + * @param path filename or {FileHandle} + * @return Fulfills with `undefined` upon success. + */ + function appendFile(path: PathLike | FileHandle, data: string | Uint8Array, options?: (ObjectEncodingOptions & FlagAndOpenMode) | BufferEncoding | null): Promise; + /** + * Asynchronously reads the entire contents of a file. + * + * If no encoding is specified (using `options.encoding`), the data is returned + * as a `Buffer` object. Otherwise, the data will be a string. + * + * If `options` is a string, then it specifies the encoding. + * + * When the `path` is a directory, the behavior of `fsPromises.readFile()` is + * platform-specific. 
On macOS, Linux, and Windows, the promise will be rejected + * with an error. On FreeBSD, a representation of the directory's contents will be + * returned. + * + * It is possible to abort an ongoing `readFile` using an `AbortSignal`. If a + * request is aborted the promise returned is rejected with an `AbortError`: + * + * ```js + * import { readFile } from 'fs/promises'; + * + * try { + * const controller = new AbortController(); + * const { signal } = controller; + * const promise = readFile(fileName, { signal }); + * + * // Abort the request before the promise settles. + * controller.abort(); + * + * await promise; + * } catch (err) { + * // When a request is aborted - err is an AbortError + * console.error(err); + * } + * ``` + * + * Aborting an ongoing request does not abort individual operating + * system requests but rather the internal buffering `fs.readFile` performs. + * + * Any specified `FileHandle` has to support reading. + * @since v10.0.0 + * @param path filename or `FileHandle` + * @return Fulfills with the contents of the file. + */ + function readFile( + path: PathLike | FileHandle, + options?: + | ({ + encoding?: null | undefined; + flag?: OpenMode | undefined; + } & Abortable) + | null + ): Promise; + /** + * Asynchronously reads the entire contents of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * If a `FileHandle` is provided, the underlying file will _not_ be closed automatically. + * @param options An object that may contain an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + function readFile( + path: PathLike | FileHandle, + options: + | ({ + encoding: BufferEncoding; + flag?: OpenMode | undefined; + } & Abortable) + | BufferEncoding + ): Promise; + /** + * Asynchronously reads the entire contents of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * If a `FileHandle` is provided, the underlying file will _not_ be closed automatically. + * @param options An object that may contain an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + function readFile( + path: PathLike | FileHandle, + options?: + | (ObjectEncodingOptions & + Abortable & { + flag?: OpenMode | undefined; + }) + | BufferEncoding + | null + ): Promise; + /** + * Asynchronously open a directory for iterative scanning. See the POSIX [`opendir(3)`](http://man7.org/linux/man-pages/man3/opendir.3.html) documentation for more detail. + * + * Creates an `fs.Dir`, which contains all further functions for reading from + * and cleaning up the directory. + * + * The `encoding` option sets the encoding for the `path` while opening the + * directory and subsequent read operations. + * + * Example using async iteration: + * + * ```js + * import { opendir } from 'fs/promises'; + * + * try { + * const dir = await opendir('./'); + * for await (const dirent of dir) + * console.log(dirent.name); + * } catch (err) { + * console.error(err); + * } + * ``` + * + * When using the async iterator, the `fs.Dir` object will be automatically + * closed after the iterator exits. + * @since v12.12.0 + * @return Fulfills with an {fs.Dir}. + */ + function opendir(path: PathLike, options?: OpenDirOptions): Promise; + /** + * Returns an async iterator that watches for changes on `filename`, where `filename`is either a file or a directory. 
+ * + * ```js + * const { watch } = require('fs/promises'); + * + * const ac = new AbortController(); + * const { signal } = ac; + * setTimeout(() => ac.abort(), 10000); + * + * (async () => { + * try { + * const watcher = watch(__filename, { signal }); + * for await (const event of watcher) + * console.log(event); + * } catch (err) { + * if (err.name === 'AbortError') + * return; + * throw err; + * } + * })(); + * ``` + * + * On most platforms, `'rename'` is emitted whenever a filename appears or + * disappears in the directory. + * + * All the `caveats` for `fs.watch()` also apply to `fsPromises.watch()`. + * @since v15.9.0, v14.18.0 + * @return of objects with the properties: + */ + function watch( + filename: PathLike, + options: + | (WatchOptions & { + encoding: 'buffer'; + }) + | 'buffer' + ): AsyncIterable>; + /** + * Watch for changes on `filename`, where `filename` is either a file or a directory, returning an `FSWatcher`. + * @param filename A path to a file or directory. If a URL is provided, it must use the `file:` protocol. + * @param options Either the encoding for the filename provided to the listener, or an object optionally specifying encoding, persistent, and recursive options. + * If `encoding` is not supplied, the default of `'utf8'` is used. + * If `persistent` is not supplied, the default of `true` is used. + * If `recursive` is not supplied, the default of `false` is used. + */ + function watch(filename: PathLike, options?: WatchOptions | BufferEncoding): AsyncIterable>; + /** + * Watch for changes on `filename`, where `filename` is either a file or a directory, returning an `FSWatcher`. + * @param filename A path to a file or directory. If a URL is provided, it must use the `file:` protocol. + * @param options Either the encoding for the filename provided to the listener, or an object optionally specifying encoding, persistent, and recursive options. + * If `encoding` is not supplied, the default of `'utf8'` is used. + * If `persistent` is not supplied, the default of `true` is used. + * If `recursive` is not supplied, the default of `false` is used. + */ + function watch(filename: PathLike, options: WatchOptions | string): AsyncIterable> | AsyncIterable>; + /** + * Asynchronously copies the entire directory structure from `src` to `dest`, + * including subdirectories and files. + * + * When copying a directory to another directory, globs are not supported and + * behavior is similar to `cp dir1/ dir2/`. + * @since v16.7.0 + * @experimental + * @param src source path to copy. + * @param dest destination path to copy to. + * @return Fulfills with `undefined` upon success. 
+ */ + function cp(source: string | URL, destination: string | URL, opts?: CopyOptions): Promise; +} +declare module 'node:fs/promises' { + export * from 'fs/promises'; +} diff --git a/node_modules/@types/node/globals.d.ts b/node_modules/@types/node/globals.d.ts new file mode 100755 index 0000000..a2b3152 --- /dev/null +++ b/node_modules/@types/node/globals.d.ts @@ -0,0 +1,300 @@ +// Declare "static" methods in Error +interface ErrorConstructor { + /** Create .stack property on a target object */ + captureStackTrace(targetObject: object, constructorOpt?: Function): void; + + /** + * Optional override for formatting stack traces + * + * @see https://v8.dev/docs/stack-trace-api#customizing-stack-traces + */ + prepareStackTrace?: ((err: Error, stackTraces: NodeJS.CallSite[]) => any) | undefined; + + stackTraceLimit: number; +} + +/*-----------------------------------------------* + * * + * GLOBAL * + * * + ------------------------------------------------*/ + +// For backwards compability +interface NodeRequire extends NodeJS.Require { } +interface RequireResolve extends NodeJS.RequireResolve { } +interface NodeModule extends NodeJS.Module { } + +declare var process: NodeJS.Process; +declare var console: Console; + +declare var __filename: string; +declare var __dirname: string; + +declare var require: NodeRequire; +declare var module: NodeModule; + +// Same as module.exports +declare var exports: any; + +/** + * Only available if `--expose-gc` is passed to the process. + */ +declare var gc: undefined | (() => void); + +//#region borrowed +// from https://github.com/microsoft/TypeScript/blob/38da7c600c83e7b31193a62495239a0fe478cb67/lib/lib.webworker.d.ts#L633 until moved to separate lib +/** A controller object that allows you to abort one or more DOM requests as and when desired. */ +interface AbortController { + /** + * Returns the AbortSignal object associated with this object. + */ + + readonly signal: AbortSignal; + /** + * Invoking this method will set this object's AbortSignal's aborted flag and signal to any observers that the associated activity is to be aborted. + */ + abort(): void; +} + +/** A signal object that allows you to communicate with a DOM request (such as a Fetch) and abort it if required via an AbortController object. */ +interface AbortSignal extends EventTarget { + /** + * Returns true if this AbortSignal's AbortController has signaled to abort, and false otherwise. + */ + readonly aborted: boolean; +} + +declare var AbortController: typeof globalThis extends {onmessage: any; AbortController: infer T} + ? T + : { + prototype: AbortController; + new(): AbortController; + }; + +declare var AbortSignal: typeof globalThis extends {onmessage: any; AbortSignal: infer T} + ? T + : { + prototype: AbortSignal; + new(): AbortSignal; + // TODO: Add abort() static + timeout(milliseconds: number): AbortSignal; + }; +//#endregion borrowed + +//#region ArrayLike.at() +interface RelativeIndexable { + /** + * Takes an integer value and returns the item at that index, + * allowing for positive and negative integers. + * Negative integers count back from the last item in the array. 
+ */ + at(index: number): T | undefined; +} +interface String extends RelativeIndexable {} +interface Array extends RelativeIndexable {} +interface ReadonlyArray extends RelativeIndexable {} +interface Int8Array extends RelativeIndexable {} +interface Uint8Array extends RelativeIndexable {} +interface Uint8ClampedArray extends RelativeIndexable {} +interface Int16Array extends RelativeIndexable {} +interface Uint16Array extends RelativeIndexable {} +interface Int32Array extends RelativeIndexable {} +interface Uint32Array extends RelativeIndexable {} +interface Float32Array extends RelativeIndexable {} +interface Float64Array extends RelativeIndexable {} +interface BigInt64Array extends RelativeIndexable {} +interface BigUint64Array extends RelativeIndexable {} +//#endregion ArrayLike.at() end + +/** + * @since v17.0.0 + * + * Creates a deep clone of an object. + */ +declare function structuredClone( + value: T, + transfer?: { transfer: ReadonlyArray }, +): T; + +/*----------------------------------------------* +* * +* GLOBAL INTERFACES * +* * +*-----------------------------------------------*/ +declare namespace NodeJS { + interface CallSite { + /** + * Value of "this" + */ + getThis(): unknown; + + /** + * Type of "this" as a string. + * This is the name of the function stored in the constructor field of + * "this", if available. Otherwise the object's [[Class]] internal + * property. + */ + getTypeName(): string | null; + + /** + * Current function + */ + getFunction(): Function | undefined; + + /** + * Name of the current function, typically its name property. + * If a name property is not available an attempt will be made to try + * to infer a name from the function's context. + */ + getFunctionName(): string | null; + + /** + * Name of the property [of "this" or one of its prototypes] that holds + * the current function + */ + getMethodName(): string | null; + + /** + * Name of the script [if this function was defined in a script] + */ + getFileName(): string | null; + + /** + * Current line number [if this function was defined in a script] + */ + getLineNumber(): number | null; + + /** + * Current column number [if this function was defined in a script] + */ + getColumnNumber(): number | null; + + /** + * A call site object representing the location where eval was called + * [if this function was created using a call to eval] + */ + getEvalOrigin(): string | undefined; + + /** + * Is this a toplevel invocation, that is, is "this" the global object? + */ + isToplevel(): boolean; + + /** + * Does this call take place in code defined by a call to eval? + */ + isEval(): boolean; + + /** + * Is this call in native V8 code? + */ + isNative(): boolean; + + /** + * Is this a constructor call? 
+ */ + isConstructor(): boolean; + } + + interface ErrnoException extends Error { + errno?: number | undefined; + code?: string | undefined; + path?: string | undefined; + syscall?: string | undefined; + } + + interface ReadableStream extends EventEmitter { + readable: boolean; + read(size?: number): string | Buffer; + setEncoding(encoding: BufferEncoding): this; + pause(): this; + resume(): this; + isPaused(): boolean; + pipe(destination: T, options?: { end?: boolean | undefined; }): T; + unpipe(destination?: WritableStream): this; + unshift(chunk: string | Uint8Array, encoding?: BufferEncoding): void; + wrap(oldStream: ReadableStream): this; + [Symbol.asyncIterator](): AsyncIterableIterator; + } + + interface WritableStream extends EventEmitter { + writable: boolean; + write(buffer: Uint8Array | string, cb?: (err?: Error | null) => void): boolean; + write(str: string, encoding?: BufferEncoding, cb?: (err?: Error | null) => void): boolean; + end(cb?: () => void): this; + end(data: string | Uint8Array, cb?: () => void): this; + end(str: string, encoding?: BufferEncoding, cb?: () => void): this; + } + + interface ReadWriteStream extends ReadableStream, WritableStream { } + + interface RefCounted { + ref(): this; + unref(): this; + } + + type TypedArray = + | Uint8Array + | Uint8ClampedArray + | Uint16Array + | Uint32Array + | Int8Array + | Int16Array + | Int32Array + | BigUint64Array + | BigInt64Array + | Float32Array + | Float64Array; + type ArrayBufferView = TypedArray | DataView; + + interface Require { + (id: string): any; + resolve: RequireResolve; + cache: Dict; + /** + * @deprecated + */ + extensions: RequireExtensions; + main: Module | undefined; + } + + interface RequireResolve { + (id: string, options?: { paths?: string[] | undefined; }): string; + paths(request: string): string[] | null; + } + + interface RequireExtensions extends Dict<(m: Module, filename: string) => any> { + '.js': (m: Module, filename: string) => any; + '.json': (m: Module, filename: string) => any; + '.node': (m: Module, filename: string) => any; + } + interface Module { + /** + * `true` if the module is running during the Node.js preload + */ + isPreloading: boolean; + exports: any; + require: Require; + id: string; + filename: string; + loaded: boolean; + /** @deprecated since v14.6.0 Please use `require.main` and `module.children` instead. */ + parent: Module | null | undefined; + children: Module[]; + /** + * @since v11.14.0 + * + * The directory name of the module. This is usually the same as the path.dirname() of the module.id. + */ + path: string; + paths: string[]; + } + + interface Dict { + [key: string]: T | undefined; + } + + interface ReadOnlyDict { + readonly [key: string]: T | undefined; + } +} diff --git a/node_modules/@types/node/globals.global.d.ts b/node_modules/@types/node/globals.global.d.ts new file mode 100755 index 0000000..ef1198c --- /dev/null +++ b/node_modules/@types/node/globals.global.d.ts @@ -0,0 +1 @@ +declare var global: typeof globalThis; diff --git a/node_modules/@types/node/http.d.ts b/node_modules/@types/node/http.d.ts new file mode 100755 index 0000000..9876c4f --- /dev/null +++ b/node_modules/@types/node/http.d.ts @@ -0,0 +1,1607 @@ +/** + * To use the HTTP server and client one must `require('http')`. + * + * The HTTP interfaces in Node.js are designed to support many features + * of the protocol which have been traditionally difficult to use. + * In particular, large, possibly chunk-encoded, messages. 
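A short sketch of handling the `NodeJS.ErrnoException` shape declared above (the `/no/such/file` path is deliberately missing):

```js
// Inspect the code/path/syscall/errno fields carried by a failed fs call.
import { readFile } from 'node:fs/promises';

try {
  await readFile('/no/such/file');
} catch (err) {
  // At runtime this error carries the NodeJS.ErrnoException fields.
  if (err.code === 'ENOENT') {
    console.error(`missing: ${err.path} (${err.syscall}, errno ${err.errno})`);
  } else {
    throw err;
  }
}
```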
The interface is + * careful to never buffer entire requests or responses, so the + * user is able to stream data. + * + * HTTP message headers are represented by an object like this: + * + * ```js + * { 'content-length': '123', + * 'content-type': 'text/plain', + * 'connection': 'keep-alive', + * 'host': 'example.com', + * 'accept': '*' } + * ``` + * + * Keys are lowercased. Values are not modified. + * + * In order to support the full spectrum of possible HTTP applications, the Node.js + * HTTP API is very low-level. It deals with stream handling and message + * parsing only. It parses a message into headers and body but it does not + * parse the actual headers or the body. + * + * See `message.headers` for details on how duplicate headers are handled. + * + * The raw headers as they were received are retained in the `rawHeaders`property, which is an array of `[key, value, key2, value2, ...]`. For + * example, the previous message header object might have a `rawHeaders`list like the following: + * + * ```js + * [ 'ConTent-Length', '123456', + * 'content-LENGTH', '123', + * 'content-type', 'text/plain', + * 'CONNECTION', 'keep-alive', + * 'Host', 'example.com', + * 'accepT', '*' ] + * ``` + * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/http.js) + */ +declare module 'http' { + import * as stream from 'node:stream'; + import { URL } from 'node:url'; + import { TcpSocketConnectOpts, Socket, Server as NetServer, LookupFunction } from 'node:net'; + // incoming headers will never contain number + interface IncomingHttpHeaders extends NodeJS.Dict { + accept?: string | undefined; + 'accept-language'?: string | undefined; + 'accept-patch'?: string | undefined; + 'accept-ranges'?: string | undefined; + 'access-control-allow-credentials'?: string | undefined; + 'access-control-allow-headers'?: string | undefined; + 'access-control-allow-methods'?: string | undefined; + 'access-control-allow-origin'?: string | undefined; + 'access-control-expose-headers'?: string | undefined; + 'access-control-max-age'?: string | undefined; + 'access-control-request-headers'?: string | undefined; + 'access-control-request-method'?: string | undefined; + age?: string | undefined; + allow?: string | undefined; + 'alt-svc'?: string | undefined; + authorization?: string | undefined; + 'cache-control'?: string | undefined; + connection?: string | undefined; + 'content-disposition'?: string | undefined; + 'content-encoding'?: string | undefined; + 'content-language'?: string | undefined; + 'content-length'?: string | undefined; + 'content-location'?: string | undefined; + 'content-range'?: string | undefined; + 'content-type'?: string | undefined; + cookie?: string | undefined; + date?: string | undefined; + etag?: string | undefined; + expect?: string | undefined; + expires?: string | undefined; + forwarded?: string | undefined; + from?: string | undefined; + host?: string | undefined; + 'if-match'?: string | undefined; + 'if-modified-since'?: string | undefined; + 'if-none-match'?: string | undefined; + 'if-unmodified-since'?: string | undefined; + 'last-modified'?: string | undefined; + location?: string | undefined; + origin?: string | undefined; + pragma?: string | undefined; + 'proxy-authenticate'?: string | undefined; + 'proxy-authorization'?: string | undefined; + 'public-key-pins'?: string | undefined; + range?: string | undefined; + referer?: string | undefined; + 'retry-after'?: string | undefined; + 'sec-websocket-accept'?: string | undefined; + 'sec-websocket-extensions'?: string | undefined; 
+ 'sec-websocket-key'?: string | undefined; + 'sec-websocket-protocol'?: string | undefined; + 'sec-websocket-version'?: string | undefined; + 'set-cookie'?: string[] | undefined; + 'strict-transport-security'?: string | undefined; + tk?: string | undefined; + trailer?: string | undefined; + 'transfer-encoding'?: string | undefined; + upgrade?: string | undefined; + 'user-agent'?: string | undefined; + vary?: string | undefined; + via?: string | undefined; + warning?: string | undefined; + 'www-authenticate'?: string | undefined; + } + // outgoing headers allows numbers (as they are converted internally to strings) + type OutgoingHttpHeader = number | string | string[]; + interface OutgoingHttpHeaders extends NodeJS.Dict {} + interface ClientRequestArgs { + signal?: AbortSignal | undefined; + protocol?: string | null | undefined; + host?: string | null | undefined; + hostname?: string | null | undefined; + family?: number | undefined; + port?: number | string | null | undefined; + defaultPort?: number | string | undefined; + localAddress?: string | undefined; + socketPath?: string | undefined; + /** + * @default 8192 + */ + maxHeaderSize?: number | undefined; + method?: string | undefined; + path?: string | null | undefined; + headers?: OutgoingHttpHeaders | undefined; + auth?: string | null | undefined; + agent?: Agent | boolean | undefined; + _defaultAgent?: Agent | undefined; + timeout?: number | undefined; + setHost?: boolean | undefined; + // https://github.com/nodejs/node/blob/master/lib/_http_client.js#L278 + createConnection?: + | ((options: ClientRequestArgs, oncreate: (err: Error, socket: Socket) => void) => Socket) + | undefined; + lookup?: LookupFunction | undefined; + } + interface ServerOptions< + Request extends typeof IncomingMessage = typeof IncomingMessage, + Response extends typeof ServerResponse = typeof ServerResponse, + > { + IncomingMessage?: Request | undefined; + ServerResponse?: Response | undefined; + /** + * Optionally overrides the value of + * `--max-http-header-size` for requests received by this server, i.e. + * the maximum length of request headers in bytes. + * @default 8192 + */ + maxHeaderSize?: number | undefined; + /** + * Use an insecure HTTP parser that accepts invalid HTTP headers when true. + * Using the insecure parser should be avoided. + * See --insecure-http-parser for more information. + * @default false + */ + insecureHTTPParser?: boolean | undefined; + /** + * If set to `true`, it disables the use of Nagle's algorithm immediately after a new incoming connection is received. + * @default false + * @since v16.5.0 + */ + noDelay?: boolean | undefined; + /** + * If set to `true`, it enables keep-alive functionality on the socket immediately after a new incoming connection is received, + * similarly on what is done in `socket.setKeepAlive([enable][, initialDelay])`. + * @default false + * @since v16.5.0 + */ + keepAlive?: boolean | undefined; + /** + * If set to a positive number, it sets the initial delay before the first keepalive probe is sent on an idle socket. 
+ * @default 0 + * @since v16.5.0 + */ + keepAliveInitialDelay?: number | undefined; + } + type RequestListener< + Request extends typeof IncomingMessage = typeof IncomingMessage, + Response extends typeof ServerResponse = typeof ServerResponse, + > = (req: InstanceType, res: InstanceType & { req: InstanceType }) => void; + /** + * @since v0.1.17 + */ + class Server< + Request extends typeof IncomingMessage = typeof IncomingMessage, + Response extends typeof ServerResponse = typeof ServerResponse, + > extends NetServer { + constructor(requestListener?: RequestListener); + constructor(options: ServerOptions, requestListener?: RequestListener); + /** + * Sets the timeout value for sockets, and emits a `'timeout'` event on + * the Server object, passing the socket as an argument, if a timeout + * occurs. + * + * If there is a `'timeout'` event listener on the Server object, then it + * will be called with the timed-out socket as an argument. + * + * By default, the Server does not timeout sockets. However, if a callback + * is assigned to the Server's `'timeout'` event, timeouts must be handled + * explicitly. + * @since v0.9.12 + * @param [msecs=0 (no timeout)] + */ + setTimeout(msecs?: number, callback?: () => void): this; + setTimeout(callback: () => void): this; + /** + * Limits maximum incoming headers count. If set to 0, no limit will be applied. + * @since v0.7.0 + */ + maxHeadersCount: number | null; + /** + * The maximum number of requests socket can handle + * before closing keep alive connection. + * + * A value of `0` will disable the limit. + * + * When the limit is reached it will set the `Connection` header value to `close`, + * but will not actually close the connection, subsequent requests sent + * after the limit is reached will get `503 Service Unavailable` as a response. + * @since v16.10.0 + */ + maxRequestsPerSocket: number | null; + /** + * The number of milliseconds of inactivity before a socket is presumed + * to have timed out. + * + * A value of `0` will disable the timeout behavior on incoming connections. + * + * The socket timeout logic is set up on connection, so changing this + * value only affects new connections to the server, not any existing connections. + * @since v0.9.12 + */ + timeout: number; + /** + * Limit the amount of time the parser will wait to receive the complete HTTP + * headers. + * + * If the timeout expires, the server responds with status 408 without + * forwarding the request to the request listener and then closes the connection. + * + * It must be set to a non-zero value (e.g. 120 seconds) to protect against + * potential Denial-of-Service attacks in case the server is deployed without a + * reverse proxy in front. + * @since v11.3.0, v10.14.0 + */ + headersTimeout: number; + /** + * The number of milliseconds of inactivity a server needs to wait for additional + * incoming data, after it has finished writing the last response, before a socket + * will be destroyed. If the server receives new data before the keep-alive + * timeout has fired, it will reset the regular inactivity timeout, i.e.,`server.timeout`. + * + * A value of `0` will disable the keep-alive timeout behavior on incoming + * connections. + * A value of `0` makes the http server behave similarly to Node.js versions prior + * to 8.0.0, which did not have a keep-alive timeout. + * + * The socket timeout logic is set up on connection, so changing this value only + * affects new connections to the server, not any existing connections. 
+ * @since v8.0.0 + */ + keepAliveTimeout: number; + /** + * Sets the timeout value in milliseconds for receiving the entire request from + * the client. + * + * If the timeout expires, the server responds with status 408 without + * forwarding the request to the request listener and then closes the connection. + * + * It must be set to a non-zero value (e.g. 120 seconds) to protect against + * potential Denial-of-Service attacks in case the server is deployed without a + * reverse proxy in front. + * @since v14.11.0 + */ + requestTimeout: number; + /** + * Closes all connections connected to this server. + * @since v18.2.0 + */ + closeAllConnections(): void; + /** + * Closes all connections connected to this server which are not sending a request or waiting for a response. + * @since v18.2.0 + */ + closeIdleConnections(): void; + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: 'close', listener: () => void): this; + addListener(event: 'connection', listener: (socket: Socket) => void): this; + addListener(event: 'error', listener: (err: Error) => void): this; + addListener(event: 'listening', listener: () => void): this; + addListener(event: 'checkContinue', listener: RequestListener): this; + addListener(event: 'checkExpectation', listener: RequestListener): this; + addListener(event: 'clientError', listener: (err: Error, socket: stream.Duplex) => void): this; + addListener( + event: 'connect', + listener: (req: InstanceType, socket: stream.Duplex, head: Buffer) => void, + ): this; + addListener(event: 'request', listener: RequestListener): this; + addListener( + event: 'upgrade', + listener: (req: InstanceType, socket: stream.Duplex, head: Buffer) => void, + ): this; + emit(event: string, ...args: any[]): boolean; + emit(event: 'close'): boolean; + emit(event: 'connection', socket: Socket): boolean; + emit(event: 'error', err: Error): boolean; + emit(event: 'listening'): boolean; + emit( + event: 'checkContinue', + req: InstanceType, + res: InstanceType & { req: InstanceType }, + ): boolean; + emit( + event: 'checkExpectation', + req: InstanceType, + res: InstanceType & { req: InstanceType }, + ): boolean; + emit(event: 'clientError', err: Error, socket: stream.Duplex): boolean; + emit(event: 'connect', req: InstanceType, socket: stream.Duplex, head: Buffer): boolean; + emit( + event: 'request', + req: InstanceType, + res: InstanceType & { req: InstanceType }, + ): boolean; + emit(event: 'upgrade', req: InstanceType, socket: stream.Duplex, head: Buffer): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: 'close', listener: () => void): this; + on(event: 'connection', listener: (socket: Socket) => void): this; + on(event: 'error', listener: (err: Error) => void): this; + on(event: 'listening', listener: () => void): this; + on(event: 'checkContinue', listener: RequestListener): this; + on(event: 'checkExpectation', listener: RequestListener): this; + on(event: 'clientError', listener: (err: Error, socket: stream.Duplex) => void): this; + on(event: 'connect', listener: (req: InstanceType, socket: stream.Duplex, head: Buffer) => void): this; + on(event: 'request', listener: RequestListener): this; + on(event: 'upgrade', listener: (req: InstanceType, socket: stream.Duplex, head: Buffer) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: 'close', listener: () => void): this; + once(event: 'connection', listener: (socket: Socket) => void): this; + once(event: 'error', 
listener: (err: Error) => void): this; + once(event: 'listening', listener: () => void): this; + once(event: 'checkContinue', listener: RequestListener): this; + once(event: 'checkExpectation', listener: RequestListener): this; + once(event: 'clientError', listener: (err: Error, socket: stream.Duplex) => void): this; + once( + event: 'connect', + listener: (req: InstanceType, socket: stream.Duplex, head: Buffer) => void, + ): this; + once(event: 'request', listener: RequestListener): this; + once( + event: 'upgrade', + listener: (req: InstanceType, socket: stream.Duplex, head: Buffer) => void, + ): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: 'close', listener: () => void): this; + prependListener(event: 'connection', listener: (socket: Socket) => void): this; + prependListener(event: 'error', listener: (err: Error) => void): this; + prependListener(event: 'listening', listener: () => void): this; + prependListener(event: 'checkContinue', listener: RequestListener): this; + prependListener(event: 'checkExpectation', listener: RequestListener): this; + prependListener(event: 'clientError', listener: (err: Error, socket: stream.Duplex) => void): this; + prependListener( + event: 'connect', + listener: (req: InstanceType, socket: stream.Duplex, head: Buffer) => void, + ): this; + prependListener(event: 'request', listener: RequestListener): this; + prependListener( + event: 'upgrade', + listener: (req: InstanceType, socket: stream.Duplex, head: Buffer) => void, + ): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: 'close', listener: () => void): this; + prependOnceListener(event: 'connection', listener: (socket: Socket) => void): this; + prependOnceListener(event: 'error', listener: (err: Error) => void): this; + prependOnceListener(event: 'listening', listener: () => void): this; + prependOnceListener(event: 'checkContinue', listener: RequestListener): this; + prependOnceListener(event: 'checkExpectation', listener: RequestListener): this; + prependOnceListener(event: 'clientError', listener: (err: Error, socket: stream.Duplex) => void): this; + prependOnceListener( + event: 'connect', + listener: (req: InstanceType, socket: stream.Duplex, head: Buffer) => void, + ): this; + prependOnceListener(event: 'request', listener: RequestListener): this; + prependOnceListener( + event: 'upgrade', + listener: (req: InstanceType, socket: stream.Duplex, head: Buffer) => void, + ): this; + } + /** + * This class serves as the parent class of {@link ClientRequest} and {@link ServerResponse}. It is an abstract of outgoing message from + * the perspective of the participants of HTTP transaction. + * @since v0.1.17 + */ + class OutgoingMessage extends stream.Writable { + readonly req: Request; + chunkedEncoding: boolean; + shouldKeepAlive: boolean; + useChunkedEncodingByDefault: boolean; + sendDate: boolean; + /** + * @deprecated Use `writableEnded` instead. + */ + finished: boolean; + /** + * Read-only. `true` if the headers were sent, otherwise `false`. + * @since v0.9.3 + */ + readonly headersSent: boolean; + /** + * Aliases of `outgoingMessage.socket` + * @since v0.3.0 + * @deprecated Since v15.12.0,v14.17.1 - Use `socket` instead. + */ + readonly connection: Socket | null; + /** + * Reference to the underlying socket. Usually, users will not want to access + * this property. + * + * After calling `outgoingMessage.end()`, this property will be nulled. 
+ * @since v0.3.0 + */ + readonly socket: Socket | null; + constructor(); + /** + * Once a socket is associated with the message and is connected,`socket.setTimeout()` will be called with `msecs` as the first parameter. + * @since v0.9.12 + * @param callback Optional function to be called when a timeout occurs. Same as binding to the `timeout` event. + */ + setTimeout(msecs: number, callback?: () => void): this; + /** + * Sets a single header value for the header object. + * @since v0.4.0 + * @param name Header name + * @param value Header value + */ + setHeader(name: string, value: number | string | ReadonlyArray): this; + /** + * Gets the value of HTTP header with the given name. If such a name doesn't + * exist in message, it will be `undefined`. + * @since v0.4.0 + * @param name Name of header + */ + getHeader(name: string): number | string | string[] | undefined; + /** + * Returns a shallow copy of the current outgoing headers. Since a shallow + * copy is used, array values may be mutated without additional calls to + * various header-related HTTP module methods. The keys of the returned + * object are the header names and the values are the respective header + * values. All header names are lowercase. + * + * The object returned by the `outgoingMessage.getHeaders()` method does + * not prototypically inherit from the JavaScript Object. This means that + * typical Object methods such as `obj.toString()`, `obj.hasOwnProperty()`, + * and others are not defined and will not work. + * + * ```js + * outgoingMessage.setHeader('Foo', 'bar'); + * outgoingMessage.setHeader('Set-Cookie', ['foo=bar', 'bar=baz']); + * + * const headers = outgoingMessage.getHeaders(); + * // headers === { foo: 'bar', 'set-cookie': ['foo=bar', 'bar=baz'] } + * ``` + * @since v7.7.0 + */ + getHeaders(): OutgoingHttpHeaders; + /** + * Returns an array of names of headers of the outgoing outgoingMessage. All + * names are lowercase. + * @since v7.7.0 + */ + getHeaderNames(): string[]; + /** + * Returns `true` if the header identified by `name` is currently set in the + * outgoing headers. The header name is case-insensitive. + * + * ```js + * const hasContentType = outgoingMessage.hasHeader('content-type'); + * ``` + * @since v7.7.0 + */ + hasHeader(name: string): boolean; + /** + * Removes a header that is queued for implicit sending. + * + * ```js + * outgoingMessage.removeHeader('Content-Encoding'); + * ``` + * @since v0.4.0 + * @param name Header name + */ + removeHeader(name: string): void; + /** + * Adds HTTP trailers (headers but at the end of the message) to the message. + * + * Trailers are **only** be emitted if the message is chunked encoded. If not, + * the trailer will be silently discarded. + * + * HTTP requires the `Trailer` header to be sent to emit trailers, + * with a list of header fields in its value, e.g. + * + * ```js + * message.writeHead(200, { 'Content-Type': 'text/plain', + * 'Trailer': 'Content-MD5' }); + * message.write(fileData); + * message.addTrailers({ 'Content-MD5': '7895bf4b8828b55ceaf47747b4bca667' }); + * message.end(); + * ``` + * + * Attempting to set a header field name or value that contains invalid characters + * will result in a `TypeError` being thrown. 
+ * @since v0.3.0 + */ + addTrailers(headers: OutgoingHttpHeaders | ReadonlyArray<[string, string]>): void; + /** + * Compulsorily flushes the message headers + * + * For efficiency reason, Node.js normally buffers the message headers + * until `outgoingMessage.end()` is called or the first chunk of message data + * is written. It then tries to pack the headers and data into a single TCP + * packet. + * + * It is usually desired (it saves a TCP round-trip), but not when the first + * data is not sent until possibly much later. `outgoingMessage.flushHeaders()`bypasses the optimization and kickstarts the request. + * @since v1.6.0 + */ + flushHeaders(): void; + } + /** + * This object is created internally by an HTTP server, not by the user. It is + * passed as the second parameter to the `'request'` event. + * @since v0.1.17 + */ + class ServerResponse extends OutgoingMessage { + /** + * When using implicit headers (not calling `response.writeHead()` explicitly), + * this property controls the status code that will be sent to the client when + * the headers get flushed. + * + * ```js + * response.statusCode = 404; + * ``` + * + * After response header was sent to the client, this property indicates the + * status code which was sent out. + * @since v0.4.0 + */ + statusCode: number; + /** + * When using implicit headers (not calling `response.writeHead()` explicitly), + * this property controls the status message that will be sent to the client when + * the headers get flushed. If this is left as `undefined` then the standard + * message for the status code will be used. + * + * ```js + * response.statusMessage = 'Not found'; + * ``` + * + * After response header was sent to the client, this property indicates the + * status message which was sent out. + * @since v0.11.8 + */ + statusMessage: string; + constructor(req: Request); + assignSocket(socket: Socket): void; + detachSocket(socket: Socket): void; + /** + * Sends an HTTP/1.1 100 Continue message to the client, indicating that + * the request body should be sent. See the `'checkContinue'` event on`Server`. + * @since v0.3.0 + */ + writeContinue(callback?: () => void): void; + /** + * Sends an HTTP/1.1 103 Early Hints message to the client with a Link header, + * indicating that the user agent can preload/preconnect the linked resources. + * The `hints` is an object containing the values of headers to be sent with + * early hints message. The optional `callback` argument will be called when + * the response message has been written. + * + * Example: + * + * ```js + * const earlyHintsLink = '; rel=preload; as=style'; + * response.writeEarlyHints({ + * 'link': earlyHintsLink, + * }); + * + * const earlyHintsLinks = [ + * '; rel=preload; as=style', + * '; rel=preload; as=script', + * ]; + * response.writeEarlyHints({ + * 'link': earlyHintsLinks, + * 'x-trace-id': 'id for diagnostics' + * }); + * + * const earlyHintsCallback = () => console.log('early hints message sent'); + * response.writeEarlyHints({ + * 'link': earlyHintsLinks + * }, earlyHintsCallback); + * ``` + * + * @since v18.11.0 + * @param hints An object containing the values of headers + * @param callback Will be called when the response message has been written + */ + writeEarlyHints(hints: Record, callback?: () => void): void; + /** + * Sends a response header to the request. The status code is a 3-digit HTTP + * status code, like `404`. The last argument, `headers`, are the response headers. 
+ * Optionally one can give a human-readable `statusMessage` as the second + * argument. + * + * `headers` may be an `Array` where the keys and values are in the same list. + * It is _not_ a list of tuples. So, the even-numbered offsets are key values, + * and the odd-numbered offsets are the associated values. The array is in the same + * format as `request.rawHeaders`. + * + * Returns a reference to the `ServerResponse`, so that calls can be chained. + * + * ```js + * const body = 'hello world'; + * response + * .writeHead(200, { + * 'Content-Length': Buffer.byteLength(body), + * 'Content-Type': 'text/plain' + * }) + * .end(body); + * ``` + * + * This method must only be called once on a message and it must + * be called before `response.end()` is called. + * + * If `response.write()` or `response.end()` are called before calling + * this, the implicit/mutable headers will be calculated and call this function. + * + * When headers have been set with `response.setHeader()`, they will be merged + * with any headers passed to `response.writeHead()`, with the headers passed + * to `response.writeHead()` given precedence. + * + * If this method is called and `response.setHeader()` has not been called, + * it will directly write the supplied header values onto the network channel + * without caching internally, and the `response.getHeader()` on the header + * will not yield the expected result. If progressive population of headers is + * desired with potential future retrieval and modification, use `response.setHeader()` instead. + * + * ```js + * // Returns content-type = text/plain + * const server = http.createServer((req, res) => { + * res.setHeader('Content-Type', 'text/html'); + * res.setHeader('X-Foo', 'bar'); + * res.writeHead(200, { 'Content-Type': 'text/plain' }); + * res.end('ok'); + * }); + * ``` + * + * `Content-Length` is given in bytes, not characters. Use `Buffer.byteLength()` to determine the length of the body in bytes. Node.js + * does not check whether `Content-Length` and the length of the body which has + * been transmitted are equal or not. + * + * Attempting to set a header field name or value that contains invalid characters + * will result in a `TypeError` being thrown. + * @since v0.1.30 + */ + writeHead( + statusCode: number, + statusMessage?: string, + headers?: OutgoingHttpHeaders | OutgoingHttpHeader[], + ): this; + writeHead(statusCode: number, headers?: OutgoingHttpHeaders | OutgoingHttpHeader[]): this; + /** + * Sends an HTTP/1.1 102 Processing message to the client, indicating that + * the request body should be sent. + * @since v10.0.0 + */ + writeProcessing(): void; + } + interface InformationEvent { + statusCode: number; + statusMessage: string; + httpVersion: string; + httpVersionMajor: number; + httpVersionMinor: number; + headers: IncomingHttpHeaders; + rawHeaders: string[]; + } + /** + * This object is created internally and returned from {@link request}. It + * represents an _in-progress_ request whose header has already been queued. The + * header is still mutable using the `setHeader(name, value)`,`getHeader(name)`, `removeHeader(name)` API. The actual header will + * be sent along with the first data chunk or when calling `request.end()`. + * + * To get the response, add a listener for `'response'` to the request object.`'response'` will be emitted from the request object when the response + * headers have been received. The `'response'` event is executed with one + * argument which is an instance of {@link IncomingMessage}. 
+ * + * During the `'response'` event, one can add listeners to the + * response object; particularly to listen for the `'data'` event. + * + * If no `'response'` handler is added, then the response will be + * entirely discarded. However, if a `'response'` event handler is added, + * then the data from the response object **must** be consumed, either by + * calling `response.read()` whenever there is a `'readable'` event, or + * by adding a `'data'` handler, or by calling the `.resume()` method. + * Until the data is consumed, the `'end'` event will not fire. Also, until + * the data is read it will consume memory that can eventually lead to a + * 'process out of memory' error. + * + * For backward compatibility, `res` will only emit `'error'` if there is an`'error'` listener registered. + * + * Node.js does not check whether Content-Length and the length of the + * body which has been transmitted are equal or not. + * @since v0.1.17 + */ + class ClientRequest extends OutgoingMessage { + /** + * The `request.aborted` property will be `true` if the request has + * been aborted. + * @since v0.11.14 + * @deprecated Since v17.0.0,v16.12.0 - Check `destroyed` instead. + */ + aborted: boolean; + /** + * The request host. + * @since v14.5.0, v12.19.0 + */ + host: string; + /** + * The request protocol. + * @since v14.5.0, v12.19.0 + */ + protocol: string; + /** + * When sending request through a keep-alive enabled agent, the underlying socket + * might be reused. But if server closes connection at unfortunate time, client + * may run into a 'ECONNRESET' error. + * + * ```js + * const http = require('http'); + * + * // Server has a 5 seconds keep-alive timeout by default + * http + * .createServer((req, res) => { + * res.write('hello\n'); + * res.end(); + * }) + * .listen(3000); + * + * setInterval(() => { + * // Adapting a keep-alive agent + * http.get('http://localhost:3000', { agent }, (res) => { + * res.on('data', (data) => { + * // Do nothing + * }); + * }); + * }, 5000); // Sending request on 5s interval so it's easy to hit idle timeout + * ``` + * + * By marking a request whether it reused socket or not, we can do + * automatic error retry base on it. + * + * ```js + * const http = require('http'); + * const agent = new http.Agent({ keepAlive: true }); + * + * function retriableRequest() { + * const req = http + * .get('http://localhost:3000', { agent }, (res) => { + * // ... + * }) + * .on('error', (err) => { + * // Check if retry is needed + * if (req.reusedSocket && err.code === 'ECONNRESET') { + * retriableRequest(); + * } + * }); + * } + * + * retriableRequest(); + * ``` + * @since v13.0.0, v12.16.0 + */ + reusedSocket: boolean; + /** + * Limits maximum response headers count. If set to 0, no limit will be applied. + */ + maxHeadersCount: number; + constructor(url: string | URL | ClientRequestArgs, cb?: (res: IncomingMessage) => void); + /** + * The request method. + * @since v0.1.97 + */ + method: string; + /** + * The request path. + * @since v0.4.0 + */ + path: string; + /** + * Marks the request as aborting. Calling this will cause remaining data + * in the response to be dropped and the socket to be destroyed. + * @since v0.3.8 + * @deprecated Since v14.1.0,v13.14.0 - Use `destroy` instead. + */ + abort(): void; + onSocket(socket: Socket): void; + /** + * Once a socket is assigned to this request and is connected `socket.setTimeout()` will be called. + * @since v0.5.9 + * @param timeout Milliseconds before a request times out. 
+ * @param callback Optional function to be called when a timeout occurs. Same as binding to the `'timeout'` event. + */ + setTimeout(timeout: number, callback?: () => void): this; + /** + * Once a socket is assigned to this request and is connected `socket.setNoDelay()` will be called. + * @since v0.5.9 + */ + setNoDelay(noDelay?: boolean): void; + /** + * Once a socket is assigned to this request and is connected `socket.setKeepAlive()` will be called. + * @since v0.5.9 + */ + setSocketKeepAlive(enable?: boolean, initialDelay?: number): void; + /** + * Returns an array containing the unique names of the current outgoing raw + * headers. Header names are returned with their exact casing being set. + * + * ```js + * request.setHeader('Foo', 'bar'); + * request.setHeader('Set-Cookie', ['foo=bar', 'bar=baz']); + * + * const headerNames = request.getRawHeaderNames(); + * // headerNames === ['Foo', 'Set-Cookie'] + * ``` + * @since v15.13.0, v14.17.0 + */ + getRawHeaderNames(): string[]; + /** + * @deprecated + */ + addListener(event: 'abort', listener: () => void): this; + addListener( + event: 'connect', + listener: (response: IncomingMessage, socket: Socket, head: Buffer) => void, + ): this; + addListener(event: 'continue', listener: () => void): this; + addListener(event: 'information', listener: (info: InformationEvent) => void): this; + addListener(event: 'response', listener: (response: IncomingMessage) => void): this; + addListener(event: 'socket', listener: (socket: Socket) => void): this; + addListener(event: 'timeout', listener: () => void): this; + addListener( + event: 'upgrade', + listener: (response: IncomingMessage, socket: Socket, head: Buffer) => void, + ): this; + addListener(event: 'close', listener: () => void): this; + addListener(event: 'drain', listener: () => void): this; + addListener(event: 'error', listener: (err: Error) => void): this; + addListener(event: 'finish', listener: () => void): this; + addListener(event: 'pipe', listener: (src: stream.Readable) => void): this; + addListener(event: 'unpipe', listener: (src: stream.Readable) => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + /** + * @deprecated + */ + on(event: 'abort', listener: () => void): this; + on(event: 'connect', listener: (response: IncomingMessage, socket: Socket, head: Buffer) => void): this; + on(event: 'continue', listener: () => void): this; + on(event: 'information', listener: (info: InformationEvent) => void): this; + on(event: 'response', listener: (response: IncomingMessage) => void): this; + on(event: 'socket', listener: (socket: Socket) => void): this; + on(event: 'timeout', listener: () => void): this; + on(event: 'upgrade', listener: (response: IncomingMessage, socket: Socket, head: Buffer) => void): this; + on(event: 'close', listener: () => void): this; + on(event: 'drain', listener: () => void): this; + on(event: 'error', listener: (err: Error) => void): this; + on(event: 'finish', listener: () => void): this; + on(event: 'pipe', listener: (src: stream.Readable) => void): this; + on(event: 'unpipe', listener: (src: stream.Readable) => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + /** + * @deprecated + */ + once(event: 'abort', listener: () => void): this; + once(event: 'connect', listener: (response: IncomingMessage, socket: Socket, head: Buffer) => void): this; + once(event: 'continue', listener: () => void): this; + once(event: 'information', listener: (info: InformationEvent) => void): this; 
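A minimal sketch of how the `ClientRequest` hooks described above are typically wired together, assuming a hypothetical local server on port 3000; this is an illustrative usage example, not part of the declaration file:

```js
const http = require('http');

// Issue a request and attach the per-request hooks documented above.
const req = http.request({ hostname: 'localhost', port: 3000, path: '/', method: 'GET' }, (res) => {
  res.setEncoding('utf8');
  res.on('data', (chunk) => console.log(`BODY: ${chunk}`));
  res.on('end', () => console.log('response finished'));
});

// Destroy the request if no response arrives within 2 seconds.
req.setTimeout(2000, () => req.destroy(new Error('request timed out')));
req.setNoDelay(true);
req.on('error', (err) => console.error(`request failed: ${err.message}`));
req.end();
```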
+ once(event: 'response', listener: (response: IncomingMessage) => void): this; + once(event: 'socket', listener: (socket: Socket) => void): this; + once(event: 'timeout', listener: () => void): this; + once(event: 'upgrade', listener: (response: IncomingMessage, socket: Socket, head: Buffer) => void): this; + once(event: 'close', listener: () => void): this; + once(event: 'drain', listener: () => void): this; + once(event: 'error', listener: (err: Error) => void): this; + once(event: 'finish', listener: () => void): this; + once(event: 'pipe', listener: (src: stream.Readable) => void): this; + once(event: 'unpipe', listener: (src: stream.Readable) => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + /** + * @deprecated + */ + prependListener(event: 'abort', listener: () => void): this; + prependListener( + event: 'connect', + listener: (response: IncomingMessage, socket: Socket, head: Buffer) => void, + ): this; + prependListener(event: 'continue', listener: () => void): this; + prependListener(event: 'information', listener: (info: InformationEvent) => void): this; + prependListener(event: 'response', listener: (response: IncomingMessage) => void): this; + prependListener(event: 'socket', listener: (socket: Socket) => void): this; + prependListener(event: 'timeout', listener: () => void): this; + prependListener( + event: 'upgrade', + listener: (response: IncomingMessage, socket: Socket, head: Buffer) => void, + ): this; + prependListener(event: 'close', listener: () => void): this; + prependListener(event: 'drain', listener: () => void): this; + prependListener(event: 'error', listener: (err: Error) => void): this; + prependListener(event: 'finish', listener: () => void): this; + prependListener(event: 'pipe', listener: (src: stream.Readable) => void): this; + prependListener(event: 'unpipe', listener: (src: stream.Readable) => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + /** + * @deprecated + */ + prependOnceListener(event: 'abort', listener: () => void): this; + prependOnceListener( + event: 'connect', + listener: (response: IncomingMessage, socket: Socket, head: Buffer) => void, + ): this; + prependOnceListener(event: 'continue', listener: () => void): this; + prependOnceListener(event: 'information', listener: (info: InformationEvent) => void): this; + prependOnceListener(event: 'response', listener: (response: IncomingMessage) => void): this; + prependOnceListener(event: 'socket', listener: (socket: Socket) => void): this; + prependOnceListener(event: 'timeout', listener: () => void): this; + prependOnceListener( + event: 'upgrade', + listener: (response: IncomingMessage, socket: Socket, head: Buffer) => void, + ): this; + prependOnceListener(event: 'close', listener: () => void): this; + prependOnceListener(event: 'drain', listener: () => void): this; + prependOnceListener(event: 'error', listener: (err: Error) => void): this; + prependOnceListener(event: 'finish', listener: () => void): this; + prependOnceListener(event: 'pipe', listener: (src: stream.Readable) => void): this; + prependOnceListener(event: 'unpipe', listener: (src: stream.Readable) => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + /** + * An `IncomingMessage` object is created by {@link Server} or {@link ClientRequest} and passed as the first argument to the `'request'` and `'response'` event respectively. 
It may be used to + * access response + * status, headers and data. + * + * Different from its `socket` value which is a subclass of `stream.Duplex`, the`IncomingMessage` itself extends `stream.Readable` and is created separately to + * parse and emit the incoming HTTP headers and payload, as the underlying socket + * may be reused multiple times in case of keep-alive. + * @since v0.1.17 + */ + class IncomingMessage extends stream.Readable { + constructor(socket: Socket); + /** + * The `message.aborted` property will be `true` if the request has + * been aborted. + * @since v10.1.0 + * @deprecated Since v17.0.0,v16.12.0 - Check `message.destroyed` from stream.Readable. + */ + aborted: boolean; + /** + * In case of server request, the HTTP version sent by the client. In the case of + * client response, the HTTP version of the connected-to server. + * Probably either `'1.1'` or `'1.0'`. + * + * Also `message.httpVersionMajor` is the first integer and`message.httpVersionMinor` is the second. + * @since v0.1.1 + */ + httpVersion: string; + httpVersionMajor: number; + httpVersionMinor: number; + /** + * The `message.complete` property will be `true` if a complete HTTP message has + * been received and successfully parsed. + * + * This property is particularly useful as a means of determining if a client or + * server fully transmitted a message before a connection was terminated: + * + * ```js + * const req = http.request({ + * host: '127.0.0.1', + * port: 8080, + * method: 'POST' + * }, (res) => { + * res.resume(); + * res.on('end', () => { + * if (!res.complete) + * console.error( + * 'The connection was terminated while the message was still being sent'); + * }); + * }); + * ``` + * @since v0.3.0 + */ + complete: boolean; + /** + * Alias for `message.socket`. + * @since v0.1.90 + * @deprecated Since v16.0.0 - Use `socket`. + */ + connection: Socket; + /** + * The `net.Socket` object associated with the connection. + * + * With HTTPS support, use `request.socket.getPeerCertificate()` to obtain the + * client's authentication details. + * + * This property is guaranteed to be an instance of the `net.Socket` class, + * a subclass of `stream.Duplex`, unless the user specified a socket + * type other than `net.Socket` or internally nulled. + * @since v0.3.0 + */ + socket: Socket; + /** + * The request/response headers object. + * + * Key-value pairs of header names and values. Header names are lower-cased. + * + * ```js + * // Prints something like: + * // + * // { 'user-agent': 'curl/7.22.0', + * // host: '127.0.0.1:8000', + * // accept: '*' } + * console.log(request.getHeaders()); + * ``` + * + * Duplicates in raw headers are handled in the following ways, depending on the + * header name: + * + * * Duplicates of `age`, `authorization`, `content-length`, `content-type`,`etag`, `expires`, `from`, `host`, `if-modified-since`, `if-unmodified-since`,`last-modified`, `location`, + * `max-forwards`, `proxy-authorization`, `referer`,`retry-after`, `server`, or `user-agent` are discarded. + * * `set-cookie` is always an array. Duplicates are added to the array. + * * For duplicate `cookie` headers, the values are joined together with '; '. + * * For all other headers, the values are joined together with ', '. + * @since v0.1.5 + */ + headers: IncomingHttpHeaders; + /** + * The raw request/response headers list exactly as they were received. + * + * The keys and values are in the same list. It is _not_ a + * list of tuples. 
So, the even-numbered offsets are key values, and the + * odd-numbered offsets are the associated values. + * + * Header names are not lowercased, and duplicates are not merged. + * + * ```js + * // Prints something like: + * // + * // [ 'user-agent', + * // 'this is invalid because there can be only one', + * // 'User-Agent', + * // 'curl/7.22.0', + * // 'Host', + * // '127.0.0.1:8000', + * // 'ACCEPT', + * // '*' ] + * console.log(request.rawHeaders); + * ``` + * @since v0.11.6 + */ + rawHeaders: string[]; + /** + * The request/response trailers object. Only populated at the `'end'` event. + * @since v0.3.0 + */ + trailers: NodeJS.Dict; + /** + * The raw request/response trailer keys and values exactly as they were + * received. Only populated at the `'end'` event. + * @since v0.11.6 + */ + rawTrailers: string[]; + /** + * Calls `message.socket.setTimeout(msecs, callback)`. + * @since v0.5.9 + */ + setTimeout(msecs: number, callback?: () => void): this; + /** + * **Only valid for request obtained from {@link Server}.** + * + * The request method as a string. Read only. Examples: `'GET'`, `'DELETE'`. + * @since v0.1.1 + */ + method?: string | undefined; + /** + * **Only valid for request obtained from {@link Server}.** + * + * Request URL string. This contains only the URL that is present in the actual + * HTTP request. Take the following request: + * + * ```http + * GET /status?name=ryan HTTP/1.1 + * Accept: text/plain + * ``` + * + * To parse the URL into its parts: + * + * ```js + * new URL(request.url, `http://${request.getHeaders().host}`); + * ``` + * + * When `request.url` is `'/status?name=ryan'` and`request.getHeaders().host` is `'localhost:3000'`: + * + * ```console + * $ node + * > new URL(request.url, `http://${request.getHeaders().host}`) + * URL { + * href: 'http://localhost:3000/status?name=ryan', + * origin: 'http://localhost:3000', + * protocol: 'http:', + * username: '', + * password: '', + * host: 'localhost:3000', + * hostname: 'localhost', + * port: '3000', + * pathname: '/status', + * search: '?name=ryan', + * searchParams: URLSearchParams { 'name' => 'ryan' }, + * hash: '' + * } + * ``` + * @since v0.1.90 + */ + url?: string | undefined; + /** + * **Only valid for response obtained from {@link ClientRequest}.** + * + * The 3-digit HTTP response status code. E.G. `404`. + * @since v0.1.1 + */ + statusCode?: number | undefined; + /** + * **Only valid for response obtained from {@link ClientRequest}.** + * + * The HTTP response status message (reason phrase). E.G. `OK` or `Internal Server Error`. + * @since v0.11.10 + */ + statusMessage?: string | undefined; + /** + * Calls `destroy()` on the socket that received the `IncomingMessage`. If `error`is provided, an `'error'` event is emitted on the socket and `error` is passed + * as an argument to any listeners on the event. + * @since v0.3.0 + */ + destroy(error?: Error): this; + } + interface AgentOptions extends Partial { + /** + * Keep sockets around in a pool to be used by other requests in the future. Default = false + */ + keepAlive?: boolean | undefined; + /** + * When using HTTP KeepAlive, how often to send TCP KeepAlive packets over sockets being kept alive. Default = 1000. + * Only relevant if keepAlive is set to true. + */ + keepAliveMsecs?: number | undefined; + /** + * Maximum number of sockets to allow per host. Default for Node 0.10 is 5, default for Node 0.12 is Infinity + */ + maxSockets?: number | undefined; + /** + * Maximum number of sockets allowed for all hosts in total. 
Each request will use a new socket until the maximum is reached. Default: Infinity. + */ + maxTotalSockets?: number | undefined; + /** + * Maximum number of sockets to leave open in a free state. Only relevant if keepAlive is set to true. Default = 256. + */ + maxFreeSockets?: number | undefined; + /** + * Socket timeout in milliseconds. This will set the timeout after the socket is connected. + */ + timeout?: number | undefined; + /** + * Scheduling strategy to apply when picking the next free socket to use. + * @default `lifo` + */ + scheduling?: 'fifo' | 'lifo' | undefined; + } + /** + * An `Agent` is responsible for managing connection persistence + * and reuse for HTTP clients. It maintains a queue of pending requests + * for a given host and port, reusing a single socket connection for each + * until the queue is empty, at which time the socket is either destroyed + * or put into a pool where it is kept to be used again for requests to the + * same host and port. Whether it is destroyed or pooled depends on the`keepAlive` `option`. + * + * Pooled connections have TCP Keep-Alive enabled for them, but servers may + * still close idle connections, in which case they will be removed from the + * pool and a new connection will be made when a new HTTP request is made for + * that host and port. Servers may also refuse to allow multiple requests + * over the same connection, in which case the connection will have to be + * remade for every request and cannot be pooled. The `Agent` will still make + * the requests to that server, but each one will occur over a new connection. + * + * When a connection is closed by the client or the server, it is removed + * from the pool. Any unused sockets in the pool will be unrefed so as not + * to keep the Node.js process running when there are no outstanding requests. + * (see `socket.unref()`). + * + * It is good practice, to `destroy()` an `Agent` instance when it is no + * longer in use, because unused sockets consume OS resources. + * + * Sockets are removed from an agent when the socket emits either + * a `'close'` event or an `'agentRemove'` event. When intending to keep one + * HTTP request open for a long time without keeping it in the agent, something + * like the following may be done: + * + * ```js + * http.get(options, (res) => { + * // Do stuff + * }).on('socket', (socket) => { + * socket.emit('agentRemove'); + * }); + * ``` + * + * An agent may also be used for an individual request. By providing`{agent: false}` as an option to the `http.get()` or `http.request()`functions, a one-time use `Agent` with default options + * will be used + * for the client connection. + * + * `agent:false`: + * + * ```js + * http.get({ + * hostname: 'localhost', + * port: 80, + * path: '/', + * agent: false // Create a new agent just for this one request + * }, (res) => { + * // Do stuff with response + * }); + * ``` + * @since v0.3.4 + */ + class Agent { + /** + * By default set to 256\. For agents with `keepAlive` enabled, this + * sets the maximum number of sockets that will be left open in the free + * state. + * @since v0.11.7 + */ + maxFreeSockets: number; + /** + * By default set to `Infinity`. Determines how many concurrent sockets the agent + * can have open per origin. Origin is the returned value of `agent.getName()`. + * @since v0.3.6 + */ + maxSockets: number; + /** + * By default set to `Infinity`. Determines how many concurrent sockets the agent + * can have open. Unlike `maxSockets`, this parameter applies across all origins. 
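A short sketch of the pooling behaviour these `Agent` options describe, again assuming a hypothetical server on localhost:3000; `reusedSocket` (documented earlier on `ClientRequest`) shows whether a pooled socket was picked up:

```js
const http = require('http');

// A shared keep-alive agent: sockets are pooled and reused, capped per origin.
const agent = new http.Agent({ keepAlive: true, maxSockets: 10, maxFreeSockets: 5 });

function fetchOnce(path) {
  const req = http.get({ hostname: 'localhost', port: 3000, path, agent }, (res) => {
    res.resume(); // consume the body so the socket can return to the pool
    res.on('end', () => console.log(`${path} done (reused socket: ${req.reusedSocket})`));
  });
  req.on('error', (err) => console.error(err.message));
}

fetchOnce('/a');
fetchOnce('/b');

// Call agent.destroy() once the pool is no longer needed to free idle sockets.
```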
+ * @since v14.5.0, v12.19.0 + */ + maxTotalSockets: number; + /** + * An object which contains arrays of sockets currently awaiting use by + * the agent when `keepAlive` is enabled. Do not modify. + * + * Sockets in the `freeSockets` list will be automatically destroyed and + * removed from the array on `'timeout'`. + * @since v0.11.4 + */ + readonly freeSockets: NodeJS.ReadOnlyDict; + /** + * An object which contains arrays of sockets currently in use by the + * agent. Do not modify. + * @since v0.3.6 + */ + readonly sockets: NodeJS.ReadOnlyDict; + /** + * An object which contains queues of requests that have not yet been assigned to + * sockets. Do not modify. + * @since v0.5.9 + */ + readonly requests: NodeJS.ReadOnlyDict; + constructor(opts?: AgentOptions); + /** + * Destroy any sockets that are currently in use by the agent. + * + * It is usually not necessary to do this. However, if using an + * agent with `keepAlive` enabled, then it is best to explicitly shut down + * the agent when it is no longer needed. Otherwise, + * sockets might stay open for quite a long time before the server + * terminates them. + * @since v0.11.4 + */ + destroy(): void; + } + const METHODS: string[]; + const STATUS_CODES: { + [errorCode: number]: string | undefined; + [errorCode: string]: string | undefined; + }; + /** + * Returns a new instance of {@link Server}. + * + * The `requestListener` is a function which is automatically + * added to the `'request'` event. + * @since v0.1.13 + */ + function createServer< + Request extends typeof IncomingMessage = typeof IncomingMessage, + Response extends typeof ServerResponse = typeof ServerResponse, + >(requestListener?: RequestListener): Server; + function createServer< + Request extends typeof IncomingMessage = typeof IncomingMessage, + Response extends typeof ServerResponse = typeof ServerResponse, + >( + options: ServerOptions, + requestListener?: RequestListener, + ): Server; + // although RequestOptions are passed as ClientRequestArgs to ClientRequest directly, + // create interface RequestOptions would make the naming more clear to developers + interface RequestOptions extends ClientRequestArgs {} + /** + * `options` in `socket.connect()` are also supported. + * + * Node.js maintains several connections per server to make HTTP requests. + * This function allows one to transparently issue requests. + * + * `url` can be a string or a `URL` object. If `url` is a + * string, it is automatically parsed with `new URL()`. If it is a `URL` object, it will be automatically converted to an ordinary `options` object. + * + * If both `url` and `options` are specified, the objects are merged, with the`options` properties taking precedence. + * + * The optional `callback` parameter will be added as a one-time listener for + * the `'response'` event. + * + * `http.request()` returns an instance of the {@link ClientRequest} class. The `ClientRequest` instance is a writable stream. If one needs to + * upload a file with a POST request, then write to the `ClientRequest` object. + * + * ```js + * const http = require('http'); + * + * const postData = JSON.stringify({ + * 'msg': 'Hello World!' 
+ * }); + * + * const options = { + * hostname: 'www.google.com', + * port: 80, + * path: '/upload', + * method: 'POST', + * headers: { + * 'Content-Type': 'application/json', + * 'Content-Length': Buffer.byteLength(postData) + * } + * }; + * + * const req = http.request(options, (res) => { + * console.log(`STATUS: ${res.statusCode}`); + * console.log(`HEADERS: ${JSON.stringify(res.headers)}`); + * res.setEncoding('utf8'); + * res.on('data', (chunk) => { + * console.log(`BODY: ${chunk}`); + * }); + * res.on('end', () => { + * console.log('No more data in response.'); + * }); + * }); + * + * req.on('error', (e) => { + * console.error(`problem with request: ${e.message}`); + * }); + * + * // Write data to request body + * req.write(postData); + * req.end(); + * ``` + * + * In the example `req.end()` was called. With `http.request()` one + * must always call `req.end()` to signify the end of the request - + * even if there is no data being written to the request body. + * + * If any error is encountered during the request (be that with DNS resolution, + * TCP level errors, or actual HTTP parse errors) an `'error'` event is emitted + * on the returned request object. As with all `'error'` events, if no listeners + * are registered the error will be thrown. + * + * There are a few special headers that should be noted. + * + * * Sending a 'Connection: keep-alive' will notify Node.js that the connection to + * the server should be persisted until the next request. + * * Sending a 'Content-Length' header will disable the default chunked encoding. + * * Sending an 'Expect' header will immediately send the request headers. + * Usually, when sending 'Expect: 100-continue', both a timeout and a listener + * for the `'continue'` event should be set. See RFC 2616 Section 8.2.3 for more + * information. + * * Sending an Authorization header will override using the `auth` option + * to compute basic authentication. + * + * Example using a `URL` as `options`: + * + * ```js + * const options = new URL('http://abc:xyz@example.com'); + * + * const req = http.request(options, (res) => { + * // ... + * }); + * ``` + * + * In a successful request, the following events will be emitted in the following + * order: + * + * * `'socket'` + * * `'response'` + * * `'data'` any number of times, on the `res` object + * (`'data'` will not be emitted at all if the response body is empty, for + * instance, in most redirects) + * * `'end'` on the `res` object + * * `'close'` + * + * In the case of a connection error, the following events will be emitted: + * + * * `'socket'` + * * `'error'` + * * `'close'` + * + * In the case of a premature connection close before the response is received, + * the following events will be emitted in the following order: + * + * * `'socket'` + * * `'error'` with an error with message `'Error: socket hang up'` and code`'ECONNRESET'` + * * `'close'` + * + * In the case of a premature connection close after the response is received, + * the following events will be emitted in the following order: + * + * * `'socket'` + * * `'response'` + * * `'data'` any number of times, on the `res` object + * * (connection closed here) + * * `'aborted'` on the `res` object + * * `'error'` on the `res` object with an error with message`'Error: aborted'` and code `'ECONNRESET'`. 
+ * * `'close'` + * * `'close'` on the `res` object + * + * If `req.destroy()` is called before a socket is assigned, the following + * events will be emitted in the following order: + * + * * (`req.destroy()` called here) + * * `'error'` with an error with message `'Error: socket hang up'` and code`'ECONNRESET'` + * * `'close'` + * + * If `req.destroy()` is called before the connection succeeds, the following + * events will be emitted in the following order: + * + * * `'socket'` + * * (`req.destroy()` called here) + * * `'error'` with an error with message `'Error: socket hang up'` and code`'ECONNRESET'` + * * `'close'` + * + * If `req.destroy()` is called after the response is received, the following + * events will be emitted in the following order: + * + * * `'socket'` + * * `'response'` + * * `'data'` any number of times, on the `res` object + * * (`req.destroy()` called here) + * * `'aborted'` on the `res` object + * * `'error'` on the `res` object with an error with message`'Error: aborted'` and code `'ECONNRESET'`. + * * `'close'` + * * `'close'` on the `res` object + * + * If `req.abort()` is called before a socket is assigned, the following + * events will be emitted in the following order: + * + * * (`req.abort()` called here) + * * `'abort'` + * * `'close'` + * + * If `req.abort()` is called before the connection succeeds, the following + * events will be emitted in the following order: + * + * * `'socket'` + * * (`req.abort()` called here) + * * `'abort'` + * * `'error'` with an error with message `'Error: socket hang up'` and code`'ECONNRESET'` + * * `'close'` + * + * If `req.abort()` is called after the response is received, the following + * events will be emitted in the following order: + * + * * `'socket'` + * * `'response'` + * * `'data'` any number of times, on the `res` object + * * (`req.abort()` called here) + * * `'abort'` + * * `'aborted'` on the `res` object + * * `'error'` on the `res` object with an error with message`'Error: aborted'` and code `'ECONNRESET'`. + * * `'close'` + * * `'close'` on the `res` object + * + * Setting the `timeout` option or using the `setTimeout()` function will + * not abort the request or do anything besides add a `'timeout'` event. + * + * Passing an `AbortSignal` and then calling `abort` on the corresponding`AbortController` will behave the same way as calling `.destroy()` on the + * request itself. + * @since v0.3.6 + */ + function request(options: RequestOptions | string | URL, callback?: (res: IncomingMessage) => void): ClientRequest; + function request( + url: string | URL, + options: RequestOptions, + callback?: (res: IncomingMessage) => void, + ): ClientRequest; + /** + * Since most requests are GET requests without bodies, Node.js provides this + * convenience method. The only difference between this method and {@link request} is that it sets the method to GET and calls `req.end()`automatically. The callback must take care to consume the + * response + * data for reasons stated in {@link ClientRequest} section. + * + * The `callback` is invoked with a single argument that is an instance of {@link IncomingMessage}. + * + * JSON fetching example: + * + * ```js + * http.get('http://localhost:8000/', (res) => { + * const { statusCode } = res; + * const contentType = res.headers['content-type']; + * + * let error; + * // Any 2xx status code signals a successful response but + * // here we're only checking for 200. 
+ * if (statusCode !== 200) { + * error = new Error('Request Failed.\n' + + * `Status Code: ${statusCode}`); + * } else if (!/^application\/json/.test(contentType)) { + * error = new Error('Invalid content-type.\n' + + * `Expected application/json but received ${contentType}`); + * } + * if (error) { + * console.error(error.message); + * // Consume response data to free up memory + * res.resume(); + * return; + * } + * + * res.setEncoding('utf8'); + * let rawData = ''; + * res.on('data', (chunk) => { rawData += chunk; }); + * res.on('end', () => { + * try { + * const parsedData = JSON.parse(rawData); + * console.log(parsedData); + * } catch (e) { + * console.error(e.message); + * } + * }); + * }).on('error', (e) => { + * console.error(`Got error: ${e.message}`); + * }); + * + * // Create a local server to receive data from + * const server = http.createServer((req, res) => { + * res.writeHead(200, { 'Content-Type': 'application/json' }); + * res.end(JSON.stringify({ + * data: 'Hello World!' + * })); + * }); + * + * server.listen(8000); + * ``` + * @since v0.3.6 + * @param options Accepts the same `options` as {@link request}, with the `method` always set to `GET`. Properties that are inherited from the prototype are ignored. + */ + function get(options: RequestOptions | string | URL, callback?: (res: IncomingMessage) => void): ClientRequest; + function get(url: string | URL, options: RequestOptions, callback?: (res: IncomingMessage) => void): ClientRequest; + + /** + * Performs the low-level validations on the provided name that are done when `res.setHeader(name, value)` is called. + * Passing illegal value as name will result in a TypeError being thrown, identified by `code: 'ERR_INVALID_HTTP_TOKEN'`. + * @param name Header name + * @since v14.3.0 + */ + function validateHeaderName(name: string): void; + /** + * Performs the low-level validations on the provided value that are done when `res.setHeader(name, value)` is called. + * Passing illegal value as value will result in a TypeError being thrown. + * - Undefined value error is identified by `code: 'ERR_HTTP_INVALID_HEADER_VALUE'`. + * - Invalid value character error is identified by `code: 'ERR_INVALID_CHAR'`. + * @param name Header name + * @param value Header value + * @since v14.3.0 + */ + function validateHeaderValue(name: string, value: string): void; + + let globalAgent: Agent; + /** + * Read-only property specifying the maximum allowed size of HTTP headers in bytes. + * Defaults to 16KB. Configurable using the `--max-http-header-size` CLI option. + */ + const maxHeaderSize: number; +} +declare module 'node:http' { + export * from 'http'; +} diff --git a/node_modules/@types/node/http2.d.ts b/node_modules/@types/node/http2.d.ts new file mode 100755 index 0000000..0e36826 --- /dev/null +++ b/node_modules/@types/node/http2.d.ts @@ -0,0 +1,2134 @@ +/** + * The `http2` module provides an implementation of the [HTTP/2](https://tools.ietf.org/html/rfc7540) protocol. 
It + * can be accessed using: + * + * ```js + * const http2 = require('http2'); + * ``` + * @since v8.4.0 + * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/http2.js) + */ +declare module 'http2' { + import EventEmitter = require('node:events'); + import * as fs from 'node:fs'; + import * as net from 'node:net'; + import * as stream from 'node:stream'; + import * as tls from 'node:tls'; + import * as url from 'node:url'; + import { IncomingHttpHeaders as Http1IncomingHttpHeaders, OutgoingHttpHeaders, IncomingMessage, ServerResponse } from 'node:http'; + export { OutgoingHttpHeaders } from 'node:http'; + export interface IncomingHttpStatusHeader { + ':status'?: number | undefined; + } + export interface IncomingHttpHeaders extends Http1IncomingHttpHeaders { + ':path'?: string | undefined; + ':method'?: string | undefined; + ':authority'?: string | undefined; + ':scheme'?: string | undefined; + } + // Http2Stream + export interface StreamPriorityOptions { + exclusive?: boolean | undefined; + parent?: number | undefined; + weight?: number | undefined; + silent?: boolean | undefined; + } + export interface StreamState { + localWindowSize?: number | undefined; + state?: number | undefined; + localClose?: number | undefined; + remoteClose?: number | undefined; + sumDependencyWeight?: number | undefined; + weight?: number | undefined; + } + export interface ServerStreamResponseOptions { + endStream?: boolean | undefined; + waitForTrailers?: boolean | undefined; + } + export interface StatOptions { + offset: number; + length: number; + } + export interface ServerStreamFileResponseOptions { + statCheck?(stats: fs.Stats, headers: OutgoingHttpHeaders, statOptions: StatOptions): void | boolean; + waitForTrailers?: boolean | undefined; + offset?: number | undefined; + length?: number | undefined; + } + export interface ServerStreamFileResponseOptionsWithError extends ServerStreamFileResponseOptions { + onError?(err: NodeJS.ErrnoException): void; + } + export interface Http2Stream extends stream.Duplex { + /** + * Set to `true` if the `Http2Stream` instance was aborted abnormally. When set, + * the `'aborted'` event will have been emitted. + * @since v8.4.0 + */ + readonly aborted: boolean; + /** + * This property shows the number of characters currently buffered to be written. + * See `net.Socket.bufferSize` for details. + * @since v11.2.0, v10.16.0 + */ + readonly bufferSize: number; + /** + * Set to `true` if the `Http2Stream` instance has been closed. + * @since v9.4.0 + */ + readonly closed: boolean; + /** + * Set to `true` if the `Http2Stream` instance has been destroyed and is no longer + * usable. + * @since v8.4.0 + */ + readonly destroyed: boolean; + /** + * Set to `true` if the `END_STREAM` flag was set in the request or response + * HEADERS frame received, indicating that no additional data should be received + * and the readable side of the `Http2Stream` will be closed. + * @since v10.11.0 + */ + readonly endAfterHeaders: boolean; + /** + * The numeric stream identifier of this `Http2Stream` instance. Set to `undefined`if the stream identifier has not yet been assigned. + * @since v8.4.0 + */ + readonly id?: number | undefined; + /** + * Set to `true` if the `Http2Stream` instance has not yet been assigned a + * numeric stream identifier. 
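Editor's note: as an illustration of the read-only `Http2Stream` properties documented here (`id`, `pending`, `endAfterHeaders`, the owning `session`), a server-side `'stream'` handler might inspect them before responding. A sketch only; the port is chosen arbitrarily:

```js
const http2 = require('http2');

const server = http2.createServer();
server.on('stream', (stream, headers) => {
  // Read-only snapshots of the stream's state at this point in time.
  console.log('stream id:', stream.id);                    // numeric identifier
  console.log('pending:', stream.pending);                 // false once an id is assigned
  console.log('endAfterHeaders:', stream.endAfterHeaders); // true if END_STREAM came with HEADERS
  console.log('session state:', stream.session.state);

  stream.respond({ ':status': 200, 'content-type': 'text/plain; charset=utf-8' });
  stream.end('ok');
});
server.listen(8000);
```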
+ * @since v9.4.0 + */ + readonly pending: boolean; + /** + * Set to the `RST_STREAM` `error code` reported when the `Http2Stream` is + * destroyed after either receiving an `RST_STREAM` frame from the connected peer, + * calling `http2stream.close()`, or `http2stream.destroy()`. Will be`undefined` if the `Http2Stream` has not been closed. + * @since v8.4.0 + */ + readonly rstCode: number; + /** + * An object containing the outbound headers sent for this `Http2Stream`. + * @since v9.5.0 + */ + readonly sentHeaders: OutgoingHttpHeaders; + /** + * An array of objects containing the outbound informational (additional) headers + * sent for this `Http2Stream`. + * @since v9.5.0 + */ + readonly sentInfoHeaders?: OutgoingHttpHeaders[] | undefined; + /** + * An object containing the outbound trailers sent for this `HttpStream`. + * @since v9.5.0 + */ + readonly sentTrailers?: OutgoingHttpHeaders | undefined; + /** + * A reference to the `Http2Session` instance that owns this `Http2Stream`. The + * value will be `undefined` after the `Http2Stream` instance is destroyed. + * @since v8.4.0 + */ + readonly session: Http2Session; + /** + * Provides miscellaneous information about the current state of the`Http2Stream`. + * + * A current state of this `Http2Stream`. + * @since v8.4.0 + */ + readonly state: StreamState; + /** + * Closes the `Http2Stream` instance by sending an `RST_STREAM` frame to the + * connected HTTP/2 peer. + * @since v8.4.0 + * @param [code=http2.constants.NGHTTP2_NO_ERROR] Unsigned 32-bit integer identifying the error code. + * @param callback An optional function registered to listen for the `'close'` event. + */ + close(code?: number, callback?: () => void): void; + /** + * Updates the priority for this `Http2Stream` instance. + * @since v8.4.0 + */ + priority(options: StreamPriorityOptions): void; + /** + * ```js + * const http2 = require('http2'); + * const client = http2.connect('http://example.org:8000'); + * const { NGHTTP2_CANCEL } = http2.constants; + * const req = client.request({ ':path': '/' }); + * + * // Cancel the stream if there's no activity after 5 seconds + * req.setTimeout(5000, () => req.close(NGHTTP2_CANCEL)); + * ``` + * @since v8.4.0 + */ + setTimeout(msecs: number, callback?: () => void): void; + /** + * Sends a trailing `HEADERS` frame to the connected HTTP/2 peer. This method + * will cause the `Http2Stream` to be immediately closed and must only be + * called after the `'wantTrailers'` event has been emitted. When sending a + * request or sending a response, the `options.waitForTrailers` option must be set + * in order to keep the `Http2Stream` open after the final `DATA` frame so that + * trailers can be sent. + * + * ```js + * const http2 = require('http2'); + * const server = http2.createServer(); + * server.on('stream', (stream) => { + * stream.respond(undefined, { waitForTrailers: true }); + * stream.on('wantTrailers', () => { + * stream.sendTrailers({ xyz: 'abc' }); + * }); + * stream.end('Hello World'); + * }); + * ``` + * + * The HTTP/1 specification forbids trailers from containing HTTP/2 pseudo-header + * fields (e.g. `':method'`, `':path'`, etc). 
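Editor's note: on the receiving side, trailers sent via `sendTrailers()` surface through the `'trailers'` event declared below. A sketch of a client consuming them, assuming a server that sends trailing headers (such as the `sendTrailers()` example above); the address is a placeholder:

```js
const http2 = require('http2');

const client = http2.connect('http://localhost:8000');
const req = client.request({ ':path': '/' });

req.on('response', (headers) => {
  console.log('status:', headers[':status']);
});
req.on('trailers', (trailers, flags) => {
  // Trailing header fields received after the final DATA frame.
  console.log('trailers:', trailers);
});
req.setEncoding('utf8');
req.on('data', () => {});           // consume the body so 'end' fires
req.on('end', () => client.close());
req.end();
```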
+ * @since v10.0.0 + */ + sendTrailers(headers: OutgoingHttpHeaders): void; + addListener(event: 'aborted', listener: () => void): this; + addListener(event: 'close', listener: () => void): this; + addListener(event: 'data', listener: (chunk: Buffer | string) => void): this; + addListener(event: 'drain', listener: () => void): this; + addListener(event: 'end', listener: () => void): this; + addListener(event: 'error', listener: (err: Error) => void): this; + addListener(event: 'finish', listener: () => void): this; + addListener(event: 'frameError', listener: (frameType: number, errorCode: number) => void): this; + addListener(event: 'pipe', listener: (src: stream.Readable) => void): this; + addListener(event: 'unpipe', listener: (src: stream.Readable) => void): this; + addListener(event: 'streamClosed', listener: (code: number) => void): this; + addListener(event: 'timeout', listener: () => void): this; + addListener(event: 'trailers', listener: (trailers: IncomingHttpHeaders, flags: number) => void): this; + addListener(event: 'wantTrailers', listener: () => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: 'aborted'): boolean; + emit(event: 'close'): boolean; + emit(event: 'data', chunk: Buffer | string): boolean; + emit(event: 'drain'): boolean; + emit(event: 'end'): boolean; + emit(event: 'error', err: Error): boolean; + emit(event: 'finish'): boolean; + emit(event: 'frameError', frameType: number, errorCode: number): boolean; + emit(event: 'pipe', src: stream.Readable): boolean; + emit(event: 'unpipe', src: stream.Readable): boolean; + emit(event: 'streamClosed', code: number): boolean; + emit(event: 'timeout'): boolean; + emit(event: 'trailers', trailers: IncomingHttpHeaders, flags: number): boolean; + emit(event: 'wantTrailers'): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: 'aborted', listener: () => void): this; + on(event: 'close', listener: () => void): this; + on(event: 'data', listener: (chunk: Buffer | string) => void): this; + on(event: 'drain', listener: () => void): this; + on(event: 'end', listener: () => void): this; + on(event: 'error', listener: (err: Error) => void): this; + on(event: 'finish', listener: () => void): this; + on(event: 'frameError', listener: (frameType: number, errorCode: number) => void): this; + on(event: 'pipe', listener: (src: stream.Readable) => void): this; + on(event: 'unpipe', listener: (src: stream.Readable) => void): this; + on(event: 'streamClosed', listener: (code: number) => void): this; + on(event: 'timeout', listener: () => void): this; + on(event: 'trailers', listener: (trailers: IncomingHttpHeaders, flags: number) => void): this; + on(event: 'wantTrailers', listener: () => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: 'aborted', listener: () => void): this; + once(event: 'close', listener: () => void): this; + once(event: 'data', listener: (chunk: Buffer | string) => void): this; + once(event: 'drain', listener: () => void): this; + once(event: 'end', listener: () => void): this; + once(event: 'error', listener: (err: Error) => void): this; + once(event: 'finish', listener: () => void): this; + once(event: 'frameError', listener: (frameType: number, errorCode: number) => void): this; + once(event: 'pipe', listener: (src: stream.Readable) => void): this; + once(event: 'unpipe', listener: (src: stream.Readable) => void): this; + once(event: 'streamClosed', listener: (code: number) => void): 
this; + once(event: 'timeout', listener: () => void): this; + once(event: 'trailers', listener: (trailers: IncomingHttpHeaders, flags: number) => void): this; + once(event: 'wantTrailers', listener: () => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: 'aborted', listener: () => void): this; + prependListener(event: 'close', listener: () => void): this; + prependListener(event: 'data', listener: (chunk: Buffer | string) => void): this; + prependListener(event: 'drain', listener: () => void): this; + prependListener(event: 'end', listener: () => void): this; + prependListener(event: 'error', listener: (err: Error) => void): this; + prependListener(event: 'finish', listener: () => void): this; + prependListener(event: 'frameError', listener: (frameType: number, errorCode: number) => void): this; + prependListener(event: 'pipe', listener: (src: stream.Readable) => void): this; + prependListener(event: 'unpipe', listener: (src: stream.Readable) => void): this; + prependListener(event: 'streamClosed', listener: (code: number) => void): this; + prependListener(event: 'timeout', listener: () => void): this; + prependListener(event: 'trailers', listener: (trailers: IncomingHttpHeaders, flags: number) => void): this; + prependListener(event: 'wantTrailers', listener: () => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener(event: 'aborted', listener: () => void): this; + prependOnceListener(event: 'close', listener: () => void): this; + prependOnceListener(event: 'data', listener: (chunk: Buffer | string) => void): this; + prependOnceListener(event: 'drain', listener: () => void): this; + prependOnceListener(event: 'end', listener: () => void): this; + prependOnceListener(event: 'error', listener: (err: Error) => void): this; + prependOnceListener(event: 'finish', listener: () => void): this; + prependOnceListener(event: 'frameError', listener: (frameType: number, errorCode: number) => void): this; + prependOnceListener(event: 'pipe', listener: (src: stream.Readable) => void): this; + prependOnceListener(event: 'unpipe', listener: (src: stream.Readable) => void): this; + prependOnceListener(event: 'streamClosed', listener: (code: number) => void): this; + prependOnceListener(event: 'timeout', listener: () => void): this; + prependOnceListener(event: 'trailers', listener: (trailers: IncomingHttpHeaders, flags: number) => void): this; + prependOnceListener(event: 'wantTrailers', listener: () => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + export interface ClientHttp2Stream extends Http2Stream { + addListener(event: 'continue', listener: () => {}): this; + addListener(event: 'headers', listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void): this; + addListener(event: 'push', listener: (headers: IncomingHttpHeaders, flags: number) => void): this; + addListener(event: 'response', listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: 'continue'): boolean; + emit(event: 'headers', headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number): boolean; + emit(event: 'push', headers: IncomingHttpHeaders, flags: number): boolean; + emit(event: 'response', headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number): 
boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: 'continue', listener: () => {}): this; + on(event: 'headers', listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void): this; + on(event: 'push', listener: (headers: IncomingHttpHeaders, flags: number) => void): this; + on(event: 'response', listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: 'continue', listener: () => {}): this; + once(event: 'headers', listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void): this; + once(event: 'push', listener: (headers: IncomingHttpHeaders, flags: number) => void): this; + once(event: 'response', listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: 'continue', listener: () => {}): this; + prependListener(event: 'headers', listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void): this; + prependListener(event: 'push', listener: (headers: IncomingHttpHeaders, flags: number) => void): this; + prependListener(event: 'response', listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener(event: 'continue', listener: () => {}): this; + prependOnceListener(event: 'headers', listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void): this; + prependOnceListener(event: 'push', listener: (headers: IncomingHttpHeaders, flags: number) => void): this; + prependOnceListener(event: 'response', listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + export interface ServerHttp2Stream extends Http2Stream { + /** + * True if headers were sent, false otherwise (read-only). + * @since v8.4.0 + */ + readonly headersSent: boolean; + /** + * Read-only property mapped to the `SETTINGS_ENABLE_PUSH` flag of the remote + * client's most recent `SETTINGS` frame. Will be `true` if the remote peer + * accepts push streams, `false` otherwise. Settings are the same for every`Http2Stream` in the same `Http2Session`. + * @since v8.4.0 + */ + readonly pushAllowed: boolean; + /** + * Sends an additional informational `HEADERS` frame to the connected HTTP/2 peer. + * @since v8.4.0 + */ + additionalHeaders(headers: OutgoingHttpHeaders): void; + /** + * Initiates a push stream. The callback is invoked with the new `Http2Stream`instance created for the push stream passed as the second argument, or an`Error` passed as the first argument. + * + * ```js + * const http2 = require('http2'); + * const server = http2.createServer(); + * server.on('stream', (stream) => { + * stream.respond({ ':status': 200 }); + * stream.pushStream({ ':path': '/' }, (err, pushStream, headers) => { + * if (err) throw err; + * pushStream.respond({ ':status': 200 }); + * pushStream.end('some pushed data'); + * }); + * stream.end('some data'); + * }); + * ``` + * + * Setting the weight of a push stream is not allowed in the `HEADERS` frame. 
Pass + * a `weight` value to `http2stream.priority` with the `silent` option set to`true` to enable server-side bandwidth balancing between concurrent streams. + * + * Calling `http2stream.pushStream()` from within a pushed stream is not permitted + * and will throw an error. + * @since v8.4.0 + * @param callback Callback that is called once the push stream has been initiated. + */ + pushStream(headers: OutgoingHttpHeaders, callback?: (err: Error | null, pushStream: ServerHttp2Stream, headers: OutgoingHttpHeaders) => void): void; + pushStream(headers: OutgoingHttpHeaders, options?: StreamPriorityOptions, callback?: (err: Error | null, pushStream: ServerHttp2Stream, headers: OutgoingHttpHeaders) => void): void; + /** + * ```js + * const http2 = require('http2'); + * const server = http2.createServer(); + * server.on('stream', (stream) => { + * stream.respond({ ':status': 200 }); + * stream.end('some data'); + * }); + * ``` + * + * When the `options.waitForTrailers` option is set, the `'wantTrailers'` event + * will be emitted immediately after queuing the last chunk of payload data to be + * sent. The `http2stream.sendTrailers()` method can then be used to sent trailing + * header fields to the peer. + * + * When `options.waitForTrailers` is set, the `Http2Stream` will not automatically + * close when the final `DATA` frame is transmitted. User code must call either`http2stream.sendTrailers()` or `http2stream.close()` to close the`Http2Stream`. + * + * ```js + * const http2 = require('http2'); + * const server = http2.createServer(); + * server.on('stream', (stream) => { + * stream.respond({ ':status': 200 }, { waitForTrailers: true }); + * stream.on('wantTrailers', () => { + * stream.sendTrailers({ ABC: 'some value to send' }); + * }); + * stream.end('some data'); + * }); + * ``` + * @since v8.4.0 + */ + respond(headers?: OutgoingHttpHeaders, options?: ServerStreamResponseOptions): void; + /** + * Initiates a response whose data is read from the given file descriptor. No + * validation is performed on the given file descriptor. If an error occurs while + * attempting to read data using the file descriptor, the `Http2Stream` will be + * closed using an `RST_STREAM` frame using the standard `INTERNAL_ERROR` code. + * + * When used, the `Http2Stream` object's `Duplex` interface will be closed + * automatically. + * + * ```js + * const http2 = require('http2'); + * const fs = require('fs'); + * + * const server = http2.createServer(); + * server.on('stream', (stream) => { + * const fd = fs.openSync('/some/file', 'r'); + * + * const stat = fs.fstatSync(fd); + * const headers = { + * 'content-length': stat.size, + * 'last-modified': stat.mtime.toUTCString(), + * 'content-type': 'text/plain; charset=utf-8' + * }; + * stream.respondWithFD(fd, headers); + * stream.on('close', () => fs.closeSync(fd)); + * }); + * ``` + * + * The optional `options.statCheck` function may be specified to give user code + * an opportunity to set additional content headers based on the `fs.Stat` details + * of the given fd. If the `statCheck` function is provided, the`http2stream.respondWithFD()` method will perform an `fs.fstat()` call to + * collect details on the provided file descriptor. + * + * The `offset` and `length` options may be used to limit the response to a + * specific range subset. This can be used, for instance, to support HTTP Range + * requests. + * + * The file descriptor or `FileHandle` is not closed when the stream is closed, + * so it will need to be closed manually once it is no longer needed. 
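Editor's note: for completeness with the `pushStream()` description above, pushed streams arrive on the client as `'stream'` events on the `ClientHttp2Session` (see its listener overloads further below). A sketch under that assumption; the address is a placeholder:

```js
const http2 = require('http2');

const client = http2.connect('http://localhost:8000');

// Each server-initiated push arrives as a new stream on the session;
// requestHeaders are the push-promise request headers (e.g. ':path').
client.on('stream', (pushedStream, requestHeaders) => {
  console.log('pushed resource:', requestHeaders[':path']);
  pushedStream.setEncoding('utf8');
  pushedStream.on('data', (chunk) => console.log('pushed data:', chunk));
});

const req = client.request({ ':path': '/' });
req.on('end', () => client.close());
req.resume();
req.end();
```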
+ * Using the same file descriptor concurrently for multiple streams + * is not supported and may result in data loss. Re-using a file descriptor + * after a stream has finished is supported. + * + * When the `options.waitForTrailers` option is set, the `'wantTrailers'` event + * will be emitted immediately after queuing the last chunk of payload data to be + * sent. The `http2stream.sendTrailers()` method can then be used to sent trailing + * header fields to the peer. + * + * When `options.waitForTrailers` is set, the `Http2Stream` will not automatically + * close when the final `DATA` frame is transmitted. User code _must_ call either`http2stream.sendTrailers()` or `http2stream.close()` to close the`Http2Stream`. + * + * ```js + * const http2 = require('http2'); + * const fs = require('fs'); + * + * const server = http2.createServer(); + * server.on('stream', (stream) => { + * const fd = fs.openSync('/some/file', 'r'); + * + * const stat = fs.fstatSync(fd); + * const headers = { + * 'content-length': stat.size, + * 'last-modified': stat.mtime.toUTCString(), + * 'content-type': 'text/plain; charset=utf-8' + * }; + * stream.respondWithFD(fd, headers, { waitForTrailers: true }); + * stream.on('wantTrailers', () => { + * stream.sendTrailers({ ABC: 'some value to send' }); + * }); + * + * stream.on('close', () => fs.closeSync(fd)); + * }); + * ``` + * @since v8.4.0 + * @param fd A readable file descriptor. + */ + respondWithFD(fd: number | fs.promises.FileHandle, headers?: OutgoingHttpHeaders, options?: ServerStreamFileResponseOptions): void; + /** + * Sends a regular file as the response. The `path` must specify a regular file + * or an `'error'` event will be emitted on the `Http2Stream` object. + * + * When used, the `Http2Stream` object's `Duplex` interface will be closed + * automatically. + * + * The optional `options.statCheck` function may be specified to give user code + * an opportunity to set additional content headers based on the `fs.Stat` details + * of the given file: + * + * If an error occurs while attempting to read the file data, the `Http2Stream`will be closed using an `RST_STREAM` frame using the standard `INTERNAL_ERROR`code. If the `onError` callback is + * defined, then it will be called. Otherwise + * the stream will be destroyed. + * + * Example using a file path: + * + * ```js + * const http2 = require('http2'); + * const server = http2.createServer(); + * server.on('stream', (stream) => { + * function statCheck(stat, headers) { + * headers['last-modified'] = stat.mtime.toUTCString(); + * } + * + * function onError(err) { + * // stream.respond() can throw if the stream has been destroyed by + * // the other side. + * try { + * if (err.code === 'ENOENT') { + * stream.respond({ ':status': 404 }); + * } else { + * stream.respond({ ':status': 500 }); + * } + * } catch (err) { + * // Perform actual error handling. + * console.log(err); + * } + * stream.end(); + * } + * + * stream.respondWithFile('/some/file', + * { 'content-type': 'text/plain; charset=utf-8' }, + * { statCheck, onError }); + * }); + * ``` + * + * The `options.statCheck` function may also be used to cancel the send operation + * by returning `false`. For instance, a conditional request may check the stat + * results to determine if the file has been modified to return an appropriate`304` response: + * + * ```js + * const http2 = require('http2'); + * const server = http2.createServer(); + * server.on('stream', (stream) => { + * function statCheck(stat, headers) { + * // Check the stat here... 
+ * stream.respond({ ':status': 304 }); + * return false; // Cancel the send operation + * } + * stream.respondWithFile('/some/file', + * { 'content-type': 'text/plain; charset=utf-8' }, + * { statCheck }); + * }); + * ``` + * + * The `content-length` header field will be automatically set. + * + * The `offset` and `length` options may be used to limit the response to a + * specific range subset. This can be used, for instance, to support HTTP Range + * requests. + * + * The `options.onError` function may also be used to handle all the errors + * that could happen before the delivery of the file is initiated. The + * default behavior is to destroy the stream. + * + * When the `options.waitForTrailers` option is set, the `'wantTrailers'` event + * will be emitted immediately after queuing the last chunk of payload data to be + * sent. The `http2stream.sendTrailers()` method can then be used to sent trailing + * header fields to the peer. + * + * When `options.waitForTrailers` is set, the `Http2Stream` will not automatically + * close when the final `DATA` frame is transmitted. User code must call either`http2stream.sendTrailers()` or `http2stream.close()` to close the`Http2Stream`. + * + * ```js + * const http2 = require('http2'); + * const server = http2.createServer(); + * server.on('stream', (stream) => { + * stream.respondWithFile('/some/file', + * { 'content-type': 'text/plain; charset=utf-8' }, + * { waitForTrailers: true }); + * stream.on('wantTrailers', () => { + * stream.sendTrailers({ ABC: 'some value to send' }); + * }); + * }); + * ``` + * @since v8.4.0 + */ + respondWithFile(path: string, headers?: OutgoingHttpHeaders, options?: ServerStreamFileResponseOptionsWithError): void; + } + // Http2Session + export interface Settings { + headerTableSize?: number | undefined; + enablePush?: boolean | undefined; + initialWindowSize?: number | undefined; + maxFrameSize?: number | undefined; + maxConcurrentStreams?: number | undefined; + maxHeaderListSize?: number | undefined; + enableConnectProtocol?: boolean | undefined; + } + export interface ClientSessionRequestOptions { + endStream?: boolean | undefined; + exclusive?: boolean | undefined; + parent?: number | undefined; + weight?: number | undefined; + waitForTrailers?: boolean | undefined; + signal?: AbortSignal | undefined; + } + export interface SessionState { + effectiveLocalWindowSize?: number | undefined; + effectiveRecvDataLength?: number | undefined; + nextStreamID?: number | undefined; + localWindowSize?: number | undefined; + lastProcStreamID?: number | undefined; + remoteWindowSize?: number | undefined; + outboundQueueSize?: number | undefined; + deflateDynamicTableSize?: number | undefined; + inflateDynamicTableSize?: number | undefined; + } + export interface Http2Session extends EventEmitter { + /** + * Value will be `undefined` if the `Http2Session` is not yet connected to a + * socket, `h2c` if the `Http2Session` is not connected to a `TLSSocket`, or + * will return the value of the connected `TLSSocket`'s own `alpnProtocol`property. + * @since v9.4.0 + */ + readonly alpnProtocol?: string | undefined; + /** + * Will be `true` if this `Http2Session` instance has been closed, otherwise`false`. + * @since v9.4.0 + */ + readonly closed: boolean; + /** + * Will be `true` if this `Http2Session` instance is still connecting, will be set + * to `false` before emitting `connect` event and/or calling the `http2.connect`callback. 
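Editor's note: the `Settings` shape defined above is what `http2session.settings()` (documented later in this file) and the `localSettings`/`remoteSettings` properties deal in. A sketch of a server tightening a new session's settings; the particular values are arbitrary:

```js
const http2 = require('http2');

const server = http2.createServer();
server.on('session', (session) => {
  // Request new local settings; a SETTINGS frame is sent to the peer.
  // The callback receives (err, settings, duration) per the declaration below.
  session.settings({ enablePush: false, maxConcurrentStreams: 100 }, (err, settings, duration) => {
    if (err) return console.error(err);
    console.log('settings in effect:', settings, `(${duration} ms)`);
  });
  console.log('waiting for ack:', session.pendingSettingsAck);
});
server.listen(8000);
```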
+ * @since v10.0.0 + */ + readonly connecting: boolean; + /** + * Will be `true` if this `Http2Session` instance has been destroyed and must no + * longer be used, otherwise `false`. + * @since v8.4.0 + */ + readonly destroyed: boolean; + /** + * Value is `undefined` if the `Http2Session` session socket has not yet been + * connected, `true` if the `Http2Session` is connected with a `TLSSocket`, + * and `false` if the `Http2Session` is connected to any other kind of socket + * or stream. + * @since v9.4.0 + */ + readonly encrypted?: boolean | undefined; + /** + * A prototype-less object describing the current local settings of this`Http2Session`. The local settings are local to _this_`Http2Session` instance. + * @since v8.4.0 + */ + readonly localSettings: Settings; + /** + * If the `Http2Session` is connected to a `TLSSocket`, the `originSet` property + * will return an `Array` of origins for which the `Http2Session` may be + * considered authoritative. + * + * The `originSet` property is only available when using a secure TLS connection. + * @since v9.4.0 + */ + readonly originSet?: string[] | undefined; + /** + * Indicates whether the `Http2Session` is currently waiting for acknowledgment of + * a sent `SETTINGS` frame. Will be `true` after calling the`http2session.settings()` method. Will be `false` once all sent `SETTINGS`frames have been acknowledged. + * @since v8.4.0 + */ + readonly pendingSettingsAck: boolean; + /** + * A prototype-less object describing the current remote settings of this`Http2Session`. The remote settings are set by the _connected_ HTTP/2 peer. + * @since v8.4.0 + */ + readonly remoteSettings: Settings; + /** + * Returns a `Proxy` object that acts as a `net.Socket` (or `tls.TLSSocket`) but + * limits available methods to ones safe to use with HTTP/2. + * + * `destroy`, `emit`, `end`, `pause`, `read`, `resume`, and `write` will throw + * an error with code `ERR_HTTP2_NO_SOCKET_MANIPULATION`. See `Http2Session and Sockets` for more information. + * + * `setTimeout` method will be called on this `Http2Session`. + * + * All other interactions will be routed directly to the socket. + * @since v8.4.0 + */ + readonly socket: net.Socket | tls.TLSSocket; + /** + * Provides miscellaneous information about the current state of the`Http2Session`. + * + * An object describing the current status of this `Http2Session`. + * @since v8.4.0 + */ + readonly state: SessionState; + /** + * The `http2session.type` will be equal to`http2.constants.NGHTTP2_SESSION_SERVER` if this `Http2Session` instance is a + * server, and `http2.constants.NGHTTP2_SESSION_CLIENT` if the instance is a + * client. + * @since v8.4.0 + */ + readonly type: number; + /** + * Gracefully closes the `Http2Session`, allowing any existing streams to + * complete on their own and preventing new `Http2Stream` instances from being + * created. Once closed, `http2session.destroy()`_might_ be called if there + * are no open `Http2Stream` instances. + * + * If specified, the `callback` function is registered as a handler for the`'close'` event. + * @since v9.4.0 + */ + close(callback?: () => void): void; + /** + * Immediately terminates the `Http2Session` and the associated `net.Socket` or`tls.TLSSocket`. + * + * Once destroyed, the `Http2Session` will emit the `'close'` event. If `error`is not undefined, an `'error'` event will be emitted immediately before the`'close'` event. + * + * If there are any remaining open `Http2Streams` associated with the`Http2Session`, those will also be destroyed. 
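Editor's note: a small sketch contrasting the graceful `close()` described here with `destroy()` (documented just below); the error and code choices are arbitrary:

```js
const http2 = require('http2');

const client = http2.connect('http://localhost:8000');

// Graceful: let in-flight streams finish, then emit 'close'.
client.close(() => console.log('session closed gracefully'));

// Forceful alternative: tear down the session and its socket immediately.
// client.destroy(new Error('shutting down'), http2.constants.NGHTTP2_CANCEL);
```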
+ * @since v8.4.0 + * @param error An `Error` object if the `Http2Session` is being destroyed due to an error. + * @param code The HTTP/2 error code to send in the final `GOAWAY` frame. If unspecified, and `error` is not undefined, the default is `INTERNAL_ERROR`, otherwise defaults to `NO_ERROR`. + */ + destroy(error?: Error, code?: number): void; + /** + * Transmits a `GOAWAY` frame to the connected peer _without_ shutting down the`Http2Session`. + * @since v9.4.0 + * @param code An HTTP/2 error code + * @param lastStreamID The numeric ID of the last processed `Http2Stream` + * @param opaqueData A `TypedArray` or `DataView` instance containing additional data to be carried within the `GOAWAY` frame. + */ + goaway(code?: number, lastStreamID?: number, opaqueData?: NodeJS.ArrayBufferView): void; + /** + * Sends a `PING` frame to the connected HTTP/2 peer. A `callback` function must + * be provided. The method will return `true` if the `PING` was sent, `false`otherwise. + * + * The maximum number of outstanding (unacknowledged) pings is determined by the`maxOutstandingPings` configuration option. The default maximum is 10. + * + * If provided, the `payload` must be a `Buffer`, `TypedArray`, or `DataView`containing 8 bytes of data that will be transmitted with the `PING` and + * returned with the ping acknowledgment. + * + * The callback will be invoked with three arguments: an error argument that will + * be `null` if the `PING` was successfully acknowledged, a `duration` argument + * that reports the number of milliseconds elapsed since the ping was sent and the + * acknowledgment was received, and a `Buffer` containing the 8-byte `PING`payload. + * + * ```js + * session.ping(Buffer.from('abcdefgh'), (err, duration, payload) => { + * if (!err) { + * console.log(`Ping acknowledged in ${duration} milliseconds`); + * console.log(`With payload '${payload.toString()}'`); + * } + * }); + * ``` + * + * If the `payload` argument is not specified, the default payload will be the + * 64-bit timestamp (little endian) marking the start of the `PING` duration. + * @since v8.9.3 + * @param payload Optional ping payload. + */ + ping(callback: (err: Error | null, duration: number, payload: Buffer) => void): boolean; + ping(payload: NodeJS.ArrayBufferView, callback: (err: Error | null, duration: number, payload: Buffer) => void): boolean; + /** + * Calls `ref()` on this `Http2Session`instance's underlying `net.Socket`. + * @since v9.4.0 + */ + ref(): void; + /** + * Sets the local endpoint's window size. + * The `windowSize` is the total window size to set, not + * the delta. + * + * ```js + * const http2 = require('http2'); + * + * const server = http2.createServer(); + * const expectedWindowSize = 2 ** 20; + * server.on('connect', (session) => { + * + * // Set local window size to be 2 ** 20 + * session.setLocalWindowSize(expectedWindowSize); + * }); + * ``` + * @since v15.3.0, v14.18.0 + */ + setLocalWindowSize(windowSize: number): void; + /** + * Used to set a callback function that is called when there is no activity on + * the `Http2Session` after `msecs` milliseconds. The given `callback` is + * registered as a listener on the `'timeout'` event. + * @since v8.4.0 + */ + setTimeout(msecs: number, callback?: () => void): void; + /** + * Updates the current local settings for this `Http2Session` and sends a new`SETTINGS` frame to the connected HTTP/2 peer. 
+ * + * Once called, the `http2session.pendingSettingsAck` property will be `true`while the session is waiting for the remote peer to acknowledge the new + * settings. + * + * The new settings will not become effective until the `SETTINGS` acknowledgment + * is received and the `'localSettings'` event is emitted. It is possible to send + * multiple `SETTINGS` frames while acknowledgment is still pending. + * @since v8.4.0 + * @param callback Callback that is called once the session is connected or right away if the session is already connected. + */ + settings(settings: Settings, callback?: (err: Error | null, settings: Settings, duration: number) => void): void; + /** + * Calls `unref()` on this `Http2Session`instance's underlying `net.Socket`. + * @since v9.4.0 + */ + unref(): void; + addListener(event: 'close', listener: () => void): this; + addListener(event: 'error', listener: (err: Error) => void): this; + addListener(event: 'frameError', listener: (frameType: number, errorCode: number, streamID: number) => void): this; + addListener(event: 'goaway', listener: (errorCode: number, lastStreamID: number, opaqueData: Buffer) => void): this; + addListener(event: 'localSettings', listener: (settings: Settings) => void): this; + addListener(event: 'ping', listener: () => void): this; + addListener(event: 'remoteSettings', listener: (settings: Settings) => void): this; + addListener(event: 'timeout', listener: () => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: 'close'): boolean; + emit(event: 'error', err: Error): boolean; + emit(event: 'frameError', frameType: number, errorCode: number, streamID: number): boolean; + emit(event: 'goaway', errorCode: number, lastStreamID: number, opaqueData: Buffer): boolean; + emit(event: 'localSettings', settings: Settings): boolean; + emit(event: 'ping'): boolean; + emit(event: 'remoteSettings', settings: Settings): boolean; + emit(event: 'timeout'): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: 'close', listener: () => void): this; + on(event: 'error', listener: (err: Error) => void): this; + on(event: 'frameError', listener: (frameType: number, errorCode: number, streamID: number) => void): this; + on(event: 'goaway', listener: (errorCode: number, lastStreamID: number, opaqueData: Buffer) => void): this; + on(event: 'localSettings', listener: (settings: Settings) => void): this; + on(event: 'ping', listener: () => void): this; + on(event: 'remoteSettings', listener: (settings: Settings) => void): this; + on(event: 'timeout', listener: () => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: 'close', listener: () => void): this; + once(event: 'error', listener: (err: Error) => void): this; + once(event: 'frameError', listener: (frameType: number, errorCode: number, streamID: number) => void): this; + once(event: 'goaway', listener: (errorCode: number, lastStreamID: number, opaqueData: Buffer) => void): this; + once(event: 'localSettings', listener: (settings: Settings) => void): this; + once(event: 'ping', listener: () => void): this; + once(event: 'remoteSettings', listener: (settings: Settings) => void): this; + once(event: 'timeout', listener: () => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: 'close', listener: () => void): this; + prependListener(event: 'error', listener: (err: Error) => void): this; + prependListener(event: 'frameError', 
listener: (frameType: number, errorCode: number, streamID: number) => void): this; + prependListener(event: 'goaway', listener: (errorCode: number, lastStreamID: number, opaqueData: Buffer) => void): this; + prependListener(event: 'localSettings', listener: (settings: Settings) => void): this; + prependListener(event: 'ping', listener: () => void): this; + prependListener(event: 'remoteSettings', listener: (settings: Settings) => void): this; + prependListener(event: 'timeout', listener: () => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener(event: 'close', listener: () => void): this; + prependOnceListener(event: 'error', listener: (err: Error) => void): this; + prependOnceListener(event: 'frameError', listener: (frameType: number, errorCode: number, streamID: number) => void): this; + prependOnceListener(event: 'goaway', listener: (errorCode: number, lastStreamID: number, opaqueData: Buffer) => void): this; + prependOnceListener(event: 'localSettings', listener: (settings: Settings) => void): this; + prependOnceListener(event: 'ping', listener: () => void): this; + prependOnceListener(event: 'remoteSettings', listener: (settings: Settings) => void): this; + prependOnceListener(event: 'timeout', listener: () => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + export interface ClientHttp2Session extends Http2Session { + /** + * For HTTP/2 Client `Http2Session` instances only, the `http2session.request()`creates and returns an `Http2Stream` instance that can be used to send an + * HTTP/2 request to the connected server. + * + * When a `ClientHttp2Session` is first created, the socket may not yet be + * connected. if `clienthttp2session.request()` is called during this time, the + * actual request will be deferred until the socket is ready to go. + * If the `session` is closed before the actual request be executed, an`ERR_HTTP2_GOAWAY_SESSION` is thrown. + * + * This method is only available if `http2session.type` is equal to`http2.constants.NGHTTP2_SESSION_CLIENT`. + * + * ```js + * const http2 = require('http2'); + * const clientSession = http2.connect('https://localhost:1234'); + * const { + * HTTP2_HEADER_PATH, + * HTTP2_HEADER_STATUS + * } = http2.constants; + * + * const req = clientSession.request({ [HTTP2_HEADER_PATH]: '/' }); + * req.on('response', (headers) => { + * console.log(headers[HTTP2_HEADER_STATUS]); + * req.on('data', (chunk) => { // .. }); + * req.on('end', () => { // .. }); + * }); + * ``` + * + * When the `options.waitForTrailers` option is set, the `'wantTrailers'` event + * is emitted immediately after queuing the last chunk of payload data to be sent. + * The `http2stream.sendTrailers()` method can then be called to send trailing + * headers to the peer. + * + * When `options.waitForTrailers` is set, the `Http2Stream` will not automatically + * close when the final `DATA` frame is transmitted. User code must call either`http2stream.sendTrailers()` or `http2stream.close()` to close the`Http2Stream`. + * + * When `options.signal` is set with an `AbortSignal` and then `abort` on the + * corresponding `AbortController` is called, the request will emit an `'error'`event with an `AbortError` error. 
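Editor's note: as stated above, aborting via `options.signal` makes the request emit an `'error'` event with an `AbortError`. A sketch; the address, path, and timeout are placeholders:

```js
const http2 = require('http2');

const client = http2.connect('http://localhost:8000');
client.on('error', console.error);

const controller = new AbortController();
const req = client.request({ ':path': '/slow' }, { signal: controller.signal });

req.on('error', (err) => {
  console.error(err.name); // 'AbortError' once abort() is called
  client.close();
});
req.end();

// Abort the request if it takes longer than one second.
setTimeout(() => controller.abort(), 1000);
```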
+ * + * The `:method` and `:path` pseudo-headers are not specified within `headers`, + * they respectively default to: + * + * * `:method` \= `'GET'` + * * `:path` \= `/` + * @since v8.4.0 + */ + request(headers?: OutgoingHttpHeaders, options?: ClientSessionRequestOptions): ClientHttp2Stream; + addListener(event: 'altsvc', listener: (alt: string, origin: string, stream: number) => void): this; + addListener(event: 'origin', listener: (origins: string[]) => void): this; + addListener(event: 'connect', listener: (session: ClientHttp2Session, socket: net.Socket | tls.TLSSocket) => void): this; + addListener(event: 'stream', listener: (stream: ClientHttp2Stream, headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: 'altsvc', alt: string, origin: string, stream: number): boolean; + emit(event: 'origin', origins: ReadonlyArray): boolean; + emit(event: 'connect', session: ClientHttp2Session, socket: net.Socket | tls.TLSSocket): boolean; + emit(event: 'stream', stream: ClientHttp2Stream, headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: 'altsvc', listener: (alt: string, origin: string, stream: number) => void): this; + on(event: 'origin', listener: (origins: string[]) => void): this; + on(event: 'connect', listener: (session: ClientHttp2Session, socket: net.Socket | tls.TLSSocket) => void): this; + on(event: 'stream', listener: (stream: ClientHttp2Stream, headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: 'altsvc', listener: (alt: string, origin: string, stream: number) => void): this; + once(event: 'origin', listener: (origins: string[]) => void): this; + once(event: 'connect', listener: (session: ClientHttp2Session, socket: net.Socket | tls.TLSSocket) => void): this; + once(event: 'stream', listener: (stream: ClientHttp2Stream, headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: 'altsvc', listener: (alt: string, origin: string, stream: number) => void): this; + prependListener(event: 'origin', listener: (origins: string[]) => void): this; + prependListener(event: 'connect', listener: (session: ClientHttp2Session, socket: net.Socket | tls.TLSSocket) => void): this; + prependListener(event: 'stream', listener: (stream: ClientHttp2Stream, headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener(event: 'altsvc', listener: (alt: string, origin: string, stream: number) => void): this; + prependOnceListener(event: 'origin', listener: (origins: string[]) => void): this; + prependOnceListener(event: 'connect', listener: (session: ClientHttp2Session, socket: net.Socket | tls.TLSSocket) => void): this; + prependOnceListener(event: 'stream', listener: (stream: ClientHttp2Stream, headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + export interface AlternativeServiceOptions { + origin: number | string | url.URL; + } + export interface ServerHttp2Session extends 
Http2Session { + readonly server: Http2Server | Http2SecureServer; + /** + * Submits an `ALTSVC` frame (as defined by [RFC 7838](https://tools.ietf.org/html/rfc7838)) to the connected client. + * + * ```js + * const http2 = require('http2'); + * + * const server = http2.createServer(); + * server.on('session', (session) => { + * // Set altsvc for origin https://example.org:80 + * session.altsvc('h2=":8000"', 'https://example.org:80'); + * }); + * + * server.on('stream', (stream) => { + * // Set altsvc for a specific stream + * stream.session.altsvc('h2=":8000"', stream.id); + * }); + * ``` + * + * Sending an `ALTSVC` frame with a specific stream ID indicates that the alternate + * service is associated with the origin of the given `Http2Stream`. + * + * The `alt` and origin string _must_ contain only ASCII bytes and are + * strictly interpreted as a sequence of ASCII bytes. The special value `'clear'`may be passed to clear any previously set alternative service for a given + * domain. + * + * When a string is passed for the `originOrStream` argument, it will be parsed as + * a URL and the origin will be derived. For instance, the origin for the + * HTTP URL `'https://example.org/foo/bar'` is the ASCII string`'https://example.org'`. An error will be thrown if either the given string + * cannot be parsed as a URL or if a valid origin cannot be derived. + * + * A `URL` object, or any object with an `origin` property, may be passed as`originOrStream`, in which case the value of the `origin` property will be + * used. The value of the `origin` property _must_ be a properly serialized + * ASCII origin. + * @since v9.4.0 + * @param alt A description of the alternative service configuration as defined by `RFC 7838`. + * @param originOrStream Either a URL string specifying the origin (or an `Object` with an `origin` property) or the numeric identifier of an active `Http2Stream` as given by the + * `http2stream.id` property. + */ + altsvc(alt: string, originOrStream: number | string | url.URL | AlternativeServiceOptions): void; + /** + * Submits an `ORIGIN` frame (as defined by [RFC 8336](https://tools.ietf.org/html/rfc8336)) to the connected client + * to advertise the set of origins for which the server is capable of providing + * authoritative responses. + * + * ```js + * const http2 = require('http2'); + * const options = getSecureOptionsSomehow(); + * const server = http2.createSecureServer(options); + * server.on('stream', (stream) => { + * stream.respond(); + * stream.end('ok'); + * }); + * server.on('session', (session) => { + * session.origin('https://example.com', 'https://example.org'); + * }); + * ``` + * + * When a string is passed as an `origin`, it will be parsed as a URL and the + * origin will be derived. For instance, the origin for the HTTP URL`'https://example.org/foo/bar'` is the ASCII string`'https://example.org'`. An error will be thrown if either the given + * string + * cannot be parsed as a URL or if a valid origin cannot be derived. + * + * A `URL` object, or any object with an `origin` property, may be passed as + * an `origin`, in which case the value of the `origin` property will be + * used. The value of the `origin` property _must_ be a properly serialized + * ASCII origin. 
+ * + * Alternatively, the `origins` option may be used when creating a new HTTP/2 + * server using the `http2.createSecureServer()` method: + * + * ```js + * const http2 = require('http2'); + * const options = getSecureOptionsSomehow(); + * options.origins = ['https://example.com', 'https://example.org']; + * const server = http2.createSecureServer(options); + * server.on('stream', (stream) => { + * stream.respond(); + * stream.end('ok'); + * }); + * ``` + * @since v10.12.0 + * @param origins One or more URL Strings passed as separate arguments. + */ + origin( + ...origins: Array< + | string + | url.URL + | { + origin: string; + } + > + ): void; + addListener(event: 'connect', listener: (session: ServerHttp2Session, socket: net.Socket | tls.TLSSocket) => void): this; + addListener(event: 'stream', listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: 'connect', session: ServerHttp2Session, socket: net.Socket | tls.TLSSocket): boolean; + emit(event: 'stream', stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: 'connect', listener: (session: ServerHttp2Session, socket: net.Socket | tls.TLSSocket) => void): this; + on(event: 'stream', listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: 'connect', listener: (session: ServerHttp2Session, socket: net.Socket | tls.TLSSocket) => void): this; + once(event: 'stream', listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: 'connect', listener: (session: ServerHttp2Session, socket: net.Socket | tls.TLSSocket) => void): this; + prependListener(event: 'stream', listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener(event: 'connect', listener: (session: ServerHttp2Session, socket: net.Socket | tls.TLSSocket) => void): this; + prependOnceListener(event: 'stream', listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + // Http2Server + export interface SessionOptions { + maxDeflateDynamicTableSize?: number | undefined; + maxSessionMemory?: number | undefined; + maxHeaderListPairs?: number | undefined; + maxOutstandingPings?: number | undefined; + maxSendHeaderBlockLength?: number | undefined; + paddingStrategy?: number | undefined; + peerMaxConcurrentStreams?: number | undefined; + settings?: Settings | undefined; + /** + * Specifies a timeout in milliseconds that + * a server should wait when an [`'unknownProtocol'`][] is emitted. If the + * socket has not been destroyed by that time the server will destroy it. 
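Editor's note: tying the session/server options above together, a secure server might be configured roughly like this. A sketch only; the key/cert paths, port, and numeric values are placeholders:

```js
const http2 = require('http2');
const fs = require('fs');

const server = http2.createSecureServer({
  key: fs.readFileSync('server-key.pem'),    // placeholder path
  cert: fs.readFileSync('server-cert.pem'),  // placeholder path
  settings: { maxConcurrentStreams: 128 },   // initial SETTINGS for new sessions
  unknownProtocolTimeout: 1000,              // ms to wait before destroying non-HTTP/2 sockets
});

server.on('stream', (stream) => {
  stream.respond({ ':status': 200 });
  stream.end('ok');
});

server.listen(8443);
```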
+ * @default 100000 + */ + unknownProtocolTimeout?: number | undefined; + selectPadding?(frameLen: number, maxFrameLen: number): number; + createConnection?(authority: url.URL, option: SessionOptions): stream.Duplex; + } + export interface ClientSessionOptions extends SessionOptions { + maxReservedRemoteStreams?: number | undefined; + createConnection?: ((authority: url.URL, option: SessionOptions) => stream.Duplex) | undefined; + protocol?: 'http:' | 'https:' | undefined; + } + export interface ServerSessionOptions extends SessionOptions { + Http1IncomingMessage?: typeof IncomingMessage | undefined; + Http1ServerResponse?: typeof ServerResponse | undefined; + Http2ServerRequest?: typeof Http2ServerRequest | undefined; + Http2ServerResponse?: typeof Http2ServerResponse | undefined; + } + export interface SecureClientSessionOptions extends ClientSessionOptions, tls.ConnectionOptions {} + export interface SecureServerSessionOptions extends ServerSessionOptions, tls.TlsOptions {} + export interface ServerOptions extends ServerSessionOptions {} + export interface SecureServerOptions extends SecureServerSessionOptions { + allowHTTP1?: boolean | undefined; + origins?: string[] | undefined; + } + interface HTTP2ServerCommon { + setTimeout(msec?: number, callback?: () => void): this; + /** + * Throws ERR_HTTP2_INVALID_SETTING_VALUE for invalid settings values. + * Throws ERR_INVALID_ARG_TYPE for invalid settings argument. + */ + updateSettings(settings: Settings): void; + } + export interface Http2Server extends net.Server, HTTP2ServerCommon { + addListener(event: 'checkContinue', listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; + addListener(event: 'request', listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; + addListener(event: 'session', listener: (session: ServerHttp2Session) => void): this; + addListener(event: 'sessionError', listener: (err: Error) => void): this; + addListener(event: 'stream', listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void): this; + addListener(event: 'timeout', listener: () => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: 'checkContinue', request: Http2ServerRequest, response: Http2ServerResponse): boolean; + emit(event: 'request', request: Http2ServerRequest, response: Http2ServerResponse): boolean; + emit(event: 'session', session: ServerHttp2Session): boolean; + emit(event: 'sessionError', err: Error): boolean; + emit(event: 'stream', stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number): boolean; + emit(event: 'timeout'): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: 'checkContinue', listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; + on(event: 'request', listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; + on(event: 'session', listener: (session: ServerHttp2Session) => void): this; + on(event: 'sessionError', listener: (err: Error) => void): this; + on(event: 'stream', listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void): this; + on(event: 'timeout', listener: () => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: 'checkContinue', listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; + once(event: 'request', listener: (request: Http2ServerRequest, 
response: Http2ServerResponse) => void): this; + once(event: 'session', listener: (session: ServerHttp2Session) => void): this; + once(event: 'sessionError', listener: (err: Error) => void): this; + once(event: 'stream', listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void): this; + once(event: 'timeout', listener: () => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: 'checkContinue', listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; + prependListener(event: 'request', listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; + prependListener(event: 'session', listener: (session: ServerHttp2Session) => void): this; + prependListener(event: 'sessionError', listener: (err: Error) => void): this; + prependListener(event: 'stream', listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void): this; + prependListener(event: 'timeout', listener: () => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener(event: 'checkContinue', listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; + prependOnceListener(event: 'request', listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; + prependOnceListener(event: 'session', listener: (session: ServerHttp2Session) => void): this; + prependOnceListener(event: 'sessionError', listener: (err: Error) => void): this; + prependOnceListener(event: 'stream', listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void): this; + prependOnceListener(event: 'timeout', listener: () => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + export interface Http2SecureServer extends tls.Server, HTTP2ServerCommon { + addListener(event: 'checkContinue', listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; + addListener(event: 'request', listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; + addListener(event: 'session', listener: (session: ServerHttp2Session) => void): this; + addListener(event: 'sessionError', listener: (err: Error) => void): this; + addListener(event: 'stream', listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void): this; + addListener(event: 'timeout', listener: () => void): this; + addListener(event: 'unknownProtocol', listener: (socket: tls.TLSSocket) => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: 'checkContinue', request: Http2ServerRequest, response: Http2ServerResponse): boolean; + emit(event: 'request', request: Http2ServerRequest, response: Http2ServerResponse): boolean; + emit(event: 'session', session: ServerHttp2Session): boolean; + emit(event: 'sessionError', err: Error): boolean; + emit(event: 'stream', stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number): boolean; + emit(event: 'timeout'): boolean; + emit(event: 'unknownProtocol', socket: tls.TLSSocket): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: 'checkContinue', listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; + on(event: 'request', listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; + 
on(event: 'session', listener: (session: ServerHttp2Session) => void): this; + on(event: 'sessionError', listener: (err: Error) => void): this; + on(event: 'stream', listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void): this; + on(event: 'timeout', listener: () => void): this; + on(event: 'unknownProtocol', listener: (socket: tls.TLSSocket) => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: 'checkContinue', listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; + once(event: 'request', listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; + once(event: 'session', listener: (session: ServerHttp2Session) => void): this; + once(event: 'sessionError', listener: (err: Error) => void): this; + once(event: 'stream', listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void): this; + once(event: 'timeout', listener: () => void): this; + once(event: 'unknownProtocol', listener: (socket: tls.TLSSocket) => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: 'checkContinue', listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; + prependListener(event: 'request', listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; + prependListener(event: 'session', listener: (session: ServerHttp2Session) => void): this; + prependListener(event: 'sessionError', listener: (err: Error) => void): this; + prependListener(event: 'stream', listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void): this; + prependListener(event: 'timeout', listener: () => void): this; + prependListener(event: 'unknownProtocol', listener: (socket: tls.TLSSocket) => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener(event: 'checkContinue', listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; + prependOnceListener(event: 'request', listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; + prependOnceListener(event: 'session', listener: (session: ServerHttp2Session) => void): this; + prependOnceListener(event: 'sessionError', listener: (err: Error) => void): this; + prependOnceListener(event: 'stream', listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void): this; + prependOnceListener(event: 'timeout', listener: () => void): this; + prependOnceListener(event: 'unknownProtocol', listener: (socket: tls.TLSSocket) => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + /** + * A `Http2ServerRequest` object is created by {@link Server} or {@link SecureServer} and passed as the first argument to the `'request'` event. It may be used to access a request status, + * headers, and + * data. + * @since v8.4.0 + */ + export class Http2ServerRequest extends stream.Readable { + constructor(stream: ServerHttp2Stream, headers: IncomingHttpHeaders, options: stream.ReadableOptions, rawHeaders: ReadonlyArray); + /** + * The `request.aborted` property will be `true` if the request has + * been aborted. + * @since v10.1.0 + */ + readonly aborted: boolean; + /** + * The request authority pseudo header field. 
Because HTTP/2 allows requests + * to set either `:authority` or `host`, this value is derived from`req.headers[':authority']` if present. Otherwise, it is derived from`req.headers['host']`. + * @since v8.4.0 + */ + readonly authority: string; + /** + * See `request.socket`. + * @since v8.4.0 + * @deprecated Since v13.0.0 - Use `socket`. + */ + readonly connection: net.Socket | tls.TLSSocket; + /** + * The `request.complete` property will be `true` if the request has + * been completed, aborted, or destroyed. + * @since v12.10.0 + */ + readonly complete: boolean; + /** + * The request/response headers object. + * + * Key-value pairs of header names and values. Header names are lower-cased. + * + * ```js + * // Prints something like: + * // + * // { 'user-agent': 'curl/7.22.0', + * // host: '127.0.0.1:8000', + * // accept: '*' } + * console.log(request.headers); + * ``` + * + * See `HTTP/2 Headers Object`. + * + * In HTTP/2, the request path, host name, protocol, and method are represented as + * special headers prefixed with the `:` character (e.g. `':path'`). These special + * headers will be included in the `request.headers` object. Care must be taken not + * to inadvertently modify these special headers or errors may occur. For instance, + * removing all headers from the request will cause errors to occur: + * + * ```js + * removeAllHeaders(request.headers); + * assert(request.url); // Fails because the :path header has been removed + * ``` + * @since v8.4.0 + */ + readonly headers: IncomingHttpHeaders; + /** + * In case of server request, the HTTP version sent by the client. In the case of + * client response, the HTTP version of the connected-to server. Returns`'2.0'`. + * + * Also `message.httpVersionMajor` is the first integer and`message.httpVersionMinor` is the second. + * @since v8.4.0 + */ + readonly httpVersion: string; + readonly httpVersionMinor: number; + readonly httpVersionMajor: number; + /** + * The request method as a string. Read-only. Examples: `'GET'`, `'DELETE'`. + * @since v8.4.0 + */ + readonly method: string; + /** + * The raw request/response headers list exactly as they were received. + * + * The keys and values are in the same list. It is _not_ a + * list of tuples. So, the even-numbered offsets are key values, and the + * odd-numbered offsets are the associated values. + * + * Header names are not lowercased, and duplicates are not merged. + * + * ```js + * // Prints something like: + * // + * // [ 'user-agent', + * // 'this is invalid because there can be only one', + * // 'User-Agent', + * // 'curl/7.22.0', + * // 'Host', + * // '127.0.0.1:8000', + * // 'ACCEPT', + * // '*' ] + * console.log(request.rawHeaders); + * ``` + * @since v8.4.0 + */ + readonly rawHeaders: string[]; + /** + * The raw request/response trailer keys and values exactly as they were + * received. Only populated at the `'end'` event. + * @since v8.4.0 + */ + readonly rawTrailers: string[]; + /** + * The request scheme pseudo header field indicating the scheme + * portion of the target URL. + * @since v8.4.0 + */ + readonly scheme: string; + /** + * Returns a `Proxy` object that acts as a `net.Socket` (or `tls.TLSSocket`) but + * applies getters, setters, and methods based on HTTP/2 logic. + * + * `destroyed`, `readable`, and `writable` properties will be retrieved from and + * set on `request.stream`. + * + * `destroy`, `emit`, `end`, `on` and `once` methods will be called on`request.stream`. + * + * `setTimeout` method will be called on `request.stream.session`. 
+ * + * `pause`, `read`, `resume`, and `write` will throw an error with code`ERR_HTTP2_NO_SOCKET_MANIPULATION`. See `Http2Session and Sockets` for + * more information. + * + * All other interactions will be routed directly to the socket. With TLS support, + * use `request.socket.getPeerCertificate()` to obtain the client's + * authentication details. + * @since v8.4.0 + */ + readonly socket: net.Socket | tls.TLSSocket; + /** + * The `Http2Stream` object backing the request. + * @since v8.4.0 + */ + readonly stream: ServerHttp2Stream; + /** + * The request/response trailers object. Only populated at the `'end'` event. + * @since v8.4.0 + */ + readonly trailers: IncomingHttpHeaders; + /** + * Request URL string. This contains only the URL that is present in the actual + * HTTP request. If the request is: + * + * ```http + * GET /status?name=ryan HTTP/1.1 + * Accept: text/plain + * ``` + * + * Then `request.url` will be: + * + * ```js + * '/status?name=ryan' + * ``` + * + * To parse the url into its parts, `new URL()` can be used: + * + * ```console + * $ node + * > new URL('/status?name=ryan', 'http://example.com') + * URL { + * href: 'http://example.com/status?name=ryan', + * origin: 'http://example.com', + * protocol: 'http:', + * username: '', + * password: '', + * host: 'example.com', + * hostname: 'example.com', + * port: '', + * pathname: '/status', + * search: '?name=ryan', + * searchParams: URLSearchParams { 'name' => 'ryan' }, + * hash: '' + * } + * ``` + * @since v8.4.0 + */ + url: string; + /** + * Sets the `Http2Stream`'s timeout value to `msecs`. If a callback is + * provided, then it is added as a listener on the `'timeout'` event on + * the response object. + * + * If no `'timeout'` listener is added to the request, the response, or + * the server, then `Http2Stream` s are destroyed when they time out. If a + * handler is assigned to the request, the response, or the server's `'timeout'`events, timed out sockets must be handled explicitly. 
+ * @since v8.4.0 + */ + setTimeout(msecs: number, callback?: () => void): void; + read(size?: number): Buffer | string | null; + addListener(event: 'aborted', listener: (hadError: boolean, code: number) => void): this; + addListener(event: 'close', listener: () => void): this; + addListener(event: 'data', listener: (chunk: Buffer | string) => void): this; + addListener(event: 'end', listener: () => void): this; + addListener(event: 'readable', listener: () => void): this; + addListener(event: 'error', listener: (err: Error) => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: 'aborted', hadError: boolean, code: number): boolean; + emit(event: 'close'): boolean; + emit(event: 'data', chunk: Buffer | string): boolean; + emit(event: 'end'): boolean; + emit(event: 'readable'): boolean; + emit(event: 'error', err: Error): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: 'aborted', listener: (hadError: boolean, code: number) => void): this; + on(event: 'close', listener: () => void): this; + on(event: 'data', listener: (chunk: Buffer | string) => void): this; + on(event: 'end', listener: () => void): this; + on(event: 'readable', listener: () => void): this; + on(event: 'error', listener: (err: Error) => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: 'aborted', listener: (hadError: boolean, code: number) => void): this; + once(event: 'close', listener: () => void): this; + once(event: 'data', listener: (chunk: Buffer | string) => void): this; + once(event: 'end', listener: () => void): this; + once(event: 'readable', listener: () => void): this; + once(event: 'error', listener: (err: Error) => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: 'aborted', listener: (hadError: boolean, code: number) => void): this; + prependListener(event: 'close', listener: () => void): this; + prependListener(event: 'data', listener: (chunk: Buffer | string) => void): this; + prependListener(event: 'end', listener: () => void): this; + prependListener(event: 'readable', listener: () => void): this; + prependListener(event: 'error', listener: (err: Error) => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener(event: 'aborted', listener: (hadError: boolean, code: number) => void): this; + prependOnceListener(event: 'close', listener: () => void): this; + prependOnceListener(event: 'data', listener: (chunk: Buffer | string) => void): this; + prependOnceListener(event: 'end', listener: () => void): this; + prependOnceListener(event: 'readable', listener: () => void): this; + prependOnceListener(event: 'error', listener: (err: Error) => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + /** + * This object is created internally by an HTTP server, not by the user. It is + * passed as the second parameter to the `'request'` event. + * @since v8.4.0 + */ + export class Http2ServerResponse extends stream.Writable { + constructor(stream: ServerHttp2Stream); + /** + * See `response.socket`. + * @since v8.4.0 + * @deprecated Since v13.0.0 - Use `socket`. + */ + readonly connection: net.Socket | tls.TLSSocket; + /** + * Boolean value that indicates whether the response has completed. Starts + * as `false`. After `response.end()` executes, the value will be `true`. 
+ * @since v8.4.0 + * @deprecated Since v13.4.0,v12.16.0 - Use `writableEnded`. + */ + readonly finished: boolean; + /** + * True if headers were sent, false otherwise (read-only). + * @since v8.4.0 + */ + readonly headersSent: boolean; + /** + * A reference to the original HTTP2 request object. + * @since v15.7.0 + */ + readonly req: Http2ServerRequest; + /** + * Returns a `Proxy` object that acts as a `net.Socket` (or `tls.TLSSocket`) but + * applies getters, setters, and methods based on HTTP/2 logic. + * + * `destroyed`, `readable`, and `writable` properties will be retrieved from and + * set on `response.stream`. + * + * `destroy`, `emit`, `end`, `on` and `once` methods will be called on`response.stream`. + * + * `setTimeout` method will be called on `response.stream.session`. + * + * `pause`, `read`, `resume`, and `write` will throw an error with code`ERR_HTTP2_NO_SOCKET_MANIPULATION`. See `Http2Session and Sockets` for + * more information. + * + * All other interactions will be routed directly to the socket. + * + * ```js + * const http2 = require('http2'); + * const server = http2.createServer((req, res) => { + * const ip = req.socket.remoteAddress; + * const port = req.socket.remotePort; + * res.end(`Your IP address is ${ip} and your source port is ${port}.`); + * }).listen(3000); + * ``` + * @since v8.4.0 + */ + readonly socket: net.Socket | tls.TLSSocket; + /** + * The `Http2Stream` object backing the response. + * @since v8.4.0 + */ + readonly stream: ServerHttp2Stream; + /** + * When true, the Date header will be automatically generated and sent in + * the response if it is not already present in the headers. Defaults to true. + * + * This should only be disabled for testing; HTTP requires the Date header + * in responses. + * @since v8.4.0 + */ + sendDate: boolean; + /** + * When using implicit headers (not calling `response.writeHead()` explicitly), + * this property controls the status code that will be sent to the client when + * the headers get flushed. + * + * ```js + * response.statusCode = 404; + * ``` + * + * After response header was sent to the client, this property indicates the + * status code which was sent out. + * @since v8.4.0 + */ + statusCode: number; + /** + * Status message is not supported by HTTP/2 (RFC 7540 8.1.2.4). It returns + * an empty string. + * @since v8.4.0 + */ + statusMessage: ''; + /** + * This method adds HTTP trailing headers (a header but at the end of the + * message) to the response. + * + * Attempting to set a header field name or value that contains invalid characters + * will result in a `TypeError` being thrown. + * @since v8.4.0 + */ + addTrailers(trailers: OutgoingHttpHeaders): void; + /** + * This method signals to the server that all of the response headers and body + * have been sent; that server should consider this message complete. + * The method, `response.end()`, MUST be called on each response. + * + * If `data` is specified, it is equivalent to calling `response.write(data, encoding)` followed by `response.end(callback)`. + * + * If `callback` is specified, it will be called when the response stream + * is finished. + * @since v8.4.0 + */ + end(callback?: () => void): this; + end(data: string | Uint8Array, callback?: () => void): this; + end(data: string | Uint8Array, encoding: BufferEncoding, callback?: () => void): this; + /** + * Reads out a header that has already been queued but not sent to the client. + * The name is case-insensitive. 
+ * + * ```js + * const contentType = response.getHeader('content-type'); + * ``` + * @since v8.4.0 + */ + getHeader(name: string): string; + /** + * Returns an array containing the unique names of the current outgoing headers. + * All header names are lowercase. + * + * ```js + * response.setHeader('Foo', 'bar'); + * response.setHeader('Set-Cookie', ['foo=bar', 'bar=baz']); + * + * const headerNames = response.getHeaderNames(); + * // headerNames === ['foo', 'set-cookie'] + * ``` + * @since v8.4.0 + */ + getHeaderNames(): string[]; + /** + * Returns a shallow copy of the current outgoing headers. Since a shallow copy + * is used, array values may be mutated without additional calls to various + * header-related http module methods. The keys of the returned object are the + * header names and the values are the respective header values. All header names + * are lowercase. + * + * The object returned by the `response.getHeaders()` method _does not_prototypically inherit from the JavaScript `Object`. This means that typical`Object` methods such as `obj.toString()`, + * `obj.hasOwnProperty()`, and others + * are not defined and _will not work_. + * + * ```js + * response.setHeader('Foo', 'bar'); + * response.setHeader('Set-Cookie', ['foo=bar', 'bar=baz']); + * + * const headers = response.getHeaders(); + * // headers === { foo: 'bar', 'set-cookie': ['foo=bar', 'bar=baz'] } + * ``` + * @since v8.4.0 + */ + getHeaders(): OutgoingHttpHeaders; + /** + * Returns `true` if the header identified by `name` is currently set in the + * outgoing headers. The header name matching is case-insensitive. + * + * ```js + * const hasContentType = response.hasHeader('content-type'); + * ``` + * @since v8.4.0 + */ + hasHeader(name: string): boolean; + /** + * Removes a header that has been queued for implicit sending. + * + * ```js + * response.removeHeader('Content-Encoding'); + * ``` + * @since v8.4.0 + */ + removeHeader(name: string): void; + /** + * Sets a single header value for implicit headers. If this header already exists + * in the to-be-sent headers, its value will be replaced. Use an array of strings + * here to send multiple headers with the same name. + * + * ```js + * response.setHeader('Content-Type', 'text/html; charset=utf-8'); + * ``` + * + * or + * + * ```js + * response.setHeader('Set-Cookie', ['type=ninja', 'language=javascript']); + * ``` + * + * Attempting to set a header field name or value that contains invalid characters + * will result in a `TypeError` being thrown. + * + * When headers have been set with `response.setHeader()`, they will be merged + * with any headers passed to `response.writeHead()`, with the headers passed + * to `response.writeHead()` given precedence. + * + * ```js + * // Returns content-type = text/plain + * const server = http2.createServer((req, res) => { + * res.setHeader('Content-Type', 'text/html; charset=utf-8'); + * res.setHeader('X-Foo', 'bar'); + * res.writeHead(200, { 'Content-Type': 'text/plain; charset=utf-8' }); + * res.end('ok'); + * }); + * ``` + * @since v8.4.0 + */ + setHeader(name: string, value: number | string | ReadonlyArray): void; + /** + * Sets the `Http2Stream`'s timeout value to `msecs`. If a callback is + * provided, then it is added as a listener on the `'timeout'` event on + * the response object. + * + * If no `'timeout'` listener is added to the request, the response, or + * the server, then `Http2Stream` s are destroyed when they time out. 
If a + * handler is assigned to the request, the response, or the server's `'timeout'`events, timed out sockets must be handled explicitly. + * @since v8.4.0 + */ + setTimeout(msecs: number, callback?: () => void): void; + /** + * If this method is called and `response.writeHead()` has not been called, + * it will switch to implicit header mode and flush the implicit headers. + * + * This sends a chunk of the response body. This method may + * be called multiple times to provide successive parts of the body. + * + * In the `http` module, the response body is omitted when the + * request is a HEAD request. Similarly, the `204` and `304` responses _must not_ include a message body. + * + * `chunk` can be a string or a buffer. If `chunk` is a string, + * the second parameter specifies how to encode it into a byte stream. + * By default the `encoding` is `'utf8'`. `callback` will be called when this chunk + * of data is flushed. + * + * This is the raw HTTP body and has nothing to do with higher-level multi-part + * body encodings that may be used. + * + * The first time `response.write()` is called, it will send the buffered + * header information and the first chunk of the body to the client. The second + * time `response.write()` is called, Node.js assumes data will be streamed, + * and sends the new data separately. That is, the response is buffered up to the + * first chunk of the body. + * + * Returns `true` if the entire data was flushed successfully to the kernel + * buffer. Returns `false` if all or part of the data was queued in user memory.`'drain'` will be emitted when the buffer is free again. + * @since v8.4.0 + */ + write(chunk: string | Uint8Array, callback?: (err: Error) => void): boolean; + write(chunk: string | Uint8Array, encoding: BufferEncoding, callback?: (err: Error) => void): boolean; + /** + * Sends a status `100 Continue` to the client, indicating that the request body + * should be sent. See the `'checkContinue'` event on `Http2Server` and`Http2SecureServer`. + * @since v8.4.0 + */ + writeContinue(): void; + /** + * Sends a status `103 Early Hints` to the client with a Link header, + * indicating that the user agent can preload/preconnect the linked resources. + * The `hints` is an object containing the values of headers to be sent with + * early hints message. + * + * Example: + * + * ```js + * const earlyHintsLink = '; rel=preload; as=style'; + * response.writeEarlyHints({ + * 'link': earlyHintsLink, + * }); + * + * const earlyHintsLinks = [ + * '; rel=preload; as=style', + * '; rel=preload; as=script', + * ]; + * response.writeEarlyHints({ + * 'link': earlyHintsLinks, + * 'x-trace-id': 'id for diagnostics' + * }); + * ``` + * + * @since v18.11.0 + * @param hints An object containing the values of headers + */ + writeEarlyHints(hints: Record): void; + /** + * Sends a response header to the request. The status code is a 3-digit HTTP + * status code, like `404`. The last argument, `headers`, are the response headers. + * + * Returns a reference to the `Http2ServerResponse`, so that calls can be chained. + * + * For compatibility with `HTTP/1`, a human-readable `statusMessage` may be + * passed as the second argument. However, because the `statusMessage` has no + * meaning within HTTP/2, the argument will have no effect and a process warning + * will be emitted. 
+ * + * ```js + * const body = 'hello world'; + * response.writeHead(200, { + * 'Content-Length': Buffer.byteLength(body), + * 'Content-Type': 'text/plain; charset=utf-8', + * }); + * ``` + * + * `Content-Length` is given in bytes not characters. The`Buffer.byteLength()` API may be used to determine the number of bytes in a + * given encoding. On outbound messages, Node.js does not check if Content-Length + * and the length of the body being transmitted are equal or not. However, when + * receiving messages, Node.js will automatically reject messages when the`Content-Length` does not match the actual payload size. + * + * This method may be called at most one time on a message before `response.end()` is called. + * + * If `response.write()` or `response.end()` are called before calling + * this, the implicit/mutable headers will be calculated and call this function. + * + * When headers have been set with `response.setHeader()`, they will be merged + * with any headers passed to `response.writeHead()`, with the headers passed + * to `response.writeHead()` given precedence. + * + * ```js + * // Returns content-type = text/plain + * const server = http2.createServer((req, res) => { + * res.setHeader('Content-Type', 'text/html; charset=utf-8'); + * res.setHeader('X-Foo', 'bar'); + * res.writeHead(200, { 'Content-Type': 'text/plain; charset=utf-8' }); + * res.end('ok'); + * }); + * ``` + * + * Attempting to set a header field name or value that contains invalid characters + * will result in a `TypeError` being thrown. + * @since v8.4.0 + */ + writeHead(statusCode: number, headers?: OutgoingHttpHeaders): this; + writeHead(statusCode: number, statusMessage: string, headers?: OutgoingHttpHeaders): this; + /** + * Call `http2stream.pushStream()` with the given headers, and wrap the + * given `Http2Stream` on a newly created `Http2ServerResponse` as the callback + * parameter if successful. When `Http2ServerRequest` is closed, the callback is + * called with an error `ERR_HTTP2_INVALID_STREAM`. 
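The `createPushResponse()` description above ships with no example in these vendored typings. As a hedged sketch only (not part of the upstream file; the port and the `/` and `/style.css` paths are placeholders), a compatibility-API handler might push a stylesheet roughly like this, provided the client has push enabled:

```js
const http2 = require('http2');

const server = http2.createServer((req, res) => {
  if (req.url !== '/') {
    res.writeHead(404);
    res.end();
    return;
  }
  // Attempt to push /style.css alongside the main document.
  res.createPushResponse({ ':path': '/style.css' }, (err, pushRes) => {
    if (err) {
      // The push may fail (e.g. ERR_HTTP2_INVALID_STREAM if the request
      // stream closed first) or be refused when the client disables push.
      return;
    }
    pushRes.writeHead(200, { 'content-type': 'text/css' });
    pushRes.end('h1 { color: teal; }');
  });
  res.writeHead(200, { 'content-type': 'text/html; charset=utf-8' });
  res.end('<link rel="stylesheet" href="/style.css"><h1>pushed</h1>');
});

server.listen(8080); // placeholder port
```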
+ * @since v8.4.0 + * @param headers An object describing the headers + * @param callback Called once `http2stream.pushStream()` is finished, or either when the attempt to create the pushed `Http2Stream` has failed or has been rejected, or the state of + * `Http2ServerRequest` is closed prior to calling the `http2stream.pushStream()` method + */ + createPushResponse(headers: OutgoingHttpHeaders, callback: (err: Error | null, res: Http2ServerResponse) => void): void; + addListener(event: 'close', listener: () => void): this; + addListener(event: 'drain', listener: () => void): this; + addListener(event: 'error', listener: (error: Error) => void): this; + addListener(event: 'finish', listener: () => void): this; + addListener(event: 'pipe', listener: (src: stream.Readable) => void): this; + addListener(event: 'unpipe', listener: (src: stream.Readable) => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: 'close'): boolean; + emit(event: 'drain'): boolean; + emit(event: 'error', error: Error): boolean; + emit(event: 'finish'): boolean; + emit(event: 'pipe', src: stream.Readable): boolean; + emit(event: 'unpipe', src: stream.Readable): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: 'close', listener: () => void): this; + on(event: 'drain', listener: () => void): this; + on(event: 'error', listener: (error: Error) => void): this; + on(event: 'finish', listener: () => void): this; + on(event: 'pipe', listener: (src: stream.Readable) => void): this; + on(event: 'unpipe', listener: (src: stream.Readable) => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: 'close', listener: () => void): this; + once(event: 'drain', listener: () => void): this; + once(event: 'error', listener: (error: Error) => void): this; + once(event: 'finish', listener: () => void): this; + once(event: 'pipe', listener: (src: stream.Readable) => void): this; + once(event: 'unpipe', listener: (src: stream.Readable) => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: 'close', listener: () => void): this; + prependListener(event: 'drain', listener: () => void): this; + prependListener(event: 'error', listener: (error: Error) => void): this; + prependListener(event: 'finish', listener: () => void): this; + prependListener(event: 'pipe', listener: (src: stream.Readable) => void): this; + prependListener(event: 'unpipe', listener: (src: stream.Readable) => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener(event: 'close', listener: () => void): this; + prependOnceListener(event: 'drain', listener: () => void): this; + prependOnceListener(event: 'error', listener: (error: Error) => void): this; + prependOnceListener(event: 'finish', listener: () => void): this; + prependOnceListener(event: 'pipe', listener: (src: stream.Readable) => void): this; + prependOnceListener(event: 'unpipe', listener: (src: stream.Readable) => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + export namespace constants { + const NGHTTP2_SESSION_SERVER: number; + const NGHTTP2_SESSION_CLIENT: number; + const NGHTTP2_STREAM_STATE_IDLE: number; + const NGHTTP2_STREAM_STATE_OPEN: number; + const NGHTTP2_STREAM_STATE_RESERVED_LOCAL: number; + const NGHTTP2_STREAM_STATE_RESERVED_REMOTE: number; + const NGHTTP2_STREAM_STATE_HALF_CLOSED_LOCAL: 
number; + const NGHTTP2_STREAM_STATE_HALF_CLOSED_REMOTE: number; + const NGHTTP2_STREAM_STATE_CLOSED: number; + const NGHTTP2_NO_ERROR: number; + const NGHTTP2_PROTOCOL_ERROR: number; + const NGHTTP2_INTERNAL_ERROR: number; + const NGHTTP2_FLOW_CONTROL_ERROR: number; + const NGHTTP2_SETTINGS_TIMEOUT: number; + const NGHTTP2_STREAM_CLOSED: number; + const NGHTTP2_FRAME_SIZE_ERROR: number; + const NGHTTP2_REFUSED_STREAM: number; + const NGHTTP2_CANCEL: number; + const NGHTTP2_COMPRESSION_ERROR: number; + const NGHTTP2_CONNECT_ERROR: number; + const NGHTTP2_ENHANCE_YOUR_CALM: number; + const NGHTTP2_INADEQUATE_SECURITY: number; + const NGHTTP2_HTTP_1_1_REQUIRED: number; + const NGHTTP2_ERR_FRAME_SIZE_ERROR: number; + const NGHTTP2_FLAG_NONE: number; + const NGHTTP2_FLAG_END_STREAM: number; + const NGHTTP2_FLAG_END_HEADERS: number; + const NGHTTP2_FLAG_ACK: number; + const NGHTTP2_FLAG_PADDED: number; + const NGHTTP2_FLAG_PRIORITY: number; + const DEFAULT_SETTINGS_HEADER_TABLE_SIZE: number; + const DEFAULT_SETTINGS_ENABLE_PUSH: number; + const DEFAULT_SETTINGS_INITIAL_WINDOW_SIZE: number; + const DEFAULT_SETTINGS_MAX_FRAME_SIZE: number; + const MAX_MAX_FRAME_SIZE: number; + const MIN_MAX_FRAME_SIZE: number; + const MAX_INITIAL_WINDOW_SIZE: number; + const NGHTTP2_DEFAULT_WEIGHT: number; + const NGHTTP2_SETTINGS_HEADER_TABLE_SIZE: number; + const NGHTTP2_SETTINGS_ENABLE_PUSH: number; + const NGHTTP2_SETTINGS_MAX_CONCURRENT_STREAMS: number; + const NGHTTP2_SETTINGS_INITIAL_WINDOW_SIZE: number; + const NGHTTP2_SETTINGS_MAX_FRAME_SIZE: number; + const NGHTTP2_SETTINGS_MAX_HEADER_LIST_SIZE: number; + const PADDING_STRATEGY_NONE: number; + const PADDING_STRATEGY_MAX: number; + const PADDING_STRATEGY_CALLBACK: number; + const HTTP2_HEADER_STATUS: string; + const HTTP2_HEADER_METHOD: string; + const HTTP2_HEADER_AUTHORITY: string; + const HTTP2_HEADER_SCHEME: string; + const HTTP2_HEADER_PATH: string; + const HTTP2_HEADER_ACCEPT_CHARSET: string; + const HTTP2_HEADER_ACCEPT_ENCODING: string; + const HTTP2_HEADER_ACCEPT_LANGUAGE: string; + const HTTP2_HEADER_ACCEPT_RANGES: string; + const HTTP2_HEADER_ACCEPT: string; + const HTTP2_HEADER_ACCESS_CONTROL_ALLOW_ORIGIN: string; + const HTTP2_HEADER_AGE: string; + const HTTP2_HEADER_ALLOW: string; + const HTTP2_HEADER_AUTHORIZATION: string; + const HTTP2_HEADER_CACHE_CONTROL: string; + const HTTP2_HEADER_CONNECTION: string; + const HTTP2_HEADER_CONTENT_DISPOSITION: string; + const HTTP2_HEADER_CONTENT_ENCODING: string; + const HTTP2_HEADER_CONTENT_LANGUAGE: string; + const HTTP2_HEADER_CONTENT_LENGTH: string; + const HTTP2_HEADER_CONTENT_LOCATION: string; + const HTTP2_HEADER_CONTENT_MD5: string; + const HTTP2_HEADER_CONTENT_RANGE: string; + const HTTP2_HEADER_CONTENT_TYPE: string; + const HTTP2_HEADER_COOKIE: string; + const HTTP2_HEADER_DATE: string; + const HTTP2_HEADER_ETAG: string; + const HTTP2_HEADER_EXPECT: string; + const HTTP2_HEADER_EXPIRES: string; + const HTTP2_HEADER_FROM: string; + const HTTP2_HEADER_HOST: string; + const HTTP2_HEADER_IF_MATCH: string; + const HTTP2_HEADER_IF_MODIFIED_SINCE: string; + const HTTP2_HEADER_IF_NONE_MATCH: string; + const HTTP2_HEADER_IF_RANGE: string; + const HTTP2_HEADER_IF_UNMODIFIED_SINCE: string; + const HTTP2_HEADER_LAST_MODIFIED: string; + const HTTP2_HEADER_LINK: string; + const HTTP2_HEADER_LOCATION: string; + const HTTP2_HEADER_MAX_FORWARDS: string; + const HTTP2_HEADER_PREFER: string; + const HTTP2_HEADER_PROXY_AUTHENTICATE: string; + const HTTP2_HEADER_PROXY_AUTHORIZATION: string; + const 
HTTP2_HEADER_RANGE: string; + const HTTP2_HEADER_REFERER: string; + const HTTP2_HEADER_REFRESH: string; + const HTTP2_HEADER_RETRY_AFTER: string; + const HTTP2_HEADER_SERVER: string; + const HTTP2_HEADER_SET_COOKIE: string; + const HTTP2_HEADER_STRICT_TRANSPORT_SECURITY: string; + const HTTP2_HEADER_TRANSFER_ENCODING: string; + const HTTP2_HEADER_TE: string; + const HTTP2_HEADER_UPGRADE: string; + const HTTP2_HEADER_USER_AGENT: string; + const HTTP2_HEADER_VARY: string; + const HTTP2_HEADER_VIA: string; + const HTTP2_HEADER_WWW_AUTHENTICATE: string; + const HTTP2_HEADER_HTTP2_SETTINGS: string; + const HTTP2_HEADER_KEEP_ALIVE: string; + const HTTP2_HEADER_PROXY_CONNECTION: string; + const HTTP2_METHOD_ACL: string; + const HTTP2_METHOD_BASELINE_CONTROL: string; + const HTTP2_METHOD_BIND: string; + const HTTP2_METHOD_CHECKIN: string; + const HTTP2_METHOD_CHECKOUT: string; + const HTTP2_METHOD_CONNECT: string; + const HTTP2_METHOD_COPY: string; + const HTTP2_METHOD_DELETE: string; + const HTTP2_METHOD_GET: string; + const HTTP2_METHOD_HEAD: string; + const HTTP2_METHOD_LABEL: string; + const HTTP2_METHOD_LINK: string; + const HTTP2_METHOD_LOCK: string; + const HTTP2_METHOD_MERGE: string; + const HTTP2_METHOD_MKACTIVITY: string; + const HTTP2_METHOD_MKCALENDAR: string; + const HTTP2_METHOD_MKCOL: string; + const HTTP2_METHOD_MKREDIRECTREF: string; + const HTTP2_METHOD_MKWORKSPACE: string; + const HTTP2_METHOD_MOVE: string; + const HTTP2_METHOD_OPTIONS: string; + const HTTP2_METHOD_ORDERPATCH: string; + const HTTP2_METHOD_PATCH: string; + const HTTP2_METHOD_POST: string; + const HTTP2_METHOD_PRI: string; + const HTTP2_METHOD_PROPFIND: string; + const HTTP2_METHOD_PROPPATCH: string; + const HTTP2_METHOD_PUT: string; + const HTTP2_METHOD_REBIND: string; + const HTTP2_METHOD_REPORT: string; + const HTTP2_METHOD_SEARCH: string; + const HTTP2_METHOD_TRACE: string; + const HTTP2_METHOD_UNBIND: string; + const HTTP2_METHOD_UNCHECKOUT: string; + const HTTP2_METHOD_UNLINK: string; + const HTTP2_METHOD_UNLOCK: string; + const HTTP2_METHOD_UPDATE: string; + const HTTP2_METHOD_UPDATEREDIRECTREF: string; + const HTTP2_METHOD_VERSION_CONTROL: string; + const HTTP_STATUS_CONTINUE: number; + const HTTP_STATUS_SWITCHING_PROTOCOLS: number; + const HTTP_STATUS_PROCESSING: number; + const HTTP_STATUS_OK: number; + const HTTP_STATUS_CREATED: number; + const HTTP_STATUS_ACCEPTED: number; + const HTTP_STATUS_NON_AUTHORITATIVE_INFORMATION: number; + const HTTP_STATUS_NO_CONTENT: number; + const HTTP_STATUS_RESET_CONTENT: number; + const HTTP_STATUS_PARTIAL_CONTENT: number; + const HTTP_STATUS_MULTI_STATUS: number; + const HTTP_STATUS_ALREADY_REPORTED: number; + const HTTP_STATUS_IM_USED: number; + const HTTP_STATUS_MULTIPLE_CHOICES: number; + const HTTP_STATUS_MOVED_PERMANENTLY: number; + const HTTP_STATUS_FOUND: number; + const HTTP_STATUS_SEE_OTHER: number; + const HTTP_STATUS_NOT_MODIFIED: number; + const HTTP_STATUS_USE_PROXY: number; + const HTTP_STATUS_TEMPORARY_REDIRECT: number; + const HTTP_STATUS_PERMANENT_REDIRECT: number; + const HTTP_STATUS_BAD_REQUEST: number; + const HTTP_STATUS_UNAUTHORIZED: number; + const HTTP_STATUS_PAYMENT_REQUIRED: number; + const HTTP_STATUS_FORBIDDEN: number; + const HTTP_STATUS_NOT_FOUND: number; + const HTTP_STATUS_METHOD_NOT_ALLOWED: number; + const HTTP_STATUS_NOT_ACCEPTABLE: number; + const HTTP_STATUS_PROXY_AUTHENTICATION_REQUIRED: number; + const HTTP_STATUS_REQUEST_TIMEOUT: number; + const HTTP_STATUS_CONFLICT: number; + const HTTP_STATUS_GONE: number; + const 
HTTP_STATUS_LENGTH_REQUIRED: number; + const HTTP_STATUS_PRECONDITION_FAILED: number; + const HTTP_STATUS_PAYLOAD_TOO_LARGE: number; + const HTTP_STATUS_URI_TOO_LONG: number; + const HTTP_STATUS_UNSUPPORTED_MEDIA_TYPE: number; + const HTTP_STATUS_RANGE_NOT_SATISFIABLE: number; + const HTTP_STATUS_EXPECTATION_FAILED: number; + const HTTP_STATUS_TEAPOT: number; + const HTTP_STATUS_MISDIRECTED_REQUEST: number; + const HTTP_STATUS_UNPROCESSABLE_ENTITY: number; + const HTTP_STATUS_LOCKED: number; + const HTTP_STATUS_FAILED_DEPENDENCY: number; + const HTTP_STATUS_UNORDERED_COLLECTION: number; + const HTTP_STATUS_UPGRADE_REQUIRED: number; + const HTTP_STATUS_PRECONDITION_REQUIRED: number; + const HTTP_STATUS_TOO_MANY_REQUESTS: number; + const HTTP_STATUS_REQUEST_HEADER_FIELDS_TOO_LARGE: number; + const HTTP_STATUS_UNAVAILABLE_FOR_LEGAL_REASONS: number; + const HTTP_STATUS_INTERNAL_SERVER_ERROR: number; + const HTTP_STATUS_NOT_IMPLEMENTED: number; + const HTTP_STATUS_BAD_GATEWAY: number; + const HTTP_STATUS_SERVICE_UNAVAILABLE: number; + const HTTP_STATUS_GATEWAY_TIMEOUT: number; + const HTTP_STATUS_HTTP_VERSION_NOT_SUPPORTED: number; + const HTTP_STATUS_VARIANT_ALSO_NEGOTIATES: number; + const HTTP_STATUS_INSUFFICIENT_STORAGE: number; + const HTTP_STATUS_LOOP_DETECTED: number; + const HTTP_STATUS_BANDWIDTH_LIMIT_EXCEEDED: number; + const HTTP_STATUS_NOT_EXTENDED: number; + const HTTP_STATUS_NETWORK_AUTHENTICATION_REQUIRED: number; + } + /** + * This symbol can be set as a property on the HTTP/2 headers object with + * an array value in order to provide a list of headers considered sensitive. + */ + export const sensitiveHeaders: symbol; + /** + * Returns an object containing the default settings for an `Http2Session`instance. This method returns a new object instance every time it is called + * so instances returned may be safely modified for use. + * @since v8.4.0 + */ + export function getDefaultSettings(): Settings; + /** + * Returns a `Buffer` instance containing serialized representation of the given + * HTTP/2 settings as specified in the [HTTP/2](https://tools.ietf.org/html/rfc7540) specification. This is intended + * for use with the `HTTP2-Settings` header field. + * + * ```js + * const http2 = require('http2'); + * + * const packed = http2.getPackedSettings({ enablePush: false }); + * + * console.log(packed.toString('base64')); + * // Prints: AAIAAAAA + * ``` + * @since v8.4.0 + */ + export function getPackedSettings(settings: Settings): Buffer; + /** + * Returns a `HTTP/2 Settings Object` containing the deserialized settings from + * the given `Buffer` as generated by `http2.getPackedSettings()`. + * @since v8.4.0 + * @param buf The packed settings. + */ + export function getUnpackedSettings(buf: Uint8Array): Settings; + /** + * Returns a `net.Server` instance that creates and manages `Http2Session`instances. + * + * Since there are no browsers known that support [unencrypted HTTP/2](https://http2.github.io/faq/#does-http2-require-encryption), the use of {@link createSecureServer} is necessary when + * communicating + * with browser clients. + * + * ```js + * const http2 = require('http2'); + * + * // Create an unencrypted HTTP/2 server. + * // Since there are no browsers known that support + * // unencrypted HTTP/2, the use of `http2.createSecureServer()` + * // is necessary when communicating with browser clients. 
+ * const server = http2.createServer(); + * + * server.on('stream', (stream, headers) => { + * stream.respond({ + * 'content-type': 'text/html; charset=utf-8', + * ':status': 200 + * }); + * stream.end('

<h1>Hello World</h1>
'); + * }); + * + * server.listen(80); + * ``` + * @since v8.4.0 + * @param onRequestHandler See `Compatibility API` + */ + export function createServer(onRequestHandler?: (request: Http2ServerRequest, response: Http2ServerResponse) => void): Http2Server; + export function createServer(options: ServerOptions, onRequestHandler?: (request: Http2ServerRequest, response: Http2ServerResponse) => void): Http2Server; + /** + * Returns a `tls.Server` instance that creates and manages `Http2Session`instances. + * + * ```js + * const http2 = require('http2'); + * const fs = require('fs'); + * + * const options = { + * key: fs.readFileSync('server-key.pem'), + * cert: fs.readFileSync('server-cert.pem') + * }; + * + * // Create a secure HTTP/2 server + * const server = http2.createSecureServer(options); + * + * server.on('stream', (stream, headers) => { + * stream.respond({ + * 'content-type': 'text/html; charset=utf-8', + * ':status': 200 + * }); + * stream.end('

<h1>Hello World</h1>
'); + * }); + * + * server.listen(80); + * ``` + * @since v8.4.0 + * @param onRequestHandler See `Compatibility API` + */ + export function createSecureServer(onRequestHandler?: (request: Http2ServerRequest, response: Http2ServerResponse) => void): Http2SecureServer; + export function createSecureServer(options: SecureServerOptions, onRequestHandler?: (request: Http2ServerRequest, response: Http2ServerResponse) => void): Http2SecureServer; + /** + * Returns a `ClientHttp2Session` instance. + * + * ```js + * const http2 = require('http2'); + * const client = http2.connect('https://localhost:1234'); + * + * // Use the client + * + * client.close(); + * ``` + * @since v8.4.0 + * @param authority The remote HTTP/2 server to connect to. This must be in the form of a minimal, valid URL with the `http://` or `https://` prefix, host name, and IP port (if a non-default port + * is used). Userinfo (user ID and password), path, querystring, and fragment details in the URL will be ignored. + * @param listener Will be registered as a one-time listener of the {@link 'connect'} event. + */ + export function connect(authority: string | url.URL, listener: (session: ClientHttp2Session, socket: net.Socket | tls.TLSSocket) => void): ClientHttp2Session; + export function connect( + authority: string | url.URL, + options?: ClientSessionOptions | SecureClientSessionOptions, + listener?: (session: ClientHttp2Session, socket: net.Socket | tls.TLSSocket) => void + ): ClientHttp2Session; +} +declare module 'node:http2' { + export * from 'http2'; +} diff --git a/node_modules/@types/node/https.d.ts b/node_modules/@types/node/https.d.ts new file mode 100755 index 0000000..aae4a95 --- /dev/null +++ b/node_modules/@types/node/https.d.ts @@ -0,0 +1,541 @@ +/** + * HTTPS is the HTTP protocol over TLS/SSL. In Node.js this is implemented as a + * separate module. + * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/https.js) + */ +declare module 'https' { + import { Duplex } from 'node:stream'; + import * as tls from 'node:tls'; + import * as http from 'node:http'; + import { URL } from 'node:url'; + type ServerOptions< + Request extends typeof http.IncomingMessage = typeof http.IncomingMessage, + Response extends typeof http.ServerResponse = typeof http.ServerResponse, + > = tls.SecureContextOptions & tls.TlsOptions & http.ServerOptions; + type RequestOptions = http.RequestOptions & + tls.SecureContextOptions & { + rejectUnauthorized?: boolean | undefined; // Defaults to true + servername?: string | undefined; // SNI TLS Extension + }; + interface AgentOptions extends http.AgentOptions, tls.ConnectionOptions { + rejectUnauthorized?: boolean | undefined; + maxCachedSessions?: number | undefined; + } + /** + * An `Agent` object for HTTPS similar to `http.Agent`. See {@link request} for more information. + * @since v0.4.5 + */ + class Agent extends http.Agent { + constructor(options?: AgentOptions); + options: AgentOptions; + } + interface Server< + Request extends typeof http.IncomingMessage = typeof http.IncomingMessage, + Response extends typeof http.ServerResponse = typeof http.ServerResponse, + > extends http.Server {} + /** + * See `http.Server` for more information. 
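The `http2.connect()` example in the typings above stops at `// Use the client`. As a hedged illustration (not from the vendored file; the `https://localhost:8443` authority, the `/` path, and the relaxed certificate check are placeholders for local testing), issuing a request on the returned `ClientHttp2Session` might look like this:

```js
const http2 = require('http2');

const client = http2.connect('https://localhost:8443', {
  // Placeholder: only acceptable for a self-signed local test certificate.
  rejectUnauthorized: false,
});
client.on('error', (err) => console.error(err));

// A GET request for '/'; request() returns a ClientHttp2Stream.
const req = client.request({ ':path': '/' });
req.on('response', (headers) => {
  console.log('status:', headers[':status']);
});

let body = '';
req.setEncoding('utf8');
req.on('data', (chunk) => { body += chunk; });
req.on('end', () => {
  console.log(body);
  client.close();
});
req.end();
```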
+ * @since v0.3.4 + */ + class Server< + Request extends typeof http.IncomingMessage = typeof http.IncomingMessage, + Response extends typeof http.ServerResponse = typeof http.ServerResponse, + > extends tls.Server { + constructor(requestListener?: http.RequestListener); + constructor( + options: ServerOptions, + requestListener?: http.RequestListener, + ); + /** + * Closes all connections connected to this server. + * @since v18.2.0 + */ + closeAllConnections(): void; + /** + * Closes all connections connected to this server which are not sending a request or waiting for a response. + * @since v18.2.0 + */ + closeIdleConnections(): void; + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: 'keylog', listener: (line: Buffer, tlsSocket: tls.TLSSocket) => void): this; + addListener( + event: 'newSession', + listener: (sessionId: Buffer, sessionData: Buffer, callback: (err: Error, resp: Buffer) => void) => void, + ): this; + addListener( + event: 'OCSPRequest', + listener: ( + certificate: Buffer, + issuer: Buffer, + callback: (err: Error | null, resp: Buffer) => void, + ) => void, + ): this; + addListener( + event: 'resumeSession', + listener: (sessionId: Buffer, callback: (err: Error, sessionData: Buffer) => void) => void, + ): this; + addListener(event: 'secureConnection', listener: (tlsSocket: tls.TLSSocket) => void): this; + addListener(event: 'tlsClientError', listener: (err: Error, tlsSocket: tls.TLSSocket) => void): this; + addListener(event: 'close', listener: () => void): this; + addListener(event: 'connection', listener: (socket: Duplex) => void): this; + addListener(event: 'error', listener: (err: Error) => void): this; + addListener(event: 'listening', listener: () => void): this; + addListener(event: 'checkContinue', listener: http.RequestListener): this; + addListener(event: 'checkExpectation', listener: http.RequestListener): this; + addListener(event: 'clientError', listener: (err: Error, socket: Duplex) => void): this; + addListener( + event: 'connect', + listener: (req: InstanceType, socket: Duplex, head: Buffer) => void, + ): this; + addListener(event: 'request', listener: http.RequestListener): this; + addListener( + event: 'upgrade', + listener: (req: InstanceType, socket: Duplex, head: Buffer) => void, + ): this; + emit(event: string, ...args: any[]): boolean; + emit(event: 'keylog', line: Buffer, tlsSocket: tls.TLSSocket): boolean; + emit( + event: 'newSession', + sessionId: Buffer, + sessionData: Buffer, + callback: (err: Error, resp: Buffer) => void, + ): boolean; + emit( + event: 'OCSPRequest', + certificate: Buffer, + issuer: Buffer, + callback: (err: Error | null, resp: Buffer) => void, + ): boolean; + emit(event: 'resumeSession', sessionId: Buffer, callback: (err: Error, sessionData: Buffer) => void): boolean; + emit(event: 'secureConnection', tlsSocket: tls.TLSSocket): boolean; + emit(event: 'tlsClientError', err: Error, tlsSocket: tls.TLSSocket): boolean; + emit(event: 'close'): boolean; + emit(event: 'connection', socket: Duplex): boolean; + emit(event: 'error', err: Error): boolean; + emit(event: 'listening'): boolean; + emit( + event: 'checkContinue', + req: InstanceType, + res: InstanceType & { req: InstanceType }, + ): boolean; + emit( + event: 'checkExpectation', + req: InstanceType, + res: InstanceType & { req: InstanceType }, + ): boolean; + emit(event: 'clientError', err: Error, socket: Duplex): boolean; + emit(event: 'connect', req: InstanceType, socket: Duplex, head: Buffer): boolean; + emit( + event: 
'request', + req: InstanceType, + res: InstanceType & { req: InstanceType }, + ): boolean; + emit(event: 'upgrade', req: InstanceType, socket: Duplex, head: Buffer): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: 'keylog', listener: (line: Buffer, tlsSocket: tls.TLSSocket) => void): this; + on( + event: 'newSession', + listener: (sessionId: Buffer, sessionData: Buffer, callback: (err: Error, resp: Buffer) => void) => void, + ): this; + on( + event: 'OCSPRequest', + listener: ( + certificate: Buffer, + issuer: Buffer, + callback: (err: Error | null, resp: Buffer) => void, + ) => void, + ): this; + on( + event: 'resumeSession', + listener: (sessionId: Buffer, callback: (err: Error, sessionData: Buffer) => void) => void, + ): this; + on(event: 'secureConnection', listener: (tlsSocket: tls.TLSSocket) => void): this; + on(event: 'tlsClientError', listener: (err: Error, tlsSocket: tls.TLSSocket) => void): this; + on(event: 'close', listener: () => void): this; + on(event: 'connection', listener: (socket: Duplex) => void): this; + on(event: 'error', listener: (err: Error) => void): this; + on(event: 'listening', listener: () => void): this; + on(event: 'checkContinue', listener: http.RequestListener): this; + on(event: 'checkExpectation', listener: http.RequestListener): this; + on(event: 'clientError', listener: (err: Error, socket: Duplex) => void): this; + on(event: 'connect', listener: (req: InstanceType, socket: Duplex, head: Buffer) => void): this; + on(event: 'request', listener: http.RequestListener): this; + on(event: 'upgrade', listener: (req: InstanceType, socket: Duplex, head: Buffer) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: 'keylog', listener: (line: Buffer, tlsSocket: tls.TLSSocket) => void): this; + once( + event: 'newSession', + listener: (sessionId: Buffer, sessionData: Buffer, callback: (err: Error, resp: Buffer) => void) => void, + ): this; + once( + event: 'OCSPRequest', + listener: ( + certificate: Buffer, + issuer: Buffer, + callback: (err: Error | null, resp: Buffer) => void, + ) => void, + ): this; + once( + event: 'resumeSession', + listener: (sessionId: Buffer, callback: (err: Error, sessionData: Buffer) => void) => void, + ): this; + once(event: 'secureConnection', listener: (tlsSocket: tls.TLSSocket) => void): this; + once(event: 'tlsClientError', listener: (err: Error, tlsSocket: tls.TLSSocket) => void): this; + once(event: 'close', listener: () => void): this; + once(event: 'connection', listener: (socket: Duplex) => void): this; + once(event: 'error', listener: (err: Error) => void): this; + once(event: 'listening', listener: () => void): this; + once(event: 'checkContinue', listener: http.RequestListener): this; + once(event: 'checkExpectation', listener: http.RequestListener): this; + once(event: 'clientError', listener: (err: Error, socket: Duplex) => void): this; + once(event: 'connect', listener: (req: InstanceType, socket: Duplex, head: Buffer) => void): this; + once(event: 'request', listener: http.RequestListener): this; + once(event: 'upgrade', listener: (req: InstanceType, socket: Duplex, head: Buffer) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: 'keylog', listener: (line: Buffer, tlsSocket: tls.TLSSocket) => void): this; + prependListener( + event: 'newSession', + listener: (sessionId: Buffer, sessionData: Buffer, callback: (err: Error, resp: Buffer) => void) => void, + ): this; + 
prependListener( + event: 'OCSPRequest', + listener: ( + certificate: Buffer, + issuer: Buffer, + callback: (err: Error | null, resp: Buffer) => void, + ) => void, + ): this; + prependListener( + event: 'resumeSession', + listener: (sessionId: Buffer, callback: (err: Error, sessionData: Buffer) => void) => void, + ): this; + prependListener(event: 'secureConnection', listener: (tlsSocket: tls.TLSSocket) => void): this; + prependListener(event: 'tlsClientError', listener: (err: Error, tlsSocket: tls.TLSSocket) => void): this; + prependListener(event: 'close', listener: () => void): this; + prependListener(event: 'connection', listener: (socket: Duplex) => void): this; + prependListener(event: 'error', listener: (err: Error) => void): this; + prependListener(event: 'listening', listener: () => void): this; + prependListener(event: 'checkContinue', listener: http.RequestListener): this; + prependListener(event: 'checkExpectation', listener: http.RequestListener): this; + prependListener(event: 'clientError', listener: (err: Error, socket: Duplex) => void): this; + prependListener( + event: 'connect', + listener: (req: InstanceType, socket: Duplex, head: Buffer) => void, + ): this; + prependListener(event: 'request', listener: http.RequestListener): this; + prependListener( + event: 'upgrade', + listener: (req: InstanceType, socket: Duplex, head: Buffer) => void, + ): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: 'keylog', listener: (line: Buffer, tlsSocket: tls.TLSSocket) => void): this; + prependOnceListener( + event: 'newSession', + listener: (sessionId: Buffer, sessionData: Buffer, callback: (err: Error, resp: Buffer) => void) => void, + ): this; + prependOnceListener( + event: 'OCSPRequest', + listener: ( + certificate: Buffer, + issuer: Buffer, + callback: (err: Error | null, resp: Buffer) => void, + ) => void, + ): this; + prependOnceListener( + event: 'resumeSession', + listener: (sessionId: Buffer, callback: (err: Error, sessionData: Buffer) => void) => void, + ): this; + prependOnceListener(event: 'secureConnection', listener: (tlsSocket: tls.TLSSocket) => void): this; + prependOnceListener(event: 'tlsClientError', listener: (err: Error, tlsSocket: tls.TLSSocket) => void): this; + prependOnceListener(event: 'close', listener: () => void): this; + prependOnceListener(event: 'connection', listener: (socket: Duplex) => void): this; + prependOnceListener(event: 'error', listener: (err: Error) => void): this; + prependOnceListener(event: 'listening', listener: () => void): this; + prependOnceListener(event: 'checkContinue', listener: http.RequestListener): this; + prependOnceListener(event: 'checkExpectation', listener: http.RequestListener): this; + prependOnceListener(event: 'clientError', listener: (err: Error, socket: Duplex) => void): this; + prependOnceListener( + event: 'connect', + listener: (req: InstanceType, socket: Duplex, head: Buffer) => void, + ): this; + prependOnceListener(event: 'request', listener: http.RequestListener): this; + prependOnceListener( + event: 'upgrade', + listener: (req: InstanceType, socket: Duplex, head: Buffer) => void, + ): this; + } + /** + * ```js + * // curl -k https://localhost:8000/ + * const https = require('https'); + * const fs = require('fs'); + * + * const options = { + * key: fs.readFileSync('test/fixtures/keys/agent2-key.pem'), + * cert: fs.readFileSync('test/fixtures/keys/agent2-cert.pem') + * }; + * + * https.createServer(options, (req, res) => { + * 
res.writeHead(200); + * res.end('hello world\n'); + * }).listen(8000); + * ``` + * + * Or + * + * ```js + * const https = require('https'); + * const fs = require('fs'); + * + * const options = { + * pfx: fs.readFileSync('test/fixtures/test_cert.pfx'), + * passphrase: 'sample' + * }; + * + * https.createServer(options, (req, res) => { + * res.writeHead(200); + * res.end('hello world\n'); + * }).listen(8000); + * ``` + * @since v0.3.4 + * @param options Accepts `options` from `createServer`, `createSecureContext` and `createServer`. + * @param requestListener A listener to be added to the `'request'` event. + */ + function createServer< + Request extends typeof http.IncomingMessage = typeof http.IncomingMessage, + Response extends typeof http.ServerResponse = typeof http.ServerResponse, + >(requestListener?: http.RequestListener): Server; + function createServer< + Request extends typeof http.IncomingMessage = typeof http.IncomingMessage, + Response extends typeof http.ServerResponse = typeof http.ServerResponse, + >( + options: ServerOptions, + requestListener?: http.RequestListener, + ): Server; + /** + * Makes a request to a secure web server. + * + * The following additional `options` from `tls.connect()` are also accepted:`ca`, `cert`, `ciphers`, `clientCertEngine`, `crl`, `dhparam`, `ecdhCurve`,`honorCipherOrder`, `key`, `passphrase`, + * `pfx`, `rejectUnauthorized`,`secureOptions`, `secureProtocol`, `servername`, `sessionIdContext`,`highWaterMark`. + * + * `options` can be an object, a string, or a `URL` object. If `options` is a + * string, it is automatically parsed with `new URL()`. If it is a `URL` object, it will be automatically converted to an ordinary `options` object. + * + * `https.request()` returns an instance of the `http.ClientRequest` class. The `ClientRequest` instance is a writable stream. If one needs to + * upload a file with a POST request, then write to the `ClientRequest` object. + * + * ```js + * const https = require('https'); + * + * const options = { + * hostname: 'encrypted.google.com', + * port: 443, + * path: '/', + * method: 'GET' + * }; + * + * const req = https.request(options, (res) => { + * console.log('statusCode:', res.statusCode); + * console.log('headers:', res.headers); + * + * res.on('data', (d) => { + * process.stdout.write(d); + * }); + * }); + * + * req.on('error', (e) => { + * console.error(e); + * }); + * req.end(); + * ``` + * + * Example using options from `tls.connect()`: + * + * ```js + * const options = { + * hostname: 'encrypted.google.com', + * port: 443, + * path: '/', + * method: 'GET', + * key: fs.readFileSync('test/fixtures/keys/agent2-key.pem'), + * cert: fs.readFileSync('test/fixtures/keys/agent2-cert.pem') + * }; + * options.agent = new https.Agent(options); + * + * const req = https.request(options, (res) => { + * // ... + * }); + * ``` + * + * Alternatively, opt out of connection pooling by not using an `Agent`. + * + * ```js + * const options = { + * hostname: 'encrypted.google.com', + * port: 443, + * path: '/', + * method: 'GET', + * key: fs.readFileSync('test/fixtures/keys/agent2-key.pem'), + * cert: fs.readFileSync('test/fixtures/keys/agent2-cert.pem'), + * agent: false + * }; + * + * const req = https.request(options, (res) => { + * // ... + * }); + * ``` + * + * Example using a `URL` as `options`: + * + * ```js + * const options = new URL('https://abc:xyz@example.com'); + * + * const req = https.request(options, (res) => { + * // ... 
+ * }); + * ``` + * + * Example pinning on certificate fingerprint, or the public key (similar to`pin-sha256`): + * + * ```js + * const tls = require('tls'); + * const https = require('https'); + * const crypto = require('crypto'); + * + * function sha256(s) { + * return crypto.createHash('sha256').update(s).digest('base64'); + * } + * const options = { + * hostname: 'github.com', + * port: 443, + * path: '/', + * method: 'GET', + * checkServerIdentity: function(host, cert) { + * // Make sure the certificate is issued to the host we are connected to + * const err = tls.checkServerIdentity(host, cert); + * if (err) { + * return err; + * } + * + * // Pin the public key, similar to HPKP pin-sha25 pinning + * const pubkey256 = 'pL1+qb9HTMRZJmuC/bB/ZI9d302BYrrqiVuRyW+DGrU='; + * if (sha256(cert.pubkey) !== pubkey256) { + * const msg = 'Certificate verification error: ' + + * `The public key of '${cert.subject.CN}' ` + + * 'does not match our pinned fingerprint'; + * return new Error(msg); + * } + * + * // Pin the exact certificate, rather than the pub key + * const cert256 = '25:FE:39:32:D9:63:8C:8A:FC:A1:9A:29:87:' + + * 'D8:3E:4C:1D:98:DB:71:E4:1A:48:03:98:EA:22:6A:BD:8B:93:16'; + * if (cert.fingerprint256 !== cert256) { + * const msg = 'Certificate verification error: ' + + * `The certificate of '${cert.subject.CN}' ` + + * 'does not match our pinned fingerprint'; + * return new Error(msg); + * } + * + * // This loop is informational only. + * // Print the certificate and public key fingerprints of all certs in the + * // chain. Its common to pin the public key of the issuer on the public + * // internet, while pinning the public key of the service in sensitive + * // environments. + * do { + * console.log('Subject Common Name:', cert.subject.CN); + * console.log(' Certificate SHA256 fingerprint:', cert.fingerprint256); + * + * hash = crypto.createHash('sha256'); + * console.log(' Public key ping-sha256:', sha256(cert.pubkey)); + * + * lastprint256 = cert.fingerprint256; + * cert = cert.issuerCertificate; + * } while (cert.fingerprint256 !== lastprint256); + * + * }, + * }; + * + * options.agent = new https.Agent(options); + * const req = https.request(options, (res) => { + * console.log('All OK. Server matched our pinned cert or public key'); + * console.log('statusCode:', res.statusCode); + * // Print the HPKP values + * console.log('headers:', res.headers['public-key-pins']); + * + * res.on('data', (d) => {}); + * }); + * + * req.on('error', (e) => { + * console.error(e.message); + * }); + * req.end(); + * ``` + * + * Outputs for example: + * + * ```text + * Subject Common Name: github.com + * Certificate SHA256 fingerprint: 25:FE:39:32:D9:63:8C:8A:FC:A1:9A:29:87:D8:3E:4C:1D:98:DB:71:E4:1A:48:03:98:EA:22:6A:BD:8B:93:16 + * Public key ping-sha256: pL1+qb9HTMRZJmuC/bB/ZI9d302BYrrqiVuRyW+DGrU= + * Subject Common Name: DigiCert SHA2 Extended Validation Server CA + * Certificate SHA256 fingerprint: 40:3E:06:2A:26:53:05:91:13:28:5B:AF:80:A0:D4:AE:42:2C:84:8C:9F:78:FA:D0:1F:C9:4B:C5:B8:7F:EF:1A + * Public key ping-sha256: RRM1dGqnDFsCJXBTHky16vi1obOlCgFFn/yOhI/y+ho= + * Subject Common Name: DigiCert High Assurance EV Root CA + * Certificate SHA256 fingerprint: 74:31:E5:F4:C3:C1:CE:46:90:77:4F:0B:61:E0:54:40:88:3B:A9:A0:1E:D0:0B:A6:AB:D7:80:6E:D3:B1:18:CF + * Public key ping-sha256: WoiWRyIOVNa9ihaBciRSC7XHjliYS9VwUGOIud4PB18= + * All OK. 
Server matched our pinned cert or public key + * statusCode: 200 + * headers: max-age=0; pin-sha256="WoiWRyIOVNa9ihaBciRSC7XHjliYS9VwUGOIud4PB18="; pin-sha256="RRM1dGqnDFsCJXBTHky16vi1obOlCgFFn/yOhI/y+ho="; + * pin-sha256="k2v657xBsOVe1PQRwOsHsw3bsGT2VzIqz5K+59sNQws="; pin-sha256="K87oWBWM9UZfyddvDfoxL+8lpNyoUB2ptGtn0fv6G2Q="; pin-sha256="IQBnNBEiFuhj+8x6X8XLgh01V9Ic5/V3IRQLNFFc7v4="; + * pin-sha256="iie1VXtL7HzAMF+/PVPR9xzT80kQxdZeJ+zduCB3uj0="; pin-sha256="LvRiGEjRqfzurezaWuj8Wie2gyHMrW5Q06LspMnox7A="; includeSubDomains + * ``` + * @since v0.3.6 + * @param options Accepts all `options` from `request`, with some differences in default values: + */ + function request( + options: RequestOptions | string | URL, + callback?: (res: http.IncomingMessage) => void, + ): http.ClientRequest; + function request( + url: string | URL, + options: RequestOptions, + callback?: (res: http.IncomingMessage) => void, + ): http.ClientRequest; + /** + * Like `http.get()` but for HTTPS. + * + * `options` can be an object, a string, or a `URL` object. If `options` is a + * string, it is automatically parsed with `new URL()`. If it is a `URL` object, it will be automatically converted to an ordinary `options` object. + * + * ```js + * const https = require('https'); + * + * https.get('https://encrypted.google.com/', (res) => { + * console.log('statusCode:', res.statusCode); + * console.log('headers:', res.headers); + * + * res.on('data', (d) => { + * process.stdout.write(d); + * }); + * + * }).on('error', (e) => { + * console.error(e); + * }); + * ``` + * @since v0.3.6 + * @param options Accepts the same `options` as {@link request}, with the `method` always set to `GET`. + */ + function get( + options: RequestOptions | string | URL, + callback?: (res: http.IncomingMessage) => void, + ): http.ClientRequest; + function get( + url: string | URL, + options: RequestOptions, + callback?: (res: http.IncomingMessage) => void, + ): http.ClientRequest; + let globalAgent: Agent; +} +declare module 'node:https' { + export * from 'https'; +} diff --git a/node_modules/@types/node/index.d.ts b/node_modules/@types/node/index.d.ts new file mode 100755 index 0000000..4baa378 --- /dev/null +++ b/node_modules/@types/node/index.d.ts @@ -0,0 +1,133 @@ +// Type definitions for non-npm package Node.js 18.11 +// Project: https://nodejs.org/ +// Definitions by: Microsoft TypeScript +// DefinitelyTyped +// Alberto Schiabel +// Alvis HT Tang +// Andrew Makarov +// Benjamin Toueg +// Chigozirim C. +// David Junger +// Deividas Bakanas +// Eugene Y. Q. Shen +// Hannes Magnusson +// Huw +// Kelvin Jin +// Klaus Meinhardt +// Lishude +// Mariusz Wiktorczyk +// Mohsen Azimi +// Nicolas Even +// Nikita Galkin +// Parambir Singh +// Sebastian Silbermann +// Simon Schick +// Thomas den Hollander +// Wilco Bakker +// wwwy3y3 +// Samuel Ainsworth +// Kyle Uehlein +// Thanik Bhongbhibhat +// Marcin Kopacz +// Trivikram Kamat +// Junxiao Shi +// Ilia Baryshnikov +// ExE Boss +// Piotr Błażejewicz +// Anna Henningsen +// Victor Perin +// Yongsheng Zhang +// NodeJS Contributors +// Linus Unnebäck +// wafuwafu13 +// Matteo Collina +// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped + +/** + * License for programmatically and manually incorporated + * documentation aka. `JSDoc` from https://github.com/nodejs/node/tree/master/doc + * + * Copyright Node.js contributors. All rights reserved. 
+ * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to + * deal in the Software without restriction, including without limitation the + * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + * sell copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + * IN THE SOFTWARE. + */ + +// NOTE: These definitions support NodeJS and TypeScript 4.9+. + +// Reference required types from the default lib: +/// +/// +/// +/// + +// Base definitions for all NodeJS modules that are not specific to any version of TypeScript: +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// + +/// diff --git a/node_modules/@types/node/inspector.d.ts b/node_modules/@types/node/inspector.d.ts new file mode 100755 index 0000000..eba0b55 --- /dev/null +++ b/node_modules/@types/node/inspector.d.ts @@ -0,0 +1,2741 @@ +// eslint-disable-next-line dt-header +// Type definitions for inspector + +// These definitions are auto-generated. +// Please see https://github.com/DefinitelyTyped/DefinitelyTyped/pull/19330 +// for more information. + +// tslint:disable:max-line-length + +/** + * The `inspector` module provides an API for interacting with the V8 inspector. + * + * It can be accessed using: + * + * ```js + * const inspector = require('inspector'); + * ``` + * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/inspector.js) + */ +declare module 'inspector' { + import EventEmitter = require('node:events'); + interface InspectorNotification { + method: string; + params: T; + } + namespace Schema { + /** + * Description of the protocol domain. + */ + interface Domain { + /** + * Domain name. + */ + name: string; + /** + * Domain version. + */ + version: string; + } + interface GetDomainsReturnType { + /** + * List of supported domains. + */ + domains: Domain[]; + } + } + namespace Runtime { + /** + * Unique script identifier. + */ + type ScriptId = string; + /** + * Unique object identifier. + */ + type RemoteObjectId = string; + /** + * Primitive value which cannot be JSON-stringified. + */ + type UnserializableValue = string; + /** + * Mirror object referencing original JavaScript object. + */ + interface RemoteObject { + /** + * Object type. + */ + type: string; + /** + * Object subtype hint. Specified for object type values only. + */ + subtype?: string | undefined; + /** + * Object class (constructor) name. Specified for object type values only. + */ + className?: string | undefined; + /** + * Remote object value in case of primitive values or JSON values (if it was requested). 
+ */ + value?: any; + /** + * Primitive value which can not be JSON-stringified does not have value, but gets this property. + */ + unserializableValue?: UnserializableValue | undefined; + /** + * String representation of the object. + */ + description?: string | undefined; + /** + * Unique object identifier (for non-primitive values). + */ + objectId?: RemoteObjectId | undefined; + /** + * Preview containing abbreviated property values. Specified for object type values only. + * @experimental + */ + preview?: ObjectPreview | undefined; + /** + * @experimental + */ + customPreview?: CustomPreview | undefined; + } + /** + * @experimental + */ + interface CustomPreview { + header: string; + hasBody: boolean; + formatterObjectId: RemoteObjectId; + bindRemoteObjectFunctionId: RemoteObjectId; + configObjectId?: RemoteObjectId | undefined; + } + /** + * Object containing abbreviated remote object value. + * @experimental + */ + interface ObjectPreview { + /** + * Object type. + */ + type: string; + /** + * Object subtype hint. Specified for object type values only. + */ + subtype?: string | undefined; + /** + * String representation of the object. + */ + description?: string | undefined; + /** + * True iff some of the properties or entries of the original object did not fit. + */ + overflow: boolean; + /** + * List of the properties. + */ + properties: PropertyPreview[]; + /** + * List of the entries. Specified for map and set subtype values only. + */ + entries?: EntryPreview[] | undefined; + } + /** + * @experimental + */ + interface PropertyPreview { + /** + * Property name. + */ + name: string; + /** + * Object type. Accessor means that the property itself is an accessor property. + */ + type: string; + /** + * User-friendly property value string. + */ + value?: string | undefined; + /** + * Nested value preview. + */ + valuePreview?: ObjectPreview | undefined; + /** + * Object subtype hint. Specified for object type values only. + */ + subtype?: string | undefined; + } + /** + * @experimental + */ + interface EntryPreview { + /** + * Preview of the key. Specified for map-like collection entries. + */ + key?: ObjectPreview | undefined; + /** + * Preview of the value. + */ + value: ObjectPreview; + } + /** + * Object property descriptor. + */ + interface PropertyDescriptor { + /** + * Property name or symbol description. + */ + name: string; + /** + * The value associated with the property. + */ + value?: RemoteObject | undefined; + /** + * True if the value associated with the property may be changed (data descriptors only). + */ + writable?: boolean | undefined; + /** + * A function which serves as a getter for the property, or undefined if there is no getter (accessor descriptors only). + */ + get?: RemoteObject | undefined; + /** + * A function which serves as a setter for the property, or undefined if there is no setter (accessor descriptors only). + */ + set?: RemoteObject | undefined; + /** + * True if the type of this property descriptor may be changed and if the property may be deleted from the corresponding object. + */ + configurable: boolean; + /** + * True if this property shows up during enumeration of the properties on the corresponding object. + */ + enumerable: boolean; + /** + * True if the result was thrown during the evaluation. + */ + wasThrown?: boolean | undefined; + /** + * True if the property is owned for the object. + */ + isOwn?: boolean | undefined; + /** + * Property symbol object, if the property is of the symbol type. 
+ */ + symbol?: RemoteObject | undefined; + } + /** + * Object internal property descriptor. This property isn't normally visible in JavaScript code. + */ + interface InternalPropertyDescriptor { + /** + * Conventional property name. + */ + name: string; + /** + * The value associated with the property. + */ + value?: RemoteObject | undefined; + } + /** + * Represents function call argument. Either remote object id objectId, primitive value, unserializable primitive value or neither of (for undefined) them should be specified. + */ + interface CallArgument { + /** + * Primitive value or serializable javascript object. + */ + value?: any; + /** + * Primitive value which can not be JSON-stringified. + */ + unserializableValue?: UnserializableValue | undefined; + /** + * Remote object handle. + */ + objectId?: RemoteObjectId | undefined; + } + /** + * Id of an execution context. + */ + type ExecutionContextId = number; + /** + * Description of an isolated world. + */ + interface ExecutionContextDescription { + /** + * Unique id of the execution context. It can be used to specify in which execution context script evaluation should be performed. + */ + id: ExecutionContextId; + /** + * Execution context origin. + */ + origin: string; + /** + * Human readable name describing given context. + */ + name: string; + /** + * Embedder-specific auxiliary data. + */ + auxData?: {} | undefined; + } + /** + * Detailed information about exception (or error) that was thrown during script compilation or execution. + */ + interface ExceptionDetails { + /** + * Exception id. + */ + exceptionId: number; + /** + * Exception text, which should be used together with exception object when available. + */ + text: string; + /** + * Line number of the exception location (0-based). + */ + lineNumber: number; + /** + * Column number of the exception location (0-based). + */ + columnNumber: number; + /** + * Script ID of the exception location. + */ + scriptId?: ScriptId | undefined; + /** + * URL of the exception location, to be used when the script was not reported. + */ + url?: string | undefined; + /** + * JavaScript stack trace if available. + */ + stackTrace?: StackTrace | undefined; + /** + * Exception object if available. + */ + exception?: RemoteObject | undefined; + /** + * Identifier of the context where exception happened. + */ + executionContextId?: ExecutionContextId | undefined; + } + /** + * Number of milliseconds since epoch. + */ + type Timestamp = number; + /** + * Stack entry for runtime errors and assertions. + */ + interface CallFrame { + /** + * JavaScript function name. + */ + functionName: string; + /** + * JavaScript script id. + */ + scriptId: ScriptId; + /** + * JavaScript script name or url. + */ + url: string; + /** + * JavaScript script line number (0-based). + */ + lineNumber: number; + /** + * JavaScript script column number (0-based). + */ + columnNumber: number; + } + /** + * Call frames for assertions or error messages. + */ + interface StackTrace { + /** + * String label of this stack trace. For async traces this may be a name of the function that initiated the async call. + */ + description?: string | undefined; + /** + * JavaScript function name. + */ + callFrames: CallFrame[]; + /** + * Asynchronous JavaScript stack trace that preceded this stack, if available. + */ + parent?: StackTrace | undefined; + /** + * Asynchronous JavaScript stack trace that preceded this stack, if available. 
+ * @experimental + */ + parentId?: StackTraceId | undefined; + } + /** + * Unique identifier of current debugger. + * @experimental + */ + type UniqueDebuggerId = string; + /** + * If debuggerId is set stack trace comes from another debugger and can be resolved there. This allows to track cross-debugger calls. See Runtime.StackTrace and Debugger.paused for usages. + * @experimental + */ + interface StackTraceId { + id: string; + debuggerId?: UniqueDebuggerId | undefined; + } + interface EvaluateParameterType { + /** + * Expression to evaluate. + */ + expression: string; + /** + * Symbolic group name that can be used to release multiple objects. + */ + objectGroup?: string | undefined; + /** + * Determines whether Command Line API should be available during the evaluation. + */ + includeCommandLineAPI?: boolean | undefined; + /** + * In silent mode exceptions thrown during evaluation are not reported and do not pause execution. Overrides setPauseOnException state. + */ + silent?: boolean | undefined; + /** + * Specifies in which execution context to perform evaluation. If the parameter is omitted the evaluation will be performed in the context of the inspected page. + */ + contextId?: ExecutionContextId | undefined; + /** + * Whether the result is expected to be a JSON object that should be sent by value. + */ + returnByValue?: boolean | undefined; + /** + * Whether preview should be generated for the result. + * @experimental + */ + generatePreview?: boolean | undefined; + /** + * Whether execution should be treated as initiated by user in the UI. + */ + userGesture?: boolean | undefined; + /** + * Whether execution should await for resulting value and return once awaited promise is resolved. + */ + awaitPromise?: boolean | undefined; + } + interface AwaitPromiseParameterType { + /** + * Identifier of the promise. + */ + promiseObjectId: RemoteObjectId; + /** + * Whether the result is expected to be a JSON object that should be sent by value. + */ + returnByValue?: boolean | undefined; + /** + * Whether preview should be generated for the result. + */ + generatePreview?: boolean | undefined; + } + interface CallFunctionOnParameterType { + /** + * Declaration of the function to call. + */ + functionDeclaration: string; + /** + * Identifier of the object to call function on. Either objectId or executionContextId should be specified. + */ + objectId?: RemoteObjectId | undefined; + /** + * Call arguments. All call arguments must belong to the same JavaScript world as the target object. + */ + arguments?: CallArgument[] | undefined; + /** + * In silent mode exceptions thrown during evaluation are not reported and do not pause execution. Overrides setPauseOnException state. + */ + silent?: boolean | undefined; + /** + * Whether the result is expected to be a JSON object which should be sent by value. + */ + returnByValue?: boolean | undefined; + /** + * Whether preview should be generated for the result. + * @experimental + */ + generatePreview?: boolean | undefined; + /** + * Whether execution should be treated as initiated by user in the UI. + */ + userGesture?: boolean | undefined; + /** + * Whether execution should await for resulting value and return once awaited promise is resolved. + */ + awaitPromise?: boolean | undefined; + /** + * Specifies execution context which global object will be used to call function on. Either executionContextId or objectId should be specified. 
+ */ + executionContextId?: ExecutionContextId | undefined; + /** + * Symbolic group name that can be used to release multiple objects. If objectGroup is not specified and objectId is, objectGroup will be inherited from object. + */ + objectGroup?: string | undefined; + } + interface GetPropertiesParameterType { + /** + * Identifier of the object to return properties for. + */ + objectId: RemoteObjectId; + /** + * If true, returns properties belonging only to the element itself, not to its prototype chain. + */ + ownProperties?: boolean | undefined; + /** + * If true, returns accessor properties (with getter/setter) only; internal properties are not returned either. + * @experimental + */ + accessorPropertiesOnly?: boolean | undefined; + /** + * Whether preview should be generated for the results. + * @experimental + */ + generatePreview?: boolean | undefined; + } + interface ReleaseObjectParameterType { + /** + * Identifier of the object to release. + */ + objectId: RemoteObjectId; + } + interface ReleaseObjectGroupParameterType { + /** + * Symbolic object group name. + */ + objectGroup: string; + } + interface SetCustomObjectFormatterEnabledParameterType { + enabled: boolean; + } + interface CompileScriptParameterType { + /** + * Expression to compile. + */ + expression: string; + /** + * Source url to be set for the script. + */ + sourceURL: string; + /** + * Specifies whether the compiled script should be persisted. + */ + persistScript: boolean; + /** + * Specifies in which execution context to perform script run. If the parameter is omitted the evaluation will be performed in the context of the inspected page. + */ + executionContextId?: ExecutionContextId | undefined; + } + interface RunScriptParameterType { + /** + * Id of the script to run. + */ + scriptId: ScriptId; + /** + * Specifies in which execution context to perform script run. If the parameter is omitted the evaluation will be performed in the context of the inspected page. + */ + executionContextId?: ExecutionContextId | undefined; + /** + * Symbolic group name that can be used to release multiple objects. + */ + objectGroup?: string | undefined; + /** + * In silent mode exceptions thrown during evaluation are not reported and do not pause execution. Overrides setPauseOnException state. + */ + silent?: boolean | undefined; + /** + * Determines whether Command Line API should be available during the evaluation. + */ + includeCommandLineAPI?: boolean | undefined; + /** + * Whether the result is expected to be a JSON object which should be sent by value. + */ + returnByValue?: boolean | undefined; + /** + * Whether preview should be generated for the result. + */ + generatePreview?: boolean | undefined; + /** + * Whether execution should await for resulting value and return once awaited promise is resolved. + */ + awaitPromise?: boolean | undefined; + } + interface QueryObjectsParameterType { + /** + * Identifier of the prototype to return objects for. + */ + prototypeObjectId: RemoteObjectId; + } + interface GlobalLexicalScopeNamesParameterType { + /** + * Specifies in which execution context to lookup global scope variables. + */ + executionContextId?: ExecutionContextId | undefined; + } + interface EvaluateReturnType { + /** + * Evaluation result. + */ + result: RemoteObject; + /** + * Exception details. + */ + exceptionDetails?: ExceptionDetails | undefined; + } + interface AwaitPromiseReturnType { + /** + * Promise result. Will contain rejected value if promise was rejected. 
+ */ + result: RemoteObject; + /** + * Exception details if stack strace is available. + */ + exceptionDetails?: ExceptionDetails | undefined; + } + interface CallFunctionOnReturnType { + /** + * Call result. + */ + result: RemoteObject; + /** + * Exception details. + */ + exceptionDetails?: ExceptionDetails | undefined; + } + interface GetPropertiesReturnType { + /** + * Object properties. + */ + result: PropertyDescriptor[]; + /** + * Internal object properties (only of the element itself). + */ + internalProperties?: InternalPropertyDescriptor[] | undefined; + /** + * Exception details. + */ + exceptionDetails?: ExceptionDetails | undefined; + } + interface CompileScriptReturnType { + /** + * Id of the script. + */ + scriptId?: ScriptId | undefined; + /** + * Exception details. + */ + exceptionDetails?: ExceptionDetails | undefined; + } + interface RunScriptReturnType { + /** + * Run result. + */ + result: RemoteObject; + /** + * Exception details. + */ + exceptionDetails?: ExceptionDetails | undefined; + } + interface QueryObjectsReturnType { + /** + * Array with objects. + */ + objects: RemoteObject; + } + interface GlobalLexicalScopeNamesReturnType { + names: string[]; + } + interface ExecutionContextCreatedEventDataType { + /** + * A newly created execution context. + */ + context: ExecutionContextDescription; + } + interface ExecutionContextDestroyedEventDataType { + /** + * Id of the destroyed context + */ + executionContextId: ExecutionContextId; + } + interface ExceptionThrownEventDataType { + /** + * Timestamp of the exception. + */ + timestamp: Timestamp; + exceptionDetails: ExceptionDetails; + } + interface ExceptionRevokedEventDataType { + /** + * Reason describing why exception was revoked. + */ + reason: string; + /** + * The id of revoked exception, as reported in exceptionThrown. + */ + exceptionId: number; + } + interface ConsoleAPICalledEventDataType { + /** + * Type of the call. + */ + type: string; + /** + * Call arguments. + */ + args: RemoteObject[]; + /** + * Identifier of the context where the call was made. + */ + executionContextId: ExecutionContextId; + /** + * Call timestamp. + */ + timestamp: Timestamp; + /** + * Stack trace captured when the call was made. + */ + stackTrace?: StackTrace | undefined; + /** + * Console context descriptor for calls on non-default console context (not console.*): 'anonymous#unique-logger-id' for call on unnamed context, 'name#unique-logger-id' for call on named context. + * @experimental + */ + context?: string | undefined; + } + interface InspectRequestedEventDataType { + object: RemoteObject; + hints: {}; + } + } + namespace Debugger { + /** + * Breakpoint identifier. + */ + type BreakpointId = string; + /** + * Call frame identifier. + */ + type CallFrameId = string; + /** + * Location in the source code. + */ + interface Location { + /** + * Script identifier as reported in the Debugger.scriptParsed. + */ + scriptId: Runtime.ScriptId; + /** + * Line number in the script (0-based). + */ + lineNumber: number; + /** + * Column number in the script (0-based). + */ + columnNumber?: number | undefined; + } + /** + * Location in the source code. + * @experimental + */ + interface ScriptPosition { + lineNumber: number; + columnNumber: number; + } + /** + * JavaScript call frame. Array of call frames form the call stack. + */ + interface CallFrame { + /** + * Call frame identifier. This identifier is only valid while the virtual machine is paused. 
+ */ + callFrameId: CallFrameId; + /** + * Name of the JavaScript function called on this call frame. + */ + functionName: string; + /** + * Location in the source code. + */ + functionLocation?: Location | undefined; + /** + * Location in the source code. + */ + location: Location; + /** + * JavaScript script name or url. + */ + url: string; + /** + * Scope chain for this call frame. + */ + scopeChain: Scope[]; + /** + * this object for this call frame. + */ + this: Runtime.RemoteObject; + /** + * The value being returned, if the function is at return point. + */ + returnValue?: Runtime.RemoteObject | undefined; + } + /** + * Scope description. + */ + interface Scope { + /** + * Scope type. + */ + type: string; + /** + * Object representing the scope. For global and with scopes it represents the actual object; for the rest of the scopes, it is artificial transient object enumerating scope variables as its properties. + */ + object: Runtime.RemoteObject; + name?: string | undefined; + /** + * Location in the source code where scope starts + */ + startLocation?: Location | undefined; + /** + * Location in the source code where scope ends + */ + endLocation?: Location | undefined; + } + /** + * Search match for resource. + */ + interface SearchMatch { + /** + * Line number in resource content. + */ + lineNumber: number; + /** + * Line with match content. + */ + lineContent: string; + } + interface BreakLocation { + /** + * Script identifier as reported in the Debugger.scriptParsed. + */ + scriptId: Runtime.ScriptId; + /** + * Line number in the script (0-based). + */ + lineNumber: number; + /** + * Column number in the script (0-based). + */ + columnNumber?: number | undefined; + type?: string | undefined; + } + interface SetBreakpointsActiveParameterType { + /** + * New value for breakpoints active state. + */ + active: boolean; + } + interface SetSkipAllPausesParameterType { + /** + * New value for skip pauses state. + */ + skip: boolean; + } + interface SetBreakpointByUrlParameterType { + /** + * Line number to set breakpoint at. + */ + lineNumber: number; + /** + * URL of the resources to set breakpoint on. + */ + url?: string | undefined; + /** + * Regex pattern for the URLs of the resources to set breakpoints on. Either url or urlRegex must be specified. + */ + urlRegex?: string | undefined; + /** + * Script hash of the resources to set breakpoint on. + */ + scriptHash?: string | undefined; + /** + * Offset in the line to set breakpoint at. + */ + columnNumber?: number | undefined; + /** + * Expression to use as a breakpoint condition. When specified, debugger will only stop on the breakpoint if this expression evaluates to true. + */ + condition?: string | undefined; + } + interface SetBreakpointParameterType { + /** + * Location to set breakpoint in. + */ + location: Location; + /** + * Expression to use as a breakpoint condition. When specified, debugger will only stop on the breakpoint if this expression evaluates to true. + */ + condition?: string | undefined; + } + interface RemoveBreakpointParameterType { + breakpointId: BreakpointId; + } + interface GetPossibleBreakpointsParameterType { + /** + * Start of range to search possible breakpoint locations in. + */ + start: Location; + /** + * End of range to search possible breakpoint locations in (excluding). When not specified, end of scripts is used as end of range. + */ + end?: Location | undefined; + /** + * Only consider locations which are in the same (non-nested) function as start. 
+ */ + restrictToFunction?: boolean | undefined; + } + interface ContinueToLocationParameterType { + /** + * Location to continue to. + */ + location: Location; + targetCallFrames?: string | undefined; + } + interface PauseOnAsyncCallParameterType { + /** + * Debugger will pause when async call with given stack trace is started. + */ + parentStackTraceId: Runtime.StackTraceId; + } + interface StepIntoParameterType { + /** + * Debugger will issue additional Debugger.paused notification if any async task is scheduled before next pause. + * @experimental + */ + breakOnAsyncCall?: boolean | undefined; + } + interface GetStackTraceParameterType { + stackTraceId: Runtime.StackTraceId; + } + interface SearchInContentParameterType { + /** + * Id of the script to search in. + */ + scriptId: Runtime.ScriptId; + /** + * String to search for. + */ + query: string; + /** + * If true, search is case sensitive. + */ + caseSensitive?: boolean | undefined; + /** + * If true, treats string parameter as regex. + */ + isRegex?: boolean | undefined; + } + interface SetScriptSourceParameterType { + /** + * Id of the script to edit. + */ + scriptId: Runtime.ScriptId; + /** + * New content of the script. + */ + scriptSource: string; + /** + * If true the change will not actually be applied. Dry run may be used to get result description without actually modifying the code. + */ + dryRun?: boolean | undefined; + } + interface RestartFrameParameterType { + /** + * Call frame identifier to evaluate on. + */ + callFrameId: CallFrameId; + } + interface GetScriptSourceParameterType { + /** + * Id of the script to get source for. + */ + scriptId: Runtime.ScriptId; + } + interface SetPauseOnExceptionsParameterType { + /** + * Pause on exceptions mode. + */ + state: string; + } + interface EvaluateOnCallFrameParameterType { + /** + * Call frame identifier to evaluate on. + */ + callFrameId: CallFrameId; + /** + * Expression to evaluate. + */ + expression: string; + /** + * String object group name to put result into (allows rapid releasing resulting object handles using releaseObjectGroup). + */ + objectGroup?: string | undefined; + /** + * Specifies whether command line API should be available to the evaluated expression, defaults to false. + */ + includeCommandLineAPI?: boolean | undefined; + /** + * In silent mode exceptions thrown during evaluation are not reported and do not pause execution. Overrides setPauseOnException state. + */ + silent?: boolean | undefined; + /** + * Whether the result is expected to be a JSON object that should be sent by value. + */ + returnByValue?: boolean | undefined; + /** + * Whether preview should be generated for the result. + * @experimental + */ + generatePreview?: boolean | undefined; + /** + * Whether to throw an exception if side effect cannot be ruled out during evaluation. + */ + throwOnSideEffect?: boolean | undefined; + } + interface SetVariableValueParameterType { + /** + * 0-based number of scope as was listed in scope chain. Only 'local', 'closure' and 'catch' scope types are allowed. Other scopes could be manipulated manually. + */ + scopeNumber: number; + /** + * Variable name. + */ + variableName: string; + /** + * New variable value. + */ + newValue: Runtime.CallArgument; + /** + * Id of callframe that holds variable. + */ + callFrameId: CallFrameId; + } + interface SetReturnValueParameterType { + /** + * New return value. + */ + newValue: Runtime.CallArgument; + } + interface SetAsyncCallStackDepthParameterType { + /** + * Maximum depth of async call stacks. 
Setting to 0 will effectively disable collecting async call stacks (default). + */ + maxDepth: number; + } + interface SetBlackboxPatternsParameterType { + /** + * Array of regexps that will be used to check script url for blackbox state. + */ + patterns: string[]; + } + interface SetBlackboxedRangesParameterType { + /** + * Id of the script. + */ + scriptId: Runtime.ScriptId; + positions: ScriptPosition[]; + } + interface EnableReturnType { + /** + * Unique identifier of the debugger. + * @experimental + */ + debuggerId: Runtime.UniqueDebuggerId; + } + interface SetBreakpointByUrlReturnType { + /** + * Id of the created breakpoint for further reference. + */ + breakpointId: BreakpointId; + /** + * List of the locations this breakpoint resolved into upon addition. + */ + locations: Location[]; + } + interface SetBreakpointReturnType { + /** + * Id of the created breakpoint for further reference. + */ + breakpointId: BreakpointId; + /** + * Location this breakpoint resolved into. + */ + actualLocation: Location; + } + interface GetPossibleBreakpointsReturnType { + /** + * List of the possible breakpoint locations. + */ + locations: BreakLocation[]; + } + interface GetStackTraceReturnType { + stackTrace: Runtime.StackTrace; + } + interface SearchInContentReturnType { + /** + * List of search matches. + */ + result: SearchMatch[]; + } + interface SetScriptSourceReturnType { + /** + * New stack trace in case editing has happened while VM was stopped. + */ + callFrames?: CallFrame[] | undefined; + /** + * Whether current call stack was modified after applying the changes. + */ + stackChanged?: boolean | undefined; + /** + * Async stack trace, if any. + */ + asyncStackTrace?: Runtime.StackTrace | undefined; + /** + * Async stack trace, if any. + * @experimental + */ + asyncStackTraceId?: Runtime.StackTraceId | undefined; + /** + * Exception details if any. + */ + exceptionDetails?: Runtime.ExceptionDetails | undefined; + } + interface RestartFrameReturnType { + /** + * New stack trace. + */ + callFrames: CallFrame[]; + /** + * Async stack trace, if any. + */ + asyncStackTrace?: Runtime.StackTrace | undefined; + /** + * Async stack trace, if any. + * @experimental + */ + asyncStackTraceId?: Runtime.StackTraceId | undefined; + } + interface GetScriptSourceReturnType { + /** + * Script source. + */ + scriptSource: string; + } + interface EvaluateOnCallFrameReturnType { + /** + * Object wrapper for the evaluation result. + */ + result: Runtime.RemoteObject; + /** + * Exception details. + */ + exceptionDetails?: Runtime.ExceptionDetails | undefined; + } + interface ScriptParsedEventDataType { + /** + * Identifier of the script parsed. + */ + scriptId: Runtime.ScriptId; + /** + * URL or name of the script parsed (if any). + */ + url: string; + /** + * Line offset of the script within the resource with given URL (for script tags). + */ + startLine: number; + /** + * Column offset of the script within the resource with given URL. + */ + startColumn: number; + /** + * Last line of the script. + */ + endLine: number; + /** + * Length of the last line of the script. + */ + endColumn: number; + /** + * Specifies script creation context. + */ + executionContextId: Runtime.ExecutionContextId; + /** + * Content hash of the script. + */ + hash: string; + /** + * Embedder-specific auxiliary data. + */ + executionContextAuxData?: {} | undefined; + /** + * True, if this script is generated as a result of the live edit operation. 
+ * @experimental + */ + isLiveEdit?: boolean | undefined; + /** + * URL of source map associated with script (if any). + */ + sourceMapURL?: string | undefined; + /** + * True, if this script has sourceURL. + */ + hasSourceURL?: boolean | undefined; + /** + * True, if this script is ES6 module. + */ + isModule?: boolean | undefined; + /** + * This script length. + */ + length?: number | undefined; + /** + * JavaScript top stack frame of where the script parsed event was triggered if available. + * @experimental + */ + stackTrace?: Runtime.StackTrace | undefined; + } + interface ScriptFailedToParseEventDataType { + /** + * Identifier of the script parsed. + */ + scriptId: Runtime.ScriptId; + /** + * URL or name of the script parsed (if any). + */ + url: string; + /** + * Line offset of the script within the resource with given URL (for script tags). + */ + startLine: number; + /** + * Column offset of the script within the resource with given URL. + */ + startColumn: number; + /** + * Last line of the script. + */ + endLine: number; + /** + * Length of the last line of the script. + */ + endColumn: number; + /** + * Specifies script creation context. + */ + executionContextId: Runtime.ExecutionContextId; + /** + * Content hash of the script. + */ + hash: string; + /** + * Embedder-specific auxiliary data. + */ + executionContextAuxData?: {} | undefined; + /** + * URL of source map associated with script (if any). + */ + sourceMapURL?: string | undefined; + /** + * True, if this script has sourceURL. + */ + hasSourceURL?: boolean | undefined; + /** + * True, if this script is ES6 module. + */ + isModule?: boolean | undefined; + /** + * This script length. + */ + length?: number | undefined; + /** + * JavaScript top stack frame of where the script parsed event was triggered if available. + * @experimental + */ + stackTrace?: Runtime.StackTrace | undefined; + } + interface BreakpointResolvedEventDataType { + /** + * Breakpoint unique identifier. + */ + breakpointId: BreakpointId; + /** + * Actual breakpoint location. + */ + location: Location; + } + interface PausedEventDataType { + /** + * Call stack the virtual machine stopped on. + */ + callFrames: CallFrame[]; + /** + * Pause reason. + */ + reason: string; + /** + * Object containing break-specific auxiliary properties. + */ + data?: {} | undefined; + /** + * Hit breakpoints IDs + */ + hitBreakpoints?: string[] | undefined; + /** + * Async stack trace, if any. + */ + asyncStackTrace?: Runtime.StackTrace | undefined; + /** + * Async stack trace, if any. + * @experimental + */ + asyncStackTraceId?: Runtime.StackTraceId | undefined; + /** + * Just scheduled async call will have this stack trace as parent stack during async execution. This field is available only after Debugger.stepInto call with breakOnAsynCall flag. + * @experimental + */ + asyncCallStackTraceId?: Runtime.StackTraceId | undefined; + } + } + namespace Console { + /** + * Console message. + */ + interface ConsoleMessage { + /** + * Message source. + */ + source: string; + /** + * Message severity. + */ + level: string; + /** + * Message text. + */ + text: string; + /** + * URL of the message origin. + */ + url?: string | undefined; + /** + * Line number in the resource that generated this message (1-based). + */ + line?: number | undefined; + /** + * Column number in the resource that generated this message (1-based). + */ + column?: number | undefined; + } + interface MessageAddedEventDataType { + /** + * Console message that has been added. 
+ */ + message: ConsoleMessage; + } + } + namespace Profiler { + /** + * Profile node. Holds callsite information, execution statistics and child nodes. + */ + interface ProfileNode { + /** + * Unique id of the node. + */ + id: number; + /** + * Function location. + */ + callFrame: Runtime.CallFrame; + /** + * Number of samples where this node was on top of the call stack. + */ + hitCount?: number | undefined; + /** + * Child node ids. + */ + children?: number[] | undefined; + /** + * The reason of being not optimized. The function may be deoptimized or marked as don't optimize. + */ + deoptReason?: string | undefined; + /** + * An array of source position ticks. + */ + positionTicks?: PositionTickInfo[] | undefined; + } + /** + * Profile. + */ + interface Profile { + /** + * The list of profile nodes. First item is the root node. + */ + nodes: ProfileNode[]; + /** + * Profiling start timestamp in microseconds. + */ + startTime: number; + /** + * Profiling end timestamp in microseconds. + */ + endTime: number; + /** + * Ids of samples top nodes. + */ + samples?: number[] | undefined; + /** + * Time intervals between adjacent samples in microseconds. The first delta is relative to the profile startTime. + */ + timeDeltas?: number[] | undefined; + } + /** + * Specifies a number of samples attributed to a certain source position. + */ + interface PositionTickInfo { + /** + * Source line number (1-based). + */ + line: number; + /** + * Number of samples attributed to the source line. + */ + ticks: number; + } + /** + * Coverage data for a source range. + */ + interface CoverageRange { + /** + * JavaScript script source offset for the range start. + */ + startOffset: number; + /** + * JavaScript script source offset for the range end. + */ + endOffset: number; + /** + * Collected execution count of the source range. + */ + count: number; + } + /** + * Coverage data for a JavaScript function. + */ + interface FunctionCoverage { + /** + * JavaScript function name. + */ + functionName: string; + /** + * Source ranges inside the function with coverage data. + */ + ranges: CoverageRange[]; + /** + * Whether coverage data for this function has block granularity. + */ + isBlockCoverage: boolean; + } + /** + * Coverage data for a JavaScript script. + */ + interface ScriptCoverage { + /** + * JavaScript script id. + */ + scriptId: Runtime.ScriptId; + /** + * JavaScript script name or url. + */ + url: string; + /** + * Functions contained in the script that has coverage data. + */ + functions: FunctionCoverage[]; + } + /** + * Describes a type collected during runtime. + * @experimental + */ + interface TypeObject { + /** + * Name of a type collected with type profiling. + */ + name: string; + } + /** + * Source offset and types for a parameter or return value. + * @experimental + */ + interface TypeProfileEntry { + /** + * Source offset of the parameter or end of function for return values. + */ + offset: number; + /** + * The types for this parameter or return value. + */ + types: TypeObject[]; + } + /** + * Type profile data collected during runtime for a JavaScript script. + * @experimental + */ + interface ScriptTypeProfile { + /** + * JavaScript script id. + */ + scriptId: Runtime.ScriptId; + /** + * JavaScript script name or url. + */ + url: string; + /** + * Type profile entries for parameters and return values of the functions in the script. + */ + entries: TypeProfileEntry[]; + } + interface SetSamplingIntervalParameterType { + /** + * New sampling interval in microseconds. 
+ */ + interval: number; + } + interface StartPreciseCoverageParameterType { + /** + * Collect accurate call counts beyond simple 'covered' or 'not covered'. + */ + callCount?: boolean | undefined; + /** + * Collect block-based coverage. + */ + detailed?: boolean | undefined; + } + interface StopReturnType { + /** + * Recorded profile. + */ + profile: Profile; + } + interface TakePreciseCoverageReturnType { + /** + * Coverage data for the current isolate. + */ + result: ScriptCoverage[]; + } + interface GetBestEffortCoverageReturnType { + /** + * Coverage data for the current isolate. + */ + result: ScriptCoverage[]; + } + interface TakeTypeProfileReturnType { + /** + * Type profile for all scripts since startTypeProfile() was turned on. + */ + result: ScriptTypeProfile[]; + } + interface ConsoleProfileStartedEventDataType { + id: string; + /** + * Location of console.profile(). + */ + location: Debugger.Location; + /** + * Profile title passed as an argument to console.profile(). + */ + title?: string | undefined; + } + interface ConsoleProfileFinishedEventDataType { + id: string; + /** + * Location of console.profileEnd(). + */ + location: Debugger.Location; + profile: Profile; + /** + * Profile title passed as an argument to console.profile(). + */ + title?: string | undefined; + } + } + namespace HeapProfiler { + /** + * Heap snapshot object id. + */ + type HeapSnapshotObjectId = string; + /** + * Sampling Heap Profile node. Holds callsite information, allocation statistics and child nodes. + */ + interface SamplingHeapProfileNode { + /** + * Function location. + */ + callFrame: Runtime.CallFrame; + /** + * Allocations size in bytes for the node excluding children. + */ + selfSize: number; + /** + * Child nodes. + */ + children: SamplingHeapProfileNode[]; + } + /** + * Profile. + */ + interface SamplingHeapProfile { + head: SamplingHeapProfileNode; + } + interface StartTrackingHeapObjectsParameterType { + trackAllocations?: boolean | undefined; + } + interface StopTrackingHeapObjectsParameterType { + /** + * If true 'reportHeapSnapshotProgress' events will be generated while snapshot is being taken when the tracking is stopped. + */ + reportProgress?: boolean | undefined; + } + interface TakeHeapSnapshotParameterType { + /** + * If true 'reportHeapSnapshotProgress' events will be generated while snapshot is being taken. + */ + reportProgress?: boolean | undefined; + } + interface GetObjectByHeapObjectIdParameterType { + objectId: HeapSnapshotObjectId; + /** + * Symbolic group name that can be used to release multiple objects. + */ + objectGroup?: string | undefined; + } + interface AddInspectedHeapObjectParameterType { + /** + * Heap snapshot object id to be accessible by means of $x command line API. + */ + heapObjectId: HeapSnapshotObjectId; + } + interface GetHeapObjectIdParameterType { + /** + * Identifier of the object to get heap object id for. + */ + objectId: Runtime.RemoteObjectId; + } + interface StartSamplingParameterType { + /** + * Average sample interval in bytes. Poisson distribution is used for the intervals. The default value is 32768 bytes. + */ + samplingInterval?: number | undefined; + } + interface GetObjectByHeapObjectIdReturnType { + /** + * Evaluation result. + */ + result: Runtime.RemoteObject; + } + interface GetHeapObjectIdReturnType { + /** + * Id of the heap snapshot object corresponding to the passed remote object id. + */ + heapSnapshotObjectId: HeapSnapshotObjectId; + } + interface StopSamplingReturnType { + /** + * Recorded sampling heap profile. 
+ */ + profile: SamplingHeapProfile; + } + interface GetSamplingProfileReturnType { + /** + * Return the sampling profile being collected. + */ + profile: SamplingHeapProfile; + } + interface AddHeapSnapshotChunkEventDataType { + chunk: string; + } + interface ReportHeapSnapshotProgressEventDataType { + done: number; + total: number; + finished?: boolean | undefined; + } + interface LastSeenObjectIdEventDataType { + lastSeenObjectId: number; + timestamp: number; + } + interface HeapStatsUpdateEventDataType { + /** + * An array of triplets. Each triplet describes a fragment. The first integer is the fragment index, the second integer is a total count of objects for the fragment, the third integer is a total size of the objects for the fragment. + */ + statsUpdate: number[]; + } + } + namespace NodeTracing { + interface TraceConfig { + /** + * Controls how the trace buffer stores data. + */ + recordMode?: string | undefined; + /** + * Included category filters. + */ + includedCategories: string[]; + } + interface StartParameterType { + traceConfig: TraceConfig; + } + interface GetCategoriesReturnType { + /** + * A list of supported tracing categories. + */ + categories: string[]; + } + interface DataCollectedEventDataType { + value: Array<{}>; + } + } + namespace NodeWorker { + type WorkerID = string; + /** + * Unique identifier of attached debugging session. + */ + type SessionID = string; + interface WorkerInfo { + workerId: WorkerID; + type: string; + title: string; + url: string; + } + interface SendMessageToWorkerParameterType { + message: string; + /** + * Identifier of the session. + */ + sessionId: SessionID; + } + interface EnableParameterType { + /** + * Whether to new workers should be paused until the frontend sends `Runtime.runIfWaitingForDebugger` + * message to run them. + */ + waitForDebuggerOnStart: boolean; + } + interface DetachParameterType { + sessionId: SessionID; + } + interface AttachedToWorkerEventDataType { + /** + * Identifier assigned to the session used to send/receive messages. + */ + sessionId: SessionID; + workerInfo: WorkerInfo; + waitingForDebugger: boolean; + } + interface DetachedFromWorkerEventDataType { + /** + * Detached session identifier. + */ + sessionId: SessionID; + } + interface ReceivedMessageFromWorkerEventDataType { + /** + * Identifier of a session which sends a message. + */ + sessionId: SessionID; + message: string; + } + } + namespace NodeRuntime { + interface NotifyWhenWaitingForDisconnectParameterType { + enabled: boolean; + } + } + /** + * The `inspector.Session` is used for dispatching messages to the V8 inspector + * back-end and receiving message responses and notifications. + */ + class Session extends EventEmitter { + /** + * Create a new instance of the inspector.Session class. + * The inspector session needs to be connected through session.connect() before the messages can be dispatched to the inspector backend. + */ + constructor(); + /** + * Connects a session to the inspector back-end. + * @since v8.0.0 + */ + connect(): void; + /** + * Immediately close the session. All pending message callbacks will be called + * with an error. `session.connect()` will need to be called to be able to send + * messages again. Reconnected session will lose all inspector state, such as + * enabled agents or configured breakpoints. + * @since v8.0.0 + */ + disconnect(): void; + /** + * Posts a message to the inspector back-end. `callback` will be notified when + * a response is received. 
`callback` is a function that accepts two optional + * arguments: error and message-specific result. + * + * ```js + * session.post('Runtime.evaluate', { expression: '2 + 2' }, + * (error, { result }) => console.log(result)); + * // Output: { type: 'number', value: 4, description: '4' } + * ``` + * + * The latest version of the V8 inspector protocol is published on the [Chrome DevTools Protocol Viewer](https://chromedevtools.github.io/devtools-protocol/v8/). + * + * Node.js inspector supports all the Chrome DevTools Protocol domains declared + * by V8\. Chrome DevTools Protocol domain provides an interface for interacting + * with one of the runtime agents used to inspect the application state and listen + * to the run-time events. + * + * ## Example usage + * + * Apart from the debugger, various V8 Profilers are available through the DevTools + * protocol. + * @since v8.0.0 + */ + post(method: string, params?: {}, callback?: (err: Error | null, params?: {}) => void): void; + post(method: string, callback?: (err: Error | null, params?: {}) => void): void; + /** + * Returns supported domains. + */ + post(method: 'Schema.getDomains', callback?: (err: Error | null, params: Schema.GetDomainsReturnType) => void): void; + /** + * Evaluates expression on global object. + */ + post(method: 'Runtime.evaluate', params?: Runtime.EvaluateParameterType, callback?: (err: Error | null, params: Runtime.EvaluateReturnType) => void): void; + post(method: 'Runtime.evaluate', callback?: (err: Error | null, params: Runtime.EvaluateReturnType) => void): void; + /** + * Add handler to promise with given promise object id. + */ + post(method: 'Runtime.awaitPromise', params?: Runtime.AwaitPromiseParameterType, callback?: (err: Error | null, params: Runtime.AwaitPromiseReturnType) => void): void; + post(method: 'Runtime.awaitPromise', callback?: (err: Error | null, params: Runtime.AwaitPromiseReturnType) => void): void; + /** + * Calls function with given declaration on the given object. Object group of the result is inherited from the target object. + */ + post(method: 'Runtime.callFunctionOn', params?: Runtime.CallFunctionOnParameterType, callback?: (err: Error | null, params: Runtime.CallFunctionOnReturnType) => void): void; + post(method: 'Runtime.callFunctionOn', callback?: (err: Error | null, params: Runtime.CallFunctionOnReturnType) => void): void; + /** + * Returns properties of a given object. Object group of the result is inherited from the target object. + */ + post(method: 'Runtime.getProperties', params?: Runtime.GetPropertiesParameterType, callback?: (err: Error | null, params: Runtime.GetPropertiesReturnType) => void): void; + post(method: 'Runtime.getProperties', callback?: (err: Error | null, params: Runtime.GetPropertiesReturnType) => void): void; + /** + * Releases remote object with given id. + */ + post(method: 'Runtime.releaseObject', params?: Runtime.ReleaseObjectParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Runtime.releaseObject', callback?: (err: Error | null) => void): void; + /** + * Releases all remote objects that belong to a given group. + */ + post(method: 'Runtime.releaseObjectGroup', params?: Runtime.ReleaseObjectGroupParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Runtime.releaseObjectGroup', callback?: (err: Error | null) => void): void; + /** + * Tells inspected instance to run if it was waiting for debugger to attach. 
+ */ + post(method: 'Runtime.runIfWaitingForDebugger', callback?: (err: Error | null) => void): void; + /** + * Enables reporting of execution contexts creation by means of executionContextCreated event. When the reporting gets enabled the event will be sent immediately for each existing execution context. + */ + post(method: 'Runtime.enable', callback?: (err: Error | null) => void): void; + /** + * Disables reporting of execution contexts creation. + */ + post(method: 'Runtime.disable', callback?: (err: Error | null) => void): void; + /** + * Discards collected exceptions and console API calls. + */ + post(method: 'Runtime.discardConsoleEntries', callback?: (err: Error | null) => void): void; + /** + * @experimental + */ + post(method: 'Runtime.setCustomObjectFormatterEnabled', params?: Runtime.SetCustomObjectFormatterEnabledParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Runtime.setCustomObjectFormatterEnabled', callback?: (err: Error | null) => void): void; + /** + * Compiles expression. + */ + post(method: 'Runtime.compileScript', params?: Runtime.CompileScriptParameterType, callback?: (err: Error | null, params: Runtime.CompileScriptReturnType) => void): void; + post(method: 'Runtime.compileScript', callback?: (err: Error | null, params: Runtime.CompileScriptReturnType) => void): void; + /** + * Runs script with given id in a given context. + */ + post(method: 'Runtime.runScript', params?: Runtime.RunScriptParameterType, callback?: (err: Error | null, params: Runtime.RunScriptReturnType) => void): void; + post(method: 'Runtime.runScript', callback?: (err: Error | null, params: Runtime.RunScriptReturnType) => void): void; + post(method: 'Runtime.queryObjects', params?: Runtime.QueryObjectsParameterType, callback?: (err: Error | null, params: Runtime.QueryObjectsReturnType) => void): void; + post(method: 'Runtime.queryObjects', callback?: (err: Error | null, params: Runtime.QueryObjectsReturnType) => void): void; + /** + * Returns all let, const and class variables from global scope. + */ + post( + method: 'Runtime.globalLexicalScopeNames', + params?: Runtime.GlobalLexicalScopeNamesParameterType, + callback?: (err: Error | null, params: Runtime.GlobalLexicalScopeNamesReturnType) => void + ): void; + post(method: 'Runtime.globalLexicalScopeNames', callback?: (err: Error | null, params: Runtime.GlobalLexicalScopeNamesReturnType) => void): void; + /** + * Enables debugger for the given page. Clients should not assume that the debugging has been enabled until the result for this command is received. + */ + post(method: 'Debugger.enable', callback?: (err: Error | null, params: Debugger.EnableReturnType) => void): void; + /** + * Disables debugger for given page. + */ + post(method: 'Debugger.disable', callback?: (err: Error | null) => void): void; + /** + * Activates / deactivates all breakpoints on the page. + */ + post(method: 'Debugger.setBreakpointsActive', params?: Debugger.SetBreakpointsActiveParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.setBreakpointsActive', callback?: (err: Error | null) => void): void; + /** + * Makes page not interrupt on any pauses (breakpoint, exception, dom exception etc). 
+ */ + post(method: 'Debugger.setSkipAllPauses', params?: Debugger.SetSkipAllPausesParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.setSkipAllPauses', callback?: (err: Error | null) => void): void; + /** + * Sets JavaScript breakpoint at given location specified either by URL or URL regex. Once this command is issued, all existing parsed scripts will have breakpoints resolved and returned in locations property. Further matching script parsing will result in subsequent breakpointResolved events issued. This logical breakpoint will survive page reloads. + */ + post(method: 'Debugger.setBreakpointByUrl', params?: Debugger.SetBreakpointByUrlParameterType, callback?: (err: Error | null, params: Debugger.SetBreakpointByUrlReturnType) => void): void; + post(method: 'Debugger.setBreakpointByUrl', callback?: (err: Error | null, params: Debugger.SetBreakpointByUrlReturnType) => void): void; + /** + * Sets JavaScript breakpoint at a given location. + */ + post(method: 'Debugger.setBreakpoint', params?: Debugger.SetBreakpointParameterType, callback?: (err: Error | null, params: Debugger.SetBreakpointReturnType) => void): void; + post(method: 'Debugger.setBreakpoint', callback?: (err: Error | null, params: Debugger.SetBreakpointReturnType) => void): void; + /** + * Removes JavaScript breakpoint. + */ + post(method: 'Debugger.removeBreakpoint', params?: Debugger.RemoveBreakpointParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.removeBreakpoint', callback?: (err: Error | null) => void): void; + /** + * Returns possible locations for breakpoint. scriptId in start and end range locations should be the same. + */ + post( + method: 'Debugger.getPossibleBreakpoints', + params?: Debugger.GetPossibleBreakpointsParameterType, + callback?: (err: Error | null, params: Debugger.GetPossibleBreakpointsReturnType) => void + ): void; + post(method: 'Debugger.getPossibleBreakpoints', callback?: (err: Error | null, params: Debugger.GetPossibleBreakpointsReturnType) => void): void; + /** + * Continues execution until specific location is reached. + */ + post(method: 'Debugger.continueToLocation', params?: Debugger.ContinueToLocationParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.continueToLocation', callback?: (err: Error | null) => void): void; + /** + * @experimental + */ + post(method: 'Debugger.pauseOnAsyncCall', params?: Debugger.PauseOnAsyncCallParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.pauseOnAsyncCall', callback?: (err: Error | null) => void): void; + /** + * Steps over the statement. + */ + post(method: 'Debugger.stepOver', callback?: (err: Error | null) => void): void; + /** + * Steps into the function call. + */ + post(method: 'Debugger.stepInto', params?: Debugger.StepIntoParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.stepInto', callback?: (err: Error | null) => void): void; + /** + * Steps out of the function call. + */ + post(method: 'Debugger.stepOut', callback?: (err: Error | null) => void): void; + /** + * Stops on the next JavaScript statement. + */ + post(method: 'Debugger.pause', callback?: (err: Error | null) => void): void; + /** + * This method is deprecated - use Debugger.stepInto with breakOnAsyncCall and Debugger.pauseOnAsyncTask instead. Steps into next scheduled async task if any is scheduled before next pause. 
Returns success when async task is actually scheduled, returns error if no task were scheduled or another scheduleStepIntoAsync was called. + * @experimental + */ + post(method: 'Debugger.scheduleStepIntoAsync', callback?: (err: Error | null) => void): void; + /** + * Resumes JavaScript execution. + */ + post(method: 'Debugger.resume', callback?: (err: Error | null) => void): void; + /** + * Returns stack trace with given stackTraceId. + * @experimental + */ + post(method: 'Debugger.getStackTrace', params?: Debugger.GetStackTraceParameterType, callback?: (err: Error | null, params: Debugger.GetStackTraceReturnType) => void): void; + post(method: 'Debugger.getStackTrace', callback?: (err: Error | null, params: Debugger.GetStackTraceReturnType) => void): void; + /** + * Searches for given string in script content. + */ + post(method: 'Debugger.searchInContent', params?: Debugger.SearchInContentParameterType, callback?: (err: Error | null, params: Debugger.SearchInContentReturnType) => void): void; + post(method: 'Debugger.searchInContent', callback?: (err: Error | null, params: Debugger.SearchInContentReturnType) => void): void; + /** + * Edits JavaScript source live. + */ + post(method: 'Debugger.setScriptSource', params?: Debugger.SetScriptSourceParameterType, callback?: (err: Error | null, params: Debugger.SetScriptSourceReturnType) => void): void; + post(method: 'Debugger.setScriptSource', callback?: (err: Error | null, params: Debugger.SetScriptSourceReturnType) => void): void; + /** + * Restarts particular call frame from the beginning. + */ + post(method: 'Debugger.restartFrame', params?: Debugger.RestartFrameParameterType, callback?: (err: Error | null, params: Debugger.RestartFrameReturnType) => void): void; + post(method: 'Debugger.restartFrame', callback?: (err: Error | null, params: Debugger.RestartFrameReturnType) => void): void; + /** + * Returns source for the script with given id. + */ + post(method: 'Debugger.getScriptSource', params?: Debugger.GetScriptSourceParameterType, callback?: (err: Error | null, params: Debugger.GetScriptSourceReturnType) => void): void; + post(method: 'Debugger.getScriptSource', callback?: (err: Error | null, params: Debugger.GetScriptSourceReturnType) => void): void; + /** + * Defines pause on exceptions state. Can be set to stop on all exceptions, uncaught exceptions or no exceptions. Initial pause on exceptions state is none. + */ + post(method: 'Debugger.setPauseOnExceptions', params?: Debugger.SetPauseOnExceptionsParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.setPauseOnExceptions', callback?: (err: Error | null) => void): void; + /** + * Evaluates expression on a given call frame. + */ + post(method: 'Debugger.evaluateOnCallFrame', params?: Debugger.EvaluateOnCallFrameParameterType, callback?: (err: Error | null, params: Debugger.EvaluateOnCallFrameReturnType) => void): void; + post(method: 'Debugger.evaluateOnCallFrame', callback?: (err: Error | null, params: Debugger.EvaluateOnCallFrameReturnType) => void): void; + /** + * Changes value of variable in a callframe. Object-based scopes are not supported and must be mutated manually. + */ + post(method: 'Debugger.setVariableValue', params?: Debugger.SetVariableValueParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.setVariableValue', callback?: (err: Error | null) => void): void; + /** + * Changes return value in top frame. Available only at return break position. 
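+ *
+ * A possible usage sketch (assuming execution is paused at a return break position;
+ * the `newValue` call-argument shape is assumed from the Chrome DevTools Protocol):
+ *
+ * ```js
+ * session.post('Debugger.setReturnValue', { newValue: { value: 42 } }, (err) => {
+ *   // the paused frame will now return 42 instead of its original value
+ *   if (err) console.error(err);
+ * });
+ * ```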
+ * @experimental + */ + post(method: 'Debugger.setReturnValue', params?: Debugger.SetReturnValueParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.setReturnValue', callback?: (err: Error | null) => void): void; + /** + * Enables or disables async call stacks tracking. + */ + post(method: 'Debugger.setAsyncCallStackDepth', params?: Debugger.SetAsyncCallStackDepthParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.setAsyncCallStackDepth', callback?: (err: Error | null) => void): void; + /** + * Replace previous blackbox patterns with passed ones. Forces backend to skip stepping/pausing in scripts with url matching one of the patterns. VM will try to leave blackboxed script by performing 'step in' several times, finally resorting to 'step out' if unsuccessful. + * @experimental + */ + post(method: 'Debugger.setBlackboxPatterns', params?: Debugger.SetBlackboxPatternsParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.setBlackboxPatterns', callback?: (err: Error | null) => void): void; + /** + * Makes backend skip steps in the script in blackboxed ranges. VM will try leave blacklisted scripts by performing 'step in' several times, finally resorting to 'step out' if unsuccessful. Positions array contains positions where blackbox state is changed. First interval isn't blackboxed. Array should be sorted. + * @experimental + */ + post(method: 'Debugger.setBlackboxedRanges', params?: Debugger.SetBlackboxedRangesParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.setBlackboxedRanges', callback?: (err: Error | null) => void): void; + /** + * Enables console domain, sends the messages collected so far to the client by means of the messageAdded notification. + */ + post(method: 'Console.enable', callback?: (err: Error | null) => void): void; + /** + * Disables console domain, prevents further console messages from being reported to the client. + */ + post(method: 'Console.disable', callback?: (err: Error | null) => void): void; + /** + * Does nothing. + */ + post(method: 'Console.clearMessages', callback?: (err: Error | null) => void): void; + post(method: 'Profiler.enable', callback?: (err: Error | null) => void): void; + post(method: 'Profiler.disable', callback?: (err: Error | null) => void): void; + /** + * Changes CPU profiler sampling interval. Must be called before CPU profiles recording started. + */ + post(method: 'Profiler.setSamplingInterval', params?: Profiler.SetSamplingIntervalParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Profiler.setSamplingInterval', callback?: (err: Error | null) => void): void; + post(method: 'Profiler.start', callback?: (err: Error | null) => void): void; + post(method: 'Profiler.stop', callback?: (err: Error | null, params: Profiler.StopReturnType) => void): void; + /** + * Enable precise code coverage. Coverage data for JavaScript executed before enabling precise code coverage may be incomplete. Enabling prevents running optimized code and resets execution counters. + */ + post(method: 'Profiler.startPreciseCoverage', params?: Profiler.StartPreciseCoverageParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Profiler.startPreciseCoverage', callback?: (err: Error | null) => void): void; + /** + * Disable precise code coverage. Disabling releases unnecessary execution count records and allows executing optimized code. 
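+ *
+ * A rough end-to-end sketch of a precise-coverage run (assuming a connected `session`;
+ * the `callCount`/`detailed` options follow the Chrome DevTools Protocol):
+ *
+ * ```js
+ * session.post('Profiler.enable', () => {
+ *   session.post('Profiler.startPreciseCoverage', { callCount: true, detailed: true }, () => {
+ *     // ... run the code to be measured ...
+ *     session.post('Profiler.takePreciseCoverage', (err, coverage) => {
+ *       if (!err) console.log(coverage.result); // one entry per covered script
+ *       session.post('Profiler.stopPreciseCoverage');
+ *     });
+ *   });
+ * });
+ * ```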
+ */ + post(method: 'Profiler.stopPreciseCoverage', callback?: (err: Error | null) => void): void; + /** + * Collect coverage data for the current isolate, and resets execution counters. Precise code coverage needs to have started. + */ + post(method: 'Profiler.takePreciseCoverage', callback?: (err: Error | null, params: Profiler.TakePreciseCoverageReturnType) => void): void; + /** + * Collect coverage data for the current isolate. The coverage data may be incomplete due to garbage collection. + */ + post(method: 'Profiler.getBestEffortCoverage', callback?: (err: Error | null, params: Profiler.GetBestEffortCoverageReturnType) => void): void; + /** + * Enable type profile. + * @experimental + */ + post(method: 'Profiler.startTypeProfile', callback?: (err: Error | null) => void): void; + /** + * Disable type profile. Disabling releases type profile data collected so far. + * @experimental + */ + post(method: 'Profiler.stopTypeProfile', callback?: (err: Error | null) => void): void; + /** + * Collect type profile. + * @experimental + */ + post(method: 'Profiler.takeTypeProfile', callback?: (err: Error | null, params: Profiler.TakeTypeProfileReturnType) => void): void; + post(method: 'HeapProfiler.enable', callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.disable', callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.startTrackingHeapObjects', params?: HeapProfiler.StartTrackingHeapObjectsParameterType, callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.startTrackingHeapObjects', callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.stopTrackingHeapObjects', params?: HeapProfiler.StopTrackingHeapObjectsParameterType, callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.stopTrackingHeapObjects', callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.takeHeapSnapshot', params?: HeapProfiler.TakeHeapSnapshotParameterType, callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.takeHeapSnapshot', callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.collectGarbage', callback?: (err: Error | null) => void): void; + post( + method: 'HeapProfiler.getObjectByHeapObjectId', + params?: HeapProfiler.GetObjectByHeapObjectIdParameterType, + callback?: (err: Error | null, params: HeapProfiler.GetObjectByHeapObjectIdReturnType) => void + ): void; + post(method: 'HeapProfiler.getObjectByHeapObjectId', callback?: (err: Error | null, params: HeapProfiler.GetObjectByHeapObjectIdReturnType) => void): void; + /** + * Enables console to refer to the node with given id via $x (see Command Line API for more details $x functions). 
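+ *
+ * A hedged sketch (the string `heapObjectId` value here is only illustrative; real ids
+ * come from a previously taken heap snapshot):
+ *
+ * ```js
+ * session.post('HeapProfiler.addInspectedHeapObject', { heapObjectId: '1' }, (err) => {
+ *   // after this call the console can refer to the object via the Command Line API
+ *   if (err) console.error(err);
+ * });
+ * ```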
+ */ + post(method: 'HeapProfiler.addInspectedHeapObject', params?: HeapProfiler.AddInspectedHeapObjectParameterType, callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.addInspectedHeapObject', callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.getHeapObjectId', params?: HeapProfiler.GetHeapObjectIdParameterType, callback?: (err: Error | null, params: HeapProfiler.GetHeapObjectIdReturnType) => void): void; + post(method: 'HeapProfiler.getHeapObjectId', callback?: (err: Error | null, params: HeapProfiler.GetHeapObjectIdReturnType) => void): void; + post(method: 'HeapProfiler.startSampling', params?: HeapProfiler.StartSamplingParameterType, callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.startSampling', callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.stopSampling', callback?: (err: Error | null, params: HeapProfiler.StopSamplingReturnType) => void): void; + post(method: 'HeapProfiler.getSamplingProfile', callback?: (err: Error | null, params: HeapProfiler.GetSamplingProfileReturnType) => void): void; + /** + * Gets supported tracing categories. + */ + post(method: 'NodeTracing.getCategories', callback?: (err: Error | null, params: NodeTracing.GetCategoriesReturnType) => void): void; + /** + * Start trace events collection. + */ + post(method: 'NodeTracing.start', params?: NodeTracing.StartParameterType, callback?: (err: Error | null) => void): void; + post(method: 'NodeTracing.start', callback?: (err: Error | null) => void): void; + /** + * Stop trace events collection. Remaining collected events will be sent as a sequence of + * dataCollected events followed by tracingComplete event. + */ + post(method: 'NodeTracing.stop', callback?: (err: Error | null) => void): void; + /** + * Sends protocol message over session with given id. + */ + post(method: 'NodeWorker.sendMessageToWorker', params?: NodeWorker.SendMessageToWorkerParameterType, callback?: (err: Error | null) => void): void; + post(method: 'NodeWorker.sendMessageToWorker', callback?: (err: Error | null) => void): void; + /** + * Instructs the inspector to attach to running workers. Will also attach to new workers + * as they start + */ + post(method: 'NodeWorker.enable', params?: NodeWorker.EnableParameterType, callback?: (err: Error | null) => void): void; + post(method: 'NodeWorker.enable', callback?: (err: Error | null) => void): void; + /** + * Detaches from all running workers and disables attaching to new workers as they are started. + */ + post(method: 'NodeWorker.disable', callback?: (err: Error | null) => void): void; + /** + * Detached from the worker with given sessionId. + */ + post(method: 'NodeWorker.detach', params?: NodeWorker.DetachParameterType, callback?: (err: Error | null) => void): void; + post(method: 'NodeWorker.detach', callback?: (err: Error | null) => void): void; + /** + * Enable the `NodeRuntime.waitingForDisconnect`. + */ + post(method: 'NodeRuntime.notifyWhenWaitingForDisconnect', params?: NodeRuntime.NotifyWhenWaitingForDisconnectParameterType, callback?: (err: Error | null) => void): void; + post(method: 'NodeRuntime.notifyWhenWaitingForDisconnect', callback?: (err: Error | null) => void): void; + // Events + addListener(event: string, listener: (...args: any[]) => void): this; + /** + * Emitted when any notification from the V8 Inspector is received. 
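+ *
+ * For example (assuming a connected `session`; the `method` field on the notification
+ * object follows the inspector protocol's message shape):
+ *
+ * ```js
+ * session.addListener('inspectorNotification', (message) => {
+ *   // logs every protocol notification, e.g. 'Debugger.scriptParsed'
+ *   console.log(message.method);
+ * });
+ * ```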
+ */ + addListener(event: 'inspectorNotification', listener: (message: InspectorNotification<{}>) => void): this; + /** + * Issued when new execution context is created. + */ + addListener(event: 'Runtime.executionContextCreated', listener: (message: InspectorNotification) => void): this; + /** + * Issued when execution context is destroyed. + */ + addListener(event: 'Runtime.executionContextDestroyed', listener: (message: InspectorNotification) => void): this; + /** + * Issued when all executionContexts were cleared in browser + */ + addListener(event: 'Runtime.executionContextsCleared', listener: () => void): this; + /** + * Issued when exception was thrown and unhandled. + */ + addListener(event: 'Runtime.exceptionThrown', listener: (message: InspectorNotification) => void): this; + /** + * Issued when unhandled exception was revoked. + */ + addListener(event: 'Runtime.exceptionRevoked', listener: (message: InspectorNotification) => void): this; + /** + * Issued when console API was called. + */ + addListener(event: 'Runtime.consoleAPICalled', listener: (message: InspectorNotification) => void): this; + /** + * Issued when object should be inspected (for example, as a result of inspect() command line API call). + */ + addListener(event: 'Runtime.inspectRequested', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine parses script. This event is also fired for all known and uncollected scripts upon enabling debugger. + */ + addListener(event: 'Debugger.scriptParsed', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine fails to parse the script. + */ + addListener(event: 'Debugger.scriptFailedToParse', listener: (message: InspectorNotification) => void): this; + /** + * Fired when breakpoint is resolved to an actual script and location. + */ + addListener(event: 'Debugger.breakpointResolved', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine stopped on breakpoint or exception or any other stop criteria. + */ + addListener(event: 'Debugger.paused', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine resumed execution. + */ + addListener(event: 'Debugger.resumed', listener: () => void): this; + /** + * Issued when new console message is added. + */ + addListener(event: 'Console.messageAdded', listener: (message: InspectorNotification) => void): this; + /** + * Sent when new profile recording is started using console.profile() call. + */ + addListener(event: 'Profiler.consoleProfileStarted', listener: (message: InspectorNotification) => void): this; + addListener(event: 'Profiler.consoleProfileFinished', listener: (message: InspectorNotification) => void): this; + addListener(event: 'HeapProfiler.addHeapSnapshotChunk', listener: (message: InspectorNotification) => void): this; + addListener(event: 'HeapProfiler.resetProfiles', listener: () => void): this; + addListener(event: 'HeapProfiler.reportHeapSnapshotProgress', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend regularly sends a current value for last seen object id and corresponding timestamp. If the were changes in the heap since last event then one or more heapStatsUpdate events will be sent before a new lastSeenObjectId event. 
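+ *
+ * A small sketch (the field names on `message.params` are assumed from the protocol's
+ * lastSeenObjectId payload):
+ *
+ * ```js
+ * session.addListener('HeapProfiler.lastSeenObjectId', (message) => {
+ *   console.log(message.params.lastSeenObjectId, message.params.timestamp);
+ * });
+ * ```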
+ */ + addListener(event: 'HeapProfiler.lastSeenObjectId', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend may send update for one or more fragments + */ + addListener(event: 'HeapProfiler.heapStatsUpdate', listener: (message: InspectorNotification) => void): this; + /** + * Contains an bucket of collected trace events. + */ + addListener(event: 'NodeTracing.dataCollected', listener: (message: InspectorNotification) => void): this; + /** + * Signals that tracing is stopped and there is no trace buffers pending flush, all data were + * delivered via dataCollected events. + */ + addListener(event: 'NodeTracing.tracingComplete', listener: () => void): this; + /** + * Issued when attached to a worker. + */ + addListener(event: 'NodeWorker.attachedToWorker', listener: (message: InspectorNotification) => void): this; + /** + * Issued when detached from the worker. + */ + addListener(event: 'NodeWorker.detachedFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * Notifies about a new protocol message received from the session + * (session ID is provided in attachedToWorker notification). + */ + addListener(event: 'NodeWorker.receivedMessageFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * This event is fired instead of `Runtime.executionContextDestroyed` when + * enabled. + * It is fired when the Node process finished all code execution and is + * waiting for all frontends to disconnect. + */ + addListener(event: 'NodeRuntime.waitingForDisconnect', listener: () => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: 'inspectorNotification', message: InspectorNotification<{}>): boolean; + emit(event: 'Runtime.executionContextCreated', message: InspectorNotification): boolean; + emit(event: 'Runtime.executionContextDestroyed', message: InspectorNotification): boolean; + emit(event: 'Runtime.executionContextsCleared'): boolean; + emit(event: 'Runtime.exceptionThrown', message: InspectorNotification): boolean; + emit(event: 'Runtime.exceptionRevoked', message: InspectorNotification): boolean; + emit(event: 'Runtime.consoleAPICalled', message: InspectorNotification): boolean; + emit(event: 'Runtime.inspectRequested', message: InspectorNotification): boolean; + emit(event: 'Debugger.scriptParsed', message: InspectorNotification): boolean; + emit(event: 'Debugger.scriptFailedToParse', message: InspectorNotification): boolean; + emit(event: 'Debugger.breakpointResolved', message: InspectorNotification): boolean; + emit(event: 'Debugger.paused', message: InspectorNotification): boolean; + emit(event: 'Debugger.resumed'): boolean; + emit(event: 'Console.messageAdded', message: InspectorNotification): boolean; + emit(event: 'Profiler.consoleProfileStarted', message: InspectorNotification): boolean; + emit(event: 'Profiler.consoleProfileFinished', message: InspectorNotification): boolean; + emit(event: 'HeapProfiler.addHeapSnapshotChunk', message: InspectorNotification): boolean; + emit(event: 'HeapProfiler.resetProfiles'): boolean; + emit(event: 'HeapProfiler.reportHeapSnapshotProgress', message: InspectorNotification): boolean; + emit(event: 'HeapProfiler.lastSeenObjectId', message: InspectorNotification): boolean; + emit(event: 'HeapProfiler.heapStatsUpdate', message: InspectorNotification): boolean; + emit(event: 'NodeTracing.dataCollected', message: InspectorNotification): boolean; + emit(event: 'NodeTracing.tracingComplete'): boolean; + 
emit(event: 'NodeWorker.attachedToWorker', message: InspectorNotification): boolean; + emit(event: 'NodeWorker.detachedFromWorker', message: InspectorNotification): boolean; + emit(event: 'NodeWorker.receivedMessageFromWorker', message: InspectorNotification): boolean; + emit(event: 'NodeRuntime.waitingForDisconnect'): boolean; + on(event: string, listener: (...args: any[]) => void): this; + /** + * Emitted when any notification from the V8 Inspector is received. + */ + on(event: 'inspectorNotification', listener: (message: InspectorNotification<{}>) => void): this; + /** + * Issued when new execution context is created. + */ + on(event: 'Runtime.executionContextCreated', listener: (message: InspectorNotification) => void): this; + /** + * Issued when execution context is destroyed. + */ + on(event: 'Runtime.executionContextDestroyed', listener: (message: InspectorNotification) => void): this; + /** + * Issued when all executionContexts were cleared in browser + */ + on(event: 'Runtime.executionContextsCleared', listener: () => void): this; + /** + * Issued when exception was thrown and unhandled. + */ + on(event: 'Runtime.exceptionThrown', listener: (message: InspectorNotification) => void): this; + /** + * Issued when unhandled exception was revoked. + */ + on(event: 'Runtime.exceptionRevoked', listener: (message: InspectorNotification) => void): this; + /** + * Issued when console API was called. + */ + on(event: 'Runtime.consoleAPICalled', listener: (message: InspectorNotification) => void): this; + /** + * Issued when object should be inspected (for example, as a result of inspect() command line API call). + */ + on(event: 'Runtime.inspectRequested', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine parses script. This event is also fired for all known and uncollected scripts upon enabling debugger. + */ + on(event: 'Debugger.scriptParsed', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine fails to parse the script. + */ + on(event: 'Debugger.scriptFailedToParse', listener: (message: InspectorNotification) => void): this; + /** + * Fired when breakpoint is resolved to an actual script and location. + */ + on(event: 'Debugger.breakpointResolved', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine stopped on breakpoint or exception or any other stop criteria. + */ + on(event: 'Debugger.paused', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine resumed execution. + */ + on(event: 'Debugger.resumed', listener: () => void): this; + /** + * Issued when new console message is added. + */ + on(event: 'Console.messageAdded', listener: (message: InspectorNotification) => void): this; + /** + * Sent when new profile recording is started using console.profile() call. 
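+ *
+ * For example, reacting to `console.profile()` calls in the inspected code
+ * (the `title` field is assumed from the protocol's consoleProfileStarted payload):
+ *
+ * ```js
+ * session.on('Profiler.consoleProfileStarted', (message) => {
+ *   console.log('profile started:', message.params.title);
+ * });
+ * ```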
+ */ + on(event: 'Profiler.consoleProfileStarted', listener: (message: InspectorNotification) => void): this; + on(event: 'Profiler.consoleProfileFinished', listener: (message: InspectorNotification) => void): this; + on(event: 'HeapProfiler.addHeapSnapshotChunk', listener: (message: InspectorNotification) => void): this; + on(event: 'HeapProfiler.resetProfiles', listener: () => void): this; + on(event: 'HeapProfiler.reportHeapSnapshotProgress', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend regularly sends a current value for last seen object id and corresponding timestamp. If the were changes in the heap since last event then one or more heapStatsUpdate events will be sent before a new lastSeenObjectId event. + */ + on(event: 'HeapProfiler.lastSeenObjectId', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend may send update for one or more fragments + */ + on(event: 'HeapProfiler.heapStatsUpdate', listener: (message: InspectorNotification) => void): this; + /** + * Contains an bucket of collected trace events. + */ + on(event: 'NodeTracing.dataCollected', listener: (message: InspectorNotification) => void): this; + /** + * Signals that tracing is stopped and there is no trace buffers pending flush, all data were + * delivered via dataCollected events. + */ + on(event: 'NodeTracing.tracingComplete', listener: () => void): this; + /** + * Issued when attached to a worker. + */ + on(event: 'NodeWorker.attachedToWorker', listener: (message: InspectorNotification) => void): this; + /** + * Issued when detached from the worker. + */ + on(event: 'NodeWorker.detachedFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * Notifies about a new protocol message received from the session + * (session ID is provided in attachedToWorker notification). + */ + on(event: 'NodeWorker.receivedMessageFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * This event is fired instead of `Runtime.executionContextDestroyed` when + * enabled. + * It is fired when the Node process finished all code execution and is + * waiting for all frontends to disconnect. + */ + on(event: 'NodeRuntime.waitingForDisconnect', listener: () => void): this; + once(event: string, listener: (...args: any[]) => void): this; + /** + * Emitted when any notification from the V8 Inspector is received. + */ + once(event: 'inspectorNotification', listener: (message: InspectorNotification<{}>) => void): this; + /** + * Issued when new execution context is created. + */ + once(event: 'Runtime.executionContextCreated', listener: (message: InspectorNotification) => void): this; + /** + * Issued when execution context is destroyed. + */ + once(event: 'Runtime.executionContextDestroyed', listener: (message: InspectorNotification) => void): this; + /** + * Issued when all executionContexts were cleared in browser + */ + once(event: 'Runtime.executionContextsCleared', listener: () => void): this; + /** + * Issued when exception was thrown and unhandled. + */ + once(event: 'Runtime.exceptionThrown', listener: (message: InspectorNotification) => void): this; + /** + * Issued when unhandled exception was revoked. + */ + once(event: 'Runtime.exceptionRevoked', listener: (message: InspectorNotification) => void): this; + /** + * Issued when console API was called. 
+ */ + once(event: 'Runtime.consoleAPICalled', listener: (message: InspectorNotification) => void): this; + /** + * Issued when object should be inspected (for example, as a result of inspect() command line API call). + */ + once(event: 'Runtime.inspectRequested', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine parses script. This event is also fired for all known and uncollected scripts upon enabling debugger. + */ + once(event: 'Debugger.scriptParsed', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine fails to parse the script. + */ + once(event: 'Debugger.scriptFailedToParse', listener: (message: InspectorNotification) => void): this; + /** + * Fired when breakpoint is resolved to an actual script and location. + */ + once(event: 'Debugger.breakpointResolved', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine stopped on breakpoint or exception or any other stop criteria. + */ + once(event: 'Debugger.paused', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine resumed execution. + */ + once(event: 'Debugger.resumed', listener: () => void): this; + /** + * Issued when new console message is added. + */ + once(event: 'Console.messageAdded', listener: (message: InspectorNotification) => void): this; + /** + * Sent when new profile recording is started using console.profile() call. + */ + once(event: 'Profiler.consoleProfileStarted', listener: (message: InspectorNotification) => void): this; + once(event: 'Profiler.consoleProfileFinished', listener: (message: InspectorNotification) => void): this; + once(event: 'HeapProfiler.addHeapSnapshotChunk', listener: (message: InspectorNotification) => void): this; + once(event: 'HeapProfiler.resetProfiles', listener: () => void): this; + once(event: 'HeapProfiler.reportHeapSnapshotProgress', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend regularly sends a current value for last seen object id and corresponding timestamp. If the were changes in the heap since last event then one or more heapStatsUpdate events will be sent before a new lastSeenObjectId event. + */ + once(event: 'HeapProfiler.lastSeenObjectId', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend may send update for one or more fragments + */ + once(event: 'HeapProfiler.heapStatsUpdate', listener: (message: InspectorNotification) => void): this; + /** + * Contains an bucket of collected trace events. + */ + once(event: 'NodeTracing.dataCollected', listener: (message: InspectorNotification) => void): this; + /** + * Signals that tracing is stopped and there is no trace buffers pending flush, all data were + * delivered via dataCollected events. + */ + once(event: 'NodeTracing.tracingComplete', listener: () => void): this; + /** + * Issued when attached to a worker. + */ + once(event: 'NodeWorker.attachedToWorker', listener: (message: InspectorNotification) => void): this; + /** + * Issued when detached from the worker. + */ + once(event: 'NodeWorker.detachedFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * Notifies about a new protocol message received from the session + * (session ID is provided in attachedToWorker notification). 
+ */ + once(event: 'NodeWorker.receivedMessageFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * This event is fired instead of `Runtime.executionContextDestroyed` when + * enabled. + * It is fired when the Node process finished all code execution and is + * waiting for all frontends to disconnect. + */ + once(event: 'NodeRuntime.waitingForDisconnect', listener: () => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + /** + * Emitted when any notification from the V8 Inspector is received. + */ + prependListener(event: 'inspectorNotification', listener: (message: InspectorNotification<{}>) => void): this; + /** + * Issued when new execution context is created. + */ + prependListener(event: 'Runtime.executionContextCreated', listener: (message: InspectorNotification) => void): this; + /** + * Issued when execution context is destroyed. + */ + prependListener(event: 'Runtime.executionContextDestroyed', listener: (message: InspectorNotification) => void): this; + /** + * Issued when all executionContexts were cleared in browser + */ + prependListener(event: 'Runtime.executionContextsCleared', listener: () => void): this; + /** + * Issued when exception was thrown and unhandled. + */ + prependListener(event: 'Runtime.exceptionThrown', listener: (message: InspectorNotification) => void): this; + /** + * Issued when unhandled exception was revoked. + */ + prependListener(event: 'Runtime.exceptionRevoked', listener: (message: InspectorNotification) => void): this; + /** + * Issued when console API was called. + */ + prependListener(event: 'Runtime.consoleAPICalled', listener: (message: InspectorNotification) => void): this; + /** + * Issued when object should be inspected (for example, as a result of inspect() command line API call). + */ + prependListener(event: 'Runtime.inspectRequested', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine parses script. This event is also fired for all known and uncollected scripts upon enabling debugger. + */ + prependListener(event: 'Debugger.scriptParsed', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine fails to parse the script. + */ + prependListener(event: 'Debugger.scriptFailedToParse', listener: (message: InspectorNotification) => void): this; + /** + * Fired when breakpoint is resolved to an actual script and location. + */ + prependListener(event: 'Debugger.breakpointResolved', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine stopped on breakpoint or exception or any other stop criteria. + */ + prependListener(event: 'Debugger.paused', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine resumed execution. + */ + prependListener(event: 'Debugger.resumed', listener: () => void): this; + /** + * Issued when new console message is added. + */ + prependListener(event: 'Console.messageAdded', listener: (message: InspectorNotification) => void): this; + /** + * Sent when new profile recording is started using console.profile() call. 
+ */ + prependListener(event: 'Profiler.consoleProfileStarted', listener: (message: InspectorNotification) => void): this; + prependListener(event: 'Profiler.consoleProfileFinished', listener: (message: InspectorNotification) => void): this; + prependListener(event: 'HeapProfiler.addHeapSnapshotChunk', listener: (message: InspectorNotification) => void): this; + prependListener(event: 'HeapProfiler.resetProfiles', listener: () => void): this; + prependListener(event: 'HeapProfiler.reportHeapSnapshotProgress', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend regularly sends a current value for last seen object id and corresponding timestamp. If the were changes in the heap since last event then one or more heapStatsUpdate events will be sent before a new lastSeenObjectId event. + */ + prependListener(event: 'HeapProfiler.lastSeenObjectId', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend may send update for one or more fragments + */ + prependListener(event: 'HeapProfiler.heapStatsUpdate', listener: (message: InspectorNotification) => void): this; + /** + * Contains an bucket of collected trace events. + */ + prependListener(event: 'NodeTracing.dataCollected', listener: (message: InspectorNotification) => void): this; + /** + * Signals that tracing is stopped and there is no trace buffers pending flush, all data were + * delivered via dataCollected events. + */ + prependListener(event: 'NodeTracing.tracingComplete', listener: () => void): this; + /** + * Issued when attached to a worker. + */ + prependListener(event: 'NodeWorker.attachedToWorker', listener: (message: InspectorNotification) => void): this; + /** + * Issued when detached from the worker. + */ + prependListener(event: 'NodeWorker.detachedFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * Notifies about a new protocol message received from the session + * (session ID is provided in attachedToWorker notification). + */ + prependListener(event: 'NodeWorker.receivedMessageFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * This event is fired instead of `Runtime.executionContextDestroyed` when + * enabled. + * It is fired when the Node process finished all code execution and is + * waiting for all frontends to disconnect. + */ + prependListener(event: 'NodeRuntime.waitingForDisconnect', listener: () => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + /** + * Emitted when any notification from the V8 Inspector is received. + */ + prependOnceListener(event: 'inspectorNotification', listener: (message: InspectorNotification<{}>) => void): this; + /** + * Issued when new execution context is created. + */ + prependOnceListener(event: 'Runtime.executionContextCreated', listener: (message: InspectorNotification) => void): this; + /** + * Issued when execution context is destroyed. + */ + prependOnceListener(event: 'Runtime.executionContextDestroyed', listener: (message: InspectorNotification) => void): this; + /** + * Issued when all executionContexts were cleared in browser + */ + prependOnceListener(event: 'Runtime.executionContextsCleared', listener: () => void): this; + /** + * Issued when exception was thrown and unhandled. 
+ */ + prependOnceListener(event: 'Runtime.exceptionThrown', listener: (message: InspectorNotification) => void): this; + /** + * Issued when unhandled exception was revoked. + */ + prependOnceListener(event: 'Runtime.exceptionRevoked', listener: (message: InspectorNotification) => void): this; + /** + * Issued when console API was called. + */ + prependOnceListener(event: 'Runtime.consoleAPICalled', listener: (message: InspectorNotification) => void): this; + /** + * Issued when object should be inspected (for example, as a result of inspect() command line API call). + */ + prependOnceListener(event: 'Runtime.inspectRequested', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine parses script. This event is also fired for all known and uncollected scripts upon enabling debugger. + */ + prependOnceListener(event: 'Debugger.scriptParsed', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine fails to parse the script. + */ + prependOnceListener(event: 'Debugger.scriptFailedToParse', listener: (message: InspectorNotification) => void): this; + /** + * Fired when breakpoint is resolved to an actual script and location. + */ + prependOnceListener(event: 'Debugger.breakpointResolved', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine stopped on breakpoint or exception or any other stop criteria. + */ + prependOnceListener(event: 'Debugger.paused', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine resumed execution. + */ + prependOnceListener(event: 'Debugger.resumed', listener: () => void): this; + /** + * Issued when new console message is added. + */ + prependOnceListener(event: 'Console.messageAdded', listener: (message: InspectorNotification) => void): this; + /** + * Sent when new profile recording is started using console.profile() call. + */ + prependOnceListener(event: 'Profiler.consoleProfileStarted', listener: (message: InspectorNotification) => void): this; + prependOnceListener(event: 'Profiler.consoleProfileFinished', listener: (message: InspectorNotification) => void): this; + prependOnceListener(event: 'HeapProfiler.addHeapSnapshotChunk', listener: (message: InspectorNotification) => void): this; + prependOnceListener(event: 'HeapProfiler.resetProfiles', listener: () => void): this; + prependOnceListener(event: 'HeapProfiler.reportHeapSnapshotProgress', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend regularly sends a current value for last seen object id and corresponding timestamp. If the were changes in the heap since last event then one or more heapStatsUpdate events will be sent before a new lastSeenObjectId event. + */ + prependOnceListener(event: 'HeapProfiler.lastSeenObjectId', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend may send update for one or more fragments + */ + prependOnceListener(event: 'HeapProfiler.heapStatsUpdate', listener: (message: InspectorNotification) => void): this; + /** + * Contains an bucket of collected trace events. + */ + prependOnceListener(event: 'NodeTracing.dataCollected', listener: (message: InspectorNotification) => void): this; + /** + * Signals that tracing is stopped and there is no trace buffers pending flush, all data were + * delivered via dataCollected events. 
+ */ + prependOnceListener(event: 'NodeTracing.tracingComplete', listener: () => void): this; + /** + * Issued when attached to a worker. + */ + prependOnceListener(event: 'NodeWorker.attachedToWorker', listener: (message: InspectorNotification) => void): this; + /** + * Issued when detached from the worker. + */ + prependOnceListener(event: 'NodeWorker.detachedFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * Notifies about a new protocol message received from the session + * (session ID is provided in attachedToWorker notification). + */ + prependOnceListener(event: 'NodeWorker.receivedMessageFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * This event is fired instead of `Runtime.executionContextDestroyed` when + * enabled. + * It is fired when the Node process finished all code execution and is + * waiting for all frontends to disconnect. + */ + prependOnceListener(event: 'NodeRuntime.waitingForDisconnect', listener: () => void): this; + } + /** + * Activate inspector on host and port. Equivalent to`node --inspect=[[host:]port]`, but can be done programmatically after node has + * started. + * + * If wait is `true`, will block until a client has connected to the inspect port + * and flow control has been passed to the debugger client. + * + * See the `security warning` regarding the `host`parameter usage. + * @param [port='what was specified on the CLI'] Port to listen on for inspector connections. Optional. + * @param [host='what was specified on the CLI'] Host to listen on for inspector connections. Optional. + * @param [wait=false] Block until a client has connected. Optional. + */ + function open(port?: number, host?: string, wait?: boolean): void; + /** + * Deactivate the inspector. Blocks until there are no active connections. + */ + function close(): void; + /** + * Return the URL of the active inspector, or `undefined` if there is none. + * + * ```console + * $ node --inspect -p 'inspector.url()' + * Debugger listening on ws://127.0.0.1:9229/166e272e-7a30-4d09-97ce-f1c012b43c34 + * For help, see: https://nodejs.org/en/docs/inspector + * ws://127.0.0.1:9229/166e272e-7a30-4d09-97ce-f1c012b43c34 + * + * $ node --inspect=localhost:3000 -p 'inspector.url()' + * Debugger listening on ws://localhost:3000/51cf8d0e-3c36-4c59-8efd-54519839e56a + * For help, see: https://nodejs.org/en/docs/inspector + * ws://localhost:3000/51cf8d0e-3c36-4c59-8efd-54519839e56a + * + * $ node -p 'inspector.url()' + * undefined + * ``` + */ + function url(): string | undefined; + /** + * Blocks until a client (existing or connected later) has sent`Runtime.runIfWaitingForDebugger` command. + * + * An exception will be thrown if there is no active inspector. + * @since v12.7.0 + */ + function waitForDebugger(): void; +} +/** + * The inspector module provides an API for interacting with the V8 inspector. + */ +declare module 'node:inspector' { + import inspector = require('inspector'); + export = inspector; +} diff --git a/node_modules/@types/node/module.d.ts b/node_modules/@types/node/module.d.ts new file mode 100755 index 0000000..d83aec9 --- /dev/null +++ b/node_modules/@types/node/module.d.ts @@ -0,0 +1,114 @@ +/** + * @since v0.3.7 + */ +declare module 'module' { + import { URL } from 'node:url'; + namespace Module { + /** + * The `module.syncBuiltinESMExports()` method updates all the live bindings for + * builtin `ES Modules` to match the properties of the `CommonJS` exports. 
It + * does not add or remove exported names from the `ES Modules`. + * + * ```js + * const fs = require('fs'); + * const assert = require('assert'); + * const { syncBuiltinESMExports } = require('module'); + * + * fs.readFile = newAPI; + * + * delete fs.readFileSync; + * + * function newAPI() { + * // ... + * } + * + * fs.newAPI = newAPI; + * + * syncBuiltinESMExports(); + * + * import('fs').then((esmFS) => { + * // It syncs the existing readFile property with the new value + * assert.strictEqual(esmFS.readFile, newAPI); + * // readFileSync has been deleted from the required fs + * assert.strictEqual('readFileSync' in fs, false); + * // syncBuiltinESMExports() does not remove readFileSync from esmFS + * assert.strictEqual('readFileSync' in esmFS, true); + * // syncBuiltinESMExports() does not add names + * assert.strictEqual(esmFS.newAPI, undefined); + * }); + * ``` + * @since v12.12.0 + */ + function syncBuiltinESMExports(): void; + /** + * `path` is the resolved path for the file for which a corresponding source map + * should be fetched. + * @since v13.7.0, v12.17.0 + */ + function findSourceMap(path: string, error?: Error): SourceMap; + interface SourceMapPayload { + file: string; + version: number; + sources: string[]; + sourcesContent: string[]; + names: string[]; + mappings: string; + sourceRoot: string; + } + interface SourceMapping { + generatedLine: number; + generatedColumn: number; + originalSource: string; + originalLine: number; + originalColumn: number; + } + /** + * @since v13.7.0, v12.17.0 + */ + class SourceMap { + /** + * Getter for the payload used to construct the `SourceMap` instance. + */ + readonly payload: SourceMapPayload; + constructor(payload: SourceMapPayload); + /** + * Given a line number and column number in the generated source file, returns + * an object representing the position in the original file. The object returned + * consists of the following keys: + */ + findEntry(line: number, column: number): SourceMapping; + } + } + interface Module extends NodeModule {} + class Module { + static runMain(): void; + static wrap(code: string): string; + static createRequire(path: string | URL): NodeRequire; + static builtinModules: string[]; + static Module: typeof Module; + constructor(id: string, parent?: Module); + } + global { + interface ImportMeta { + url: string; + /** + * @experimental + * This feature is only available with the `--experimental-import-meta-resolve` + * command flag enabled. + * + * Provides a module-relative resolution function scoped to each module, returning + * the URL string. + * + * @param specified The module specifier to resolve relative to `parent`. + * @param parent The absolute parent module URL to resolve from. If none + * is specified, the value of `import.meta.url` is used as the default. + */ + resolve?(specified: string, parent?: string | URL): Promise; + } + } + export = Module; +} +declare module 'node:module' { + import module = require('module'); + export = module; +} diff --git a/node_modules/@types/node/net.d.ts b/node_modules/@types/node/net.d.ts new file mode 100755 index 0000000..b735538 --- /dev/null +++ b/node_modules/@types/node/net.d.ts @@ -0,0 +1,869 @@ +/** + * > Stability: 2 - Stable + * + * The `net` module provides an asynchronous network API for creating stream-based + * TCP or `IPC` servers ({@link createServer}) and clients + * ({@link createConnection}). 
+ * + * It can be accessed using: + * + * ```js + * const net = require('net'); + * ``` + * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/net.js) + */ +declare module 'net' { + import * as stream from 'node:stream'; + import { Abortable, EventEmitter } from 'node:events'; + import * as dns from 'node:dns'; + type LookupFunction = (hostname: string, options: dns.LookupOneOptions, callback: (err: NodeJS.ErrnoException | null, address: string, family: number) => void) => void; + interface AddressInfo { + address: string; + family: string; + port: number; + } + interface SocketConstructorOpts { + fd?: number | undefined; + allowHalfOpen?: boolean | undefined; + readable?: boolean | undefined; + writable?: boolean | undefined; + signal?: AbortSignal; + } + interface OnReadOpts { + buffer: Uint8Array | (() => Uint8Array); + /** + * This function is called for every chunk of incoming data. + * Two arguments are passed to it: the number of bytes written to buffer and a reference to buffer. + * Return false from this function to implicitly pause() the socket. + */ + callback(bytesWritten: number, buf: Uint8Array): boolean; + } + interface ConnectOpts { + /** + * If specified, incoming data is stored in a single buffer and passed to the supplied callback when data arrives on the socket. + * Note: this will cause the streaming functionality to not provide any data, however events like 'error', 'end', and 'close' will + * still be emitted as normal and methods like pause() and resume() will also behave as expected. + */ + onread?: OnReadOpts | undefined; + } + interface TcpSocketConnectOpts extends ConnectOpts { + port: number; + host?: string | undefined; + localAddress?: string | undefined; + localPort?: number | undefined; + hints?: number | undefined; + family?: number | undefined; + lookup?: LookupFunction | undefined; + noDelay?: boolean | undefined; + keepAlive?: boolean | undefined; + keepAliveInitialDelay?: number | undefined; + } + interface IpcSocketConnectOpts extends ConnectOpts { + path: string; + } + type SocketConnectOpts = TcpSocketConnectOpts | IpcSocketConnectOpts; + type SocketReadyState = 'opening' | 'open' | 'readOnly' | 'writeOnly' | 'closed'; + /** + * This class is an abstraction of a TCP socket or a streaming `IPC` endpoint + * (uses named pipes on Windows, and Unix domain sockets otherwise). It is also + * an `EventEmitter`. + * + * A `net.Socket` can be created by the user and used directly to interact with + * a server. For example, it is returned by {@link createConnection}, + * so the user can use it to talk to the server. + * + * It can also be created by Node.js and passed to the user when a connection + * is received. For example, it is passed to the listeners of a `'connection'` event emitted on a {@link Server}, so the user can use + * it to interact with the client. + * @since v0.3.4 + */ + class Socket extends stream.Duplex { + constructor(options?: SocketConstructorOpts); + /** + * Sends data on the socket. The second parameter specifies the encoding in the + * case of a string. It defaults to UTF8 encoding. + * + * Returns `true` if the entire data was flushed successfully to the kernel + * buffer. Returns `false` if all or part of the data was queued in user memory.`'drain'` will be emitted when the buffer is again free. + * + * The optional `callback` parameter will be executed when the data is finally + * written out, which may not be immediately. + * + * See `Writable` stream `write()` method for more + * information. 
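+ *
+ * A short sketch (assuming `socket` is a connected `net.Socket`):
+ *
+ * ```js
+ * const flushed = socket.write('hello\n', 'utf8', (err) => {
+ *   if (err) console.error('write failed', err);
+ * });
+ * if (!flushed) {
+ *   // back-pressure: wait for 'drain' before writing more data
+ *   socket.once('drain', () => console.log('kernel buffer drained'));
+ * }
+ * ```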
+ * @since v0.1.90 + * @param [encoding='utf8'] Only used when data is `string`. + */ + write(buffer: Uint8Array | string, cb?: (err?: Error) => void): boolean; + write(str: Uint8Array | string, encoding?: BufferEncoding, cb?: (err?: Error) => void): boolean; + /** + * Initiate a connection on a given socket. + * + * Possible signatures: + * + * * `socket.connect(options[, connectListener])` + * * `socket.connect(path[, connectListener])` for `IPC` connections. + * * `socket.connect(port[, host][, connectListener])` for TCP connections. + * * Returns: `net.Socket` The socket itself. + * + * This function is asynchronous. When the connection is established, the `'connect'` event will be emitted. If there is a problem connecting, + * instead of a `'connect'` event, an `'error'` event will be emitted with + * the error passed to the `'error'` listener. + * The last parameter `connectListener`, if supplied, will be added as a listener + * for the `'connect'` event **once**. + * + * This function should only be used for reconnecting a socket after`'close'` has been emitted or otherwise it may lead to undefined + * behavior. + */ + connect(options: SocketConnectOpts, connectionListener?: () => void): this; + connect(port: number, host: string, connectionListener?: () => void): this; + connect(port: number, connectionListener?: () => void): this; + connect(path: string, connectionListener?: () => void): this; + /** + * Set the encoding for the socket as a `Readable Stream`. See `readable.setEncoding()` for more information. + * @since v0.1.90 + * @return The socket itself. + */ + setEncoding(encoding?: BufferEncoding): this; + /** + * Pauses the reading of data. That is, `'data'` events will not be emitted. + * Useful to throttle back an upload. + * @return The socket itself. + */ + pause(): this; + /** + * Close the TCP connection by sending an RST packet and destroy the stream. + * If this TCP socket is in connecting status, it will send an RST packet + * and destroy this TCP socket once it is connected. Otherwise, it will call + * `socket.destroy` with an `ERR_SOCKET_CLOSED` Error. If this is not a TCP socket + * (for example, a pipe), calling this method will immediately throw + * an `ERR_INVALID_HANDLE_TYPE` Error. + * @since v18.3.0 + * @return The socket itself. + */ + resetAndDestroy(): this; + /** + * Resumes reading after a call to `socket.pause()`. + * @return The socket itself. + */ + resume(): this; + /** + * Sets the socket to timeout after `timeout` milliseconds of inactivity on + * the socket. By default `net.Socket` do not have a timeout. + * + * When an idle timeout is triggered the socket will receive a `'timeout'` event but the connection will not be severed. The user must manually call `socket.end()` or `socket.destroy()` to + * end the connection. + * + * ```js + * socket.setTimeout(3000); + * socket.on('timeout', () => { + * console.log('socket timeout'); + * socket.end(); + * }); + * ``` + * + * If `timeout` is 0, then the existing idle timeout is disabled. + * + * The optional `callback` parameter will be added as a one-time listener for the `'timeout'` event. + * @since v0.1.90 + * @return The socket itself. + */ + setTimeout(timeout: number, callback?: () => void): this; + /** + * Enable/disable the use of Nagle's algorithm. + * + * When a TCP connection is created, it will have Nagle's algorithm enabled. + * + * Nagle's algorithm delays data before it is sent via the network. It attempts + * to optimize throughput at the expense of latency. 
+ * + * Passing `true` for `noDelay` or not passing an argument will disable Nagle's + * algorithm for the socket. Passing `false` for `noDelay` will enable Nagle's + * algorithm. + * @since v0.1.90 + * @param [noDelay=true] + * @return The socket itself. + */ + setNoDelay(noDelay?: boolean): this; + /** + * Enable/disable keep-alive functionality, and optionally set the initial + * delay before the first keepalive probe is sent on an idle socket. + * + * Set `initialDelay` (in milliseconds) to set the delay between the last + * data packet received and the first keepalive probe. Setting `0` for`initialDelay` will leave the value unchanged from the default + * (or previous) setting. + * + * Enabling the keep-alive functionality will set the following socket options: + * + * * `SO_KEEPALIVE=1` + * * `TCP_KEEPIDLE=initialDelay` + * * `TCP_KEEPCNT=10` + * * `TCP_KEEPINTVL=1` + * @since v0.1.92 + * @param [enable=false] + * @param [initialDelay=0] + * @return The socket itself. + */ + setKeepAlive(enable?: boolean, initialDelay?: number): this; + /** + * Returns the bound `address`, the address `family` name and `port` of the + * socket as reported by the operating system:`{ port: 12346, family: 'IPv4', address: '127.0.0.1' }` + * @since v0.1.90 + */ + address(): AddressInfo | {}; + /** + * Calling `unref()` on a socket will allow the program to exit if this is the only + * active socket in the event system. If the socket is already `unref`ed calling`unref()` again will have no effect. + * @since v0.9.1 + * @return The socket itself. + */ + unref(): this; + /** + * Opposite of `unref()`, calling `ref()` on a previously `unref`ed socket will _not_ let the program exit if it's the only socket left (the default behavior). + * If the socket is `ref`ed calling `ref` again will have no effect. + * @since v0.9.1 + * @return The socket itself. + */ + ref(): this; + /** + * This property shows the number of characters buffered for writing. The buffer + * may contain strings whose length after encoding is not yet known. So this number + * is only an approximation of the number of bytes in the buffer. + * + * `net.Socket` has the property that `socket.write()` always works. This is to + * help users get up and running quickly. The computer cannot always keep up + * with the amount of data that is written to a socket. The network connection + * simply might be too slow. Node.js will internally queue up the data written to a + * socket and send it out over the wire when it is possible. + * + * The consequence of this internal buffering is that memory may grow. + * Users who experience large or growing `bufferSize` should attempt to + * "throttle" the data flows in their program with `socket.pause()` and `socket.resume()`. + * @since v0.3.8 + * @deprecated Since v14.6.0 - Use `writableLength` instead. + */ + readonly bufferSize: number; + /** + * The amount of received bytes. + * @since v0.5.3 + */ + readonly bytesRead: number; + /** + * The amount of bytes sent. + * @since v0.5.3 + */ + readonly bytesWritten: number; + /** + * If `true`,`socket.connect(options[, connectListener])` was + * called and has not yet finished. It will stay `true` until the socket becomes + * connected, then it is set to `false` and the `'connect'` event is emitted. Note + * that the `socket.connect(options[, connectListener])` callback is a listener for the `'connect'` event. + * @since v6.1.0 + */ + readonly connecting: boolean; + /** + * See `writable.destroyed` for further details. 
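+ *
+ * A minimal sketch (assumes `socket` is an existing `net.Socket` instance):
+ *
+ * ```js
+ * socket.on('close', () => {
+ *   console.log(socket.destroyed); // true once the socket has been destroyed
+ * });
+ * ```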
+ */ + readonly destroyed: boolean; + /** + * The string representation of the local IP address the remote client is + * connecting on. For example, in a server listening on `'0.0.0.0'`, if a client + * connects on `'192.168.1.1'`, the value of `socket.localAddress` would be`'192.168.1.1'`. + * @since v0.9.6 + */ + readonly localAddress?: string; + /** + * The numeric representation of the local port. For example, `80` or `21`. + * @since v0.9.6 + */ + readonly localPort?: number; + /** + * The string representation of the local IP family. `'IPv4'` or `'IPv6'`. + * @since v18.8.0 + */ + readonly localFamily?: string; + /** + * This property represents the state of the connection as a string. + * @see {https://nodejs.org/api/net.html#socketreadystate} + * @since v0.5.0 + */ + readonly readyState: SocketReadyState; + /** + * The string representation of the remote IP address. For example,`'74.125.127.100'` or `'2001:4860:a005::68'`. Value may be `undefined` if + * the socket is destroyed (for example, if the client disconnected). + * @since v0.5.10 + */ + readonly remoteAddress?: string | undefined; + /** + * The string representation of the remote IP family. `'IPv4'` or `'IPv6'`. + * @since v0.11.14 + */ + readonly remoteFamily?: string | undefined; + /** + * The numeric representation of the remote port. For example, `80` or `21`. + * @since v0.5.10 + */ + readonly remotePort?: number | undefined; + /** + * The socket timeout in milliseconds as set by socket.setTimeout(). It is undefined if a timeout has not been set. + * @since v10.7.0 + */ + readonly timeout?: number | undefined; + /** + * Half-closes the socket. i.e., it sends a FIN packet. It is possible the + * server will still send some data. + * + * See `writable.end()` for further details. + * @since v0.1.90 + * @param [encoding='utf8'] Only used when data is `string`. + * @param callback Optional callback for when the socket is finished. + * @return The socket itself. + */ + end(callback?: () => void): this; + end(buffer: Uint8Array | string, callback?: () => void): this; + end(str: Uint8Array | string, encoding?: BufferEncoding, callback?: () => void): this; + /** + * events.EventEmitter + * 1. close + * 2. connect + * 3. data + * 4. drain + * 5. end + * 6. error + * 7. lookup + * 8. ready + * 9. 
timeout + */ + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: 'close', listener: (hadError: boolean) => void): this; + addListener(event: 'connect', listener: () => void): this; + addListener(event: 'data', listener: (data: Buffer) => void): this; + addListener(event: 'drain', listener: () => void): this; + addListener(event: 'end', listener: () => void): this; + addListener(event: 'error', listener: (err: Error) => void): this; + addListener(event: 'lookup', listener: (err: Error, address: string, family: string | number, host: string) => void): this; + addListener(event: 'ready', listener: () => void): this; + addListener(event: 'timeout', listener: () => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: 'close', hadError: boolean): boolean; + emit(event: 'connect'): boolean; + emit(event: 'data', data: Buffer): boolean; + emit(event: 'drain'): boolean; + emit(event: 'end'): boolean; + emit(event: 'error', err: Error): boolean; + emit(event: 'lookup', err: Error, address: string, family: string | number, host: string): boolean; + emit(event: 'ready'): boolean; + emit(event: 'timeout'): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: 'close', listener: (hadError: boolean) => void): this; + on(event: 'connect', listener: () => void): this; + on(event: 'data', listener: (data: Buffer) => void): this; + on(event: 'drain', listener: () => void): this; + on(event: 'end', listener: () => void): this; + on(event: 'error', listener: (err: Error) => void): this; + on(event: 'lookup', listener: (err: Error, address: string, family: string | number, host: string) => void): this; + on(event: 'ready', listener: () => void): this; + on(event: 'timeout', listener: () => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: 'close', listener: (hadError: boolean) => void): this; + once(event: 'connect', listener: () => void): this; + once(event: 'data', listener: (data: Buffer) => void): this; + once(event: 'drain', listener: () => void): this; + once(event: 'end', listener: () => void): this; + once(event: 'error', listener: (err: Error) => void): this; + once(event: 'lookup', listener: (err: Error, address: string, family: string | number, host: string) => void): this; + once(event: 'ready', listener: () => void): this; + once(event: 'timeout', listener: () => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: 'close', listener: (hadError: boolean) => void): this; + prependListener(event: 'connect', listener: () => void): this; + prependListener(event: 'data', listener: (data: Buffer) => void): this; + prependListener(event: 'drain', listener: () => void): this; + prependListener(event: 'end', listener: () => void): this; + prependListener(event: 'error', listener: (err: Error) => void): this; + prependListener(event: 'lookup', listener: (err: Error, address: string, family: string | number, host: string) => void): this; + prependListener(event: 'ready', listener: () => void): this; + prependListener(event: 'timeout', listener: () => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: 'close', listener: (hadError: boolean) => void): this; + prependOnceListener(event: 'connect', listener: () => void): this; + prependOnceListener(event: 'data', listener: (data: Buffer) => void): this; + prependOnceListener(event: 'drain', 
listener: () => void): this; + prependOnceListener(event: 'end', listener: () => void): this; + prependOnceListener(event: 'error', listener: (err: Error) => void): this; + prependOnceListener(event: 'lookup', listener: (err: Error, address: string, family: string | number, host: string) => void): this; + prependOnceListener(event: 'ready', listener: () => void): this; + prependOnceListener(event: 'timeout', listener: () => void): this; + } + interface ListenOptions extends Abortable { + port?: number | undefined; + host?: string | undefined; + backlog?: number | undefined; + path?: string | undefined; + exclusive?: boolean | undefined; + readableAll?: boolean | undefined; + writableAll?: boolean | undefined; + /** + * @default false + */ + ipv6Only?: boolean | undefined; + } + interface ServerOpts { + /** + * Indicates whether half-opened TCP connections are allowed. + * @default false + */ + allowHalfOpen?: boolean | undefined; + /** + * Indicates whether the socket should be paused on incoming connections. + * @default false + */ + pauseOnConnect?: boolean | undefined; + /** + * If set to `true`, it disables the use of Nagle's algorithm immediately after a new incoming connection is received. + * @default false + * @since v16.5.0 + */ + noDelay?: boolean | undefined; + /** + * If set to `true`, it enables keep-alive functionality on the socket immediately after a new incoming connection is received, + * similarly on what is done in `socket.setKeepAlive([enable][, initialDelay])`. + * @default false + * @since v16.5.0 + */ + keepAlive?: boolean | undefined; + /** + * If set to a positive number, it sets the initial delay before the first keepalive probe is sent on an idle socket. + * @default 0 + * @since v16.5.0 + */ + keepAliveInitialDelay?: number | undefined; + } + interface DropArgument { + localAddress?: string; + localPort?: number; + localFamily?: string; + remoteAddress?: string; + remotePort?: number; + remoteFamily?: string; + } + /** + * This class is used to create a TCP or `IPC` server. + * @since v0.1.90 + */ + class Server extends EventEmitter { + constructor(connectionListener?: (socket: Socket) => void); + constructor(options?: ServerOpts, connectionListener?: (socket: Socket) => void); + /** + * Start a server listening for connections. A `net.Server` can be a TCP or + * an `IPC` server depending on what it listens to. + * + * Possible signatures: + * + * * `server.listen(handle[, backlog][, callback])` + * * `server.listen(options[, callback])` + * * `server.listen(path[, backlog][, callback])` for `IPC` servers + * * `server.listen([port[, host[, backlog]]][, callback])` for TCP servers + * + * This function is asynchronous. When the server starts listening, the `'listening'` event will be emitted. The last parameter `callback`will be added as a listener for the `'listening'` + * event. + * + * All `listen()` methods can take a `backlog` parameter to specify the maximum + * length of the queue of pending connections. The actual length will be determined + * by the OS through sysctl settings such as `tcp_max_syn_backlog` and `somaxconn`on Linux. The default value of this parameter is 511 (not 512). + * + * All {@link Socket} are set to `SO_REUSEADDR` (see [`socket(7)`](https://man7.org/linux/man-pages/man7/socket.7.html) for + * details). + * + * The `server.listen()` method can be called again if and only if there was an + * error during the first `server.listen()` call or `server.close()` has been + * called. 
Otherwise, an `ERR_SERVER_ALREADY_LISTEN` error will be thrown. + * + * One of the most common errors raised when listening is `EADDRINUSE`. + * This happens when another server is already listening on the requested`port`/`path`/`handle`. One way to handle this would be to retry + * after a certain amount of time: + * + * ```js + * server.on('error', (e) => { + * if (e.code === 'EADDRINUSE') { + * console.log('Address in use, retrying...'); + * setTimeout(() => { + * server.close(); + * server.listen(PORT, HOST); + * }, 1000); + * } + * }); + * ``` + */ + listen(port?: number, hostname?: string, backlog?: number, listeningListener?: () => void): this; + listen(port?: number, hostname?: string, listeningListener?: () => void): this; + listen(port?: number, backlog?: number, listeningListener?: () => void): this; + listen(port?: number, listeningListener?: () => void): this; + listen(path: string, backlog?: number, listeningListener?: () => void): this; + listen(path: string, listeningListener?: () => void): this; + listen(options: ListenOptions, listeningListener?: () => void): this; + listen(handle: any, backlog?: number, listeningListener?: () => void): this; + listen(handle: any, listeningListener?: () => void): this; + /** + * Stops the server from accepting new connections and keeps existing + * connections. This function is asynchronous, the server is finally closed + * when all connections are ended and the server emits a `'close'` event. + * The optional `callback` will be called once the `'close'` event occurs. Unlike + * that event, it will be called with an `Error` as its only argument if the server + * was not open when it was closed. + * @since v0.1.90 + * @param callback Called when the server is closed. + */ + close(callback?: (err?: Error) => void): this; + /** + * Returns the bound `address`, the address `family` name, and `port` of the server + * as reported by the operating system if listening on an IP socket + * (useful to find which port was assigned when getting an OS-assigned address):`{ port: 12346, family: 'IPv4', address: '127.0.0.1' }`. + * + * For a server listening on a pipe or Unix domain socket, the name is returned + * as a string. + * + * ```js + * const server = net.createServer((socket) => { + * socket.end('goodbye\n'); + * }).on('error', (err) => { + * // Handle errors here. + * throw err; + * }); + * + * // Grab an arbitrary unused port. + * server.listen(() => { + * console.log('opened server on', server.address()); + * }); + * ``` + * + * `server.address()` returns `null` before the `'listening'` event has been + * emitted or after calling `server.close()`. + * @since v0.1.90 + */ + address(): AddressInfo | string | null; + /** + * Asynchronously get the number of concurrent connections on the server. Works + * when sockets were sent to forks. + * + * Callback should take two arguments `err` and `count`. + * @since v0.9.7 + */ + getConnections(cb: (error: Error | null, count: number) => void): void; + /** + * Opposite of `unref()`, calling `ref()` on a previously `unref`ed server will _not_ let the program exit if it's the only server left (the default behavior). + * If the server is `ref`ed calling `ref()` again will have no effect. + * @since v0.9.1 + */ + ref(): this; + /** + * Calling `unref()` on a server will allow the program to exit if this is the only + * active server in the event system. If the server is already `unref`ed calling`unref()` again will have no effect. 
+ * @since v0.9.1 + */ + unref(): this; + /** + * Set this property to reject connections when the server's connection count gets + * high. + * + * It is not recommended to use this option once a socket has been sent to a child + * with `child_process.fork()`. + * @since v0.2.0 + */ + maxConnections: number; + connections: number; + /** + * Indicates whether or not the server is listening for connections. + * @since v5.7.0 + */ + listening: boolean; + /** + * events.EventEmitter + * 1. close + * 2. connection + * 3. error + * 4. listening + * 5. drop + */ + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: 'close', listener: () => void): this; + addListener(event: 'connection', listener: (socket: Socket) => void): this; + addListener(event: 'error', listener: (err: Error) => void): this; + addListener(event: 'listening', listener: () => void): this; + addListener(event: 'drop', listener: (data?: DropArgument) => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: 'close'): boolean; + emit(event: 'connection', socket: Socket): boolean; + emit(event: 'error', err: Error): boolean; + emit(event: 'listening'): boolean; + emit(event: 'drop', data?: DropArgument): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: 'close', listener: () => void): this; + on(event: 'connection', listener: (socket: Socket) => void): this; + on(event: 'error', listener: (err: Error) => void): this; + on(event: 'listening', listener: () => void): this; + on(event: 'drop', listener: (data?: DropArgument) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: 'close', listener: () => void): this; + once(event: 'connection', listener: (socket: Socket) => void): this; + once(event: 'error', listener: (err: Error) => void): this; + once(event: 'listening', listener: () => void): this; + once(event: 'drop', listener: (data?: DropArgument) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: 'close', listener: () => void): this; + prependListener(event: 'connection', listener: (socket: Socket) => void): this; + prependListener(event: 'error', listener: (err: Error) => void): this; + prependListener(event: 'listening', listener: () => void): this; + prependListener(event: 'drop', listener: (data?: DropArgument) => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: 'close', listener: () => void): this; + prependOnceListener(event: 'connection', listener: (socket: Socket) => void): this; + prependOnceListener(event: 'error', listener: (err: Error) => void): this; + prependOnceListener(event: 'listening', listener: () => void): this; + prependOnceListener(event: 'drop', listener: (data?: DropArgument) => void): this; + } + type IPVersion = 'ipv4' | 'ipv6'; + /** + * The `BlockList` object can be used with some network APIs to specify rules for + * disabling inbound or outbound access to specific IP addresses, IP ranges, or + * IP subnets. + * @since v15.0.0, v14.18.0 + */ + class BlockList { + /** + * Adds a rule to block the given IP address. + * @since v15.0.0, v14.18.0 + * @param address An IPv4 or IPv6 address. + * @param [type='ipv4'] Either `'ipv4'` or `'ipv6'`. 
+ */ + addAddress(address: string, type?: IPVersion): void; + addAddress(address: SocketAddress): void; + /** + * Adds a rule to block a range of IP addresses from `start` (inclusive) to`end` (inclusive). + * @since v15.0.0, v14.18.0 + * @param start The starting IPv4 or IPv6 address in the range. + * @param end The ending IPv4 or IPv6 address in the range. + * @param [type='ipv4'] Either `'ipv4'` or `'ipv6'`. + */ + addRange(start: string, end: string, type?: IPVersion): void; + addRange(start: SocketAddress, end: SocketAddress): void; + /** + * Adds a rule to block a range of IP addresses specified as a subnet mask. + * @since v15.0.0, v14.18.0 + * @param net The network IPv4 or IPv6 address. + * @param prefix The number of CIDR prefix bits. For IPv4, this must be a value between `0` and `32`. For IPv6, this must be between `0` and `128`. + * @param [type='ipv4'] Either `'ipv4'` or `'ipv6'`. + */ + addSubnet(net: SocketAddress, prefix: number): void; + addSubnet(net: string, prefix: number, type?: IPVersion): void; + /** + * Returns `true` if the given IP address matches any of the rules added to the`BlockList`. + * + * ```js + * const blockList = new net.BlockList(); + * blockList.addAddress('123.123.123.123'); + * blockList.addRange('10.0.0.1', '10.0.0.10'); + * blockList.addSubnet('8592:757c:efae:4e45::', 64, 'ipv6'); + * + * console.log(blockList.check('123.123.123.123')); // Prints: true + * console.log(blockList.check('10.0.0.3')); // Prints: true + * console.log(blockList.check('222.111.111.222')); // Prints: false + * + * // IPv6 notation for IPv4 addresses works: + * console.log(blockList.check('::ffff:7b7b:7b7b', 'ipv6')); // Prints: true + * console.log(blockList.check('::ffff:123.123.123.123', 'ipv6')); // Prints: true + * ``` + * @since v15.0.0, v14.18.0 + * @param address The IP address to check + * @param [type='ipv4'] Either `'ipv4'` or `'ipv6'`. + */ + check(address: SocketAddress): boolean; + check(address: string, type?: IPVersion): boolean; + } + interface TcpNetConnectOpts extends TcpSocketConnectOpts, SocketConstructorOpts { + timeout?: number | undefined; + } + interface IpcNetConnectOpts extends IpcSocketConnectOpts, SocketConstructorOpts { + timeout?: number | undefined; + } + type NetConnectOpts = TcpNetConnectOpts | IpcNetConnectOpts; + /** + * Creates a new TCP or `IPC` server. + * + * If `allowHalfOpen` is set to `true`, when the other end of the socket + * signals the end of transmission, the server will only send back the end of + * transmission when `socket.end()` is explicitly called. For example, in the + * context of TCP, when a FIN packed is received, a FIN packed is sent + * back only when `socket.end()` is explicitly called. Until then the + * connection is half-closed (non-readable but still writable). See `'end'` event and [RFC 1122](https://tools.ietf.org/html/rfc1122) (section 4.2.2.13) for more information. + * + * If `pauseOnConnect` is set to `true`, then the socket associated with each + * incoming connection will be paused, and no data will be read from its handle. + * This allows connections to be passed between processes without any data being + * read by the original process. To begin reading data from a paused socket, call `socket.resume()`. + * + * The server can be a TCP server or an `IPC` server, depending on what it `listen()` to. 
+ * + * Here is an example of a TCP echo server which listens for connections + * on port 8124: + * + * ```js + * const net = require('net'); + * const server = net.createServer((c) => { + * // 'connection' listener. + * console.log('client connected'); + * c.on('end', () => { + * console.log('client disconnected'); + * }); + * c.write('hello\r\n'); + * c.pipe(c); + * }); + * server.on('error', (err) => { + * throw err; + * }); + * server.listen(8124, () => { + * console.log('server bound'); + * }); + * ``` + * + * Test this by using `telnet`: + * + * ```console + * $ telnet localhost 8124 + * ``` + * + * To listen on the socket `/tmp/echo.sock`: + * + * ```js + * server.listen('/tmp/echo.sock', () => { + * console.log('server bound'); + * }); + * ``` + * + * Use `nc` to connect to a Unix domain socket server: + * + * ```console + * $ nc -U /tmp/echo.sock + * ``` + * @since v0.5.0 + * @param connectionListener Automatically set as a listener for the {@link 'connection'} event. + */ + function createServer(connectionListener?: (socket: Socket) => void): Server; + function createServer(options?: ServerOpts, connectionListener?: (socket: Socket) => void): Server; + /** + * Aliases to {@link createConnection}. + * + * Possible signatures: + * + * * {@link connect} + * * {@link connect} for `IPC` connections. + * * {@link connect} for TCP connections. + */ + function connect(options: NetConnectOpts, connectionListener?: () => void): Socket; + function connect(port: number, host?: string, connectionListener?: () => void): Socket; + function connect(path: string, connectionListener?: () => void): Socket; + /** + * A factory function, which creates a new {@link Socket}, + * immediately initiates connection with `socket.connect()`, + * then returns the `net.Socket` that starts the connection. + * + * When the connection is established, a `'connect'` event will be emitted + * on the returned socket. The last parameter `connectListener`, if supplied, + * will be added as a listener for the `'connect'` event **once**. + * + * Possible signatures: + * + * * {@link createConnection} + * * {@link createConnection} for `IPC` connections. + * * {@link createConnection} for TCP connections. + * + * The {@link connect} function is an alias to this function. + */ + function createConnection(options: NetConnectOpts, connectionListener?: () => void): Socket; + function createConnection(port: number, host?: string, connectionListener?: () => void): Socket; + function createConnection(path: string, connectionListener?: () => void): Socket; + /** + * Returns `6` if `input` is an IPv6 address. Returns `4` if `input` is an IPv4 + * address in [dot-decimal notation](https://en.wikipedia.org/wiki/Dot-decimal_notation) with no leading zeroes. Otherwise, returns`0`. + * + * ```js + * net.isIP('::1'); // returns 6 + * net.isIP('127.0.0.1'); // returns 4 + * net.isIP('127.000.000.001'); // returns 0 + * net.isIP('127.0.0.1/24'); // returns 0 + * net.isIP('fhqwhgads'); // returns 0 + * ``` + * @since v0.3.0 + */ + function isIP(input: string): number; + /** + * Returns `true` if `input` is an IPv4 address in [dot-decimal notation](https://en.wikipedia.org/wiki/Dot-decimal_notation) with no + * leading zeroes. Otherwise, returns `false`. 
+ * + * ```js + * net.isIPv4('127.0.0.1'); // returns true + * net.isIPv4('127.000.000.001'); // returns false + * net.isIPv4('127.0.0.1/24'); // returns false + * net.isIPv4('fhqwhgads'); // returns false + * ``` + * @since v0.3.0 + */ + function isIPv4(input: string): boolean; + /** + * Returns `true` if `input` is an IPv6 address. Otherwise, returns `false`. + * + * ```js + * net.isIPv6('::1'); // returns true + * net.isIPv6('fhqwhgads'); // returns false + * ``` + * @since v0.3.0 + */ + function isIPv6(input: string): boolean; + interface SocketAddressInitOptions { + /** + * The network address as either an IPv4 or IPv6 string. + * @default 127.0.0.1 + */ + address?: string | undefined; + /** + * @default `'ipv4'` + */ + family?: IPVersion | undefined; + /** + * An IPv6 flow-label used only if `family` is `'ipv6'`. + * @default 0 + */ + flowlabel?: number | undefined; + /** + * An IP port. + * @default 0 + */ + port?: number | undefined; + } + /** + * @since v15.14.0, v14.18.0 + */ + class SocketAddress { + constructor(options: SocketAddressInitOptions); + /** + * @since v15.14.0, v14.18.0 + */ + readonly address: string; + /** + * Either \`'ipv4'\` or \`'ipv6'\`. + * @since v15.14.0, v14.18.0 + */ + readonly family: IPVersion; + /** + * @since v15.14.0, v14.18.0 + */ + readonly port: number; + /** + * @since v15.14.0, v14.18.0 + */ + readonly flowlabel: number; + } +} +declare module 'node:net' { + export * from 'net'; +} diff --git a/node_modules/@types/node/os.d.ts b/node_modules/@types/node/os.d.ts new file mode 100755 index 0000000..3c55599 --- /dev/null +++ b/node_modules/@types/node/os.d.ts @@ -0,0 +1,466 @@ +/** + * The `os` module provides operating system-related utility methods and + * properties. It can be accessed using: + * + * ```js + * const os = require('os'); + * ``` + * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/os.js) + */ +declare module 'os' { + interface CpuInfo { + model: string; + speed: number; + times: { + user: number; + nice: number; + sys: number; + idle: number; + irq: number; + }; + } + interface NetworkInterfaceBase { + address: string; + netmask: string; + mac: string; + internal: boolean; + cidr: string | null; + } + interface NetworkInterfaceInfoIPv4 extends NetworkInterfaceBase { + family: 'IPv4'; + scopeid?: undefined; + } + interface NetworkInterfaceInfoIPv6 extends NetworkInterfaceBase { + family: 'IPv6'; + scopeid: number; + } + interface UserInfo { + username: T; + uid: number; + gid: number; + shell: T; + homedir: T; + } + type NetworkInterfaceInfo = NetworkInterfaceInfoIPv4 | NetworkInterfaceInfoIPv6; + /** + * Returns the host name of the operating system as a string. + * @since v0.3.3 + */ + function hostname(): string; + /** + * Returns an array containing the 1, 5, and 15 minute load averages. + * + * The load average is a measure of system activity calculated by the operating + * system and expressed as a fractional number. + * + * The load average is a Unix-specific concept. On Windows, the return value is + * always `[0, 0, 0]`. + * @since v0.3.3 + */ + function loadavg(): number[]; + /** + * Returns the system uptime in number of seconds. + * @since v0.3.3 + */ + function uptime(): number; + /** + * Returns the amount of free system memory in bytes as an integer. + * @since v0.3.3 + */ + function freemem(): number; + /** + * Returns the total amount of system memory in bytes as an integer. 
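+ *
+ * A quick sketch combining the memory helpers above (actual values vary by machine):
+ *
+ * ```js
+ * const os = require('os');
+ * const usedFraction = 1 - os.freemem() / os.totalmem();
+ * console.log(`~${(usedFraction * 100).toFixed(1)}% of memory in use`);
+ * ```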
+ * @since v0.3.3 + */ + function totalmem(): number; + /** + * Returns an array of objects containing information about each logical CPU core. + * + * The properties included on each object include: + * + * ```js + * [ + * { + * model: 'Intel(R) Core(TM) i7 CPU 860 @ 2.80GHz', + * speed: 2926, + * times: { + * user: 252020, + * nice: 0, + * sys: 30340, + * idle: 1070356870, + * irq: 0 + * } + * }, + * { + * model: 'Intel(R) Core(TM) i7 CPU 860 @ 2.80GHz', + * speed: 2926, + * times: { + * user: 306960, + * nice: 0, + * sys: 26980, + * idle: 1071569080, + * irq: 0 + * } + * }, + * { + * model: 'Intel(R) Core(TM) i7 CPU 860 @ 2.80GHz', + * speed: 2926, + * times: { + * user: 248450, + * nice: 0, + * sys: 21750, + * idle: 1070919370, + * irq: 0 + * } + * }, + * { + * model: 'Intel(R) Core(TM) i7 CPU 860 @ 2.80GHz', + * speed: 2926, + * times: { + * user: 256880, + * nice: 0, + * sys: 19430, + * idle: 1070905480, + * irq: 20 + * } + * }, + * ] + * ``` + * + * `nice` values are POSIX-only. On Windows, the `nice` values of all processors + * are always 0. + * @since v0.3.3 + */ + function cpus(): CpuInfo[]; + /** + * Returns the operating system name as returned by [`uname(3)`](https://linux.die.net/man/3/uname). For example, it + * returns `'Linux'` on Linux, `'Darwin'` on macOS, and `'Windows_NT'` on Windows. + * + * See [https://en.wikipedia.org/wiki/Uname#Examples](https://en.wikipedia.org/wiki/Uname#Examples) for additional information + * about the output of running [`uname(3)`](https://linux.die.net/man/3/uname) on various operating systems. + * @since v0.3.3 + */ + function type(): string; + /** + * Returns the operating system as a string. + * + * On POSIX systems, the operating system release is determined by calling [`uname(3)`](https://linux.die.net/man/3/uname). On Windows, `GetVersionExW()` is used. See + * [https://en.wikipedia.org/wiki/Uname#Examples](https://en.wikipedia.org/wiki/Uname#Examples) for more information. + * @since v0.3.3 + */ + function release(): string; + /** + * Returns an object containing network interfaces that have been assigned a + * network address. + * + * Each key on the returned object identifies a network interface. The associated + * value is an array of objects that each describe an assigned network address. + * + * The properties available on the assigned network address object include: + * + * ```js + * { + * lo: [ + * { + * address: '127.0.0.1', + * netmask: '255.0.0.0', + * family: 'IPv4', + * mac: '00:00:00:00:00:00', + * internal: true, + * cidr: '127.0.0.1/8' + * }, + * { + * address: '::1', + * netmask: 'ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff', + * family: 'IPv6', + * mac: '00:00:00:00:00:00', + * scopeid: 0, + * internal: true, + * cidr: '::1/128' + * } + * ], + * eth0: [ + * { + * address: '192.168.1.108', + * netmask: '255.255.255.0', + * family: 'IPv4', + * mac: '01:02:03:0a:0b:0c', + * internal: false, + * cidr: '192.168.1.108/24' + * }, + * { + * address: 'fe80::a00:27ff:fe4e:66a1', + * netmask: 'ffff:ffff:ffff:ffff::', + * family: 'IPv6', + * mac: '01:02:03:0a:0b:0c', + * scopeid: 1, + * internal: false, + * cidr: 'fe80::a00:27ff:fe4e:66a1/64' + * } + * ] + * } + * ``` + * @since v0.6.0 + */ + function networkInterfaces(): NodeJS.Dict; + /** + * Returns the string path of the current user's home directory. + * + * On POSIX, it uses the `$HOME` environment variable if defined. Otherwise it + * uses the [effective UID](https://en.wikipedia.org/wiki/User_identifier#Effective_user_ID) to look up the user's home directory. 
+ * + * On Windows, it uses the `USERPROFILE` environment variable if defined. + * Otherwise it uses the path to the profile directory of the current user. + * @since v2.3.0 + */ + function homedir(): string; + /** + * Returns information about the currently effective user. On POSIX platforms, + * this is typically a subset of the password file. The returned object includes + * the `username`, `uid`, `gid`, `shell`, and `homedir`. On Windows, the `uid` and`gid` fields are `-1`, and `shell` is `null`. + * + * The value of `homedir` returned by `os.userInfo()` is provided by the operating + * system. This differs from the result of `os.homedir()`, which queries + * environment variables for the home directory before falling back to the + * operating system response. + * + * Throws a `SystemError` if a user has no `username` or `homedir`. + * @since v6.0.0 + */ + function userInfo(options: { encoding: 'buffer' }): UserInfo; + function userInfo(options?: { encoding: BufferEncoding }): UserInfo; + type SignalConstants = { + [key in NodeJS.Signals]: number; + }; + namespace constants { + const UV_UDP_REUSEADDR: number; + namespace signals {} + const signals: SignalConstants; + namespace errno { + const E2BIG: number; + const EACCES: number; + const EADDRINUSE: number; + const EADDRNOTAVAIL: number; + const EAFNOSUPPORT: number; + const EAGAIN: number; + const EALREADY: number; + const EBADF: number; + const EBADMSG: number; + const EBUSY: number; + const ECANCELED: number; + const ECHILD: number; + const ECONNABORTED: number; + const ECONNREFUSED: number; + const ECONNRESET: number; + const EDEADLK: number; + const EDESTADDRREQ: number; + const EDOM: number; + const EDQUOT: number; + const EEXIST: number; + const EFAULT: number; + const EFBIG: number; + const EHOSTUNREACH: number; + const EIDRM: number; + const EILSEQ: number; + const EINPROGRESS: number; + const EINTR: number; + const EINVAL: number; + const EIO: number; + const EISCONN: number; + const EISDIR: number; + const ELOOP: number; + const EMFILE: number; + const EMLINK: number; + const EMSGSIZE: number; + const EMULTIHOP: number; + const ENAMETOOLONG: number; + const ENETDOWN: number; + const ENETRESET: number; + const ENETUNREACH: number; + const ENFILE: number; + const ENOBUFS: number; + const ENODATA: number; + const ENODEV: number; + const ENOENT: number; + const ENOEXEC: number; + const ENOLCK: number; + const ENOLINK: number; + const ENOMEM: number; + const ENOMSG: number; + const ENOPROTOOPT: number; + const ENOSPC: number; + const ENOSR: number; + const ENOSTR: number; + const ENOSYS: number; + const ENOTCONN: number; + const ENOTDIR: number; + const ENOTEMPTY: number; + const ENOTSOCK: number; + const ENOTSUP: number; + const ENOTTY: number; + const ENXIO: number; + const EOPNOTSUPP: number; + const EOVERFLOW: number; + const EPERM: number; + const EPIPE: number; + const EPROTO: number; + const EPROTONOSUPPORT: number; + const EPROTOTYPE: number; + const ERANGE: number; + const EROFS: number; + const ESPIPE: number; + const ESRCH: number; + const ESTALE: number; + const ETIME: number; + const ETIMEDOUT: number; + const ETXTBSY: number; + const EWOULDBLOCK: number; + const EXDEV: number; + const WSAEINTR: number; + const WSAEBADF: number; + const WSAEACCES: number; + const WSAEFAULT: number; + const WSAEINVAL: number; + const WSAEMFILE: number; + const WSAEWOULDBLOCK: number; + const WSAEINPROGRESS: number; + const WSAEALREADY: number; + const WSAENOTSOCK: number; + const WSAEDESTADDRREQ: number; + const WSAEMSGSIZE: number; 
+ const WSAEPROTOTYPE: number; + const WSAENOPROTOOPT: number; + const WSAEPROTONOSUPPORT: number; + const WSAESOCKTNOSUPPORT: number; + const WSAEOPNOTSUPP: number; + const WSAEPFNOSUPPORT: number; + const WSAEAFNOSUPPORT: number; + const WSAEADDRINUSE: number; + const WSAEADDRNOTAVAIL: number; + const WSAENETDOWN: number; + const WSAENETUNREACH: number; + const WSAENETRESET: number; + const WSAECONNABORTED: number; + const WSAECONNRESET: number; + const WSAENOBUFS: number; + const WSAEISCONN: number; + const WSAENOTCONN: number; + const WSAESHUTDOWN: number; + const WSAETOOMANYREFS: number; + const WSAETIMEDOUT: number; + const WSAECONNREFUSED: number; + const WSAELOOP: number; + const WSAENAMETOOLONG: number; + const WSAEHOSTDOWN: number; + const WSAEHOSTUNREACH: number; + const WSAENOTEMPTY: number; + const WSAEPROCLIM: number; + const WSAEUSERS: number; + const WSAEDQUOT: number; + const WSAESTALE: number; + const WSAEREMOTE: number; + const WSASYSNOTREADY: number; + const WSAVERNOTSUPPORTED: number; + const WSANOTINITIALISED: number; + const WSAEDISCON: number; + const WSAENOMORE: number; + const WSAECANCELLED: number; + const WSAEINVALIDPROCTABLE: number; + const WSAEINVALIDPROVIDER: number; + const WSAEPROVIDERFAILEDINIT: number; + const WSASYSCALLFAILURE: number; + const WSASERVICE_NOT_FOUND: number; + const WSATYPE_NOT_FOUND: number; + const WSA_E_NO_MORE: number; + const WSA_E_CANCELLED: number; + const WSAEREFUSED: number; + } + namespace priority { + const PRIORITY_LOW: number; + const PRIORITY_BELOW_NORMAL: number; + const PRIORITY_NORMAL: number; + const PRIORITY_ABOVE_NORMAL: number; + const PRIORITY_HIGH: number; + const PRIORITY_HIGHEST: number; + } + } + const devNull: string; + const EOL: string; + /** + * Returns the operating system CPU architecture for which the Node.js binary was + * compiled. Possible values are `'arm'`, `'arm64'`, `'ia32'`, `'mips'`,`'mipsel'`, `'ppc'`, `'ppc64'`, `'s390'`, `'s390x'`, and `'x64'`. + * + * The return value is equivalent to `process.arch`. + * @since v0.5.0 + */ + function arch(): string; + /** + * Returns a string identifying the kernel version. + * + * On POSIX systems, the operating system release is determined by calling [`uname(3)`](https://linux.die.net/man/3/uname). On Windows, `RtlGetVersion()` is used, and if it is not + * available, `GetVersionExW()` will be used. See [https://en.wikipedia.org/wiki/Uname#Examples](https://en.wikipedia.org/wiki/Uname#Examples) for more information. + * @since v13.11.0, v12.17.0 + */ + function version(): string; + /** + * Returns a string identifying the operating system platform for which + * the Node.js binary was compiled. The value is set at compile time. + * Possible values are `'aix'`, `'darwin'`, `'freebsd'`,`'linux'`,`'openbsd'`, `'sunos'`, and `'win32'`. + * + * The return value is equivalent to `process.platform`. + * + * The value `'android'` may also be returned if Node.js is built on the Android + * operating system. [Android support is experimental](https://github.com/nodejs/node/blob/HEAD/BUILDING.md#androidandroid-based-devices-eg-firefox-os). + * @since v0.5.0 + */ + function platform(): NodeJS.Platform; + /** + * Returns the machine type as a string, such as arm, aarch64, mips, mips64, ppc64, ppc64le, s390, s390x, i386, i686, x86_64. + * + * On POSIX systems, the machine type is determined by calling [`uname(3)`](https://linux.die.net/man/3/uname). + * On Windows, `RtlGetVersion()` is used, and if it is not available, `GetVersionExW()` will be used. 
+ * See [https://en.wikipedia.org/wiki/Uname#Examples](https://en.wikipedia.org/wiki/Uname#Examples) for more information. + * @since v18.9.0 + */ + function machine(): string; + /** + * Returns the operating system's default directory for temporary files as a + * string. + * @since v0.9.9 + */ + function tmpdir(): string; + /** + * Returns a string identifying the endianness of the CPU for which the Node.js + * binary was compiled. + * + * Possible values are `'BE'` for big endian and `'LE'` for little endian. + * @since v0.9.4 + */ + function endianness(): 'BE' | 'LE'; + /** + * Returns the scheduling priority for the process specified by `pid`. If `pid` is + * not provided or is `0`, the priority of the current process is returned. + * @since v10.10.0 + * @param [pid=0] The process ID to retrieve scheduling priority for. + */ + function getPriority(pid?: number): number; + /** + * Attempts to set the scheduling priority for the process specified by `pid`. If`pid` is not provided or is `0`, the process ID of the current process is used. + * + * The `priority` input must be an integer between `-20` (high priority) and `19`(low priority). Due to differences between Unix priority levels and Windows + * priority classes, `priority` is mapped to one of six priority constants in`os.constants.priority`. When retrieving a process priority level, this range + * mapping may cause the return value to be slightly different on Windows. To avoid + * confusion, set `priority` to one of the priority constants. + * + * On Windows, setting priority to `PRIORITY_HIGHEST` requires elevated user + * privileges. Otherwise the set priority will be silently reduced to`PRIORITY_HIGH`. + * @since v10.10.0 + * @param [pid=0] The process ID to set scheduling priority for. + * @param priority The scheduling priority to assign to the process. + */ + function setPriority(priority: number): void; + function setPriority(pid: number, priority: number): void; +} +declare module 'node:os' { + export * from 'os'; +} diff --git a/node_modules/@types/node/package.json b/node_modules/@types/node/package.json new file mode 100755 index 0000000..3aad228 --- /dev/null +++ b/node_modules/@types/node/package.json @@ -0,0 +1,232 @@ +{ + "name": "@types/node", + "version": "18.11.3", + "description": "TypeScript definitions for Node.js", + "homepage": "https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/node", + "license": "MIT", + "contributors": [ + { + "name": "Microsoft TypeScript", + "url": "https://github.com/Microsoft", + "githubUsername": "Microsoft" + }, + { + "name": "DefinitelyTyped", + "url": "https://github.com/DefinitelyTyped", + "githubUsername": "DefinitelyTyped" + }, + { + "name": "Alberto Schiabel", + "url": "https://github.com/jkomyno", + "githubUsername": "jkomyno" + }, + { + "name": "Alvis HT Tang", + "url": "https://github.com/alvis", + "githubUsername": "alvis" + }, + { + "name": "Andrew Makarov", + "url": "https://github.com/r3nya", + "githubUsername": "r3nya" + }, + { + "name": "Benjamin Toueg", + "url": "https://github.com/btoueg", + "githubUsername": "btoueg" + }, + { + "name": "Chigozirim C.", + "url": "https://github.com/smac89", + "githubUsername": "smac89" + }, + { + "name": "David Junger", + "url": "https://github.com/touffy", + "githubUsername": "touffy" + }, + { + "name": "Deividas Bakanas", + "url": "https://github.com/DeividasBakanas", + "githubUsername": "DeividasBakanas" + }, + { + "name": "Eugene Y. Q. 
Shen", + "url": "https://github.com/eyqs", + "githubUsername": "eyqs" + }, + { + "name": "Hannes Magnusson", + "url": "https://github.com/Hannes-Magnusson-CK", + "githubUsername": "Hannes-Magnusson-CK" + }, + { + "name": "Huw", + "url": "https://github.com/hoo29", + "githubUsername": "hoo29" + }, + { + "name": "Kelvin Jin", + "url": "https://github.com/kjin", + "githubUsername": "kjin" + }, + { + "name": "Klaus Meinhardt", + "url": "https://github.com/ajafff", + "githubUsername": "ajafff" + }, + { + "name": "Lishude", + "url": "https://github.com/islishude", + "githubUsername": "islishude" + }, + { + "name": "Mariusz Wiktorczyk", + "url": "https://github.com/mwiktorczyk", + "githubUsername": "mwiktorczyk" + }, + { + "name": "Mohsen Azimi", + "url": "https://github.com/mohsen1", + "githubUsername": "mohsen1" + }, + { + "name": "Nicolas Even", + "url": "https://github.com/n-e", + "githubUsername": "n-e" + }, + { + "name": "Nikita Galkin", + "url": "https://github.com/galkin", + "githubUsername": "galkin" + }, + { + "name": "Parambir Singh", + "url": "https://github.com/parambirs", + "githubUsername": "parambirs" + }, + { + "name": "Sebastian Silbermann", + "url": "https://github.com/eps1lon", + "githubUsername": "eps1lon" + }, + { + "name": "Simon Schick", + "url": "https://github.com/SimonSchick", + "githubUsername": "SimonSchick" + }, + { + "name": "Thomas den Hollander", + "url": "https://github.com/ThomasdenH", + "githubUsername": "ThomasdenH" + }, + { + "name": "Wilco Bakker", + "url": "https://github.com/WilcoBakker", + "githubUsername": "WilcoBakker" + }, + { + "name": "wwwy3y3", + "url": "https://github.com/wwwy3y3", + "githubUsername": "wwwy3y3" + }, + { + "name": "Samuel Ainsworth", + "url": "https://github.com/samuela", + "githubUsername": "samuela" + }, + { + "name": "Kyle Uehlein", + "url": "https://github.com/kuehlein", + "githubUsername": "kuehlein" + }, + { + "name": "Thanik Bhongbhibhat", + "url": "https://github.com/bhongy", + "githubUsername": "bhongy" + }, + { + "name": "Marcin Kopacz", + "url": "https://github.com/chyzwar", + "githubUsername": "chyzwar" + }, + { + "name": "Trivikram Kamat", + "url": "https://github.com/trivikr", + "githubUsername": "trivikr" + }, + { + "name": "Junxiao Shi", + "url": "https://github.com/yoursunny", + "githubUsername": "yoursunny" + }, + { + "name": "Ilia Baryshnikov", + "url": "https://github.com/qwelias", + "githubUsername": "qwelias" + }, + { + "name": "ExE Boss", + "url": "https://github.com/ExE-Boss", + "githubUsername": "ExE-Boss" + }, + { + "name": "Piotr Błażejewicz", + "url": "https://github.com/peterblazejewicz", + "githubUsername": "peterblazejewicz" + }, + { + "name": "Anna Henningsen", + "url": "https://github.com/addaleax", + "githubUsername": "addaleax" + }, + { + "name": "Victor Perin", + "url": "https://github.com/victorperin", + "githubUsername": "victorperin" + }, + { + "name": "Yongsheng Zhang", + "url": "https://github.com/ZYSzys", + "githubUsername": "ZYSzys" + }, + { + "name": "NodeJS Contributors", + "url": "https://github.com/NodeJS", + "githubUsername": "NodeJS" + }, + { + "name": "Linus Unnebäck", + "url": "https://github.com/LinusU", + "githubUsername": "LinusU" + }, + { + "name": "wafuwafu13", + "url": "https://github.com/wafuwafu13", + "githubUsername": "wafuwafu13" + }, + { + "name": "Matteo Collina", + "url": "https://github.com/mcollina", + "githubUsername": "mcollina" + } + ], + "main": "", + "types": "index.d.ts", + "typesVersions": { + "<4.9.0-0": { + "*": [ + "ts4.8/*" + ] + } + }, + "repository": { + 
"type": "git", + "url": "https://github.com/DefinitelyTyped/DefinitelyTyped.git", + "directory": "types/node" + }, + "scripts": {}, + "dependencies": {}, + "typesPublisherContentHash": "1def7217eee83811dbb37996df5c17c6aa82ffd8bb1488a12aa3344fa6db8667", + "typeScriptVersion": "4.1" +} \ No newline at end of file diff --git a/node_modules/@types/node/path.d.ts b/node_modules/@types/node/path.d.ts new file mode 100755 index 0000000..1d33f79 --- /dev/null +++ b/node_modules/@types/node/path.d.ts @@ -0,0 +1,191 @@ +declare module 'path/posix' { + import path = require('path'); + export = path; +} +declare module 'path/win32' { + import path = require('path'); + export = path; +} +/** + * The `path` module provides utilities for working with file and directory paths. + * It can be accessed using: + * + * ```js + * const path = require('path'); + * ``` + * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/path.js) + */ +declare module 'path' { + namespace path { + /** + * A parsed path object generated by path.parse() or consumed by path.format(). + */ + interface ParsedPath { + /** + * The root of the path such as '/' or 'c:\' + */ + root: string; + /** + * The full directory path such as '/home/user/dir' or 'c:\path\dir' + */ + dir: string; + /** + * The file name including extension (if any) such as 'index.html' + */ + base: string; + /** + * The file extension (if any) such as '.html' + */ + ext: string; + /** + * The file name without extension (if any) such as 'index' + */ + name: string; + } + interface FormatInputPathObject { + /** + * The root of the path such as '/' or 'c:\' + */ + root?: string | undefined; + /** + * The full directory path such as '/home/user/dir' or 'c:\path\dir' + */ + dir?: string | undefined; + /** + * The file name including extension (if any) such as 'index.html' + */ + base?: string | undefined; + /** + * The file extension (if any) such as '.html' + */ + ext?: string | undefined; + /** + * The file name without extension (if any) such as 'index' + */ + name?: string | undefined; + } + interface PlatformPath { + /** + * Normalize a string path, reducing '..' and '.' parts. + * When multiple slashes are found, they're replaced by a single one; when the path contains a trailing slash, it is preserved. On Windows backslashes are used. + * + * @param path string path to normalize. + * @throws {TypeError} if `path` is not a string. + */ + normalize(path: string): string; + /** + * Join all arguments together and normalize the resulting path. + * + * @param paths paths to join. + * @throws {TypeError} if any of the path segments is not a string. + */ + join(...paths: string[]): string; + /** + * The right-most parameter is considered {to}. Other parameters are considered an array of {from}. + * + * Starting from leftmost {from} parameter, resolves {to} to an absolute path. + * + * If {to} isn't already absolute, {from} arguments are prepended in right to left order, + * until an absolute path is found. If after using all {from} paths still no absolute path is found, + * the current working directory is used as well. The resulting path is normalized, + * and trailing slashes are removed unless the path gets resolved to the root directory. + * + * @param paths A sequence of paths or path segments. + * @throws {TypeError} if any of the arguments is not a string. + */ + resolve(...paths: string[]): string; + /** + * Determines whether {path} is an absolute path. An absolute path will always resolve to the same location, regardless of the working directory. 
+ * + * If the given {path} is a zero-length string, `false` will be returned. + * + * @param path path to test. + * @throws {TypeError} if `path` is not a string. + */ + isAbsolute(path: string): boolean; + /** + * Solve the relative path from {from} to {to} based on the current working directory. + * At times we have two absolute paths, and we need to derive the relative path from one to the other. This is actually the reverse transform of path.resolve. + * + * @throws {TypeError} if either `from` or `to` is not a string. + */ + relative(from: string, to: string): string; + /** + * Return the directory name of a path. Similar to the Unix dirname command. + * + * @param path the path to evaluate. + * @throws {TypeError} if `path` is not a string. + */ + dirname(path: string): string; + /** + * Return the last portion of a path. Similar to the Unix basename command. + * Often used to extract the file name from a fully qualified path. + * + * @param path the path to evaluate. + * @param suffix optionally, an extension to remove from the result. + * @throws {TypeError} if `path` is not a string or if `ext` is given and is not a string. + */ + basename(path: string, suffix?: string): string; + /** + * Return the extension of the path, from the last '.' to end of string in the last portion of the path. + * If there is no '.' in the last portion of the path or the first character of it is '.', then it returns an empty string. + * + * @param path the path to evaluate. + * @throws {TypeError} if `path` is not a string. + */ + extname(path: string): string; + /** + * The platform-specific file separator. '\\' or '/'. + */ + readonly sep: '\\' | '/'; + /** + * The platform-specific file delimiter. ';' or ':'. + */ + readonly delimiter: ';' | ':'; + /** + * Returns an object from a path string - the opposite of format(). + * + * @param path path to evaluate. + * @throws {TypeError} if `path` is not a string. + */ + parse(path: string): ParsedPath; + /** + * Returns a path string from an object - the opposite of parse(). + * + * @param pathObject path to evaluate. + */ + format(pathObject: FormatInputPathObject): string; + /** + * On Windows systems only, returns an equivalent namespace-prefixed path for the given path. + * If path is not a string, path will be returned without modifications. + * This method is meaningful only on Windows system. + * On POSIX systems, the method is non-operational and always returns path without modifications. + */ + toNamespacedPath(path: string): string; + /** + * Posix specific pathing. + * Same as parent object on posix. + */ + readonly posix: PlatformPath; + /** + * Windows specific pathing. + * Same as parent object on windows + */ + readonly win32: PlatformPath; + } + } + const path: path.PlatformPath; + export = path; +} +declare module 'node:path' { + import path = require('path'); + export = path; +} +declare module 'node:path/posix' { + import path = require('path/posix'); + export = path; +} +declare module 'node:path/win32' { + import path = require('path/win32'); + export = path; +} diff --git a/node_modules/@types/node/perf_hooks.d.ts b/node_modules/@types/node/perf_hooks.d.ts new file mode 100755 index 0000000..5c0b228 --- /dev/null +++ b/node_modules/@types/node/perf_hooks.d.ts @@ -0,0 +1,625 @@ +/** + * This module provides an implementation of a subset of the W3C [Web Performance APIs](https://w3c.github.io/perf-timing-primer/) as well as additional APIs for + * Node.js-specific performance measurements. 
+ * + * Node.js supports the following [Web Performance APIs](https://w3c.github.io/perf-timing-primer/): + * + * * [High Resolution Time](https://www.w3.org/TR/hr-time-2) + * * [Performance Timeline](https://w3c.github.io/performance-timeline/) + * * [User Timing](https://www.w3.org/TR/user-timing/) + * + * ```js + * const { PerformanceObserver, performance } = require('perf_hooks'); + * + * const obs = new PerformanceObserver((items) => { + * console.log(items.getEntries()[0].duration); + * performance.clearMarks(); + * }); + * obs.observe({ type: 'measure' }); + * performance.measure('Start to Now'); + * + * performance.mark('A'); + * doSomeLongRunningProcess(() => { + * performance.measure('A to Now', 'A'); + * + * performance.mark('B'); + * performance.measure('A to B', 'A', 'B'); + * }); + * ``` + * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/perf_hooks.js) + */ +declare module 'perf_hooks' { + import { AsyncResource } from 'node:async_hooks'; + type EntryType = 'node' | 'mark' | 'measure' | 'gc' | 'function' | 'http2' | 'http'; + interface NodeGCPerformanceDetail { + /** + * When `performanceEntry.entryType` is equal to 'gc', `the performance.kind` property identifies + * the type of garbage collection operation that occurred. + * See perf_hooks.constants for valid values. + */ + readonly kind?: number | undefined; + /** + * When `performanceEntry.entryType` is equal to 'gc', the `performance.flags` + * property contains additional information about garbage collection operation. + * See perf_hooks.constants for valid values. + */ + readonly flags?: number | undefined; + } + /** + * @since v8.5.0 + */ + class PerformanceEntry { + protected constructor(); + /** + * The total number of milliseconds elapsed for this entry. This value will not + * be meaningful for all Performance Entry types. + * @since v8.5.0 + */ + readonly duration: number; + /** + * The name of the performance entry. + * @since v8.5.0 + */ + readonly name: string; + /** + * The high resolution millisecond timestamp marking the starting time of the + * Performance Entry. + * @since v8.5.0 + */ + readonly startTime: number; + /** + * The type of the performance entry. It may be one of: + * + * * `'node'` (Node.js only) + * * `'mark'` (available on the Web) + * * `'measure'` (available on the Web) + * * `'gc'` (Node.js only) + * * `'function'` (Node.js only) + * * `'http2'` (Node.js only) + * * `'http'` (Node.js only) + * @since v8.5.0 + */ + readonly entryType: EntryType; + /** + * Additional detail specific to the `entryType`. + * @since v16.0.0 + */ + readonly detail?: NodeGCPerformanceDetail | unknown | undefined; // TODO: Narrow this based on entry type. + toJSON(): any; + } + class PerformanceMark extends PerformanceEntry { + readonly duration: 0; + readonly entryType: 'mark'; + } + class PerformanceMeasure extends PerformanceEntry { + readonly entryType: 'measure'; + } + /** + * _This property is an extension by Node.js. It is not available in Web browsers._ + * + * Provides timing details for Node.js itself. The constructor of this class + * is not exposed to users. + * @since v8.5.0 + */ + class PerformanceNodeTiming extends PerformanceEntry { + /** + * The high resolution millisecond timestamp at which the Node.js process + * completed bootstrapping. If bootstrapping has not yet finished, the property + * has the value of -1. + * @since v8.5.0 + */ + readonly bootstrapComplete: number; + /** + * The high resolution millisecond timestamp at which the Node.js environment was + * initialized. 
+ * @since v8.5.0 + */ + readonly environment: number; + /** + * The high resolution millisecond timestamp of the amount of time the event loop + * has been idle within the event loop's event provider (e.g. `epoll_wait`). This + * does not take CPU usage into consideration. If the event loop has not yet + * started (e.g., in the first tick of the main script), the property has the + * value of 0. + * @since v14.10.0, v12.19.0 + */ + readonly idleTime: number; + /** + * The high resolution millisecond timestamp at which the Node.js event loop + * exited. If the event loop has not yet exited, the property has the value of -1\. + * It can only have a value of not -1 in a handler of the `'exit'` event. + * @since v8.5.0 + */ + readonly loopExit: number; + /** + * The high resolution millisecond timestamp at which the Node.js event loop + * started. If the event loop has not yet started (e.g., in the first tick of the + * main script), the property has the value of -1. + * @since v8.5.0 + */ + readonly loopStart: number; + /** + * The high resolution millisecond timestamp at which the V8 platform was + * initialized. + * @since v8.5.0 + */ + readonly v8Start: number; + } + interface EventLoopUtilization { + idle: number; + active: number; + utilization: number; + } + /** + * @param util1 The result of a previous call to eventLoopUtilization() + * @param util2 The result of a previous call to eventLoopUtilization() prior to util1 + */ + type EventLoopUtilityFunction = (util1?: EventLoopUtilization, util2?: EventLoopUtilization) => EventLoopUtilization; + interface MarkOptions { + /** + * Additional optional detail to include with the mark. + */ + detail?: unknown | undefined; + /** + * An optional timestamp to be used as the mark time. + * @default `performance.now()`. + */ + startTime?: number | undefined; + } + interface MeasureOptions { + /** + * Additional optional detail to include with the mark. + */ + detail?: unknown | undefined; + /** + * Duration between start and end times. + */ + duration?: number | undefined; + /** + * Timestamp to be used as the end time, or a string identifying a previously recorded mark. + */ + end?: number | string | undefined; + /** + * Timestamp to be used as the start time, or a string identifying a previously recorded mark. + */ + start?: number | string | undefined; + } + interface TimerifyOptions { + /** + * A histogram object created using + * `perf_hooks.createHistogram()` that will record runtime durations in + * nanoseconds. + */ + histogram?: RecordableHistogram | undefined; + } + interface Performance { + /** + * If name is not provided, removes all PerformanceMark objects from the Performance Timeline. + * If name is provided, removes only the named mark. + * @param name + */ + clearMarks(name?: string): void; + /** + * If name is not provided, removes all PerformanceMeasure objects from the Performance Timeline. + * If name is provided, removes only the named measure. + * @param name + * @since v16.7.0 + */ + clearMeasures(name?: string): void; + /** + * Returns a list of `PerformanceEntry` objects in chronological order with respect to `performanceEntry.startTime`. + * If you are only interested in performance entries of certain types or that have certain names, see + * `performance.getEntriesByType()` and `performance.getEntriesByName()`. 
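A hedged sketch of the options-object forms described by `MarkOptions` and `MeasureOptions` above (assumes Node.js 16+, where `measure()` accepts an options object; the names such as `db-query` are illustrative only):

```js
// Sketch of MarkOptions / MeasureOptions usage; marks and details are illustrative.
const { performance } = require('perf_hooks');

performance.mark('db-query-start', { detail: { table: 'users' } });
// ... do the work being measured ...
const m = performance.measure('db-query', {
  start: 'db-query-start',   // a previously recorded mark
  end: performance.now(),    // or a numeric timestamp
  detail: { rows: 42 },
});
console.log(m.duration, m.detail);
```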
+ * @since v16.7.0 + */ + getEntries(): PerformanceEntry[]; + /** + * Returns a list of `PerformanceEntry` objects in chronological order with respect to `performanceEntry.startTime` + * whose `performanceEntry.name` is equal to `name`, and optionally, whose `performanceEntry.entryType` is equal to `type`. + * @param name + * @param type + * @since v16.7.0 + */ + getEntriesByName(name: string, type?: EntryType): PerformanceEntry[]; + /** + * Returns a list of `PerformanceEntry` objects in chronological order with respect to `performanceEntry.startTime` + * whose `performanceEntry.entryType` is equal to `type`. + * @param type + * @since v16.7.0 + */ + getEntriesByType(type: EntryType): PerformanceEntry[]; + /** + * Creates a new PerformanceMark entry in the Performance Timeline. + * A PerformanceMark is a subclass of PerformanceEntry whose performanceEntry.entryType is always 'mark', + * and whose performanceEntry.duration is always 0. + * Performance marks are used to mark specific significant moments in the Performance Timeline. + * @param name + * @return The PerformanceMark entry that was created + */ + mark(name?: string, options?: MarkOptions): PerformanceMark; + /** + * Creates a new PerformanceMeasure entry in the Performance Timeline. + * A PerformanceMeasure is a subclass of PerformanceEntry whose performanceEntry.entryType is always 'measure', + * and whose performanceEntry.duration measures the number of milliseconds elapsed since startMark and endMark. + * + * The startMark argument may identify any existing PerformanceMark in the the Performance Timeline, or may identify + * any of the timestamp properties provided by the PerformanceNodeTiming class. If the named startMark does not exist, + * then startMark is set to timeOrigin by default. + * + * The endMark argument must identify any existing PerformanceMark in the the Performance Timeline or any of the timestamp + * properties provided by the PerformanceNodeTiming class. If the named endMark does not exist, an error will be thrown. + * @param name + * @param startMark + * @param endMark + * @return The PerformanceMeasure entry that was created + */ + measure(name: string, startMark?: string, endMark?: string): PerformanceMeasure; + measure(name: string, options: MeasureOptions): PerformanceMeasure; + /** + * An instance of the PerformanceNodeTiming class that provides performance metrics for specific Node.js operational milestones. + */ + readonly nodeTiming: PerformanceNodeTiming; + /** + * @return the current high resolution millisecond timestamp + */ + now(): number; + /** + * The timeOrigin specifies the high resolution millisecond timestamp from which all performance metric durations are measured. + */ + readonly timeOrigin: number; + /** + * Wraps a function within a new function that measures the running time of the wrapped function. + * A PerformanceObserver must be subscribed to the 'function' event type in order for the timing details to be accessed. + * @param fn + */ + timerify any>(fn: T, options?: TimerifyOptions): T; + /** + * eventLoopUtilization is similar to CPU utilization except that it is calculated using high precision wall-clock time. + * It represents the percentage of time the event loop has spent outside the event loop's event provider (e.g. epoll_wait). + * No other CPU idle time is taken into consideration. 
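A minimal sketch of `performance.timerify()` as documented above: the timing details only become visible once a `PerformanceObserver` is subscribed to `'function'` entries.

```js
// Sketch: wrap a function with timerify() and observe its duration.
const { performance, PerformanceObserver } = require('perf_hooks');

function slowSum(n) {
  let s = 0;
  for (let i = 0; i < n; i++) s += i;
  return s;
}

const timedSum = performance.timerify(slowSum);

const obs = new PerformanceObserver((list) => {
  console.log(list.getEntries()[0].duration); // ms spent inside slowSum
  obs.disconnect();
});
obs.observe({ entryTypes: ['function'] });

timedSum(1e6);
```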
+ */ + eventLoopUtilization: EventLoopUtilityFunction; + } + interface PerformanceObserverEntryList { + /** + * Returns a list of `PerformanceEntry` objects in chronological order + * with respect to `performanceEntry.startTime`. + * + * ```js + * const { + * performance, + * PerformanceObserver + * } = require('perf_hooks'); + * + * const obs = new PerformanceObserver((perfObserverList, observer) => { + * console.log(perfObserverList.getEntries()); + * + * * [ + * * PerformanceEntry { + * * name: 'test', + * * entryType: 'mark', + * * startTime: 81.465639, + * * duration: 0 + * * }, + * * PerformanceEntry { + * * name: 'meow', + * * entryType: 'mark', + * * startTime: 81.860064, + * * duration: 0 + * * } + * * ] + * + * + * performance.clearMarks(); + * performance.clearMeasures(); + * observer.disconnect(); + * }); + * obs.observe({ type: 'mark' }); + * + * performance.mark('test'); + * performance.mark('meow'); + * ``` + * @since v8.5.0 + */ + getEntries(): PerformanceEntry[]; + /** + * Returns a list of `PerformanceEntry` objects in chronological order + * with respect to `performanceEntry.startTime` whose `performanceEntry.name` is + * equal to `name`, and optionally, whose `performanceEntry.entryType` is equal to`type`. + * + * ```js + * const { + * performance, + * PerformanceObserver + * } = require('perf_hooks'); + * + * const obs = new PerformanceObserver((perfObserverList, observer) => { + * console.log(perfObserverList.getEntriesByName('meow')); + * + * * [ + * * PerformanceEntry { + * * name: 'meow', + * * entryType: 'mark', + * * startTime: 98.545991, + * * duration: 0 + * * } + * * ] + * + * console.log(perfObserverList.getEntriesByName('nope')); // [] + * + * console.log(perfObserverList.getEntriesByName('test', 'mark')); + * + * * [ + * * PerformanceEntry { + * * name: 'test', + * * entryType: 'mark', + * * startTime: 63.518931, + * * duration: 0 + * * } + * * ] + * + * console.log(perfObserverList.getEntriesByName('test', 'measure')); // [] + * + * performance.clearMarks(); + * performance.clearMeasures(); + * observer.disconnect(); + * }); + * obs.observe({ entryTypes: ['mark', 'measure'] }); + * + * performance.mark('test'); + * performance.mark('meow'); + * ``` + * @since v8.5.0 + */ + getEntriesByName(name: string, type?: EntryType): PerformanceEntry[]; + /** + * Returns a list of `PerformanceEntry` objects in chronological order + * with respect to `performanceEntry.startTime` whose `performanceEntry.entryType`is equal to `type`. 
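A small sketch of the `eventLoopUtilization()` delta pattern described above (passing an earlier result to get the utilization since that point):

```js
// Sketch of EventLoopUtilization deltas; the 100 ms timeout is illustrative.
const { performance } = require('perf_hooks');
const { eventLoopUtilization } = performance;

const before = eventLoopUtilization();
setTimeout(() => {
  const delta = eventLoopUtilization(before); // utilization since `before`
  console.log(delta.idle, delta.active, delta.utilization);
}, 100);
```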
+ * + * ```js + * const { + * performance, + * PerformanceObserver + * } = require('perf_hooks'); + * + * const obs = new PerformanceObserver((perfObserverList, observer) => { + * console.log(perfObserverList.getEntriesByType('mark')); + * + * * [ + * * PerformanceEntry { + * * name: 'test', + * * entryType: 'mark', + * * startTime: 55.897834, + * * duration: 0 + * * }, + * * PerformanceEntry { + * * name: 'meow', + * * entryType: 'mark', + * * startTime: 56.350146, + * * duration: 0 + * * } + * * ] + * + * performance.clearMarks(); + * performance.clearMeasures(); + * observer.disconnect(); + * }); + * obs.observe({ type: 'mark' }); + * + * performance.mark('test'); + * performance.mark('meow'); + * ``` + * @since v8.5.0 + */ + getEntriesByType(type: EntryType): PerformanceEntry[]; + } + type PerformanceObserverCallback = (list: PerformanceObserverEntryList, observer: PerformanceObserver) => void; + class PerformanceObserver extends AsyncResource { + constructor(callback: PerformanceObserverCallback); + /** + * Disconnects the `PerformanceObserver` instance from all notifications. + * @since v8.5.0 + */ + disconnect(): void; + /** + * Subscribes the `PerformanceObserver` instance to notifications of new `PerformanceEntry` instances identified either by `options.entryTypes`or `options.type`: + * + * ```js + * const { + * performance, + * PerformanceObserver + * } = require('perf_hooks'); + * + * const obs = new PerformanceObserver((list, observer) => { + * // Called once asynchronously. `list` contains three items. + * }); + * obs.observe({ type: 'mark' }); + * + * for (let n = 0; n < 3; n++) + * performance.mark(`test${n}`); + * ``` + * @since v8.5.0 + */ + observe( + options: + | { + entryTypes: ReadonlyArray; + buffered?: boolean | undefined; + } + | { + type: EntryType; + buffered?: boolean | undefined; + } + ): void; + } + namespace constants { + const NODE_PERFORMANCE_GC_MAJOR: number; + const NODE_PERFORMANCE_GC_MINOR: number; + const NODE_PERFORMANCE_GC_INCREMENTAL: number; + const NODE_PERFORMANCE_GC_WEAKCB: number; + const NODE_PERFORMANCE_GC_FLAGS_NO: number; + const NODE_PERFORMANCE_GC_FLAGS_CONSTRUCT_RETAINED: number; + const NODE_PERFORMANCE_GC_FLAGS_FORCED: number; + const NODE_PERFORMANCE_GC_FLAGS_SYNCHRONOUS_PHANTOM_PROCESSING: number; + const NODE_PERFORMANCE_GC_FLAGS_ALL_AVAILABLE_GARBAGE: number; + const NODE_PERFORMANCE_GC_FLAGS_ALL_EXTERNAL_MEMORY: number; + const NODE_PERFORMANCE_GC_FLAGS_SCHEDULE_IDLE: number; + } + const performance: Performance; + interface EventLoopMonitorOptions { + /** + * The sampling rate in milliseconds. + * Must be greater than zero. + * @default 10 + */ + resolution?: number | undefined; + } + interface Histogram { + /** + * Returns a `Map` object detailing the accumulated percentile distribution. + * @since v11.10.0 + */ + readonly percentiles: Map; + /** + * The number of times the event loop delay exceeded the maximum 1 hour event + * loop delay threshold. + * @since v11.10.0 + */ + readonly exceeds: number; + /** + * The minimum recorded event loop delay. + * @since v11.10.0 + */ + readonly min: number; + /** + * The maximum recorded event loop delay. + * @since v11.10.0 + */ + readonly max: number; + /** + * The mean of the recorded event loop delays. + * @since v11.10.0 + */ + readonly mean: number; + /** + * The standard deviation of the recorded event loop delays. + * @since v11.10.0 + */ + readonly stddev: number; + /** + * Resets the collected histogram data. 
+ * @since v11.10.0 + */ + reset(): void; + /** + * Returns the value at the given percentile. + * @since v11.10.0 + * @param percentile A percentile value in the range (0, 100]. + */ + percentile(percentile: number): number; + } + interface IntervalHistogram extends Histogram { + /** + * Enables the update interval timer. Returns `true` if the timer was + * started, `false` if it was already started. + * @since v11.10.0 + */ + enable(): boolean; + /** + * Disables the update interval timer. Returns `true` if the timer was + * stopped, `false` if it was already stopped. + * @since v11.10.0 + */ + disable(): boolean; + } + interface RecordableHistogram extends Histogram { + /** + * @since v15.9.0, v14.18.0 + * @param val The amount to record in the histogram. + */ + record(val: number | bigint): void; + /** + * Calculates the amount of time (in nanoseconds) that has passed since the + * previous call to `recordDelta()` and records that amount in the histogram. + * + * ## Examples + * @since v15.9.0, v14.18.0 + */ + recordDelta(): void; + /** + * Adds the values from other to this histogram. + * @since v17.4.0, v16.14.0 + * @param other Recordable Histogram to combine with + */ + add(other: RecordableHistogram): void; + } + /** + * _This property is an extension by Node.js. It is not available in Web browsers._ + * + * Creates an `IntervalHistogram` object that samples and reports the event loop + * delay over time. The delays will be reported in nanoseconds. + * + * Using a timer to detect approximate event loop delay works because the + * execution of timers is tied specifically to the lifecycle of the libuv + * event loop. That is, a delay in the loop will cause a delay in the execution + * of the timer, and those delays are specifically what this API is intended to + * detect. + * + * ```js + * const { monitorEventLoopDelay } = require('perf_hooks'); + * const h = monitorEventLoopDelay({ resolution: 20 }); + * h.enable(); + * // Do something. + * h.disable(); + * console.log(h.min); + * console.log(h.max); + * console.log(h.mean); + * console.log(h.stddev); + * console.log(h.percentiles); + * console.log(h.percentile(50)); + * console.log(h.percentile(99)); + * ``` + * @since v11.10.0 + */ + function monitorEventLoopDelay(options?: EventLoopMonitorOptions): IntervalHistogram; + interface CreateHistogramOptions { + /** + * The minimum recordable value. Must be an integer value greater than 0. + * @default 1 + */ + min?: number | bigint | undefined; + /** + * The maximum recordable value. Must be an integer value greater than min. + * @default Number.MAX_SAFE_INTEGER + */ + max?: number | bigint | undefined; + /** + * The number of accuracy digits. Must be a number between 1 and 5. + * @default 3 + */ + figures?: number | undefined; + } + /** + * Returns a `RecordableHistogram`. + * @since v15.9.0, v14.18.0 + */ + function createHistogram(options?: CreateHistogramOptions): RecordableHistogram; + + import { performance as _performance } from 'perf_hooks'; + global { + /** + * `performance` is a global reference for `require('perf_hooks').performance` + * https://nodejs.org/api/globals.html#performance + * @since v16.0.0 + */ + var performance: typeof globalThis extends { + onmessage: any; + performance: infer T; + } + ? 
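A hedged sketch of `createHistogram()` and the `RecordableHistogram`/`Histogram` surface declared above (assumes Node.js v15.9+/v14.18+; the recorded values are arbitrary):

```js
// Sketch: build a RecordableHistogram, record a few values, read statistics.
const { createHistogram } = require('perf_hooks');

const h = createHistogram();   // defaults: min 1, max Number.MAX_SAFE_INTEGER, 3 figures
h.record(120);
h.record(250);
h.record(90);
console.log(h.min, h.max, h.mean, h.stddev);
console.log(h.percentile(99)); // value at the 99th percentile
console.log(h.percentiles);    // Map of the accumulated percentile distribution
```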
T + : typeof _performance; + } +} +declare module 'node:perf_hooks' { + export * from 'perf_hooks'; +} diff --git a/node_modules/@types/node/process.d.ts b/node_modules/@types/node/process.d.ts new file mode 100755 index 0000000..12148f9 --- /dev/null +++ b/node_modules/@types/node/process.d.ts @@ -0,0 +1,1482 @@ +declare module 'process' { + import * as tty from 'node:tty'; + import { Worker } from 'node:worker_threads'; + global { + var process: NodeJS.Process; + namespace NodeJS { + // this namespace merge is here because these are specifically used + // as the type for process.stdin, process.stdout, and process.stderr. + // they can't live in tty.d.ts because we need to disambiguate the imported name. + interface ReadStream extends tty.ReadStream {} + interface WriteStream extends tty.WriteStream {} + interface MemoryUsageFn { + /** + * The `process.memoryUsage()` method iterate over each page to gather informations about memory + * usage which can be slow depending on the program memory allocations. + */ + (): MemoryUsage; + /** + * method returns an integer representing the Resident Set Size (RSS) in bytes. + */ + rss(): number; + } + interface MemoryUsage { + rss: number; + heapTotal: number; + heapUsed: number; + external: number; + arrayBuffers: number; + } + interface CpuUsage { + user: number; + system: number; + } + interface ProcessRelease { + name: string; + sourceUrl?: string | undefined; + headersUrl?: string | undefined; + libUrl?: string | undefined; + lts?: string | undefined; + } + interface ProcessVersions extends Dict { + http_parser: string; + node: string; + v8: string; + ares: string; + uv: string; + zlib: string; + modules: string; + openssl: string; + } + type Platform = 'aix' | 'android' | 'darwin' | 'freebsd' | 'haiku' | 'linux' | 'openbsd' | 'sunos' | 'win32' | 'cygwin' | 'netbsd'; + type Architecture = 'arm' | 'arm64' | 'ia32' | 'mips' | 'mipsel' | 'ppc' | 'ppc64' | 's390' | 's390x' | 'x64'; + type Signals = + | 'SIGABRT' + | 'SIGALRM' + | 'SIGBUS' + | 'SIGCHLD' + | 'SIGCONT' + | 'SIGFPE' + | 'SIGHUP' + | 'SIGILL' + | 'SIGINT' + | 'SIGIO' + | 'SIGIOT' + | 'SIGKILL' + | 'SIGPIPE' + | 'SIGPOLL' + | 'SIGPROF' + | 'SIGPWR' + | 'SIGQUIT' + | 'SIGSEGV' + | 'SIGSTKFLT' + | 'SIGSTOP' + | 'SIGSYS' + | 'SIGTERM' + | 'SIGTRAP' + | 'SIGTSTP' + | 'SIGTTIN' + | 'SIGTTOU' + | 'SIGUNUSED' + | 'SIGURG' + | 'SIGUSR1' + | 'SIGUSR2' + | 'SIGVTALRM' + | 'SIGWINCH' + | 'SIGXCPU' + | 'SIGXFSZ' + | 'SIGBREAK' + | 'SIGLOST' + | 'SIGINFO'; + type UncaughtExceptionOrigin = 'uncaughtException' | 'unhandledRejection'; + type MultipleResolveType = 'resolve' | 'reject'; + type BeforeExitListener = (code: number) => void; + type DisconnectListener = () => void; + type ExitListener = (code: number) => void; + type RejectionHandledListener = (promise: Promise) => void; + type UncaughtExceptionListener = (error: Error, origin: UncaughtExceptionOrigin) => void; + /** + * Most of the time the unhandledRejection will be an Error, but this should not be relied upon + * as *anything* can be thrown/rejected, it is therefore unsafe to assume that the value is an Error. 
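A short sketch of the `MemoryUsageFn` shape declared above: the full snapshot versus the cheaper `rss()` helper (the `rss()` form assumes a recent Node.js, v14.18+/v15.6+):

```js
// Sketch: full memory snapshot vs. resident-set-size only.
const used = process.memoryUsage();   // { rss, heapTotal, heapUsed, external, arrayBuffers }
console.log(`heap used: ${(used.heapUsed / 1024 / 1024).toFixed(1)} MB`);
console.log(`rss only:  ${(process.memoryUsage.rss() / 1024 / 1024).toFixed(1)} MB`);
```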
+ */ + type UnhandledRejectionListener = (reason: unknown, promise: Promise) => void; + type WarningListener = (warning: Error) => void; + type MessageListener = (message: unknown, sendHandle: unknown) => void; + type SignalsListener = (signal: Signals) => void; + type MultipleResolveListener = (type: MultipleResolveType, promise: Promise, value: unknown) => void; + type WorkerListener = (worker: Worker) => void; + interface Socket extends ReadWriteStream { + isTTY?: true | undefined; + } + // Alias for compatibility + interface ProcessEnv extends Dict { + /** + * Can be used to change the default timezone at runtime + */ + TZ?: string; + } + interface HRTime { + (time?: [number, number]): [number, number]; + bigint(): bigint; + } + interface ProcessReport { + /** + * Directory where the report is written. + * working directory of the Node.js process. + * @default '' indicating that reports are written to the current + */ + directory: string; + /** + * Filename where the report is written. + * The default value is the empty string. + * @default '' the output filename will be comprised of a timestamp, + * PID, and sequence number. + */ + filename: string; + /** + * Returns a JSON-formatted diagnostic report for the running process. + * The report's JavaScript stack trace is taken from err, if present. + */ + getReport(err?: Error): string; + /** + * If true, a diagnostic report is generated on fatal errors, + * such as out of memory errors or failed C++ assertions. + * @default false + */ + reportOnFatalError: boolean; + /** + * If true, a diagnostic report is generated when the process + * receives the signal specified by process.report.signal. + * @default false + */ + reportOnSignal: boolean; + /** + * If true, a diagnostic report is generated on uncaught exception. + * @default false + */ + reportOnUncaughtException: boolean; + /** + * The signal used to trigger the creation of a diagnostic report. + * @default 'SIGUSR2' + */ + signal: Signals; + /** + * Writes a diagnostic report to a file. If filename is not provided, the default filename + * includes the date, time, PID, and a sequence number. + * The report's JavaScript stack trace is taken from err, if present. + * + * @param fileName Name of the file where the report is written. + * This should be a relative path, that will be appended to the directory specified in + * `process.report.directory`, or the current working directory of the Node.js process, + * if unspecified. + * @param error A custom error used for reporting the JavaScript stack. + * @return Filename of the generated report. + */ + writeReport(fileName?: string): string; + writeReport(error?: Error): string; + writeReport(fileName?: string, err?: Error): string; + } + interface ResourceUsage { + fsRead: number; + fsWrite: number; + involuntaryContextSwitches: number; + ipcReceived: number; + ipcSent: number; + majorPageFault: number; + maxRSS: number; + minorPageFault: number; + sharedMemorySize: number; + signalsCount: number; + swappedOut: number; + systemCPUTime: number; + unsharedDataSize: number; + unsharedStackSize: number; + userCPUTime: number; + voluntaryContextSwitches: number; + } + interface EmitWarningOptions { + /** + * When `warning` is a `string`, `type` is the name to use for the _type_ of warning being emitted. + * + * @default 'Warning' + */ + type?: string | undefined; + /** + * A unique identifier for the warning instance being emitted. 
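A hedged sketch of the `ProcessReport` surface described above. `process.report` may be undefined on builds without report support, so it is guarded; the `/tmp` directory is illustrative only:

```js
// Sketch: configure and write a diagnostic report.
if (process.report) {
  process.report.directory = '/tmp';          // illustrative output directory
  process.report.reportOnSignal = true;       // also report when process.report.signal arrives
  const file = process.report.writeReport();  // returns the generated report's filename
  console.log(`diagnostic report written to ${file}`);
}
```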
+ */ + code?: string | undefined; + /** + * When `warning` is a `string`, `ctor` is an optional function used to limit the generated stack trace. + * + * @default process.emitWarning + */ + ctor?: Function | undefined; + /** + * Additional text to include with the error. + */ + detail?: string | undefined; + } + interface ProcessConfig { + readonly target_defaults: { + readonly cflags: any[]; + readonly default_configuration: string; + readonly defines: string[]; + readonly include_dirs: string[]; + readonly libraries: string[]; + }; + readonly variables: { + readonly clang: number; + readonly host_arch: string; + readonly node_install_npm: boolean; + readonly node_install_waf: boolean; + readonly node_prefix: string; + readonly node_shared_openssl: boolean; + readonly node_shared_v8: boolean; + readonly node_shared_zlib: boolean; + readonly node_use_dtrace: boolean; + readonly node_use_etw: boolean; + readonly node_use_openssl: boolean; + readonly target_arch: string; + readonly v8_no_strict_aliasing: number; + readonly v8_use_snapshot: boolean; + readonly visibility: string; + }; + } + interface Process extends EventEmitter { + /** + * The `process.stdout` property returns a stream connected to`stdout` (fd `1`). It is a `net.Socket` (which is a `Duplex` stream) unless fd `1` refers to a file, in which case it is + * a `Writable` stream. + * + * For example, to copy `process.stdin` to `process.stdout`: + * + * ```js + * import { stdin, stdout } from 'process'; + * + * stdin.pipe(stdout); + * ``` + * + * `process.stdout` differs from other Node.js streams in important ways. See `note on process I/O` for more information. + */ + stdout: WriteStream & { + fd: 1; + }; + /** + * The `process.stderr` property returns a stream connected to`stderr` (fd `2`). It is a `net.Socket` (which is a `Duplex` stream) unless fd `2` refers to a file, in which case it is + * a `Writable` stream. + * + * `process.stderr` differs from other Node.js streams in important ways. See `note on process I/O` for more information. + */ + stderr: WriteStream & { + fd: 2; + }; + /** + * The `process.stdin` property returns a stream connected to`stdin` (fd `0`). It is a `net.Socket` (which is a `Duplex` stream) unless fd `0` refers to a file, in which case it is + * a `Readable` stream. + * + * For details of how to read from `stdin` see `readable.read()`. + * + * As a `Duplex` stream, `process.stdin` can also be used in "old" mode that + * is compatible with scripts written for Node.js prior to v0.10\. + * For more information see `Stream compatibility`. + * + * In "old" streams mode the `stdin` stream is paused by default, so one + * must call `process.stdin.resume()` to read from it. Note also that calling`process.stdin.resume()` itself would switch stream to "old" mode. + */ + stdin: ReadStream & { + fd: 0; + }; + openStdin(): Socket; + /** + * The `process.argv` property returns an array containing the command-line + * arguments passed when the Node.js process was launched. The first element will + * be {@link execPath}. See `process.argv0` if access to the original value + * of `argv[0]` is needed. The second element will be the path to the JavaScript + * file being executed. The remaining elements will be any additional command-line + * arguments. 
+ * + * For example, assuming the following script for `process-args.js`: + * + * ```js + * import { argv } from 'process'; + * + * // print process.argv + * argv.forEach((val, index) => { + * console.log(`${index}: ${val}`); + * }); + * ``` + * + * Launching the Node.js process as: + * + * ```console + * $ node process-args.js one two=three four + * ``` + * + * Would generate the output: + * + * ```text + * 0: /usr/local/bin/node + * 1: /Users/mjr/work/node/process-args.js + * 2: one + * 3: two=three + * 4: four + * ``` + * @since v0.1.27 + */ + argv: string[]; + /** + * The `process.argv0` property stores a read-only copy of the original value of`argv[0]` passed when Node.js starts. + * + * ```console + * $ bash -c 'exec -a customArgv0 ./node' + * > process.argv[0] + * '/Volumes/code/external/node/out/Release/node' + * > process.argv0 + * 'customArgv0' + * ``` + * @since v6.4.0 + */ + argv0: string; + /** + * The `process.execArgv` property returns the set of Node.js-specific command-line + * options passed when the Node.js process was launched. These options do not + * appear in the array returned by the {@link argv} property, and do not + * include the Node.js executable, the name of the script, or any options following + * the script name. These options are useful in order to spawn child processes with + * the same execution environment as the parent. + * + * ```console + * $ node --harmony script.js --version + * ``` + * + * Results in `process.execArgv`: + * + * ```js + * ['--harmony'] + * ``` + * + * And `process.argv`: + * + * ```js + * ['/usr/local/bin/node', 'script.js', '--version'] + * ``` + * + * Refer to `Worker constructor` for the detailed behavior of worker + * threads with this property. + * @since v0.7.7 + */ + execArgv: string[]; + /** + * The `process.execPath` property returns the absolute pathname of the executable + * that started the Node.js process. Symbolic links, if any, are resolved. + * + * ```js + * '/usr/local/bin/node' + * ``` + * @since v0.1.100 + */ + execPath: string; + /** + * The `process.abort()` method causes the Node.js process to exit immediately and + * generate a core file. + * + * This feature is not available in `Worker` threads. + * @since v0.7.0 + */ + abort(): never; + /** + * The `process.chdir()` method changes the current working directory of the + * Node.js process or throws an exception if doing so fails (for instance, if + * the specified `directory` does not exist). + * + * ```js + * import { chdir, cwd } from 'process'; + * + * console.log(`Starting directory: ${cwd()}`); + * try { + * chdir('/tmp'); + * console.log(`New directory: ${cwd()}`); + * } catch (err) { + * console.error(`chdir: ${err}`); + * } + * ``` + * + * This feature is not available in `Worker` threads. + * @since v0.1.17 + */ + chdir(directory: string): void; + /** + * The `process.cwd()` method returns the current working directory of the Node.js + * process. + * + * ```js + * import { cwd } from 'process'; + * + * console.log(`Current directory: ${cwd()}`); + * ``` + * @since v0.1.8 + */ + cwd(): string; + /** + * The port used by the Node.js debugger when enabled. + * + * ```js + * import process from 'process'; + * + * process.debugPort = 5858; + * ``` + * @since v0.7.2 + */ + debugPort: number; + /** + * The `process.emitWarning()` method can be used to emit custom or application + * specific process warnings. These can be listened for by adding a handler to the `'warning'` event. 
+ * + * ```js + * import { emitWarning } from 'process'; + * + * // Emit a warning with a code and additional detail. + * emitWarning('Something happened!', { + * code: 'MY_WARNING', + * detail: 'This is some additional information' + * }); + * // Emits: + * // (node:56338) [MY_WARNING] Warning: Something happened! + * // This is some additional information + * ``` + * + * In this example, an `Error` object is generated internally by`process.emitWarning()` and passed through to the `'warning'` handler. + * + * ```js + * import process from 'process'; + * + * process.on('warning', (warning) => { + * console.warn(warning.name); // 'Warning' + * console.warn(warning.message); // 'Something happened!' + * console.warn(warning.code); // 'MY_WARNING' + * console.warn(warning.stack); // Stack trace + * console.warn(warning.detail); // 'This is some additional information' + * }); + * ``` + * + * If `warning` is passed as an `Error` object, the `options` argument is ignored. + * @since v8.0.0 + * @param warning The warning to emit. + */ + emitWarning(warning: string | Error, ctor?: Function): void; + emitWarning(warning: string | Error, type?: string, ctor?: Function): void; + emitWarning(warning: string | Error, type?: string, code?: string, ctor?: Function): void; + emitWarning(warning: string | Error, options?: EmitWarningOptions): void; + /** + * The `process.env` property returns an object containing the user environment. + * See [`environ(7)`](http://man7.org/linux/man-pages/man7/environ.7.html). + * + * An example of this object looks like: + * + * ```js + * { + * TERM: 'xterm-256color', + * SHELL: '/usr/local/bin/bash', + * USER: 'maciej', + * PATH: '~/.bin/:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin', + * PWD: '/Users/maciej', + * EDITOR: 'vim', + * SHLVL: '1', + * HOME: '/Users/maciej', + * LOGNAME: 'maciej', + * _: '/usr/local/bin/node' + * } + * ``` + * + * It is possible to modify this object, but such modifications will not be + * reflected outside the Node.js process, or (unless explicitly requested) + * to other `Worker` threads. + * In other words, the following example would not work: + * + * ```console + * $ node -e 'process.env.foo = "bar"' && echo $foo + * ``` + * + * While the following will: + * + * ```js + * import { env } from 'process'; + * + * env.foo = 'bar'; + * console.log(env.foo); + * ``` + * + * Assigning a property on `process.env` will implicitly convert the value + * to a string. **This behavior is deprecated.** Future versions of Node.js may + * throw an error when the value is not a string, number, or boolean. + * + * ```js + * import { env } from 'process'; + * + * env.test = null; + * console.log(env.test); + * // => 'null' + * env.test = undefined; + * console.log(env.test); + * // => 'undefined' + * ``` + * + * Use `delete` to delete a property from `process.env`. + * + * ```js + * import { env } from 'process'; + * + * env.TEST = 1; + * delete env.TEST; + * console.log(env.TEST); + * // => undefined + * ``` + * + * On Windows operating systems, environment variables are case-insensitive. + * + * ```js + * import { env } from 'process'; + * + * env.TEST = 1; + * console.log(env.test); + * // => 1 + * ``` + * + * Unless explicitly specified when creating a `Worker` instance, + * each `Worker` thread has its own copy of `process.env`, based on its + * parent thread’s `process.env`, or whatever was specified as the `env` option + * to the `Worker` constructor. 
Changes to `process.env` will not be visible + * across `Worker` threads, and only the main thread can make changes that + * are visible to the operating system or to native add-ons. + * @since v0.1.27 + */ + env: ProcessEnv; + /** + * The `process.exit()` method instructs Node.js to terminate the process + * synchronously with an exit status of `code`. If `code` is omitted, exit uses + * either the 'success' code `0` or the value of `process.exitCode` if it has been + * set. Node.js will not terminate until all the `'exit'` event listeners are + * called. + * + * To exit with a 'failure' code: + * + * ```js + * import { exit } from 'process'; + * + * exit(1); + * ``` + * + * The shell that executed Node.js should see the exit code as `1`. + * + * Calling `process.exit()` will force the process to exit as quickly as possible + * even if there are still asynchronous operations pending that have not yet + * completed fully, including I/O operations to `process.stdout` and`process.stderr`. + * + * In most situations, it is not actually necessary to call `process.exit()`explicitly. The Node.js process will exit on its own _if there is no additional_ + * _work pending_ in the event loop. The `process.exitCode` property can be set to + * tell the process which exit code to use when the process exits gracefully. + * + * For instance, the following example illustrates a _misuse_ of the`process.exit()` method that could lead to data printed to stdout being + * truncated and lost: + * + * ```js + * import { exit } from 'process'; + * + * // This is an example of what *not* to do: + * if (someConditionNotMet()) { + * printUsageToStdout(); + * exit(1); + * } + * ``` + * + * The reason this is problematic is because writes to `process.stdout` in Node.js + * are sometimes _asynchronous_ and may occur over multiple ticks of the Node.js + * event loop. Calling `process.exit()`, however, forces the process to exit _before_ those additional writes to `stdout` can be performed. + * + * Rather than calling `process.exit()` directly, the code _should_ set the`process.exitCode` and allow the process to exit naturally by avoiding + * scheduling any additional work for the event loop: + * + * ```js + * import process from 'process'; + * + * // How to properly set the exit code while letting + * // the process exit gracefully. + * if (someConditionNotMet()) { + * printUsageToStdout(); + * process.exitCode = 1; + * } + * ``` + * + * If it is necessary to terminate the Node.js process due to an error condition, + * throwing an _uncaught_ error and allowing the process to terminate accordingly + * is safer than calling `process.exit()`. + * + * In `Worker` threads, this function stops the current thread rather + * than the current process. + * @since v0.1.13 + * @param [code=0] The exit code. + */ + exit(code?: number): never; + /** + * A number which will be the process exit code, when the process either + * exits gracefully, or is exited via {@link exit} without specifying + * a code. + * + * Specifying a code to {@link exit} will override any + * previous setting of `process.exitCode`. + * @since v0.11.8 + */ + exitCode?: number | undefined; + /** + * The `process.getgid()` method returns the numerical group identity of the + * process. (See [`getgid(2)`](http://man7.org/linux/man-pages/man2/getgid.2.html).) 
+ * + * ```js + * import process from 'process'; + * + * if (process.getgid) { + * console.log(`Current gid: ${process.getgid()}`); + * } + * ``` + * + * This function is only available on POSIX platforms (i.e. not Windows or + * Android). + * @since v0.1.31 + */ + getgid?: () => number; + /** + * The `process.setgid()` method sets the group identity of the process. (See [`setgid(2)`](http://man7.org/linux/man-pages/man2/setgid.2.html).) The `id` can be passed as either a + * numeric ID or a group name + * string. If a group name is specified, this method blocks while resolving the + * associated numeric ID. + * + * ```js + * import process from 'process'; + * + * if (process.getgid && process.setgid) { + * console.log(`Current gid: ${process.getgid()}`); + * try { + * process.setgid(501); + * console.log(`New gid: ${process.getgid()}`); + * } catch (err) { + * console.log(`Failed to set gid: ${err}`); + * } + * } + * ``` + * + * This function is only available on POSIX platforms (i.e. not Windows or + * Android). + * This feature is not available in `Worker` threads. + * @since v0.1.31 + * @param id The group name or ID + */ + setgid?: (id: number | string) => void; + /** + * The `process.getuid()` method returns the numeric user identity of the process. + * (See [`getuid(2)`](http://man7.org/linux/man-pages/man2/getuid.2.html).) + * + * ```js + * import process from 'process'; + * + * if (process.getuid) { + * console.log(`Current uid: ${process.getuid()}`); + * } + * ``` + * + * This function is only available on POSIX platforms (i.e. not Windows or + * Android). + * @since v0.1.28 + */ + getuid?: () => number; + /** + * The `process.setuid(id)` method sets the user identity of the process. (See [`setuid(2)`](http://man7.org/linux/man-pages/man2/setuid.2.html).) The `id` can be passed as either a + * numeric ID or a username string. + * If a username is specified, the method blocks while resolving the associated + * numeric ID. + * + * ```js + * import process from 'process'; + * + * if (process.getuid && process.setuid) { + * console.log(`Current uid: ${process.getuid()}`); + * try { + * process.setuid(501); + * console.log(`New uid: ${process.getuid()}`); + * } catch (err) { + * console.log(`Failed to set uid: ${err}`); + * } + * } + * ``` + * + * This function is only available on POSIX platforms (i.e. not Windows or + * Android). + * This feature is not available in `Worker` threads. + * @since v0.1.28 + */ + setuid?: (id: number | string) => void; + /** + * The `process.geteuid()` method returns the numerical effective user identity of + * the process. (See [`geteuid(2)`](http://man7.org/linux/man-pages/man2/geteuid.2.html).) + * + * ```js + * import process from 'process'; + * + * if (process.geteuid) { + * console.log(`Current uid: ${process.geteuid()}`); + * } + * ``` + * + * This function is only available on POSIX platforms (i.e. not Windows or + * Android). + * @since v2.0.0 + */ + geteuid?: () => number; + /** + * The `process.seteuid()` method sets the effective user identity of the process. + * (See [`seteuid(2)`](http://man7.org/linux/man-pages/man2/seteuid.2.html).) The `id` can be passed as either a numeric ID or a username + * string. If a username is specified, the method blocks while resolving the + * associated numeric ID. 
+ * + * ```js + * import process from 'process'; + * + * if (process.geteuid && process.seteuid) { + * console.log(`Current uid: ${process.geteuid()}`); + * try { + * process.seteuid(501); + * console.log(`New uid: ${process.geteuid()}`); + * } catch (err) { + * console.log(`Failed to set uid: ${err}`); + * } + * } + * ``` + * + * This function is only available on POSIX platforms (i.e. not Windows or + * Android). + * This feature is not available in `Worker` threads. + * @since v2.0.0 + * @param id A user name or ID + */ + seteuid?: (id: number | string) => void; + /** + * The `process.getegid()` method returns the numerical effective group identity + * of the Node.js process. (See [`getegid(2)`](http://man7.org/linux/man-pages/man2/getegid.2.html).) + * + * ```js + * import process from 'process'; + * + * if (process.getegid) { + * console.log(`Current gid: ${process.getegid()}`); + * } + * ``` + * + * This function is only available on POSIX platforms (i.e. not Windows or + * Android). + * @since v2.0.0 + */ + getegid?: () => number; + /** + * The `process.setegid()` method sets the effective group identity of the process. + * (See [`setegid(2)`](http://man7.org/linux/man-pages/man2/setegid.2.html).) The `id` can be passed as either a numeric ID or a group + * name string. If a group name is specified, this method blocks while resolving + * the associated a numeric ID. + * + * ```js + * import process from 'process'; + * + * if (process.getegid && process.setegid) { + * console.log(`Current gid: ${process.getegid()}`); + * try { + * process.setegid(501); + * console.log(`New gid: ${process.getegid()}`); + * } catch (err) { + * console.log(`Failed to set gid: ${err}`); + * } + * } + * ``` + * + * This function is only available on POSIX platforms (i.e. not Windows or + * Android). + * This feature is not available in `Worker` threads. + * @since v2.0.0 + * @param id A group name or ID + */ + setegid?: (id: number | string) => void; + /** + * The `process.getgroups()` method returns an array with the supplementary group + * IDs. POSIX leaves it unspecified if the effective group ID is included but + * Node.js ensures it always is. + * + * ```js + * import process from 'process'; + * + * if (process.getgroups) { + * console.log(process.getgroups()); // [ 16, 21, 297 ] + * } + * ``` + * + * This function is only available on POSIX platforms (i.e. not Windows or + * Android). + * @since v0.9.4 + */ + getgroups?: () => number[]; + /** + * The `process.setgroups()` method sets the supplementary group IDs for the + * Node.js process. This is a privileged operation that requires the Node.js + * process to have `root` or the `CAP_SETGID` capability. + * + * The `groups` array can contain numeric group IDs, group names, or both. + * + * ```js + * import process from 'process'; + * + * if (process.getgroups && process.setgroups) { + * try { + * process.setgroups([501]); + * console.log(process.getgroups()); // new groups + * } catch (err) { + * console.log(`Failed to set groups: ${err}`); + * } + * } + * ``` + * + * This function is only available on POSIX platforms (i.e. not Windows or + * Android). + * This feature is not available in `Worker` threads. + * @since v0.9.4 + */ + setgroups?: (groups: ReadonlyArray) => void; + /** + * The `process.setUncaughtExceptionCaptureCallback()` function sets a function + * that will be invoked when an uncaught exception occurs, which will receive the + * exception value itself as its first argument. 
+ * + * If such a function is set, the `'uncaughtException'` event will + * not be emitted. If `--abort-on-uncaught-exception` was passed from the + * command line or set through `v8.setFlagsFromString()`, the process will + * not abort. Actions configured to take place on exceptions such as report + * generations will be affected too + * + * To unset the capture function,`process.setUncaughtExceptionCaptureCallback(null)` may be used. Calling this + * method with a non-`null` argument while another capture function is set will + * throw an error. + * + * Using this function is mutually exclusive with using the deprecated `domain` built-in module. + * @since v9.3.0 + */ + setUncaughtExceptionCaptureCallback(cb: ((err: Error) => void) | null): void; + /** + * Indicates whether a callback has been set using {@link setUncaughtExceptionCaptureCallback}. + * @since v9.3.0 + */ + hasUncaughtExceptionCaptureCallback(): boolean; + /** + * The `process.version` property contains the Node.js version string. + * + * ```js + * import { version } from 'process'; + * + * console.log(`Version: ${version}`); + * // Version: v14.8.0 + * ``` + * + * To get the version string without the prepended _v_, use`process.versions.node`. + * @since v0.1.3 + */ + readonly version: string; + /** + * The `process.versions` property returns an object listing the version strings of + * Node.js and its dependencies. `process.versions.modules` indicates the current + * ABI version, which is increased whenever a C++ API changes. Node.js will refuse + * to load modules that were compiled against a different module ABI version. + * + * ```js + * import { versions } from 'process'; + * + * console.log(versions); + * ``` + * + * Will generate an object similar to: + * + * ```console + * { node: '11.13.0', + * v8: '7.0.276.38-node.18', + * uv: '1.27.0', + * zlib: '1.2.11', + * brotli: '1.0.7', + * ares: '1.15.0', + * modules: '67', + * nghttp2: '1.34.0', + * napi: '4', + * llhttp: '1.1.1', + * openssl: '1.1.1b', + * cldr: '34.0', + * icu: '63.1', + * tz: '2018e', + * unicode: '11.0' } + * ``` + * @since v0.2.0 + */ + readonly versions: ProcessVersions; + /** + * The `process.config` property returns an `Object` containing the JavaScript + * representation of the configure options used to compile the current Node.js + * executable. This is the same as the `config.gypi` file that was produced when + * running the `./configure` script. + * + * An example of the possible output looks like: + * + * ```js + * { + * target_defaults: + * { cflags: [], + * default_configuration: 'Release', + * defines: [], + * include_dirs: [], + * libraries: [] }, + * variables: + * { + * host_arch: 'x64', + * napi_build_version: 5, + * node_install_npm: 'true', + * node_prefix: '', + * node_shared_cares: 'false', + * node_shared_http_parser: 'false', + * node_shared_libuv: 'false', + * node_shared_zlib: 'false', + * node_use_dtrace: 'false', + * node_use_openssl: 'true', + * node_shared_openssl: 'false', + * strict_aliasing: 'true', + * target_arch: 'x64', + * v8_use_snapshot: 1 + * } + * } + * ``` + * + * The `process.config` property is **not** read-only and there are existing + * modules in the ecosystem that are known to extend, modify, or entirely replace + * the value of `process.config`. + * + * Modifying the `process.config` property, or any child-property of the`process.config` object has been deprecated. The `process.config` will be made + * read-only in a future release. 
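A minimal sketch of the capture-callback API documented above: setting a callback suppresses `'uncaughtException'`, and it must be cleared with `null` before another callback can be set.

```js
// Sketch: install, check, and unset an uncaught-exception capture callback.
process.setUncaughtExceptionCaptureCallback((err) => {
  console.error('captured:', err.message);
});
console.log(process.hasUncaughtExceptionCaptureCallback()); // true
process.setUncaughtExceptionCaptureCallback(null);          // unset it again
```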
+ * @since v0.7.7 + */ + readonly config: ProcessConfig; + /** + * The `process.kill()` method sends the `signal` to the process identified by`pid`. + * + * Signal names are strings such as `'SIGINT'` or `'SIGHUP'`. See `Signal Events` and [`kill(2)`](http://man7.org/linux/man-pages/man2/kill.2.html) for more information. + * + * This method will throw an error if the target `pid` does not exist. As a special + * case, a signal of `0` can be used to test for the existence of a process. + * Windows platforms will throw an error if the `pid` is used to kill a process + * group. + * + * Even though the name of this function is `process.kill()`, it is really just a + * signal sender, like the `kill` system call. The signal sent may do something + * other than kill the target process. + * + * ```js + * import process, { kill } from 'process'; + * + * process.on('SIGHUP', () => { + * console.log('Got SIGHUP signal.'); + * }); + * + * setTimeout(() => { + * console.log('Exiting.'); + * process.exit(0); + * }, 100); + * + * kill(process.pid, 'SIGHUP'); + * ``` + * + * When `SIGUSR1` is received by a Node.js process, Node.js will start the + * debugger. See `Signal Events`. + * @since v0.0.6 + * @param pid A process ID + * @param [signal='SIGTERM'] The signal to send, either as a string or number. + */ + kill(pid: number, signal?: string | number): true; + /** + * The `process.pid` property returns the PID of the process. + * + * ```js + * import { pid } from 'process'; + * + * console.log(`This process is pid ${pid}`); + * ``` + * @since v0.1.15 + */ + readonly pid: number; + /** + * The `process.ppid` property returns the PID of the parent of the + * current process. + * + * ```js + * import { ppid } from 'process'; + * + * console.log(`The parent process is pid ${ppid}`); + * ``` + * @since v9.2.0, v8.10.0, v6.13.0 + */ + readonly ppid: number; + /** + * The `process.title` property returns the current process title (i.e. returns + * the current value of `ps`). Assigning a new value to `process.title` modifies + * the current value of `ps`. + * + * When a new value is assigned, different platforms will impose different maximum + * length restrictions on the title. Usually such restrictions are quite limited. + * For instance, on Linux and macOS, `process.title` is limited to the size of the + * binary name plus the length of the command-line arguments because setting the`process.title` overwrites the `argv` memory of the process. Node.js v0.8 + * allowed for longer process title strings by also overwriting the `environ`memory but that was potentially insecure and confusing in some (rather obscure) + * cases. + * + * Assigning a value to `process.title` might not result in an accurate label + * within process manager applications such as macOS Activity Monitor or Windows + * Services Manager. + * @since v0.1.104 + */ + title: string; + /** + * The operating system CPU architecture for which the Node.js binary was compiled. + * Possible values are: `'arm'`, `'arm64'`, `'ia32'`, `'mips'`,`'mipsel'`, `'ppc'`,`'ppc64'`, `'s390'`, `'s390x'`, and `'x64'`. + * + * ```js + * import { arch } from 'process'; + * + * console.log(`This processor architecture is ${arch}`); + * ``` + * @since v0.5.0 + */ + readonly arch: Architecture; + /** + * The `process.platform` property returns a string identifying the operating + * system platform for which the Node.js binary was compiled. 
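A sketch of `process.kill()` as a signal sender, per the note above: signal `0` only tests for the existence of the target pid and does not terminate anything.

```js
// Sketch: liveness check via signal 0, plus a few of the identity properties above.
function isRunning(pid) {
  try {
    process.kill(pid, 0);   // throws if the pid does not exist or is not signalable
    return true;
  } catch {
    return false;
  }
}
console.log(isRunning(process.pid));  // true for the current process
console.log(process.pid, process.ppid, process.title, process.arch);
```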
+ * + * Currently possible values are: + * + * * `'aix'` + * * `'darwin'` + * * `'freebsd'` + * * `'linux'` + * * `'openbsd'` + * * `'sunos'` + * * `'win32'` + * + * ```js + * import { platform } from 'process'; + * + * console.log(`This platform is ${platform}`); + * ``` + * + * The value `'android'` may also be returned if the Node.js is built on the + * Android operating system. However, Android support in Node.js [is experimental](https://github.com/nodejs/node/blob/HEAD/BUILDING.md#androidandroid-based-devices-eg-firefox-os). + * @since v0.1.16 + */ + readonly platform: Platform; + /** + * The `process.mainModule` property provides an alternative way of retrieving `require.main`. The difference is that if the main module changes at + * runtime, `require.main` may still refer to the original main module in + * modules that were required before the change occurred. Generally, it's + * safe to assume that the two refer to the same module. + * + * As with `require.main`, `process.mainModule` will be `undefined` if there + * is no entry script. + * @since v0.1.17 + * @deprecated Since v14.0.0 - Use `main` instead. + */ + mainModule?: Module | undefined; + memoryUsage: MemoryUsageFn; + /** + * The `process.cpuUsage()` method returns the user and system CPU time usage of + * the current process, in an object with properties `user` and `system`, whose + * values are microsecond values (millionth of a second). These values measure time + * spent in user and system code respectively, and may end up being greater than + * actual elapsed time if multiple CPU cores are performing work for this process. + * + * The result of a previous call to `process.cpuUsage()` can be passed as the + * argument to the function, to get a diff reading. + * + * ```js + * import { cpuUsage } from 'process'; + * + * const startUsage = cpuUsage(); + * // { user: 38579, system: 6986 } + * + * // spin the CPU for 500 milliseconds + * const now = Date.now(); + * while (Date.now() - now < 500); + * + * console.log(cpuUsage(startUsage)); + * // { user: 514883, system: 11226 } + * ``` + * @since v6.1.0 + * @param previousValue A previous return value from calling `process.cpuUsage()` + */ + cpuUsage(previousValue?: CpuUsage): CpuUsage; + /** + * `process.nextTick()` adds `callback` to the "next tick queue". This queue is + * fully drained after the current operation on the JavaScript stack runs to + * completion and before the event loop is allowed to continue. It's possible to + * create an infinite loop if one were to recursively call `process.nextTick()`. + * See the [Event Loop](https://nodejs.org/en/docs/guides/event-loop-timers-and-nexttick/#process-nexttick) guide for more background. + * + * ```js + * import { nextTick } from 'process'; + * + * console.log('start'); + * nextTick(() => { + * console.log('nextTick callback'); + * }); + * console.log('scheduled'); + * // Output: + * // start + * // scheduled + * // nextTick callback + * ``` + * + * This is important when developing APIs in order to give users the opportunity + * to assign event handlers _after_ an object has been constructed but before any + * I/O has occurred: + * + * ```js + * import { nextTick } from 'process'; + * + * function MyThing(options) { + * this.setupOptions(options); + * + * nextTick(() => { + * this.startDoingStuff(); + * }); + * } + * + * const thing = new MyThing(); + * thing.getReadyForStuff(); + * + * // thing.startDoingStuff() gets called now, not before. 
+ * ``` + * + * It is very important for APIs to be either 100% synchronous or 100% + * asynchronous. Consider this example: + * + * ```js + * // WARNING! DO NOT USE! BAD UNSAFE HAZARD! + * function maybeSync(arg, cb) { + * if (arg) { + * cb(); + * return; + * } + * + * fs.stat('file', cb); + * } + * ``` + * + * This API is hazardous because in the following case: + * + * ```js + * const maybeTrue = Math.random() > 0.5; + * + * maybeSync(maybeTrue, () => { + * foo(); + * }); + * + * bar(); + * ``` + * + * It is not clear whether `foo()` or `bar()` will be called first. + * + * The following approach is much better: + * + * ```js + * import { nextTick } from 'process'; + * + * function definitelyAsync(arg, cb) { + * if (arg) { + * nextTick(cb); + * return; + * } + * + * fs.stat('file', cb); + * } + * ``` + * @since v0.1.26 + * @param args Additional arguments to pass when invoking the `callback` + */ + nextTick(callback: Function, ...args: any[]): void; + /** + * The `process.release` property returns an `Object` containing metadata related + * to the current release, including URLs for the source tarball and headers-only + * tarball. + * + * `process.release` contains the following properties: + * + * ```js + * { + * name: 'node', + * lts: 'Erbium', + * sourceUrl: 'https://nodejs.org/download/release/v12.18.1/node-v12.18.1.tar.gz', + * headersUrl: 'https://nodejs.org/download/release/v12.18.1/node-v12.18.1-headers.tar.gz', + * libUrl: 'https://nodejs.org/download/release/v12.18.1/win-x64/node.lib' + * } + * ``` + * + * In custom builds from non-release versions of the source tree, only the`name` property may be present. The additional properties should not be + * relied upon to exist. + * @since v3.0.0 + */ + readonly release: ProcessRelease; + features: { + inspector: boolean; + debug: boolean; + uv: boolean; + ipv6: boolean; + tls_alpn: boolean; + tls_sni: boolean; + tls_ocsp: boolean; + tls: boolean; + }; + /** + * `process.umask()` returns the Node.js process's file mode creation mask. Child + * processes inherit the mask from the parent process. + * @since v0.1.19 + * @deprecated Calling `process.umask()` with no argument causes the process-wide umask to be written twice. This introduces a race condition between threads, and is a potential * + * security vulnerability. There is no safe, cross-platform alternative API. + */ + umask(): number; + /** + * Can only be set if not in worker thread. + */ + umask(mask: string | number): number; + /** + * The `process.uptime()` method returns the number of seconds the current Node.js + * process has been running. + * + * The return value includes fractions of a second. Use `Math.floor()` to get whole + * seconds. + * @since v0.5.0 + */ + uptime(): number; + hrtime: HRTime; + /** + * If Node.js is spawned with an IPC channel, the `process.send()` method can be + * used to send messages to the parent process. Messages will be received as a `'message'` event on the parent's `ChildProcess` object. + * + * If Node.js was not spawned with an IPC channel, `process.send` will be`undefined`. + * + * The message goes through serialization and parsing. The resulting message might + * not be the same as what is originally sent. 
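A small sketch of the `HRTime` interface declared above, using `hrtime.bigint()` for a simple wall-clock delta alongside `uptime()`:

```js
// Sketch: nanosecond timing with hrtime.bigint().
const start = process.hrtime.bigint();
for (let i = 0; i < 1e6; i++);                 // some synchronous work
const elapsedNs = process.hrtime.bigint() - start;
console.log(`took ${Number(elapsedNs) / 1e6} ms (uptime: ${process.uptime()} s)`);
```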
+ * @since v0.5.9 + * @param options used to parameterize the sending of certain types of handles.`options` supports the following properties: + */ + send?( + message: any, + sendHandle?: any, + options?: { + swallowErrors?: boolean | undefined; + }, + callback?: (error: Error | null) => void + ): boolean; + /** + * If the Node.js process is spawned with an IPC channel (see the `Child Process` and `Cluster` documentation), the `process.disconnect()` method will close the + * IPC channel to the parent process, allowing the child process to exit gracefully + * once there are no other connections keeping it alive. + * + * The effect of calling `process.disconnect()` is the same as calling `ChildProcess.disconnect()` from the parent process. + * + * If the Node.js process was not spawned with an IPC channel,`process.disconnect()` will be `undefined`. + * @since v0.7.2 + */ + disconnect(): void; + /** + * If the Node.js process is spawned with an IPC channel (see the `Child Process` and `Cluster` documentation), the `process.connected` property will return`true` so long as the IPC + * channel is connected and will return `false` after`process.disconnect()` is called. + * + * Once `process.connected` is `false`, it is no longer possible to send messages + * over the IPC channel using `process.send()`. + * @since v0.7.2 + */ + connected: boolean; + /** + * The `process.allowedNodeEnvironmentFlags` property is a special, + * read-only `Set` of flags allowable within the `NODE_OPTIONS` environment variable. + * + * `process.allowedNodeEnvironmentFlags` extends `Set`, but overrides`Set.prototype.has` to recognize several different possible flag + * representations. `process.allowedNodeEnvironmentFlags.has()` will + * return `true` in the following cases: + * + * * Flags may omit leading single (`-`) or double (`--`) dashes; e.g.,`inspect-brk` for `--inspect-brk`, or `r` for `-r`. + * * Flags passed through to V8 (as listed in `--v8-options`) may replace + * one or more _non-leading_ dashes for an underscore, or vice-versa; + * e.g., `--perf_basic_prof`, `--perf-basic-prof`, `--perf_basic-prof`, + * etc. + * * Flags may contain one or more equals (`=`) characters; all + * characters after and including the first equals will be ignored; + * e.g., `--stack-trace-limit=100`. + * * Flags _must_ be allowable within `NODE_OPTIONS`. + * + * When iterating over `process.allowedNodeEnvironmentFlags`, flags will + * appear only _once_; each will begin with one or more dashes. Flags + * passed through to V8 will contain underscores instead of non-leading + * dashes: + * + * ```js + * import { allowedNodeEnvironmentFlags } from 'process'; + * + * allowedNodeEnvironmentFlags.forEach((flag) => { + * // -r + * // --inspect-brk + * // --abort_on_uncaught_exception + * // ... + * }); + * ``` + * + * The methods `add()`, `clear()`, and `delete()` of`process.allowedNodeEnvironmentFlags` do nothing, and will fail + * silently. + * + * If Node.js was compiled _without_ `NODE_OPTIONS` support (shown in {@link config}), `process.allowedNodeEnvironmentFlags` will + * contain what _would have_ been allowable. + * @since v10.10.0 + */ + allowedNodeEnvironmentFlags: ReadonlySet; + /** + * `process.report` is an object whose methods are used to generate diagnostic + * reports for the current process. Additional documentation is available in the `report documentation`. 
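A sketch of the relaxed matching that `allowedNodeEnvironmentFlags.has()` performs, as described above (leading dashes are optional and anything after `=` is ignored); the last flag is deliberately fictitious:

```js
// Sketch: querying which flags NODE_OPTIONS would accept.
const { allowedNodeEnvironmentFlags } = process;
console.log(allowedNodeEnvironmentFlags.has('--inspect-brk'));            // true
console.log(allowedNodeEnvironmentFlags.has('inspect-brk'));              // true: dashes optional
console.log(allowedNodeEnvironmentFlags.has('--stack-trace-limit=100'));  // true: '=...' ignored
console.log(allowedNodeEnvironmentFlags.has('--not-a-real-flag'));        // false (illustrative)
```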
+ * @since v11.8.0 + */ + report?: ProcessReport | undefined; + /** + * ```js + * import { resourceUsage } from 'process'; + * + * console.log(resourceUsage()); + * /* + * Will output: + * { + * userCPUTime: 82872, + * systemCPUTime: 4143, + * maxRSS: 33164, + * sharedMemorySize: 0, + * unsharedDataSize: 0, + * unsharedStackSize: 0, + * minorPageFault: 2469, + * majorPageFault: 0, + * swappedOut: 0, + * fsRead: 0, + * fsWrite: 8, + * ipcSent: 0, + * ipcReceived: 0, + * signalsCount: 0, + * voluntaryContextSwitches: 79, + * involuntaryContextSwitches: 1 + * } + * + * ``` + * @since v12.6.0 + * @return the resource usage for the current process. All of these values come from the `uv_getrusage` call which returns a [`uv_rusage_t` struct][uv_rusage_t]. + */ + resourceUsage(): ResourceUsage; + /** + * The `process.traceDeprecation` property indicates whether the`--trace-deprecation` flag is set on the current Node.js process. See the + * documentation for the `'warning' event` and the `emitWarning() method` for more information about this + * flag's behavior. + * @since v0.8.0 + */ + traceDeprecation: boolean; + /* EventEmitter */ + addListener(event: 'beforeExit', listener: BeforeExitListener): this; + addListener(event: 'disconnect', listener: DisconnectListener): this; + addListener(event: 'exit', listener: ExitListener): this; + addListener(event: 'rejectionHandled', listener: RejectionHandledListener): this; + addListener(event: 'uncaughtException', listener: UncaughtExceptionListener): this; + addListener(event: 'uncaughtExceptionMonitor', listener: UncaughtExceptionListener): this; + addListener(event: 'unhandledRejection', listener: UnhandledRejectionListener): this; + addListener(event: 'warning', listener: WarningListener): this; + addListener(event: 'message', listener: MessageListener): this; + addListener(event: Signals, listener: SignalsListener): this; + addListener(event: 'multipleResolves', listener: MultipleResolveListener): this; + addListener(event: 'worker', listener: WorkerListener): this; + emit(event: 'beforeExit', code: number): boolean; + emit(event: 'disconnect'): boolean; + emit(event: 'exit', code: number): boolean; + emit(event: 'rejectionHandled', promise: Promise): boolean; + emit(event: 'uncaughtException', error: Error): boolean; + emit(event: 'uncaughtExceptionMonitor', error: Error): boolean; + emit(event: 'unhandledRejection', reason: unknown, promise: Promise): boolean; + emit(event: 'warning', warning: Error): boolean; + emit(event: 'message', message: unknown, sendHandle: unknown): this; + emit(event: Signals, signal?: Signals): boolean; + emit(event: 'multipleResolves', type: MultipleResolveType, promise: Promise, value: unknown): this; + emit(event: 'worker', listener: WorkerListener): this; + on(event: 'beforeExit', listener: BeforeExitListener): this; + on(event: 'disconnect', listener: DisconnectListener): this; + on(event: 'exit', listener: ExitListener): this; + on(event: 'rejectionHandled', listener: RejectionHandledListener): this; + on(event: 'uncaughtException', listener: UncaughtExceptionListener): this; + on(event: 'uncaughtExceptionMonitor', listener: UncaughtExceptionListener): this; + on(event: 'unhandledRejection', listener: UnhandledRejectionListener): this; + on(event: 'warning', listener: WarningListener): this; + on(event: 'message', listener: MessageListener): this; + on(event: Signals, listener: SignalsListener): this; + on(event: 'multipleResolves', listener: MultipleResolveListener): this; + on(event: 'worker', listener: 
WorkerListener): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: 'beforeExit', listener: BeforeExitListener): this; + once(event: 'disconnect', listener: DisconnectListener): this; + once(event: 'exit', listener: ExitListener): this; + once(event: 'rejectionHandled', listener: RejectionHandledListener): this; + once(event: 'uncaughtException', listener: UncaughtExceptionListener): this; + once(event: 'uncaughtExceptionMonitor', listener: UncaughtExceptionListener): this; + once(event: 'unhandledRejection', listener: UnhandledRejectionListener): this; + once(event: 'warning', listener: WarningListener): this; + once(event: 'message', listener: MessageListener): this; + once(event: Signals, listener: SignalsListener): this; + once(event: 'multipleResolves', listener: MultipleResolveListener): this; + once(event: 'worker', listener: WorkerListener): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: 'beforeExit', listener: BeforeExitListener): this; + prependListener(event: 'disconnect', listener: DisconnectListener): this; + prependListener(event: 'exit', listener: ExitListener): this; + prependListener(event: 'rejectionHandled', listener: RejectionHandledListener): this; + prependListener(event: 'uncaughtException', listener: UncaughtExceptionListener): this; + prependListener(event: 'uncaughtExceptionMonitor', listener: UncaughtExceptionListener): this; + prependListener(event: 'unhandledRejection', listener: UnhandledRejectionListener): this; + prependListener(event: 'warning', listener: WarningListener): this; + prependListener(event: 'message', listener: MessageListener): this; + prependListener(event: Signals, listener: SignalsListener): this; + prependListener(event: 'multipleResolves', listener: MultipleResolveListener): this; + prependListener(event: 'worker', listener: WorkerListener): this; + prependOnceListener(event: 'beforeExit', listener: BeforeExitListener): this; + prependOnceListener(event: 'disconnect', listener: DisconnectListener): this; + prependOnceListener(event: 'exit', listener: ExitListener): this; + prependOnceListener(event: 'rejectionHandled', listener: RejectionHandledListener): this; + prependOnceListener(event: 'uncaughtException', listener: UncaughtExceptionListener): this; + prependOnceListener(event: 'uncaughtExceptionMonitor', listener: UncaughtExceptionListener): this; + prependOnceListener(event: 'unhandledRejection', listener: UnhandledRejectionListener): this; + prependOnceListener(event: 'warning', listener: WarningListener): this; + prependOnceListener(event: 'message', listener: MessageListener): this; + prependOnceListener(event: Signals, listener: SignalsListener): this; + prependOnceListener(event: 'multipleResolves', listener: MultipleResolveListener): this; + prependOnceListener(event: 'worker', listener: WorkerListener): this; + listeners(event: 'beforeExit'): BeforeExitListener[]; + listeners(event: 'disconnect'): DisconnectListener[]; + listeners(event: 'exit'): ExitListener[]; + listeners(event: 'rejectionHandled'): RejectionHandledListener[]; + listeners(event: 'uncaughtException'): UncaughtExceptionListener[]; + listeners(event: 'uncaughtExceptionMonitor'): UncaughtExceptionListener[]; + listeners(event: 'unhandledRejection'): UnhandledRejectionListener[]; + listeners(event: 'warning'): WarningListener[]; + listeners(event: 'message'): MessageListener[]; + listeners(event: Signals): SignalsListener[]; + listeners(event: 'multipleResolves'): 
MultipleResolveListener[]; + listeners(event: 'worker'): WorkerListener[]; + } + } + } + export = process; +} +declare module 'node:process' { + import process = require('process'); + export = process; +} diff --git a/node_modules/@types/node/punycode.d.ts b/node_modules/@types/node/punycode.d.ts new file mode 100755 index 0000000..87ebbb9 --- /dev/null +++ b/node_modules/@types/node/punycode.d.ts @@ -0,0 +1,117 @@ +/** + * **The version of the punycode module bundled in Node.js is being deprecated.**In a future major version of Node.js this module will be removed. Users + * currently depending on the `punycode` module should switch to using the + * userland-provided [Punycode.js](https://github.com/bestiejs/punycode.js) module instead. For punycode-based URL + * encoding, see `url.domainToASCII` or, more generally, the `WHATWG URL API`. + * + * The `punycode` module is a bundled version of the [Punycode.js](https://github.com/bestiejs/punycode.js) module. It + * can be accessed using: + * + * ```js + * const punycode = require('punycode'); + * ``` + * + * [Punycode](https://tools.ietf.org/html/rfc3492) is a character encoding scheme defined by RFC 3492 that is + * primarily intended for use in Internationalized Domain Names. Because host + * names in URLs are limited to ASCII characters only, Domain Names that contain + * non-ASCII characters must be converted into ASCII using the Punycode scheme. + * For instance, the Japanese character that translates into the English word,`'example'` is `'例'`. The Internationalized Domain Name, `'例.com'` (equivalent + * to `'example.com'`) is represented by Punycode as the ASCII string`'xn--fsq.com'`. + * + * The `punycode` module provides a simple implementation of the Punycode standard. + * + * The `punycode` module is a third-party dependency used by Node.js and + * made available to developers as a convenience. Fixes or other modifications to + * the module must be directed to the [Punycode.js](https://github.com/bestiejs/punycode.js) project. + * @deprecated Since v7.0.0 - Deprecated + * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/punycode.js) + */ +declare module 'punycode' { + /** + * The `punycode.decode()` method converts a [Punycode](https://tools.ietf.org/html/rfc3492) string of ASCII-only + * characters to the equivalent string of Unicode codepoints. + * + * ```js + * punycode.decode('maana-pta'); // 'mañana' + * punycode.decode('--dqo34k'); // '☃-⌘' + * ``` + * @since v0.5.1 + */ + function decode(string: string): string; + /** + * The `punycode.encode()` method converts a string of Unicode codepoints to a [Punycode](https://tools.ietf.org/html/rfc3492) string of ASCII-only characters. + * + * ```js + * punycode.encode('mañana'); // 'maana-pta' + * punycode.encode('☃-⌘'); // '--dqo34k' + * ``` + * @since v0.5.1 + */ + function encode(string: string): string; + /** + * The `punycode.toUnicode()` method converts a string representing a domain name + * containing [Punycode](https://tools.ietf.org/html/rfc3492) encoded characters into Unicode. Only the [Punycode](https://tools.ietf.org/html/rfc3492) encoded parts of the domain name are be + * converted. 
+ * + * ```js + * // decode domain names + * punycode.toUnicode('xn--maana-pta.com'); // 'mañana.com' + * punycode.toUnicode('xn----dqo34k.com'); // '☃-⌘.com' + * punycode.toUnicode('example.com'); // 'example.com' + * ``` + * @since v0.6.1 + */ + function toUnicode(domain: string): string; + /** + * The `punycode.toASCII()` method converts a Unicode string representing an + * Internationalized Domain Name to [Punycode](https://tools.ietf.org/html/rfc3492). Only the non-ASCII parts of the + * domain name will be converted. Calling `punycode.toASCII()` on a string that + * already only contains ASCII characters will have no effect. + * + * ```js + * // encode domain names + * punycode.toASCII('mañana.com'); // 'xn--maana-pta.com' + * punycode.toASCII('☃-⌘.com'); // 'xn----dqo34k.com' + * punycode.toASCII('example.com'); // 'example.com' + * ``` + * @since v0.6.1 + */ + function toASCII(domain: string): string; + /** + * @deprecated since v7.0.0 + * The version of the punycode module bundled in Node.js is being deprecated. + * In a future major version of Node.js this module will be removed. + * Users currently depending on the punycode module should switch to using + * the userland-provided Punycode.js module instead. + */ + const ucs2: ucs2; + interface ucs2 { + /** + * @deprecated since v7.0.0 + * The version of the punycode module bundled in Node.js is being deprecated. + * In a future major version of Node.js this module will be removed. + * Users currently depending on the punycode module should switch to using + * the userland-provided Punycode.js module instead. + */ + decode(string: string): number[]; + /** + * @deprecated since v7.0.0 + * The version of the punycode module bundled in Node.js is being deprecated. + * In a future major version of Node.js this module will be removed. + * Users currently depending on the punycode module should switch to using + * the userland-provided Punycode.js module instead. + */ + encode(codePoints: ReadonlyArray): string; + } + /** + * @deprecated since v7.0.0 + * The version of the punycode module bundled in Node.js is being deprecated. + * In a future major version of Node.js this module will be removed. + * Users currently depending on the punycode module should switch to using + * the userland-provided Punycode.js module instead. + */ + const version: string; +} +declare module 'node:punycode' { + export * from 'punycode'; +} diff --git a/node_modules/@types/node/querystring.d.ts b/node_modules/@types/node/querystring.d.ts new file mode 100755 index 0000000..e118547 --- /dev/null +++ b/node_modules/@types/node/querystring.d.ts @@ -0,0 +1,131 @@ +/** + * The `querystring` module provides utilities for parsing and formatting URL + * query strings. It can be accessed using: + * + * ```js + * const querystring = require('querystring'); + * ``` + * + * `querystring` is more performant than `URLSearchParams` but is not a + * standardized API. Use `URLSearchParams` when performance is not critical + * or when compatibility with browser code is desirable. 
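+ *
+ * A minimal round trip (sketch):
+ *
+ * ```js
+ * const querystring = require('querystring');
+ *
+ * const parsed = querystring.parse('foo=bar&abc=xyz&abc=123');
+ * // { foo: 'bar', abc: ['xyz', '123'] }
+ *
+ * querystring.stringify(parsed);
+ * // Returns 'foo=bar&abc=xyz&abc=123'
+ * ```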
+ * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/querystring.js) + */ +declare module 'querystring' { + interface StringifyOptions { + encodeURIComponent?: ((str: string) => string) | undefined; + } + interface ParseOptions { + maxKeys?: number | undefined; + decodeURIComponent?: ((str: string) => string) | undefined; + } + interface ParsedUrlQuery extends NodeJS.Dict {} + interface ParsedUrlQueryInput extends NodeJS.Dict | ReadonlyArray | ReadonlyArray | null> {} + /** + * The `querystring.stringify()` method produces a URL query string from a + * given `obj` by iterating through the object's "own properties". + * + * It serializes the following types of values passed in `obj`:[string](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#String_type) | + * [number](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#Number_type) | + * [bigint](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/BigInt) | + * [boolean](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#Boolean_type) | + * [string\[\]](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#String_type) | + * [number\[\]](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#Number_type) | + * [bigint\[\]](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/BigInt) | + * [boolean\[\]](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#Boolean_type) The numeric values must be finite. Any other input values will be coerced to + * empty strings. + * + * ```js + * querystring.stringify({ foo: 'bar', baz: ['qux', 'quux'], corge: '' }); + * // Returns 'foo=bar&baz=qux&baz=quux&corge=' + * + * querystring.stringify({ foo: 'bar', baz: 'qux' }, ';', ':'); + * // Returns 'foo:bar;baz:qux' + * ``` + * + * By default, characters requiring percent-encoding within the query string will + * be encoded as UTF-8\. If an alternative encoding is required, then an alternative`encodeURIComponent` option will need to be specified: + * + * ```js + * // Assuming gbkEncodeURIComponent function already exists, + * + * querystring.stringify({ w: '中文', foo: 'bar' }, null, null, + * { encodeURIComponent: gbkEncodeURIComponent }); + * ``` + * @since v0.1.25 + * @param obj The object to serialize into a URL query string + * @param [sep='&'] The substring used to delimit key and value pairs in the query string. + * @param [eq='='] . The substring used to delimit keys and values in the query string. + */ + function stringify(obj?: ParsedUrlQueryInput, sep?: string, eq?: string, options?: StringifyOptions): string; + /** + * The `querystring.parse()` method parses a URL query string (`str`) into a + * collection of key and value pairs. + * + * For example, the query string `'foo=bar&abc=xyz&abc=123'` is parsed into: + * + * ```js + * { + * foo: 'bar', + * abc: ['xyz', '123'] + * } + * ``` + * + * The object returned by the `querystring.parse()` method _does not_prototypically inherit from the JavaScript `Object`. This means that typical`Object` methods such as `obj.toString()`, + * `obj.hasOwnProperty()`, and others + * are not defined and _will not work_. + * + * By default, percent-encoded characters within the query string will be assumed + * to use UTF-8 encoding. If an alternative character encoding is used, then an + * alternative `decodeURIComponent` option will need to be specified: + * + * ```js + * // Assuming gbkDecodeURIComponent function already exists... 
+ * + * querystring.parse('w=%D6%D0%CE%C4&foo=bar', null, null, + * { decodeURIComponent: gbkDecodeURIComponent }); + * ``` + * @since v0.1.25 + * @param str The URL query string to parse + * @param [sep='&'] The substring used to delimit key and value pairs in the query string. + * @param [eq='='] . The substring used to delimit keys and values in the query string. + */ + function parse(str: string, sep?: string, eq?: string, options?: ParseOptions): ParsedUrlQuery; + /** + * The querystring.encode() function is an alias for querystring.stringify(). + */ + const encode: typeof stringify; + /** + * The querystring.decode() function is an alias for querystring.parse(). + */ + const decode: typeof parse; + /** + * The `querystring.escape()` method performs URL percent-encoding on the given`str` in a manner that is optimized for the specific requirements of URL + * query strings. + * + * The `querystring.escape()` method is used by `querystring.stringify()` and is + * generally not expected to be used directly. It is exported primarily to allow + * application code to provide a replacement percent-encoding implementation if + * necessary by assigning `querystring.escape` to an alternative function. + * @since v0.1.25 + */ + function escape(str: string): string; + /** + * The `querystring.unescape()` method performs decoding of URL percent-encoded + * characters on the given `str`. + * + * The `querystring.unescape()` method is used by `querystring.parse()` and is + * generally not expected to be used directly. It is exported primarily to allow + * application code to provide a replacement decoding implementation if + * necessary by assigning `querystring.unescape` to an alternative function. + * + * By default, the `querystring.unescape()` method will attempt to use the + * JavaScript built-in `decodeURIComponent()` method to decode. If that fails, + * a safer equivalent that does not throw on malformed URLs will be used. + * @since v0.1.25 + */ + function unescape(str: string): string; +} +declare module 'node:querystring' { + export * from 'querystring'; +} diff --git a/node_modules/@types/node/readline.d.ts b/node_modules/@types/node/readline.d.ts new file mode 100755 index 0000000..6ab64ac --- /dev/null +++ b/node_modules/@types/node/readline.d.ts @@ -0,0 +1,653 @@ +/** + * The `readline` module provides an interface for reading data from a `Readable` stream (such as `process.stdin`) one line at a time. + * + * To use the promise-based APIs: + * + * ```js + * import * as readline from 'node:readline/promises'; + * ``` + * + * To use the callback and sync APIs: + * + * ```js + * import * as readline from 'node:readline'; + * ``` + * + * The following simple example illustrates the basic use of the `readline` module. + * + * ```js + * import * as readline from 'node:readline/promises'; + * import { stdin as input, stdout as output } from 'node:process'; + * + * const rl = readline.createInterface({ input, output }); + * + * const answer = await rl.question('What do you think of Node.js? '); + * + * console.log(`Thank you for your valuable feedback: ${answer}`); + * + * rl.close(); + * ``` + * + * Once this code is invoked, the Node.js application will not terminate until the`readline.Interface` is closed because the interface waits for data to be + * received on the `input` stream. 
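+ *
+ * The equivalent with the callback API (a minimal sketch):
+ *
+ * ```js
+ * const readline = require('readline');
+ * const { stdin: input, stdout: output } = require('process');
+ *
+ * const rl = readline.createInterface({ input, output });
+ *
+ * rl.question('What do you think of Node.js? ', (answer) => {
+ *   console.log(`Thank you for your valuable feedback: ${answer}`);
+ *   rl.close();
+ * });
+ * ```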
+ * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/readline.js) + */ +declare module 'readline' { + import { Abortable, EventEmitter } from 'node:events'; + import * as promises from 'node:readline/promises'; + + export { promises }; + export interface Key { + sequence?: string | undefined; + name?: string | undefined; + ctrl?: boolean | undefined; + meta?: boolean | undefined; + shift?: boolean | undefined; + } + /** + * Instances of the `readline.Interface` class are constructed using the`readline.createInterface()` method. Every instance is associated with a + * single `input` `Readable` stream and a single `output` `Writable` stream. + * The `output` stream is used to print prompts for user input that arrives on, + * and is read from, the `input` stream. + * @since v0.1.104 + */ + export class Interface extends EventEmitter { + readonly terminal: boolean; + /** + * The current input data being processed by node. + * + * This can be used when collecting input from a TTY stream to retrieve the + * current value that has been processed thus far, prior to the `line` event + * being emitted. Once the `line` event has been emitted, this property will + * be an empty string. + * + * Be aware that modifying the value during the instance runtime may have + * unintended consequences if `rl.cursor` is not also controlled. + * + * **If not using a TTY stream for input, use the `'line'` event.** + * + * One possible use case would be as follows: + * + * ```js + * const values = ['lorem ipsum', 'dolor sit amet']; + * const rl = readline.createInterface(process.stdin); + * const showResults = debounce(() => { + * console.log( + * '\n', + * values.filter((val) => val.startsWith(rl.line)).join(' ') + * ); + * }, 300); + * process.stdin.on('keypress', (c, k) => { + * showResults(); + * }); + * ``` + * @since v0.1.98 + */ + readonly line: string; + /** + * The cursor position relative to `rl.line`. + * + * This will track where the current cursor lands in the input string, when + * reading input from a TTY stream. The position of cursor determines the + * portion of the input string that will be modified as input is processed, + * as well as the column where the terminal caret will be rendered. + * @since v0.1.98 + */ + readonly cursor: number; + /** + * NOTE: According to the documentation: + * + * > Instances of the `readline.Interface` class are constructed using the + * > `readline.createInterface()` method. + * + * @see https://nodejs.org/dist/latest-v10.x/docs/api/readline.html#readline_class_interface + */ + protected constructor(input: NodeJS.ReadableStream, output?: NodeJS.WritableStream, completer?: Completer | AsyncCompleter, terminal?: boolean); + /** + * NOTE: According to the documentation: + * + * > Instances of the `readline.Interface` class are constructed using the + * > `readline.createInterface()` method. + * + * @see https://nodejs.org/dist/latest-v10.x/docs/api/readline.html#readline_class_interface + */ + protected constructor(options: ReadLineOptions); + /** + * The `rl.getPrompt()` method returns the current prompt used by `rl.prompt()`. + * @since v15.3.0 + * @return the current prompt string + */ + getPrompt(): string; + /** + * The `rl.setPrompt()` method sets the prompt that will be written to `output`whenever `rl.prompt()` is called. 
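+ *
+ * For example (sketch):
+ *
+ * ```js
+ * rl.setPrompt('query> ');
+ * rl.prompt(); // Writes 'query> ' to `output` and waits for input.
+ * ```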
+ * @since v0.1.98 + */ + setPrompt(prompt: string): void; + /** + * The `rl.prompt()` method writes the `readline.Interface` instances configured`prompt` to a new line in `output` in order to provide a user with a new + * location at which to provide input. + * + * When called, `rl.prompt()` will resume the `input` stream if it has been + * paused. + * + * If the `readline.Interface` was created with `output` set to `null` or`undefined` the prompt is not written. + * @since v0.1.98 + * @param preserveCursor If `true`, prevents the cursor placement from being reset to `0`. + */ + prompt(preserveCursor?: boolean): void; + /** + * The `rl.question()` method displays the `query` by writing it to the `output`, + * waits for user input to be provided on `input`, then invokes the `callback`function passing the provided input as the first argument. + * + * When called, `rl.question()` will resume the `input` stream if it has been + * paused. + * + * If the `readline.Interface` was created with `output` set to `null` or`undefined` the `query` is not written. + * + * The `callback` function passed to `rl.question()` does not follow the typical + * pattern of accepting an `Error` object or `null` as the first argument. + * The `callback` is called with the provided answer as the only argument. + * + * Example usage: + * + * ```js + * rl.question('What is your favorite food? ', (answer) => { + * console.log(`Oh, so your favorite food is ${answer}`); + * }); + * ``` + * + * Using an `AbortController` to cancel a question. + * + * ```js + * const ac = new AbortController(); + * const signal = ac.signal; + * + * rl.question('What is your favorite food? ', { signal }, (answer) => { + * console.log(`Oh, so your favorite food is ${answer}`); + * }); + * + * signal.addEventListener('abort', () => { + * console.log('The food question timed out'); + * }, { once: true }); + * + * setTimeout(() => ac.abort(), 10000); + * ``` + * + * If this method is invoked as it's util.promisify()ed version, it returns a + * Promise that fulfills with the answer. If the question is canceled using + * an `AbortController` it will reject with an `AbortError`. + * + * ```js + * const util = require('util'); + * const question = util.promisify(rl.question).bind(rl); + * + * async function questionExample() { + * try { + * const answer = await question('What is you favorite food? '); + * console.log(`Oh, so your favorite food is ${answer}`); + * } catch (err) { + * console.error('Question rejected', err); + * } + * } + * questionExample(); + * ``` + * @since v0.3.3 + * @param query A statement or query to write to `output`, prepended to the prompt. + * @param callback A callback function that is invoked with the user's input in response to the `query`. + */ + question(query: string, callback: (answer: string) => void): void; + question(query: string, options: Abortable, callback: (answer: string) => void): void; + /** + * The `rl.pause()` method pauses the `input` stream, allowing it to be resumed + * later if necessary. + * + * Calling `rl.pause()` does not immediately pause other events (including`'line'`) from being emitted by the `readline.Interface` instance. + * @since v0.3.4 + */ + pause(): this; + /** + * The `rl.resume()` method resumes the `input` stream if it has been paused. + * @since v0.3.4 + */ + resume(): this; + /** + * The `rl.close()` method closes the `readline.Interface` instance and + * relinquishes control over the `input` and `output` streams. When called, + * the `'close'` event will be emitted. 
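+ *
+ * For example (sketch), closing the interface after the first line of input:
+ *
+ * ```js
+ * rl.once('line', (line) => {
+ *   console.log(`Received: ${line}`);
+ *   rl.close(); // Emits 'close' and relinquishes the input/output streams.
+ * });
+ * ```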
+ * + * Calling `rl.close()` does not immediately stop other events (including `'line'`) + * from being emitted by the `readline.Interface` instance. + * @since v0.1.98 + */ + close(): void; + /** + * The `rl.write()` method will write either `data` or a key sequence identified + * by `key` to the `output`. The `key` argument is supported only if `output` is + * a `TTY` text terminal. See `TTY keybindings` for a list of key + * combinations. + * + * If `key` is specified, `data` is ignored. + * + * When called, `rl.write()` will resume the `input` stream if it has been + * paused. + * + * If the `readline.Interface` was created with `output` set to `null` or`undefined` the `data` and `key` are not written. + * + * ```js + * rl.write('Delete this!'); + * // Simulate Ctrl+U to delete the line written previously + * rl.write(null, { ctrl: true, name: 'u' }); + * ``` + * + * The `rl.write()` method will write the data to the `readline` `Interface`'s`input`_as if it were provided by the user_. + * @since v0.1.98 + */ + write(data: string | Buffer, key?: Key): void; + write(data: undefined | null | string | Buffer, key: Key): void; + /** + * Returns the real position of the cursor in relation to the input + * prompt + string. Long input (wrapping) strings, as well as multiple + * line prompts are included in the calculations. + * @since v13.5.0, v12.16.0 + */ + getCursorPos(): CursorPos; + /** + * events.EventEmitter + * 1. close + * 2. line + * 3. pause + * 4. resume + * 5. SIGCONT + * 6. SIGINT + * 7. SIGTSTP + * 8. history + */ + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: 'close', listener: () => void): this; + addListener(event: 'line', listener: (input: string) => void): this; + addListener(event: 'pause', listener: () => void): this; + addListener(event: 'resume', listener: () => void): this; + addListener(event: 'SIGCONT', listener: () => void): this; + addListener(event: 'SIGINT', listener: () => void): this; + addListener(event: 'SIGTSTP', listener: () => void): this; + addListener(event: 'history', listener: (history: string[]) => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: 'close'): boolean; + emit(event: 'line', input: string): boolean; + emit(event: 'pause'): boolean; + emit(event: 'resume'): boolean; + emit(event: 'SIGCONT'): boolean; + emit(event: 'SIGINT'): boolean; + emit(event: 'SIGTSTP'): boolean; + emit(event: 'history', history: string[]): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: 'close', listener: () => void): this; + on(event: 'line', listener: (input: string) => void): this; + on(event: 'pause', listener: () => void): this; + on(event: 'resume', listener: () => void): this; + on(event: 'SIGCONT', listener: () => void): this; + on(event: 'SIGINT', listener: () => void): this; + on(event: 'SIGTSTP', listener: () => void): this; + on(event: 'history', listener: (history: string[]) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: 'close', listener: () => void): this; + once(event: 'line', listener: (input: string) => void): this; + once(event: 'pause', listener: () => void): this; + once(event: 'resume', listener: () => void): this; + once(event: 'SIGCONT', listener: () => void): this; + once(event: 'SIGINT', listener: () => void): this; + once(event: 'SIGTSTP', listener: () => void): this; + once(event: 'history', listener: (history: string[]) => void): this; + prependListener(event: string, listener: 
(...args: any[]) => void): this; + prependListener(event: 'close', listener: () => void): this; + prependListener(event: 'line', listener: (input: string) => void): this; + prependListener(event: 'pause', listener: () => void): this; + prependListener(event: 'resume', listener: () => void): this; + prependListener(event: 'SIGCONT', listener: () => void): this; + prependListener(event: 'SIGINT', listener: () => void): this; + prependListener(event: 'SIGTSTP', listener: () => void): this; + prependListener(event: 'history', listener: (history: string[]) => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: 'close', listener: () => void): this; + prependOnceListener(event: 'line', listener: (input: string) => void): this; + prependOnceListener(event: 'pause', listener: () => void): this; + prependOnceListener(event: 'resume', listener: () => void): this; + prependOnceListener(event: 'SIGCONT', listener: () => void): this; + prependOnceListener(event: 'SIGINT', listener: () => void): this; + prependOnceListener(event: 'SIGTSTP', listener: () => void): this; + prependOnceListener(event: 'history', listener: (history: string[]) => void): this; + [Symbol.asyncIterator](): AsyncIterableIterator; + } + export type ReadLine = Interface; // type forwarded for backwards compatibility + export type Completer = (line: string) => CompleterResult; + export type AsyncCompleter = (line: string, callback: (err?: null | Error, result?: CompleterResult) => void) => void; + export type CompleterResult = [string[], string]; + export interface ReadLineOptions { + input: NodeJS.ReadableStream; + output?: NodeJS.WritableStream | undefined; + completer?: Completer | AsyncCompleter | undefined; + terminal?: boolean | undefined; + /** + * Initial list of history lines. This option makes sense + * only if `terminal` is set to `true` by the user or by an internal `output` + * check, otherwise the history caching mechanism is not initialized at all. + * @default [] + */ + history?: string[] | undefined; + historySize?: number | undefined; + prompt?: string | undefined; + crlfDelay?: number | undefined; + /** + * If `true`, when a new input line added + * to the history list duplicates an older one, this removes the older line + * from the list. + * @default false + */ + removeHistoryDuplicates?: boolean | undefined; + escapeCodeTimeout?: number | undefined; + tabSize?: number | undefined; + } + /** + * The `readline.createInterface()` method creates a new `readline.Interface`instance. + * + * ```js + * const readline = require('readline'); + * const rl = readline.createInterface({ + * input: process.stdin, + * output: process.stdout + * }); + * ``` + * + * Once the `readline.Interface` instance is created, the most common case is to + * listen for the `'line'` event: + * + * ```js + * rl.on('line', (line) => { + * console.log(`Received: ${line}`); + * }); + * ``` + * + * If `terminal` is `true` for this instance then the `output` stream will get + * the best compatibility if it defines an `output.columns` property and emits + * a `'resize'` event on the `output` if or when the columns ever change + * (`process.stdout` does this automatically when it is a TTY). + * + * When creating a `readline.Interface` using `stdin` as input, the program + * will not terminate until it receives `EOF` (Ctrl+D on + * Linux/macOS, Ctrl+Z followed by Return on + * Windows). 
+ * If you want your application to exit without waiting for user input, you can `unref()` the standard input stream: + * + * ```js + * process.stdin.unref(); + * ``` + * @since v0.1.98 + */ + export function createInterface(input: NodeJS.ReadableStream, output?: NodeJS.WritableStream, completer?: Completer | AsyncCompleter, terminal?: boolean): Interface; + export function createInterface(options: ReadLineOptions): Interface; + /** + * The `readline.emitKeypressEvents()` method causes the given `Readable` stream to begin emitting `'keypress'` events corresponding to received input. + * + * Optionally, `interface` specifies a `readline.Interface` instance for which + * autocompletion is disabled when copy-pasted input is detected. + * + * If the `stream` is a `TTY`, then it must be in raw mode. + * + * This is automatically called by any readline instance on its `input` if the`input` is a terminal. Closing the `readline` instance does not stop + * the `input` from emitting `'keypress'` events. + * + * ```js + * readline.emitKeypressEvents(process.stdin); + * if (process.stdin.isTTY) + * process.stdin.setRawMode(true); + * ``` + * + * ## Example: Tiny CLI + * + * The following example illustrates the use of `readline.Interface` class to + * implement a small command-line interface: + * + * ```js + * const readline = require('readline'); + * const rl = readline.createInterface({ + * input: process.stdin, + * output: process.stdout, + * prompt: 'OHAI> ' + * }); + * + * rl.prompt(); + * + * rl.on('line', (line) => { + * switch (line.trim()) { + * case 'hello': + * console.log('world!'); + * break; + * default: + * console.log(`Say what? I might have heard '${line.trim()}'`); + * break; + * } + * rl.prompt(); + * }).on('close', () => { + * console.log('Have a great day!'); + * process.exit(0); + * }); + * ``` + * + * ## Example: Read file stream line-by-Line + * + * A common use case for `readline` is to consume an input file one line at a + * time. The easiest way to do so is leveraging the `fs.ReadStream` API as + * well as a `for await...of` loop: + * + * ```js + * const fs = require('fs'); + * const readline = require('readline'); + * + * async function processLineByLine() { + * const fileStream = fs.createReadStream('input.txt'); + * + * const rl = readline.createInterface({ + * input: fileStream, + * crlfDelay: Infinity + * }); + * // Note: we use the crlfDelay option to recognize all instances of CR LF + * // ('\r\n') in input.txt as a single line break. + * + * for await (const line of rl) { + * // Each line in input.txt will be successively available here as `line`. + * console.log(`Line from file: ${line}`); + * } + * } + * + * processLineByLine(); + * ``` + * + * Alternatively, one could use the `'line'` event: + * + * ```js + * const fs = require('fs'); + * const readline = require('readline'); + * + * const rl = readline.createInterface({ + * input: fs.createReadStream('sample.txt'), + * crlfDelay: Infinity + * }); + * + * rl.on('line', (line) => { + * console.log(`Line from file: ${line}`); + * }); + * ``` + * + * Currently, `for await...of` loop can be a bit slower. 
If `async` / `await`flow and speed are both essential, a mixed approach can be applied: + * + * ```js + * const { once } = require('events'); + * const { createReadStream } = require('fs'); + * const { createInterface } = require('readline'); + * + * (async function processLineByLine() { + * try { + * const rl = createInterface({ + * input: createReadStream('big-file.txt'), + * crlfDelay: Infinity + * }); + * + * rl.on('line', (line) => { + * // Process the line. + * }); + * + * await once(rl, 'close'); + * + * console.log('File processed.'); + * } catch (err) { + * console.error(err); + * } + * })(); + * ``` + * @since v0.7.7 + */ + export function emitKeypressEvents(stream: NodeJS.ReadableStream, readlineInterface?: Interface): void; + export type Direction = -1 | 0 | 1; + export interface CursorPos { + rows: number; + cols: number; + } + /** + * The `readline.clearLine()` method clears current line of given `TTY` stream + * in a specified direction identified by `dir`. + * @since v0.7.7 + * @param callback Invoked once the operation completes. + * @return `false` if `stream` wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`. + */ + export function clearLine(stream: NodeJS.WritableStream, dir: Direction, callback?: () => void): boolean; + /** + * The `readline.clearScreenDown()` method clears the given `TTY` stream from + * the current position of the cursor down. + * @since v0.7.7 + * @param callback Invoked once the operation completes. + * @return `false` if `stream` wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`. + */ + export function clearScreenDown(stream: NodeJS.WritableStream, callback?: () => void): boolean; + /** + * The `readline.cursorTo()` method moves cursor to the specified position in a + * given `TTY` `stream`. + * @since v0.7.7 + * @param callback Invoked once the operation completes. + * @return `false` if `stream` wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`. + */ + export function cursorTo(stream: NodeJS.WritableStream, x: number, y?: number, callback?: () => void): boolean; + /** + * The `readline.moveCursor()` method moves the cursor _relative_ to its current + * position in a given `TTY` `stream`. + * + * ## Example: Tiny CLI + * + * The following example illustrates the use of `readline.Interface` class to + * implement a small command-line interface: + * + * ```js + * const readline = require('readline'); + * const rl = readline.createInterface({ + * input: process.stdin, + * output: process.stdout, + * prompt: 'OHAI> ' + * }); + * + * rl.prompt(); + * + * rl.on('line', (line) => { + * switch (line.trim()) { + * case 'hello': + * console.log('world!'); + * break; + * default: + * console.log(`Say what? I might have heard '${line.trim()}'`); + * break; + * } + * rl.prompt(); + * }).on('close', () => { + * console.log('Have a great day!'); + * process.exit(0); + * }); + * ``` + * + * ## Example: Read file stream line-by-Line + * + * A common use case for `readline` is to consume an input file one line at a + * time. 
The easiest way to do so is leveraging the `fs.ReadStream` API as + * well as a `for await...of` loop: + * + * ```js + * const fs = require('fs'); + * const readline = require('readline'); + * + * async function processLineByLine() { + * const fileStream = fs.createReadStream('input.txt'); + * + * const rl = readline.createInterface({ + * input: fileStream, + * crlfDelay: Infinity + * }); + * // Note: we use the crlfDelay option to recognize all instances of CR LF + * // ('\r\n') in input.txt as a single line break. + * + * for await (const line of rl) { + * // Each line in input.txt will be successively available here as `line`. + * console.log(`Line from file: ${line}`); + * } + * } + * + * processLineByLine(); + * ``` + * + * Alternatively, one could use the `'line'` event: + * + * ```js + * const fs = require('fs'); + * const readline = require('readline'); + * + * const rl = readline.createInterface({ + * input: fs.createReadStream('sample.txt'), + * crlfDelay: Infinity + * }); + * + * rl.on('line', (line) => { + * console.log(`Line from file: ${line}`); + * }); + * ``` + * + * Currently, `for await...of` loop can be a bit slower. If `async` / `await`flow and speed are both essential, a mixed approach can be applied: + * + * ```js + * const { once } = require('events'); + * const { createReadStream } = require('fs'); + * const { createInterface } = require('readline'); + * + * (async function processLineByLine() { + * try { + * const rl = createInterface({ + * input: createReadStream('big-file.txt'), + * crlfDelay: Infinity + * }); + * + * rl.on('line', (line) => { + * // Process the line. + * }); + * + * await once(rl, 'close'); + * + * console.log('File processed.'); + * } catch (err) { + * console.error(err); + * } + * })(); + * ``` + * @since v0.7.7 + * @param callback Invoked once the operation completes. + * @return `false` if `stream` wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`. + */ + export function moveCursor(stream: NodeJS.WritableStream, dx: number, dy: number, callback?: () => void): boolean; +} +declare module 'node:readline' { + export * from 'readline'; +} diff --git a/node_modules/@types/node/readline/promises.d.ts b/node_modules/@types/node/readline/promises.d.ts new file mode 100755 index 0000000..8f9f06f --- /dev/null +++ b/node_modules/@types/node/readline/promises.d.ts @@ -0,0 +1,143 @@ +/** + * The `readline/promise` module provides an API for reading lines of input from a Readable stream one line at a time. + * + * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/readline/promises.js) + * @since v17.0.0 + */ +declare module 'readline/promises' { + import { Interface as _Interface, ReadLineOptions, Completer, AsyncCompleter, Direction } from 'node:readline'; + import { Abortable } from 'node:events'; + + class Interface extends _Interface { + /** + * The rl.question() method displays the query by writing it to the output, waits for user input to be provided on input, + * then invokes the callback function passing the provided input as the first argument. + * + * When called, rl.question() will resume the input stream if it has been paused. + * + * If the readlinePromises.Interface was created with output set to null or undefined the query is not written. + * + * If the question is called after rl.close(), it returns a rejected promise. + * + * Example usage: + * + * ```js + * const answer = await rl.question('What is your favorite food? 
'); + * console.log(`Oh, so your favorite food is ${answer}`); + * ``` + * + * Using an AbortSignal to cancel a question. + * + * ```js + * const signal = AbortSignal.timeout(10_000); + * + * signal.addEventListener('abort', () => { + * console.log('The food question timed out'); + * }, { once: true }); + * + * const answer = await rl.question('What is your favorite food? ', { signal }); + * console.log(`Oh, so your favorite food is ${answer}`); + * ``` + * + * @since v17.0.0 + * @param query A statement or query to write to output, prepended to the prompt. + */ + question(query: string): Promise; + question(query: string, options: Abortable): Promise; + } + + class Readline { + /** + * @param stream A TTY stream. + */ + constructor(stream: NodeJS.WritableStream, options?: { autoCommit?: boolean }); + /** + * The `rl.clearLine()` method adds to the internal list of pending action an action that clears current line of the associated `stream` in a specified direction identified by `dir`. + * Call `rl.commit()` to see the effect of this method, unless `autoCommit: true` was passed to the constructor. + */ + clearLine(dir: Direction): this; + /** + * The `rl.clearScreenDown()` method adds to the internal list of pending action an action that clears the associated `stream` from the current position of the cursor down. + * Call `rl.commit()` to see the effect of this method, unless `autoCommit: true` was passed to the constructor. + */ + clearScreenDown(): this; + /** + * The `rl.commit()` method sends all the pending actions to the associated `stream` and clears the internal list of pending actions. + */ + commit(): Promise; + /** + * The `rl.cursorTo()` method adds to the internal list of pending action an action that moves cursor to the specified position in the associated `stream`. + * Call `rl.commit()` to see the effect of this method, unless `autoCommit: true` was passed to the constructor. + */ + cursorTo(x: number, y?: number): this; + /** + * The `rl.moveCursor()` method adds to the internal list of pending action an action that moves the cursor relative to its current position in the associated `stream`. + * Call `rl.commit()` to see the effect of this method, unless autoCommit: true was passed to the constructor. + */ + moveCursor(dx: number, dy: number): this; + /** + * The `rl.rollback()` method clears the internal list of pending actions without sending it to the associated `stream`. + */ + rollback(): this; + } + + /** + * The `readlinePromises.createInterface()` method creates a new `readlinePromises.Interface` instance. + * + * ```js + * const readlinePromises = require('node:readline/promises'); + * const rl = readlinePromises.createInterface({ + * input: process.stdin, + * output: process.stdout + * }); + * ``` + * + * Once the `readlinePromises.Interface` instance is created, the most common case is to listen for the `'line'` event: + * + * ```js + * rl.on('line', (line) => { + * console.log(`Received: ${line}`); + * }); + * ``` + * + * If `terminal` is `true` for this instance then the `output` stream will get the best compatibility if it defines an `output.columns` property, + * and emits a `'resize'` event on the `output`, if or when the columns ever change (`process.stdout` does this automatically when it is a TTY). + * + * ## Use of the `completer` function + * + * The `completer` function takes the current line entered by the user as an argument, and returns an `Array` with 2 entries: + * + * - An Array with matching entries for the completion. 
+ * - The substring that was used for the matching. + * + * For instance: `[[substr1, substr2, ...], originalsubstring]`. + * + * ```js + * function completer(line) { + * const completions = '.help .error .exit .quit .q'.split(' '); + * const hits = completions.filter((c) => c.startsWith(line)); + * // Show all completions if none found + * return [hits.length ? hits : completions, line]; + * } + * ``` + * + * The `completer` function can also returns a `Promise`, or be asynchronous: + * + * ```js + * async function completer(linePartial) { + * await someAsyncWork(); + * return [['123'], linePartial]; + * } + * ``` + */ + function createInterface( + input: NodeJS.ReadableStream, + output?: NodeJS.WritableStream, + completer?: Completer | AsyncCompleter, + terminal?: boolean, + ): Interface; + function createInterface(options: ReadLineOptions): Interface; +} +declare module 'node:readline/promises' { + export * from 'readline/promises'; +} diff --git a/node_modules/@types/node/repl.d.ts b/node_modules/@types/node/repl.d.ts new file mode 100755 index 0000000..be42ccc --- /dev/null +++ b/node_modules/@types/node/repl.d.ts @@ -0,0 +1,424 @@ +/** + * The `repl` module provides a Read-Eval-Print-Loop (REPL) implementation that + * is available both as a standalone program or includible in other applications. + * It can be accessed using: + * + * ```js + * const repl = require('repl'); + * ``` + * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/repl.js) + */ +declare module 'repl' { + import { Interface, Completer, AsyncCompleter } from 'node:readline'; + import { Context } from 'node:vm'; + import { InspectOptions } from 'node:util'; + interface ReplOptions { + /** + * The input prompt to display. + * @default "> " + */ + prompt?: string | undefined; + /** + * The `Readable` stream from which REPL input will be read. + * @default process.stdin + */ + input?: NodeJS.ReadableStream | undefined; + /** + * The `Writable` stream to which REPL output will be written. + * @default process.stdout + */ + output?: NodeJS.WritableStream | undefined; + /** + * If `true`, specifies that the output should be treated as a TTY terminal, and have + * ANSI/VT100 escape codes written to it. + * Default: checking the value of the `isTTY` property on the output stream upon + * instantiation. + */ + terminal?: boolean | undefined; + /** + * The function to be used when evaluating each given line of input. + * Default: an async wrapper for the JavaScript `eval()` function. An `eval` function can + * error with `repl.Recoverable` to indicate the input was incomplete and prompt for + * additional lines. + * + * @see https://nodejs.org/dist/latest-v10.x/docs/api/repl.html#repl_default_evaluation + * @see https://nodejs.org/dist/latest-v10.x/docs/api/repl.html#repl_custom_evaluation_functions + */ + eval?: REPLEval | undefined; + /** + * Defines if the repl prints output previews or not. + * @default `true` Always `false` in case `terminal` is falsy. + */ + preview?: boolean | undefined; + /** + * If `true`, specifies that the default `writer` function should include ANSI color + * styling to REPL output. If a custom `writer` function is provided then this has no + * effect. + * Default: the REPL instance's `terminal` value. + */ + useColors?: boolean | undefined; + /** + * If `true`, specifies that the default evaluation function will use the JavaScript + * `global` as the context as opposed to creating a new separate context for the REPL + * instance. The node CLI REPL sets this value to `true`. 
+ * Default: `false`. + */ + useGlobal?: boolean | undefined; + /** + * If `true`, specifies that the default writer will not output the return value of a + * command if it evaluates to `undefined`. + * Default: `false`. + */ + ignoreUndefined?: boolean | undefined; + /** + * The function to invoke to format the output of each command before writing to `output`. + * Default: a wrapper for `util.inspect`. + * + * @see https://nodejs.org/dist/latest-v10.x/docs/api/repl.html#repl_customizing_repl_output + */ + writer?: REPLWriter | undefined; + /** + * An optional function used for custom Tab auto completion. + * + * @see https://nodejs.org/dist/latest-v11.x/docs/api/readline.html#readline_use_of_the_completer_function + */ + completer?: Completer | AsyncCompleter | undefined; + /** + * A flag that specifies whether the default evaluator executes all JavaScript commands in + * strict mode or default (sloppy) mode. + * Accepted values are: + * - `repl.REPL_MODE_SLOPPY` - evaluates expressions in sloppy mode. + * - `repl.REPL_MODE_STRICT` - evaluates expressions in strict mode. This is equivalent to + * prefacing every repl statement with `'use strict'`. + */ + replMode?: typeof REPL_MODE_SLOPPY | typeof REPL_MODE_STRICT | undefined; + /** + * Stop evaluating the current piece of code when `SIGINT` is received, i.e. `Ctrl+C` is + * pressed. This cannot be used together with a custom `eval` function. + * Default: `false`. + */ + breakEvalOnSigint?: boolean | undefined; + } + type REPLEval = (this: REPLServer, evalCmd: string, context: Context, file: string, cb: (err: Error | null, result: any) => void) => void; + type REPLWriter = (this: REPLServer, obj: any) => string; + /** + * This is the default "writer" value, if none is passed in the REPL options, + * and it can be overridden by custom print functions. + */ + const writer: REPLWriter & { + options: InspectOptions; + }; + type REPLCommandAction = (this: REPLServer, text: string) => void; + interface REPLCommand { + /** + * Help text to be displayed when `.help` is entered. + */ + help?: string | undefined; + /** + * The function to execute, optionally accepting a single string argument. + */ + action: REPLCommandAction; + } + /** + * Instances of `repl.REPLServer` are created using the {@link start} method + * or directly using the JavaScript `new` keyword. + * + * ```js + * const repl = require('repl'); + * + * const options = { useColors: true }; + * + * const firstInstance = repl.start(options); + * const secondInstance = new repl.REPLServer(options); + * ``` + * @since v0.1.91 + */ + class REPLServer extends Interface { + /** + * The `vm.Context` provided to the `eval` function to be used for JavaScript + * evaluation. + */ + readonly context: Context; + /** + * @deprecated since v14.3.0 - Use `input` instead. + */ + readonly inputStream: NodeJS.ReadableStream; + /** + * @deprecated since v14.3.0 - Use `output` instead. + */ + readonly outputStream: NodeJS.WritableStream; + /** + * The `Readable` stream from which REPL input will be read. + */ + readonly input: NodeJS.ReadableStream; + /** + * The `Writable` stream to which REPL output will be written. + */ + readonly output: NodeJS.WritableStream; + /** + * The commands registered via `replServer.defineCommand()`. + */ + readonly commands: NodeJS.ReadOnlyDict; + /** + * A value indicating whether the REPL is currently in "editor mode". 
+ * + * @see https://nodejs.org/dist/latest-v10.x/docs/api/repl.html#repl_commands_and_special_keys + */ + readonly editorMode: boolean; + /** + * A value indicating whether the `_` variable has been assigned. + * + * @see https://nodejs.org/dist/latest-v10.x/docs/api/repl.html#repl_assignment_of_the_underscore_variable + */ + readonly underscoreAssigned: boolean; + /** + * The last evaluation result from the REPL (assigned to the `_` variable inside of the REPL). + * + * @see https://nodejs.org/dist/latest-v10.x/docs/api/repl.html#repl_assignment_of_the_underscore_variable + */ + readonly last: any; + /** + * A value indicating whether the `_error` variable has been assigned. + * + * @since v9.8.0 + * @see https://nodejs.org/dist/latest-v10.x/docs/api/repl.html#repl_assignment_of_the_underscore_variable + */ + readonly underscoreErrAssigned: boolean; + /** + * The last error raised inside the REPL (assigned to the `_error` variable inside of the REPL). + * + * @since v9.8.0 + * @see https://nodejs.org/dist/latest-v10.x/docs/api/repl.html#repl_assignment_of_the_underscore_variable + */ + readonly lastError: any; + /** + * Specified in the REPL options, this is the function to be used when evaluating each + * given line of input. If not specified in the REPL options, this is an async wrapper + * for the JavaScript `eval()` function. + */ + readonly eval: REPLEval; + /** + * Specified in the REPL options, this is a value indicating whether the default + * `writer` function should include ANSI color styling to REPL output. + */ + readonly useColors: boolean; + /** + * Specified in the REPL options, this is a value indicating whether the default `eval` + * function will use the JavaScript `global` as the context as opposed to creating a new + * separate context for the REPL instance. + */ + readonly useGlobal: boolean; + /** + * Specified in the REPL options, this is a value indicating whether the default `writer` + * function should output the result of a command if it evaluates to `undefined`. + */ + readonly ignoreUndefined: boolean; + /** + * Specified in the REPL options, this is the function to invoke to format the output of + * each command before writing to `outputStream`. If not specified in the REPL options, + * this will be a wrapper for `util.inspect`. + */ + readonly writer: REPLWriter; + /** + * Specified in the REPL options, this is the function to use for custom Tab auto-completion. + */ + readonly completer: Completer | AsyncCompleter; + /** + * Specified in the REPL options, this is a flag that specifies whether the default `eval` + * function should execute all JavaScript commands in strict mode or default (sloppy) mode. + * Possible values are: + * - `repl.REPL_MODE_SLOPPY` - evaluates expressions in sloppy mode. + * - `repl.REPL_MODE_STRICT` - evaluates expressions in strict mode. This is equivalent to + * prefacing every repl statement with `'use strict'`. + */ + readonly replMode: typeof REPL_MODE_SLOPPY | typeof REPL_MODE_STRICT; + /** + * NOTE: According to the documentation: + * + * > Instances of `repl.REPLServer` are created using the `repl.start()` method and + * > _should not_ be created directly using the JavaScript `new` keyword. + * + * `REPLServer` cannot be subclassed due to implementation specifics in NodeJS. + * + * @see https://nodejs.org/dist/latest-v10.x/docs/api/repl.html#repl_class_replserver + */ + private constructor(); + /** + * The `replServer.defineCommand()` method is used to add new `.`\-prefixed commands + * to the REPL instance. 
Such commands are invoked by typing a `.` followed by the`keyword`. The `cmd` is either a `Function` or an `Object` with the following + * properties: + * + * The following example shows two new commands added to the REPL instance: + * + * ```js + * const repl = require('repl'); + * + * const replServer = repl.start({ prompt: '> ' }); + * replServer.defineCommand('sayhello', { + * help: 'Say hello', + * action(name) { + * this.clearBufferedCommand(); + * console.log(`Hello, ${name}!`); + * this.displayPrompt(); + * } + * }); + * replServer.defineCommand('saybye', function saybye() { + * console.log('Goodbye!'); + * this.close(); + * }); + * ``` + * + * The new commands can then be used from within the REPL instance: + * + * ```console + * > .sayhello Node.js User + * Hello, Node.js User! + * > .saybye + * Goodbye! + * ``` + * @since v0.3.0 + * @param keyword The command keyword (_without_ a leading `.` character). + * @param cmd The function to invoke when the command is processed. + */ + defineCommand(keyword: string, cmd: REPLCommandAction | REPLCommand): void; + /** + * The `replServer.displayPrompt()` method readies the REPL instance for input + * from the user, printing the configured `prompt` to a new line in the `output`and resuming the `input` to accept new input. + * + * When multi-line input is being entered, an ellipsis is printed rather than the + * 'prompt'. + * + * When `preserveCursor` is `true`, the cursor placement will not be reset to `0`. + * + * The `replServer.displayPrompt` method is primarily intended to be called from + * within the action function for commands registered using the`replServer.defineCommand()` method. + * @since v0.1.91 + */ + displayPrompt(preserveCursor?: boolean): void; + /** + * The `replServer.clearBufferedCommand()` method clears any command that has been + * buffered but not yet executed. This method is primarily intended to be + * called from within the action function for commands registered using the`replServer.defineCommand()` method. + * @since v9.0.0 + */ + clearBufferedCommand(): void; + /** + * Initializes a history log file for the REPL instance. When executing the + * Node.js binary and using the command-line REPL, a history file is initialized + * by default. However, this is not the case when creating a REPL + * programmatically. Use this method to initialize a history log file when working + * with REPL instances programmatically. + * @since v11.10.0 + * @param historyPath the path to the history file + * @param callback called when history writes are ready or upon error + */ + setupHistory(path: string, callback: (err: Error | null, repl: this) => void): void; + /** + * events.EventEmitter + * 1. close - inherited from `readline.Interface` + * 2. line - inherited from `readline.Interface` + * 3. pause - inherited from `readline.Interface` + * 4. resume - inherited from `readline.Interface` + * 5. SIGCONT - inherited from `readline.Interface` + * 6. SIGINT - inherited from `readline.Interface` + * 7. SIGTSTP - inherited from `readline.Interface` + * 8. exit + * 9. 
reset + */ + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: 'close', listener: () => void): this; + addListener(event: 'line', listener: (input: string) => void): this; + addListener(event: 'pause', listener: () => void): this; + addListener(event: 'resume', listener: () => void): this; + addListener(event: 'SIGCONT', listener: () => void): this; + addListener(event: 'SIGINT', listener: () => void): this; + addListener(event: 'SIGTSTP', listener: () => void): this; + addListener(event: 'exit', listener: () => void): this; + addListener(event: 'reset', listener: (context: Context) => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: 'close'): boolean; + emit(event: 'line', input: string): boolean; + emit(event: 'pause'): boolean; + emit(event: 'resume'): boolean; + emit(event: 'SIGCONT'): boolean; + emit(event: 'SIGINT'): boolean; + emit(event: 'SIGTSTP'): boolean; + emit(event: 'exit'): boolean; + emit(event: 'reset', context: Context): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: 'close', listener: () => void): this; + on(event: 'line', listener: (input: string) => void): this; + on(event: 'pause', listener: () => void): this; + on(event: 'resume', listener: () => void): this; + on(event: 'SIGCONT', listener: () => void): this; + on(event: 'SIGINT', listener: () => void): this; + on(event: 'SIGTSTP', listener: () => void): this; + on(event: 'exit', listener: () => void): this; + on(event: 'reset', listener: (context: Context) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: 'close', listener: () => void): this; + once(event: 'line', listener: (input: string) => void): this; + once(event: 'pause', listener: () => void): this; + once(event: 'resume', listener: () => void): this; + once(event: 'SIGCONT', listener: () => void): this; + once(event: 'SIGINT', listener: () => void): this; + once(event: 'SIGTSTP', listener: () => void): this; + once(event: 'exit', listener: () => void): this; + once(event: 'reset', listener: (context: Context) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: 'close', listener: () => void): this; + prependListener(event: 'line', listener: (input: string) => void): this; + prependListener(event: 'pause', listener: () => void): this; + prependListener(event: 'resume', listener: () => void): this; + prependListener(event: 'SIGCONT', listener: () => void): this; + prependListener(event: 'SIGINT', listener: () => void): this; + prependListener(event: 'SIGTSTP', listener: () => void): this; + prependListener(event: 'exit', listener: () => void): this; + prependListener(event: 'reset', listener: (context: Context) => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: 'close', listener: () => void): this; + prependOnceListener(event: 'line', listener: (input: string) => void): this; + prependOnceListener(event: 'pause', listener: () => void): this; + prependOnceListener(event: 'resume', listener: () => void): this; + prependOnceListener(event: 'SIGCONT', listener: () => void): this; + prependOnceListener(event: 'SIGINT', listener: () => void): this; + prependOnceListener(event: 'SIGTSTP', listener: () => void): this; + prependOnceListener(event: 'exit', listener: () => void): this; + prependOnceListener(event: 'reset', listener: (context: Context) => void): this; + } + /** + 
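* An illustrative use (a sketch added for this note, not upstream text): either mode constant is passed to `repl.start()` through the `replMode` option described above.
+ *
+ * ```js
+ * const repl = require('repl');
+ *
+ * // Evaluate every line of input in strict mode:
+ * repl.start({ prompt: '> ', replMode: repl.REPL_MODE_STRICT });
+ * ```
+ *
+ 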
* A flag passed in the REPL options. Evaluates expressions in sloppy mode. + */ + const REPL_MODE_SLOPPY: unique symbol; + /** + * A flag passed in the REPL options. Evaluates expressions in strict mode. + * This is equivalent to prefacing every repl statement with `'use strict'`. + */ + const REPL_MODE_STRICT: unique symbol; + /** + * The `repl.start()` method creates and starts a {@link REPLServer} instance. + * + * If `options` is a string, then it specifies the input prompt: + * + * ```js + * const repl = require('repl'); + * + * // a Unix style prompt + * repl.start('$ '); + * ``` + * @since v0.1.91 + */ + function start(options?: string | ReplOptions): REPLServer; + /** + * Indicates a recoverable error that a `REPLServer` can use to support multi-line input. + * + * @see https://nodejs.org/dist/latest-v10.x/docs/api/repl.html#repl_recoverable_errors + */ + class Recoverable extends SyntaxError { + err: Error; + constructor(err: Error); + } +} +declare module 'node:repl' { + export * from 'repl'; +} diff --git a/node_modules/@types/node/stream.d.ts b/node_modules/@types/node/stream.d.ts new file mode 100755 index 0000000..a0df689 --- /dev/null +++ b/node_modules/@types/node/stream.d.ts @@ -0,0 +1,1340 @@ +/** + * A stream is an abstract interface for working with streaming data in Node.js. + * The `stream` module provides an API for implementing the stream interface. + * + * There are many stream objects provided by Node.js. For instance, a `request to an HTTP server` and `process.stdout` are both stream instances. + * + * Streams can be readable, writable, or both. All streams are instances of `EventEmitter`. + * + * To access the `stream` module: + * + * ```js + * const stream = require('stream'); + * ``` + * + * The `stream` module is useful for creating new types of stream instances. It is + * usually not necessary to use the `stream` module to consume streams. + * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/stream.js) + */ +declare module 'stream' { + import { EventEmitter, Abortable } from 'node:events'; + import { Blob as NodeBlob } from "node:buffer"; + import * as streamPromises from 'node:stream/promises'; + import * as streamConsumers from 'node:stream/consumers'; + import * as streamWeb from 'node:stream/web'; + class internal extends EventEmitter { + pipe( + destination: T, + options?: { + end?: boolean | undefined; + } + ): T; + } + namespace internal { + class Stream extends internal { + constructor(opts?: ReadableOptions); + } + interface StreamOptions extends Abortable { + emitClose?: boolean | undefined; + highWaterMark?: number | undefined; + objectMode?: boolean | undefined; + construct?(this: T, callback: (error?: Error | null) => void): void; + destroy?(this: T, error: Error | null, callback: (error: Error | null) => void): void; + autoDestroy?: boolean | undefined; + } + interface ReadableOptions extends StreamOptions { + encoding?: BufferEncoding | undefined; + read?(this: Readable, size: number): void; + } + /** + * @since v0.9.4 + */ + class Readable extends Stream implements NodeJS.ReadableStream { + /** + * A utility method for creating Readable Streams out of iterators. + */ + static from(iterable: Iterable | AsyncIterable, options?: ReadableOptions): Readable; + /** + * A utility method for creating a `Readable` from a web `ReadableStream`. + * @since v17.0.0 + * @experimental + */ + static fromWeb(readableStream: streamWeb.ReadableStream, options?: Pick): Readable; + /** + * Returns whether the stream has been read from or cancelled. 
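+ *
+ * A minimal illustration (added here as a sketch; not upstream documentation text):
+ *
+ * ```js
+ * const { Readable } = require('stream');
+ *
+ * const readable = Readable.from(['some data']);
+ * console.log(Readable.isDisturbed(readable)); // false: nothing has read from or cancelled the stream yet
+ * readable.destroy();
+ * console.log(Readable.isDisturbed(readable)); // true: the stream was cancelled before 'end'
+ * ```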
+ * @since v16.8.0 + */ + static isDisturbed(stream: Readable | NodeJS.ReadableStream): boolean; + /** + * A utility method for creating a web `ReadableStream` from a `Readable`. + * @since v17.0.0 + * @experimental + */ + static toWeb(streamReadable: Readable): streamWeb.ReadableStream; + /** + * Returns whether the stream was destroyed or errored before emitting `'end'`. + * @since v16.8.0 + * @experimental + */ + readonly readableAborted: boolean; + /** + * Is `true` if it is safe to call `readable.read()`, which means + * the stream has not been destroyed or emitted `'error'` or `'end'`. + * @since v11.4.0 + */ + readable: boolean; + /** + * Returns whether `'data'` has been emitted. + * @since v16.7.0, v14.18.0 + * @experimental + */ + readonly readableDidRead: boolean; + /** + * Getter for the property `encoding` of a given `Readable` stream. The `encoding`property can be set using the `readable.setEncoding()` method. + * @since v12.7.0 + */ + readonly readableEncoding: BufferEncoding | null; + /** + * Becomes `true` when `'end'` event is emitted. + * @since v12.9.0 + */ + readonly readableEnded: boolean; + /** + * This property reflects the current state of a `Readable` stream as described + * in the `Three states` section. + * @since v9.4.0 + */ + readonly readableFlowing: boolean | null; + /** + * Returns the value of `highWaterMark` passed when creating this `Readable`. + * @since v9.3.0 + */ + readonly readableHighWaterMark: number; + /** + * This property contains the number of bytes (or objects) in the queue + * ready to be read. The value provides introspection data regarding + * the status of the `highWaterMark`. + * @since v9.4.0 + */ + readonly readableLength: number; + /** + * Getter for the property `objectMode` of a given `Readable` stream. + * @since v12.3.0 + */ + readonly readableObjectMode: boolean; + /** + * Is `true` after `readable.destroy()` has been called. + * @since v18.0.0 + */ + destroyed: boolean; + /** + * Is true after 'close' has been emitted. + * @since v8.0.0 + */ + readonly closed: boolean; + /** + * Returns error if the stream has been destroyed with an error. + * @since v18.0.0 + */ + readonly errored: Error | null; + constructor(opts?: ReadableOptions); + _construct?(callback: (error?: Error | null) => void): void; + _read(size: number): void; + /** + * The `readable.read()` method reads data out of the internal buffer and + * returns it. If no data is available to be read, `null` is returned. By default, + * the data is returned as a `Buffer` object unless an encoding has been + * specified using the `readable.setEncoding()` method or the stream is operating + * in object mode. + * + * The optional `size` argument specifies a specific number of bytes to read. If`size` bytes are not available to be read, `null` will be returned _unless_the stream has ended, in which + * case all of the data remaining in the internal + * buffer will be returned. + * + * If the `size` argument is not specified, all of the data contained in the + * internal buffer will be returned. + * + * The `size` argument must be less than or equal to 1 GiB. + * + * The `readable.read()` method should only be called on `Readable` streams + * operating in paused mode. In flowing mode, `readable.read()` is called + * automatically until the internal buffer is fully drained. 
+ * + * ```js + * const readable = getReadableStreamSomehow(); + * + * // 'readable' may be triggered multiple times as data is buffered in + * readable.on('readable', () => { + * let chunk; + * console.log('Stream is readable (new data received in buffer)'); + * // Use a loop to make sure we read all currently available data + * while (null !== (chunk = readable.read())) { + * console.log(`Read ${chunk.length} bytes of data...`); + * } + * }); + * + * // 'end' will be triggered once when there is no more data available + * readable.on('end', () => { + * console.log('Reached end of stream.'); + * }); + * ``` + * + * Each call to `readable.read()` returns a chunk of data, or `null`. The chunks + * are not concatenated. A `while` loop is necessary to consume all data + * currently in the buffer. When reading a large file `.read()` may return `null`, + * having consumed all buffered content so far, but there is still more data to + * come not yet buffered. In this case a new `'readable'` event will be emitted + * when there is more data in the buffer. Finally the `'end'` event will be + * emitted when there is no more data to come. + * + * Therefore to read a file's whole contents from a `readable`, it is necessary + * to collect chunks across multiple `'readable'` events: + * + * ```js + * const chunks = []; + * + * readable.on('readable', () => { + * let chunk; + * while (null !== (chunk = readable.read())) { + * chunks.push(chunk); + * } + * }); + * + * readable.on('end', () => { + * const content = chunks.join(''); + * }); + * ``` + * + * A `Readable` stream in object mode will always return a single item from + * a call to `readable.read(size)`, regardless of the value of the`size` argument. + * + * If the `readable.read()` method returns a chunk of data, a `'data'` event will + * also be emitted. + * + * Calling {@link read} after the `'end'` event has + * been emitted will return `null`. No runtime error will be raised. + * @since v0.9.4 + * @param size Optional argument to specify how much data to read. + */ + read(size?: number): any; + /** + * The `readable.setEncoding()` method sets the character encoding for + * data read from the `Readable` stream. + * + * By default, no encoding is assigned and stream data will be returned as`Buffer` objects. Setting an encoding causes the stream data + * to be returned as strings of the specified encoding rather than as `Buffer`objects. For instance, calling `readable.setEncoding('utf8')` will cause the + * output data to be interpreted as UTF-8 data, and passed as strings. Calling`readable.setEncoding('hex')` will cause the data to be encoded in hexadecimal + * string format. + * + * The `Readable` stream will properly handle multi-byte characters delivered + * through the stream that would otherwise become improperly decoded if simply + * pulled from the stream as `Buffer` objects. + * + * ```js + * const readable = getReadableStreamSomehow(); + * readable.setEncoding('utf8'); + * readable.on('data', (chunk) => { + * assert.equal(typeof chunk, 'string'); + * console.log('Got %d characters of string data:', chunk.length); + * }); + * ``` + * @since v0.9.4 + * @param encoding The encoding to use. + */ + setEncoding(encoding: BufferEncoding): this; + /** + * The `readable.pause()` method will cause a stream in flowing mode to stop + * emitting `'data'` events, switching out of flowing mode. Any data that + * becomes available will remain in the internal buffer. 
+ * + * ```js + * const readable = getReadableStreamSomehow(); + * readable.on('data', (chunk) => { + * console.log(`Received ${chunk.length} bytes of data.`); + * readable.pause(); + * console.log('There will be no additional data for 1 second.'); + * setTimeout(() => { + * console.log('Now data will start flowing again.'); + * readable.resume(); + * }, 1000); + * }); + * ``` + * + * The `readable.pause()` method has no effect if there is a `'readable'`event listener. + * @since v0.9.4 + */ + pause(): this; + /** + * The `readable.resume()` method causes an explicitly paused `Readable` stream to + * resume emitting `'data'` events, switching the stream into flowing mode. + * + * The `readable.resume()` method can be used to fully consume the data from a + * stream without actually processing any of that data: + * + * ```js + * getReadableStreamSomehow() + * .resume() + * .on('end', () => { + * console.log('Reached the end, but did not read anything.'); + * }); + * ``` + * + * The `readable.resume()` method has no effect if there is a `'readable'`event listener. + * @since v0.9.4 + */ + resume(): this; + /** + * The `readable.isPaused()` method returns the current operating state of the`Readable`. This is used primarily by the mechanism that underlies the`readable.pipe()` method. In most + * typical cases, there will be no reason to + * use this method directly. + * + * ```js + * const readable = new stream.Readable(); + * + * readable.isPaused(); // === false + * readable.pause(); + * readable.isPaused(); // === true + * readable.resume(); + * readable.isPaused(); // === false + * ``` + * @since v0.11.14 + */ + isPaused(): boolean; + /** + * The `readable.unpipe()` method detaches a `Writable` stream previously attached + * using the {@link pipe} method. + * + * If the `destination` is not specified, then _all_ pipes are detached. + * + * If the `destination` is specified, but no pipe is set up for it, then + * the method does nothing. + * + * ```js + * const fs = require('fs'); + * const readable = getReadableStreamSomehow(); + * const writable = fs.createWriteStream('file.txt'); + * // All the data from readable goes into 'file.txt', + * // but only for the first second. + * readable.pipe(writable); + * setTimeout(() => { + * console.log('Stop writing to file.txt.'); + * readable.unpipe(writable); + * console.log('Manually close the file stream.'); + * writable.end(); + * }, 1000); + * ``` + * @since v0.9.4 + * @param destination Optional specific stream to unpipe + */ + unpipe(destination?: NodeJS.WritableStream): this; + /** + * Passing `chunk` as `null` signals the end of the stream (EOF) and behaves the + * same as `readable.push(null)`, after which no more data can be written. The EOF + * signal is put at the end of the buffer and any buffered data will still be + * flushed. + * + * The `readable.unshift()` method pushes a chunk of data back into the internal + * buffer. This is useful in certain situations where a stream is being consumed by + * code that needs to "un-consume" some amount of data that it has optimistically + * pulled out of the source, so that the data can be passed on to some other party. + * + * The `stream.unshift(chunk)` method cannot be called after the `'end'` event + * has been emitted or a runtime error will be thrown. + * + * Developers using `stream.unshift()` often should consider switching to + * use of a `Transform` stream instead. See the `API for stream implementers` section for more information. 
+ * + * ```js + * // Pull off a header delimited by \n\n. + * // Use unshift() if we get too much. + * // Call the callback with (error, header, stream). + * const { StringDecoder } = require('string_decoder'); + * function parseHeader(stream, callback) { + * stream.on('error', callback); + * stream.on('readable', onReadable); + * const decoder = new StringDecoder('utf8'); + * let header = ''; + * function onReadable() { + * let chunk; + * while (null !== (chunk = stream.read())) { + * const str = decoder.write(chunk); + * if (str.includes('\n\n')) { + * // Found the header boundary. + * const split = str.split(/\n\n/); + * header += split.shift(); + * const remaining = split.join('\n\n'); + * const buf = Buffer.from(remaining, 'utf8'); + * stream.removeListener('error', callback); + * // Remove the 'readable' listener before unshifting. + * stream.removeListener('readable', onReadable); + * if (buf.length) + * stream.unshift(buf); + * // Now the body of the message can be read from the stream. + * callback(null, header, stream); + * return; + * } + * // Still reading the header. + * header += str; + * } + * } + * } + * ``` + * + * Unlike {@link push}, `stream.unshift(chunk)` will not + * end the reading process by resetting the internal reading state of the stream. + * This can cause unexpected results if `readable.unshift()` is called during a + * read (i.e. from within a {@link _read} implementation on a + * custom stream). Following the call to `readable.unshift()` with an immediate {@link push} will reset the reading state appropriately, + * however it is best to simply avoid calling `readable.unshift()` while in the + * process of performing a read. + * @since v0.9.11 + * @param chunk Chunk of data to unshift onto the read queue. For streams not operating in object mode, `chunk` must be a string, `Buffer`, `Uint8Array` or `null`. For object mode + * streams, `chunk` may be any JavaScript value. + * @param encoding Encoding of string chunks. Must be a valid `Buffer` encoding, such as `'utf8'` or `'ascii'`. + */ + unshift(chunk: any, encoding?: BufferEncoding): void; + /** + * Prior to Node.js 0.10, streams did not implement the entire `stream` module API + * as it is currently defined. (See `Compatibility` for more information.) + * + * When using an older Node.js library that emits `'data'` events and has a {@link pause} method that is advisory only, the`readable.wrap()` method can be used to create a `Readable` + * stream that uses + * the old stream as its data source. + * + * It will rarely be necessary to use `readable.wrap()` but the method has been + * provided as a convenience for interacting with older Node.js applications and + * libraries. + * + * ```js + * const { OldReader } = require('./old-api-module.js'); + * const { Readable } = require('stream'); + * const oreader = new OldReader(); + * const myReader = new Readable().wrap(oreader); + * + * myReader.on('readable', () => { + * myReader.read(); // etc. + * }); + * ``` + * @since v0.9.4 + * @param stream An "old style" readable stream + */ + wrap(stream: NodeJS.ReadableStream): this; + push(chunk: any, encoding?: BufferEncoding): boolean; + _destroy(error: Error | null, callback: (error?: Error | null) => void): void; + /** + * Destroy the stream. Optionally emit an `'error'` event, and emit a `'close'`event (unless `emitClose` is set to `false`). After this call, the readable + * stream will release any internal resources and subsequent calls to `push()`will be ignored. 
+ * + * Once `destroy()` has been called any further calls will be a no-op and no + * further errors except from `_destroy()` may be emitted as `'error'`. + * + * Implementors should not override this method, but instead implement `readable._destroy()`. + * @since v8.0.0 + * @param error Error which will be passed as payload in `'error'` event + */ + destroy(error?: Error): this; + /** + * Event emitter + * The defined events on documents including: + * 1. close + * 2. data + * 3. end + * 4. error + * 5. pause + * 6. readable + * 7. resume + */ + addListener(event: 'close', listener: () => void): this; + addListener(event: 'data', listener: (chunk: any) => void): this; + addListener(event: 'end', listener: () => void): this; + addListener(event: 'error', listener: (err: Error) => void): this; + addListener(event: 'pause', listener: () => void): this; + addListener(event: 'readable', listener: () => void): this; + addListener(event: 'resume', listener: () => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: 'close'): boolean; + emit(event: 'data', chunk: any): boolean; + emit(event: 'end'): boolean; + emit(event: 'error', err: Error): boolean; + emit(event: 'pause'): boolean; + emit(event: 'readable'): boolean; + emit(event: 'resume'): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: 'close', listener: () => void): this; + on(event: 'data', listener: (chunk: any) => void): this; + on(event: 'end', listener: () => void): this; + on(event: 'error', listener: (err: Error) => void): this; + on(event: 'pause', listener: () => void): this; + on(event: 'readable', listener: () => void): this; + on(event: 'resume', listener: () => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: 'close', listener: () => void): this; + once(event: 'data', listener: (chunk: any) => void): this; + once(event: 'end', listener: () => void): this; + once(event: 'error', listener: (err: Error) => void): this; + once(event: 'pause', listener: () => void): this; + once(event: 'readable', listener: () => void): this; + once(event: 'resume', listener: () => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: 'close', listener: () => void): this; + prependListener(event: 'data', listener: (chunk: any) => void): this; + prependListener(event: 'end', listener: () => void): this; + prependListener(event: 'error', listener: (err: Error) => void): this; + prependListener(event: 'pause', listener: () => void): this; + prependListener(event: 'readable', listener: () => void): this; + prependListener(event: 'resume', listener: () => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener(event: 'close', listener: () => void): this; + prependOnceListener(event: 'data', listener: (chunk: any) => void): this; + prependOnceListener(event: 'end', listener: () => void): this; + prependOnceListener(event: 'error', listener: (err: Error) => void): this; + prependOnceListener(event: 'pause', listener: () => void): this; + prependOnceListener(event: 'readable', listener: () => void): this; + prependOnceListener(event: 'resume', listener: () => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + removeListener(event: 'close', listener: () => void): this; + removeListener(event: 'data', listener: (chunk: any) => void): this; + 
removeListener(event: 'end', listener: () => void): this; + removeListener(event: 'error', listener: (err: Error) => void): this; + removeListener(event: 'pause', listener: () => void): this; + removeListener(event: 'readable', listener: () => void): this; + removeListener(event: 'resume', listener: () => void): this; + removeListener(event: string | symbol, listener: (...args: any[]) => void): this; + [Symbol.asyncIterator](): AsyncIterableIterator; + } + interface WritableOptions extends StreamOptions { + decodeStrings?: boolean | undefined; + defaultEncoding?: BufferEncoding | undefined; + write?(this: Writable, chunk: any, encoding: BufferEncoding, callback: (error?: Error | null) => void): void; + writev?( + this: Writable, + chunks: Array<{ + chunk: any; + encoding: BufferEncoding; + }>, + callback: (error?: Error | null) => void + ): void; + final?(this: Writable, callback: (error?: Error | null) => void): void; + } + /** + * @since v0.9.4 + */ + class Writable extends Stream implements NodeJS.WritableStream { + /** + * A utility method for creating a `Writable` from a web `WritableStream`. + * @since v17.0.0 + * @experimental + */ + static fromWeb(writableStream: streamWeb.WritableStream, options?: Pick): Writable; + /** + * A utility method for creating a web `WritableStream` from a `Writable`. + * @since v17.0.0 + * @experimental + */ + static toWeb(streamWritable: Writable): streamWeb.WritableStream; + /** + * Is `true` if it is safe to call `writable.write()`, which means + * the stream has not been destroyed, errored or ended. + * @since v11.4.0 + */ + readonly writable: boolean; + /** + * Is `true` after `writable.end()` has been called. This property + * does not indicate whether the data has been flushed, for this use `writable.writableFinished` instead. + * @since v12.9.0 + */ + readonly writableEnded: boolean; + /** + * Is set to `true` immediately before the `'finish'` event is emitted. + * @since v12.6.0 + */ + readonly writableFinished: boolean; + /** + * Return the value of `highWaterMark` passed when creating this `Writable`. + * @since v9.3.0 + */ + readonly writableHighWaterMark: number; + /** + * This property contains the number of bytes (or objects) in the queue + * ready to be written. The value provides introspection data regarding + * the status of the `highWaterMark`. + * @since v9.4.0 + */ + readonly writableLength: number; + /** + * Getter for the property `objectMode` of a given `Writable` stream. + * @since v12.3.0 + */ + readonly writableObjectMode: boolean; + /** + * Number of times `writable.uncork()` needs to be + * called in order to fully uncork the stream. + * @since v13.2.0, v12.16.0 + */ + readonly writableCorked: number; + /** + * Is `true` after `writable.destroy()` has been called. + * @since v8.0.0 + */ + destroyed: boolean; + /** + * Is true after 'close' has been emitted. + * @since v8.0.0 + */ + readonly closed: boolean; + /** + * Returns error if the stream has been destroyed with an error. + * @since v18.0.0 + */ + readonly errored: Error | null; + /** + * Is `true` if the stream's buffer has been full and stream will emit 'drain'. 
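+ *
+ * A small sketch (not upstream documentation; the tiny `highWaterMark` is chosen only to force backpressure):
+ *
+ * ```js
+ * const { Writable } = require('stream');
+ *
+ * const writable = new Writable({
+ *   highWaterMark: 1, // a single small write already fills the buffer
+ *   write(chunk, encoding, callback) {
+ *     process.nextTick(callback); // complete each write on the next tick
+ *   },
+ * });
+ *
+ * console.log(writable.write('hello')); // false: the internal buffer is at or above the high-water mark
+ * console.log(writable.writableNeedDrain); // true: 'drain' will be emitted once the buffer empties
+ * writable.once('drain', () => console.log('drained'));
+ * ```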
+ * @since v15.2.0, v14.17.0 + */ + readonly writableNeedDrain: boolean; + constructor(opts?: WritableOptions); + _write(chunk: any, encoding: BufferEncoding, callback: (error?: Error | null) => void): void; + _writev?( + chunks: Array<{ + chunk: any; + encoding: BufferEncoding; + }>, + callback: (error?: Error | null) => void + ): void; + _construct?(callback: (error?: Error | null) => void): void; + _destroy(error: Error | null, callback: (error?: Error | null) => void): void; + _final(callback: (error?: Error | null) => void): void; + /** + * The `writable.write()` method writes some data to the stream, and calls the + * supplied `callback` once the data has been fully handled. If an error + * occurs, the `callback` will be called with the error as its + * first argument. The `callback` is called asynchronously and before `'error'` is + * emitted. + * + * The return value is `true` if the internal buffer is less than the`highWaterMark` configured when the stream was created after admitting `chunk`. + * If `false` is returned, further attempts to write data to the stream should + * stop until the `'drain'` event is emitted. + * + * While a stream is not draining, calls to `write()` will buffer `chunk`, and + * return false. Once all currently buffered chunks are drained (accepted for + * delivery by the operating system), the `'drain'` event will be emitted. + * Once `write()` returns false, do not write more chunks + * until the `'drain'` event is emitted. While calling `write()` on a stream that + * is not draining is allowed, Node.js will buffer all written chunks until + * maximum memory usage occurs, at which point it will abort unconditionally. + * Even before it aborts, high memory usage will cause poor garbage collector + * performance and high RSS (which is not typically released back to the system, + * even after the memory is no longer required). Since TCP sockets may never + * drain if the remote peer does not read the data, writing a socket that is + * not draining may lead to a remotely exploitable vulnerability. + * + * Writing data while the stream is not draining is particularly + * problematic for a `Transform`, because the `Transform` streams are paused + * by default until they are piped or a `'data'` or `'readable'` event handler + * is added. + * + * If the data to be written can be generated or fetched on demand, it is + * recommended to encapsulate the logic into a `Readable` and use {@link pipe}. However, if calling `write()` is preferred, it is + * possible to respect backpressure and avoid memory issues using the `'drain'` event: + * + * ```js + * function write(data, cb) { + * if (!stream.write(data)) { + * stream.once('drain', cb); + * } else { + * process.nextTick(cb); + * } + * } + * + * // Wait for cb to be called before doing any other write. + * write('hello', () => { + * console.log('Write completed, do more writes now.'); + * }); + * ``` + * + * A `Writable` stream in object mode will always ignore the `encoding` argument. + * @since v0.9.4 + * @param chunk Optional data to write. For streams not operating in object mode, `chunk` must be a string, `Buffer` or `Uint8Array`. For object mode streams, `chunk` may be any + * JavaScript value other than `null`. + * @param [encoding='utf8'] The encoding, if `chunk` is a string. + * @param callback Callback for when this chunk of data is flushed. 
+ * @return `false` if the stream wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`. + */ + write(chunk: any, callback?: (error: Error | null | undefined) => void): boolean; + write(chunk: any, encoding: BufferEncoding, callback?: (error: Error | null | undefined) => void): boolean; + /** + * The `writable.setDefaultEncoding()` method sets the default `encoding` for a `Writable` stream. + * @since v0.11.15 + * @param encoding The new default encoding + */ + setDefaultEncoding(encoding: BufferEncoding): this; + /** + * Calling the `writable.end()` method signals that no more data will be written + * to the `Writable`. The optional `chunk` and `encoding` arguments allow one + * final additional chunk of data to be written immediately before closing the + * stream. + * + * Calling the {@link write} method after calling {@link end} will raise an error. + * + * ```js + * // Write 'hello, ' and then end with 'world!'. + * const fs = require('fs'); + * const file = fs.createWriteStream('example.txt'); + * file.write('hello, '); + * file.end('world!'); + * // Writing more now is not allowed! + * ``` + * @since v0.9.4 + * @param chunk Optional data to write. For streams not operating in object mode, `chunk` must be a string, `Buffer` or `Uint8Array`. For object mode streams, `chunk` may be any + * JavaScript value other than `null`. + * @param encoding The encoding if `chunk` is a string + * @param callback Callback for when the stream is finished. + */ + end(cb?: () => void): this; + end(chunk: any, cb?: () => void): this; + end(chunk: any, encoding: BufferEncoding, cb?: () => void): this; + /** + * The `writable.cork()` method forces all written data to be buffered in memory. + * The buffered data will be flushed when either the {@link uncork} or {@link end} methods are called. + * + * The primary intent of `writable.cork()` is to accommodate a situation in which + * several small chunks are written to the stream in rapid succession. Instead of + * immediately forwarding them to the underlying destination, `writable.cork()`buffers all the chunks until `writable.uncork()` is called, which will pass them + * all to `writable._writev()`, if present. This prevents a head-of-line blocking + * situation where data is being buffered while waiting for the first small chunk + * to be processed. However, use of `writable.cork()` without implementing`writable._writev()` may have an adverse effect on throughput. + * + * See also: `writable.uncork()`, `writable._writev()`. + * @since v0.11.2 + */ + cork(): void; + /** + * The `writable.uncork()` method flushes all data buffered since {@link cork} was called. + * + * When using `writable.cork()` and `writable.uncork()` to manage the buffering + * of writes to a stream, defer calls to `writable.uncork()` using`process.nextTick()`. Doing so allows batching of all`writable.write()` calls that occur within a given Node.js event + * loop phase. + * + * ```js + * stream.cork(); + * stream.write('some '); + * stream.write('data '); + * process.nextTick(() => stream.uncork()); + * ``` + * + * If the `writable.cork()` method is called multiple times on a stream, the + * same number of calls to `writable.uncork()` must be called to flush the buffered + * data. 
+ * + * ```js + * stream.cork(); + * stream.write('some '); + * stream.cork(); + * stream.write('data '); + * process.nextTick(() => { + * stream.uncork(); + * // The data will not be flushed until uncork() is called a second time. + * stream.uncork(); + * }); + * ``` + * + * See also: `writable.cork()`. + * @since v0.11.2 + */ + uncork(): void; + /** + * Destroy the stream. Optionally emit an `'error'` event, and emit a `'close'`event (unless `emitClose` is set to `false`). After this call, the writable + * stream has ended and subsequent calls to `write()` or `end()` will result in + * an `ERR_STREAM_DESTROYED` error. + * This is a destructive and immediate way to destroy a stream. Previous calls to`write()` may not have drained, and may trigger an `ERR_STREAM_DESTROYED` error. + * Use `end()` instead of destroy if data should flush before close, or wait for + * the `'drain'` event before destroying the stream. + * + * Once `destroy()` has been called any further calls will be a no-op and no + * further errors except from `_destroy()` may be emitted as `'error'`. + * + * Implementors should not override this method, + * but instead implement `writable._destroy()`. + * @since v8.0.0 + * @param error Optional, an error to emit with `'error'` event. + */ + destroy(error?: Error): this; + /** + * Event emitter + * The defined events on documents including: + * 1. close + * 2. drain + * 3. error + * 4. finish + * 5. pipe + * 6. unpipe + */ + addListener(event: 'close', listener: () => void): this; + addListener(event: 'drain', listener: () => void): this; + addListener(event: 'error', listener: (err: Error) => void): this; + addListener(event: 'finish', listener: () => void): this; + addListener(event: 'pipe', listener: (src: Readable) => void): this; + addListener(event: 'unpipe', listener: (src: Readable) => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: 'close'): boolean; + emit(event: 'drain'): boolean; + emit(event: 'error', err: Error): boolean; + emit(event: 'finish'): boolean; + emit(event: 'pipe', src: Readable): boolean; + emit(event: 'unpipe', src: Readable): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: 'close', listener: () => void): this; + on(event: 'drain', listener: () => void): this; + on(event: 'error', listener: (err: Error) => void): this; + on(event: 'finish', listener: () => void): this; + on(event: 'pipe', listener: (src: Readable) => void): this; + on(event: 'unpipe', listener: (src: Readable) => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: 'close', listener: () => void): this; + once(event: 'drain', listener: () => void): this; + once(event: 'error', listener: (err: Error) => void): this; + once(event: 'finish', listener: () => void): this; + once(event: 'pipe', listener: (src: Readable) => void): this; + once(event: 'unpipe', listener: (src: Readable) => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: 'close', listener: () => void): this; + prependListener(event: 'drain', listener: () => void): this; + prependListener(event: 'error', listener: (err: Error) => void): this; + prependListener(event: 'finish', listener: () => void): this; + prependListener(event: 'pipe', listener: (src: Readable) => void): this; + prependListener(event: 'unpipe', listener: (src: Readable) => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => 
void): this; + prependOnceListener(event: 'close', listener: () => void): this; + prependOnceListener(event: 'drain', listener: () => void): this; + prependOnceListener(event: 'error', listener: (err: Error) => void): this; + prependOnceListener(event: 'finish', listener: () => void): this; + prependOnceListener(event: 'pipe', listener: (src: Readable) => void): this; + prependOnceListener(event: 'unpipe', listener: (src: Readable) => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + removeListener(event: 'close', listener: () => void): this; + removeListener(event: 'drain', listener: () => void): this; + removeListener(event: 'error', listener: (err: Error) => void): this; + removeListener(event: 'finish', listener: () => void): this; + removeListener(event: 'pipe', listener: (src: Readable) => void): this; + removeListener(event: 'unpipe', listener: (src: Readable) => void): this; + removeListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + interface DuplexOptions extends ReadableOptions, WritableOptions { + allowHalfOpen?: boolean | undefined; + readableObjectMode?: boolean | undefined; + writableObjectMode?: boolean | undefined; + readableHighWaterMark?: number | undefined; + writableHighWaterMark?: number | undefined; + writableCorked?: number | undefined; + construct?(this: Duplex, callback: (error?: Error | null) => void): void; + read?(this: Duplex, size: number): void; + write?(this: Duplex, chunk: any, encoding: BufferEncoding, callback: (error?: Error | null) => void): void; + writev?( + this: Duplex, + chunks: Array<{ + chunk: any; + encoding: BufferEncoding; + }>, + callback: (error?: Error | null) => void + ): void; + final?(this: Duplex, callback: (error?: Error | null) => void): void; + destroy?(this: Duplex, error: Error | null, callback: (error: Error | null) => void): void; + } + /** + * Duplex streams are streams that implement both the `Readable` and `Writable` interfaces. + * + * Examples of `Duplex` streams include: + * + * * `TCP sockets` + * * `zlib streams` + * * `crypto streams` + * @since v0.9.4 + */ + class Duplex extends Readable implements Writable { + readonly writable: boolean; + readonly writableEnded: boolean; + readonly writableFinished: boolean; + readonly writableHighWaterMark: number; + readonly writableLength: number; + readonly writableObjectMode: boolean; + readonly writableCorked: number; + readonly writableNeedDrain: boolean; + readonly closed: boolean; + readonly errored: Error | null; + /** + * If `false` then the stream will automatically end the writable side when the + * readable side ends. Set initially by the `allowHalfOpen` constructor option, + * which defaults to `false`. + * + * This can be changed manually to change the half-open behavior of an existing`Duplex` stream instance, but must be changed before the `'end'` event is + * emitted. + * @since v0.9.4 + */ + allowHalfOpen: boolean; + constructor(opts?: DuplexOptions); + /** + * A utility method for creating duplex streams. + * + * - `Stream` converts writable stream into writable `Duplex` and readable stream + * to `Duplex`. + * - `Blob` converts into readable `Duplex`. + * - `string` converts into readable `Duplex`. + * - `ArrayBuffer` converts into readable `Duplex`. + * - `AsyncIterable` converts into a readable `Duplex`. Cannot yield `null`. + * - `AsyncGeneratorFunction` converts into a readable/writable transform + * `Duplex`. Must take a source `AsyncIterable` as first parameter. 
Cannot yield + * `null`. + * - `AsyncFunction` converts into a writable `Duplex`. Must return + * either `null` or `undefined` + * - `Object ({ writable, readable })` converts `readable` and + * `writable` into `Stream` and then combines them into `Duplex` where the + * `Duplex` will write to the `writable` and read from the `readable`. + * - `Promise` converts into readable `Duplex`. Value `null` is ignored. + * + * @since v16.8.0 + */ + static from(src: Stream | NodeBlob | ArrayBuffer | string | Iterable | AsyncIterable | AsyncGeneratorFunction | Promise | Object): Duplex; + _write(chunk: any, encoding: BufferEncoding, callback: (error?: Error | null) => void): void; + _writev?( + chunks: Array<{ + chunk: any; + encoding: BufferEncoding; + }>, + callback: (error?: Error | null) => void + ): void; + _destroy(error: Error | null, callback: (error: Error | null) => void): void; + _final(callback: (error?: Error | null) => void): void; + write(chunk: any, encoding?: BufferEncoding, cb?: (error: Error | null | undefined) => void): boolean; + write(chunk: any, cb?: (error: Error | null | undefined) => void): boolean; + setDefaultEncoding(encoding: BufferEncoding): this; + end(cb?: () => void): this; + end(chunk: any, cb?: () => void): this; + end(chunk: any, encoding?: BufferEncoding, cb?: () => void): this; + cork(): void; + uncork(): void; + } + type TransformCallback = (error?: Error | null, data?: any) => void; + interface TransformOptions extends DuplexOptions { + construct?(this: Transform, callback: (error?: Error | null) => void): void; + read?(this: Transform, size: number): void; + write?(this: Transform, chunk: any, encoding: BufferEncoding, callback: (error?: Error | null) => void): void; + writev?( + this: Transform, + chunks: Array<{ + chunk: any; + encoding: BufferEncoding; + }>, + callback: (error?: Error | null) => void + ): void; + final?(this: Transform, callback: (error?: Error | null) => void): void; + destroy?(this: Transform, error: Error | null, callback: (error: Error | null) => void): void; + transform?(this: Transform, chunk: any, encoding: BufferEncoding, callback: TransformCallback): void; + flush?(this: Transform, callback: TransformCallback): void; + } + /** + * Transform streams are `Duplex` streams where the output is in some way + * related to the input. Like all `Duplex` streams, `Transform` streams + * implement both the `Readable` and `Writable` interfaces. + * + * Examples of `Transform` streams include: + * + * * `zlib streams` + * * `crypto streams` + * @since v0.9.4 + */ + class Transform extends Duplex { + constructor(opts?: TransformOptions); + _transform(chunk: any, encoding: BufferEncoding, callback: TransformCallback): void; + _flush(callback: TransformCallback): void; + } + /** + * The `stream.PassThrough` class is a trivial implementation of a `Transform` stream that simply passes the input bytes across to the output. Its purpose is + * primarily for examples and testing, but there are some use cases where`stream.PassThrough` is useful as a building block for novel sorts of streams. + */ + class PassThrough extends Transform {} + /** + * Attaches an AbortSignal to a readable or writeable stream. This lets code + * control stream destruction using an `AbortController`. + * + * Calling `abort` on the `AbortController` corresponding to the passed`AbortSignal` will behave the same way as calling `.destroy(new AbortError())`on the stream. 
+ * + * ```js + * const fs = require('fs'); + * + * const controller = new AbortController(); + * const read = addAbortSignal( + * controller.signal, + * fs.createReadStream(('object.json')) + * ); + * // Later, abort the operation closing the stream + * controller.abort(); + * ``` + * + * Or using an `AbortSignal` with a readable stream as an async iterable: + * + * ```js + * const controller = new AbortController(); + * setTimeout(() => controller.abort(), 10_000); // set a timeout + * const stream = addAbortSignal( + * controller.signal, + * fs.createReadStream(('object.json')) + * ); + * (async () => { + * try { + * for await (const chunk of stream) { + * await process(chunk); + * } + * } catch (e) { + * if (e.name === 'AbortError') { + * // The operation was cancelled + * } else { + * throw e; + * } + * } + * })(); + * ``` + * @since v15.4.0 + * @param signal A signal representing possible cancellation + * @param stream a stream to attach a signal to + */ + function addAbortSignal(signal: AbortSignal, stream: T): T; + interface FinishedOptions extends Abortable { + error?: boolean | undefined; + readable?: boolean | undefined; + writable?: boolean | undefined; + } + /** + * A function to get notified when a stream is no longer readable, writable + * or has experienced an error or a premature close event. + * + * ```js + * const { finished } = require('stream'); + * + * const rs = fs.createReadStream('archive.tar'); + * + * finished(rs, (err) => { + * if (err) { + * console.error('Stream failed.', err); + * } else { + * console.log('Stream is done reading.'); + * } + * }); + * + * rs.resume(); // Drain the stream. + * ``` + * + * Especially useful in error handling scenarios where a stream is destroyed + * prematurely (like an aborted HTTP request), and will not emit `'end'`or `'finish'`. + * + * The `finished` API provides promise version: + * + * ```js + * const { finished } = require('stream/promises'); + * + * const rs = fs.createReadStream('archive.tar'); + * + * async function run() { + * await finished(rs); + * console.log('Stream is done reading.'); + * } + * + * run().catch(console.error); + * rs.resume(); // Drain the stream. + * ``` + * + * `stream.finished()` leaves dangling event listeners (in particular`'error'`, `'end'`, `'finish'` and `'close'`) after `callback` has been + * invoked. The reason for this is so that unexpected `'error'` events (due to + * incorrect stream implementations) do not cause unexpected crashes. + * If this is unwanted behavior then the returned cleanup function needs to be + * invoked in the callback: + * + * ```js + * const cleanup = finished(rs, (err) => { + * cleanup(); + * // ... + * }); + * ``` + * @since v10.0.0 + * @param stream A readable and/or writable stream. + * @param callback A callback function that takes an optional error argument. + * @return A cleanup function which removes all registered listeners. 
+     */
+    function finished(stream: NodeJS.ReadableStream | NodeJS.WritableStream | NodeJS.ReadWriteStream, options: FinishedOptions, callback: (err?: NodeJS.ErrnoException | null) => void): () => void;
+    function finished(stream: NodeJS.ReadableStream | NodeJS.WritableStream | NodeJS.ReadWriteStream, callback: (err?: NodeJS.ErrnoException | null) => void): () => void;
+    namespace finished {
+        function __promisify__(stream: NodeJS.ReadableStream | NodeJS.WritableStream | NodeJS.ReadWriteStream, options?: FinishedOptions): Promise<void>;
+    }
+    type PipelineSourceFunction<T> = () => Iterable<T> | AsyncIterable<T>;
+    type PipelineSource<T> = Iterable<T> | AsyncIterable<T> | NodeJS.ReadableStream | PipelineSourceFunction<T>;
+    type PipelineTransform<S extends PipelineTransformSource<any>, U> =
+        | NodeJS.ReadWriteStream
+        | ((source: S extends (...args: any[]) => Iterable<infer ST> | AsyncIterable<infer ST> ? AsyncIterable<ST> : S) => AsyncIterable<U>);
+    type PipelineTransformSource<T> = PipelineSource<T> | PipelineTransform<any, T>;
+    type PipelineDestinationIterableFunction<T> = (source: AsyncIterable<T>) => AsyncIterable<any>;
+    type PipelineDestinationPromiseFunction<T, P> = (source: AsyncIterable<T>) => Promise<P>;
+    type PipelineDestination<S extends PipelineTransformSource<any>, P> = S extends PipelineTransformSource<infer ST>
+        ? NodeJS.WritableStream | PipelineDestinationIterableFunction<ST> | PipelineDestinationPromiseFunction<ST, P>
+        : never;
+    type PipelineCallback<S extends PipelineDestination<any, any>> = S extends PipelineDestinationPromiseFunction<any, infer P>
+        ? (err: NodeJS.ErrnoException | null, value: P) => void
+        : (err: NodeJS.ErrnoException | null) => void;
+    type PipelinePromise<S extends PipelineDestination<any, any>> = S extends PipelineDestinationPromiseFunction<any, infer P> ? Promise<P>
: Promise; + interface PipelineOptions { + signal: AbortSignal; + } + /** + * A module method to pipe between streams and generators forwarding errors and + * properly cleaning up and provide a callback when the pipeline is complete. + * + * ```js + * const { pipeline } = require('stream'); + * const fs = require('fs'); + * const zlib = require('zlib'); + * + * // Use the pipeline API to easily pipe a series of streams + * // together and get notified when the pipeline is fully done. + * + * // A pipeline to gzip a potentially huge tar file efficiently: + * + * pipeline( + * fs.createReadStream('archive.tar'), + * zlib.createGzip(), + * fs.createWriteStream('archive.tar.gz'), + * (err) => { + * if (err) { + * console.error('Pipeline failed.', err); + * } else { + * console.log('Pipeline succeeded.'); + * } + * } + * ); + * ``` + * + * The `pipeline` API provides a promise version, which can also + * receive an options argument as the last parameter with a`signal` `AbortSignal` property. When the signal is aborted,`destroy` will be called on the underlying pipeline, with + * an`AbortError`. + * + * ```js + * const { pipeline } = require('stream/promises'); + * + * async function run() { + * await pipeline( + * fs.createReadStream('archive.tar'), + * zlib.createGzip(), + * fs.createWriteStream('archive.tar.gz') + * ); + * console.log('Pipeline succeeded.'); + * } + * + * run().catch(console.error); + * ``` + * + * To use an `AbortSignal`, pass it inside an options object, + * as the last argument: + * + * ```js + * const { pipeline } = require('stream/promises'); + * + * async function run() { + * const ac = new AbortController(); + * const signal = ac.signal; + * + * setTimeout(() => ac.abort(), 1); + * await pipeline( + * fs.createReadStream('archive.tar'), + * zlib.createGzip(), + * fs.createWriteStream('archive.tar.gz'), + * { signal }, + * ); + * } + * + * run().catch(console.error); // AbortError + * ``` + * + * The `pipeline` API also supports async generators: + * + * ```js + * const { pipeline } = require('stream/promises'); + * const fs = require('fs'); + * + * async function run() { + * await pipeline( + * fs.createReadStream('lowercase.txt'), + * async function* (source, { signal }) { + * source.setEncoding('utf8'); // Work with strings rather than `Buffer`s. + * for await (const chunk of source) { + * yield await processChunk(chunk, { signal }); + * } + * }, + * fs.createWriteStream('uppercase.txt') + * ); + * console.log('Pipeline succeeded.'); + * } + * + * run().catch(console.error); + * ``` + * + * Remember to handle the `signal` argument passed into the async generator. + * Especially in the case where the async generator is the source for the + * pipeline (i.e. first argument) or the pipeline will never complete. + * + * ```js + * const { pipeline } = require('stream/promises'); + * const fs = require('fs'); + * + * async function run() { + * await pipeline( + * async function* ({ signal }) { + * await someLongRunningfn({ signal }); + * yield 'asd'; + * }, + * fs.createWriteStream('uppercase.txt') + * ); + * console.log('Pipeline succeeded.'); + * } + * + * run().catch(console.error); + * ``` + * + * `stream.pipeline()` will call `stream.destroy(err)` on all streams except: + * + * * `Readable` streams which have emitted `'end'` or `'close'`. + * * `Writable` streams which have emitted `'finish'` or `'close'`. + * + * `stream.pipeline()` leaves dangling event listeners on the streams + * after the `callback` has been invoked. 
In the case of reuse of streams after + * failure, this can cause event listener leaks and swallowed errors. If the last + * stream is readable, dangling event listeners will be removed so that the last + * stream can be consumed later. + * + * `stream.pipeline()` closes all the streams when an error is raised. + * The `IncomingRequest` usage with `pipeline` could lead to an unexpected behavior + * once it would destroy the socket without sending the expected response. + * See the example below: + * + * ```js + * const fs = require('fs'); + * const http = require('http'); + * const { pipeline } = require('stream'); + * + * const server = http.createServer((req, res) => { + * const fileStream = fs.createReadStream('./fileNotExist.txt'); + * pipeline(fileStream, res, (err) => { + * if (err) { + * console.log(err); // No such file + * // this message can't be sent once `pipeline` already destroyed the socket + * return res.end('error!!!'); + * } + * }); + * }); + * ``` + * @since v10.0.0 + * @param callback Called when the pipeline is fully done. + */ + function pipeline, B extends PipelineDestination>( + source: A, + destination: B, + callback?: PipelineCallback + ): B extends NodeJS.WritableStream ? B : NodeJS.WritableStream; + function pipeline, T1 extends PipelineTransform, B extends PipelineDestination>( + source: A, + transform1: T1, + destination: B, + callback?: PipelineCallback + ): B extends NodeJS.WritableStream ? B : NodeJS.WritableStream; + function pipeline, T1 extends PipelineTransform, T2 extends PipelineTransform, B extends PipelineDestination>( + source: A, + transform1: T1, + transform2: T2, + destination: B, + callback?: PipelineCallback + ): B extends NodeJS.WritableStream ? B : NodeJS.WritableStream; + function pipeline< + A extends PipelineSource, + T1 extends PipelineTransform, + T2 extends PipelineTransform, + T3 extends PipelineTransform, + B extends PipelineDestination + >(source: A, transform1: T1, transform2: T2, transform3: T3, destination: B, callback?: PipelineCallback): B extends NodeJS.WritableStream ? B : NodeJS.WritableStream; + function pipeline< + A extends PipelineSource, + T1 extends PipelineTransform, + T2 extends PipelineTransform, + T3 extends PipelineTransform, + T4 extends PipelineTransform, + B extends PipelineDestination + >(source: A, transform1: T1, transform2: T2, transform3: T3, transform4: T4, destination: B, callback?: PipelineCallback): B extends NodeJS.WritableStream ? 
B : NodeJS.WritableStream; + function pipeline( + streams: ReadonlyArray, + callback?: (err: NodeJS.ErrnoException | null) => void + ): NodeJS.WritableStream; + function pipeline( + stream1: NodeJS.ReadableStream, + stream2: NodeJS.ReadWriteStream | NodeJS.WritableStream, + ...streams: Array void)> + ): NodeJS.WritableStream; + namespace pipeline { + function __promisify__, B extends PipelineDestination>(source: A, destination: B, options?: PipelineOptions): PipelinePromise; + function __promisify__, T1 extends PipelineTransform, B extends PipelineDestination>( + source: A, + transform1: T1, + destination: B, + options?: PipelineOptions + ): PipelinePromise; + function __promisify__, T1 extends PipelineTransform, T2 extends PipelineTransform, B extends PipelineDestination>( + source: A, + transform1: T1, + transform2: T2, + destination: B, + options?: PipelineOptions + ): PipelinePromise; + function __promisify__< + A extends PipelineSource, + T1 extends PipelineTransform, + T2 extends PipelineTransform, + T3 extends PipelineTransform, + B extends PipelineDestination + >(source: A, transform1: T1, transform2: T2, transform3: T3, destination: B, options?: PipelineOptions): PipelinePromise; + function __promisify__< + A extends PipelineSource, + T1 extends PipelineTransform, + T2 extends PipelineTransform, + T3 extends PipelineTransform, + T4 extends PipelineTransform, + B extends PipelineDestination + >(source: A, transform1: T1, transform2: T2, transform3: T3, transform4: T4, destination: B, options?: PipelineOptions): PipelinePromise; + function __promisify__(streams: ReadonlyArray, options?: PipelineOptions): Promise; + function __promisify__( + stream1: NodeJS.ReadableStream, + stream2: NodeJS.ReadWriteStream | NodeJS.WritableStream, + ...streams: Array + ): Promise; + } + interface Pipe { + close(): void; + hasRef(): boolean; + ref(): void; + unref(): void; + } + + /** + * Returns whether the stream has encountered an error. + * @since v17.3.0 + */ + function isErrored(stream: Readable | Writable | NodeJS.ReadableStream | NodeJS.WritableStream): boolean; + + /** + * Returns whether the stream is readable. 
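As a rough illustration of the `isErrored()`/`isReadable()` helpers declared around this point in the hunk (both are plain boolean checks on a stream's state), a minimal sketch might look like the following; `Readable.from()` is used here only to get a stream to inspect:

```js
const { Readable, isReadable, isErrored } = require('stream');

const source = Readable.from(['a', 'b', 'c']);

// isReadable() reports whether the stream can still be read from,
// isErrored() whether it has already failed.
console.log(isReadable(source)); // true
console.log(isErrored(source));  // false

// Attach an error handler so destroying with an error does not crash the process.
source.on('error', () => {});
source.destroy(new Error('boom'));
console.log(isErrored(source));  // true once the stream has been destroyed with an error
```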
+ * @since v17.4.0 + */ + function isReadable(stream: Readable | NodeJS.ReadableStream): boolean; + + const promises: typeof streamPromises; + const consumers: typeof streamConsumers; + } + export = internal; +} +declare module 'node:stream' { + import stream = require('stream'); + export = stream; +} diff --git a/node_modules/@types/node/stream/consumers.d.ts b/node_modules/@types/node/stream/consumers.d.ts new file mode 100755 index 0000000..1ebf12e --- /dev/null +++ b/node_modules/@types/node/stream/consumers.d.ts @@ -0,0 +1,12 @@ +declare module 'stream/consumers' { + import { Blob as NodeBlob } from "node:buffer"; + import { Readable } from 'node:stream'; + function buffer(stream: NodeJS.ReadableStream | Readable | AsyncIterator): Promise; + function text(stream: NodeJS.ReadableStream | Readable | AsyncIterator): Promise; + function arrayBuffer(stream: NodeJS.ReadableStream | Readable | AsyncIterator): Promise; + function blob(stream: NodeJS.ReadableStream | Readable | AsyncIterator): Promise; + function json(stream: NodeJS.ReadableStream | Readable | AsyncIterator): Promise; +} +declare module 'node:stream/consumers' { + export * from 'stream/consumers'; +} diff --git a/node_modules/@types/node/stream/promises.d.ts b/node_modules/@types/node/stream/promises.d.ts new file mode 100755 index 0000000..b427073 --- /dev/null +++ b/node_modules/@types/node/stream/promises.d.ts @@ -0,0 +1,42 @@ +declare module 'stream/promises' { + import { FinishedOptions, PipelineSource, PipelineTransform, PipelineDestination, PipelinePromise, PipelineOptions } from 'node:stream'; + function finished(stream: NodeJS.ReadableStream | NodeJS.WritableStream | NodeJS.ReadWriteStream, options?: FinishedOptions): Promise; + function pipeline, B extends PipelineDestination>(source: A, destination: B, options?: PipelineOptions): PipelinePromise; + function pipeline, T1 extends PipelineTransform, B extends PipelineDestination>( + source: A, + transform1: T1, + destination: B, + options?: PipelineOptions + ): PipelinePromise; + function pipeline, T1 extends PipelineTransform, T2 extends PipelineTransform, B extends PipelineDestination>( + source: A, + transform1: T1, + transform2: T2, + destination: B, + options?: PipelineOptions + ): PipelinePromise; + function pipeline< + A extends PipelineSource, + T1 extends PipelineTransform, + T2 extends PipelineTransform, + T3 extends PipelineTransform, + B extends PipelineDestination + >(source: A, transform1: T1, transform2: T2, transform3: T3, destination: B, options?: PipelineOptions): PipelinePromise; + function pipeline< + A extends PipelineSource, + T1 extends PipelineTransform, + T2 extends PipelineTransform, + T3 extends PipelineTransform, + T4 extends PipelineTransform, + B extends PipelineDestination + >(source: A, transform1: T1, transform2: T2, transform3: T3, transform4: T4, destination: B, options?: PipelineOptions): PipelinePromise; + function pipeline(streams: ReadonlyArray, options?: PipelineOptions): Promise; + function pipeline( + stream1: NodeJS.ReadableStream, + stream2: NodeJS.ReadWriteStream | NodeJS.WritableStream, + ...streams: Array + ): Promise; +} +declare module 'node:stream/promises' { + export * from 'stream/promises'; +} diff --git a/node_modules/@types/node/stream/web.d.ts b/node_modules/@types/node/stream/web.d.ts new file mode 100755 index 0000000..f9ef057 --- /dev/null +++ b/node_modules/@types/node/stream/web.d.ts @@ -0,0 +1,330 @@ +declare module 'stream/web' { + // stub module, pending copy&paste from .d.ts or manual impl + // copy from 
lib.dom.d.ts + interface ReadableWritablePair { + readable: ReadableStream; + /** + * Provides a convenient, chainable way of piping this readable stream + * through a transform stream (or any other { writable, readable } + * pair). It simply pipes the stream into the writable side of the + * supplied pair, and returns the readable side for further use. + * + * Piping a stream will lock it for the duration of the pipe, preventing + * any other consumer from acquiring a reader. + */ + writable: WritableStream; + } + interface StreamPipeOptions { + preventAbort?: boolean; + preventCancel?: boolean; + /** + * Pipes this readable stream to a given writable stream destination. + * The way in which the piping process behaves under various error + * conditions can be customized with a number of passed options. It + * returns a promise that fulfills when the piping process completes + * successfully, or rejects if any errors were encountered. + * + * Piping a stream will lock it for the duration of the pipe, preventing + * any other consumer from acquiring a reader. + * + * Errors and closures of the source and destination streams propagate + * as follows: + * + * An error in this source readable stream will abort destination, + * unless preventAbort is truthy. The returned promise will be rejected + * with the source's error, or with any error that occurs during + * aborting the destination. + * + * An error in destination will cancel this source readable stream, + * unless preventCancel is truthy. The returned promise will be rejected + * with the destination's error, or with any error that occurs during + * canceling the source. + * + * When this source readable stream closes, destination will be closed, + * unless preventClose is truthy. The returned promise will be fulfilled + * once this process completes, unless an error is encountered while + * closing the destination, in which case it will be rejected with that + * error. + * + * If destination starts out closed or closing, this source readable + * stream will be canceled, unless preventCancel is true. The returned + * promise will be rejected with an error indicating piping to a closed + * stream failed, or with any error that occurs during canceling the + * source. + * + * The signal option can be set to an AbortSignal to allow aborting an + * ongoing pipe operation via the corresponding AbortController. In this + * case, this source readable stream will be canceled, and destination + * aborted, unless the respective options preventCancel or preventAbort + * are set. 
+ */ + preventClose?: boolean; + signal?: AbortSignal; + } + interface ReadableStreamGenericReader { + readonly closed: Promise; + cancel(reason?: any): Promise; + } + interface ReadableStreamDefaultReadValueResult { + done: false; + value: T; + } + interface ReadableStreamDefaultReadDoneResult { + done: true; + value?: undefined; + } + type ReadableStreamController = ReadableStreamDefaultController; + type ReadableStreamDefaultReadResult = ReadableStreamDefaultReadValueResult | ReadableStreamDefaultReadDoneResult; + interface ReadableByteStreamControllerCallback { + (controller: ReadableByteStreamController): void | PromiseLike; + } + interface UnderlyingSinkAbortCallback { + (reason?: any): void | PromiseLike; + } + interface UnderlyingSinkCloseCallback { + (): void | PromiseLike; + } + interface UnderlyingSinkStartCallback { + (controller: WritableStreamDefaultController): any; + } + interface UnderlyingSinkWriteCallback { + (chunk: W, controller: WritableStreamDefaultController): void | PromiseLike; + } + interface UnderlyingSourceCancelCallback { + (reason?: any): void | PromiseLike; + } + interface UnderlyingSourcePullCallback { + (controller: ReadableStreamController): void | PromiseLike; + } + interface UnderlyingSourceStartCallback { + (controller: ReadableStreamController): any; + } + interface TransformerFlushCallback { + (controller: TransformStreamDefaultController): void | PromiseLike; + } + interface TransformerStartCallback { + (controller: TransformStreamDefaultController): any; + } + interface TransformerTransformCallback { + (chunk: I, controller: TransformStreamDefaultController): void | PromiseLike; + } + interface UnderlyingByteSource { + autoAllocateChunkSize?: number; + cancel?: ReadableStreamErrorCallback; + pull?: ReadableByteStreamControllerCallback; + start?: ReadableByteStreamControllerCallback; + type: 'bytes'; + } + interface UnderlyingSource { + cancel?: UnderlyingSourceCancelCallback; + pull?: UnderlyingSourcePullCallback; + start?: UnderlyingSourceStartCallback; + type?: undefined; + } + interface UnderlyingSink { + abort?: UnderlyingSinkAbortCallback; + close?: UnderlyingSinkCloseCallback; + start?: UnderlyingSinkStartCallback; + type?: undefined; + write?: UnderlyingSinkWriteCallback; + } + interface ReadableStreamErrorCallback { + (reason: any): void | PromiseLike; + } + /** This Streams API interface represents a readable stream of byte data. 
*/ + interface ReadableStream { + readonly locked: boolean; + cancel(reason?: any): Promise; + getReader(): ReadableStreamDefaultReader; + pipeThrough(transform: ReadableWritablePair, options?: StreamPipeOptions): ReadableStream; + pipeTo(destination: WritableStream, options?: StreamPipeOptions): Promise; + tee(): [ReadableStream, ReadableStream]; + values(options?: { preventCancel?: boolean }): AsyncIterableIterator; + [Symbol.asyncIterator](): AsyncIterableIterator; + } + const ReadableStream: { + prototype: ReadableStream; + new (underlyingSource: UnderlyingByteSource, strategy?: QueuingStrategy): ReadableStream; + new (underlyingSource?: UnderlyingSource, strategy?: QueuingStrategy): ReadableStream; + }; + interface ReadableStreamDefaultReader extends ReadableStreamGenericReader { + read(): Promise>; + releaseLock(): void; + } + const ReadableStreamDefaultReader: { + prototype: ReadableStreamDefaultReader; + new (stream: ReadableStream): ReadableStreamDefaultReader; + }; + const ReadableStreamBYOBReader: any; + const ReadableStreamBYOBRequest: any; + interface ReadableByteStreamController { + readonly byobRequest: undefined; + readonly desiredSize: number | null; + close(): void; + enqueue(chunk: ArrayBufferView): void; + error(error?: any): void; + } + const ReadableByteStreamController: { + prototype: ReadableByteStreamController; + new (): ReadableByteStreamController; + }; + interface ReadableStreamDefaultController { + readonly desiredSize: number | null; + close(): void; + enqueue(chunk?: R): void; + error(e?: any): void; + } + const ReadableStreamDefaultController: { + prototype: ReadableStreamDefaultController; + new (): ReadableStreamDefaultController; + }; + interface Transformer { + flush?: TransformerFlushCallback; + readableType?: undefined; + start?: TransformerStartCallback; + transform?: TransformerTransformCallback; + writableType?: undefined; + } + interface TransformStream { + readonly readable: ReadableStream; + readonly writable: WritableStream; + } + const TransformStream: { + prototype: TransformStream; + new (transformer?: Transformer, writableStrategy?: QueuingStrategy, readableStrategy?: QueuingStrategy): TransformStream; + }; + interface TransformStreamDefaultController { + readonly desiredSize: number | null; + enqueue(chunk?: O): void; + error(reason?: any): void; + terminate(): void; + } + const TransformStreamDefaultController: { + prototype: TransformStreamDefaultController; + new (): TransformStreamDefaultController; + }; + /** + * This Streams API interface provides a standard abstraction for writing + * streaming data to a destination, known as a sink. This object comes with + * built-in back pressure and queuing. + */ + interface WritableStream { + readonly locked: boolean; + abort(reason?: any): Promise; + close(): Promise; + getWriter(): WritableStreamDefaultWriter; + } + const WritableStream: { + prototype: WritableStream; + new (underlyingSink?: UnderlyingSink, strategy?: QueuingStrategy): WritableStream; + }; + /** + * This Streams API interface is the object returned by + * WritableStream.getWriter() and once created locks the < writer to the + * WritableStream ensuring that no other streams can write to the underlying + * sink. 
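The `stream/web` declarations added in this hunk mirror the WHATWG Streams API. A minimal sketch of how the pieces compose (a `ReadableStream` source, a `TransformStream`, a `WritableStream` sink, and `pipeThrough()`/`pipeTo()`); the chunk values are arbitrary:

```js
const { ReadableStream, TransformStream, WritableStream } = require('node:stream/web');

// Source stream producing a few chunks.
const source = new ReadableStream({
  start(controller) {
    for (const chunk of ['a', 'b', 'c']) controller.enqueue(chunk);
    controller.close();
  },
});

// Transform stream that upper-cases each chunk.
const upper = new TransformStream({
  transform(chunk, controller) {
    controller.enqueue(String(chunk).toUpperCase());
  },
});

// Sink that just logs what it receives.
const sink = new WritableStream({
  write(chunk) {
    console.log('got', chunk);
  },
});

// pipeThrough() returns the readable side of the pair; pipeTo() resolves
// once the whole pipe has completed.
source.pipeThrough(upper).pipeTo(sink).then(() => console.log('done'));
```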
+ */ + interface WritableStreamDefaultWriter { + readonly closed: Promise; + readonly desiredSize: number | null; + readonly ready: Promise; + abort(reason?: any): Promise; + close(): Promise; + releaseLock(): void; + write(chunk?: W): Promise; + } + const WritableStreamDefaultWriter: { + prototype: WritableStreamDefaultWriter; + new (stream: WritableStream): WritableStreamDefaultWriter; + }; + /** + * This Streams API interface represents a controller allowing control of a + * WritableStream's state. When constructing a WritableStream, the + * underlying sink is given a corresponding WritableStreamDefaultController + * instance to manipulate. + */ + interface WritableStreamDefaultController { + error(e?: any): void; + } + const WritableStreamDefaultController: { + prototype: WritableStreamDefaultController; + new (): WritableStreamDefaultController; + }; + interface QueuingStrategy { + highWaterMark?: number; + size?: QueuingStrategySize; + } + interface QueuingStrategySize { + (chunk?: T): number; + } + interface QueuingStrategyInit { + /** + * Creates a new ByteLengthQueuingStrategy with the provided high water + * mark. + * + * Note that the provided high water mark will not be validated ahead of + * time. Instead, if it is negative, NaN, or not a number, the resulting + * ByteLengthQueuingStrategy will cause the corresponding stream + * constructor to throw. + */ + highWaterMark: number; + } + /** + * This Streams API interface provides a built-in byte length queuing + * strategy that can be used when constructing streams. + */ + interface ByteLengthQueuingStrategy extends QueuingStrategy { + readonly highWaterMark: number; + readonly size: QueuingStrategySize; + } + const ByteLengthQueuingStrategy: { + prototype: ByteLengthQueuingStrategy; + new (init: QueuingStrategyInit): ByteLengthQueuingStrategy; + }; + /** + * This Streams API interface provides a built-in byte length queuing + * strategy that can be used when constructing streams. + */ + interface CountQueuingStrategy extends QueuingStrategy { + readonly highWaterMark: number; + readonly size: QueuingStrategySize; + } + const CountQueuingStrategy: { + prototype: CountQueuingStrategy; + new (init: QueuingStrategyInit): CountQueuingStrategy; + }; + interface TextEncoderStream { + /** Returns "utf-8". */ + readonly encoding: 'utf-8'; + readonly readable: ReadableStream; + readonly writable: WritableStream; + readonly [Symbol.toStringTag]: string; + } + const TextEncoderStream: { + prototype: TextEncoderStream; + new (): TextEncoderStream; + }; + interface TextDecoderOptions { + fatal?: boolean; + ignoreBOM?: boolean; + } + type BufferSource = ArrayBufferView | ArrayBuffer; + interface TextDecoderStream { + /** Returns encoding's name, lower cased. */ + readonly encoding: string; + /** Returns `true` if error mode is "fatal", and `false` otherwise. */ + readonly fatal: boolean; + /** Returns `true` if ignore BOM flag is set, and `false` otherwise. 
*/ + readonly ignoreBOM: boolean; + readonly readable: ReadableStream; + readonly writable: WritableStream; + readonly [Symbol.toStringTag]: string; + } + const TextDecoderStream: { + prototype: TextDecoderStream; + new (label?: string, options?: TextDecoderOptions): TextDecoderStream; + }; +} +declare module 'node:stream/web' { + export * from 'stream/web'; +} diff --git a/node_modules/@types/node/string_decoder.d.ts b/node_modules/@types/node/string_decoder.d.ts new file mode 100755 index 0000000..a585804 --- /dev/null +++ b/node_modules/@types/node/string_decoder.d.ts @@ -0,0 +1,67 @@ +/** + * The `string_decoder` module provides an API for decoding `Buffer` objects into + * strings in a manner that preserves encoded multi-byte UTF-8 and UTF-16 + * characters. It can be accessed using: + * + * ```js + * const { StringDecoder } = require('string_decoder'); + * ``` + * + * The following example shows the basic use of the `StringDecoder` class. + * + * ```js + * const { StringDecoder } = require('string_decoder'); + * const decoder = new StringDecoder('utf8'); + * + * const cent = Buffer.from([0xC2, 0xA2]); + * console.log(decoder.write(cent)); + * + * const euro = Buffer.from([0xE2, 0x82, 0xAC]); + * console.log(decoder.write(euro)); + * ``` + * + * When a `Buffer` instance is written to the `StringDecoder` instance, an + * internal buffer is used to ensure that the decoded string does not contain + * any incomplete multibyte characters. These are held in the buffer until the + * next call to `stringDecoder.write()` or until `stringDecoder.end()` is called. + * + * In the following example, the three UTF-8 encoded bytes of the European Euro + * symbol (`€`) are written over three separate operations: + * + * ```js + * const { StringDecoder } = require('string_decoder'); + * const decoder = new StringDecoder('utf8'); + * + * decoder.write(Buffer.from([0xE2])); + * decoder.write(Buffer.from([0x82])); + * console.log(decoder.end(Buffer.from([0xAC]))); + * ``` + * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/string_decoder.js) + */ +declare module 'string_decoder' { + class StringDecoder { + constructor(encoding?: BufferEncoding); + /** + * Returns a decoded string, ensuring that any incomplete multibyte characters at + * the end of the `Buffer`, or `TypedArray`, or `DataView` are omitted from the + * returned string and stored in an internal buffer for the next call to`stringDecoder.write()` or `stringDecoder.end()`. + * @since v0.1.99 + * @param buffer A `Buffer`, or `TypedArray`, or `DataView` containing the bytes to decode. + */ + write(buffer: Buffer): string; + /** + * Returns any remaining input stored in the internal buffer as a string. Bytes + * representing incomplete UTF-8 and UTF-16 characters will be replaced with + * substitution characters appropriate for the character encoding. + * + * If the `buffer` argument is provided, one final call to `stringDecoder.write()`is performed before returning the remaining input. + * After `end()` is called, the `stringDecoder` object can be reused for new input. + * @since v0.9.3 + * @param buffer A `Buffer`, or `TypedArray`, or `DataView` containing the bytes to decode. 
+ */ + end(buffer?: Buffer): string; + } +} +declare module 'node:string_decoder' { + export * from 'string_decoder'; +} diff --git a/node_modules/@types/node/test.d.ts b/node_modules/@types/node/test.d.ts new file mode 100755 index 0000000..a9b4eeb --- /dev/null +++ b/node_modules/@types/node/test.d.ts @@ -0,0 +1,314 @@ +/** + * The `node:test` module provides a standalone testing module. + * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/test.js) + */ +declare module 'node:test' { + /** + * Programmatically start the test runner. + * @since v18.9.0 + * @param options Configuration options for running tests. + * @returns A {@link TapStream} that emits events about the test execution. + */ + function run(options?: RunOptions): TapStream; + + /** + * The `test()` function is the value imported from the test module. Each invocation of this + * function results in the creation of a test point in the TAP output. + * + * The {@link TestContext} object passed to the fn argument can be used to perform actions + * related to the current test. Examples include skipping the test, adding additional TAP + * diagnostic information, or creating subtests. + * + * `test()` returns a {@link Promise} that resolves once the test completes. The return value + * can usually be discarded for top level tests. However, the return value from subtests should + * be used to prevent the parent test from finishing first and cancelling the subtest as shown + * in the following example. + * + * ```js + * test('top level test', async (t) => { + * // The setTimeout() in the following subtest would cause it to outlive its + * // parent test if 'await' is removed on the next line. Once the parent test + * // completes, it will cancel any outstanding subtests. + * await t.test('longer running subtest', async (t) => { + * return new Promise((resolve, reject) => { + * setTimeout(resolve, 1000); + * }); + * }); + * }); + * ``` + * @since v18.0.0 + * @param name The name of the test, which is displayed when reporting test results. + * Default: The `name` property of fn, or `''` if `fn` does not have a name. + * @param options Configuration options for the test + * @param fn The function under test. The first argument to this function is a + * {@link TestContext} object. If the test uses callbacks, the callback function is + * passed as the second argument. Default: A no-op function. + * @returns A {@link Promise} resolved with `undefined` once the test completes. + */ + function test(name?: string, fn?: TestFn): Promise; + function test(name?: string, options?: TestOptions, fn?: TestFn): Promise; + function test(options?: TestOptions, fn?: TestFn): Promise; + function test(fn?: TestFn): Promise; + + /** + * @since v18.6.0 + * @param name The name of the suite, which is displayed when reporting suite results. + * Default: The `name` property of fn, or `''` if `fn` does not have a name. + * @param options Configuration options for the suite + * @param fn The function under suite. Default: A no-op function. + */ + function describe(name?: string, options?: TestOptions, fn?: SuiteFn): void; + function describe(name?: string, fn?: SuiteFn): void; + function describe(options?: TestOptions, fn?: SuiteFn): void; + function describe(fn?: SuiteFn): void; + + /** + * @since v18.6.0 + * @param name The name of the test, which is displayed when reporting test results. + * Default: The `name` property of fn, or `''` if `fn` does not have a name. 
+ * @param options Configuration options for the test + * @param fn The function under test. If the test uses callbacks, the callback function is + * passed as the second argument. Default: A no-op function. + */ + function it(name?: string, options?: TestOptions, fn?: ItFn): void; + function it(name?: string, fn?: ItFn): void; + function it(options?: TestOptions, fn?: ItFn): void; + function it(fn?: ItFn): void; + + /** + * The type of a function under test. The first argument to this function is a + * {@link TestContext} object. If the test uses callbacks, the callback function is passed as + * the second argument. + */ + type TestFn = (t: TestContext, done: (result?: any) => void) => any; + + /** + * The type of a function under Suite. + * If the test uses callbacks, the callback function is passed as an argument + */ + type SuiteFn = (done: (result?: any) => void) => void; + + /** + * The type of a function under test. + * If the test uses callbacks, the callback function is passed as an argument + */ + type ItFn = (done: (result?: any) => void) => any; + + interface RunOptions { + /** + * @default false + */ + concurrency?: number | boolean; + + /** + * An array containing the list of files to run. If unspecified, the test runner execution model will be used. + */ + files?: readonly string[]; + + /** + * Allows aborting an in-progress test. + * @default undefined + */ + signal?: AbortSignal; + + /** + * A number of milliseconds the test will fail after. If unspecified, subtests inherit this + * value from their parent. + * @default Infinity + */ + timeout?: number; + } + + /** + * A successful call of the run() method will return a new TapStream object, streaming a TAP output. + * TapStream will emit events in the order of the tests' definitions. 
+ * @since v18.9.0 + */ + interface TapStream extends NodeJS.ReadableStream { + addListener(event: 'test:diagnostic', listener: (message: string) => void): this; + addListener(event: 'test:fail', listener: (data: TestFail) => void): this; + addListener(event: 'test:pass', listener: (data: TestPass) => void): this; + addListener(event: string, listener: (...args: any[]) => void): this; + emit(event: 'test:diagnostic', message: string): boolean; + emit(event: 'test:fail', data: TestFail): boolean; + emit(event: 'test:pass', data: TestPass): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: 'test:diagnostic', listener: (message: string) => void): this; + on(event: 'test:fail', listener: (data: TestFail) => void): this; + on(event: 'test:pass', listener: (data: TestPass) => void): this; + on(event: string, listener: (...args: any[]) => void): this; + once(event: 'test:diagnostic', listener: (message: string) => void): this; + once(event: 'test:fail', listener: (data: TestFail) => void): this; + once(event: 'test:pass', listener: (data: TestPass) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + prependListener(event: 'test:diagnostic', listener: (message: string) => void): this; + prependListener(event: 'test:fail', listener: (data: TestFail) => void): this; + prependListener(event: 'test:pass', listener: (data: TestPass) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: 'test:diagnostic', listener: (message: string) => void): this; + prependOnceListener(event: 'test:fail', listener: (data: TestFail) => void): this; + prependOnceListener(event: 'test:pass', listener: (data: TestPass) => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + } + + interface TestFail { + /** + * The test duration. + */ + duration: number; + + /** + * The failure casing test to fail. + */ + error: Error; + + /** + * The test name. + */ + name: string; + + /** + * The ordinal number of the test. + */ + testNumber: number; + + /** + * Present if `context.todo` is called. + */ + todo?: string; + + /** + * Present if `context.skip` is called. + */ + skip?: string; + } + + interface TestPass { + /** + * The test duration. + */ + duration: number; + + /** + * The test name. + */ + name: string; + + /** + * The ordinal number of the test. + */ + testNumber: number; + + /** + * Present if `context.todo` is called. + */ + todo?: string; + + /** + * Present if `context.skip` is called. + */ + skip?: string; + } + + /** + * An instance of `TestContext` is passed to each test function in order to interact with the + * test runner. However, the `TestContext` constructor is not exposed as part of the API. + * @since v18.0.0 + */ + interface TestContext { + /** + * This function is used to write TAP diagnostics to the output. Any diagnostic information is + * included at the end of the test's results. This function does not return a value. + * @param message Message to be displayed as a TAP diagnostic. + * @since v18.0.0 + */ + diagnostic(message: string): void; + + /** + * If `shouldRunOnlyTests` is truthy, the test context will only run tests that have the `only` + * option set. Otherwise, all tests are run. If Node.js was not started with the `--test-only` + * command-line option, this function is a no-op. + * @param shouldRunOnlyTests Whether or not to run `only` tests. 
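The `TestContext` methods declared above (`diagnostic()`, `skip()`, `todo()`, and the nested `test()`) are easiest to see together. A small sketch, assuming Node 18's `node:test` module as typed in this hunk:

```js
const test = require('node:test');

test('parent test', async (t) => {
  // diagnostic() writes extra TAP output alongside the results.
  t.diagnostic('starting subtests');

  await t.test('passing subtest', () => {
    // assertions would go here
  });

  await t.test('not implemented yet', (t) => {
    // todo() marks the subtest as TODO without stopping it.
    t.todo('waiting on upstream fix');
  });

  await t.test('skipped on this platform', (t) => {
    // skip() marks the subtest as skipped in the TAP output.
    t.skip('not relevant here');
  });
});
```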
+ * @since v18.0.0 + */ + runOnly(shouldRunOnlyTests: boolean): void; + + /** + * This function causes the test's output to indicate the test as skipped. If `message` is + * provided, it is included in the TAP output. Calling `skip()` does not terminate execution of + * the test function. This function does not return a value. + * @param message Optional skip message to be displayed in TAP output. + * @since v18.0.0 + */ + skip(message?: string): void; + + /** + * This function adds a `TODO` directive to the test's output. If `message` is provided, it is + * included in the TAP output. Calling `todo()` does not terminate execution of the test + * function. This function does not return a value. + * @param message Optional `TODO` message to be displayed in TAP output. + * @since v18.0.0 + */ + todo(message?: string): void; + + /** + * This function is used to create subtests under the current test. This function behaves in + * the same fashion as the top level {@link test} function. + * @since v18.0.0 + * @param name The name of the test, which is displayed when reporting test results. + * Default: The `name` property of fn, or `''` if `fn` does not have a name. + * @param options Configuration options for the test + * @param fn The function under test. This first argument to this function is a + * {@link TestContext} object. If the test uses callbacks, the callback function is + * passed as the second argument. Default: A no-op function. + * @returns A {@link Promise} resolved with `undefined` once the test completes. + */ + test: typeof test; + } + + interface TestOptions { + /** + * The number of tests that can be run at the same time. If unspecified, subtests inherit this + * value from their parent. + * @default 1 + */ + concurrency?: number; + + /** + * If truthy, and the test context is configured to run `only` tests, then this test will be + * run. Otherwise, the test is skipped. + * @default false + */ + only?: boolean; + + /** + * Allows aborting an in-progress test. + * @since v18.8.0 + */ + signal?: AbortSignal; + + /** + * If truthy, the test is skipped. If a string is provided, that string is displayed in the + * test results as the reason for skipping the test. + * @default false + */ + skip?: boolean | string; + + /** + * A number of milliseconds the test will fail after. If unspecified, subtests inherit this + * value from their parent. + * @default Infinity + * @since v18.7.0 + */ + timeout?: number; + + /** + * If truthy, the test marked as `TODO`. If a string is provided, that string is displayed in + * the test results as the reason why the test is `TODO`. + * @default false + */ + todo?: boolean | string; + } + + export { test as default, run, test, describe, it }; +} diff --git a/node_modules/@types/node/timers.d.ts b/node_modules/@types/node/timers.d.ts new file mode 100755 index 0000000..b26f3ce --- /dev/null +++ b/node_modules/@types/node/timers.d.ts @@ -0,0 +1,94 @@ +/** + * The `timer` module exposes a global API for scheduling functions to + * be called at some future period of time. Because the timer functions are + * globals, there is no need to call `require('timers')` to use the API. + * + * The timer functions within Node.js implement a similar API as the timers API + * provided by Web Browsers but use a different internal implementation that is + * built around the Node.js [Event Loop](https://nodejs.org/en/docs/guides/event-loop-timers-and-nexttick/#setimmediate-vs-settimeout). 
+ * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/timers.js) + */ +declare module 'timers' { + import { Abortable } from 'node:events'; + import { setTimeout as setTimeoutPromise, setImmediate as setImmediatePromise, setInterval as setIntervalPromise } from 'node:timers/promises'; + interface TimerOptions extends Abortable { + /** + * Set to `false` to indicate that the scheduled `Timeout` + * should not require the Node.js event loop to remain active. + * @default true + */ + ref?: boolean | undefined; + } + let setTimeout: typeof global.setTimeout; + let clearTimeout: typeof global.clearTimeout; + let setInterval: typeof global.setInterval; + let clearInterval: typeof global.clearInterval; + let setImmediate: typeof global.setImmediate; + let clearImmediate: typeof global.clearImmediate; + global { + namespace NodeJS { + // compatibility with older typings + interface Timer extends RefCounted { + hasRef(): boolean; + refresh(): this; + [Symbol.toPrimitive](): number; + } + interface Immediate extends RefCounted { + /** + * If true, the `Immediate` object will keep the Node.js event loop active. + * @since v11.0.0 + */ + hasRef(): boolean; + _onImmediate: Function; // to distinguish it from the Timeout class + } + interface Timeout extends Timer { + /** + * If true, the `Timeout` object will keep the Node.js event loop active. + * @since v11.0.0 + */ + hasRef(): boolean; + /** + * Sets the timer's start time to the current time, and reschedules the timer to + * call its callback at the previously specified duration adjusted to the current + * time. This is useful for refreshing a timer without allocating a new + * JavaScript object. + * + * Using this on a timer that has already called its callback will reactivate the + * timer. + * @since v10.2.0 + * @return a reference to `timeout` + */ + refresh(): this; + [Symbol.toPrimitive](): number; + } + } + function setTimeout(callback: (...args: TArgs) => void, ms?: number, ...args: TArgs): NodeJS.Timeout; + // util.promisify no rest args compability + // tslint:disable-next-line void-return + function setTimeout(callback: (args: void) => void, ms?: number): NodeJS.Timeout; + namespace setTimeout { + const __promisify__: typeof setTimeoutPromise; + } + function clearTimeout(timeoutId: NodeJS.Timeout | string | number | undefined): void; + function setInterval(callback: (...args: TArgs) => void, ms?: number, ...args: TArgs): NodeJS.Timer; + // util.promisify no rest args compability + // tslint:disable-next-line void-return + function setInterval(callback: (args: void) => void, ms?: number): NodeJS.Timer; + namespace setInterval { + const __promisify__: typeof setIntervalPromise; + } + function clearInterval(intervalId: NodeJS.Timeout | string | number | undefined): void; + function setImmediate(callback: (...args: TArgs) => void, ...args: TArgs): NodeJS.Immediate; + // util.promisify no rest args compability + // tslint:disable-next-line void-return + function setImmediate(callback: (args: void) => void): NodeJS.Immediate; + namespace setImmediate { + const __promisify__: typeof setImmediatePromise; + } + function clearImmediate(immediateId: NodeJS.Immediate | undefined): void; + function queueMicrotask(callback: () => void): void; + } +} +declare module 'node:timers' { + export * from 'timers'; +} diff --git a/node_modules/@types/node/timers/promises.d.ts b/node_modules/@types/node/timers/promises.d.ts new file mode 100755 index 0000000..fd77888 --- /dev/null +++ b/node_modules/@types/node/timers/promises.d.ts @@ -0,0 +1,68 @@ 
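A short sketch of the `NodeJS.Timeout` helpers declared in the hunk above (`refresh()`, `unref()`, `hasRef()`); the five-second idle delay is just an illustrative value:

```js
// A repeating idle timeout that is pushed back every time activity is seen.
const timeout = setTimeout(() => console.log('idle for 5s'), 5000);

function onActivity() {
  // refresh() re-arms the same Timeout object instead of allocating a new one.
  timeout.refresh();
}

// unref() lets the process exit even if this timer is still pending;
// hasRef() reports whether the timer currently keeps the event loop alive.
timeout.unref();
console.log(timeout.hasRef()); // false

onActivity();
```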
+/** + * The `timers/promises` API provides an alternative set of timer functions + * that return `Promise` objects. The API is accessible via`require('timers/promises')`. + * + * ```js + * import { + * setTimeout, + * setImmediate, + * setInterval, + * } from 'timers/promises'; + * ``` + * @since v15.0.0 + */ +declare module 'timers/promises' { + import { TimerOptions } from 'node:timers'; + /** + * ```js + * import { + * setTimeout, + * } from 'timers/promises'; + * + * const res = await setTimeout(100, 'result'); + * + * console.log(res); // Prints 'result' + * ``` + * @since v15.0.0 + * @param [delay=1] The number of milliseconds to wait before fulfilling the promise. + * @param value A value with which the promise is fulfilled. + */ + function setTimeout(delay?: number, value?: T, options?: TimerOptions): Promise; + /** + * ```js + * import { + * setImmediate, + * } from 'timers/promises'; + * + * const res = await setImmediate('result'); + * + * console.log(res); // Prints 'result' + * ``` + * @since v15.0.0 + * @param value A value with which the promise is fulfilled. + */ + function setImmediate(value?: T, options?: TimerOptions): Promise; + /** + * Returns an async iterator that generates values in an interval of `delay` ms. + * + * ```js + * import { + * setInterval, + * } from 'timers/promises'; + * + * const interval = 100; + * for await (const startTime of setInterval(interval, Date.now())) { + * const now = Date.now(); + * console.log(now); + * if ((now - startTime) > 1000) + * break; + * } + * console.log(Date.now()); + * ``` + * @since v15.9.0 + */ + function setInterval(delay?: number, value?: T, options?: TimerOptions): AsyncIterable; +} +declare module 'node:timers/promises' { + export * from 'timers/promises'; +} diff --git a/node_modules/@types/node/tls.d.ts b/node_modules/@types/node/tls.d.ts new file mode 100755 index 0000000..2cbc716 --- /dev/null +++ b/node_modules/@types/node/tls.d.ts @@ -0,0 +1,1028 @@ +/** + * The `tls` module provides an implementation of the Transport Layer Security + * (TLS) and Secure Socket Layer (SSL) protocols that is built on top of OpenSSL. + * The module can be accessed using: + * + * ```js + * const tls = require('tls'); + * ``` + * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/tls.js) + */ +declare module 'tls' { + import { X509Certificate } from 'node:crypto'; + import * as net from 'node:net'; + import * as stream from 'stream'; + const CLIENT_RENEG_LIMIT: number; + const CLIENT_RENEG_WINDOW: number; + interface Certificate { + /** + * Country code. + */ + C: string; + /** + * Street. + */ + ST: string; + /** + * Locality. + */ + L: string; + /** + * Organization. + */ + O: string; + /** + * Organizational unit. + */ + OU: string; + /** + * Common name. + */ + CN: string; + } + interface PeerCertificate { + subject: Certificate; + issuer: Certificate; + subjectaltname: string; + infoAccess: NodeJS.Dict; + modulus: string; + exponent: string; + valid_from: string; + valid_to: string; + fingerprint: string; + fingerprint256: string; + ext_key_usage: string[]; + serialNumber: string; + raw: Buffer; + } + interface DetailedPeerCertificate extends PeerCertificate { + issuerCertificate: DetailedPeerCertificate; + } + interface CipherNameAndProtocol { + /** + * The cipher name. + */ + name: string; + /** + * SSL/TLS protocol version. + */ + version: string; + /** + * IETF name for the cipher suite. + */ + standardName: string; + } + interface EphemeralKeyInfo { + /** + * The supported types are 'DH' and 'ECDH'. 
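The `timers/promises` functions declared in the hunk above all take a `TimerOptions`, which (via `Abortable`) accepts an `AbortSignal`. A hedged sketch of cancelling a pending promise-based `setTimeout()`; the delays are arbitrary:

```js
const { setTimeout: sleep } = require('node:timers/promises');

async function main() {
  const ac = new AbortController();

  // Abort after 100 ms; the awaited promise then rejects with an AbortError.
  setTimeout(() => ac.abort(), 100);

  try {
    await sleep(5000, 'never delivered', { signal: ac.signal });
  } catch (err) {
    console.log(err.name); // 'AbortError'
  }
}

main();
```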
+ */ + type: string; + /** + * The name property is available only when type is 'ECDH'. + */ + name?: string | undefined; + /** + * The size of parameter of an ephemeral key exchange. + */ + size: number; + } + interface KeyObject { + /** + * Private keys in PEM format. + */ + pem: string | Buffer; + /** + * Optional passphrase. + */ + passphrase?: string | undefined; + } + interface PxfObject { + /** + * PFX or PKCS12 encoded private key and certificate chain. + */ + buf: string | Buffer; + /** + * Optional passphrase. + */ + passphrase?: string | undefined; + } + interface TLSSocketOptions extends SecureContextOptions, CommonConnectionOptions { + /** + * If true the TLS socket will be instantiated in server-mode. + * Defaults to false. + */ + isServer?: boolean | undefined; + /** + * An optional net.Server instance. + */ + server?: net.Server | undefined; + /** + * An optional Buffer instance containing a TLS session. + */ + session?: Buffer | undefined; + /** + * If true, specifies that the OCSP status request extension will be + * added to the client hello and an 'OCSPResponse' event will be + * emitted on the socket before establishing a secure communication + */ + requestOCSP?: boolean | undefined; + } + /** + * Performs transparent encryption of written data and all required TLS + * negotiation. + * + * Instances of `tls.TLSSocket` implement the duplex `Stream` interface. + * + * Methods that return TLS connection metadata (e.g.{@link TLSSocket.getPeerCertificate} will only return data while the + * connection is open. + * @since v0.11.4 + */ + class TLSSocket extends net.Socket { + /** + * Construct a new tls.TLSSocket object from an existing TCP socket. + */ + constructor(socket: net.Socket, options?: TLSSocketOptions); + /** + * This property is `true` if the peer certificate was signed by one of the CAs + * specified when creating the `tls.TLSSocket` instance, otherwise `false`. + * @since v0.11.4 + */ + authorized: boolean; + /** + * Returns the reason why the peer's certificate was not been verified. This + * property is set only when `tlsSocket.authorized === false`. + * @since v0.11.4 + */ + authorizationError: Error; + /** + * Always returns `true`. This may be used to distinguish TLS sockets from regular`net.Socket` instances. + * @since v0.11.4 + */ + encrypted: true; + /** + * String containing the selected ALPN protocol. + * Before a handshake has completed, this value is always null. + * When a handshake is completed but not ALPN protocol was selected, tlsSocket.alpnProtocol equals false. + */ + alpnProtocol: string | false | null; + /** + * Returns an object representing the local certificate. The returned object has + * some properties corresponding to the fields of the certificate. + * + * See {@link TLSSocket.getPeerCertificate} for an example of the certificate + * structure. + * + * If there is no local certificate, an empty object will be returned. If the + * socket has been destroyed, `null` will be returned. + * @since v11.2.0 + */ + getCertificate(): PeerCertificate | object | null; + /** + * Returns an object containing information on the negotiated cipher suite. + * + * For example: + * + * ```json + * { + * "name": "AES128-SHA256", + * "standardName": "TLS_RSA_WITH_AES_128_CBC_SHA256", + * "version": "TLSv1.2" + * } + * ``` + * + * See [SSL\_CIPHER\_get\_name](https://www.openssl.org/docs/man1.1.1/man3/SSL_CIPHER_get_name.html) for more information. 
+ * @since v0.11.4 + */ + getCipher(): CipherNameAndProtocol; + /** + * Returns an object representing the type, name, and size of parameter of + * an ephemeral key exchange in `perfect forward secrecy` on a client + * connection. It returns an empty object when the key exchange is not + * ephemeral. As this is only supported on a client socket; `null` is returned + * if called on a server socket. The supported types are `'DH'` and `'ECDH'`. The`name` property is available only when type is `'ECDH'`. + * + * For example: `{ type: 'ECDH', name: 'prime256v1', size: 256 }`. + * @since v5.0.0 + */ + getEphemeralKeyInfo(): EphemeralKeyInfo | object | null; + /** + * As the `Finished` messages are message digests of the complete handshake + * (with a total of 192 bits for TLS 1.0 and more for SSL 3.0), they can + * be used for external authentication procedures when the authentication + * provided by SSL/TLS is not desired or is not enough. + * + * Corresponds to the `SSL_get_finished` routine in OpenSSL and may be used + * to implement the `tls-unique` channel binding from [RFC 5929](https://tools.ietf.org/html/rfc5929). + * @since v9.9.0 + * @return The latest `Finished` message that has been sent to the socket as part of a SSL/TLS handshake, or `undefined` if no `Finished` message has been sent yet. + */ + getFinished(): Buffer | undefined; + /** + * Returns an object representing the peer's certificate. If the peer does not + * provide a certificate, an empty object will be returned. If the socket has been + * destroyed, `null` will be returned. + * + * If the full certificate chain was requested, each certificate will include an`issuerCertificate` property containing an object representing its issuer's + * certificate. + * @since v0.11.4 + * @param detailed Include the full certificate chain if `true`, otherwise include just the peer's certificate. + * @return A certificate object. + */ + getPeerCertificate(detailed: true): DetailedPeerCertificate; + getPeerCertificate(detailed?: false): PeerCertificate; + getPeerCertificate(detailed?: boolean): PeerCertificate | DetailedPeerCertificate; + /** + * As the `Finished` messages are message digests of the complete handshake + * (with a total of 192 bits for TLS 1.0 and more for SSL 3.0), they can + * be used for external authentication procedures when the authentication + * provided by SSL/TLS is not desired or is not enough. + * + * Corresponds to the `SSL_get_peer_finished` routine in OpenSSL and may be used + * to implement the `tls-unique` channel binding from [RFC 5929](https://tools.ietf.org/html/rfc5929). + * @since v9.9.0 + * @return The latest `Finished` message that is expected or has actually been received from the socket as part of a SSL/TLS handshake, or `undefined` if there is no `Finished` message so + * far. + */ + getPeerFinished(): Buffer | undefined; + /** + * Returns a string containing the negotiated SSL/TLS protocol version of the + * current connection. The value `'unknown'` will be returned for connected + * sockets that have not completed the handshaking process. The value `null` will + * be returned for server sockets or disconnected client sockets. + * + * Protocol versions are: + * + * * `'SSLv3'` + * * `'TLSv1'` + * * `'TLSv1.1'` + * * `'TLSv1.2'` + * * `'TLSv1.3'` + * + * See the OpenSSL [`SSL_get_version`](https://www.openssl.org/docs/man1.1.1/man3/SSL_get_version.html) documentation for more information. 
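A rough sketch of the `TLSSocket` introspection accessors declared above (`authorized`, `getProtocol()`, `getCipher()`, `getPeerCertificate()`), assuming `tls.connect()` from the same module; the host and port are placeholders:

```js
const tls = require('node:tls');

// Connect to a TLS server and inspect the negotiated parameters once the
// handshake has completed (the callback fires on 'secureConnect').
const socket = tls.connect({ host: 'example.org', port: 443, servername: 'example.org' }, () => {
  console.log('authorized:', socket.authorized);
  console.log('protocol:', socket.getProtocol());  // e.g. 'TLSv1.3'
  console.log('cipher:', socket.getCipher());      // { name, standardName, version }
  console.log('peer cert subject:', socket.getPeerCertificate().subject);
  socket.end();
});

socket.on('error', (err) => console.error(err));
```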
+ * @since v5.7.0 + */ + getProtocol(): string | null; + /** + * Returns the TLS session data or `undefined` if no session was + * negotiated. On the client, the data can be provided to the `session` option of {@link connect} to resume the connection. On the server, it may be useful + * for debugging. + * + * See `Session Resumption` for more information. + * + * Note: `getSession()` works only for TLSv1.2 and below. For TLSv1.3, applications + * must use the `'session'` event (it also works for TLSv1.2 and below). + * @since v0.11.4 + */ + getSession(): Buffer | undefined; + /** + * See [SSL\_get\_shared\_sigalgs](https://www.openssl.org/docs/man1.1.1/man3/SSL_get_shared_sigalgs.html) for more information. + * @since v12.11.0 + * @return List of signature algorithms shared between the server and the client in the order of decreasing preference. + */ + getSharedSigalgs(): string[]; + /** + * For a client, returns the TLS session ticket if one is available, or`undefined`. For a server, always returns `undefined`. + * + * It may be useful for debugging. + * + * See `Session Resumption` for more information. + * @since v0.11.4 + */ + getTLSTicket(): Buffer | undefined; + /** + * See `Session Resumption` for more information. + * @since v0.5.6 + * @return `true` if the session was reused, `false` otherwise. + */ + isSessionReused(): boolean; + /** + * The `tlsSocket.renegotiate()` method initiates a TLS renegotiation process. + * Upon completion, the `callback` function will be passed a single argument + * that is either an `Error` (if the request failed) or `null`. + * + * This method can be used to request a peer's certificate after the secure + * connection has been established. + * + * When running as the server, the socket will be destroyed with an error after`handshakeTimeout` timeout. + * + * For TLSv1.3, renegotiation cannot be initiated, it is not supported by the + * protocol. + * @since v0.11.8 + * @param callback If `renegotiate()` returned `true`, callback is attached once to the `'secure'` event. If `renegotiate()` returned `false`, `callback` will be called in the next tick with + * an error, unless the `tlsSocket` has been destroyed, in which case `callback` will not be called at all. + * @return `true` if renegotiation was initiated, `false` otherwise. + */ + renegotiate( + options: { + rejectUnauthorized?: boolean | undefined; + requestCert?: boolean | undefined; + }, + callback: (err: Error | null) => void + ): undefined | boolean; + /** + * The `tlsSocket.setMaxSendFragment()` method sets the maximum TLS fragment size. + * Returns `true` if setting the limit succeeded; `false` otherwise. + * + * Smaller fragment sizes decrease the buffering latency on the client: larger + * fragments are buffered by the TLS layer until the entire fragment is received + * and its integrity is verified; large fragments can span multiple roundtrips + * and their processing can be delayed due to packet loss or reordering. However, + * smaller fragments add extra TLS framing bytes and CPU overhead, which may + * decrease overall server throughput. + * @since v0.11.11 + * @param [size=16384] The maximum TLS fragment size. The maximum value is `16384`. + */ + setMaxSendFragment(size: number): boolean; + /** + * Disables TLS renegotiation for this `TLSSocket` instance. Once called, attempts + * to renegotiate will trigger an `'error'` event on the `TLSSocket`. + * @since v8.4.0 + */ + disableRenegotiation(): void; + /** + * When enabled, TLS packet trace information is written to `stderr`. 
This can be + * used to debug TLS connection problems. + * + * The format of the output is identical to the output of`openssl s_client -trace` or `openssl s_server -trace`. While it is produced by + * OpenSSL's `SSL_trace()` function, the format is undocumented, can change + * without notice, and should not be relied on. + * @since v12.2.0 + */ + enableTrace(): void; + /** + * Returns the peer certificate as an `X509Certificate` object. + * + * If there is no peer certificate, or the socket has been destroyed,`undefined` will be returned. + * @since v15.9.0 + */ + getPeerX509Certificate(): X509Certificate | undefined; + /** + * Returns the local certificate as an `X509Certificate` object. + * + * If there is no local certificate, or the socket has been destroyed,`undefined` will be returned. + * @since v15.9.0 + */ + getX509Certificate(): X509Certificate | undefined; + /** + * Keying material is used for validations to prevent different kind of attacks in + * network protocols, for example in the specifications of IEEE 802.1X. + * + * Example + * + * ```js + * const keyingMaterial = tlsSocket.exportKeyingMaterial( + * 128, + * 'client finished'); + * + * /* + * Example return value of keyingMaterial: + * + * + * ``` + * + * See the OpenSSL [`SSL_export_keying_material`](https://www.openssl.org/docs/man1.1.1/man3/SSL_export_keying_material.html) documentation for more + * information. + * @since v13.10.0, v12.17.0 + * @param length number of bytes to retrieve from keying material + * @param label an application specific label, typically this will be a value from the [IANA Exporter Label + * Registry](https://www.iana.org/assignments/tls-parameters/tls-parameters.xhtml#exporter-labels). + * @param context Optionally provide a context. + * @return requested bytes of the keying material + */ + exportKeyingMaterial(length: number, label: string, context: Buffer): Buffer; + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: 'OCSPResponse', listener: (response: Buffer) => void): this; + addListener(event: 'secureConnect', listener: () => void): this; + addListener(event: 'session', listener: (session: Buffer) => void): this; + addListener(event: 'keylog', listener: (line: Buffer) => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: 'OCSPResponse', response: Buffer): boolean; + emit(event: 'secureConnect'): boolean; + emit(event: 'session', session: Buffer): boolean; + emit(event: 'keylog', line: Buffer): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: 'OCSPResponse', listener: (response: Buffer) => void): this; + on(event: 'secureConnect', listener: () => void): this; + on(event: 'session', listener: (session: Buffer) => void): this; + on(event: 'keylog', listener: (line: Buffer) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: 'OCSPResponse', listener: (response: Buffer) => void): this; + once(event: 'secureConnect', listener: () => void): this; + once(event: 'session', listener: (session: Buffer) => void): this; + once(event: 'keylog', listener: (line: Buffer) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: 'OCSPResponse', listener: (response: Buffer) => void): this; + prependListener(event: 'secureConnect', listener: () => void): this; + prependListener(event: 'session', listener: (session: Buffer) => void): this; + prependListener(event: 'keylog', listener: (line: Buffer) 
=> void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: 'OCSPResponse', listener: (response: Buffer) => void): this; + prependOnceListener(event: 'secureConnect', listener: () => void): this; + prependOnceListener(event: 'session', listener: (session: Buffer) => void): this; + prependOnceListener(event: 'keylog', listener: (line: Buffer) => void): this; + } + interface CommonConnectionOptions { + /** + * An optional TLS context object from tls.createSecureContext() + */ + secureContext?: SecureContext | undefined; + /** + * When enabled, TLS packet trace information is written to `stderr`. This can be + * used to debug TLS connection problems. + * @default false + */ + enableTrace?: boolean | undefined; + /** + * If true the server will request a certificate from clients that + * connect and attempt to verify that certificate. Defaults to + * false. + */ + requestCert?: boolean | undefined; + /** + * An array of strings or a Buffer naming possible ALPN protocols. + * (Protocols should be ordered by their priority.) + */ + ALPNProtocols?: string[] | Uint8Array[] | Uint8Array | undefined; + /** + * SNICallback(servername, cb) A function that will be + * called if the client supports SNI TLS extension. Two arguments + * will be passed when called: servername and cb. SNICallback should + * invoke cb(null, ctx), where ctx is a SecureContext instance. + * (tls.createSecureContext(...) can be used to get a proper + * SecureContext.) If SNICallback wasn't provided the default callback + * with high-level API will be used (see below). + */ + SNICallback?: ((servername: string, cb: (err: Error | null, ctx?: SecureContext) => void) => void) | undefined; + /** + * If true the server will reject any connection which is not + * authorized with the list of supplied CAs. This option only has an + * effect if requestCert is true. + * @default true + */ + rejectUnauthorized?: boolean | undefined; + } + interface TlsOptions extends SecureContextOptions, CommonConnectionOptions, net.ServerOpts { + /** + * Abort the connection if the SSL/TLS handshake does not finish in the + * specified number of milliseconds. A 'tlsClientError' is emitted on + * the tls.Server object whenever a handshake times out. Default: + * 120000 (120 seconds). + */ + handshakeTimeout?: number | undefined; + /** + * The number of seconds after which a TLS session created by the + * server will no longer be resumable. See Session Resumption for more + * information. Default: 300. + */ + sessionTimeout?: number | undefined; + /** + * 48-bytes of cryptographically strong pseudo-random data. + */ + ticketKeys?: Buffer | undefined; + /** + * + * @param socket + * @param identity identity parameter sent from the client. + * @return pre-shared key that must either be + * a buffer or `null` to stop the negotiation process. Returned PSK must be + * compatible with the selected cipher's digest. + * + * When negotiating TLS-PSK (pre-shared keys), this function is called + * with the identity provided by the client. + * If the return value is `null` the negotiation process will stop and an + * "unknown_psk_identity" alert message will be sent to the other party. + * If the server wishes to hide the fact that the PSK identity was not known, + * the callback must provide some random data as `psk` to make the connection + * fail with "decrypt_error" before negotiation is finished. 
+ * PSK ciphers are disabled by default, and using TLS-PSK thus + * requires explicitly specifying a cipher suite with the `ciphers` option. + * More information can be found in the RFC 4279. + */ + pskCallback?(socket: TLSSocket, identity: string): DataView | NodeJS.TypedArray | null; + /** + * hint to send to a client to help + * with selecting the identity during TLS-PSK negotiation. Will be ignored + * in TLS 1.3. Upon failing to set pskIdentityHint `tlsClientError` will be + * emitted with `ERR_TLS_PSK_SET_IDENTIY_HINT_FAILED` code. + */ + pskIdentityHint?: string | undefined; + } + interface PSKCallbackNegotation { + psk: DataView | NodeJS.TypedArray; + identity: string; + } + interface ConnectionOptions extends SecureContextOptions, CommonConnectionOptions { + host?: string | undefined; + port?: number | undefined; + path?: string | undefined; // Creates unix socket connection to path. If this option is specified, `host` and `port` are ignored. + socket?: stream.Duplex | undefined; // Establish secure connection on a given socket rather than creating a new socket + checkServerIdentity?: typeof checkServerIdentity | undefined; + servername?: string | undefined; // SNI TLS Extension + session?: Buffer | undefined; + minDHSize?: number | undefined; + lookup?: net.LookupFunction | undefined; + timeout?: number | undefined; + /** + * When negotiating TLS-PSK (pre-shared keys), this function is called + * with optional identity `hint` provided by the server or `null` + * in case of TLS 1.3 where `hint` was removed. + * It will be necessary to provide a custom `tls.checkServerIdentity()` + * for the connection as the default one will try to check hostname/IP + * of the server against the certificate but that's not applicable for PSK + * because there won't be a certificate present. + * More information can be found in the RFC 4279. + * + * @param hint message sent from the server to help client + * decide which identity to use during negotiation. + * Always `null` if TLS 1.3 is used. + * @returns Return `null` to stop the negotiation process. `psk` must be + * compatible with the selected cipher's digest. + * `identity` must use UTF-8 encoding. + */ + pskCallback?(hint: string | null): PSKCallbackNegotation | null; + } + /** + * Accepts encrypted connections using TLS or SSL. + * @since v0.3.2 + */ + class Server extends net.Server { + constructor(secureConnectionListener?: (socket: TLSSocket) => void); + constructor(options: TlsOptions, secureConnectionListener?: (socket: TLSSocket) => void); + /** + * The `server.addContext()` method adds a secure context that will be used if + * the client request's SNI name matches the supplied `hostname` (or wildcard). + * + * When there are multiple matching contexts, the most recently added one is + * used. + * @since v0.5.3 + * @param hostname A SNI host name or wildcard (e.g. `'*'`) + * @param context An object containing any of the possible properties from the {@link createSecureContext} `options` arguments (e.g. `key`, `cert`, `ca`, etc). + */ + addContext(hostname: string, context: SecureContextOptions): void; + /** + * Returns the session ticket keys. + * + * See `Session Resumption` for more information. + * @since v3.0.0 + * @return A 48-byte buffer containing the session ticket keys. + */ + getTicketKeys(): Buffer; + /** + * The `server.setSecureContext()` method replaces the secure context of an + * existing server. Existing connections to the server are not interrupted. 
+ * @since v11.0.0 + * @param options An object containing any of the possible properties from the {@link createSecureContext} `options` arguments (e.g. `key`, `cert`, `ca`, etc). + */ + setSecureContext(options: SecureContextOptions): void; + /** + * Sets the session ticket keys. + * + * Changes to the ticket keys are effective only for future server connections. + * Existing or currently pending server connections will use the previous keys. + * + * See `Session Resumption` for more information. + * @since v3.0.0 + * @param keys A 48-byte buffer containing the session ticket keys. + */ + setTicketKeys(keys: Buffer): void; + /** + * events.EventEmitter + * 1. tlsClientError + * 2. newSession + * 3. OCSPRequest + * 4. resumeSession + * 5. secureConnection + * 6. keylog + */ + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: 'tlsClientError', listener: (err: Error, tlsSocket: TLSSocket) => void): this; + addListener(event: 'newSession', listener: (sessionId: Buffer, sessionData: Buffer, callback: () => void) => void): this; + addListener(event: 'OCSPRequest', listener: (certificate: Buffer, issuer: Buffer, callback: (err: Error | null, resp: Buffer) => void) => void): this; + addListener(event: 'resumeSession', listener: (sessionId: Buffer, callback: (err: Error | null, sessionData: Buffer | null) => void) => void): this; + addListener(event: 'secureConnection', listener: (tlsSocket: TLSSocket) => void): this; + addListener(event: 'keylog', listener: (line: Buffer, tlsSocket: TLSSocket) => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: 'tlsClientError', err: Error, tlsSocket: TLSSocket): boolean; + emit(event: 'newSession', sessionId: Buffer, sessionData: Buffer, callback: () => void): boolean; + emit(event: 'OCSPRequest', certificate: Buffer, issuer: Buffer, callback: (err: Error | null, resp: Buffer) => void): boolean; + emit(event: 'resumeSession', sessionId: Buffer, callback: (err: Error | null, sessionData: Buffer | null) => void): boolean; + emit(event: 'secureConnection', tlsSocket: TLSSocket): boolean; + emit(event: 'keylog', line: Buffer, tlsSocket: TLSSocket): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: 'tlsClientError', listener: (err: Error, tlsSocket: TLSSocket) => void): this; + on(event: 'newSession', listener: (sessionId: Buffer, sessionData: Buffer, callback: () => void) => void): this; + on(event: 'OCSPRequest', listener: (certificate: Buffer, issuer: Buffer, callback: (err: Error | null, resp: Buffer) => void) => void): this; + on(event: 'resumeSession', listener: (sessionId: Buffer, callback: (err: Error | null, sessionData: Buffer | null) => void) => void): this; + on(event: 'secureConnection', listener: (tlsSocket: TLSSocket) => void): this; + on(event: 'keylog', listener: (line: Buffer, tlsSocket: TLSSocket) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: 'tlsClientError', listener: (err: Error, tlsSocket: TLSSocket) => void): this; + once(event: 'newSession', listener: (sessionId: Buffer, sessionData: Buffer, callback: () => void) => void): this; + once(event: 'OCSPRequest', listener: (certificate: Buffer, issuer: Buffer, callback: (err: Error | null, resp: Buffer) => void) => void): this; + once(event: 'resumeSession', listener: (sessionId: Buffer, callback: (err: Error | null, sessionData: Buffer | null) => void) => void): this; + once(event: 'secureConnection', listener: (tlsSocket: TLSSocket) => 
void): this; + once(event: 'keylog', listener: (line: Buffer, tlsSocket: TLSSocket) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: 'tlsClientError', listener: (err: Error, tlsSocket: TLSSocket) => void): this; + prependListener(event: 'newSession', listener: (sessionId: Buffer, sessionData: Buffer, callback: () => void) => void): this; + prependListener(event: 'OCSPRequest', listener: (certificate: Buffer, issuer: Buffer, callback: (err: Error | null, resp: Buffer) => void) => void): this; + prependListener(event: 'resumeSession', listener: (sessionId: Buffer, callback: (err: Error | null, sessionData: Buffer | null) => void) => void): this; + prependListener(event: 'secureConnection', listener: (tlsSocket: TLSSocket) => void): this; + prependListener(event: 'keylog', listener: (line: Buffer, tlsSocket: TLSSocket) => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: 'tlsClientError', listener: (err: Error, tlsSocket: TLSSocket) => void): this; + prependOnceListener(event: 'newSession', listener: (sessionId: Buffer, sessionData: Buffer, callback: () => void) => void): this; + prependOnceListener(event: 'OCSPRequest', listener: (certificate: Buffer, issuer: Buffer, callback: (err: Error | null, resp: Buffer) => void) => void): this; + prependOnceListener(event: 'resumeSession', listener: (sessionId: Buffer, callback: (err: Error | null, sessionData: Buffer | null) => void) => void): this; + prependOnceListener(event: 'secureConnection', listener: (tlsSocket: TLSSocket) => void): this; + prependOnceListener(event: 'keylog', listener: (line: Buffer, tlsSocket: TLSSocket) => void): this; + } + /** + * @deprecated since v0.11.3 Use `tls.TLSSocket` instead. + */ + interface SecurePair { + encrypted: TLSSocket; + cleartext: TLSSocket; + } + type SecureVersion = 'TLSv1.3' | 'TLSv1.2' | 'TLSv1.1' | 'TLSv1'; + interface SecureContextOptions { + /** + * Optionally override the trusted CA certificates. Default is to trust + * the well-known CAs curated by Mozilla. Mozilla's CAs are completely + * replaced when CAs are explicitly specified using this option. + */ + ca?: string | Buffer | Array | undefined; + /** + * Cert chains in PEM format. One cert chain should be provided per + * private key. Each cert chain should consist of the PEM formatted + * certificate for a provided private key, followed by the PEM + * formatted intermediate certificates (if any), in order, and not + * including the root CA (the root CA must be pre-known to the peer, + * see ca). When providing multiple cert chains, they do not have to + * be in the same order as their private keys in key. If the + * intermediate certificates are not provided, the peer will not be + * able to validate the certificate, and the handshake will fail. + */ + cert?: string | Buffer | Array | undefined; + /** + * Colon-separated list of supported signature algorithms. The list + * can contain digest algorithms (SHA256, MD5 etc.), public key + * algorithms (RSA-PSS, ECDSA etc.), combination of both (e.g + * 'RSA+SHA384') or TLS v1.3 scheme names (e.g. rsa_pss_pss_sha512). + */ + sigalgs?: string | undefined; + /** + * Cipher suite specification, replacing the default. For more + * information, see modifying the default cipher suite. Permitted + * ciphers can be obtained via tls.getCiphers(). Cipher names must be + * uppercased in order for OpenSSL to accept them. 
+ */ + ciphers?: string | undefined; + /** + * Name of an OpenSSL engine which can provide the client certificate. + */ + clientCertEngine?: string | undefined; + /** + * PEM formatted CRLs (Certificate Revocation Lists). + */ + crl?: string | Buffer | Array | undefined; + /** + * Diffie Hellman parameters, required for Perfect Forward Secrecy. Use + * openssl dhparam to create the parameters. The key length must be + * greater than or equal to 1024 bits or else an error will be thrown. + * Although 1024 bits is permissible, use 2048 bits or larger for + * stronger security. If omitted or invalid, the parameters are + * silently discarded and DHE ciphers will not be available. + */ + dhparam?: string | Buffer | undefined; + /** + * A string describing a named curve or a colon separated list of curve + * NIDs or names, for example P-521:P-384:P-256, to use for ECDH key + * agreement. Set to auto to select the curve automatically. Use + * crypto.getCurves() to obtain a list of available curve names. On + * recent releases, openssl ecparam -list_curves will also display the + * name and description of each available elliptic curve. Default: + * tls.DEFAULT_ECDH_CURVE. + */ + ecdhCurve?: string | undefined; + /** + * Attempt to use the server's cipher suite preferences instead of the + * client's. When true, causes SSL_OP_CIPHER_SERVER_PREFERENCE to be + * set in secureOptions + */ + honorCipherOrder?: boolean | undefined; + /** + * Private keys in PEM format. PEM allows the option of private keys + * being encrypted. Encrypted keys will be decrypted with + * options.passphrase. Multiple keys using different algorithms can be + * provided either as an array of unencrypted key strings or buffers, + * or an array of objects in the form {pem: [, + * passphrase: ]}. The object form can only occur in an array. + * object.passphrase is optional. Encrypted keys will be decrypted with + * object.passphrase if provided, or options.passphrase if it is not. + */ + key?: string | Buffer | Array | undefined; + /** + * Name of an OpenSSL engine to get private key from. Should be used + * together with privateKeyIdentifier. + */ + privateKeyEngine?: string | undefined; + /** + * Identifier of a private key managed by an OpenSSL engine. Should be + * used together with privateKeyEngine. Should not be set together with + * key, because both options define a private key in different ways. + */ + privateKeyIdentifier?: string | undefined; + /** + * Optionally set the maximum TLS version to allow. One + * of `'TLSv1.3'`, `'TLSv1.2'`, `'TLSv1.1'`, or `'TLSv1'`. Cannot be specified along with the + * `secureProtocol` option, use one or the other. + * **Default:** `'TLSv1.3'`, unless changed using CLI options. Using + * `--tls-max-v1.2` sets the default to `'TLSv1.2'`. Using `--tls-max-v1.3` sets the default to + * `'TLSv1.3'`. If multiple of the options are provided, the highest maximum is used. + */ + maxVersion?: SecureVersion | undefined; + /** + * Optionally set the minimum TLS version to allow. One + * of `'TLSv1.3'`, `'TLSv1.2'`, `'TLSv1.1'`, or `'TLSv1'`. Cannot be specified along with the + * `secureProtocol` option, use one or the other. It is not recommended to use + * less than TLSv1.2, but it may be required for interoperability. + * **Default:** `'TLSv1.2'`, unless changed using CLI options. Using + * `--tls-v1.0` sets the default to `'TLSv1'`. Using `--tls-v1.1` sets the default to + * `'TLSv1.1'`. Using `--tls-min-v1.3` sets the default to + * 'TLSv1.3'. 
If multiple of the options are provided, the lowest minimum is used. + */ + minVersion?: SecureVersion | undefined; + /** + * Shared passphrase used for a single private key and/or a PFX. + */ + passphrase?: string | undefined; + /** + * PFX or PKCS12 encoded private key and certificate chain. pfx is an + * alternative to providing key and cert individually. PFX is usually + * encrypted, if it is, passphrase will be used to decrypt it. Multiple + * PFX can be provided either as an array of unencrypted PFX buffers, + * or an array of objects in the form {buf: [, + * passphrase: ]}. The object form can only occur in an array. + * object.passphrase is optional. Encrypted PFX will be decrypted with + * object.passphrase if provided, or options.passphrase if it is not. + */ + pfx?: string | Buffer | Array | undefined; + /** + * Optionally affect the OpenSSL protocol behavior, which is not + * usually necessary. This should be used carefully if at all! Value is + * a numeric bitmask of the SSL_OP_* options from OpenSSL Options + */ + secureOptions?: number | undefined; // Value is a numeric bitmask of the `SSL_OP_*` options + /** + * Legacy mechanism to select the TLS protocol version to use, it does + * not support independent control of the minimum and maximum version, + * and does not support limiting the protocol to TLSv1.3. Use + * minVersion and maxVersion instead. The possible values are listed as + * SSL_METHODS, use the function names as strings. For example, use + * 'TLSv1_1_method' to force TLS version 1.1, or 'TLS_method' to allow + * any TLS protocol version up to TLSv1.3. It is not recommended to use + * TLS versions less than 1.2, but it may be required for + * interoperability. Default: none, see minVersion. + */ + secureProtocol?: string | undefined; + /** + * Opaque identifier used by servers to ensure session state is not + * shared between applications. Unused by clients. + */ + sessionIdContext?: string | undefined; + /** + * 48-bytes of cryptographically strong pseudo-random data. + * See Session Resumption for more information. + */ + ticketKeys?: Buffer | undefined; + /** + * The number of seconds after which a TLS session created by the + * server will no longer be resumable. See Session Resumption for more + * information. Default: 300. + */ + sessionTimeout?: number | undefined; + } + interface SecureContext { + context: any; + } + /** + * Verifies the certificate `cert` is issued to `hostname`. + * + * Returns [Error](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Error) object, populating it with `reason`, `host`, and `cert` on + * failure. On success, returns [undefined](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#Undefined_type). + * + * This function is intended to be used in combination with the`checkServerIdentity` option that can be passed to {@link connect} and as + * such operates on a `certificate object`. For other purposes, consider using `x509.checkHost()` instead. + * + * This function can be overwritten by providing an alternative function as the`options.checkServerIdentity` option that is passed to `tls.connect()`. The + * overwriting function can call `tls.checkServerIdentity()` of course, to augment + * the checks done with additional verification. + * + * This function is only called if the certificate passed all other checks, such as + * being issued by trusted CA (`options.ca`). 
+ * + * Earlier versions of Node.js incorrectly accepted certificates for a given`hostname` if a matching `uniformResourceIdentifier` subject alternative name + * was present (see [CVE-2021-44531](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-44531)). Applications that wish to accept`uniformResourceIdentifier` subject alternative names can use + * a custom`options.checkServerIdentity` function that implements the desired behavior. + * @since v0.8.4 + * @param hostname The host name or IP address to verify the certificate against. + * @param cert A `certificate object` representing the peer's certificate. + */ + function checkServerIdentity(hostname: string, cert: PeerCertificate): Error | undefined; + /** + * Creates a new {@link Server}. The `secureConnectionListener`, if provided, is + * automatically set as a listener for the `'secureConnection'` event. + * + * The `ticketKeys` options is automatically shared between `cluster` module + * workers. + * + * The following illustrates a simple echo server: + * + * ```js + * const tls = require('tls'); + * const fs = require('fs'); + * + * const options = { + * key: fs.readFileSync('server-key.pem'), + * cert: fs.readFileSync('server-cert.pem'), + * + * // This is necessary only if using client certificate authentication. + * requestCert: true, + * + * // This is necessary only if the client uses a self-signed certificate. + * ca: [ fs.readFileSync('client-cert.pem') ] + * }; + * + * const server = tls.createServer(options, (socket) => { + * console.log('server connected', + * socket.authorized ? 'authorized' : 'unauthorized'); + * socket.write('welcome!\n'); + * socket.setEncoding('utf8'); + * socket.pipe(socket); + * }); + * server.listen(8000, () => { + * console.log('server bound'); + * }); + * ``` + * + * The server can be tested by connecting to it using the example client from {@link connect}. + * @since v0.3.2 + */ + function createServer(secureConnectionListener?: (socket: TLSSocket) => void): Server; + function createServer(options: TlsOptions, secureConnectionListener?: (socket: TLSSocket) => void): Server; + /** + * The `callback` function, if specified, will be added as a listener for the `'secureConnect'` event. + * + * `tls.connect()` returns a {@link TLSSocket} object. + * + * Unlike the `https` API, `tls.connect()` does not enable the + * SNI (Server Name Indication) extension by default, which may cause some + * servers to return an incorrect certificate or reject the connection + * altogether. To enable SNI, set the `servername` option in addition + * to `host`. + * + * The following illustrates a client for the echo server example from {@link createServer}: + * + * ```js + * // Assumes an echo server that is listening on port 8000. + * const tls = require('tls'); + * const fs = require('fs'); + * + * const options = { + * // Necessary only if the server requires client certificate authentication. + * key: fs.readFileSync('client-key.pem'), + * cert: fs.readFileSync('client-cert.pem'), + * + * // Necessary only if the server uses a self-signed certificate. + * ca: [ fs.readFileSync('server-cert.pem') ], + * + * // Necessary only if the server's cert isn't for "localhost". + * checkServerIdentity: () => { return null; }, + * }; + * + * const socket = tls.connect(8000, options, () => { + * console.log('client connected', + * socket.authorized ? 
'authorized' : 'unauthorized'); + * process.stdin.pipe(socket); + * process.stdin.resume(); + * }); + * socket.setEncoding('utf8'); + * socket.on('data', (data) => { + * console.log(data); + * }); + * socket.on('end', () => { + * console.log('server ends connection'); + * }); + * ``` + * @since v0.11.3 + */ + function connect(options: ConnectionOptions, secureConnectListener?: () => void): TLSSocket; + function connect(port: number, host?: string, options?: ConnectionOptions, secureConnectListener?: () => void): TLSSocket; + function connect(port: number, options?: ConnectionOptions, secureConnectListener?: () => void): TLSSocket; + /** + * Creates a new secure pair object with two streams, one of which reads and writes + * the encrypted data and the other of which reads and writes the cleartext data. + * Generally, the encrypted stream is piped to/from an incoming encrypted data + * stream and the cleartext one is used as a replacement for the initial encrypted + * stream. + * + * `tls.createSecurePair()` returns a `tls.SecurePair` object with `cleartext` and`encrypted` stream properties. + * + * Using `cleartext` has the same API as {@link TLSSocket}. + * + * The `tls.createSecurePair()` method is now deprecated in favor of`tls.TLSSocket()`. For example, the code: + * + * ```js + * pair = tls.createSecurePair(// ... ); + * pair.encrypted.pipe(socket); + * socket.pipe(pair.encrypted); + * ``` + * + * can be replaced by: + * + * ```js + * secureSocket = tls.TLSSocket(socket, options); + * ``` + * + * where `secureSocket` has the same API as `pair.cleartext`. + * @since v0.3.2 + * @deprecated Since v0.11.3 - Use {@link TLSSocket} instead. + * @param context A secure context object as returned by `tls.createSecureContext()` + * @param isServer `true` to specify that this TLS connection should be opened as a server. + * @param requestCert `true` to specify whether a server should request a certificate from a connecting client. Only applies when `isServer` is `true`. + * @param rejectUnauthorized If not `false` a server automatically reject clients with invalid certificates. Only applies when `isServer` is `true`. + */ + function createSecurePair(context?: SecureContext, isServer?: boolean, requestCert?: boolean, rejectUnauthorized?: boolean): SecurePair; + /** + * {@link createServer} sets the default value of the `honorCipherOrder` option + * to `true`, other APIs that create secure contexts leave it unset. + * + * {@link createServer} uses a 128 bit truncated SHA1 hash value generated + * from `process.argv` as the default value of the `sessionIdContext` option, other + * APIs that create secure contexts have no default value. + * + * The `tls.createSecureContext()` method creates a `SecureContext` object. It is + * usable as an argument to several `tls` APIs, such as {@link createServer} and `server.addContext()`, but has no public methods. + * + * A key is _required_ for ciphers that use certificates. Either `key` or`pfx` can be used to provide it. + * + * If the `ca` option is not given, then Node.js will default to using [Mozilla's publicly trusted list of + * CAs](https://hg.mozilla.org/mozilla-central/raw-file/tip/security/nss/lib/ckfw/builtins/certdata.txt). + * @since v0.11.13 + */ + function createSecureContext(options?: SecureContextOptions): SecureContext; + /** + * Returns an array with the names of the supported TLS ciphers. The names are + * lower-case for historical reasons, but must be uppercased to be used in + * the `ciphers` option of {@link createSecureContext}. 
+ * + * Not all supported ciphers are enabled by default. See `Modifying the default TLS cipher suite`. + * + * Cipher names that start with `'tls_'` are for TLSv1.3, all the others are for + * TLSv1.2 and below. + * + * ```js + * console.log(tls.getCiphers()); // ['aes128-gcm-sha256', 'aes128-sha', ...] + * ``` + * @since v0.10.2 + */ + function getCiphers(): string[]; + /** + * The default curve name to use for ECDH key agreement in a tls server. + * The default value is 'auto'. See tls.createSecureContext() for further + * information. + */ + let DEFAULT_ECDH_CURVE: string; + /** + * The default value of the maxVersion option of + * tls.createSecureContext(). It can be assigned any of the supported TLS + * protocol versions, 'TLSv1.3', 'TLSv1.2', 'TLSv1.1', or 'TLSv1'. Default: + * 'TLSv1.3', unless changed using CLI options. Using --tls-max-v1.2 sets + * the default to 'TLSv1.2'. Using --tls-max-v1.3 sets the default to + * 'TLSv1.3'. If multiple of the options are provided, the highest maximum + * is used. + */ + let DEFAULT_MAX_VERSION: SecureVersion; + /** + * The default value of the minVersion option of tls.createSecureContext(). + * It can be assigned any of the supported TLS protocol versions, + * 'TLSv1.3', 'TLSv1.2', 'TLSv1.1', or 'TLSv1'. Default: 'TLSv1.2', unless + * changed using CLI options. Using --tls-min-v1.0 sets the default to + * 'TLSv1'. Using --tls-min-v1.1 sets the default to 'TLSv1.1'. Using + * --tls-min-v1.3 sets the default to 'TLSv1.3'. If multiple of the options + * are provided, the lowest minimum is used. + */ + let DEFAULT_MIN_VERSION: SecureVersion; + /** + * An immutable array of strings representing the root certificates (in PEM + * format) used for verifying peer certificates. This is the default value + * of the ca option to tls.createSecureContext(). + */ + const rootCertificates: ReadonlyArray; +} +declare module 'node:tls' { + export * from 'tls'; +} diff --git a/node_modules/@types/node/trace_events.d.ts b/node_modules/@types/node/trace_events.d.ts new file mode 100755 index 0000000..d47aa93 --- /dev/null +++ b/node_modules/@types/node/trace_events.d.ts @@ -0,0 +1,171 @@ +/** + * The `trace_events` module provides a mechanism to centralize tracing information + * generated by V8, Node.js core, and userspace code. + * + * Tracing can be enabled with the `--trace-event-categories` command-line flag + * or by using the `trace_events` module. The `--trace-event-categories` flag + * accepts a list of comma-separated category names. + * + * The available categories are: + * + * * `node`: An empty placeholder. + * * `node.async_hooks`: Enables capture of detailed `async_hooks` trace data. + * The `async_hooks` events have a unique `asyncId` and a special `triggerId` `triggerAsyncId` property. + * * `node.bootstrap`: Enables capture of Node.js bootstrap milestones. + * * `node.console`: Enables capture of `console.time()` and `console.count()`output. + * * `node.dns.native`: Enables capture of trace data for DNS queries. + * * `node.environment`: Enables capture of Node.js Environment milestones. + * * `node.fs.sync`: Enables capture of trace data for file system sync methods. + * * `node.perf`: Enables capture of `Performance API` measurements. + * * `node.perf.usertiming`: Enables capture of only Performance API User Timing + * measures and marks. + * * `node.perf.timerify`: Enables capture of only Performance API timerify + * measurements. 
+ * * `node.promises.rejections`: Enables capture of trace data tracking the number + * of unhandled Promise rejections and handled-after-rejections. + * * `node.vm.script`: Enables capture of trace data for the `vm` module's`runInNewContext()`, `runInContext()`, and `runInThisContext()` methods. + * * `v8`: The `V8` events are GC, compiling, and execution related. + * + * By default the `node`, `node.async_hooks`, and `v8` categories are enabled. + * + * ```bash + * node --trace-event-categories v8,node,node.async_hooks server.js + * ``` + * + * Prior versions of Node.js required the use of the `--trace-events-enabled`flag to enable trace events. This requirement has been removed. However, the`--trace-events-enabled` flag _may_ still be + * used and will enable the`node`, `node.async_hooks`, and `v8` trace event categories by default. + * + * ```bash + * node --trace-events-enabled + * + * # is equivalent to + * + * node --trace-event-categories v8,node,node.async_hooks + * ``` + * + * Alternatively, trace events may be enabled using the `trace_events` module: + * + * ```js + * const trace_events = require('trace_events'); + * const tracing = trace_events.createTracing({ categories: ['node.perf'] }); + * tracing.enable(); // Enable trace event capture for the 'node.perf' category + * + * // do work + * + * tracing.disable(); // Disable trace event capture for the 'node.perf' category + * ``` + * + * Running Node.js with tracing enabled will produce log files that can be opened + * in the [`chrome://tracing`](https://www.chromium.org/developers/how-tos/trace-event-profiling-tool) tab of Chrome. + * + * The logging file is by default called `node_trace.${rotation}.log`, where`${rotation}` is an incrementing log-rotation id. The filepath pattern can + * be specified with `--trace-event-file-pattern` that accepts a template + * string that supports `${rotation}` and `${pid}`: + * + * ```bash + * node --trace-event-categories v8 --trace-event-file-pattern '${pid}-${rotation}.log' server.js + * ``` + * + * To guarantee that the log file is properly generated after signal events like`SIGINT`, `SIGTERM`, or `SIGBREAK`, make sure to have the appropriate handlers + * in your code, such as: + * + * ```js + * process.on('SIGINT', function onSigint() { + * console.info('Received SIGINT.'); + * process.exit(130); // Or applicable exit code depending on OS and signal + * }); + * ``` + * + * The tracing system uses the same time source + * as the one used by `process.hrtime()`. + * However the trace-event timestamps are expressed in microseconds, + * unlike `process.hrtime()` which returns nanoseconds. + * + * The features from this module are not available in `Worker` threads. + * @experimental + * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/trace_events.js) + */ +declare module 'trace_events' { + /** + * The `Tracing` object is used to enable or disable tracing for sets of + * categories. Instances are created using the + * `trace_events.createTracing()` method. + * + * When created, the `Tracing` object is disabled. Calling the + * `tracing.enable()` method adds the categories to the set of enabled trace + * event categories. Calling `tracing.disable()` will remove the categories + * from the set of enabled trace event categories. + */ + interface Tracing { + /** + * A comma-separated list of the trace event categories covered by this + * `Tracing` object. + */ + readonly categories: string; + /** + * Disables this `Tracing` object. 
+ * + * Only trace event categories _not_ covered by other enabled `Tracing` + * objects and _not_ specified by the `--trace-event-categories` flag + * will be disabled. + */ + disable(): void; + /** + * Enables this `Tracing` object for the set of categories covered by + * the `Tracing` object. + */ + enable(): void; + /** + * `true` only if the `Tracing` object has been enabled. + */ + readonly enabled: boolean; + } + interface CreateTracingOptions { + /** + * An array of trace category names. Values included in the array are + * coerced to a string when possible. An error will be thrown if the + * value cannot be coerced. + */ + categories: string[]; + } + /** + * Creates and returns a `Tracing` object for the given set of `categories`. + * + * ```js + * const trace_events = require('trace_events'); + * const categories = ['node.perf', 'node.async_hooks']; + * const tracing = trace_events.createTracing({ categories }); + * tracing.enable(); + * // do stuff + * tracing.disable(); + * ``` + * @since v10.0.0 + * @return . + */ + function createTracing(options: CreateTracingOptions): Tracing; + /** + * Returns a comma-separated list of all currently-enabled trace event + * categories. The current set of enabled trace event categories is determined + * by the _union_ of all currently-enabled `Tracing` objects and any categories + * enabled using the `--trace-event-categories` flag. + * + * Given the file `test.js` below, the command`node --trace-event-categories node.perf test.js` will print`'node.async_hooks,node.perf'` to the console. + * + * ```js + * const trace_events = require('trace_events'); + * const t1 = trace_events.createTracing({ categories: ['node.async_hooks'] }); + * const t2 = trace_events.createTracing({ categories: ['node.perf'] }); + * const t3 = trace_events.createTracing({ categories: ['v8'] }); + * + * t1.enable(); + * t2.enable(); + * + * console.log(trace_events.getEnabledCategories()); + * ``` + * @since v10.0.0 + */ + function getEnabledCategories(): string | undefined; +} +declare module 'node:trace_events' { + export * from 'trace_events'; +} diff --git a/node_modules/@types/node/ts4.8/assert.d.ts b/node_modules/@types/node/ts4.8/assert.d.ts new file mode 100755 index 0000000..8e02a66 --- /dev/null +++ b/node_modules/@types/node/ts4.8/assert.d.ts @@ -0,0 +1,911 @@ +/** + * The `assert` module provides a set of assertion functions for verifying + * invariants. + * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/assert.js) + */ +declare module 'assert' { + /** + * An alias of {@link ok}. + * @since v0.5.9 + * @param value The input that is checked for being truthy. + */ + function assert(value: unknown, message?: string | Error): asserts value; + namespace assert { + /** + * Indicates the failure of an assertion. All errors thrown by the `assert` module + * will be instances of the `AssertionError` class. + */ + class AssertionError extends Error { + actual: unknown; + expected: unknown; + operator: string; + generatedMessage: boolean; + code: 'ERR_ASSERTION'; + constructor(options?: { + /** If provided, the error message is set to this value. */ + message?: string | undefined; + /** The `actual` property on the error instance. */ + actual?: unknown | undefined; + /** The `expected` property on the error instance. */ + expected?: unknown | undefined; + /** The `operator` property on the error instance. */ + operator?: string | undefined; + /** If provided, the generated stack trace omits frames before this function. 
*/ + // tslint:disable-next-line:ban-types + stackStartFn?: Function | undefined; + }); + } + /** + * This feature is currently experimental and behavior might still change. + * @since v14.2.0, v12.19.0 + * @experimental + */ + class CallTracker { + /** + * The wrapper function is expected to be called exactly `exact` times. If the + * function has not been called exactly `exact` times when `tracker.verify()` is called, then `tracker.verify()` will throw an + * error. + * + * ```js + * import assert from 'assert'; + * + * // Creates call tracker. + * const tracker = new assert.CallTracker(); + * + * function func() {} + * + * // Returns a function that wraps func() that must be called exact times + * // before tracker.verify(). + * const callsfunc = tracker.calls(func); + * ``` + * @since v14.2.0, v12.19.0 + * @param [fn='A no-op function'] + * @param [exact=1] + * @return that wraps `fn`. + */ + calls(exact?: number): () => void; + calls<Func extends (...args: any[]) => any>(fn?: Func, exact?: number): Func; + /** + * The arrays contains information about the expected and actual number of calls of + * the functions that have not been called the expected number of times. + * + * ```js + * import assert from 'assert'; + * + * // Creates call tracker. + * const tracker = new assert.CallTracker(); + * + * function func() {} + * + * function foo() {} + * + * // Returns a function that wraps func() that must be called exact times + * // before tracker.verify(). + * const callsfunc = tracker.calls(func, 2); + * + * // Returns an array containing information on callsfunc() + * tracker.report(); + * // [ + * // { + * // message: 'Expected the func function to be executed 2 time(s) but was + * // executed 0 time(s).', + * // actual: 0, + * // expected: 2, + * // operator: 'func', + * // stack: stack trace + * // } + * // ] + * ``` + * @since v14.2.0, v12.19.0 + * @return of objects containing information about the wrapper functions returned by `calls`. + */ + report(): CallTrackerReportInformation[]; + /** + * Iterates through the list of functions passed to `tracker.calls()` and will throw an error for functions that + * have not been called the expected number of times. + * + * ```js + * import assert from 'assert'; + * + * // Creates call tracker. + * const tracker = new assert.CallTracker(); + * + * function func() {} + * + * // Returns a function that wraps func() that must be called exact times + * // before tracker.verify(). + * const callsfunc = tracker.calls(func, 2); + * + * callsfunc(); + * + * // Will throw an error since callsfunc() was only called once. + * tracker.verify(); + * ``` + * @since v14.2.0, v12.19.0 + */ + verify(): void; + } + interface CallTrackerReportInformation { + message: string; + /** The actual number of times the function was called. */ + actual: number; + /** The number of times the function was expected to be called. */ + expected: number; + /** The name of the function that is wrapped. */ + operator: string; + /** A stack trace of the function. */ + stack: object; + } + type AssertPredicate = RegExp | (new () => object) | ((thrown: unknown) => boolean) | object | Error; + /** + * Throws an `AssertionError` with the provided error message or a default + * error message. If the `message` parameter is an instance of an `Error` then + * it will be thrown instead of the `AssertionError`.
+ * + * ```js + * import assert from 'assert/strict'; + * + * assert.fail(); + * // AssertionError [ERR_ASSERTION]: Failed + * + * assert.fail('boom'); + * // AssertionError [ERR_ASSERTION]: boom + * + * assert.fail(new TypeError('need array')); + * // TypeError: need array + * ``` + * + * Using `assert.fail()` with more than two arguments is possible but deprecated. + * See below for further details. + * @since v0.1.21 + * @param [message='Failed'] + */ + function fail(message?: string | Error): never; + /** @deprecated since v10.0.0 - use fail([message]) or other assert functions instead. */ + function fail( + actual: unknown, + expected: unknown, + message?: string | Error, + operator?: string, + // tslint:disable-next-line:ban-types + stackStartFn?: Function + ): never; + /** + * Tests if `value` is truthy. It is equivalent to`assert.equal(!!value, true, message)`. + * + * If `value` is not truthy, an `AssertionError` is thrown with a `message`property set equal to the value of the `message` parameter. If the `message`parameter is `undefined`, a default + * error message is assigned. If the `message`parameter is an instance of an `Error` then it will be thrown instead of the`AssertionError`. + * If no arguments are passed in at all `message` will be set to the string:`` 'No value argument passed to `assert.ok()`' ``. + * + * Be aware that in the `repl` the error message will be different to the one + * thrown in a file! See below for further details. + * + * ```js + * import assert from 'assert/strict'; + * + * assert.ok(true); + * // OK + * assert.ok(1); + * // OK + * + * assert.ok(); + * // AssertionError: No value argument passed to `assert.ok()` + * + * assert.ok(false, 'it\'s false'); + * // AssertionError: it's false + * + * // In the repl: + * assert.ok(typeof 123 === 'string'); + * // AssertionError: false == true + * + * // In a file (e.g. test.js): + * assert.ok(typeof 123 === 'string'); + * // AssertionError: The expression evaluated to a falsy value: + * // + * // assert.ok(typeof 123 === 'string') + * + * assert.ok(false); + * // AssertionError: The expression evaluated to a falsy value: + * // + * // assert.ok(false) + * + * assert.ok(0); + * // AssertionError: The expression evaluated to a falsy value: + * // + * // assert.ok(0) + * ``` + * + * ```js + * import assert from 'assert/strict'; + * + * // Using `assert()` works the same: + * assert(0); + * // AssertionError: The expression evaluated to a falsy value: + * // + * // assert(0) + * ``` + * @since v0.1.21 + */ + function ok(value: unknown, message?: string | Error): asserts value; + /** + * **Strict assertion mode** + * + * An alias of {@link strictEqual}. + * + * **Legacy assertion mode** + * + * > Stability: 3 - Legacy: Use {@link strictEqual} instead. + * + * Tests shallow, coercive equality between the `actual` and `expected` parameters + * using the [`==` operator](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Equality). `NaN` is specially handled + * and treated as being identical if both sides are `NaN`. 
+ * + * ```js + * import assert from 'assert'; + * + * assert.equal(1, 1); + * // OK, 1 == 1 + * assert.equal(1, '1'); + * // OK, 1 == '1' + * assert.equal(NaN, NaN); + * // OK + * + * assert.equal(1, 2); + * // AssertionError: 1 == 2 + * assert.equal({ a: { b: 1 } }, { a: { b: 1 } }); + * // AssertionError: { a: { b: 1 } } == { a: { b: 1 } } + * ``` + * + * If the values are not equal, an `AssertionError` is thrown with a `message`property set equal to the value of the `message` parameter. If the `message`parameter is undefined, a default + * error message is assigned. If the `message`parameter is an instance of an `Error` then it will be thrown instead of the`AssertionError`. + * @since v0.1.21 + */ + function equal(actual: unknown, expected: unknown, message?: string | Error): void; + /** + * **Strict assertion mode** + * + * An alias of {@link notStrictEqual}. + * + * **Legacy assertion mode** + * + * > Stability: 3 - Legacy: Use {@link notStrictEqual} instead. + * + * Tests shallow, coercive inequality with the [`!=` operator](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Inequality). `NaN` is + * specially handled and treated as being identical if both sides are `NaN`. + * + * ```js + * import assert from 'assert'; + * + * assert.notEqual(1, 2); + * // OK + * + * assert.notEqual(1, 1); + * // AssertionError: 1 != 1 + * + * assert.notEqual(1, '1'); + * // AssertionError: 1 != '1' + * ``` + * + * If the values are equal, an `AssertionError` is thrown with a `message`property set equal to the value of the `message` parameter. If the `message`parameter is undefined, a default error + * message is assigned. If the `message`parameter is an instance of an `Error` then it will be thrown instead of the`AssertionError`. + * @since v0.1.21 + */ + function notEqual(actual: unknown, expected: unknown, message?: string | Error): void; + /** + * **Strict assertion mode** + * + * An alias of {@link deepStrictEqual}. + * + * **Legacy assertion mode** + * + * > Stability: 3 - Legacy: Use {@link deepStrictEqual} instead. + * + * Tests for deep equality between the `actual` and `expected` parameters. Consider + * using {@link deepStrictEqual} instead. {@link deepEqual} can have + * surprising results. + * + * _Deep equality_ means that the enumerable "own" properties of child objects + * are also recursively evaluated by the following rules. + * @since v0.1.21 + */ + function deepEqual(actual: unknown, expected: unknown, message?: string | Error): void; + /** + * **Strict assertion mode** + * + * An alias of {@link notDeepStrictEqual}. + * + * **Legacy assertion mode** + * + * > Stability: 3 - Legacy: Use {@link notDeepStrictEqual} instead. + * + * Tests for any deep inequality. Opposite of {@link deepEqual}. + * + * ```js + * import assert from 'assert'; + * + * const obj1 = { + * a: { + * b: 1 + * } + * }; + * const obj2 = { + * a: { + * b: 2 + * } + * }; + * const obj3 = { + * a: { + * b: 1 + * } + * }; + * const obj4 = Object.create(obj1); + * + * assert.notDeepEqual(obj1, obj1); + * // AssertionError: { a: { b: 1 } } notDeepEqual { a: { b: 1 } } + * + * assert.notDeepEqual(obj1, obj2); + * // OK + * + * assert.notDeepEqual(obj1, obj3); + * // AssertionError: { a: { b: 1 } } notDeepEqual { a: { b: 1 } } + * + * assert.notDeepEqual(obj1, obj4); + * // OK + * ``` + * + * If the values are deeply equal, an `AssertionError` is thrown with a`message` property set equal to the value of the `message` parameter. 
If the`message` parameter is undefined, a default + * error message is assigned. If the`message` parameter is an instance of an `Error` then it will be thrown + * instead of the `AssertionError`. + * @since v0.1.21 + */ + function notDeepEqual(actual: unknown, expected: unknown, message?: string | Error): void; + /** + * Tests strict equality between the `actual` and `expected` parameters as + * determined by [`Object.is()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/is). + * + * ```js + * import assert from 'assert/strict'; + * + * assert.strictEqual(1, 2); + * // AssertionError [ERR_ASSERTION]: Expected inputs to be strictly equal: + * // + * // 1 !== 2 + * + * assert.strictEqual(1, 1); + * // OK + * + * assert.strictEqual('Hello foobar', 'Hello World!'); + * // AssertionError [ERR_ASSERTION]: Expected inputs to be strictly equal: + * // + actual - expected + * // + * // + 'Hello foobar' + * // - 'Hello World!' + * // ^ + * + * const apples = 1; + * const oranges = 2; + * assert.strictEqual(apples, oranges, `apples ${apples} !== oranges ${oranges}`); + * // AssertionError [ERR_ASSERTION]: apples 1 !== oranges 2 + * + * assert.strictEqual(1, '1', new TypeError('Inputs are not identical')); + * // TypeError: Inputs are not identical + * ``` + * + * If the values are not strictly equal, an `AssertionError` is thrown with a`message` property set equal to the value of the `message` parameter. If the`message` parameter is undefined, a + * default error message is assigned. If the`message` parameter is an instance of an `Error` then it will be thrown + * instead of the `AssertionError`. + * @since v0.1.21 + */ + function strictEqual<T>(actual: unknown, expected: T, message?: string | Error): asserts actual is T; + /** + * Tests strict inequality between the `actual` and `expected` parameters as + * determined by [`Object.is()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/is). + * + * ```js + * import assert from 'assert/strict'; + * + * assert.notStrictEqual(1, 2); + * // OK + * + * assert.notStrictEqual(1, 1); + * // AssertionError [ERR_ASSERTION]: Expected "actual" to be strictly unequal to: + * // + * // 1 + * + * assert.notStrictEqual(1, '1'); + * // OK + * ``` + * + * If the values are strictly equal, an `AssertionError` is thrown with a`message` property set equal to the value of the `message` parameter. If the`message` parameter is undefined, a + * default error message is assigned. If the`message` parameter is an instance of an `Error` then it will be thrown + * instead of the `AssertionError`. + * @since v0.1.21 + */ + function notStrictEqual(actual: unknown, expected: unknown, message?: string | Error): void; + /** + * Tests for deep equality between the `actual` and `expected` parameters. + * "Deep" equality means that the enumerable "own" properties of child objects + * are recursively evaluated also by the following rules. + * @since v1.2.0 + */ + function deepStrictEqual<T>(actual: unknown, expected: T, message?: string | Error): asserts actual is T; + /** + * Tests for deep strict inequality. Opposite of {@link deepStrictEqual}. + * + * ```js + * import assert from 'assert/strict'; + * + * assert.notDeepStrictEqual({ a: 1 }, { a: '1' }); + * // OK + * ``` + * + * If the values are deeply and strictly equal, an `AssertionError` is thrown + * with a `message` property set equal to the value of the `message` parameter. If + * the `message` parameter is undefined, a default error message is assigned.
If + * the `message` parameter is an instance of an `Error` then it will be thrown + * instead of the `AssertionError`. + * @since v1.2.0 + */ + function notDeepStrictEqual(actual: unknown, expected: unknown, message?: string | Error): void; + /** + * Expects the function `fn` to throw an error. + * + * If specified, `error` can be a [`Class`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Classes), + * [`RegExp`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions), a validation function, + * a validation object where each property will be tested for strict deep equality, + * or an instance of error where each property will be tested for strict deep + * equality including the non-enumerable `message` and `name` properties. When + * using an object, it is also possible to use a regular expression, when + * validating against a string property. See below for examples. + * + * If specified, `message` will be appended to the message provided by the`AssertionError` if the `fn` call fails to throw or in case the error validation + * fails. + * + * Custom validation object/error instance: + * + * ```js + * import assert from 'assert/strict'; + * + * const err = new TypeError('Wrong value'); + * err.code = 404; + * err.foo = 'bar'; + * err.info = { + * nested: true, + * baz: 'text' + * }; + * err.reg = /abc/i; + * + * assert.throws( + * () => { + * throw err; + * }, + * { + * name: 'TypeError', + * message: 'Wrong value', + * info: { + * nested: true, + * baz: 'text' + * } + * // Only properties on the validation object will be tested for. + * // Using nested objects requires all properties to be present. Otherwise + * // the validation is going to fail. + * } + * ); + * + * // Using regular expressions to validate error properties: + * throws( + * () => { + * throw err; + * }, + * { + * // The `name` and `message` properties are strings and using regular + * // expressions on those will match against the string. If they fail, an + * // error is thrown. + * name: /^TypeError$/, + * message: /Wrong/, + * foo: 'bar', + * info: { + * nested: true, + * // It is not possible to use regular expressions for nested properties! + * baz: 'text' + * }, + * // The `reg` property contains a regular expression and only if the + * // validation object contains an identical regular expression, it is going + * // to pass. + * reg: /abc/i + * } + * ); + * + * // Fails due to the different `message` and `name` properties: + * throws( + * () => { + * const otherErr = new Error('Not found'); + * // Copy all enumerable properties from `err` to `otherErr`. + * for (const [key, value] of Object.entries(err)) { + * otherErr[key] = value; + * } + * throw otherErr; + * }, + * // The error's `message` and `name` properties will also be checked when using + * // an error as validation object. + * err + * ); + * ``` + * + * Validate instanceof using constructor: + * + * ```js + * import assert from 'assert/strict'; + * + * assert.throws( + * () => { + * throw new Error('Wrong value'); + * }, + * Error + * ); + * ``` + * + * Validate error message using [`RegExp`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions): + * + * Using a regular expression runs `.toString` on the error object, and will + * therefore also include the error name. 
+ * + * ```js + * import assert from 'assert/strict'; + * + * assert.throws( + * () => { + * throw new Error('Wrong value'); + * }, + * /^Error: Wrong value$/ + * ); + * ``` + * + * Custom error validation: + * + * The function must return `true` to indicate all internal validations passed. + * It will otherwise fail with an `AssertionError`. + * + * ```js + * import assert from 'assert/strict'; + * + * assert.throws( + * () => { + * throw new Error('Wrong value'); + * }, + * (err) => { + * assert(err instanceof Error); + * assert(/value/.test(err)); + * // Avoid returning anything from validation functions besides `true`. + * // Otherwise, it's not clear what part of the validation failed. Instead, + * // throw an error about the specific validation that failed (as done in this + * // example) and add as much helpful debugging information to that error as + * // possible. + * return true; + * }, + * 'unexpected error' + * ); + * ``` + * + * `error` cannot be a string. If a string is provided as the second + * argument, then `error` is assumed to be omitted and the string will be used for`message` instead. This can lead to easy-to-miss mistakes. Using the same + * message as the thrown error message is going to result in an`ERR_AMBIGUOUS_ARGUMENT` error. Please read the example below carefully if using + * a string as the second argument gets considered: + * + * ```js + * import assert from 'assert/strict'; + * + * function throwingFirst() { + * throw new Error('First'); + * } + * + * function throwingSecond() { + * throw new Error('Second'); + * } + * + * function notThrowing() {} + * + * // The second argument is a string and the input function threw an Error. + * // The first case will not throw as it does not match for the error message + * // thrown by the input function! + * assert.throws(throwingFirst, 'Second'); + * // In the next example the message has no benefit over the message from the + * // error and since it is not clear if the user intended to actually match + * // against the error message, Node.js throws an `ERR_AMBIGUOUS_ARGUMENT` error. + * assert.throws(throwingSecond, 'Second'); + * // TypeError [ERR_AMBIGUOUS_ARGUMENT] + * + * // The string is only used (as message) in case the function does not throw: + * assert.throws(notThrowing, 'Second'); + * // AssertionError [ERR_ASSERTION]: Missing expected exception: Second + * + * // If it was intended to match for the error message do this instead: + * // It does not throw because the error messages match. + * assert.throws(throwingSecond, /Second$/); + * + * // If the error message does not match, an AssertionError is thrown. + * assert.throws(throwingFirst, /Second$/); + * // AssertionError [ERR_ASSERTION] + * ``` + * + * Due to the confusing error-prone notation, avoid a string as the second + * argument. + * @since v0.1.21 + */ + function throws(block: () => unknown, message?: string | Error): void; + function throws(block: () => unknown, error: AssertPredicate, message?: string | Error): void; + /** + * Asserts that the function `fn` does not throw an error. + * + * Using `assert.doesNotThrow()` is actually not useful because there + * is no benefit in catching an error and then rethrowing it. Instead, consider + * adding a comment next to the specific code path that should not throw and keep + * error messages as expressive as possible. + * + * When `assert.doesNotThrow()` is called, it will immediately call the `fn`function. 
+ * + * If an error is thrown and it is the same type as that specified by the `error`parameter, then an `AssertionError` is thrown. If the error is of a + * different type, or if the `error` parameter is undefined, the error is + * propagated back to the caller. + * + * If specified, `error` can be a [`Class`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Classes), + * [`RegExp`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions) or a validation + * function. See {@link throws} for more details. + * + * The following, for instance, will throw the `TypeError` because there is no + * matching error type in the assertion: + * + * ```js + * import assert from 'assert/strict'; + * + * assert.doesNotThrow( + * () => { + * throw new TypeError('Wrong value'); + * }, + * SyntaxError + * ); + * ``` + * + * However, the following will result in an `AssertionError` with the message + * 'Got unwanted exception...': + * + * ```js + * import assert from 'assert/strict'; + * + * assert.doesNotThrow( + * () => { + * throw new TypeError('Wrong value'); + * }, + * TypeError + * ); + * ``` + * + * If an `AssertionError` is thrown and a value is provided for the `message`parameter, the value of `message` will be appended to the `AssertionError` message: + * + * ```js + * import assert from 'assert/strict'; + * + * assert.doesNotThrow( + * () => { + * throw new TypeError('Wrong value'); + * }, + * /Wrong value/, + * 'Whoops' + * ); + * // Throws: AssertionError: Got unwanted exception: Whoops + * ``` + * @since v0.1.21 + */ + function doesNotThrow(block: () => unknown, message?: string | Error): void; + function doesNotThrow(block: () => unknown, error: AssertPredicate, message?: string | Error): void; + /** + * Throws `value` if `value` is not `undefined` or `null`. This is useful when + * testing the `error` argument in callbacks. The stack trace contains all frames + * from the error passed to `ifError()` including the potential new frames for`ifError()` itself. + * + * ```js + * import assert from 'assert/strict'; + * + * assert.ifError(null); + * // OK + * assert.ifError(0); + * // AssertionError [ERR_ASSERTION]: ifError got unwanted exception: 0 + * assert.ifError('error'); + * // AssertionError [ERR_ASSERTION]: ifError got unwanted exception: 'error' + * assert.ifError(new Error()); + * // AssertionError [ERR_ASSERTION]: ifError got unwanted exception: Error + * + * // Create some random error frames. + * let err; + * (function errorFrame() { + * err = new Error('test error'); + * })(); + * + * (function ifErrorFrame() { + * assert.ifError(err); + * })(); + * // AssertionError [ERR_ASSERTION]: ifError got unwanted exception: test error + * // at ifErrorFrame + * // at errorFrame + * ``` + * @since v0.1.97 + */ + function ifError(value: unknown): asserts value is null | undefined; + /** + * Awaits the `asyncFn` promise or, if `asyncFn` is a function, immediately + * calls the function and awaits the returned promise to complete. It will then + * check that the promise is rejected. + * + * If `asyncFn` is a function and it throws an error synchronously,`assert.rejects()` will return a rejected `Promise` with that error. If the + * function does not return a promise, `assert.rejects()` will return a rejected`Promise` with an `ERR_INVALID_RETURN_VALUE` error. In both cases the error + * handler is skipped. + * + * Besides the async nature to await the completion behaves identically to {@link throws}. 
+ * + * If specified, `error` can be a [`Class`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Classes), + * [`RegExp`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions), a validation function, + * an object where each property will be tested for, or an instance of error where + * each property will be tested for including the non-enumerable `message` and`name` properties. + * + * If specified, `message` will be the message provided by the `AssertionError` if the `asyncFn` fails to reject. + * + * ```js + * import assert from 'assert/strict'; + * + * await assert.rejects( + * async () => { + * throw new TypeError('Wrong value'); + * }, + * { + * name: 'TypeError', + * message: 'Wrong value' + * } + * ); + * ``` + * + * ```js + * import assert from 'assert/strict'; + * + * await assert.rejects( + * async () => { + * throw new TypeError('Wrong value'); + * }, + * (err) => { + * assert.strictEqual(err.name, 'TypeError'); + * assert.strictEqual(err.message, 'Wrong value'); + * return true; + * } + * ); + * ``` + * + * ```js + * import assert from 'assert/strict'; + * + * assert.rejects( + * Promise.reject(new Error('Wrong value')), + * Error + * ).then(() => { + * // ... + * }); + * ``` + * + * `error` cannot be a string. If a string is provided as the second + * argument, then `error` is assumed to be omitted and the string will be used for`message` instead. This can lead to easy-to-miss mistakes. Please read the + * example in {@link throws} carefully if using a string as the second + * argument gets considered. + * @since v10.0.0 + */ + function rejects(block: (() => Promise) | Promise, message?: string | Error): Promise; + function rejects(block: (() => Promise) | Promise, error: AssertPredicate, message?: string | Error): Promise; + /** + * Awaits the `asyncFn` promise or, if `asyncFn` is a function, immediately + * calls the function and awaits the returned promise to complete. It will then + * check that the promise is not rejected. + * + * If `asyncFn` is a function and it throws an error synchronously,`assert.doesNotReject()` will return a rejected `Promise` with that error. If + * the function does not return a promise, `assert.doesNotReject()` will return a + * rejected `Promise` with an `ERR_INVALID_RETURN_VALUE` error. In both cases + * the error handler is skipped. + * + * Using `assert.doesNotReject()` is actually not useful because there is little + * benefit in catching a rejection and then rejecting it again. Instead, consider + * adding a comment next to the specific code path that should not reject and keep + * error messages as expressive as possible. + * + * If specified, `error` can be a [`Class`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Classes), + * [`RegExp`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions) or a validation + * function. See {@link throws} for more details. + * + * Besides the async nature to await the completion behaves identically to {@link doesNotThrow}. + * + * ```js + * import assert from 'assert/strict'; + * + * await assert.doesNotReject( + * async () => { + * throw new TypeError('Wrong value'); + * }, + * SyntaxError + * ); + * ``` + * + * ```js + * import assert from 'assert/strict'; + * + * assert.doesNotReject(Promise.reject(new TypeError('Wrong value'))) + * .then(() => { + * // ... 
+ * });
+ * ```
+ * @since v10.0.0
+ */
+ function doesNotReject(block: (() => Promise<unknown>) | Promise<unknown>, message?: string | Error): Promise<void>;
+ function doesNotReject(block: (() => Promise<unknown>) | Promise<unknown>, error: AssertPredicate, message?: string | Error): Promise<void>;
+ /**
+ * Expects the `string` input to match the regular expression.
+ *
+ * ```js
+ * import assert from 'assert/strict';
+ *
+ * assert.match('I will fail', /pass/);
+ * // AssertionError [ERR_ASSERTION]: The input did not match the regular ...
+ *
+ * assert.match(123, /pass/);
+ * // AssertionError [ERR_ASSERTION]: The "string" argument must be of type string.
+ *
+ * assert.match('I will pass', /pass/);
+ * // OK
+ * ```
+ *
+ * If the values do not match, or if the `string` argument is of another type than `string`, an `AssertionError` is thrown with a `message` property set equal
+ * to the value of the `message` parameter. If the `message` parameter is
+ * undefined, a default error message is assigned. If the `message` parameter is an
+ * instance of an `Error` then it will be thrown instead of the `AssertionError`.
+ * @since v13.6.0, v12.16.0
+ */
+ function match(value: string, regExp: RegExp, message?: string | Error): void;
+ /**
+ * Expects the `string` input not to match the regular expression.
+ *
+ * ```js
+ * import assert from 'assert/strict';
+ *
+ * assert.doesNotMatch('I will fail', /fail/);
+ * // AssertionError [ERR_ASSERTION]: The input was expected to not match the ...
+ *
+ * assert.doesNotMatch(123, /pass/);
+ * // AssertionError [ERR_ASSERTION]: The "string" argument must be of type string.
+ *
+ * assert.doesNotMatch('I will pass', /different/);
+ * // OK
+ * ```
+ *
+ * If the values do match, or if the `string` argument is of another type than `string`, an `AssertionError` is thrown with a `message` property set equal
+ * to the value of the `message` parameter. If the `message` parameter is
+ * undefined, a default error message is assigned. If the `message` parameter is an
+ * instance of an `Error` then it will be thrown instead of the `AssertionError`.
+ * @since v13.6.0, v12.16.0
+ */
+ function doesNotMatch(value: string, regExp: RegExp, message?: string | Error): void;
+ const strict: Omit<typeof assert, 'equal' | 'notEqual' | 'deepEqual' | 'notDeepEqual' | 'ok' | 'strictEqual' | 'deepStrictEqual' | 'ifError' | 'strict'> & {
+ (value: unknown, message?: string | Error): asserts value;
+ equal: typeof strictEqual;
+ notEqual: typeof notStrictEqual;
+ deepEqual: typeof deepStrictEqual;
+ notDeepEqual: typeof notDeepStrictEqual;
+ // Mapped types and assertion functions are incompatible?
+ // TS2775: Assertions require every name in the call target
+ // to be declared with an explicit type annotation.
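+ // Usage sketch (illustrative, not from the upstream file): in strict mode the
+ // "sloppy" names are aliases for their strict counterparts, e.g.
+ //   import { strict as assert } from 'node:assert';
+ //   assert.equal(1, '1'); // throws AssertionError – equal is strictEqual here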
+ ok: typeof ok; + strictEqual: typeof strictEqual; + deepStrictEqual: typeof deepStrictEqual; + ifError: typeof ifError; + strict: typeof strict; + }; + } + export = assert; +} +declare module 'node:assert' { + import assert = require('assert'); + export = assert; +} diff --git a/node_modules/@types/node/ts4.8/assert/strict.d.ts b/node_modules/@types/node/ts4.8/assert/strict.d.ts new file mode 100755 index 0000000..b4319b9 --- /dev/null +++ b/node_modules/@types/node/ts4.8/assert/strict.d.ts @@ -0,0 +1,8 @@ +declare module 'assert/strict' { + import { strict } from 'node:assert'; + export = strict; +} +declare module 'node:assert/strict' { + import { strict } from 'node:assert'; + export = strict; +} diff --git a/node_modules/@types/node/ts4.8/async_hooks.d.ts b/node_modules/@types/node/ts4.8/async_hooks.d.ts new file mode 100755 index 0000000..0bf4739 --- /dev/null +++ b/node_modules/@types/node/ts4.8/async_hooks.d.ts @@ -0,0 +1,501 @@ +/** + * The `async_hooks` module provides an API to track asynchronous resources. It + * can be accessed using: + * + * ```js + * import async_hooks from 'async_hooks'; + * ``` + * @experimental + * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/async_hooks.js) + */ +declare module 'async_hooks' { + /** + * ```js + * import { executionAsyncId } from 'async_hooks'; + * + * console.log(executionAsyncId()); // 1 - bootstrap + * fs.open(path, 'r', (err, fd) => { + * console.log(executionAsyncId()); // 6 - open() + * }); + * ``` + * + * The ID returned from `executionAsyncId()` is related to execution timing, not + * causality (which is covered by `triggerAsyncId()`): + * + * ```js + * const server = net.createServer((conn) => { + * // Returns the ID of the server, not of the new connection, because the + * // callback runs in the execution scope of the server's MakeCallback(). + * async_hooks.executionAsyncId(); + * + * }).listen(port, () => { + * // Returns the ID of a TickObject (process.nextTick()) because all + * // callbacks passed to .listen() are wrapped in a nextTick(). + * async_hooks.executionAsyncId(); + * }); + * ``` + * + * Promise contexts may not get precise `executionAsyncIds` by default. + * See the section on `promise execution tracking`. + * @since v8.1.0 + * @return The `asyncId` of the current execution context. Useful to track when something calls. + */ + function executionAsyncId(): number; + /** + * Resource objects returned by `executionAsyncResource()` are most often internal + * Node.js handle objects with undocumented APIs. Using any functions or properties + * on the object is likely to crash your application and should be avoided. + * + * Using `executionAsyncResource()` in the top-level execution context will + * return an empty object as there is no handle or request object to use, + * but having an object representing the top-level can be helpful. 
+ * + * ```js + * import { open } from 'fs'; + * import { executionAsyncId, executionAsyncResource } from 'async_hooks'; + * + * console.log(executionAsyncId(), executionAsyncResource()); // 1 {} + * open(new URL(import.meta.url), 'r', (err, fd) => { + * console.log(executionAsyncId(), executionAsyncResource()); // 7 FSReqWrap + * }); + * ``` + * + * This can be used to implement continuation local storage without the + * use of a tracking `Map` to store the metadata: + * + * ```js + * import { createServer } from 'http'; + * import { + * executionAsyncId, + * executionAsyncResource, + * createHook + * } from 'async_hooks'; + * const sym = Symbol('state'); // Private symbol to avoid pollution + * + * createHook({ + * init(asyncId, type, triggerAsyncId, resource) { + * const cr = executionAsyncResource(); + * if (cr) { + * resource[sym] = cr[sym]; + * } + * } + * }).enable(); + * + * const server = createServer((req, res) => { + * executionAsyncResource()[sym] = { state: req.url }; + * setTimeout(function() { + * res.end(JSON.stringify(executionAsyncResource()[sym])); + * }, 100); + * }).listen(3000); + * ``` + * @since v13.9.0, v12.17.0 + * @return The resource representing the current execution. Useful to store data within the resource. + */ + function executionAsyncResource(): object; + /** + * ```js + * const server = net.createServer((conn) => { + * // The resource that caused (or triggered) this callback to be called + * // was that of the new connection. Thus the return value of triggerAsyncId() + * // is the asyncId of "conn". + * async_hooks.triggerAsyncId(); + * + * }).listen(port, () => { + * // Even though all callbacks passed to .listen() are wrapped in a nextTick() + * // the callback itself exists because the call to the server's .listen() + * // was made. So the return value would be the ID of the server. + * async_hooks.triggerAsyncId(); + * }); + * ``` + * + * Promise contexts may not get valid `triggerAsyncId`s by default. See + * the section on `promise execution tracking`. + * @return The ID of the resource responsible for calling the callback that is currently being executed. + */ + function triggerAsyncId(): number; + interface HookCallbacks { + /** + * Called when a class is constructed that has the possibility to emit an asynchronous event. + * @param asyncId a unique ID for the async resource + * @param type the type of the async resource + * @param triggerAsyncId the unique ID of the async resource in whose execution context this async resource was created + * @param resource reference to the resource representing the async operation, needs to be released during destroy + */ + init?(asyncId: number, type: string, triggerAsyncId: number, resource: object): void; + /** + * When an asynchronous operation is initiated or completes a callback is called to notify the user. + * The before callback is called just before said callback is executed. + * @param asyncId the unique identifier assigned to the resource about to execute the callback. + */ + before?(asyncId: number): void; + /** + * Called immediately after the callback specified in before is completed. + * @param asyncId the unique identifier assigned to the resource which has executed the callback. + */ + after?(asyncId: number): void; + /** + * Called when a promise has resolve() called. This may not be in the same execution id + * as the promise itself. + * @param asyncId the unique id for the promise that was resolve()d. 
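+ *
+ * A rough sketch of the ordering described above (adapted from the Node.js
+ * async_hooks guide; the exact hook interleaving may differ between versions):
+ *
+ * ```js
+ * new Promise((resolve) => resolve(true)).then((a) => {});
+ * // Typical hook sequence:
+ * // init            – the outer promise
+ * // promise resolve
+ * // init            – the promise created by .then()
+ * // before
+ * // promise resolve
+ * // after
+ * ```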
+ */ + promiseResolve?(asyncId: number): void; + /** + * Called after the resource corresponding to asyncId is destroyed + * @param asyncId a unique ID for the async resource + */ + destroy?(asyncId: number): void; + } + interface AsyncHook { + /** + * Enable the callbacks for a given AsyncHook instance. If no callbacks are provided enabling is a noop. + */ + enable(): this; + /** + * Disable the callbacks for a given AsyncHook instance from the global pool of AsyncHook callbacks to be executed. Once a hook has been disabled it will not be called again until enabled. + */ + disable(): this; + } + /** + * Registers functions to be called for different lifetime events of each async + * operation. + * + * The callbacks `init()`/`before()`/`after()`/`destroy()` are called for the + * respective asynchronous event during a resource's lifetime. + * + * All callbacks are optional. For example, if only resource cleanup needs to + * be tracked, then only the `destroy` callback needs to be passed. The + * specifics of all functions that can be passed to `callbacks` is in the `Hook Callbacks` section. + * + * ```js + * import { createHook } from 'async_hooks'; + * + * const asyncHook = createHook({ + * init(asyncId, type, triggerAsyncId, resource) { }, + * destroy(asyncId) { } + * }); + * ``` + * + * The callbacks will be inherited via the prototype chain: + * + * ```js + * class MyAsyncCallbacks { + * init(asyncId, type, triggerAsyncId, resource) { } + * destroy(asyncId) {} + * } + * + * class MyAddedCallbacks extends MyAsyncCallbacks { + * before(asyncId) { } + * after(asyncId) { } + * } + * + * const asyncHook = async_hooks.createHook(new MyAddedCallbacks()); + * ``` + * + * Because promises are asynchronous resources whose lifecycle is tracked + * via the async hooks mechanism, the `init()`, `before()`, `after()`, and`destroy()` callbacks _must not_ be async functions that return promises. + * @since v8.1.0 + * @param callbacks The `Hook Callbacks` to register + * @return Instance used for disabling and enabling hooks + */ + function createHook(callbacks: HookCallbacks): AsyncHook; + interface AsyncResourceOptions { + /** + * The ID of the execution context that created this async event. + * @default executionAsyncId() + */ + triggerAsyncId?: number | undefined; + /** + * Disables automatic `emitDestroy` when the object is garbage collected. + * This usually does not need to be set (even if `emitDestroy` is called + * manually), unless the resource's `asyncId` is retrieved and the + * sensitive API's `emitDestroy` is called with it. + * @default false + */ + requireManualDestroy?: boolean | undefined; + } + /** + * The class `AsyncResource` is designed to be extended by the embedder's async + * resources. Using this, users can easily trigger the lifetime events of their + * own resources. + * + * The `init` hook will trigger when an `AsyncResource` is instantiated. + * + * The following is an overview of the `AsyncResource` API. + * + * ```js + * import { AsyncResource, executionAsyncId } from 'async_hooks'; + * + * // AsyncResource() is meant to be extended. Instantiating a + * // new AsyncResource() also triggers init. If triggerAsyncId is omitted then + * // async_hook.executionAsyncId() is used. + * const asyncResource = new AsyncResource( + * type, { triggerAsyncId: executionAsyncId(), requireManualDestroy: false } + * ); + * + * // Run a function in the execution context of the resource. 
This will + * // * establish the context of the resource + * // * trigger the AsyncHooks before callbacks + * // * call the provided function `fn` with the supplied arguments + * // * trigger the AsyncHooks after callbacks + * // * restore the original execution context + * asyncResource.runInAsyncScope(fn, thisArg, ...args); + * + * // Call AsyncHooks destroy callbacks. + * asyncResource.emitDestroy(); + * + * // Return the unique ID assigned to the AsyncResource instance. + * asyncResource.asyncId(); + * + * // Return the trigger ID for the AsyncResource instance. + * asyncResource.triggerAsyncId(); + * ``` + */ + class AsyncResource { + /** + * AsyncResource() is meant to be extended. Instantiating a + * new AsyncResource() also triggers init. If triggerAsyncId is omitted then + * async_hook.executionAsyncId() is used. + * @param type The type of async event. + * @param triggerAsyncId The ID of the execution context that created + * this async event (default: `executionAsyncId()`), or an + * AsyncResourceOptions object (since v9.3.0) + */ + constructor(type: string, triggerAsyncId?: number | AsyncResourceOptions); + /** + * Binds the given function to the current execution context. + * + * The returned function will have an `asyncResource` property referencing + * the `AsyncResource` to which the function is bound. + * @since v14.8.0, v12.19.0 + * @param fn The function to bind to the current execution context. + * @param type An optional name to associate with the underlying `AsyncResource`. + */ + static bind any, ThisArg>( + fn: Func, + type?: string, + thisArg?: ThisArg + ): Func & { + asyncResource: AsyncResource; + }; + /** + * Binds the given function to execute to this `AsyncResource`'s scope. + * + * The returned function will have an `asyncResource` property referencing + * the `AsyncResource` to which the function is bound. + * @since v14.8.0, v12.19.0 + * @param fn The function to bind to the current `AsyncResource`. + */ + bind any>( + fn: Func + ): Func & { + asyncResource: AsyncResource; + }; + /** + * Call the provided function with the provided arguments in the execution context + * of the async resource. This will establish the context, trigger the AsyncHooks + * before callbacks, call the function, trigger the AsyncHooks after callbacks, and + * then restore the original execution context. + * @since v9.6.0 + * @param fn The function to call in the execution context of this async resource. + * @param thisArg The receiver to be used for the function call. + * @param args Optional arguments to pass to the function. + */ + runInAsyncScope(fn: (this: This, ...args: any[]) => Result, thisArg?: This, ...args: any[]): Result; + /** + * Call all `destroy` hooks. This should only ever be called once. An error will + * be thrown if it is called more than once. This **must** be manually called. If + * the resource is left to be collected by the GC then the `destroy` hooks will + * never be called. + * @return A reference to `asyncResource`. + */ + emitDestroy(): this; + /** + * @return The unique `asyncId` assigned to the resource. + */ + asyncId(): number; + /** + * + * @return The same `triggerAsyncId` that is passed to the `AsyncResource` constructor. + */ + triggerAsyncId(): number; + } + /** + * This class creates stores that stay coherent through asynchronous operations. 
+ * + * While you can create your own implementation on top of the `async_hooks` module,`AsyncLocalStorage` should be preferred as it is a performant and memory safe + * implementation that involves significant optimizations that are non-obvious to + * implement. + * + * The following example uses `AsyncLocalStorage` to build a simple logger + * that assigns IDs to incoming HTTP requests and includes them in messages + * logged within each request. + * + * ```js + * import http from 'http'; + * import { AsyncLocalStorage } from 'async_hooks'; + * + * const asyncLocalStorage = new AsyncLocalStorage(); + * + * function logWithId(msg) { + * const id = asyncLocalStorage.getStore(); + * console.log(`${id !== undefined ? id : '-'}:`, msg); + * } + * + * let idSeq = 0; + * http.createServer((req, res) => { + * asyncLocalStorage.run(idSeq++, () => { + * logWithId('start'); + * // Imagine any chain of async operations here + * setImmediate(() => { + * logWithId('finish'); + * res.end(); + * }); + * }); + * }).listen(8080); + * + * http.get('http://localhost:8080'); + * http.get('http://localhost:8080'); + * // Prints: + * // 0: start + * // 1: start + * // 0: finish + * // 1: finish + * ``` + * + * Each instance of `AsyncLocalStorage` maintains an independent storage context. + * Multiple instances can safely exist simultaneously without risk of interfering + * with each other's data. + * @since v13.10.0, v12.17.0 + */ + class AsyncLocalStorage { + /** + * Disables the instance of `AsyncLocalStorage`. All subsequent calls + * to `asyncLocalStorage.getStore()` will return `undefined` until`asyncLocalStorage.run()` or `asyncLocalStorage.enterWith()` is called again. + * + * When calling `asyncLocalStorage.disable()`, all current contexts linked to the + * instance will be exited. + * + * Calling `asyncLocalStorage.disable()` is required before the`asyncLocalStorage` can be garbage collected. This does not apply to stores + * provided by the `asyncLocalStorage`, as those objects are garbage collected + * along with the corresponding async resources. + * + * Use this method when the `asyncLocalStorage` is not in use anymore + * in the current process. + * @since v13.10.0, v12.17.0 + * @experimental + */ + disable(): void; + /** + * Returns the current store. + * If called outside of an asynchronous context initialized by + * calling `asyncLocalStorage.run()` or `asyncLocalStorage.enterWith()`, it + * returns `undefined`. + * @since v13.10.0, v12.17.0 + */ + getStore(): T | undefined; + /** + * Runs a function synchronously within a context and returns its + * return value. The store is not accessible outside of the callback function. + * The store is accessible to any asynchronous operations created within the + * callback. + * + * The optional `args` are passed to the callback function. + * + * If the callback function throws an error, the error is thrown by `run()` too. + * The stacktrace is not impacted by this call and the context is exited. 
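+ *
+ * A small sketch (assuming an `AsyncLocalStorage` instance named
+ * `asyncLocalStorage`) of the return value and extra arguments mentioned above:
+ *
+ * ```js
+ * const sum = asyncLocalStorage.run({ id: 1 }, (a, b) => {
+ *   // The store is available inside the callback...
+ *   return a + b + asyncLocalStorage.getStore().id;
+ * }, 2, 3);
+ * console.log(sum); // 6
+ * // ...but not outside of it.
+ * asyncLocalStorage.getStore(); // undefined
+ * ```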
+ * + * Example: + * + * ```js + * const store = { id: 2 }; + * try { + * asyncLocalStorage.run(store, () => { + * asyncLocalStorage.getStore(); // Returns the store object + * setTimeout(() => { + * asyncLocalStorage.getStore(); // Returns the store object + * }, 200); + * throw new Error(); + * }); + * } catch (e) { + * asyncLocalStorage.getStore(); // Returns undefined + * // The error will be caught here + * } + * ``` + * @since v13.10.0, v12.17.0 + */ + run(store: T, callback: (...args: TArgs) => R, ...args: TArgs): R; + /** + * Runs a function synchronously outside of a context and returns its + * return value. The store is not accessible within the callback function or + * the asynchronous operations created within the callback. Any `getStore()`call done within the callback function will always return `undefined`. + * + * The optional `args` are passed to the callback function. + * + * If the callback function throws an error, the error is thrown by `exit()` too. + * The stacktrace is not impacted by this call and the context is re-entered. + * + * Example: + * + * ```js + * // Within a call to run + * try { + * asyncLocalStorage.getStore(); // Returns the store object or value + * asyncLocalStorage.exit(() => { + * asyncLocalStorage.getStore(); // Returns undefined + * throw new Error(); + * }); + * } catch (e) { + * asyncLocalStorage.getStore(); // Returns the same object or value + * // The error will be caught here + * } + * ``` + * @since v13.10.0, v12.17.0 + * @experimental + */ + exit(callback: (...args: TArgs) => R, ...args: TArgs): R; + /** + * Transitions into the context for the remainder of the current + * synchronous execution and then persists the store through any following + * asynchronous calls. + * + * Example: + * + * ```js + * const store = { id: 1 }; + * // Replaces previous store with the given store object + * asyncLocalStorage.enterWith(store); + * asyncLocalStorage.getStore(); // Returns the store object + * someAsyncOperation(() => { + * asyncLocalStorage.getStore(); // Returns the same object + * }); + * ``` + * + * This transition will continue for the _entire_ synchronous execution. + * This means that if, for example, the context is entered within an event + * handler subsequent event handlers will also run within that context unless + * specifically bound to another context with an `AsyncResource`. That is why`run()` should be preferred over `enterWith()` unless there are strong reasons + * to use the latter method. + * + * ```js + * const store = { id: 1 }; + * + * emitter.on('my-event', () => { + * asyncLocalStorage.enterWith(store); + * }); + * emitter.on('my-event', () => { + * asyncLocalStorage.getStore(); // Returns the same object + * }); + * + * asyncLocalStorage.getStore(); // Returns undefined + * emitter.emit('my-event'); + * asyncLocalStorage.getStore(); // Returns the same object + * ``` + * @since v13.11.0, v12.17.0 + * @experimental + */ + enterWith(store: T): void; + } +} +declare module 'node:async_hooks' { + export * from 'async_hooks'; +} diff --git a/node_modules/@types/node/ts4.8/buffer.d.ts b/node_modules/@types/node/ts4.8/buffer.d.ts new file mode 100755 index 0000000..ea859cd --- /dev/null +++ b/node_modules/@types/node/ts4.8/buffer.d.ts @@ -0,0 +1,2259 @@ +/** + * `Buffer` objects are used to represent a fixed-length sequence of bytes. Many + * Node.js APIs support `Buffer`s. 
+ * + * The `Buffer` class is a subclass of JavaScript's [`Uint8Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint8Array) class and + * extends it with methods that cover additional use cases. Node.js APIs accept + * plain [`Uint8Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint8Array) s wherever `Buffer`s are supported as well. + * + * While the `Buffer` class is available within the global scope, it is still + * recommended to explicitly reference it via an import or require statement. + * + * ```js + * import { Buffer } from 'buffer'; + * + * // Creates a zero-filled Buffer of length 10. + * const buf1 = Buffer.alloc(10); + * + * // Creates a Buffer of length 10, + * // filled with bytes which all have the value `1`. + * const buf2 = Buffer.alloc(10, 1); + * + * // Creates an uninitialized buffer of length 10. + * // This is faster than calling Buffer.alloc() but the returned + * // Buffer instance might contain old data that needs to be + * // overwritten using fill(), write(), or other functions that fill the Buffer's + * // contents. + * const buf3 = Buffer.allocUnsafe(10); + * + * // Creates a Buffer containing the bytes [1, 2, 3]. + * const buf4 = Buffer.from([1, 2, 3]); + * + * // Creates a Buffer containing the bytes [1, 1, 1, 1] – the entries + * // are all truncated using `(value & 255)` to fit into the range 0–255. + * const buf5 = Buffer.from([257, 257.5, -255, '1']); + * + * // Creates a Buffer containing the UTF-8-encoded bytes for the string 'tést': + * // [0x74, 0xc3, 0xa9, 0x73, 0x74] (in hexadecimal notation) + * // [116, 195, 169, 115, 116] (in decimal notation) + * const buf6 = Buffer.from('tést'); + * + * // Creates a Buffer containing the Latin-1 bytes [0x74, 0xe9, 0x73, 0x74]. + * const buf7 = Buffer.from('tést', 'latin1'); + * ``` + * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/buffer.js) + */ +declare module 'buffer' { + import { BinaryLike } from 'node:crypto'; + import { ReadableStream as WebReadableStream } from 'node:stream/web'; + export const INSPECT_MAX_BYTES: number; + export const kMaxLength: number; + export const kStringMaxLength: number; + export const constants: { + MAX_LENGTH: number; + MAX_STRING_LENGTH: number; + }; + export type TranscodeEncoding = 'ascii' | 'utf8' | 'utf16le' | 'ucs2' | 'latin1' | 'binary'; + /** + * Re-encodes the given `Buffer` or `Uint8Array` instance from one character + * encoding to another. Returns a new `Buffer` instance. + * + * Throws if the `fromEnc` or `toEnc` specify invalid character encodings or if + * conversion from `fromEnc` to `toEnc` is not permitted. + * + * Encodings supported by `buffer.transcode()` are: `'ascii'`, `'utf8'`,`'utf16le'`, `'ucs2'`, `'latin1'`, and `'binary'`. + * + * The transcoding process will use substitution characters if a given byte + * sequence cannot be adequately represented in the target encoding. For instance: + * + * ```js + * import { Buffer, transcode } from 'buffer'; + * + * const newBuf = transcode(Buffer.from('€'), 'utf8', 'ascii'); + * console.log(newBuf.toString('ascii')); + * // Prints: '?' + * ``` + * + * Because the Euro (`€`) sign is not representable in US-ASCII, it is replaced + * with `?` in the transcoded `Buffer`. + * @since v7.1.0 + * @param source A `Buffer` or `Uint8Array` instance. + * @param fromEnc The current encoding. + * @param toEnc To target encoding. 
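+ *
+ * As noted above, an unsupported encoding (or an unsupported conversion) makes
+ * `transcode()` throw. A brief, illustrative sketch:
+ *
+ * ```js
+ * import { Buffer, transcode } from 'buffer';
+ *
+ * try {
+ *   transcode(Buffer.from('abc'), 'utf8', 'utf7'); // 'utf7' is not supported
+ * } catch (err) {
+ *   console.log(err.code); // 'ERR_UNKNOWN_ENCODING' (assumed error code)
+ * }
+ * ```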
+ */ + export function transcode(source: Uint8Array, fromEnc: TranscodeEncoding, toEnc: TranscodeEncoding): Buffer; + export const SlowBuffer: { + /** @deprecated since v6.0.0, use `Buffer.allocUnsafeSlow()` */ + new (size: number): Buffer; + prototype: Buffer; + }; + /** + * Resolves a `'blob:nodedata:...'` an associated `Blob` object registered using + * a prior call to `URL.createObjectURL()`. + * @since v16.7.0 + * @experimental + * @param id A `'blob:nodedata:...` URL string returned by a prior call to `URL.createObjectURL()`. + */ + export function resolveObjectURL(id: string): Blob | undefined; + export { Buffer }; + /** + * @experimental + */ + export interface BlobOptions { + /** + * @default 'utf8' + */ + encoding?: BufferEncoding | undefined; + /** + * The Blob content-type. The intent is for `type` to convey + * the MIME media type of the data, however no validation of the type format + * is performed. + */ + type?: string | undefined; + } + /** + * A [`Blob`](https://developer.mozilla.org/en-US/docs/Web/API/Blob) encapsulates immutable, raw data that can be safely shared across + * multiple worker threads. + * @since v15.7.0, v14.18.0 + */ + export class Blob { + /** + * The total size of the `Blob` in bytes. + * @since v15.7.0, v14.18.0 + */ + readonly size: number; + /** + * The content-type of the `Blob`. + * @since v15.7.0, v14.18.0 + */ + readonly type: string; + /** + * Creates a new `Blob` object containing a concatenation of the given sources. + * + * {ArrayBuffer}, {TypedArray}, {DataView}, and {Buffer} sources are copied into + * the 'Blob' and can therefore be safely modified after the 'Blob' is created. + * + * String sources are also copied into the `Blob`. + */ + constructor(sources: Array, options?: BlobOptions); + /** + * Returns a promise that fulfills with an [ArrayBuffer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer) containing a copy of + * the `Blob` data. + * @since v15.7.0, v14.18.0 + */ + arrayBuffer(): Promise; + /** + * Creates and returns a new `Blob` containing a subset of this `Blob` objects + * data. The original `Blob` is not altered. + * @since v15.7.0, v14.18.0 + * @param start The starting index. + * @param end The ending index. + * @param type The content-type for the new `Blob` + */ + slice(start?: number, end?: number, type?: string): Blob; + /** + * Returns a promise that fulfills with the contents of the `Blob` decoded as a + * UTF-8 string. + * @since v15.7.0, v14.18.0 + */ + text(): Promise; + /** + * Returns a new (WHATWG) `ReadableStream` that allows the content of the `Blob` to be read. + * @since v16.7.0 + */ + stream(): WebReadableStream; + } + export import atob = globalThis.atob; + export import btoa = globalThis.btoa; + + import { Blob as NodeBlob } from 'buffer'; + // This conditional type will be the existing global Blob in a browser, or + // the copy below in a Node environment. + type __Blob = typeof globalThis extends { onmessage: any, Blob: any } + ? {} : NodeBlob; + + global { + // Buffer class + type BufferEncoding = 'ascii' | 'utf8' | 'utf-8' | 'utf16le' | 'ucs2' | 'ucs-2' | 'base64' | 'base64url' | 'latin1' | 'binary' | 'hex'; + type WithImplicitCoercion = + | T + | { + valueOf(): T; + }; + /** + * Raw data is stored in instances of the Buffer class. + * A Buffer is similar to an array of integers but corresponds to a raw memory allocation outside the V8 heap. A Buffer cannot be resized. 
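+ *
+ * A short illustration of the two points above (Uint8Array subclass, fixed length):
+ *
+ * ```js
+ * import { Buffer } from 'buffer';
+ *
+ * const buf = Buffer.from([1, 2, 3]);
+ * console.log(buf instanceof Uint8Array); // true
+ * console.log(buf.length);                // 3 – the length is fixed at allocation
+ * ```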
+ * Valid string encodings: 'ascii'|'utf8'|'utf16le'|'ucs2'(alias of 'utf16le')|'base64'|'base64url'|'binary'(deprecated)|'hex' + */ + interface BufferConstructor { + /** + * Allocates a new buffer containing the given {str}. + * + * @param str String to store in buffer. + * @param encoding encoding to use, optional. Default is 'utf8' + * @deprecated since v10.0.0 - Use `Buffer.from(string[, encoding])` instead. + */ + new (str: string, encoding?: BufferEncoding): Buffer; + /** + * Allocates a new buffer of {size} octets. + * + * @param size count of octets to allocate. + * @deprecated since v10.0.0 - Use `Buffer.alloc()` instead (also see `Buffer.allocUnsafe()`). + */ + new (size: number): Buffer; + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + * @deprecated since v10.0.0 - Use `Buffer.from(array)` instead. + */ + new (array: Uint8Array): Buffer; + /** + * Produces a Buffer backed by the same allocated memory as + * the given {ArrayBuffer}/{SharedArrayBuffer}. + * + * + * @param arrayBuffer The ArrayBuffer with which to share memory. + * @deprecated since v10.0.0 - Use `Buffer.from(arrayBuffer[, byteOffset[, length]])` instead. + */ + new (arrayBuffer: ArrayBuffer | SharedArrayBuffer): Buffer; + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + * @deprecated since v10.0.0 - Use `Buffer.from(array)` instead. + */ + new (array: ReadonlyArray): Buffer; + /** + * Copies the passed {buffer} data onto a new {Buffer} instance. + * + * @param buffer The buffer to copy. + * @deprecated since v10.0.0 - Use `Buffer.from(buffer)` instead. + */ + new (buffer: Buffer): Buffer; + /** + * Allocates a new `Buffer` using an `array` of bytes in the range `0` – `255`. + * Array entries outside that range will be truncated to fit into it. + * + * ```js + * import { Buffer } from 'buffer'; + * + * // Creates a new Buffer containing the UTF-8 bytes of the string 'buffer'. + * const buf = Buffer.from([0x62, 0x75, 0x66, 0x66, 0x65, 0x72]); + * ``` + * + * A `TypeError` will be thrown if `array` is not an `Array` or another type + * appropriate for `Buffer.from()` variants. + * + * `Buffer.from(array)` and `Buffer.from(string)` may also use the internal`Buffer` pool like `Buffer.allocUnsafe()` does. + * @since v5.10.0 + */ + from(arrayBuffer: WithImplicitCoercion, byteOffset?: number, length?: number): Buffer; + /** + * Creates a new Buffer using the passed {data} + * @param data data to create a new Buffer + */ + from(data: Uint8Array | ReadonlyArray): Buffer; + from(data: WithImplicitCoercion | string>): Buffer; + /** + * Creates a new Buffer containing the given JavaScript string {str}. + * If provided, the {encoding} parameter identifies the character encoding. + * If not provided, {encoding} defaults to 'utf8'. + */ + from( + str: + | WithImplicitCoercion + | { + [Symbol.toPrimitive](hint: 'string'): string; + }, + encoding?: BufferEncoding + ): Buffer; + /** + * Creates a new Buffer using the passed {data} + * @param values to create a new Buffer + */ + of(...items: number[]): Buffer; + /** + * Returns `true` if `obj` is a `Buffer`, `false` otherwise. 
+ * + * ```js + * import { Buffer } from 'buffer'; + * + * Buffer.isBuffer(Buffer.alloc(10)); // true + * Buffer.isBuffer(Buffer.from('foo')); // true + * Buffer.isBuffer('a string'); // false + * Buffer.isBuffer([]); // false + * Buffer.isBuffer(new Uint8Array(1024)); // false + * ``` + * @since v0.1.101 + */ + isBuffer(obj: any): obj is Buffer; + /** + * Returns `true` if `encoding` is the name of a supported character encoding, + * or `false` otherwise. + * + * ```js + * import { Buffer } from 'buffer'; + * + * console.log(Buffer.isEncoding('utf8')); + * // Prints: true + * + * console.log(Buffer.isEncoding('hex')); + * // Prints: true + * + * console.log(Buffer.isEncoding('utf/8')); + * // Prints: false + * + * console.log(Buffer.isEncoding('')); + * // Prints: false + * ``` + * @since v0.9.1 + * @param encoding A character encoding name to check. + */ + isEncoding(encoding: string): encoding is BufferEncoding; + /** + * Returns the byte length of a string when encoded using `encoding`. + * This is not the same as [`String.prototype.length`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/length), which does not account + * for the encoding that is used to convert the string into bytes. + * + * For `'base64'`, `'base64url'`, and `'hex'`, this function assumes valid input. + * For strings that contain non-base64/hex-encoded data (e.g. whitespace), the + * return value might be greater than the length of a `Buffer` created from the + * string. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const str = '\u00bd + \u00bc = \u00be'; + * + * console.log(`${str}: ${str.length} characters, ` + + * `${Buffer.byteLength(str, 'utf8')} bytes`); + * // Prints: ½ + ¼ = ¾: 9 characters, 12 bytes + * ``` + * + * When `string` is a + * `Buffer`/[`DataView`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/DataView)/[`TypedArray`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/- + * Reference/Global_Objects/TypedArray)/[`ArrayBuffer`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer)/[`SharedArrayBuffer`](https://develop- + * er.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/SharedArrayBuffer), the byte length as reported by `.byteLength`is returned. + * @since v0.1.90 + * @param string A value to calculate the length of. + * @param [encoding='utf8'] If `string` is a string, this is its encoding. + * @return The number of bytes contained within `string`. + */ + byteLength(string: string | NodeJS.ArrayBufferView | ArrayBuffer | SharedArrayBuffer, encoding?: BufferEncoding): number; + /** + * Returns a new `Buffer` which is the result of concatenating all the `Buffer`instances in the `list` together. + * + * If the list has no items, or if the `totalLength` is 0, then a new zero-length`Buffer` is returned. + * + * If `totalLength` is not provided, it is calculated from the `Buffer` instances + * in `list` by adding their lengths. + * + * If `totalLength` is provided, it is coerced to an unsigned integer. If the + * combined length of the `Buffer`s in `list` exceeds `totalLength`, the result is + * truncated to `totalLength`. + * + * ```js + * import { Buffer } from 'buffer'; + * + * // Create a single `Buffer` from a list of three `Buffer` instances. 
+ * + * const buf1 = Buffer.alloc(10); + * const buf2 = Buffer.alloc(14); + * const buf3 = Buffer.alloc(18); + * const totalLength = buf1.length + buf2.length + buf3.length; + * + * console.log(totalLength); + * // Prints: 42 + * + * const bufA = Buffer.concat([buf1, buf2, buf3], totalLength); + * + * console.log(bufA); + * // Prints: + * console.log(bufA.length); + * // Prints: 42 + * ``` + * + * `Buffer.concat()` may also use the internal `Buffer` pool like `Buffer.allocUnsafe()` does. + * @since v0.7.11 + * @param list List of `Buffer` or {@link Uint8Array} instances to concatenate. + * @param totalLength Total length of the `Buffer` instances in `list` when concatenated. + */ + concat(list: ReadonlyArray, totalLength?: number): Buffer; + /** + * Compares `buf1` to `buf2`, typically for the purpose of sorting arrays of`Buffer` instances. This is equivalent to calling `buf1.compare(buf2)`. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf1 = Buffer.from('1234'); + * const buf2 = Buffer.from('0123'); + * const arr = [buf1, buf2]; + * + * console.log(arr.sort(Buffer.compare)); + * // Prints: [ , ] + * // (This result is equal to: [buf2, buf1].) + * ``` + * @since v0.11.13 + * @return Either `-1`, `0`, or `1`, depending on the result of the comparison. See `compare` for details. + */ + compare(buf1: Uint8Array, buf2: Uint8Array): -1 | 0 | 1; + /** + * Allocates a new `Buffer` of `size` bytes. If `fill` is `undefined`, the`Buffer` will be zero-filled. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.alloc(5); + * + * console.log(buf); + * // Prints: + * ``` + * + * If `size` is larger than {@link constants.MAX_LENGTH} or smaller than 0, `ERR_INVALID_ARG_VALUE` is thrown. + * + * If `fill` is specified, the allocated `Buffer` will be initialized by calling `buf.fill(fill)`. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.alloc(5, 'a'); + * + * console.log(buf); + * // Prints: + * ``` + * + * If both `fill` and `encoding` are specified, the allocated `Buffer` will be + * initialized by calling `buf.fill(fill, encoding)`. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.alloc(11, 'aGVsbG8gd29ybGQ=', 'base64'); + * + * console.log(buf); + * // Prints: + * ``` + * + * Calling `Buffer.alloc()` can be measurably slower than the alternative `Buffer.allocUnsafe()` but ensures that the newly created `Buffer` instance + * contents will never contain sensitive data from previous allocations, including + * data that might not have been allocated for `Buffer`s. + * + * A `TypeError` will be thrown if `size` is not a number. + * @since v5.10.0 + * @param size The desired length of the new `Buffer`. + * @param [fill=0] A value to pre-fill the new `Buffer` with. + * @param [encoding='utf8'] If `fill` is a string, this is its encoding. + */ + alloc(size: number, fill?: string | Buffer | number, encoding?: BufferEncoding): Buffer; + /** + * Allocates a new `Buffer` of `size` bytes. If `size` is larger than {@link constants.MAX_LENGTH} or smaller than 0, `ERR_INVALID_ARG_VALUE` is thrown. + * + * The underlying memory for `Buffer` instances created in this way is _not_ + * _initialized_. The contents of the newly created `Buffer` are unknown and _may contain sensitive data_. Use `Buffer.alloc()` instead to initialize`Buffer` instances with zeroes. 
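+ *
+ * A minimal sketch of the trade-off described above:
+ *
+ * ```js
+ * import { Buffer } from 'buffer';
+ *
+ * const safe = Buffer.alloc(4);       // always <Buffer 00 00 00 00>
+ * const fast = Buffer.allocUnsafe(4); // contents are arbitrary until overwritten
+ * fast.fill(0);                       // now deterministic as well
+ * ```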
+ * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.allocUnsafe(10); + * + * console.log(buf); + * // Prints (contents may vary): + * + * buf.fill(0); + * + * console.log(buf); + * // Prints: + * ``` + * + * A `TypeError` will be thrown if `size` is not a number. + * + * The `Buffer` module pre-allocates an internal `Buffer` instance of + * size `Buffer.poolSize` that is used as a pool for the fast allocation of new`Buffer` instances created using `Buffer.allocUnsafe()`,`Buffer.from(array)`, `Buffer.concat()`, and the + * deprecated`new Buffer(size)` constructor only when `size` is less than or equal + * to `Buffer.poolSize >> 1` (floor of `Buffer.poolSize` divided by two). + * + * Use of this pre-allocated internal memory pool is a key difference between + * calling `Buffer.alloc(size, fill)` vs. `Buffer.allocUnsafe(size).fill(fill)`. + * Specifically, `Buffer.alloc(size, fill)` will _never_ use the internal `Buffer`pool, while `Buffer.allocUnsafe(size).fill(fill)`_will_ use the internal`Buffer` pool if `size` is less + * than or equal to half `Buffer.poolSize`. The + * difference is subtle but can be important when an application requires the + * additional performance that `Buffer.allocUnsafe()` provides. + * @since v5.10.0 + * @param size The desired length of the new `Buffer`. + */ + allocUnsafe(size: number): Buffer; + /** + * Allocates a new `Buffer` of `size` bytes. If `size` is larger than {@link constants.MAX_LENGTH} or smaller than 0, `ERR_INVALID_ARG_VALUE` is thrown. A zero-length `Buffer` is created + * if `size` is 0. + * + * The underlying memory for `Buffer` instances created in this way is _not_ + * _initialized_. The contents of the newly created `Buffer` are unknown and _may contain sensitive data_. Use `buf.fill(0)` to initialize + * such `Buffer` instances with zeroes. + * + * When using `Buffer.allocUnsafe()` to allocate new `Buffer` instances, + * allocations under 4 KB are sliced from a single pre-allocated `Buffer`. This + * allows applications to avoid the garbage collection overhead of creating many + * individually allocated `Buffer` instances. This approach improves both + * performance and memory usage by eliminating the need to track and clean up as + * many individual `ArrayBuffer` objects. + * + * However, in the case where a developer may need to retain a small chunk of + * memory from a pool for an indeterminate amount of time, it may be appropriate + * to create an un-pooled `Buffer` instance using `Buffer.allocUnsafeSlow()` and + * then copying out the relevant bits. + * + * ```js + * import { Buffer } from 'buffer'; + * + * // Need to keep around a few small chunks of memory. + * const store = []; + * + * socket.on('readable', () => { + * let data; + * while (null !== (data = readable.read())) { + * // Allocate for retained data. + * const sb = Buffer.allocUnsafeSlow(10); + * + * // Copy the data into the new allocation. + * data.copy(sb, 0, 0, 10); + * + * store.push(sb); + * } + * }); + * ``` + * + * A `TypeError` will be thrown if `size` is not a number. + * @since v5.12.0 + * @param size The desired length of the new `Buffer`. + */ + allocUnsafeSlow(size: number): Buffer; + /** + * This is the size (in bytes) of pre-allocated internal `Buffer` instances used + * for pooling. This value may be modified. + * @since v0.11.3 + */ + poolSize: number; + } + interface Buffer extends Uint8Array { + /** + * Writes `string` to `buf` at `offset` according to the character encoding in`encoding`. 
The `length` parameter is the number of bytes to write. If `buf` did + * not contain enough space to fit the entire string, only part of `string` will be + * written. However, partially encoded characters will not be written. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.alloc(256); + * + * const len = buf.write('\u00bd + \u00bc = \u00be', 0); + * + * console.log(`${len} bytes: ${buf.toString('utf8', 0, len)}`); + * // Prints: 12 bytes: ½ + ¼ = ¾ + * + * const buffer = Buffer.alloc(10); + * + * const length = buffer.write('abcd', 8); + * + * console.log(`${length} bytes: ${buffer.toString('utf8', 8, 10)}`); + * // Prints: 2 bytes : ab + * ``` + * @since v0.1.90 + * @param string String to write to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write `string`. + * @param [length=buf.length - offset] Maximum number of bytes to write (written bytes will not exceed `buf.length - offset`). + * @param [encoding='utf8'] The character encoding of `string`. + * @return Number of bytes written. + */ + write(string: string, encoding?: BufferEncoding): number; + write(string: string, offset: number, encoding?: BufferEncoding): number; + write(string: string, offset: number, length: number, encoding?: BufferEncoding): number; + /** + * Decodes `buf` to a string according to the specified character encoding in`encoding`. `start` and `end` may be passed to decode only a subset of `buf`. + * + * If `encoding` is `'utf8'` and a byte sequence in the input is not valid UTF-8, + * then each invalid byte is replaced with the replacement character `U+FFFD`. + * + * The maximum length of a string instance (in UTF-16 code units) is available + * as {@link constants.MAX_STRING_LENGTH}. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf1 = Buffer.allocUnsafe(26); + * + * for (let i = 0; i < 26; i++) { + * // 97 is the decimal ASCII value for 'a'. + * buf1[i] = i + 97; + * } + * + * console.log(buf1.toString('utf8')); + * // Prints: abcdefghijklmnopqrstuvwxyz + * console.log(buf1.toString('utf8', 0, 5)); + * // Prints: abcde + * + * const buf2 = Buffer.from('tést'); + * + * console.log(buf2.toString('hex')); + * // Prints: 74c3a97374 + * console.log(buf2.toString('utf8', 0, 3)); + * // Prints: té + * console.log(buf2.toString(undefined, 0, 3)); + * // Prints: té + * ``` + * @since v0.1.90 + * @param [encoding='utf8'] The character encoding to use. + * @param [start=0] The byte offset to start decoding at. + * @param [end=buf.length] The byte offset to stop decoding at (not inclusive). + */ + toString(encoding?: BufferEncoding, start?: number, end?: number): string; + /** + * Returns a JSON representation of `buf`. [`JSON.stringify()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify) implicitly calls + * this function when stringifying a `Buffer` instance. + * + * `Buffer.from()` accepts objects in the format returned from this method. + * In particular, `Buffer.from(buf.toJSON())` works like `Buffer.from(buf)`. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.from([0x1, 0x2, 0x3, 0x4, 0x5]); + * const json = JSON.stringify(buf); + * + * console.log(json); + * // Prints: {"type":"Buffer","data":[1,2,3,4,5]} + * + * const copy = JSON.parse(json, (key, value) => { + * return value && value.type === 'Buffer' ? 
+ * Buffer.from(value) : + * value; + * }); + * + * console.log(copy); + * // Prints: + * ``` + * @since v0.9.2 + */ + toJSON(): { + type: 'Buffer'; + data: number[]; + }; + /** + * Returns `true` if both `buf` and `otherBuffer` have exactly the same bytes,`false` otherwise. Equivalent to `buf.compare(otherBuffer) === 0`. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf1 = Buffer.from('ABC'); + * const buf2 = Buffer.from('414243', 'hex'); + * const buf3 = Buffer.from('ABCD'); + * + * console.log(buf1.equals(buf2)); + * // Prints: true + * console.log(buf1.equals(buf3)); + * // Prints: false + * ``` + * @since v0.11.13 + * @param otherBuffer A `Buffer` or {@link Uint8Array} with which to compare `buf`. + */ + equals(otherBuffer: Uint8Array): boolean; + /** + * Compares `buf` with `target` and returns a number indicating whether `buf`comes before, after, or is the same as `target` in sort order. + * Comparison is based on the actual sequence of bytes in each `Buffer`. + * + * * `0` is returned if `target` is the same as `buf` + * * `1` is returned if `target` should come _before_`buf` when sorted. + * * `-1` is returned if `target` should come _after_`buf` when sorted. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf1 = Buffer.from('ABC'); + * const buf2 = Buffer.from('BCD'); + * const buf3 = Buffer.from('ABCD'); + * + * console.log(buf1.compare(buf1)); + * // Prints: 0 + * console.log(buf1.compare(buf2)); + * // Prints: -1 + * console.log(buf1.compare(buf3)); + * // Prints: -1 + * console.log(buf2.compare(buf1)); + * // Prints: 1 + * console.log(buf2.compare(buf3)); + * // Prints: 1 + * console.log([buf1, buf2, buf3].sort(Buffer.compare)); + * // Prints: [ , , ] + * // (This result is equal to: [buf1, buf3, buf2].) + * ``` + * + * The optional `targetStart`, `targetEnd`, `sourceStart`, and `sourceEnd`arguments can be used to limit the comparison to specific ranges within `target`and `buf` respectively. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf1 = Buffer.from([1, 2, 3, 4, 5, 6, 7, 8, 9]); + * const buf2 = Buffer.from([5, 6, 7, 8, 9, 1, 2, 3, 4]); + * + * console.log(buf1.compare(buf2, 5, 9, 0, 4)); + * // Prints: 0 + * console.log(buf1.compare(buf2, 0, 6, 4)); + * // Prints: -1 + * console.log(buf1.compare(buf2, 5, 6, 5)); + * // Prints: 1 + * ``` + * + * `ERR_OUT_OF_RANGE` is thrown if `targetStart < 0`, `sourceStart < 0`,`targetEnd > target.byteLength`, or `sourceEnd > source.byteLength`. + * @since v0.11.13 + * @param target A `Buffer` or {@link Uint8Array} with which to compare `buf`. + * @param [targetStart=0] The offset within `target` at which to begin comparison. + * @param [targetEnd=target.length] The offset within `target` at which to end comparison (not inclusive). + * @param [sourceStart=0] The offset within `buf` at which to begin comparison. + * @param [sourceEnd=buf.length] The offset within `buf` at which to end comparison (not inclusive). + */ + compare(target: Uint8Array, targetStart?: number, targetEnd?: number, sourceStart?: number, sourceEnd?: number): -1 | 0 | 1; + /** + * Copies data from a region of `buf` to a region in `target`, even if the `target`memory region overlaps with `buf`. + * + * [`TypedArray.prototype.set()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/TypedArray/set) performs the same operation, and is available + * for all TypedArrays, including Node.js `Buffer`s, although it takes + * different function arguments. 
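+ *
+ * A brief sketch showing the return value (the number of bytes actually copied,
+ * which is capped by the space available in the target):
+ *
+ * ```js
+ * import { Buffer } from 'buffer';
+ *
+ * const src = Buffer.from('hello');
+ * const dst = Buffer.alloc(3);
+ *
+ * console.log(src.copy(dst));  // 3 – limited by dst.length
+ * console.log(dst.toString()); // 'hel'
+ * ```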
+ * + * ```js + * import { Buffer } from 'buffer'; + * + * // Create two `Buffer` instances. + * const buf1 = Buffer.allocUnsafe(26); + * const buf2 = Buffer.allocUnsafe(26).fill('!'); + * + * for (let i = 0; i < 26; i++) { + * // 97 is the decimal ASCII value for 'a'. + * buf1[i] = i + 97; + * } + * + * // Copy `buf1` bytes 16 through 19 into `buf2` starting at byte 8 of `buf2`. + * buf1.copy(buf2, 8, 16, 20); + * // This is equivalent to: + * // buf2.set(buf1.subarray(16, 20), 8); + * + * console.log(buf2.toString('ascii', 0, 25)); + * // Prints: !!!!!!!!qrst!!!!!!!!!!!!! + * ``` + * + * ```js + * import { Buffer } from 'buffer'; + * + * // Create a `Buffer` and copy data from one region to an overlapping region + * // within the same `Buffer`. + * + * const buf = Buffer.allocUnsafe(26); + * + * for (let i = 0; i < 26; i++) { + * // 97 is the decimal ASCII value for 'a'. + * buf[i] = i + 97; + * } + * + * buf.copy(buf, 0, 4, 10); + * + * console.log(buf.toString()); + * // Prints: efghijghijklmnopqrstuvwxyz + * ``` + * @since v0.1.90 + * @param target A `Buffer` or {@link Uint8Array} to copy into. + * @param [targetStart=0] The offset within `target` at which to begin writing. + * @param [sourceStart=0] The offset within `buf` from which to begin copying. + * @param [sourceEnd=buf.length] The offset within `buf` at which to stop copying (not inclusive). + * @return The number of bytes copied. + */ + copy(target: Uint8Array, targetStart?: number, sourceStart?: number, sourceEnd?: number): number; + /** + * Returns a new `Buffer` that references the same memory as the original, but + * offset and cropped by the `start` and `end` indices. + * + * This method is not compatible with the `Uint8Array.prototype.slice()`, + * which is a superclass of `Buffer`. To copy the slice, use`Uint8Array.prototype.slice()`. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.from('buffer'); + * + * const copiedBuf = Uint8Array.prototype.slice.call(buf); + * copiedBuf[0]++; + * console.log(copiedBuf.toString()); + * // Prints: cuffer + * + * console.log(buf.toString()); + * // Prints: buffer + * + * // With buf.slice(), the original buffer is modified. + * const notReallyCopiedBuf = buf.slice(); + * notReallyCopiedBuf[0]++; + * console.log(notReallyCopiedBuf.toString()); + * // Prints: cuffer + * console.log(buf.toString()); + * // Also prints: cuffer (!) + * ``` + * @since v0.3.0 + * @deprecated Use `subarray` instead. + * @param [start=0] Where the new `Buffer` will start. + * @param [end=buf.length] Where the new `Buffer` will end (not inclusive). + */ + slice(start?: number, end?: number): Buffer; + /** + * Returns a new `Buffer` that references the same memory as the original, but + * offset and cropped by the `start` and `end` indices. + * + * Specifying `end` greater than `buf.length` will return the same result as + * that of `end` equal to `buf.length`. + * + * This method is inherited from [`TypedArray.prototype.subarray()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/TypedArray/subarray). + * + * Modifying the new `Buffer` slice will modify the memory in the original `Buffer`because the allocated memory of the two objects overlap. + * + * ```js + * import { Buffer } from 'buffer'; + * + * // Create a `Buffer` with the ASCII alphabet, take a slice, and modify one byte + * // from the original `Buffer`. + * + * const buf1 = Buffer.allocUnsafe(26); + * + * for (let i = 0; i < 26; i++) { + * // 97 is the decimal ASCII value for 'a'. 
+ * buf1[i] = i + 97; + * } + * + * const buf2 = buf1.subarray(0, 3); + * + * console.log(buf2.toString('ascii', 0, buf2.length)); + * // Prints: abc + * + * buf1[0] = 33; + * + * console.log(buf2.toString('ascii', 0, buf2.length)); + * // Prints: !bc + * ``` + * + * Specifying negative indexes causes the slice to be generated relative to the + * end of `buf` rather than the beginning. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.from('buffer'); + * + * console.log(buf.subarray(-6, -1).toString()); + * // Prints: buffe + * // (Equivalent to buf.subarray(0, 5).) + * + * console.log(buf.subarray(-6, -2).toString()); + * // Prints: buff + * // (Equivalent to buf.subarray(0, 4).) + * + * console.log(buf.subarray(-5, -2).toString()); + * // Prints: uff + * // (Equivalent to buf.subarray(1, 4).) + * ``` + * @since v3.0.0 + * @param [start=0] Where the new `Buffer` will start. + * @param [end=buf.length] Where the new `Buffer` will end (not inclusive). + */ + subarray(start?: number, end?: number): Buffer; + /** + * Writes `value` to `buf` at the specified `offset` as big-endian. + * + * `value` is interpreted and written as a two's complement signed integer. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.allocUnsafe(8); + * + * buf.writeBigInt64BE(0x0102030405060708n, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v12.0.0, v10.20.0 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy: `0 <= offset <= buf.length - 8`. + * @return `offset` plus the number of bytes written. + */ + writeBigInt64BE(value: bigint, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as little-endian. + * + * `value` is interpreted and written as a two's complement signed integer. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.allocUnsafe(8); + * + * buf.writeBigInt64LE(0x0102030405060708n, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v12.0.0, v10.20.0 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy: `0 <= offset <= buf.length - 8`. + * @return `offset` plus the number of bytes written. + */ + writeBigInt64LE(value: bigint, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as big-endian. + * + * This function is also available under the `writeBigUint64BE` alias. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.allocUnsafe(8); + * + * buf.writeBigUInt64BE(0xdecafafecacefaden, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v12.0.0, v10.20.0 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy: `0 <= offset <= buf.length - 8`. + * @return `offset` plus the number of bytes written. + */ + writeBigUInt64BE(value: bigint, offset?: number): number; + /** + * @alias Buffer.writeBigUInt64BE + * @since v14.10.0, v12.19.0 + */ + writeBigUint64BE(value: bigint, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as little-endian + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.allocUnsafe(8); + * + * buf.writeBigUInt64LE(0xdecafafecacefaden, 0); + * + * console.log(buf); + * // Prints: + * ``` + * + * This function is also available under the `writeBigUint64LE` alias. 
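+ *
+ * A round-trip sketch (pairing this writer with `readBigUInt64LE`, documented
+ * further below):
+ *
+ * ```js
+ * import { Buffer } from 'buffer';
+ *
+ * const buf = Buffer.allocUnsafe(8);
+ * buf.writeBigUInt64LE(0xdecafafecacefaden, 0);
+ * console.log(buf.readBigUInt64LE(0) === 0xdecafafecacefaden); // true
+ * ```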
+ * @since v12.0.0, v10.20.0 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy: `0 <= offset <= buf.length - 8`. + * @return `offset` plus the number of bytes written. + */ + writeBigUInt64LE(value: bigint, offset?: number): number; + /** + * @alias Buffer.writeBigUInt64LE + * @since v14.10.0, v12.19.0 + */ + writeBigUint64LE(value: bigint, offset?: number): number; + /** + * Writes `byteLength` bytes of `value` to `buf` at the specified `offset`as little-endian. Supports up to 48 bits of accuracy. Behavior is undefined + * when `value` is anything other than an unsigned integer. + * + * This function is also available under the `writeUintLE` alias. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.allocUnsafe(6); + * + * buf.writeUIntLE(0x1234567890ab, 0, 6); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.5 + * @param value Number to be written to `buf`. + * @param offset Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - byteLength`. + * @param byteLength Number of bytes to write. Must satisfy `0 < byteLength <= 6`. + * @return `offset` plus the number of bytes written. + */ + writeUIntLE(value: number, offset: number, byteLength: number): number; + /** + * @alias Buffer.writeUIntLE + * @since v14.9.0, v12.19.0 + */ + writeUintLE(value: number, offset: number, byteLength: number): number; + /** + * Writes `byteLength` bytes of `value` to `buf` at the specified `offset`as big-endian. Supports up to 48 bits of accuracy. Behavior is undefined + * when `value` is anything other than an unsigned integer. + * + * This function is also available under the `writeUintBE` alias. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.allocUnsafe(6); + * + * buf.writeUIntBE(0x1234567890ab, 0, 6); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.5 + * @param value Number to be written to `buf`. + * @param offset Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - byteLength`. + * @param byteLength Number of bytes to write. Must satisfy `0 < byteLength <= 6`. + * @return `offset` plus the number of bytes written. + */ + writeUIntBE(value: number, offset: number, byteLength: number): number; + /** + * @alias Buffer.writeUIntBE + * @since v14.9.0, v12.19.0 + */ + writeUintBE(value: number, offset: number, byteLength: number): number; + /** + * Writes `byteLength` bytes of `value` to `buf` at the specified `offset`as little-endian. Supports up to 48 bits of accuracy. Behavior is undefined + * when `value` is anything other than a signed integer. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.allocUnsafe(6); + * + * buf.writeIntLE(0x1234567890ab, 0, 6); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.11.15 + * @param value Number to be written to `buf`. + * @param offset Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - byteLength`. + * @param byteLength Number of bytes to write. Must satisfy `0 < byteLength <= 6`. + * @return `offset` plus the number of bytes written. + */ + writeIntLE(value: number, offset: number, byteLength: number): number; + /** + * Writes `byteLength` bytes of `value` to `buf` at the specified `offset`as big-endian. Supports up to 48 bits of accuracy. Behavior is undefined when`value` is anything other than a + * signed integer. 
+ * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.allocUnsafe(6); + * + * buf.writeIntBE(0x1234567890ab, 0, 6); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.11.15 + * @param value Number to be written to `buf`. + * @param offset Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - byteLength`. + * @param byteLength Number of bytes to write. Must satisfy `0 < byteLength <= 6`. + * @return `offset` plus the number of bytes written. + */ + writeIntBE(value: number, offset: number, byteLength: number): number; + /** + * Reads an unsigned, big-endian 64-bit integer from `buf` at the specified`offset`. + * + * This function is also available under the `readBigUint64BE` alias. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.from([0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff]); + * + * console.log(buf.readBigUInt64BE(0)); + * // Prints: 4294967295n + * ``` + * @since v12.0.0, v10.20.0 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy: `0 <= offset <= buf.length - 8`. + */ + readBigUInt64BE(offset?: number): bigint; + /** + * @alias Buffer.readBigUInt64BE + * @since v14.10.0, v12.19.0 + */ + readBigUint64BE(offset?: number): bigint; + /** + * Reads an unsigned, little-endian 64-bit integer from `buf` at the specified`offset`. + * + * This function is also available under the `readBigUint64LE` alias. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.from([0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff]); + * + * console.log(buf.readBigUInt64LE(0)); + * // Prints: 18446744069414584320n + * ``` + * @since v12.0.0, v10.20.0 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy: `0 <= offset <= buf.length - 8`. + */ + readBigUInt64LE(offset?: number): bigint; + /** + * @alias Buffer.readBigUInt64LE + * @since v14.10.0, v12.19.0 + */ + readBigUint64LE(offset?: number): bigint; + /** + * Reads a signed, big-endian 64-bit integer from `buf` at the specified `offset`. + * + * Integers read from a `Buffer` are interpreted as two's complement signed + * values. + * @since v12.0.0, v10.20.0 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy: `0 <= offset <= buf.length - 8`. + */ + readBigInt64BE(offset?: number): bigint; + /** + * Reads a signed, little-endian 64-bit integer from `buf` at the specified`offset`. + * + * Integers read from a `Buffer` are interpreted as two's complement signed + * values. + * @since v12.0.0, v10.20.0 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy: `0 <= offset <= buf.length - 8`. + */ + readBigInt64LE(offset?: number): bigint; + /** + * Reads `byteLength` number of bytes from `buf` at the specified `offset`and interprets the result as an unsigned, little-endian integer supporting + * up to 48 bits of accuracy. + * + * This function is also available under the `readUintLE` alias. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.from([0x12, 0x34, 0x56, 0x78, 0x90, 0xab]); + * + * console.log(buf.readUIntLE(0, 6).toString(16)); + * // Prints: ab9078563412 + * ``` + * @since v0.11.15 + * @param offset Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - byteLength`. + * @param byteLength Number of bytes to read. Must satisfy `0 < byteLength <= 6`. 
+ */ + readUIntLE(offset: number, byteLength: number): number; + /** + * @alias Buffer.readUIntLE + * @since v14.9.0, v12.19.0 + */ + readUintLE(offset: number, byteLength: number): number; + /** + * Reads `byteLength` number of bytes from `buf` at the specified `offset`and interprets the result as an unsigned big-endian integer supporting + * up to 48 bits of accuracy. + * + * This function is also available under the `readUintBE` alias. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.from([0x12, 0x34, 0x56, 0x78, 0x90, 0xab]); + * + * console.log(buf.readUIntBE(0, 6).toString(16)); + * // Prints: 1234567890ab + * console.log(buf.readUIntBE(1, 6).toString(16)); + * // Throws ERR_OUT_OF_RANGE. + * ``` + * @since v0.11.15 + * @param offset Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - byteLength`. + * @param byteLength Number of bytes to read. Must satisfy `0 < byteLength <= 6`. + */ + readUIntBE(offset: number, byteLength: number): number; + /** + * @alias Buffer.readUIntBE + * @since v14.9.0, v12.19.0 + */ + readUintBE(offset: number, byteLength: number): number; + /** + * Reads `byteLength` number of bytes from `buf` at the specified `offset`and interprets the result as a little-endian, two's complement signed value + * supporting up to 48 bits of accuracy. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.from([0x12, 0x34, 0x56, 0x78, 0x90, 0xab]); + * + * console.log(buf.readIntLE(0, 6).toString(16)); + * // Prints: -546f87a9cbee + * ``` + * @since v0.11.15 + * @param offset Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - byteLength`. + * @param byteLength Number of bytes to read. Must satisfy `0 < byteLength <= 6`. + */ + readIntLE(offset: number, byteLength: number): number; + /** + * Reads `byteLength` number of bytes from `buf` at the specified `offset`and interprets the result as a big-endian, two's complement signed value + * supporting up to 48 bits of accuracy. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.from([0x12, 0x34, 0x56, 0x78, 0x90, 0xab]); + * + * console.log(buf.readIntBE(0, 6).toString(16)); + * // Prints: 1234567890ab + * console.log(buf.readIntBE(1, 6).toString(16)); + * // Throws ERR_OUT_OF_RANGE. + * console.log(buf.readIntBE(1, 0).toString(16)); + * // Throws ERR_OUT_OF_RANGE. + * ``` + * @since v0.11.15 + * @param offset Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - byteLength`. + * @param byteLength Number of bytes to read. Must satisfy `0 < byteLength <= 6`. + */ + readIntBE(offset: number, byteLength: number): number; + /** + * Reads an unsigned 8-bit integer from `buf` at the specified `offset`. + * + * This function is also available under the `readUint8` alias. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.from([1, -2]); + * + * console.log(buf.readUInt8(0)); + * // Prints: 1 + * console.log(buf.readUInt8(1)); + * // Prints: 254 + * console.log(buf.readUInt8(2)); + * // Throws ERR_OUT_OF_RANGE. + * ``` + * @since v0.5.0 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 1`. + */ + readUInt8(offset?: number): number; + /** + * @alias Buffer.readUInt8 + * @since v14.9.0, v12.19.0 + */ + readUint8(offset?: number): number; + /** + * Reads an unsigned, little-endian 16-bit integer from `buf` at the specified`offset`. 
+ * + * This function is also available under the `readUint16LE` alias. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.from([0x12, 0x34, 0x56]); + * + * console.log(buf.readUInt16LE(0).toString(16)); + * // Prints: 3412 + * console.log(buf.readUInt16LE(1).toString(16)); + * // Prints: 5634 + * console.log(buf.readUInt16LE(2).toString(16)); + * // Throws ERR_OUT_OF_RANGE. + * ``` + * @since v0.5.5 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 2`. + */ + readUInt16LE(offset?: number): number; + /** + * @alias Buffer.readUInt16LE + * @since v14.9.0, v12.19.0 + */ + readUint16LE(offset?: number): number; + /** + * Reads an unsigned, big-endian 16-bit integer from `buf` at the specified`offset`. + * + * This function is also available under the `readUint16BE` alias. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.from([0x12, 0x34, 0x56]); + * + * console.log(buf.readUInt16BE(0).toString(16)); + * // Prints: 1234 + * console.log(buf.readUInt16BE(1).toString(16)); + * // Prints: 3456 + * ``` + * @since v0.5.5 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 2`. + */ + readUInt16BE(offset?: number): number; + /** + * @alias Buffer.readUInt16BE + * @since v14.9.0, v12.19.0 + */ + readUint16BE(offset?: number): number; + /** + * Reads an unsigned, little-endian 32-bit integer from `buf` at the specified`offset`. + * + * This function is also available under the `readUint32LE` alias. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.from([0x12, 0x34, 0x56, 0x78]); + * + * console.log(buf.readUInt32LE(0).toString(16)); + * // Prints: 78563412 + * console.log(buf.readUInt32LE(1).toString(16)); + * // Throws ERR_OUT_OF_RANGE. + * ``` + * @since v0.5.5 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 4`. + */ + readUInt32LE(offset?: number): number; + /** + * @alias Buffer.readUInt32LE + * @since v14.9.0, v12.19.0 + */ + readUint32LE(offset?: number): number; + /** + * Reads an unsigned, big-endian 32-bit integer from `buf` at the specified`offset`. + * + * This function is also available under the `readUint32BE` alias. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.from([0x12, 0x34, 0x56, 0x78]); + * + * console.log(buf.readUInt32BE(0).toString(16)); + * // Prints: 12345678 + * ``` + * @since v0.5.5 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 4`. + */ + readUInt32BE(offset?: number): number; + /** + * @alias Buffer.readUInt32BE + * @since v14.9.0, v12.19.0 + */ + readUint32BE(offset?: number): number; + /** + * Reads a signed 8-bit integer from `buf` at the specified `offset`. + * + * Integers read from a `Buffer` are interpreted as two's complement signed values. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.from([-1, 5]); + * + * console.log(buf.readInt8(0)); + * // Prints: -1 + * console.log(buf.readInt8(1)); + * // Prints: 5 + * console.log(buf.readInt8(2)); + * // Throws ERR_OUT_OF_RANGE. + * ``` + * @since v0.5.0 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 1`. + */ + readInt8(offset?: number): number; + /** + * Reads a signed, little-endian 16-bit integer from `buf` at the specified`offset`. 
+ * + * Integers read from a `Buffer` are interpreted as two's complement signed values. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.from([0, 5]); + * + * console.log(buf.readInt16LE(0)); + * // Prints: 1280 + * console.log(buf.readInt16LE(1)); + * // Throws ERR_OUT_OF_RANGE. + * ``` + * @since v0.5.5 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 2`. + */ + readInt16LE(offset?: number): number; + /** + * Reads a signed, big-endian 16-bit integer from `buf` at the specified `offset`. + * + * Integers read from a `Buffer` are interpreted as two's complement signed values. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.from([0, 5]); + * + * console.log(buf.readInt16BE(0)); + * // Prints: 5 + * ``` + * @since v0.5.5 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 2`. + */ + readInt16BE(offset?: number): number; + /** + * Reads a signed, little-endian 32-bit integer from `buf` at the specified`offset`. + * + * Integers read from a `Buffer` are interpreted as two's complement signed values. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.from([0, 0, 0, 5]); + * + * console.log(buf.readInt32LE(0)); + * // Prints: 83886080 + * console.log(buf.readInt32LE(1)); + * // Throws ERR_OUT_OF_RANGE. + * ``` + * @since v0.5.5 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 4`. + */ + readInt32LE(offset?: number): number; + /** + * Reads a signed, big-endian 32-bit integer from `buf` at the specified `offset`. + * + * Integers read from a `Buffer` are interpreted as two's complement signed values. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.from([0, 0, 0, 5]); + * + * console.log(buf.readInt32BE(0)); + * // Prints: 5 + * ``` + * @since v0.5.5 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 4`. + */ + readInt32BE(offset?: number): number; + /** + * Reads a 32-bit, little-endian float from `buf` at the specified `offset`. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.from([1, 2, 3, 4]); + * + * console.log(buf.readFloatLE(0)); + * // Prints: 1.539989614439558e-36 + * console.log(buf.readFloatLE(1)); + * // Throws ERR_OUT_OF_RANGE. + * ``` + * @since v0.11.15 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 4`. + */ + readFloatLE(offset?: number): number; + /** + * Reads a 32-bit, big-endian float from `buf` at the specified `offset`. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.from([1, 2, 3, 4]); + * + * console.log(buf.readFloatBE(0)); + * // Prints: 2.387939260590663e-38 + * ``` + * @since v0.11.15 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 4`. + */ + readFloatBE(offset?: number): number; + /** + * Reads a 64-bit, little-endian double from `buf` at the specified `offset`. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.from([1, 2, 3, 4, 5, 6, 7, 8]); + * + * console.log(buf.readDoubleLE(0)); + * // Prints: 5.447603722011605e-270 + * console.log(buf.readDoubleLE(1)); + * // Throws ERR_OUT_OF_RANGE. + * ``` + * @since v0.11.15 + * @param [offset=0] Number of bytes to skip before starting to read. 
Must satisfy `0 <= offset <= buf.length - 8`. + */ + readDoubleLE(offset?: number): number; + /** + * Reads a 64-bit, big-endian double from `buf` at the specified `offset`. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.from([1, 2, 3, 4, 5, 6, 7, 8]); + * + * console.log(buf.readDoubleBE(0)); + * // Prints: 8.20788039913184e-304 + * ``` + * @since v0.11.15 + * @param [offset=0] Number of bytes to skip before starting to read. Must satisfy `0 <= offset <= buf.length - 8`. + */ + readDoubleBE(offset?: number): number; + reverse(): this; + /** + * Interprets `buf` as an array of unsigned 16-bit integers and swaps the + * byte order _in-place_. Throws `ERR_INVALID_BUFFER_SIZE` if `buf.length` is not a multiple of 2. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf1 = Buffer.from([0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8]); + * + * console.log(buf1); + * // Prints: + * + * buf1.swap16(); + * + * console.log(buf1); + * // Prints: + * + * const buf2 = Buffer.from([0x1, 0x2, 0x3]); + * + * buf2.swap16(); + * // Throws ERR_INVALID_BUFFER_SIZE. + * ``` + * + * One convenient use of `buf.swap16()` is to perform a fast in-place conversion + * between UTF-16 little-endian and UTF-16 big-endian: + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.from('This is little-endian UTF-16', 'utf16le'); + * buf.swap16(); // Convert to big-endian UTF-16 text. + * ``` + * @since v5.10.0 + * @return A reference to `buf`. + */ + swap16(): Buffer; + /** + * Interprets `buf` as an array of unsigned 32-bit integers and swaps the + * byte order _in-place_. Throws `ERR_INVALID_BUFFER_SIZE` if `buf.length` is not a multiple of 4. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf1 = Buffer.from([0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8]); + * + * console.log(buf1); + * // Prints: + * + * buf1.swap32(); + * + * console.log(buf1); + * // Prints: + * + * const buf2 = Buffer.from([0x1, 0x2, 0x3]); + * + * buf2.swap32(); + * // Throws ERR_INVALID_BUFFER_SIZE. + * ``` + * @since v5.10.0 + * @return A reference to `buf`. + */ + swap32(): Buffer; + /** + * Interprets `buf` as an array of 64-bit numbers and swaps byte order _in-place_. + * Throws `ERR_INVALID_BUFFER_SIZE` if `buf.length` is not a multiple of 8. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf1 = Buffer.from([0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8]); + * + * console.log(buf1); + * // Prints: + * + * buf1.swap64(); + * + * console.log(buf1); + * // Prints: + * + * const buf2 = Buffer.from([0x1, 0x2, 0x3]); + * + * buf2.swap64(); + * // Throws ERR_INVALID_BUFFER_SIZE. + * ``` + * @since v6.3.0 + * @return A reference to `buf`. + */ + swap64(): Buffer; + /** + * Writes `value` to `buf` at the specified `offset`. `value` must be a + * valid unsigned 8-bit integer. Behavior is undefined when `value` is anything + * other than an unsigned 8-bit integer. + * + * This function is also available under the `writeUint8` alias. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.allocUnsafe(4); + * + * buf.writeUInt8(0x3, 0); + * buf.writeUInt8(0x4, 1); + * buf.writeUInt8(0x23, 2); + * buf.writeUInt8(0x42, 3); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.0 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 1`. + * @return `offset` plus the number of bytes written. 
+ */ + writeUInt8(value: number, offset?: number): number; + /** + * @alias Buffer.writeUInt8 + * @since v14.9.0, v12.19.0 + */ + writeUint8(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as little-endian. The `value`must be a valid unsigned 16-bit integer. Behavior is undefined when `value` is + * anything other than an unsigned 16-bit integer. + * + * This function is also available under the `writeUint16LE` alias. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.allocUnsafe(4); + * + * buf.writeUInt16LE(0xdead, 0); + * buf.writeUInt16LE(0xbeef, 2); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.5 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 2`. + * @return `offset` plus the number of bytes written. + */ + writeUInt16LE(value: number, offset?: number): number; + /** + * @alias Buffer.writeUInt16LE + * @since v14.9.0, v12.19.0 + */ + writeUint16LE(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as big-endian. The `value`must be a valid unsigned 16-bit integer. Behavior is undefined when `value`is anything other than an + * unsigned 16-bit integer. + * + * This function is also available under the `writeUint16BE` alias. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.allocUnsafe(4); + * + * buf.writeUInt16BE(0xdead, 0); + * buf.writeUInt16BE(0xbeef, 2); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.5 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 2`. + * @return `offset` plus the number of bytes written. + */ + writeUInt16BE(value: number, offset?: number): number; + /** + * @alias Buffer.writeUInt16BE + * @since v14.9.0, v12.19.0 + */ + writeUint16BE(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as little-endian. The `value`must be a valid unsigned 32-bit integer. Behavior is undefined when `value` is + * anything other than an unsigned 32-bit integer. + * + * This function is also available under the `writeUint32LE` alias. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.allocUnsafe(4); + * + * buf.writeUInt32LE(0xfeedface, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.5 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 4`. + * @return `offset` plus the number of bytes written. + */ + writeUInt32LE(value: number, offset?: number): number; + /** + * @alias Buffer.writeUInt32LE + * @since v14.9.0, v12.19.0 + */ + writeUint32LE(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as big-endian. The `value`must be a valid unsigned 32-bit integer. Behavior is undefined when `value`is anything other than an + * unsigned 32-bit integer. + * + * This function is also available under the `writeUint32BE` alias. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.allocUnsafe(4); + * + * buf.writeUInt32BE(0xfeedface, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.5 + * @param value Number to be written to `buf`. 
+ * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 4`. + * @return `offset` plus the number of bytes written. + */ + writeUInt32BE(value: number, offset?: number): number; + /** + * @alias Buffer.writeUInt32BE + * @since v14.9.0, v12.19.0 + */ + writeUint32BE(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset`. `value` must be a valid + * signed 8-bit integer. Behavior is undefined when `value` is anything other than + * a signed 8-bit integer. + * + * `value` is interpreted and written as a two's complement signed integer. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.allocUnsafe(2); + * + * buf.writeInt8(2, 0); + * buf.writeInt8(-2, 1); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.0 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 1`. + * @return `offset` plus the number of bytes written. + */ + writeInt8(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as little-endian. The `value`must be a valid signed 16-bit integer. Behavior is undefined when `value` is + * anything other than a signed 16-bit integer. + * + * The `value` is interpreted and written as a two's complement signed integer. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.allocUnsafe(2); + * + * buf.writeInt16LE(0x0304, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.5 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 2`. + * @return `offset` plus the number of bytes written. + */ + writeInt16LE(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as big-endian. The `value`must be a valid signed 16-bit integer. Behavior is undefined when `value` is + * anything other than a signed 16-bit integer. + * + * The `value` is interpreted and written as a two's complement signed integer. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.allocUnsafe(2); + * + * buf.writeInt16BE(0x0102, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.5 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 2`. + * @return `offset` plus the number of bytes written. + */ + writeInt16BE(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as little-endian. The `value`must be a valid signed 32-bit integer. Behavior is undefined when `value` is + * anything other than a signed 32-bit integer. + * + * The `value` is interpreted and written as a two's complement signed integer. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.allocUnsafe(4); + * + * buf.writeInt32LE(0x05060708, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.5 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 4`. + * @return `offset` plus the number of bytes written. + */ + writeInt32LE(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as big-endian. 
The `value`must be a valid signed 32-bit integer. Behavior is undefined when `value` is + * anything other than a signed 32-bit integer. + * + * The `value` is interpreted and written as a two's complement signed integer. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.allocUnsafe(4); + * + * buf.writeInt32BE(0x01020304, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.5.5 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 4`. + * @return `offset` plus the number of bytes written. + */ + writeInt32BE(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as little-endian. Behavior is + * undefined when `value` is anything other than a JavaScript number. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.allocUnsafe(4); + * + * buf.writeFloatLE(0xcafebabe, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.11.15 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 4`. + * @return `offset` plus the number of bytes written. + */ + writeFloatLE(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as big-endian. Behavior is + * undefined when `value` is anything other than a JavaScript number. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.allocUnsafe(4); + * + * buf.writeFloatBE(0xcafebabe, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.11.15 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 4`. + * @return `offset` plus the number of bytes written. + */ + writeFloatBE(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as little-endian. The `value`must be a JavaScript number. Behavior is undefined when `value` is anything + * other than a JavaScript number. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.allocUnsafe(8); + * + * buf.writeDoubleLE(123.456, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.11.15 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 8`. + * @return `offset` plus the number of bytes written. + */ + writeDoubleLE(value: number, offset?: number): number; + /** + * Writes `value` to `buf` at the specified `offset` as big-endian. The `value`must be a JavaScript number. Behavior is undefined when `value` is anything + * other than a JavaScript number. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.allocUnsafe(8); + * + * buf.writeDoubleBE(123.456, 0); + * + * console.log(buf); + * // Prints: + * ``` + * @since v0.11.15 + * @param value Number to be written to `buf`. + * @param [offset=0] Number of bytes to skip before starting to write. Must satisfy `0 <= offset <= buf.length - 8`. + * @return `offset` plus the number of bytes written. + */ + writeDoubleBE(value: number, offset?: number): number; + /** + * Fills `buf` with the specified `value`. 
If the `offset` and `end` are not given, + * the entire `buf` will be filled: + * + * ```js + * import { Buffer } from 'buffer'; + * + * // Fill a `Buffer` with the ASCII character 'h'. + * + * const b = Buffer.allocUnsafe(50).fill('h'); + * + * console.log(b.toString()); + * // Prints: hhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhh + * ``` + * + * `value` is coerced to a `uint32` value if it is not a string, `Buffer`, or + * integer. If the resulting integer is greater than `255` (decimal), `buf` will be + * filled with `value & 255`. + * + * If the final write of a `fill()` operation falls on a multi-byte character, + * then only the bytes of that character that fit into `buf` are written: + * + * ```js + * import { Buffer } from 'buffer'; + * + * // Fill a `Buffer` with character that takes up two bytes in UTF-8. + * + * console.log(Buffer.allocUnsafe(5).fill('\u0222')); + * // Prints: + * ``` + * + * If `value` contains invalid characters, it is truncated; if no valid + * fill data remains, an exception is thrown: + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.allocUnsafe(5); + * + * console.log(buf.fill('a')); + * // Prints: + * console.log(buf.fill('aazz', 'hex')); + * // Prints: + * console.log(buf.fill('zz', 'hex')); + * // Throws an exception. + * ``` + * @since v0.5.0 + * @param value The value with which to fill `buf`. + * @param [offset=0] Number of bytes to skip before starting to fill `buf`. + * @param [end=buf.length] Where to stop filling `buf` (not inclusive). + * @param [encoding='utf8'] The encoding for `value` if `value` is a string. + * @return A reference to `buf`. + */ + fill(value: string | Uint8Array | number, offset?: number, end?: number, encoding?: BufferEncoding): this; + /** + * If `value` is: + * + * * a string, `value` is interpreted according to the character encoding in`encoding`. + * * a `Buffer` or [`Uint8Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint8Array), `value` will be used in its entirety. + * To compare a partial `Buffer`, use `buf.subarray`. + * * a number, `value` will be interpreted as an unsigned 8-bit integer + * value between `0` and `255`. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.from('this is a buffer'); + * + * console.log(buf.indexOf('this')); + * // Prints: 0 + * console.log(buf.indexOf('is')); + * // Prints: 2 + * console.log(buf.indexOf(Buffer.from('a buffer'))); + * // Prints: 8 + * console.log(buf.indexOf(97)); + * // Prints: 8 (97 is the decimal ASCII value for 'a') + * console.log(buf.indexOf(Buffer.from('a buffer example'))); + * // Prints: -1 + * console.log(buf.indexOf(Buffer.from('a buffer example').slice(0, 8))); + * // Prints: 8 + * + * const utf16Buffer = Buffer.from('\u039a\u0391\u03a3\u03a3\u0395', 'utf16le'); + * + * console.log(utf16Buffer.indexOf('\u03a3', 0, 'utf16le')); + * // Prints: 4 + * console.log(utf16Buffer.indexOf('\u03a3', -4, 'utf16le')); + * // Prints: 6 + * ``` + * + * If `value` is not a string, number, or `Buffer`, this method will throw a`TypeError`. If `value` is a number, it will be coerced to a valid byte value, + * an integer between 0 and 255. + * + * If `byteOffset` is not a number, it will be coerced to a number. If the result + * of coercion is `NaN` or `0`, then the entire buffer will be searched. This + * behavior matches [`String.prototype.indexOf()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/indexOf). 
+ * + * ```js + * import { Buffer } from 'buffer'; + * + * const b = Buffer.from('abcdef'); + * + * // Passing a value that's a number, but not a valid byte. + * // Prints: 2, equivalent to searching for 99 or 'c'. + * console.log(b.indexOf(99.9)); + * console.log(b.indexOf(256 + 99)); + * + * // Passing a byteOffset that coerces to NaN or 0. + * // Prints: 1, searching the whole buffer. + * console.log(b.indexOf('b', undefined)); + * console.log(b.indexOf('b', {})); + * console.log(b.indexOf('b', null)); + * console.log(b.indexOf('b', [])); + * ``` + * + * If `value` is an empty string or empty `Buffer` and `byteOffset` is less + * than `buf.length`, `byteOffset` will be returned. If `value` is empty and`byteOffset` is at least `buf.length`, `buf.length` will be returned. + * @since v1.5.0 + * @param value What to search for. + * @param [byteOffset=0] Where to begin searching in `buf`. If negative, then offset is calculated from the end of `buf`. + * @param [encoding='utf8'] If `value` is a string, this is the encoding used to determine the binary representation of the string that will be searched for in `buf`. + * @return The index of the first occurrence of `value` in `buf`, or `-1` if `buf` does not contain `value`. + */ + indexOf(value: string | number | Uint8Array, byteOffset?: number, encoding?: BufferEncoding): number; + /** + * Identical to `buf.indexOf()`, except the last occurrence of `value` is found + * rather than the first occurrence. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.from('this buffer is a buffer'); + * + * console.log(buf.lastIndexOf('this')); + * // Prints: 0 + * console.log(buf.lastIndexOf('buffer')); + * // Prints: 17 + * console.log(buf.lastIndexOf(Buffer.from('buffer'))); + * // Prints: 17 + * console.log(buf.lastIndexOf(97)); + * // Prints: 15 (97 is the decimal ASCII value for 'a') + * console.log(buf.lastIndexOf(Buffer.from('yolo'))); + * // Prints: -1 + * console.log(buf.lastIndexOf('buffer', 5)); + * // Prints: 5 + * console.log(buf.lastIndexOf('buffer', 4)); + * // Prints: -1 + * + * const utf16Buffer = Buffer.from('\u039a\u0391\u03a3\u03a3\u0395', 'utf16le'); + * + * console.log(utf16Buffer.lastIndexOf('\u03a3', undefined, 'utf16le')); + * // Prints: 6 + * console.log(utf16Buffer.lastIndexOf('\u03a3', -5, 'utf16le')); + * // Prints: 4 + * ``` + * + * If `value` is not a string, number, or `Buffer`, this method will throw a`TypeError`. If `value` is a number, it will be coerced to a valid byte value, + * an integer between 0 and 255. + * + * If `byteOffset` is not a number, it will be coerced to a number. Any arguments + * that coerce to `NaN`, like `{}` or `undefined`, will search the whole buffer. + * This behavior matches [`String.prototype.lastIndexOf()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/lastIndexOf). + * + * ```js + * import { Buffer } from 'buffer'; + * + * const b = Buffer.from('abcdef'); + * + * // Passing a value that's a number, but not a valid byte. + * // Prints: 2, equivalent to searching for 99 or 'c'. + * console.log(b.lastIndexOf(99.9)); + * console.log(b.lastIndexOf(256 + 99)); + * + * // Passing a byteOffset that coerces to NaN. + * // Prints: 1, searching the whole buffer. + * console.log(b.lastIndexOf('b', undefined)); + * console.log(b.lastIndexOf('b', {})); + * + * // Passing a byteOffset that coerces to 0. + * // Prints: -1, equivalent to passing 0. 
+ * console.log(b.lastIndexOf('b', null)); + * console.log(b.lastIndexOf('b', [])); + * ``` + * + * If `value` is an empty string or empty `Buffer`, `byteOffset` will be returned. + * @since v6.0.0 + * @param value What to search for. + * @param [byteOffset=buf.length - 1] Where to begin searching in `buf`. If negative, then offset is calculated from the end of `buf`. + * @param [encoding='utf8'] If `value` is a string, this is the encoding used to determine the binary representation of the string that will be searched for in `buf`. + * @return The index of the last occurrence of `value` in `buf`, or `-1` if `buf` does not contain `value`. + */ + lastIndexOf(value: string | number | Uint8Array, byteOffset?: number, encoding?: BufferEncoding): number; + /** + * Creates and returns an [iterator](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols) of `[index, byte]` pairs from the contents + * of `buf`. + * + * ```js + * import { Buffer } from 'buffer'; + * + * // Log the entire contents of a `Buffer`. + * + * const buf = Buffer.from('buffer'); + * + * for (const pair of buf.entries()) { + * console.log(pair); + * } + * // Prints: + * // [0, 98] + * // [1, 117] + * // [2, 102] + * // [3, 102] + * // [4, 101] + * // [5, 114] + * ``` + * @since v1.1.0 + */ + entries(): IterableIterator<[number, number]>; + /** + * Equivalent to `buf.indexOf() !== -1`. + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.from('this is a buffer'); + * + * console.log(buf.includes('this')); + * // Prints: true + * console.log(buf.includes('is')); + * // Prints: true + * console.log(buf.includes(Buffer.from('a buffer'))); + * // Prints: true + * console.log(buf.includes(97)); + * // Prints: true (97 is the decimal ASCII value for 'a') + * console.log(buf.includes(Buffer.from('a buffer example'))); + * // Prints: false + * console.log(buf.includes(Buffer.from('a buffer example').slice(0, 8))); + * // Prints: true + * console.log(buf.includes('this', 4)); + * // Prints: false + * ``` + * @since v5.3.0 + * @param value What to search for. + * @param [byteOffset=0] Where to begin searching in `buf`. If negative, then offset is calculated from the end of `buf`. + * @param [encoding='utf8'] If `value` is a string, this is its encoding. + * @return `true` if `value` was found in `buf`, `false` otherwise. + */ + includes(value: string | number | Buffer, byteOffset?: number, encoding?: BufferEncoding): boolean; + /** + * Creates and returns an [iterator](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols) of `buf` keys (indices). + * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.from('buffer'); + * + * for (const key of buf.keys()) { + * console.log(key); + * } + * // Prints: + * // 0 + * // 1 + * // 2 + * // 3 + * // 4 + * // 5 + * ``` + * @since v1.1.0 + */ + keys(): IterableIterator<number>; + /** + * Creates and returns an [iterator](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols) for `buf` values (bytes). This function is + * called automatically when a `Buffer` is used in a `for..of` statement.
+ * + * ```js + * import { Buffer } from 'buffer'; + * + * const buf = Buffer.from('buffer'); + * + * for (const value of buf.values()) { + * console.log(value); + * } + * // Prints: + * // 98 + * // 117 + * // 102 + * // 102 + * // 101 + * // 114 + * + * for (const value of buf) { + * console.log(value); + * } + * // Prints: + * // 98 + * // 117 + * // 102 + * // 102 + * // 101 + * // 114 + * ``` + * @since v1.1.0 + */ + values(): IterableIterator<number>; + } + var Buffer: BufferConstructor; + /** + * Decodes a string of Base64-encoded data into bytes, and encodes those bytes + * into a string using Latin-1 (ISO-8859-1). + * + * The `data` may be any JavaScript-value that can be coerced into a string. + * + * **This function is only provided for compatibility with legacy web platform APIs** + * **and should never be used in new code, because they use strings to represent** + * **binary data and predate the introduction of typed arrays in JavaScript.** + * **For code running using Node.js APIs, converting between base64-encoded strings** + * **and binary data should be performed using `Buffer.from(str, 'base64')` and`buf.toString('base64')`.** + * @since v15.13.0, v14.17.0 + * @deprecated Use `Buffer.from(data, 'base64')` instead. + * @param data The Base64-encoded input string. + */ + function atob(data: string): string; + /** + * Decodes a string into bytes using Latin-1 (ISO-8859), and encodes those bytes + * into a string using Base64. + * + * The `data` may be any JavaScript-value that can be coerced into a string. + * + * **This function is only provided for compatibility with legacy web platform APIs** + * **and should never be used in new code, because they use strings to represent** + * **binary data and predate the introduction of typed arrays in JavaScript.** + * **For code running using Node.js APIs, converting between base64-encoded strings** + * **and binary data should be performed using `Buffer.from(str, 'base64')` and`buf.toString('base64')`.** + * @since v15.13.0, v14.17.0 + * @deprecated Use `buf.toString('base64')` instead. + * @param data An ASCII (Latin1) string. + */ + function btoa(data: string): string; + + interface Blob extends __Blob {} + /** + * `Blob` class is a global reference for `require('node:buffer').Blob` + * https://nodejs.org/api/buffer.html#class-blob + * @since v18.0.0 + */ + var Blob: typeof globalThis extends { + onmessage: any; + Blob: infer T; + } + ? T + : typeof NodeBlob; + } +} +declare module 'node:buffer' { + export * from 'buffer'; +} diff --git a/node_modules/@types/node/ts4.8/child_process.d.ts b/node_modules/@types/node/ts4.8/child_process.d.ts new file mode 100755 index 0000000..79c7290 --- /dev/null +++ b/node_modules/@types/node/ts4.8/child_process.d.ts @@ -0,0 +1,1369 @@ +/** + * The `child_process` module provides the ability to spawn subprocesses in + * a manner that is similar, but not identical, to [`popen(3)`](http://man7.org/linux/man-pages/man3/popen.3.html).
This capability + * is primarily provided by the {@link spawn} function: + * + * ```js + * const { spawn } = require('child_process'); + * const ls = spawn('ls', ['-lh', '/usr']); + * + * ls.stdout.on('data', (data) => { + * console.log(`stdout: ${data}`); + * }); + * + * ls.stderr.on('data', (data) => { + * console.error(`stderr: ${data}`); + * }); + * + * ls.on('close', (code) => { + * console.log(`child process exited with code ${code}`); + * }); + * ``` + * + * By default, pipes for `stdin`, `stdout`, and `stderr` are established between + * the parent Node.js process and the spawned subprocess. These pipes have + * limited (and platform-specific) capacity. If the subprocess writes to + * stdout in excess of that limit without the output being captured, the + * subprocess blocks waiting for the pipe buffer to accept more data. This is + * identical to the behavior of pipes in the shell. Use the `{ stdio: 'ignore' }`option if the output will not be consumed. + * + * The command lookup is performed using the `options.env.PATH` environment + * variable if `env` is in the `options` object. Otherwise, `process.env.PATH` is + * used. If `options.env` is set without `PATH`, lookup on Unix is performed + * on a default search path search of `/usr/bin:/bin` (see your operating system's + * manual for execvpe/execvp), on Windows the current processes environment + * variable `PATH` is used. + * + * On Windows, environment variables are case-insensitive. Node.js + * lexicographically sorts the `env` keys and uses the first one that + * case-insensitively matches. Only first (in lexicographic order) entry will be + * passed to the subprocess. This might lead to issues on Windows when passing + * objects to the `env` option that have multiple variants of the same key, such as`PATH` and `Path`. + * + * The {@link spawn} method spawns the child process asynchronously, + * without blocking the Node.js event loop. The {@link spawnSync} function provides equivalent functionality in a synchronous manner that blocks + * the event loop until the spawned process either exits or is terminated. + * + * For convenience, the `child_process` module provides a handful of synchronous + * and asynchronous alternatives to {@link spawn} and {@link spawnSync}. Each of these alternatives are implemented on + * top of {@link spawn} or {@link spawnSync}. + * + * * {@link exec}: spawns a shell and runs a command within that + * shell, passing the `stdout` and `stderr` to a callback function when + * complete. + * * {@link execFile}: similar to {@link exec} except + * that it spawns the command directly without first spawning a shell by + * default. + * * {@link fork}: spawns a new Node.js process and invokes a + * specified module with an IPC communication channel established that allows + * sending messages between parent and child. + * * {@link execSync}: a synchronous version of {@link exec} that will block the Node.js event loop. + * * {@link execFileSync}: a synchronous version of {@link execFile} that will block the Node.js event loop. + * + * For certain use cases, such as automating shell scripts, the `synchronous counterparts` may be more convenient. In many cases, however, + * the synchronous methods can have significant impact on performance due to + * stalling the event loop while spawned processes complete. 
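+ *
+ * As a brief, minimal sketch of the synchronous form (reusing the illustrative `ls` command
+ * from the example above), `execSync` blocks the event loop until the command exits,
+ * returns the captured `stdout`, and throws if the process exits with a non-zero code:
+ *
+ * ```js
+ * const { execSync } = require('child_process');
+ *
+ * // Returns stdout as a string because an encoding is specified;
+ * // without it, a Buffer is returned instead.
+ * const listing = execSync('ls -lh /usr', { encoding: 'utf8' });
+ * console.log(listing);
+ * ```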
+ * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/child_process.js) + */ +declare module 'child_process' { + import { ObjectEncodingOptions } from 'node:fs'; + import { EventEmitter, Abortable } from 'node:events'; + import * as net from 'node:net'; + import { Writable, Readable, Stream, Pipe } from 'node:stream'; + import { URL } from 'node:url'; + type Serializable = string | object | number | boolean | bigint; + type SendHandle = net.Socket | net.Server; + /** + * Instances of the `ChildProcess` represent spawned child processes. + * + * Instances of `ChildProcess` are not intended to be created directly. Rather, + * use the {@link spawn}, {@link exec},{@link execFile}, or {@link fork} methods to create + * instances of `ChildProcess`. + * @since v2.2.0 + */ + class ChildProcess extends EventEmitter { + /** + * A `Writable Stream` that represents the child process's `stdin`. + * + * If a child process waits to read all of its input, the child will not continue + * until this stream has been closed via `end()`. + * + * If the child was spawned with `stdio[0]` set to anything other than `'pipe'`, + * then this will be `null`. + * + * `subprocess.stdin` is an alias for `subprocess.stdio[0]`. Both properties will + * refer to the same value. + * + * The `subprocess.stdin` property can be `undefined` if the child process could + * not be successfully spawned. + * @since v0.1.90 + */ + stdin: Writable | null; + /** + * A `Readable Stream` that represents the child process's `stdout`. + * + * If the child was spawned with `stdio[1]` set to anything other than `'pipe'`, + * then this will be `null`. + * + * `subprocess.stdout` is an alias for `subprocess.stdio[1]`. Both properties will + * refer to the same value. + * + * ```js + * const { spawn } = require('child_process'); + * + * const subprocess = spawn('ls'); + * + * subprocess.stdout.on('data', (data) => { + * console.log(`Received chunk ${data}`); + * }); + * ``` + * + * The `subprocess.stdout` property can be `null` if the child process could + * not be successfully spawned. + * @since v0.1.90 + */ + stdout: Readable | null; + /** + * A `Readable Stream` that represents the child process's `stderr`. + * + * If the child was spawned with `stdio[2]` set to anything other than `'pipe'`, + * then this will be `null`. + * + * `subprocess.stderr` is an alias for `subprocess.stdio[2]`. Both properties will + * refer to the same value. + * + * The `subprocess.stderr` property can be `null` if the child process could + * not be successfully spawned. + * @since v0.1.90 + */ + stderr: Readable | null; + /** + * The `subprocess.channel` property is a reference to the child's IPC channel. If + * no IPC channel currently exists, this property is `undefined`. + * @since v7.1.0 + */ + readonly channel?: Pipe | null | undefined; + /** + * A sparse array of pipes to the child process, corresponding with positions in + * the `stdio` option passed to {@link spawn} that have been set + * to the value `'pipe'`. `subprocess.stdio[0]`, `subprocess.stdio[1]`, and`subprocess.stdio[2]` are also available as `subprocess.stdin`,`subprocess.stdout`, and `subprocess.stderr`, + * respectively. + * + * In the following example, only the child's fd `1` (stdout) is configured as a + * pipe, so only the parent's `subprocess.stdio[1]` is a stream, all other values + * in the array are `null`. 
+ * + * ```js + * const assert = require('assert'); + * const fs = require('fs'); + * const child_process = require('child_process'); + * + * const subprocess = child_process.spawn('ls', { + * stdio: [ + * 0, // Use parent's stdin for child. + * 'pipe', // Pipe child's stdout to parent. + * fs.openSync('err.out', 'w'), // Direct child's stderr to a file. + * ] + * }); + * + * assert.strictEqual(subprocess.stdio[0], null); + * assert.strictEqual(subprocess.stdio[0], subprocess.stdin); + * + * assert(subprocess.stdout); + * assert.strictEqual(subprocess.stdio[1], subprocess.stdout); + * + * assert.strictEqual(subprocess.stdio[2], null); + * assert.strictEqual(subprocess.stdio[2], subprocess.stderr); + * ``` + * + * The `subprocess.stdio` property can be `undefined` if the child process could + * not be successfully spawned. + * @since v0.7.10 + */ + readonly stdio: [ + Writable | null, + // stdin + Readable | null, + // stdout + Readable | null, + // stderr + Readable | Writable | null | undefined, + // extra + Readable | Writable | null | undefined // extra + ]; + /** + * The `subprocess.killed` property indicates whether the child process + * successfully received a signal from `subprocess.kill()`. The `killed` property + * does not indicate that the child process has been terminated. + * @since v0.5.10 + */ + readonly killed: boolean; + /** + * Returns the process identifier (PID) of the child process. If the child process + * fails to spawn due to errors, then the value is `undefined` and `error` is + * emitted. + * + * ```js + * const { spawn } = require('child_process'); + * const grep = spawn('grep', ['ssh']); + * + * console.log(`Spawned child pid: ${grep.pid}`); + * grep.stdin.end(); + * ``` + * @since v0.1.90 + */ + readonly pid?: number | undefined; + /** + * The `subprocess.connected` property indicates whether it is still possible to + * send and receive messages from a child process. When `subprocess.connected` is`false`, it is no longer possible to send or receive messages. + * @since v0.7.2 + */ + readonly connected: boolean; + /** + * The `subprocess.exitCode` property indicates the exit code of the child process. + * If the child process is still running, the field will be `null`. + */ + readonly exitCode: number | null; + /** + * The `subprocess.signalCode` property indicates the signal received by + * the child process if any, else `null`. + */ + readonly signalCode: NodeJS.Signals | null; + /** + * The `subprocess.spawnargs` property represents the full list of command-line + * arguments the child process was launched with. + */ + readonly spawnargs: string[]; + /** + * The `subprocess.spawnfile` property indicates the executable file name of + * the child process that is launched. + * + * For {@link fork}, its value will be equal to `process.execPath`. + * For {@link spawn}, its value will be the name of + * the executable file. + * For {@link exec}, its value will be the name of the shell + * in which the child process is launched. + */ + readonly spawnfile: string; + /** + * The `subprocess.kill()` method sends a signal to the child process. If no + * argument is given, the process will be sent the `'SIGTERM'` signal. See [`signal(7)`](http://man7.org/linux/man-pages/man7/signal.7.html) for a list of available signals. This function + * returns `true` if [`kill(2)`](http://man7.org/linux/man-pages/man2/kill.2.html) succeeds, and `false` otherwise. 
+ * + * ```js + * const { spawn } = require('child_process'); + * const grep = spawn('grep', ['ssh']); + * + * grep.on('close', (code, signal) => { + * console.log( + * `child process terminated due to receipt of signal ${signal}`); + * }); + * + * // Send SIGHUP to process. + * grep.kill('SIGHUP'); + * ``` + * + * The `ChildProcess` object may emit an `'error'` event if the signal + * cannot be delivered. Sending a signal to a child process that has already exited + * is not an error but may have unforeseen consequences. Specifically, if the + * process identifier (PID) has been reassigned to another process, the signal will + * be delivered to that process instead which can have unexpected results. + * + * While the function is called `kill`, the signal delivered to the child process + * may not actually terminate the process. + * + * See [`kill(2)`](http://man7.org/linux/man-pages/man2/kill.2.html) for reference. + * + * On Windows, where POSIX signals do not exist, the `signal` argument will be + * ignored, and the process will be killed forcefully and abruptly (similar to`'SIGKILL'`). + * See `Signal Events` for more details. + * + * On Linux, child processes of child processes will not be terminated + * when attempting to kill their parent. This is likely to happen when running a + * new process in a shell or with the use of the `shell` option of `ChildProcess`: + * + * ```js + * 'use strict'; + * const { spawn } = require('child_process'); + * + * const subprocess = spawn( + * 'sh', + * [ + * '-c', + * `node -e "setInterval(() => { + * console.log(process.pid, 'is alive') + * }, 500);"`, + * ], { + * stdio: ['inherit', 'inherit', 'inherit'] + * } + * ); + * + * setTimeout(() => { + * subprocess.kill(); // Does not terminate the Node.js process in the shell. + * }, 2000); + * ``` + * @since v0.1.90 + */ + kill(signal?: NodeJS.Signals | number): boolean; + /** + * When an IPC channel has been established between the parent and child ( + * i.e. when using {@link fork}), the `subprocess.send()` method can + * be used to send messages to the child process. When the child process is a + * Node.js instance, these messages can be received via the `'message'` event. + * + * The message goes through serialization and parsing. The resulting + * message might not be the same as what is originally sent. + * + * For example, in the parent script: + * + * ```js + * const cp = require('child_process'); + * const n = cp.fork(`${__dirname}/sub.js`); + * + * n.on('message', (m) => { + * console.log('PARENT got message:', m); + * }); + * + * // Causes the child to print: CHILD got message: { hello: 'world' } + * n.send({ hello: 'world' }); + * ``` + * + * And then the child script, `'sub.js'` might look like this: + * + * ```js + * process.on('message', (m) => { + * console.log('CHILD got message:', m); + * }); + * + * // Causes the parent to print: PARENT got message: { foo: 'bar', baz: null } + * process.send({ foo: 'bar', baz: NaN }); + * ``` + * + * Child Node.js processes will have a `process.send()` method of their own + * that allows the child to send messages back to the parent. + * + * There is a special case when sending a `{cmd: 'NODE_foo'}` message. Messages + * containing a `NODE_` prefix in the `cmd` property are reserved for use within + * Node.js core and will not be emitted in the child's `'message'` event. Rather, such messages are emitted using the`'internalMessage'` event and are consumed internally by Node.js. 
+ * Applications should avoid using such messages or listening for`'internalMessage'` events as it is subject to change without notice. + * + * The optional `sendHandle` argument that may be passed to `subprocess.send()` is + * for passing a TCP server or socket object to the child process. The child will + * receive the object as the second argument passed to the callback function + * registered on the `'message'` event. Any data that is received + * and buffered in the socket will not be sent to the child. + * + * The optional `callback` is a function that is invoked after the message is + * sent but before the child may have received it. The function is called with a + * single argument: `null` on success, or an `Error` object on failure. + * + * If no `callback` function is provided and the message cannot be sent, an`'error'` event will be emitted by the `ChildProcess` object. This can + * happen, for instance, when the child process has already exited. + * + * `subprocess.send()` will return `false` if the channel has closed or when the + * backlog of unsent messages exceeds a threshold that makes it unwise to send + * more. Otherwise, the method returns `true`. The `callback` function can be + * used to implement flow control. + * + * #### Example: sending a server object + * + * The `sendHandle` argument can be used, for instance, to pass the handle of + * a TCP server object to the child process as illustrated in the example below: + * + * ```js + * const subprocess = require('child_process').fork('subprocess.js'); + * + * // Open up the server object and send the handle. + * const server = require('net').createServer(); + * server.on('connection', (socket) => { + * socket.end('handled by parent'); + * }); + * server.listen(1337, () => { + * subprocess.send('server', server); + * }); + * ``` + * + * The child would then receive the server object as: + * + * ```js + * process.on('message', (m, server) => { + * if (m === 'server') { + * server.on('connection', (socket) => { + * socket.end('handled by child'); + * }); + * } + * }); + * ``` + * + * Once the server is now shared between the parent and child, some connections + * can be handled by the parent and some by the child. + * + * While the example above uses a server created using the `net` module, `dgram`module servers use exactly the same workflow with the exceptions of listening on + * a `'message'` event instead of `'connection'` and using `server.bind()` instead + * of `server.listen()`. This is, however, currently only supported on Unix + * platforms. + * + * #### Example: sending a socket object + * + * Similarly, the `sendHandler` argument can be used to pass the handle of a + * socket to the child process. The example below spawns two children that each + * handle connections with "normal" or "special" priority: + * + * ```js + * const { fork } = require('child_process'); + * const normal = fork('subprocess.js', ['normal']); + * const special = fork('subprocess.js', ['special']); + * + * // Open up the server and send sockets to child. Use pauseOnConnect to prevent + * // the sockets from being read before they are sent to the child process. + * const server = require('net').createServer({ pauseOnConnect: true }); + * server.on('connection', (socket) => { + * + * // If this is special priority... + * if (socket.remoteAddress === '74.125.127.100') { + * special.send('socket', socket); + * return; + * } + * // This is normal priority. 
+ * normal.send('socket', socket); + * }); + * server.listen(1337); + * ``` + * + * The `subprocess.js` would receive the socket handle as the second argument + * passed to the event callback function: + * + * ```js + * process.on('message', (m, socket) => { + * if (m === 'socket') { + * if (socket) { + * // Check that the client socket exists. + * // It is possible for the socket to be closed between the time it is + * // sent and the time it is received in the child process. + * socket.end(`Request handled with ${process.argv[2]} priority`); + * } + * } + * }); + * ``` + * + * Do not use `.maxConnections` on a socket that has been passed to a subprocess. + * The parent cannot track when the socket is destroyed. + * + * Any `'message'` handlers in the subprocess should verify that `socket` exists, + * as the connection may have been closed during the time it takes to send the + * connection to the child. + * @since v0.5.9 + * @param options The `options` argument, if present, is an object used to parameterize the sending of certain types of handles. `options` supports the following properties: + */ + send(message: Serializable, callback?: (error: Error | null) => void): boolean; + send(message: Serializable, sendHandle?: SendHandle, callback?: (error: Error | null) => void): boolean; + send(message: Serializable, sendHandle?: SendHandle, options?: MessageOptions, callback?: (error: Error | null) => void): boolean; + /** + * Closes the IPC channel between parent and child, allowing the child to exit + * gracefully once there are no other connections keeping it alive. After calling + * this method the `subprocess.connected` and `process.connected` properties in + * both the parent and child (respectively) will be set to `false`, and it will be + * no longer possible to pass messages between the processes. + * + * The `'disconnect'` event will be emitted when there are no messages in the + * process of being received. This will most often be triggered immediately after + * calling `subprocess.disconnect()`. + * + * When the child process is a Node.js instance (e.g. spawned using {@link fork}), the `process.disconnect()` method can be invoked + * within the child process to close the IPC channel as well. + * @since v0.7.2 + */ + disconnect(): void; + /** + * By default, the parent will wait for the detached child to exit. To prevent the + * parent from waiting for a given `subprocess` to exit, use the`subprocess.unref()` method. Doing so will cause the parent's event loop to not + * include the child in its reference count, allowing the parent to exit + * independently of the child, unless there is an established IPC channel between + * the child and the parent. + * + * ```js + * const { spawn } = require('child_process'); + * + * const subprocess = spawn(process.argv[0], ['child_program.js'], { + * detached: true, + * stdio: 'ignore' + * }); + * + * subprocess.unref(); + * ``` + * @since v0.7.10 + */ + unref(): void; + /** + * Calling `subprocess.ref()` after making a call to `subprocess.unref()` will + * restore the removed reference count for the child process, forcing the parent + * to wait for the child to exit before exiting itself. + * + * ```js + * const { spawn } = require('child_process'); + * + * const subprocess = spawn(process.argv[0], ['child_program.js'], { + * detached: true, + * stdio: 'ignore' + * }); + * + * subprocess.unref(); + * subprocess.ref(); + * ``` + * @since v0.7.10 + */ + ref(): void; + /** + * events.EventEmitter + * 1. close + * 2. disconnect + * 3. 
error + * 4. exit + * 5. message + * 6. spawn + */ + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: 'close', listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + addListener(event: 'disconnect', listener: () => void): this; + addListener(event: 'error', listener: (err: Error) => void): this; + addListener(event: 'exit', listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + addListener(event: 'message', listener: (message: Serializable, sendHandle: SendHandle) => void): this; + addListener(event: 'spawn', listener: () => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: 'close', code: number | null, signal: NodeJS.Signals | null): boolean; + emit(event: 'disconnect'): boolean; + emit(event: 'error', err: Error): boolean; + emit(event: 'exit', code: number | null, signal: NodeJS.Signals | null): boolean; + emit(event: 'message', message: Serializable, sendHandle: SendHandle): boolean; + emit(event: 'spawn', listener: () => void): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: 'close', listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + on(event: 'disconnect', listener: () => void): this; + on(event: 'error', listener: (err: Error) => void): this; + on(event: 'exit', listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + on(event: 'message', listener: (message: Serializable, sendHandle: SendHandle) => void): this; + on(event: 'spawn', listener: () => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: 'close', listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + once(event: 'disconnect', listener: () => void): this; + once(event: 'error', listener: (err: Error) => void): this; + once(event: 'exit', listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + once(event: 'message', listener: (message: Serializable, sendHandle: SendHandle) => void): this; + once(event: 'spawn', listener: () => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: 'close', listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + prependListener(event: 'disconnect', listener: () => void): this; + prependListener(event: 'error', listener: (err: Error) => void): this; + prependListener(event: 'exit', listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + prependListener(event: 'message', listener: (message: Serializable, sendHandle: SendHandle) => void): this; + prependListener(event: 'spawn', listener: () => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: 'close', listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + prependOnceListener(event: 'disconnect', listener: () => void): this; + prependOnceListener(event: 'error', listener: (err: Error) => void): this; + prependOnceListener(event: 'exit', listener: (code: number | null, signal: NodeJS.Signals | null) => void): this; + prependOnceListener(event: 'message', listener: (message: Serializable, sendHandle: SendHandle) => void): this; + prependOnceListener(event: 'spawn', listener: () => void): this; + } + // return this object when stdio option is undefined or not specified + interface ChildProcessWithoutNullStreams extends ChildProcess { + stdin: 
Writable;
+        stdout: Readable;
+        stderr: Readable;
+        readonly stdio: [
+            Writable,
+            Readable,
+            Readable,
+            // stderr
+            Readable | Writable | null | undefined,
+            // extra, no modification
+            Readable | Writable | null | undefined // extra, no modification
+        ];
+    }
+    // return this object when stdio option is a tuple of 3
+    interface ChildProcessByStdio<I extends null | Writable, O extends null | Readable, E extends null | Readable> extends ChildProcess {
+        stdin: I;
+        stdout: O;
+        stderr: E;
+        readonly stdio: [
+            I,
+            O,
+            E,
+            Readable | Writable | null | undefined,
+            // extra, no modification
+            Readable | Writable | null | undefined // extra, no modification
+        ];
+    }
+    interface MessageOptions {
+        keepOpen?: boolean | undefined;
+    }
+    type IOType = 'overlapped' | 'pipe' | 'ignore' | 'inherit';
+    type StdioOptions = IOType | Array<IOType | 'ipc' | Stream | number | null | undefined>;
+    type SerializationType = 'json' | 'advanced';
+    interface MessagingOptions extends Abortable {
+        /**
+         * Specify the kind of serialization used for sending messages between processes.
+         * @default 'json'
+         */
+        serialization?: SerializationType | undefined;
+        /**
+         * The signal value to be used when the spawned process will be killed by the abort signal.
+         * @default 'SIGTERM'
+         */
+        killSignal?: NodeJS.Signals | number | undefined;
+        /**
+         * In milliseconds the maximum amount of time the process is allowed to run.
+         */
+        timeout?: number | undefined;
+    }
+    interface ProcessEnvOptions {
+        uid?: number | undefined;
+        gid?: number | undefined;
+        cwd?: string | URL | undefined;
+        env?: NodeJS.ProcessEnv | undefined;
+    }
+    interface CommonOptions extends ProcessEnvOptions {
+        /**
+         * @default true
+         */
+        windowsHide?: boolean | undefined;
+        /**
+         * @default 0
+         */
+        timeout?: number | undefined;
+    }
+    interface CommonSpawnOptions extends CommonOptions, MessagingOptions, Abortable {
+        argv0?: string | undefined;
+        stdio?: StdioOptions | undefined;
+        shell?: boolean | string | undefined;
+        windowsVerbatimArguments?: boolean | undefined;
+    }
+    interface SpawnOptions extends CommonSpawnOptions {
+        detached?: boolean | undefined;
+    }
+    interface SpawnOptionsWithoutStdio extends SpawnOptions {
+        stdio?: StdioPipeNamed | StdioPipe[] | undefined;
+    }
+    type StdioNull = 'inherit' | 'ignore' | Stream;
+    type StdioPipeNamed = 'pipe' | 'overlapped';
+    type StdioPipe = undefined | null | StdioPipeNamed;
+    interface SpawnOptionsWithStdioTuple<Stdin extends StdioNull | StdioPipe, Stdout extends StdioNull | StdioPipe, Stderr extends StdioNull | StdioPipe> extends SpawnOptions {
+        stdio: [Stdin, Stdout, Stderr];
+    }
+    /**
+     * The `child_process.spawn()` method spawns a new process using the given`command`, with command-line arguments in `args`. If omitted, `args` defaults
+     * to an empty array.
+     *
+     * **If the `shell` option is enabled, do not pass unsanitized user input to this**
+     * **function. Any input containing shell metacharacters may be used to trigger**
+     * **arbitrary command execution.**
+     *
+     * A third argument may be used to specify additional options, with these defaults:
+     *
+     * ```js
+     * const defaults = {
+     *   cwd: undefined,
+     *   env: process.env
+     * };
+     * ```
+     *
+     * Use `cwd` to specify the working directory from which the process is spawned.
+     * If not given, the default is to inherit the current working directory. If given,
+     * but the path does not exist, the child process emits an `ENOENT` error
+     * and exits immediately. `ENOENT` is also emitted when the command
+     * does not exist.
+     *
+     * Use `env` to specify environment variables that will be visible to the new
+     * process, the default is `process.env`.
+     *
+     * `undefined` values in `env` will be ignored.
+ * + * Example of running `ls -lh /usr`, capturing `stdout`, `stderr`, and the + * exit code: + * + * ```js + * const { spawn } = require('child_process'); + * const ls = spawn('ls', ['-lh', '/usr']); + * + * ls.stdout.on('data', (data) => { + * console.log(`stdout: ${data}`); + * }); + * + * ls.stderr.on('data', (data) => { + * console.error(`stderr: ${data}`); + * }); + * + * ls.on('close', (code) => { + * console.log(`child process exited with code ${code}`); + * }); + * ``` + * + * Example: A very elaborate way to run `ps ax | grep ssh` + * + * ```js + * const { spawn } = require('child_process'); + * const ps = spawn('ps', ['ax']); + * const grep = spawn('grep', ['ssh']); + * + * ps.stdout.on('data', (data) => { + * grep.stdin.write(data); + * }); + * + * ps.stderr.on('data', (data) => { + * console.error(`ps stderr: ${data}`); + * }); + * + * ps.on('close', (code) => { + * if (code !== 0) { + * console.log(`ps process exited with code ${code}`); + * } + * grep.stdin.end(); + * }); + * + * grep.stdout.on('data', (data) => { + * console.log(data.toString()); + * }); + * + * grep.stderr.on('data', (data) => { + * console.error(`grep stderr: ${data}`); + * }); + * + * grep.on('close', (code) => { + * if (code !== 0) { + * console.log(`grep process exited with code ${code}`); + * } + * }); + * ``` + * + * Example of checking for failed `spawn`: + * + * ```js + * const { spawn } = require('child_process'); + * const subprocess = spawn('bad_command'); + * + * subprocess.on('error', (err) => { + * console.error('Failed to start subprocess.'); + * }); + * ``` + * + * Certain platforms (macOS, Linux) will use the value of `argv[0]` for the process + * title while others (Windows, SunOS) will use `command`. + * + * Node.js currently overwrites `argv[0]` with `process.execPath` on startup, so`process.argv[0]` in a Node.js child process will not match the `argv0`parameter passed to `spawn` from the parent, + * retrieve it with the`process.argv0` property instead. + * + * If the `signal` option is enabled, calling `.abort()` on the corresponding`AbortController` is similar to calling `.kill()` on the child process except + * the error passed to the callback will be an `AbortError`: + * + * ```js + * const { spawn } = require('child_process'); + * const controller = new AbortController(); + * const { signal } = controller; + * const grep = spawn('grep', ['ssh'], { signal }); + * grep.on('error', (err) => { + * // This will be called with err being an AbortError if the controller aborts + * }); + * controller.abort(); // Stops the child process + * ``` + * @since v0.1.90 + * @param command The command to run. + * @param args List of string arguments. 
+     */
+    function spawn(command: string, options?: SpawnOptionsWithoutStdio): ChildProcessWithoutNullStreams;
+    function spawn(command: string, options: SpawnOptionsWithStdioTuple<StdioPipe, StdioPipe, StdioPipe>): ChildProcessByStdio<Writable, Readable, Readable>;
+    function spawn(command: string, options: SpawnOptionsWithStdioTuple<StdioPipe, StdioPipe, StdioNull>): ChildProcessByStdio<Writable, Readable, null>;
+    function spawn(command: string, options: SpawnOptionsWithStdioTuple<StdioPipe, StdioNull, StdioPipe>): ChildProcessByStdio<Writable, null, Readable>;
+    function spawn(command: string, options: SpawnOptionsWithStdioTuple<StdioNull, StdioPipe, StdioPipe>): ChildProcessByStdio<null, Readable, Readable>;
+    function spawn(command: string, options: SpawnOptionsWithStdioTuple<StdioPipe, StdioNull, StdioNull>): ChildProcessByStdio<Writable, null, null>;
+    function spawn(command: string, options: SpawnOptionsWithStdioTuple<StdioNull, StdioPipe, StdioNull>): ChildProcessByStdio<null, Readable, null>;
+    function spawn(command: string, options: SpawnOptionsWithStdioTuple<StdioNull, StdioNull, StdioPipe>): ChildProcessByStdio<null, null, Readable>;
+    function spawn(command: string, options: SpawnOptionsWithStdioTuple<StdioNull, StdioNull, StdioNull>): ChildProcessByStdio<null, null, null>;
+    function spawn(command: string, options: SpawnOptions): ChildProcess;
+    // overloads of spawn with 'args'
+    function spawn(command: string, args?: ReadonlyArray<string>, options?: SpawnOptionsWithoutStdio): ChildProcessWithoutNullStreams;
+    function spawn(command: string, args: ReadonlyArray<string>, options: SpawnOptionsWithStdioTuple<StdioPipe, StdioPipe, StdioPipe>): ChildProcessByStdio<Writable, Readable, Readable>;
+    function spawn(command: string, args: ReadonlyArray<string>, options: SpawnOptionsWithStdioTuple<StdioPipe, StdioPipe, StdioNull>): ChildProcessByStdio<Writable, Readable, null>;
+    function spawn(command: string, args: ReadonlyArray<string>, options: SpawnOptionsWithStdioTuple<StdioPipe, StdioNull, StdioPipe>): ChildProcessByStdio<Writable, null, Readable>;
+    function spawn(command: string, args: ReadonlyArray<string>, options: SpawnOptionsWithStdioTuple<StdioNull, StdioPipe, StdioPipe>): ChildProcessByStdio<null, Readable, Readable>;
+    function spawn(command: string, args: ReadonlyArray<string>, options: SpawnOptionsWithStdioTuple<StdioPipe, StdioNull, StdioNull>): ChildProcessByStdio<Writable, null, null>;
+    function spawn(command: string, args: ReadonlyArray<string>, options: SpawnOptionsWithStdioTuple<StdioNull, StdioPipe, StdioNull>): ChildProcessByStdio<null, Readable, null>;
+    function spawn(command: string, args: ReadonlyArray<string>, options: SpawnOptionsWithStdioTuple<StdioNull, StdioNull, StdioPipe>): ChildProcessByStdio<null, null, Readable>;
+    function spawn(command: string, args: ReadonlyArray<string>, options: SpawnOptionsWithStdioTuple<StdioNull, StdioNull, StdioNull>): ChildProcessByStdio<null, null, null>;
+    function spawn(command: string, args: ReadonlyArray<string>, options: SpawnOptions): ChildProcess;
+    interface ExecOptions extends CommonOptions {
+        shell?: string | undefined;
+        signal?: AbortSignal | undefined;
+        maxBuffer?: number | undefined;
+        killSignal?: NodeJS.Signals | number | undefined;
+    }
+    interface ExecOptionsWithStringEncoding extends ExecOptions {
+        encoding: BufferEncoding;
+    }
+    interface ExecOptionsWithBufferEncoding extends ExecOptions {
+        encoding: BufferEncoding | null; // specify `null`.
+    }
+    interface ExecException extends Error {
+        cmd?: string | undefined;
+        killed?: boolean | undefined;
+        code?: number | undefined;
+        signal?: NodeJS.Signals | undefined;
+    }
+    /**
+     * Spawns a shell then executes the `command` within that shell, buffering any
+     * generated output. The `command` string passed to the exec function is processed
+     * directly by the shell and special characters (vary based on [shell](https://en.wikipedia.org/wiki/List_of_command-line_interpreters))
+     * need to be dealt with accordingly:
+     *
+     * ```js
+     * const { exec } = require('child_process');
+     *
+     * exec('"/path/to/test file/test.sh" arg1 arg2');
+     * // Double quotes are used so that the space in the path is not interpreted as
+     * // a delimiter of multiple arguments.
+     *
+     * exec('echo "The \\$HOME variable is $HOME"');
+     * // The $HOME variable is escaped in the first instance, but not in the second.
+     * ```
+     *
+     * **Never pass unsanitized user input to this function.
Any input containing shell** + * **metacharacters may be used to trigger arbitrary command execution.** + * + * If a `callback` function is provided, it is called with the arguments`(error, stdout, stderr)`. On success, `error` will be `null`. On error,`error` will be an instance of `Error`. The + * `error.code` property will be + * the exit code of the process. By convention, any exit code other than `0`indicates an error. `error.signal` will be the signal that terminated the + * process. + * + * The `stdout` and `stderr` arguments passed to the callback will contain the + * stdout and stderr output of the child process. By default, Node.js will decode + * the output as UTF-8 and pass strings to the callback. The `encoding` option + * can be used to specify the character encoding used to decode the stdout and + * stderr output. If `encoding` is `'buffer'`, or an unrecognized character + * encoding, `Buffer` objects will be passed to the callback instead. + * + * ```js + * const { exec } = require('child_process'); + * exec('cat *.js missing_file | wc -l', (error, stdout, stderr) => { + * if (error) { + * console.error(`exec error: ${error}`); + * return; + * } + * console.log(`stdout: ${stdout}`); + * console.error(`stderr: ${stderr}`); + * }); + * ``` + * + * If `timeout` is greater than `0`, the parent will send the signal + * identified by the `killSignal` property (the default is `'SIGTERM'`) if the + * child runs longer than `timeout` milliseconds. + * + * Unlike the [`exec(3)`](http://man7.org/linux/man-pages/man3/exec.3.html) POSIX system call, `child_process.exec()` does not replace + * the existing process and uses a shell to execute the command. + * + * If this method is invoked as its `util.promisify()` ed version, it returns + * a `Promise` for an `Object` with `stdout` and `stderr` properties. The returned`ChildProcess` instance is attached to the `Promise` as a `child` property. In + * case of an error (including any error resulting in an exit code other than 0), a + * rejected promise is returned, with the same `error` object given in the + * callback, but with two additional properties `stdout` and `stderr`. + * + * ```js + * const util = require('util'); + * const exec = util.promisify(require('child_process').exec); + * + * async function lsExample() { + * const { stdout, stderr } = await exec('ls'); + * console.log('stdout:', stdout); + * console.error('stderr:', stderr); + * } + * lsExample(); + * ``` + * + * If the `signal` option is enabled, calling `.abort()` on the corresponding`AbortController` is similar to calling `.kill()` on the child process except + * the error passed to the callback will be an `AbortError`: + * + * ```js + * const { exec } = require('child_process'); + * const controller = new AbortController(); + * const { signal } = controller; + * const child = exec('grep ssh', { signal }, (error) => { + * console.log(error); // an AbortError + * }); + * controller.abort(); + * ``` + * @since v0.1.90 + * @param command The command to run, with space-separated arguments. + * @param callback called with the output when process terminates. + */ + function exec(command: string, callback?: (error: ExecException | null, stdout: string, stderr: string) => void): ChildProcess; + // `options` with `"buffer"` or `null` for `encoding` means stdout/stderr are definitely `Buffer`. 
+ function exec( + command: string, + options: { + encoding: 'buffer' | null; + } & ExecOptions, + callback?: (error: ExecException | null, stdout: Buffer, stderr: Buffer) => void + ): ChildProcess; + // `options` with well known `encoding` means stdout/stderr are definitely `string`. + function exec( + command: string, + options: { + encoding: BufferEncoding; + } & ExecOptions, + callback?: (error: ExecException | null, stdout: string, stderr: string) => void + ): ChildProcess; + // `options` with an `encoding` whose type is `string` means stdout/stderr could either be `Buffer` or `string`. + // There is no guarantee the `encoding` is unknown as `string` is a superset of `BufferEncoding`. + function exec( + command: string, + options: { + encoding: BufferEncoding; + } & ExecOptions, + callback?: (error: ExecException | null, stdout: string | Buffer, stderr: string | Buffer) => void + ): ChildProcess; + // `options` without an `encoding` means stdout/stderr are definitely `string`. + function exec(command: string, options: ExecOptions, callback?: (error: ExecException | null, stdout: string, stderr: string) => void): ChildProcess; + // fallback if nothing else matches. Worst case is always `string | Buffer`. + function exec( + command: string, + options: (ObjectEncodingOptions & ExecOptions) | undefined | null, + callback?: (error: ExecException | null, stdout: string | Buffer, stderr: string | Buffer) => void + ): ChildProcess; + interface PromiseWithChild extends Promise { + child: ChildProcess; + } + namespace exec { + function __promisify__(command: string): PromiseWithChild<{ + stdout: string; + stderr: string; + }>; + function __promisify__( + command: string, + options: { + encoding: 'buffer' | null; + } & ExecOptions + ): PromiseWithChild<{ + stdout: Buffer; + stderr: Buffer; + }>; + function __promisify__( + command: string, + options: { + encoding: BufferEncoding; + } & ExecOptions + ): PromiseWithChild<{ + stdout: string; + stderr: string; + }>; + function __promisify__( + command: string, + options: ExecOptions + ): PromiseWithChild<{ + stdout: string; + stderr: string; + }>; + function __promisify__( + command: string, + options?: (ObjectEncodingOptions & ExecOptions) | null + ): PromiseWithChild<{ + stdout: string | Buffer; + stderr: string | Buffer; + }>; + } + interface ExecFileOptions extends CommonOptions, Abortable { + maxBuffer?: number | undefined; + killSignal?: NodeJS.Signals | number | undefined; + windowsVerbatimArguments?: boolean | undefined; + shell?: boolean | string | undefined; + signal?: AbortSignal | undefined; + } + interface ExecFileOptionsWithStringEncoding extends ExecFileOptions { + encoding: BufferEncoding; + } + interface ExecFileOptionsWithBufferEncoding extends ExecFileOptions { + encoding: 'buffer' | null; + } + interface ExecFileOptionsWithOtherEncoding extends ExecFileOptions { + encoding: BufferEncoding; + } + type ExecFileException = ExecException & NodeJS.ErrnoException; + /** + * The `child_process.execFile()` function is similar to {@link exec} except that it does not spawn a shell by default. Rather, the specified + * executable `file` is spawned directly as a new process making it slightly more + * efficient than {@link exec}. + * + * The same options as {@link exec} are supported. Since a shell is + * not spawned, behaviors such as I/O redirection and file globbing are not + * supported. 
+ * + * ```js + * const { execFile } = require('child_process'); + * const child = execFile('node', ['--version'], (error, stdout, stderr) => { + * if (error) { + * throw error; + * } + * console.log(stdout); + * }); + * ``` + * + * The `stdout` and `stderr` arguments passed to the callback will contain the + * stdout and stderr output of the child process. By default, Node.js will decode + * the output as UTF-8 and pass strings to the callback. The `encoding` option + * can be used to specify the character encoding used to decode the stdout and + * stderr output. If `encoding` is `'buffer'`, or an unrecognized character + * encoding, `Buffer` objects will be passed to the callback instead. + * + * If this method is invoked as its `util.promisify()` ed version, it returns + * a `Promise` for an `Object` with `stdout` and `stderr` properties. The returned`ChildProcess` instance is attached to the `Promise` as a `child` property. In + * case of an error (including any error resulting in an exit code other than 0), a + * rejected promise is returned, with the same `error` object given in the + * callback, but with two additional properties `stdout` and `stderr`. + * + * ```js + * const util = require('util'); + * const execFile = util.promisify(require('child_process').execFile); + * async function getVersion() { + * const { stdout } = await execFile('node', ['--version']); + * console.log(stdout); + * } + * getVersion(); + * ``` + * + * **If the `shell` option is enabled, do not pass unsanitized user input to this** + * **function. Any input containing shell metacharacters may be used to trigger** + * **arbitrary command execution.** + * + * If the `signal` option is enabled, calling `.abort()` on the corresponding`AbortController` is similar to calling `.kill()` on the child process except + * the error passed to the callback will be an `AbortError`: + * + * ```js + * const { execFile } = require('child_process'); + * const controller = new AbortController(); + * const { signal } = controller; + * const child = execFile('node', ['--version'], { signal }, (error) => { + * console.log(error); // an AbortError + * }); + * controller.abort(); + * ``` + * @since v0.1.91 + * @param file The name or path of the executable file to run. + * @param args List of string arguments. + * @param callback Called with the output when process terminates. + */ + function execFile(file: string): ChildProcess; + function execFile(file: string, options: (ObjectEncodingOptions & ExecFileOptions) | undefined | null): ChildProcess; + function execFile(file: string, args?: ReadonlyArray | null): ChildProcess; + function execFile(file: string, args: ReadonlyArray | undefined | null, options: (ObjectEncodingOptions & ExecFileOptions) | undefined | null): ChildProcess; + // no `options` definitely means stdout/stderr are `string`. + function execFile(file: string, callback: (error: ExecFileException | null, stdout: string, stderr: string) => void): ChildProcess; + function execFile(file: string, args: ReadonlyArray | undefined | null, callback: (error: ExecFileException | null, stdout: string, stderr: string) => void): ChildProcess; + // `options` with `"buffer"` or `null` for `encoding` means stdout/stderr are definitely `Buffer`. 
+ function execFile(file: string, options: ExecFileOptionsWithBufferEncoding, callback: (error: ExecFileException | null, stdout: Buffer, stderr: Buffer) => void): ChildProcess; + function execFile( + file: string, + args: ReadonlyArray | undefined | null, + options: ExecFileOptionsWithBufferEncoding, + callback: (error: ExecFileException | null, stdout: Buffer, stderr: Buffer) => void + ): ChildProcess; + // `options` with well known `encoding` means stdout/stderr are definitely `string`. + function execFile(file: string, options: ExecFileOptionsWithStringEncoding, callback: (error: ExecFileException | null, stdout: string, stderr: string) => void): ChildProcess; + function execFile( + file: string, + args: ReadonlyArray | undefined | null, + options: ExecFileOptionsWithStringEncoding, + callback: (error: ExecFileException | null, stdout: string, stderr: string) => void + ): ChildProcess; + // `options` with an `encoding` whose type is `string` means stdout/stderr could either be `Buffer` or `string`. + // There is no guarantee the `encoding` is unknown as `string` is a superset of `BufferEncoding`. + function execFile(file: string, options: ExecFileOptionsWithOtherEncoding, callback: (error: ExecFileException | null, stdout: string | Buffer, stderr: string | Buffer) => void): ChildProcess; + function execFile( + file: string, + args: ReadonlyArray | undefined | null, + options: ExecFileOptionsWithOtherEncoding, + callback: (error: ExecFileException | null, stdout: string | Buffer, stderr: string | Buffer) => void + ): ChildProcess; + // `options` without an `encoding` means stdout/stderr are definitely `string`. + function execFile(file: string, options: ExecFileOptions, callback: (error: ExecFileException | null, stdout: string, stderr: string) => void): ChildProcess; + function execFile( + file: string, + args: ReadonlyArray | undefined | null, + options: ExecFileOptions, + callback: (error: ExecFileException | null, stdout: string, stderr: string) => void + ): ChildProcess; + // fallback if nothing else matches. Worst case is always `string | Buffer`. 
+ function execFile( + file: string, + options: (ObjectEncodingOptions & ExecFileOptions) | undefined | null, + callback: ((error: ExecFileException | null, stdout: string | Buffer, stderr: string | Buffer) => void) | undefined | null + ): ChildProcess; + function execFile( + file: string, + args: ReadonlyArray | undefined | null, + options: (ObjectEncodingOptions & ExecFileOptions) | undefined | null, + callback: ((error: ExecFileException | null, stdout: string | Buffer, stderr: string | Buffer) => void) | undefined | null + ): ChildProcess; + namespace execFile { + function __promisify__(file: string): PromiseWithChild<{ + stdout: string; + stderr: string; + }>; + function __promisify__( + file: string, + args: ReadonlyArray | undefined | null + ): PromiseWithChild<{ + stdout: string; + stderr: string; + }>; + function __promisify__( + file: string, + options: ExecFileOptionsWithBufferEncoding + ): PromiseWithChild<{ + stdout: Buffer; + stderr: Buffer; + }>; + function __promisify__( + file: string, + args: ReadonlyArray | undefined | null, + options: ExecFileOptionsWithBufferEncoding + ): PromiseWithChild<{ + stdout: Buffer; + stderr: Buffer; + }>; + function __promisify__( + file: string, + options: ExecFileOptionsWithStringEncoding + ): PromiseWithChild<{ + stdout: string; + stderr: string; + }>; + function __promisify__( + file: string, + args: ReadonlyArray | undefined | null, + options: ExecFileOptionsWithStringEncoding + ): PromiseWithChild<{ + stdout: string; + stderr: string; + }>; + function __promisify__( + file: string, + options: ExecFileOptionsWithOtherEncoding + ): PromiseWithChild<{ + stdout: string | Buffer; + stderr: string | Buffer; + }>; + function __promisify__( + file: string, + args: ReadonlyArray | undefined | null, + options: ExecFileOptionsWithOtherEncoding + ): PromiseWithChild<{ + stdout: string | Buffer; + stderr: string | Buffer; + }>; + function __promisify__( + file: string, + options: ExecFileOptions + ): PromiseWithChild<{ + stdout: string; + stderr: string; + }>; + function __promisify__( + file: string, + args: ReadonlyArray | undefined | null, + options: ExecFileOptions + ): PromiseWithChild<{ + stdout: string; + stderr: string; + }>; + function __promisify__( + file: string, + options: (ObjectEncodingOptions & ExecFileOptions) | undefined | null + ): PromiseWithChild<{ + stdout: string | Buffer; + stderr: string | Buffer; + }>; + function __promisify__( + file: string, + args: ReadonlyArray | undefined | null, + options: (ObjectEncodingOptions & ExecFileOptions) | undefined | null + ): PromiseWithChild<{ + stdout: string | Buffer; + stderr: string | Buffer; + }>; + } + interface ForkOptions extends ProcessEnvOptions, MessagingOptions, Abortable { + execPath?: string | undefined; + execArgv?: string[] | undefined; + silent?: boolean | undefined; + stdio?: StdioOptions | undefined; + detached?: boolean | undefined; + windowsVerbatimArguments?: boolean | undefined; + } + /** + * The `child_process.fork()` method is a special case of {@link spawn} used specifically to spawn new Node.js processes. + * Like {@link spawn}, a `ChildProcess` object is returned. The + * returned `ChildProcess` will have an additional communication channel + * built-in that allows messages to be passed back and forth between the parent and + * child. See `subprocess.send()` for details. + * + * Keep in mind that spawned Node.js child processes are + * independent of the parent with exception of the IPC communication channel + * that is established between the two. 
Each process has its own memory, with + * their own V8 instances. Because of the additional resource allocations + * required, spawning a large number of child Node.js processes is not + * recommended. + * + * By default, `child_process.fork()` will spawn new Node.js instances using the `process.execPath` of the parent process. The `execPath` property in the`options` object allows for an alternative + * execution path to be used. + * + * Node.js processes launched with a custom `execPath` will communicate with the + * parent process using the file descriptor (fd) identified using the + * environment variable `NODE_CHANNEL_FD` on the child process. + * + * Unlike the [`fork(2)`](http://man7.org/linux/man-pages/man2/fork.2.html) POSIX system call, `child_process.fork()` does not clone the + * current process. + * + * The `shell` option available in {@link spawn} is not supported by`child_process.fork()` and will be ignored if set. + * + * If the `signal` option is enabled, calling `.abort()` on the corresponding`AbortController` is similar to calling `.kill()` on the child process except + * the error passed to the callback will be an `AbortError`: + * + * ```js + * if (process.argv[2] === 'child') { + * setTimeout(() => { + * console.log(`Hello from ${process.argv[2]}!`); + * }, 1_000); + * } else { + * const { fork } = require('child_process'); + * const controller = new AbortController(); + * const { signal } = controller; + * const child = fork(__filename, ['child'], { signal }); + * child.on('error', (err) => { + * // This will be called with err being an AbortError if the controller aborts + * }); + * controller.abort(); // Stops the child process + * } + * ``` + * @since v0.5.0 + * @param modulePath The module to run in the child. + * @param args List of string arguments. + */ + function fork(modulePath: string, options?: ForkOptions): ChildProcess; + function fork(modulePath: string, args?: ReadonlyArray, options?: ForkOptions): ChildProcess; + interface SpawnSyncOptions extends CommonSpawnOptions { + input?: string | NodeJS.ArrayBufferView | undefined; + maxBuffer?: number | undefined; + encoding?: BufferEncoding | 'buffer' | null | undefined; + } + interface SpawnSyncOptionsWithStringEncoding extends SpawnSyncOptions { + encoding: BufferEncoding; + } + interface SpawnSyncOptionsWithBufferEncoding extends SpawnSyncOptions { + encoding?: 'buffer' | null | undefined; + } + interface SpawnSyncReturns { + pid: number; + output: Array; + stdout: T; + stderr: T; + status: number | null; + signal: NodeJS.Signals | null; + error?: Error | undefined; + } + /** + * The `child_process.spawnSync()` method is generally identical to {@link spawn} with the exception that the function will not return + * until the child process has fully closed. When a timeout has been encountered + * and `killSignal` is sent, the method won't return until the process has + * completely exited. If the process intercepts and handles the `SIGTERM` signal + * and doesn't exit, the parent process will wait until the child process has + * exited. + * + * **If the `shell` option is enabled, do not pass unsanitized user input to this** + * **function. Any input containing shell metacharacters may be used to trigger** + * **arbitrary command execution.** + * @since v0.11.12 + * @param command The command to run. + * @param args List of string arguments. 
+ */ + function spawnSync(command: string): SpawnSyncReturns; + function spawnSync(command: string, options: SpawnSyncOptionsWithStringEncoding): SpawnSyncReturns; + function spawnSync(command: string, options: SpawnSyncOptionsWithBufferEncoding): SpawnSyncReturns; + function spawnSync(command: string, options?: SpawnSyncOptions): SpawnSyncReturns; + function spawnSync(command: string, args: ReadonlyArray): SpawnSyncReturns; + function spawnSync(command: string, args: ReadonlyArray, options: SpawnSyncOptionsWithStringEncoding): SpawnSyncReturns; + function spawnSync(command: string, args: ReadonlyArray, options: SpawnSyncOptionsWithBufferEncoding): SpawnSyncReturns; + function spawnSync(command: string, args?: ReadonlyArray, options?: SpawnSyncOptions): SpawnSyncReturns; + interface CommonExecOptions extends CommonOptions { + input?: string | NodeJS.ArrayBufferView | undefined; + stdio?: StdioOptions | undefined; + killSignal?: NodeJS.Signals | number | undefined; + maxBuffer?: number | undefined; + encoding?: BufferEncoding | 'buffer' | null | undefined; + } + interface ExecSyncOptions extends CommonExecOptions { + shell?: string | undefined; + } + interface ExecSyncOptionsWithStringEncoding extends ExecSyncOptions { + encoding: BufferEncoding; + } + interface ExecSyncOptionsWithBufferEncoding extends ExecSyncOptions { + encoding?: 'buffer' | null | undefined; + } + /** + * The `child_process.execSync()` method is generally identical to {@link exec} with the exception that the method will not return + * until the child process has fully closed. When a timeout has been encountered + * and `killSignal` is sent, the method won't return until the process has + * completely exited. If the child process intercepts and handles the `SIGTERM`signal and doesn't exit, the parent process will wait until the child process + * has exited. + * + * If the process times out or has a non-zero exit code, this method will throw. + * The `Error` object will contain the entire result from {@link spawnSync}. + * + * **Never pass unsanitized user input to this function. Any input containing shell** + * **metacharacters may be used to trigger arbitrary command execution.** + * @since v0.11.12 + * @param command The command to run. + * @return The stdout from the command. + */ + function execSync(command: string): Buffer; + function execSync(command: string, options: ExecSyncOptionsWithStringEncoding): string; + function execSync(command: string, options: ExecSyncOptionsWithBufferEncoding): Buffer; + function execSync(command: string, options?: ExecSyncOptions): string | Buffer; + interface ExecFileSyncOptions extends CommonExecOptions { + shell?: boolean | string | undefined; + } + interface ExecFileSyncOptionsWithStringEncoding extends ExecFileSyncOptions { + encoding: BufferEncoding; + } + interface ExecFileSyncOptionsWithBufferEncoding extends ExecFileSyncOptions { + encoding?: 'buffer' | null; // specify `null`. + } + /** + * The `child_process.execFileSync()` method is generally identical to {@link execFile} with the exception that the method will not + * return until the child process has fully closed. When a timeout has been + * encountered and `killSignal` is sent, the method won't return until the process + * has completely exited. + * + * If the child process intercepts and handles the `SIGTERM` signal and + * does not exit, the parent process will still wait until the child process has + * exited. 
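+     *
+     * An illustrative sketch (not from the upstream docs; the `node --version`
+     * invocation is only an assumed example):
+     *
+     * ```js
+     * const { execFileSync } = require('child_process');
+     *
+     * // Runs the executable directly (no shell) and returns its stdout.
+     * const version = execFileSync('node', ['--version'], { encoding: 'utf8' });
+     * console.log(version);
+     * ```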
+ * + * If the process times out or has a non-zero exit code, this method will throw an `Error` that will include the full result of the underlying {@link spawnSync}. + * + * **If the `shell` option is enabled, do not pass unsanitized user input to this** + * **function. Any input containing shell metacharacters may be used to trigger** + * **arbitrary command execution.** + * @since v0.11.12 + * @param file The name or path of the executable file to run. + * @param args List of string arguments. + * @return The stdout from the command. + */ + function execFileSync(file: string): Buffer; + function execFileSync(file: string, options: ExecFileSyncOptionsWithStringEncoding): string; + function execFileSync(file: string, options: ExecFileSyncOptionsWithBufferEncoding): Buffer; + function execFileSync(file: string, options?: ExecFileSyncOptions): string | Buffer; + function execFileSync(file: string, args: ReadonlyArray): Buffer; + function execFileSync(file: string, args: ReadonlyArray, options: ExecFileSyncOptionsWithStringEncoding): string; + function execFileSync(file: string, args: ReadonlyArray, options: ExecFileSyncOptionsWithBufferEncoding): Buffer; + function execFileSync(file: string, args?: ReadonlyArray, options?: ExecFileSyncOptions): string | Buffer; +} +declare module 'node:child_process' { + export * from 'child_process'; +} diff --git a/node_modules/@types/node/ts4.8/cluster.d.ts b/node_modules/@types/node/ts4.8/cluster.d.ts new file mode 100755 index 0000000..37dbc57 --- /dev/null +++ b/node_modules/@types/node/ts4.8/cluster.d.ts @@ -0,0 +1,410 @@ +/** + * Clusters of Node.js processes can be used to run multiple instances of Node.js + * that can distribute workloads among their application threads. When process + * isolation is not needed, use the `worker_threads` module instead, which + * allows running multiple application threads within a single Node.js instance. + * + * The cluster module allows easy creation of child processes that all share + * server ports. + * + * ```js + * import cluster from 'cluster'; + * import http from 'http'; + * import { cpus } from 'os'; + * import process from 'process'; + * + * const numCPUs = cpus().length; + * + * if (cluster.isPrimary) { + * console.log(`Primary ${process.pid} is running`); + * + * // Fork workers. + * for (let i = 0; i < numCPUs; i++) { + * cluster.fork(); + * } + * + * cluster.on('exit', (worker, code, signal) => { + * console.log(`worker ${worker.process.pid} died`); + * }); + * } else { + * // Workers can share any TCP connection + * // In this case it is an HTTP server + * http.createServer((req, res) => { + * res.writeHead(200); + * res.end('hello world\n'); + * }).listen(8000); + * + * console.log(`Worker ${process.pid} started`); + * } + * ``` + * + * Running Node.js will now share port 8000 between the workers: + * + * ```console + * $ node server.js + * Primary 3596 is running + * Worker 4324 started + * Worker 4520 started + * Worker 6056 started + * Worker 5644 started + * ``` + * + * On Windows, it is not yet possible to set up a named pipe server in a worker. 
+ * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/cluster.js) + */ +declare module 'cluster' { + import * as child from 'node:child_process'; + import EventEmitter = require('node:events'); + import * as net from 'node:net'; + export interface ClusterSettings { + execArgv?: string[] | undefined; // default: process.execArgv + exec?: string | undefined; + args?: string[] | undefined; + silent?: boolean | undefined; + stdio?: any[] | undefined; + uid?: number | undefined; + gid?: number | undefined; + inspectPort?: number | (() => number) | undefined; + } + export interface Address { + address: string; + port: number; + addressType: number | 'udp4' | 'udp6'; // 4, 6, -1, "udp4", "udp6" + } + /** + * A `Worker` object contains all public information and method about a worker. + * In the primary it can be obtained using `cluster.workers`. In a worker + * it can be obtained using `cluster.worker`. + * @since v0.7.0 + */ + export class Worker extends EventEmitter { + /** + * Each new worker is given its own unique id, this id is stored in the`id`. + * + * While a worker is alive, this is the key that indexes it in`cluster.workers`. + * @since v0.8.0 + */ + id: number; + /** + * All workers are created using `child_process.fork()`, the returned object + * from this function is stored as `.process`. In a worker, the global `process`is stored. + * + * See: `Child Process module`. + * + * Workers will call `process.exit(0)` if the `'disconnect'` event occurs + * on `process` and `.exitedAfterDisconnect` is not `true`. This protects against + * accidental disconnection. + * @since v0.7.0 + */ + process: child.ChildProcess; + /** + * Send a message to a worker or primary, optionally with a handle. + * + * In the primary, this sends a message to a specific worker. It is identical to `ChildProcess.send()`. + * + * In a worker, this sends a message to the primary. It is identical to`process.send()`. + * + * This example will echo back all messages from the primary: + * + * ```js + * if (cluster.isPrimary) { + * const worker = cluster.fork(); + * worker.send('hi there'); + * + * } else if (cluster.isWorker) { + * process.on('message', (msg) => { + * process.send(msg); + * }); + * } + * ``` + * @since v0.7.0 + * @param options The `options` argument, if present, is an object used to parameterize the sending of certain types of handles. `options` supports the following properties: + */ + send(message: child.Serializable, callback?: (error: Error | null) => void): boolean; + send(message: child.Serializable, sendHandle: child.SendHandle, callback?: (error: Error | null) => void): boolean; + send(message: child.Serializable, sendHandle: child.SendHandle, options?: child.MessageOptions, callback?: (error: Error | null) => void): boolean; + /** + * This function will kill the worker. In the primary worker, it does this by + * disconnecting the `worker.process`, and once disconnected, killing with`signal`. In the worker, it does it by killing the process with `signal`. + * + * The `kill()` function kills the worker process without waiting for a graceful + * disconnect, it has the same behavior as `worker.process.kill()`. + * + * This method is aliased as `worker.destroy()` for backwards compatibility. + * + * In a worker, `process.kill()` exists, but it is not this function; + * it is `kill()`. + * @since v0.9.12 + * @param [signal='SIGTERM'] Name of the kill signal to send to the worker process. 
+ */ + kill(signal?: string): void; + destroy(signal?: string): void; + /** + * In a worker, this function will close all servers, wait for the `'close'` event + * on those servers, and then disconnect the IPC channel. + * + * In the primary, an internal message is sent to the worker causing it to call`.disconnect()` on itself. + * + * Causes `.exitedAfterDisconnect` to be set. + * + * After a server is closed, it will no longer accept new connections, + * but connections may be accepted by any other listening worker. Existing + * connections will be allowed to close as usual. When no more connections exist, + * see `server.close()`, the IPC channel to the worker will close allowing it + * to die gracefully. + * + * The above applies _only_ to server connections, client connections are not + * automatically closed by workers, and disconnect does not wait for them to close + * before exiting. + * + * In a worker, `process.disconnect` exists, but it is not this function; + * it is `disconnect()`. + * + * Because long living server connections may block workers from disconnecting, it + * may be useful to send a message, so application specific actions may be taken to + * close them. It also may be useful to implement a timeout, killing a worker if + * the `'disconnect'` event has not been emitted after some time. + * + * ```js + * if (cluster.isPrimary) { + * const worker = cluster.fork(); + * let timeout; + * + * worker.on('listening', (address) => { + * worker.send('shutdown'); + * worker.disconnect(); + * timeout = setTimeout(() => { + * worker.kill(); + * }, 2000); + * }); + * + * worker.on('disconnect', () => { + * clearTimeout(timeout); + * }); + * + * } else if (cluster.isWorker) { + * const net = require('net'); + * const server = net.createServer((socket) => { + * // Connections never end + * }); + * + * server.listen(8000); + * + * process.on('message', (msg) => { + * if (msg === 'shutdown') { + * // Initiate graceful close of any connections to server + * } + * }); + * } + * ``` + * @since v0.7.7 + * @return A reference to `worker`. + */ + disconnect(): void; + /** + * This function returns `true` if the worker is connected to its primary via its + * IPC channel, `false` otherwise. A worker is connected to its primary after it + * has been created. It is disconnected after the `'disconnect'` event is emitted. + * @since v0.11.14 + */ + isConnected(): boolean; + /** + * This function returns `true` if the worker's process has terminated (either + * because of exiting or being signaled). Otherwise, it returns `false`. + * + * ```js + * import cluster from 'cluster'; + * import http from 'http'; + * import { cpus } from 'os'; + * import process from 'process'; + * + * const numCPUs = cpus().length; + * + * if (cluster.isPrimary) { + * console.log(`Primary ${process.pid} is running`); + * + * // Fork workers. + * for (let i = 0; i < numCPUs; i++) { + * cluster.fork(); + * } + * + * cluster.on('fork', (worker) => { + * console.log('worker is dead:', worker.isDead()); + * }); + * + * cluster.on('exit', (worker, code, signal) => { + * console.log('worker is dead:', worker.isDead()); + * }); + * } else { + * // Workers can share any TCP connection. In this case, it is an HTTP server. + * http.createServer((req, res) => { + * res.writeHead(200); + * res.end(`Current process\n ${process.pid}`); + * process.kill(process.pid); + * }).listen(8000); + * } + * ``` + * @since v0.11.14 + */ + isDead(): boolean; + /** + * This property is `true` if the worker exited due to `.disconnect()`. 
+ * If the worker exited any other way, it is `false`. If the + * worker has not exited, it is `undefined`. + * + * The boolean `worker.exitedAfterDisconnect` allows distinguishing between + * voluntary and accidental exit, the primary may choose not to respawn a worker + * based on this value. + * + * ```js + * cluster.on('exit', (worker, code, signal) => { + * if (worker.exitedAfterDisconnect === true) { + * console.log('Oh, it was just voluntary – no need to worry'); + * } + * }); + * + * // kill worker + * worker.kill(); + * ``` + * @since v6.0.0 + */ + exitedAfterDisconnect: boolean; + /** + * events.EventEmitter + * 1. disconnect + * 2. error + * 3. exit + * 4. listening + * 5. message + * 6. online + */ + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: 'disconnect', listener: () => void): this; + addListener(event: 'error', listener: (error: Error) => void): this; + addListener(event: 'exit', listener: (code: number, signal: string) => void): this; + addListener(event: 'listening', listener: (address: Address) => void): this; + addListener(event: 'message', listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined. + addListener(event: 'online', listener: () => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: 'disconnect'): boolean; + emit(event: 'error', error: Error): boolean; + emit(event: 'exit', code: number, signal: string): boolean; + emit(event: 'listening', address: Address): boolean; + emit(event: 'message', message: any, handle: net.Socket | net.Server): boolean; + emit(event: 'online'): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: 'disconnect', listener: () => void): this; + on(event: 'error', listener: (error: Error) => void): this; + on(event: 'exit', listener: (code: number, signal: string) => void): this; + on(event: 'listening', listener: (address: Address) => void): this; + on(event: 'message', listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined. + on(event: 'online', listener: () => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: 'disconnect', listener: () => void): this; + once(event: 'error', listener: (error: Error) => void): this; + once(event: 'exit', listener: (code: number, signal: string) => void): this; + once(event: 'listening', listener: (address: Address) => void): this; + once(event: 'message', listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined. + once(event: 'online', listener: () => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: 'disconnect', listener: () => void): this; + prependListener(event: 'error', listener: (error: Error) => void): this; + prependListener(event: 'exit', listener: (code: number, signal: string) => void): this; + prependListener(event: 'listening', listener: (address: Address) => void): this; + prependListener(event: 'message', listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined. 
+ prependListener(event: 'online', listener: () => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: 'disconnect', listener: () => void): this; + prependOnceListener(event: 'error', listener: (error: Error) => void): this; + prependOnceListener(event: 'exit', listener: (code: number, signal: string) => void): this; + prependOnceListener(event: 'listening', listener: (address: Address) => void): this; + prependOnceListener(event: 'message', listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined. + prependOnceListener(event: 'online', listener: () => void): this; + } + export interface Cluster extends EventEmitter { + disconnect(callback?: () => void): void; + fork(env?: any): Worker; + /** @deprecated since v16.0.0 - use isPrimary. */ + readonly isMaster: boolean; + readonly isPrimary: boolean; + readonly isWorker: boolean; + schedulingPolicy: number; + readonly settings: ClusterSettings; + /** @deprecated since v16.0.0 - use setupPrimary. */ + setupMaster(settings?: ClusterSettings): void; + /** + * `setupPrimary` is used to change the default 'fork' behavior. Once called, the settings will be present in cluster.settings. + */ + setupPrimary(settings?: ClusterSettings): void; + readonly worker?: Worker | undefined; + readonly workers?: NodeJS.Dict | undefined; + readonly SCHED_NONE: number; + readonly SCHED_RR: number; + /** + * events.EventEmitter + * 1. disconnect + * 2. exit + * 3. fork + * 4. listening + * 5. message + * 6. online + * 7. setup + */ + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: 'disconnect', listener: (worker: Worker) => void): this; + addListener(event: 'exit', listener: (worker: Worker, code: number, signal: string) => void): this; + addListener(event: 'fork', listener: (worker: Worker) => void): this; + addListener(event: 'listening', listener: (worker: Worker, address: Address) => void): this; + addListener(event: 'message', listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined. + addListener(event: 'online', listener: (worker: Worker) => void): this; + addListener(event: 'setup', listener: (settings: ClusterSettings) => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: 'disconnect', worker: Worker): boolean; + emit(event: 'exit', worker: Worker, code: number, signal: string): boolean; + emit(event: 'fork', worker: Worker): boolean; + emit(event: 'listening', worker: Worker, address: Address): boolean; + emit(event: 'message', worker: Worker, message: any, handle: net.Socket | net.Server): boolean; + emit(event: 'online', worker: Worker): boolean; + emit(event: 'setup', settings: ClusterSettings): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: 'disconnect', listener: (worker: Worker) => void): this; + on(event: 'exit', listener: (worker: Worker, code: number, signal: string) => void): this; + on(event: 'fork', listener: (worker: Worker) => void): this; + on(event: 'listening', listener: (worker: Worker, address: Address) => void): this; + on(event: 'message', listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined. 
+ on(event: 'online', listener: (worker: Worker) => void): this; + on(event: 'setup', listener: (settings: ClusterSettings) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: 'disconnect', listener: (worker: Worker) => void): this; + once(event: 'exit', listener: (worker: Worker, code: number, signal: string) => void): this; + once(event: 'fork', listener: (worker: Worker) => void): this; + once(event: 'listening', listener: (worker: Worker, address: Address) => void): this; + once(event: 'message', listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined. + once(event: 'online', listener: (worker: Worker) => void): this; + once(event: 'setup', listener: (settings: ClusterSettings) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: 'disconnect', listener: (worker: Worker) => void): this; + prependListener(event: 'exit', listener: (worker: Worker, code: number, signal: string) => void): this; + prependListener(event: 'fork', listener: (worker: Worker) => void): this; + prependListener(event: 'listening', listener: (worker: Worker, address: Address) => void): this; + // the handle is a net.Socket or net.Server object, or undefined. + prependListener(event: 'message', listener: (worker: Worker, message: any, handle?: net.Socket | net.Server) => void): this; + prependListener(event: 'online', listener: (worker: Worker) => void): this; + prependListener(event: 'setup', listener: (settings: ClusterSettings) => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: 'disconnect', listener: (worker: Worker) => void): this; + prependOnceListener(event: 'exit', listener: (worker: Worker, code: number, signal: string) => void): this; + prependOnceListener(event: 'fork', listener: (worker: Worker) => void): this; + prependOnceListener(event: 'listening', listener: (worker: Worker, address: Address) => void): this; + // the handle is a net.Socket or net.Server object, or undefined. + prependOnceListener(event: 'message', listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): this; + prependOnceListener(event: 'online', listener: (worker: Worker) => void): this; + prependOnceListener(event: 'setup', listener: (settings: ClusterSettings) => void): this; + } + const cluster: Cluster; + export default cluster; +} +declare module 'node:cluster' { + export * from 'cluster'; + export { default as default } from 'cluster'; +} diff --git a/node_modules/@types/node/ts4.8/console.d.ts b/node_modules/@types/node/ts4.8/console.d.ts new file mode 100755 index 0000000..16c9137 --- /dev/null +++ b/node_modules/@types/node/ts4.8/console.d.ts @@ -0,0 +1,412 @@ +/** + * The `console` module provides a simple debugging console that is similar to the + * JavaScript console mechanism provided by web browsers. + * + * The module exports two specific components: + * + * * A `Console` class with methods such as `console.log()`, `console.error()` and`console.warn()` that can be used to write to any Node.js stream. + * * A global `console` instance configured to write to `process.stdout` and `process.stderr`. The global `console` can be used without calling`require('console')`. 
+ * + * _**Warning**_: The global console object's methods are neither consistently + * synchronous like the browser APIs they resemble, nor are they consistently + * asynchronous like all other Node.js streams. See the `note on process I/O` for + * more information. + * + * Example using the global `console`: + * + * ```js + * console.log('hello world'); + * // Prints: hello world, to stdout + * console.log('hello %s', 'world'); + * // Prints: hello world, to stdout + * console.error(new Error('Whoops, something bad happened')); + * // Prints error message and stack trace to stderr: + * // Error: Whoops, something bad happened + * // at [eval]:5:15 + * // at Script.runInThisContext (node:vm:132:18) + * // at Object.runInThisContext (node:vm:309:38) + * // at node:internal/process/execution:77:19 + * // at [eval]-wrapper:6:22 + * // at evalScript (node:internal/process/execution:76:60) + * // at node:internal/main/eval_string:23:3 + * + * const name = 'Will Robinson'; + * console.warn(`Danger ${name}! Danger!`); + * // Prints: Danger Will Robinson! Danger!, to stderr + * ``` + * + * Example using the `Console` class: + * + * ```js + * const out = getStreamSomehow(); + * const err = getStreamSomehow(); + * const myConsole = new console.Console(out, err); + * + * myConsole.log('hello world'); + * // Prints: hello world, to out + * myConsole.log('hello %s', 'world'); + * // Prints: hello world, to out + * myConsole.error(new Error('Whoops, something bad happened')); + * // Prints: [Error: Whoops, something bad happened], to err + * + * const name = 'Will Robinson'; + * myConsole.warn(`Danger ${name}! Danger!`); + * // Prints: Danger Will Robinson! Danger!, to err + * ``` + * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/console.js) + */ +declare module 'console' { + import console = require('node:console'); + export = console; +} +declare module 'node:console' { + import { InspectOptions } from 'node:util'; + global { + // This needs to be global to avoid TS2403 in case lib.dom.d.ts is present in the same build + interface Console { + Console: console.ConsoleConstructor; + /** + * `console.assert()` writes a message if `value` is [falsy](https://developer.mozilla.org/en-US/docs/Glossary/Falsy) or omitted. It only + * writes a message and does not otherwise affect execution. The output always + * starts with `"Assertion failed"`. If provided, `message` is formatted using `util.format()`. + * + * If `value` is [truthy](https://developer.mozilla.org/en-US/docs/Glossary/Truthy), nothing happens. + * + * ```js + * console.assert(true, 'does nothing'); + * + * console.assert(false, 'Whoops %s work', 'didn\'t'); + * // Assertion failed: Whoops didn't work + * + * console.assert(); + * // Assertion failed + * ``` + * @since v0.1.101 + * @param value The value tested for being truthy. + * @param message All arguments besides `value` are used as error message. + */ + assert(value: any, message?: string, ...optionalParams: any[]): void; + /** + * When `stdout` is a TTY, calling `console.clear()` will attempt to clear the + * TTY. When `stdout` is not a TTY, this method does nothing. + * + * The specific operation of `console.clear()` can vary across operating systems + * and terminal types. For most Linux operating systems, `console.clear()`operates similarly to the `clear` shell command. On Windows, `console.clear()`will clear only the output in the + * current terminal viewport for the Node.js + * binary. 
+ * @since v8.3.0 + */ + clear(): void; + /** + * Maintains an internal counter specific to `label` and outputs to `stdout` the + * number of times `console.count()` has been called with the given `label`. + * + * ```js + * > console.count() + * default: 1 + * undefined + * > console.count('default') + * default: 2 + * undefined + * > console.count('abc') + * abc: 1 + * undefined + * > console.count('xyz') + * xyz: 1 + * undefined + * > console.count('abc') + * abc: 2 + * undefined + * > console.count() + * default: 3 + * undefined + * > + * ``` + * @since v8.3.0 + * @param label The display label for the counter. + */ + count(label?: string): void; + /** + * Resets the internal counter specific to `label`. + * + * ```js + * > console.count('abc'); + * abc: 1 + * undefined + * > console.countReset('abc'); + * undefined + * > console.count('abc'); + * abc: 1 + * undefined + * > + * ``` + * @since v8.3.0 + * @param label The display label for the counter. + */ + countReset(label?: string): void; + /** + * The `console.debug()` function is an alias for {@link log}. + * @since v8.0.0 + */ + debug(message?: any, ...optionalParams: any[]): void; + /** + * Uses `util.inspect()` on `obj` and prints the resulting string to `stdout`. + * This function bypasses any custom `inspect()` function defined on `obj`. + * @since v0.1.101 + */ + dir(obj: any, options?: InspectOptions): void; + /** + * This method calls `console.log()` passing it the arguments received. + * This method does not produce any XML formatting. + * @since v8.0.0 + */ + dirxml(...data: any[]): void; + /** + * Prints to `stderr` with newline. Multiple arguments can be passed, with the + * first used as the primary message and all additional used as substitution + * values similar to [`printf(3)`](http://man7.org/linux/man-pages/man3/printf.3.html) (the arguments are all passed to `util.format()`). + * + * ```js + * const code = 5; + * console.error('error #%d', code); + * // Prints: error #5, to stderr + * console.error('error', code); + * // Prints: error 5, to stderr + * ``` + * + * If formatting elements (e.g. `%d`) are not found in the first string then `util.inspect()` is called on each argument and the resulting string + * values are concatenated. See `util.format()` for more information. + * @since v0.1.100 + */ + error(message?: any, ...optionalParams: any[]): void; + /** + * Increases indentation of subsequent lines by spaces for `groupIndentation`length. + * + * If one or more `label`s are provided, those are printed first without the + * additional indentation. + * @since v8.5.0 + */ + group(...label: any[]): void; + /** + * An alias for {@link group}. + * @since v8.5.0 + */ + groupCollapsed(...label: any[]): void; + /** + * Decreases indentation of subsequent lines by spaces for `groupIndentation`length. + * @since v8.5.0 + */ + groupEnd(): void; + /** + * The `console.info()` function is an alias for {@link log}. + * @since v0.1.100 + */ + info(message?: any, ...optionalParams: any[]): void; + /** + * Prints to `stdout` with newline. Multiple arguments can be passed, with the + * first used as the primary message and all additional used as substitution + * values similar to [`printf(3)`](http://man7.org/linux/man-pages/man3/printf.3.html) (the arguments are all passed to `util.format()`). + * + * ```js + * const count = 5; + * console.log('count: %d', count); + * // Prints: count: 5, to stdout + * console.log('count:', count); + * // Prints: count: 5, to stdout + * ``` + * + * See `util.format()` for more information. 
+ * @since v0.1.100 + */ + log(message?: any, ...optionalParams: any[]): void; + /** + * Try to construct a table with the columns of the properties of `tabularData`(or use `properties`) and rows of `tabularData` and log it. Falls back to just + * logging the argument if it can’t be parsed as tabular. + * + * ```js + * // These can't be parsed as tabular data + * console.table(Symbol()); + * // Symbol() + * + * console.table(undefined); + * // undefined + * + * console.table([{ a: 1, b: 'Y' }, { a: 'Z', b: 2 }]); + * // ┌─────────┬─────┬─────┐ + * // │ (index) │ a │ b │ + * // ├─────────┼─────┼─────┤ + * // │ 0 │ 1 │ 'Y' │ + * // │ 1 │ 'Z' │ 2 │ + * // └─────────┴─────┴─────┘ + * + * console.table([{ a: 1, b: 'Y' }, { a: 'Z', b: 2 }], ['a']); + * // ┌─────────┬─────┐ + * // │ (index) │ a │ + * // ├─────────┼─────┤ + * // │ 0 │ 1 │ + * // │ 1 │ 'Z' │ + * // └─────────┴─────┘ + * ``` + * @since v10.0.0 + * @param properties Alternate properties for constructing the table. + */ + table(tabularData: any, properties?: ReadonlyArray): void; + /** + * Starts a timer that can be used to compute the duration of an operation. Timers + * are identified by a unique `label`. Use the same `label` when calling {@link timeEnd} to stop the timer and output the elapsed time in + * suitable time units to `stdout`. For example, if the elapsed + * time is 3869ms, `console.timeEnd()` displays "3.869s". + * @since v0.1.104 + */ + time(label?: string): void; + /** + * Stops a timer that was previously started by calling {@link time} and + * prints the result to `stdout`: + * + * ```js + * console.time('100-elements'); + * for (let i = 0; i < 100; i++) {} + * console.timeEnd('100-elements'); + * // prints 100-elements: 225.438ms + * ``` + * @since v0.1.104 + */ + timeEnd(label?: string): void; + /** + * For a timer that was previously started by calling {@link time}, prints + * the elapsed time and other `data` arguments to `stdout`: + * + * ```js + * console.time('process'); + * const value = expensiveProcess1(); // Returns 42 + * console.timeLog('process', value); + * // Prints "process: 365.227ms 42". + * doExpensiveProcess2(value); + * console.timeEnd('process'); + * ``` + * @since v10.7.0 + */ + timeLog(label?: string, ...data: any[]): void; + /** + * Prints to `stderr` the string `'Trace: '`, followed by the `util.format()` formatted message and stack trace to the current position in the code. + * + * ```js + * console.trace('Show me'); + * // Prints: (stack trace will vary based on where trace is called) + * // Trace: Show me + * // at repl:2:9 + * // at REPLServer.defaultEval (repl.js:248:27) + * // at bound (domain.js:287:14) + * // at REPLServer.runBound [as eval] (domain.js:300:12) + * // at REPLServer. (repl.js:412:12) + * // at emitOne (events.js:82:20) + * // at REPLServer.emit (events.js:169:7) + * // at REPLServer.Interface._onLine (readline.js:210:10) + * // at REPLServer.Interface._line (readline.js:549:8) + * // at REPLServer.Interface._ttyWrite (readline.js:826:14) + * ``` + * @since v0.1.104 + */ + trace(message?: any, ...optionalParams: any[]): void; + /** + * The `console.warn()` function is an alias for {@link error}. + * @since v0.1.100 + */ + warn(message?: any, ...optionalParams: any[]): void; + // --- Inspector mode only --- + /** + * This method does not display anything unless used in the inspector. + * Starts a JavaScript CPU profile with an optional label. + */ + profile(label?: string): void; + /** + * This method does not display anything unless used in the inspector. 
+ * Stops the current JavaScript CPU profiling session if one has been started and prints the report to the Profiles panel of the inspector. + */ + profileEnd(label?: string): void; + /** + * This method does not display anything unless used in the inspector. + * Adds an event with the label `label` to the Timeline panel of the inspector. + */ + timeStamp(label?: string): void; + } + /** + * The `console` module provides a simple debugging console that is similar to the + * JavaScript console mechanism provided by web browsers. + * + * The module exports two specific components: + * + * * A `Console` class with methods such as `console.log()`, `console.error()` and`console.warn()` that can be used to write to any Node.js stream. + * * A global `console` instance configured to write to `process.stdout` and `process.stderr`. The global `console` can be used without calling`require('console')`. + * + * _**Warning**_: The global console object's methods are neither consistently + * synchronous like the browser APIs they resemble, nor are they consistently + * asynchronous like all other Node.js streams. See the `note on process I/O` for + * more information. + * + * Example using the global `console`: + * + * ```js + * console.log('hello world'); + * // Prints: hello world, to stdout + * console.log('hello %s', 'world'); + * // Prints: hello world, to stdout + * console.error(new Error('Whoops, something bad happened')); + * // Prints error message and stack trace to stderr: + * // Error: Whoops, something bad happened + * // at [eval]:5:15 + * // at Script.runInThisContext (node:vm:132:18) + * // at Object.runInThisContext (node:vm:309:38) + * // at node:internal/process/execution:77:19 + * // at [eval]-wrapper:6:22 + * // at evalScript (node:internal/process/execution:76:60) + * // at node:internal/main/eval_string:23:3 + * + * const name = 'Will Robinson'; + * console.warn(`Danger ${name}! Danger!`); + * // Prints: Danger Will Robinson! Danger!, to stderr + * ``` + * + * Example using the `Console` class: + * + * ```js + * const out = getStreamSomehow(); + * const err = getStreamSomehow(); + * const myConsole = new console.Console(out, err); + * + * myConsole.log('hello world'); + * // Prints: hello world, to out + * myConsole.log('hello %s', 'world'); + * // Prints: hello world, to out + * myConsole.error(new Error('Whoops, something bad happened')); + * // Prints: [Error: Whoops, something bad happened], to err + * + * const name = 'Will Robinson'; + * myConsole.warn(`Danger ${name}! Danger!`); + * // Prints: Danger Will Robinson! 
Danger!, to err + * ``` + * @see [source](https://github.com/nodejs/node/blob/v16.4.2/lib/console.js) + */ + namespace console { + interface ConsoleConstructorOptions { + stdout: NodeJS.WritableStream; + stderr?: NodeJS.WritableStream | undefined; + ignoreErrors?: boolean | undefined; + colorMode?: boolean | 'auto' | undefined; + inspectOptions?: InspectOptions | undefined; + /** + * Set group indentation + * @default 2 + */ + groupIndentation?: number | undefined; + } + interface ConsoleConstructor { + prototype: Console; + new (stdout: NodeJS.WritableStream, stderr?: NodeJS.WritableStream, ignoreErrors?: boolean): Console; + new (options: ConsoleConstructorOptions): Console; + } + } + var console: Console; + } + export = globalThis.console; +} diff --git a/node_modules/@types/node/ts4.8/constants.d.ts b/node_modules/@types/node/ts4.8/constants.d.ts new file mode 100755 index 0000000..208020d --- /dev/null +++ b/node_modules/@types/node/ts4.8/constants.d.ts @@ -0,0 +1,18 @@ +/** @deprecated since v6.3.0 - use constants property exposed by the relevant module instead. */ +declare module 'constants' { + import { constants as osConstants, SignalConstants } from 'node:os'; + import { constants as cryptoConstants } from 'node:crypto'; + import { constants as fsConstants } from 'node:fs'; + + const exp: typeof osConstants.errno & + typeof osConstants.priority & + SignalConstants & + typeof cryptoConstants & + typeof fsConstants; + export = exp; +} + +declare module 'node:constants' { + import constants = require('constants'); + export = constants; +} diff --git a/node_modules/@types/node/ts4.8/crypto.d.ts b/node_modules/@types/node/ts4.8/crypto.d.ts new file mode 100755 index 0000000..4f61974 --- /dev/null +++ b/node_modules/@types/node/ts4.8/crypto.d.ts @@ -0,0 +1,3964 @@ +/** + * The `crypto` module provides cryptographic functionality that includes a set of + * wrappers for OpenSSL's hash, HMAC, cipher, decipher, sign, and verify functions. + * + * ```js + * const { createHmac } = await import('crypto'); + * + * const secret = 'abcdefg'; + * const hash = createHmac('sha256', secret) + * .update('I love cupcakes') + * .digest('hex'); + * console.log(hash); + * // Prints: + * // c0fa1bc00531bd78ef38c628449c5102aeabd49b5dc3a2a516ea6ea959d6658e + * ``` + * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/crypto.js) + */ +declare module 'crypto' { + import * as stream from 'node:stream'; + import { PeerCertificate } from 'node:tls'; + /** + * SPKAC is a Certificate Signing Request mechanism originally implemented by + * Netscape and was specified formally as part of [HTML5's `keygen` element](https://developer.mozilla.org/en-US/docs/Web/HTML/Element/keygen). + * + * `` is deprecated since [HTML 5.2](https://www.w3.org/TR/html52/changes.html#features-removed) and new projects + * should not use this element anymore. + * + * The `crypto` module provides the `Certificate` class for working with SPKAC + * data. The most common usage is handling output generated by the HTML5`` element. Node.js uses [OpenSSL's SPKAC + * implementation](https://www.openssl.org/docs/man1.1.0/apps/openssl-spkac.html) internally. + * @since v0.11.8 + */ + class Certificate { + /** + * ```js + * const { Certificate } = await import('crypto'); + * const spkac = getSpkacSomehow(); + * const challenge = Certificate.exportChallenge(spkac); + * console.log(challenge.toString('utf8')); + * // Prints: the challenge as a UTF8 string + * ``` + * @since v9.0.0 + * @param encoding The `encoding` of the `spkac` string. 
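For the `ConsoleConstructorOptions` shape typed in the console.d.ts hunk above, a rough sketch of a file-backed logger (the log file paths and the `groupIndentation` of 4 are arbitrary choices):

```js
const { Console } = require('node:console');
const fs = require('node:fs');

const logger = new Console({
  stdout: fs.createWriteStream('./app.log'),
  stderr: fs.createWriteStream('./app.err.log'),
  colorMode: false,      // never emit ANSI colour codes to the files
  groupIndentation: 4,   // console.group() indents by 4 spaces instead of 2
});

logger.group('request');
logger.log('handled in %d ms', 42); // goes to app.log, indented
logger.groupEnd();
logger.error(new Error('boom'));    // goes to app.err.log
```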
+ * @return The challenge component of the `spkac` data structure, which includes a public key and a challenge. + */ + static exportChallenge(spkac: BinaryLike): Buffer; + /** + * ```js + * const { Certificate } = await import('crypto'); + * const spkac = getSpkacSomehow(); + * const publicKey = Certificate.exportPublicKey(spkac); + * console.log(publicKey); + * // Prints: the public key as + * ``` + * @since v9.0.0 + * @param encoding The `encoding` of the `spkac` string. + * @return The public key component of the `spkac` data structure, which includes a public key and a challenge. + */ + static exportPublicKey(spkac: BinaryLike, encoding?: string): Buffer; + /** + * ```js + * import { Buffer } from 'buffer'; + * const { Certificate } = await import('crypto'); + * + * const spkac = getSpkacSomehow(); + * console.log(Certificate.verifySpkac(Buffer.from(spkac))); + * // Prints: true or false + * ``` + * @since v9.0.0 + * @param encoding The `encoding` of the `spkac` string. + * @return `true` if the given `spkac` data structure is valid, `false` otherwise. + */ + static verifySpkac(spkac: NodeJS.ArrayBufferView): boolean; + /** + * @deprecated + * @param spkac + * @returns The challenge component of the `spkac` data structure, + * which includes a public key and a challenge. + */ + exportChallenge(spkac: BinaryLike): Buffer; + /** + * @deprecated + * @param spkac + * @param encoding The encoding of the spkac string. + * @returns The public key component of the `spkac` data structure, + * which includes a public key and a challenge. + */ + exportPublicKey(spkac: BinaryLike, encoding?: string): Buffer; + /** + * @deprecated + * @param spkac + * @returns `true` if the given `spkac` data structure is valid, + * `false` otherwise. + */ + verifySpkac(spkac: NodeJS.ArrayBufferView): boolean; + } + namespace constants { + // https://nodejs.org/dist/latest-v10.x/docs/api/crypto.html#crypto_crypto_constants + const OPENSSL_VERSION_NUMBER: number; + /** Applies multiple bug workarounds within OpenSSL. See https://www.openssl.org/docs/man1.0.2/ssl/SSL_CTX_set_options.html for detail. */ + const SSL_OP_ALL: number; + /** Allows legacy insecure renegotiation between OpenSSL and unpatched clients or servers. See https://www.openssl.org/docs/man1.0.2/ssl/SSL_CTX_set_options.html. */ + const SSL_OP_ALLOW_UNSAFE_LEGACY_RENEGOTIATION: number; + /** Attempts to use the server's preferences instead of the client's when selecting a cipher. See https://www.openssl.org/docs/man1.0.2/ssl/SSL_CTX_set_options.html. */ + const SSL_OP_CIPHER_SERVER_PREFERENCE: number; + /** Instructs OpenSSL to use Cisco's "speshul" version of DTLS_BAD_VER. */ + const SSL_OP_CISCO_ANYCONNECT: number; + /** Instructs OpenSSL to turn on cookie exchange. */ + const SSL_OP_COOKIE_EXCHANGE: number; + /** Instructs OpenSSL to add server-hello extension from an early version of the cryptopro draft. */ + const SSL_OP_CRYPTOPRO_TLSEXT_BUG: number; + /** Instructs OpenSSL to disable a SSL 3.0/TLS 1.0 vulnerability workaround added in OpenSSL 0.9.6d. */ + const SSL_OP_DONT_INSERT_EMPTY_FRAGMENTS: number; + /** Instructs OpenSSL to always use the tmp_rsa key when performing RSA operations. */ + const SSL_OP_EPHEMERAL_RSA: number; + /** Allows initial connection to servers that do not support RI. 
*/ + const SSL_OP_LEGACY_SERVER_CONNECT: number; + const SSL_OP_MICROSOFT_BIG_SSLV3_BUFFER: number; + const SSL_OP_MICROSOFT_SESS_ID_BUG: number; + /** Instructs OpenSSL to disable the workaround for a man-in-the-middle protocol-version vulnerability in the SSL 2.0 server implementation. */ + const SSL_OP_MSIE_SSLV2_RSA_PADDING: number; + const SSL_OP_NETSCAPE_CA_DN_BUG: number; + const SSL_OP_NETSCAPE_CHALLENGE_BUG: number; + const SSL_OP_NETSCAPE_DEMO_CIPHER_CHANGE_BUG: number; + const SSL_OP_NETSCAPE_REUSE_CIPHER_CHANGE_BUG: number; + /** Instructs OpenSSL to disable support for SSL/TLS compression. */ + const SSL_OP_NO_COMPRESSION: number; + const SSL_OP_NO_QUERY_MTU: number; + /** Instructs OpenSSL to always start a new session when performing renegotiation. */ + const SSL_OP_NO_SESSION_RESUMPTION_ON_RENEGOTIATION: number; + const SSL_OP_NO_SSLv2: number; + const SSL_OP_NO_SSLv3: number; + const SSL_OP_NO_TICKET: number; + const SSL_OP_NO_TLSv1: number; + const SSL_OP_NO_TLSv1_1: number; + const SSL_OP_NO_TLSv1_2: number; + const SSL_OP_PKCS1_CHECK_1: number; + const SSL_OP_PKCS1_CHECK_2: number; + /** Instructs OpenSSL to always create a new key when using temporary/ephemeral DH parameters. */ + const SSL_OP_SINGLE_DH_USE: number; + /** Instructs OpenSSL to always create a new key when using temporary/ephemeral ECDH parameters. */ + const SSL_OP_SINGLE_ECDH_USE: number; + const SSL_OP_SSLEAY_080_CLIENT_DH_BUG: number; + const SSL_OP_SSLREF2_REUSE_CERT_TYPE_BUG: number; + const SSL_OP_TLS_BLOCK_PADDING_BUG: number; + const SSL_OP_TLS_D5_BUG: number; + /** Instructs OpenSSL to disable version rollback attack detection. */ + const SSL_OP_TLS_ROLLBACK_BUG: number; + const ENGINE_METHOD_RSA: number; + const ENGINE_METHOD_DSA: number; + const ENGINE_METHOD_DH: number; + const ENGINE_METHOD_RAND: number; + const ENGINE_METHOD_EC: number; + const ENGINE_METHOD_CIPHERS: number; + const ENGINE_METHOD_DIGESTS: number; + const ENGINE_METHOD_PKEY_METHS: number; + const ENGINE_METHOD_PKEY_ASN1_METHS: number; + const ENGINE_METHOD_ALL: number; + const ENGINE_METHOD_NONE: number; + const DH_CHECK_P_NOT_SAFE_PRIME: number; + const DH_CHECK_P_NOT_PRIME: number; + const DH_UNABLE_TO_CHECK_GENERATOR: number; + const DH_NOT_SUITABLE_GENERATOR: number; + const ALPN_ENABLED: number; + const RSA_PKCS1_PADDING: number; + const RSA_SSLV23_PADDING: number; + const RSA_NO_PADDING: number; + const RSA_PKCS1_OAEP_PADDING: number; + const RSA_X931_PADDING: number; + const RSA_PKCS1_PSS_PADDING: number; + /** Sets the salt length for RSA_PKCS1_PSS_PADDING to the digest size when signing or verifying. */ + const RSA_PSS_SALTLEN_DIGEST: number; + /** Sets the salt length for RSA_PKCS1_PSS_PADDING to the maximum permissible value when signing data. */ + const RSA_PSS_SALTLEN_MAX_SIGN: number; + /** Causes the salt length for RSA_PKCS1_PSS_PADDING to be determined automatically when verifying a signature. */ + const RSA_PSS_SALTLEN_AUTO: number; + const POINT_CONVERSION_COMPRESSED: number; + const POINT_CONVERSION_UNCOMPRESSED: number; + const POINT_CONVERSION_HYBRID: number; + /** Specifies the built-in default cipher list used by Node.js (colon-separated values). */ + const defaultCoreCipherList: string; + /** Specifies the active default cipher list used by the current Node.js process (colon-separated values). 
*/ + const defaultCipherList: string; + } + interface HashOptions extends stream.TransformOptions { + /** + * For XOF hash functions such as `shake256`, the + * outputLength option can be used to specify the desired output length in bytes. + */ + outputLength?: number | undefined; + } + /** @deprecated since v10.0.0 */ + const fips: boolean; + /** + * Creates and returns a `Hash` object that can be used to generate hash digests + * using the given `algorithm`. Optional `options` argument controls stream + * behavior. For XOF hash functions such as `'shake256'`, the `outputLength` option + * can be used to specify the desired output length in bytes. + * + * The `algorithm` is dependent on the available algorithms supported by the + * version of OpenSSL on the platform. Examples are `'sha256'`, `'sha512'`, etc. + * On recent releases of OpenSSL, `openssl list -digest-algorithms` will + * display the available digest algorithms. + * + * Example: generating the sha256 sum of a file + * + * ```js + * import { + * createReadStream + * } from 'fs'; + * import { argv } from 'process'; + * const { + * createHash + * } = await import('crypto'); + * + * const filename = argv[2]; + * + * const hash = createHash('sha256'); + * + * const input = createReadStream(filename); + * input.on('readable', () => { + * // Only one element is going to be produced by the + * // hash stream. + * const data = input.read(); + * if (data) + * hash.update(data); + * else { + * console.log(`${hash.digest('hex')} ${filename}`); + * } + * }); + * ``` + * @since v0.1.92 + * @param options `stream.transform` options + */ + function createHash(algorithm: string, options?: HashOptions): Hash; + /** + * Creates and returns an `Hmac` object that uses the given `algorithm` and `key`. + * Optional `options` argument controls stream behavior. + * + * The `algorithm` is dependent on the available algorithms supported by the + * version of OpenSSL on the platform. Examples are `'sha256'`, `'sha512'`, etc. + * On recent releases of OpenSSL, `openssl list -digest-algorithms` will + * display the available digest algorithms. + * + * The `key` is the HMAC key used to generate the cryptographic HMAC hash. If it is + * a `KeyObject`, its type must be `secret`. + * + * Example: generating the sha256 HMAC of a file + * + * ```js + * import { + * createReadStream + * } from 'fs'; + * import { argv } from 'process'; + * const { + * createHmac + * } = await import('crypto'); + * + * const filename = argv[2]; + * + * const hmac = createHmac('sha256', 'a secret'); + * + * const input = createReadStream(filename); + * input.on('readable', () => { + * // Only one element is going to be produced by the + * // hash stream. 
+ * const data = input.read(); + * if (data) + * hmac.update(data); + * else { + * console.log(`${hmac.digest('hex')} ${filename}`); + * } + * }); + * ``` + * @since v0.1.94 + * @param options `stream.transform` options + */ + function createHmac(algorithm: string, key: BinaryLike | KeyObject, options?: stream.TransformOptions): Hmac; + // https://nodejs.org/api/buffer.html#buffer_buffers_and_character_encodings + type BinaryToTextEncoding = 'base64' | 'base64url' | 'hex' | 'binary'; + type CharacterEncoding = 'utf8' | 'utf-8' | 'utf16le' | 'latin1'; + type LegacyCharacterEncoding = 'ascii' | 'binary' | 'ucs2' | 'ucs-2'; + type Encoding = BinaryToTextEncoding | CharacterEncoding | LegacyCharacterEncoding; + type ECDHKeyFormat = 'compressed' | 'uncompressed' | 'hybrid'; + /** + * The `Hash` class is a utility for creating hash digests of data. It can be + * used in one of two ways: + * + * * As a `stream` that is both readable and writable, where data is written + * to produce a computed hash digest on the readable side, or + * * Using the `hash.update()` and `hash.digest()` methods to produce the + * computed hash. + * + * The {@link createHash} method is used to create `Hash` instances. `Hash`objects are not to be created directly using the `new` keyword. + * + * Example: Using `Hash` objects as streams: + * + * ```js + * const { + * createHash + * } = await import('crypto'); + * + * const hash = createHash('sha256'); + * + * hash.on('readable', () => { + * // Only one element is going to be produced by the + * // hash stream. + * const data = hash.read(); + * if (data) { + * console.log(data.toString('hex')); + * // Prints: + * // 6a2da20943931e9834fc12cfe5bb47bbd9ae43489a30726962b576f4e3993e50 + * } + * }); + * + * hash.write('some data to hash'); + * hash.end(); + * ``` + * + * Example: Using `Hash` and piped streams: + * + * ```js + * import { createReadStream } from 'fs'; + * import { stdout } from 'process'; + * const { createHash } = await import('crypto'); + * + * const hash = createHash('sha256'); + * + * const input = createReadStream('test.js'); + * input.pipe(hash).setEncoding('hex').pipe(stdout); + * ``` + * + * Example: Using the `hash.update()` and `hash.digest()` methods: + * + * ```js + * const { + * createHash + * } = await import('crypto'); + * + * const hash = createHash('sha256'); + * + * hash.update('some data to hash'); + * console.log(hash.digest('hex')); + * // Prints: + * // 6a2da20943931e9834fc12cfe5bb47bbd9ae43489a30726962b576f4e3993e50 + * ``` + * @since v0.1.92 + */ + class Hash extends stream.Transform { + private constructor(); + /** + * Creates a new `Hash` object that contains a deep copy of the internal state + * of the current `Hash` object. + * + * The optional `options` argument controls stream behavior. For XOF hash + * functions such as `'shake256'`, the `outputLength` option can be used to + * specify the desired output length in bytes. + * + * An error is thrown when an attempt is made to copy the `Hash` object after + * its `hash.digest()` method has been called. + * + * ```js + * // Calculate a rolling hash. + * const { + * createHash + * } = await import('crypto'); + * + * const hash = createHash('sha256'); + * + * hash.update('one'); + * console.log(hash.copy().digest('hex')); + * + * hash.update('two'); + * console.log(hash.copy().digest('hex')); + * + * hash.update('three'); + * console.log(hash.copy().digest('hex')); + * + * // Etc. 
+ * ``` + * @since v13.1.0 + * @param options `stream.transform` options + */ + copy(options?: stream.TransformOptions): Hash; + /** + * Updates the hash content with the given `data`, the encoding of which + * is given in `inputEncoding`. + * If `encoding` is not provided, and the `data` is a string, an + * encoding of `'utf8'` is enforced. If `data` is a `Buffer`, `TypedArray`, or`DataView`, then `inputEncoding` is ignored. + * + * This can be called many times with new data as it is streamed. + * @since v0.1.92 + * @param inputEncoding The `encoding` of the `data` string. + */ + update(data: BinaryLike): Hash; + update(data: string, inputEncoding: Encoding): Hash; + /** + * Calculates the digest of all of the data passed to be hashed (using the `hash.update()` method). + * If `encoding` is provided a string will be returned; otherwise + * a `Buffer` is returned. + * + * The `Hash` object can not be used again after `hash.digest()` method has been + * called. Multiple calls will cause an error to be thrown. + * @since v0.1.92 + * @param encoding The `encoding` of the return value. + */ + digest(): Buffer; + digest(encoding: BinaryToTextEncoding): string; + } + /** + * The `Hmac` class is a utility for creating cryptographic HMAC digests. It can + * be used in one of two ways: + * + * * As a `stream` that is both readable and writable, where data is written + * to produce a computed HMAC digest on the readable side, or + * * Using the `hmac.update()` and `hmac.digest()` methods to produce the + * computed HMAC digest. + * + * The {@link createHmac} method is used to create `Hmac` instances. `Hmac`objects are not to be created directly using the `new` keyword. + * + * Example: Using `Hmac` objects as streams: + * + * ```js + * const { + * createHmac + * } = await import('crypto'); + * + * const hmac = createHmac('sha256', 'a secret'); + * + * hmac.on('readable', () => { + * // Only one element is going to be produced by the + * // hash stream. + * const data = hmac.read(); + * if (data) { + * console.log(data.toString('hex')); + * // Prints: + * // 7fd04df92f636fd450bc841c9418e5825c17f33ad9c87c518115a45971f7f77e + * } + * }); + * + * hmac.write('some data to hash'); + * hmac.end(); + * ``` + * + * Example: Using `Hmac` and piped streams: + * + * ```js + * import { createReadStream } from 'fs'; + * import { stdout } from 'process'; + * const { + * createHmac + * } = await import('crypto'); + * + * const hmac = createHmac('sha256', 'a secret'); + * + * const input = createReadStream('test.js'); + * input.pipe(hmac).pipe(stdout); + * ``` + * + * Example: Using the `hmac.update()` and `hmac.digest()` methods: + * + * ```js + * const { + * createHmac + * } = await import('crypto'); + * + * const hmac = createHmac('sha256', 'a secret'); + * + * hmac.update('some data to hash'); + * console.log(hmac.digest('hex')); + * // Prints: + * // 7fd04df92f636fd450bc841c9418e5825c17f33ad9c87c518115a45971f7f77e + * ``` + * @since v0.1.94 + */ + class Hmac extends stream.Transform { + private constructor(); + /** + * Updates the `Hmac` content with the given `data`, the encoding of which + * is given in `inputEncoding`. + * If `encoding` is not provided, and the `data` is a string, an + * encoding of `'utf8'` is enforced. If `data` is a `Buffer`, `TypedArray`, or`DataView`, then `inputEncoding` is ignored. + * + * This can be called many times with new data as it is streamed. + * @since v0.1.94 + * @param inputEncoding The `encoding` of the `data` string. 
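The `outputLength` option in `HashOptions` above only matters for extendable-output (XOF) digests such as `shake256`; a small sketch, assuming the local OpenSSL build exposes that algorithm (the 16-byte length is an arbitrary choice):

```js
const { createHash } = require('node:crypto');

// For an XOF digest the caller picks the output size (in bytes) via outputLength.
const digest = createHash('shake256', { outputLength: 16 })
  .update('some data to hash')
  .digest('hex');

console.log(digest);        // 32 hex characters
console.log(digest.length); // 32 (== 16 bytes)
```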
+ */ + update(data: BinaryLike): Hmac; + update(data: string, inputEncoding: Encoding): Hmac; + /** + * Calculates the HMAC digest of all of the data passed using `hmac.update()`. + * If `encoding` is + * provided a string is returned; otherwise a `Buffer` is returned; + * + * The `Hmac` object can not be used again after `hmac.digest()` has been + * called. Multiple calls to `hmac.digest()` will result in an error being thrown. + * @since v0.1.94 + * @param encoding The `encoding` of the return value. + */ + digest(): Buffer; + digest(encoding: BinaryToTextEncoding): string; + } + type KeyObjectType = 'secret' | 'public' | 'private'; + interface KeyExportOptions<T extends KeyFormat> { + type: 'pkcs1' | 'spki' | 'pkcs8' | 'sec1'; + format: T; + cipher?: string | undefined; + passphrase?: string | Buffer | undefined; + } + interface JwkKeyExportOptions { + format: 'jwk'; + } + interface JsonWebKey { + crv?: string | undefined; + d?: string | undefined; + dp?: string | undefined; + dq?: string | undefined; + e?: string | undefined; + k?: string | undefined; + kty?: string | undefined; + n?: string | undefined; + p?: string | undefined; + q?: string | undefined; + qi?: string | undefined; + x?: string | undefined; + y?: string | undefined; + [key: string]: unknown; + } + interface AsymmetricKeyDetails { + /** + * Key size in bits (RSA, DSA). + */ + modulusLength?: number | undefined; + /** + * Public exponent (RSA). + */ + publicExponent?: bigint | undefined; + /** + * Name of the message digest (RSA-PSS). + */ + hashAlgorithm?: string | undefined; + /** + * Name of the message digest used by MGF1 (RSA-PSS). + */ + mgf1HashAlgorithm?: string | undefined; + /** + * Minimal salt length in bytes (RSA-PSS). + */ + saltLength?: number | undefined; + /** + * Size of q in bits (DSA). + */ + divisorLength?: number | undefined; + /** + * Name of the curve (EC). + */ + namedCurve?: string | undefined; + } + /** + * Node.js uses a `KeyObject` class to represent a symmetric or asymmetric key, + * and each kind of key exposes different functions. The {@link createSecretKey}, {@link createPublicKey} and {@link createPrivateKey} methods are used to create `KeyObject`instances. `KeyObject` + * objects are not to be created directly using the `new`keyword. + * + * Most applications should consider using the new `KeyObject` API instead of + * passing keys as strings or `Buffer`s due to improved security features. + * + * `KeyObject` instances can be passed to other threads via `postMessage()`. + * The receiver obtains a cloned `KeyObject`, and the `KeyObject` does not need to + * be listed in the `transferList` argument. + * @since v11.6.0 + */ + class KeyObject { + private constructor(); + /** + * Example: Converting a `CryptoKey` instance to a `KeyObject`: + * + * ```js + * const { webcrypto, KeyObject } = await import('crypto'); + * const { subtle } = webcrypto; + * + * const key = await subtle.generateKey({ + * name: 'HMAC', + * hash: 'SHA-256', + * length: 256 + * }, true, ['sign', 'verify']); + * + * const keyObject = KeyObject.from(key); + * console.log(keyObject.symmetricKeySize); + * // Prints: 32 (symmetric key size in bytes) + * ``` + * @since v15.0.0 + */ + static from(key: webcrypto.CryptoKey): KeyObject; + /** + * For asymmetric keys, this property represents the type of the key.
Supported key + * types are: + * + * * `'rsa'` (OID 1.2.840.113549.1.1.1) + * * `'rsa-pss'` (OID 1.2.840.113549.1.1.10) + * * `'dsa'` (OID 1.2.840.10040.4.1) + * * `'ec'` (OID 1.2.840.10045.2.1) + * * `'x25519'` (OID 1.3.101.110) + * * `'x448'` (OID 1.3.101.111) + * * `'ed25519'` (OID 1.3.101.112) + * * `'ed448'` (OID 1.3.101.113) + * * `'dh'` (OID 1.2.840.113549.1.3.1) + * + * This property is `undefined` for unrecognized `KeyObject` types and symmetric + * keys. + * @since v11.6.0 + */ + asymmetricKeyType?: KeyType | undefined; + /** + * For asymmetric keys, this property represents the size of the embedded key in + * bytes. This property is `undefined` for symmetric keys. + */ + asymmetricKeySize?: number | undefined; + /** + * This property exists only on asymmetric keys. Depending on the type of the key, + * this object contains information about the key. None of the information obtained + * through this property can be used to uniquely identify a key or to compromise + * the security of the key. + * + * For RSA-PSS keys, if the key material contains a `RSASSA-PSS-params` sequence, + * the `hashAlgorithm`, `mgf1HashAlgorithm`, and `saltLength` properties will be + * set. + * + * Other key details might be exposed via this API using additional attributes. + * @since v15.7.0 + */ + asymmetricKeyDetails?: AsymmetricKeyDetails | undefined; + /** + * For symmetric keys, the following encoding options can be used: + * + * For public keys, the following encoding options can be used: + * + * For private keys, the following encoding options can be used: + * + * The result type depends on the selected encoding format, when PEM the + * result is a string, when DER it will be a buffer containing the data + * encoded as DER, when [JWK](https://tools.ietf.org/html/rfc7517) it will be an object. + * + * When [JWK](https://tools.ietf.org/html/rfc7517) encoding format was selected, all other encoding options are + * ignored. + * + * PKCS#1, SEC1, and PKCS#8 type keys can be encrypted by using a combination of + * the `cipher` and `format` options. The PKCS#8 `type` can be used with any`format` to encrypt any key algorithm (RSA, EC, or DH) by specifying a`cipher`. PKCS#1 and SEC1 can only be + * encrypted by specifying a `cipher`when the PEM `format` is used. For maximum compatibility, use PKCS#8 for + * encrypted private keys. Since PKCS#8 defines its own + * encryption mechanism, PEM-level encryption is not supported when encrypting + * a PKCS#8 key. See [RFC 5208](https://www.rfc-editor.org/rfc/rfc5208.txt) for PKCS#8 encryption and [RFC 1421](https://www.rfc-editor.org/rfc/rfc1421.txt) for + * PKCS#1 and SEC1 encryption. + * @since v11.6.0 + */ + export(options: KeyExportOptions<'pem'>): string | Buffer; + export(options?: KeyExportOptions<'der'>): Buffer; + export(options?: JwkKeyExportOptions): JsonWebKey; + /** + * For secret keys, this property represents the size of the key in bytes. This + * property is `undefined` for asymmetric keys. + * @since v11.6.0 + */ + symmetricKeySize?: number | undefined; + /** + * Depending on the type of this `KeyObject`, this property is either`'secret'` for secret (symmetric) keys, `'public'` for public (asymmetric) keys + * or `'private'` for private (asymmetric) keys. 
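A sketch of `KeyObject.export()` with the `KeyExportOptions` fields typed above; `generateKeyPairSync` is declared elsewhere in this file and is used here only to obtain key objects to export:

```js
const { generateKeyPairSync } = require('node:crypto');

// Throwaway RSA pair purely for demonstration.
const { publicKey, privateKey } = generateKeyPairSync('rsa', { modulusLength: 2048 });

// Public key as SPKI/PEM, private key as encrypted PKCS#8/PEM.
const spkiPem = publicKey.export({ type: 'spki', format: 'pem' });
const pkcs8Pem = privateKey.export({
  type: 'pkcs8',
  format: 'pem',
  cipher: 'aes-256-cbc',
  passphrase: 'top secret',
});

console.log(publicKey.type, privateKey.asymmetricKeyType); // 'public' 'rsa'
console.log(spkiPem.split('\n')[0]);   // -----BEGIN PUBLIC KEY-----
console.log(pkcs8Pem.split('\n')[0]);  // -----BEGIN ENCRYPTED PRIVATE KEY-----
```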
+ * @since v11.6.0 + */ + type: KeyObjectType; + } + type CipherCCMTypes = 'aes-128-ccm' | 'aes-192-ccm' | 'aes-256-ccm' | 'chacha20-poly1305'; + type CipherGCMTypes = 'aes-128-gcm' | 'aes-192-gcm' | 'aes-256-gcm'; + type CipherOCBTypes = 'aes-128-ocb' | 'aes-192-ocb' | 'aes-256-ocb'; + type BinaryLike = string | NodeJS.ArrayBufferView; + type CipherKey = BinaryLike | KeyObject; + interface CipherCCMOptions extends stream.TransformOptions { + authTagLength: number; + } + interface CipherGCMOptions extends stream.TransformOptions { + authTagLength?: number | undefined; + } + interface CipherOCBOptions extends stream.TransformOptions { + authTagLength: number; + } + /** + * Creates and returns a `Cipher` object that uses the given `algorithm` and`password`. + * + * The `options` argument controls stream behavior and is optional except when a + * cipher in CCM or OCB mode (e.g. `'aes-128-ccm'`) is used. In that case, the`authTagLength` option is required and specifies the length of the + * authentication tag in bytes, see `CCM mode`. In GCM mode, the `authTagLength`option is not required but can be used to set the length of the authentication + * tag that will be returned by `getAuthTag()` and defaults to 16 bytes. + * For `chacha20-poly1305`, the `authTagLength` option defaults to 16 bytes. + * + * The `algorithm` is dependent on OpenSSL, examples are `'aes192'`, etc. On + * recent OpenSSL releases, `openssl list -cipher-algorithms` will + * display the available cipher algorithms. + * + * The `password` is used to derive the cipher key and initialization vector (IV). + * The value must be either a `'latin1'` encoded string, a `Buffer`, a`TypedArray`, or a `DataView`. + * + * The implementation of `crypto.createCipher()` derives keys using the OpenSSL + * function [`EVP_BytesToKey`](https://www.openssl.org/docs/man1.1.0/crypto/EVP_BytesToKey.html) with the digest algorithm set to MD5, one + * iteration, and no salt. The lack of salt allows dictionary attacks as the same + * password always creates the same key. The low iteration count and + * non-cryptographically secure hash algorithm allow passwords to be tested very + * rapidly. + * + * In line with OpenSSL's recommendation to use a more modern algorithm instead of [`EVP_BytesToKey`](https://www.openssl.org/docs/man1.1.0/crypto/EVP_BytesToKey.html) it is recommended that + * developers derive a key and IV on + * their own using {@link scrypt} and to use {@link createCipheriv} to create the `Cipher` object. Users should not use ciphers with counter mode + * (e.g. CTR, GCM, or CCM) in `crypto.createCipher()`. A warning is emitted when + * they are used in order to avoid the risk of IV reuse that causes + * vulnerabilities. For the case when IV is reused in GCM, see [Nonce-Disrespecting Adversaries](https://github.com/nonce-disrespect/nonce-disrespect) for details. + * @since v0.1.94 + * @deprecated Since v10.0.0 - Use {@link createCipheriv} instead. 
+ * @param options `stream.transform` options + */ + function createCipher(algorithm: CipherCCMTypes, password: BinaryLike, options: CipherCCMOptions): CipherCCM; + /** @deprecated since v10.0.0 use `createCipheriv()` */ + function createCipher(algorithm: CipherGCMTypes, password: BinaryLike, options?: CipherGCMOptions): CipherGCM; + /** @deprecated since v10.0.0 use `createCipheriv()` */ + function createCipher(algorithm: string, password: BinaryLike, options?: stream.TransformOptions): Cipher; + /** + * Creates and returns a `Cipher` object, with the given `algorithm`, `key` and + * initialization vector (`iv`). + * + * The `options` argument controls stream behavior and is optional except when a + * cipher in CCM or OCB mode (e.g. `'aes-128-ccm'`) is used. In that case, the`authTagLength` option is required and specifies the length of the + * authentication tag in bytes, see `CCM mode`. In GCM mode, the `authTagLength`option is not required but can be used to set the length of the authentication + * tag that will be returned by `getAuthTag()` and defaults to 16 bytes. + * For `chacha20-poly1305`, the `authTagLength` option defaults to 16 bytes. + * + * The `algorithm` is dependent on OpenSSL, examples are `'aes192'`, etc. On + * recent OpenSSL releases, `openssl list -cipher-algorithms` will + * display the available cipher algorithms. + * + * The `key` is the raw key used by the `algorithm` and `iv` is an [initialization vector](https://en.wikipedia.org/wiki/Initialization_vector). Both arguments must be `'utf8'` encoded + * strings,`Buffers`, `TypedArray`, or `DataView`s. The `key` may optionally be + * a `KeyObject` of type `secret`. If the cipher does not need + * an initialization vector, `iv` may be `null`. + * + * When passing strings for `key` or `iv`, please consider `caveats when using strings as inputs to cryptographic APIs`. + * + * Initialization vectors should be unpredictable and unique; ideally, they will be + * cryptographically random. They do not have to be secret: IVs are typically just + * added to ciphertext messages unencrypted. It may sound contradictory that + * something has to be unpredictable and unique, but does not have to be secret; + * remember that an attacker must not be able to predict ahead of time what a + * given IV will be. + * @since v0.1.94 + * @param options `stream.transform` options + */ + function createCipheriv(algorithm: CipherCCMTypes, key: CipherKey, iv: BinaryLike, options: CipherCCMOptions): CipherCCM; + function createCipheriv(algorithm: CipherOCBTypes, key: CipherKey, iv: BinaryLike, options: CipherOCBOptions): CipherOCB; + function createCipheriv(algorithm: CipherGCMTypes, key: CipherKey, iv: BinaryLike, options?: CipherGCMOptions): CipherGCM; + function createCipheriv(algorithm: string, key: CipherKey, iv: BinaryLike | null, options?: stream.TransformOptions): Cipher; + /** + * Instances of the `Cipher` class are used to encrypt data. The class can be + * used in one of two ways: + * + * * As a `stream` that is both readable and writable, where plain unencrypted + * data is written to produce encrypted data on the readable side, or + * * Using the `cipher.update()` and `cipher.final()` methods to produce + * the encrypted data. + * + * The {@link createCipher} or {@link createCipheriv} methods are + * used to create `Cipher` instances. `Cipher` objects are not to be created + * directly using the `new` keyword. 
+ * + * Example: Using `Cipher` objects as streams: + * + * ```js + * const { + * scrypt, + * randomFill, + * createCipheriv + * } = await import('crypto'); + * + * const algorithm = 'aes-192-cbc'; + * const password = 'Password used to generate key'; + * + * // First, we'll generate the key. The key length is dependent on the algorithm. + * // In this case for aes192, it is 24 bytes (192 bits). + * scrypt(password, 'salt', 24, (err, key) => { + * if (err) throw err; + * // Then, we'll generate a random initialization vector + * randomFill(new Uint8Array(16), (err, iv) => { + * if (err) throw err; + * + * // Once we have the key and iv, we can create and use the cipher... + * const cipher = createCipheriv(algorithm, key, iv); + * + * let encrypted = ''; + * cipher.setEncoding('hex'); + * + * cipher.on('data', (chunk) => encrypted += chunk); + * cipher.on('end', () => console.log(encrypted)); + * + * cipher.write('some clear text data'); + * cipher.end(); + * }); + * }); + * ``` + * + * Example: Using `Cipher` and piped streams: + * + * ```js + * import { + * createReadStream, + * createWriteStream, + * } from 'fs'; + * + * import { + * pipeline + * } from 'stream'; + * + * const { + * scrypt, + * randomFill, + * createCipheriv + * } = await import('crypto'); + * + * const algorithm = 'aes-192-cbc'; + * const password = 'Password used to generate key'; + * + * // First, we'll generate the key. The key length is dependent on the algorithm. + * // In this case for aes192, it is 24 bytes (192 bits). + * scrypt(password, 'salt', 24, (err, key) => { + * if (err) throw err; + * // Then, we'll generate a random initialization vector + * randomFill(new Uint8Array(16), (err, iv) => { + * if (err) throw err; + * + * const cipher = createCipheriv(algorithm, key, iv); + * + * const input = createReadStream('test.js'); + * const output = createWriteStream('test.enc'); + * + * pipeline(input, cipher, output, (err) => { + * if (err) throw err; + * }); + * }); + * }); + * ``` + * + * Example: Using the `cipher.update()` and `cipher.final()` methods: + * + * ```js + * const { + * scrypt, + * randomFill, + * createCipheriv + * } = await import('crypto'); + * + * const algorithm = 'aes-192-cbc'; + * const password = 'Password used to generate key'; + * + * // First, we'll generate the key. The key length is dependent on the algorithm. + * // In this case for aes192, it is 24 bytes (192 bits). + * scrypt(password, 'salt', 24, (err, key) => { + * if (err) throw err; + * // Then, we'll generate a random initialization vector + * randomFill(new Uint8Array(16), (err, iv) => { + * if (err) throw err; + * + * const cipher = createCipheriv(algorithm, key, iv); + * + * let encrypted = cipher.update('some clear text data', 'utf8', 'hex'); + * encrypted += cipher.final('hex'); + * console.log(encrypted); + * }); + * }); + * ``` + * @since v0.1.94 + */ + class Cipher extends stream.Transform { + private constructor(); + /** + * Updates the cipher with `data`. If the `inputEncoding` argument is given, + * the `data`argument is a string using the specified encoding. If the `inputEncoding`argument is not given, `data` must be a `Buffer`, `TypedArray`, or`DataView`. If `data` is a `Buffer`, + * `TypedArray`, or `DataView`, then`inputEncoding` is ignored. + * + * The `outputEncoding` specifies the output format of the enciphered + * data. If the `outputEncoding`is specified, a string using the specified encoding is returned. If no`outputEncoding` is provided, a `Buffer` is returned. 
+ * + * The `cipher.update()` method can be called multiple times with new data until `cipher.final()` is called. Calling `cipher.update()` after `cipher.final()` will result in an error being + * thrown. + * @since v0.1.94 + * @param inputEncoding The `encoding` of the data. + * @param outputEncoding The `encoding` of the return value. + */ + update(data: BinaryLike): Buffer; + update(data: string, inputEncoding: Encoding): Buffer; + update(data: NodeJS.ArrayBufferView, inputEncoding: undefined, outputEncoding: Encoding): string; + update(data: string, inputEncoding: Encoding | undefined, outputEncoding: Encoding): string; + /** + * Once the `cipher.final()` method has been called, the `Cipher` object can no + * longer be used to encrypt data. Attempts to call `cipher.final()` more than + * once will result in an error being thrown. + * @since v0.1.94 + * @param outputEncoding The `encoding` of the return value. + * @return Any remaining enciphered contents. If `outputEncoding` is specified, a string is returned. If an `outputEncoding` is not provided, a {@link Buffer} is returned. + */ + final(): Buffer; + final(outputEncoding: BufferEncoding): string; + /** + * When using block encryption algorithms, the `Cipher` class will automatically + * add padding to the input data to the appropriate block size. To disable the + * default padding call `cipher.setAutoPadding(false)`. + * + * When `autoPadding` is `false`, the length of the entire input data must be a + * multiple of the cipher's block size or `cipher.final()` will throw an error. + * Disabling automatic padding is useful for non-standard padding, for instance + * using `0x0` instead of PKCS padding. + * + * The `cipher.setAutoPadding()` method must be called before `cipher.final()`. + * @since v0.7.1 + * @param [autoPadding=true] + * @return for method chaining. + */ + setAutoPadding(autoPadding?: boolean): this; + } + interface CipherCCM extends Cipher { + setAAD( + buffer: NodeJS.ArrayBufferView, + options: { + plaintextLength: number; + } + ): this; + getAuthTag(): Buffer; + } + interface CipherGCM extends Cipher { + setAAD( + buffer: NodeJS.ArrayBufferView, + options?: { + plaintextLength: number; + } + ): this; + getAuthTag(): Buffer; + } + interface CipherOCB extends Cipher { + setAAD( + buffer: NodeJS.ArrayBufferView, + options?: { + plaintextLength: number; + } + ): this; + getAuthTag(): Buffer; + } + /** + * Creates and returns a `Decipher` object that uses the given `algorithm` and`password` (key). + * + * The `options` argument controls stream behavior and is optional except when a + * cipher in CCM or OCB mode (e.g. `'aes-128-ccm'`) is used. In that case, the`authTagLength` option is required and specifies the length of the + * authentication tag in bytes, see `CCM mode`. + * For `chacha20-poly1305`, the `authTagLength` option defaults to 16 bytes. + * + * The implementation of `crypto.createDecipher()` derives keys using the OpenSSL + * function [`EVP_BytesToKey`](https://www.openssl.org/docs/man1.1.0/crypto/EVP_BytesToKey.html) with the digest algorithm set to MD5, one + * iteration, and no salt. The lack of salt allows dictionary attacks as the same + * password always creates the same key. The low iteration count and + * non-cryptographically secure hash algorithm allow passwords to be tested very + * rapidly. 
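The `CipherGCM`/`DecipherGCM` shapes above add `getAuthTag()`/`setAuthTag()` on top of the plain `Cipher`/`Decipher` API; a round-trip sketch with AES-256-GCM (the random key/IV and the sample plaintext are arbitrary, and `randomBytes` is declared elsewhere in this file):

```js
const { randomBytes, createCipheriv, createDecipheriv } = require('node:crypto');

const key = randomBytes(32); // aes-256-gcm uses a 32-byte key
const iv = randomBytes(12);  // 12-byte IVs are conventional for GCM

const cipher = createCipheriv('aes-256-gcm', key, iv);
const ciphertext = Buffer.concat([
  cipher.update('some clear text data', 'utf8'),
  cipher.final(),
]);
const authTag = cipher.getAuthTag(); // 16 bytes unless authTagLength was set

const decipher = createDecipheriv('aes-256-gcm', key, iv);
decipher.setAuthTag(authTag); // must be called before decipher.final()
const plaintext = Buffer.concat([
  decipher.update(ciphertext),
  decipher.final(), // throws if the tag or ciphertext was tampered with
]).toString('utf8');

console.log(plaintext); // Prints: some clear text data
```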
+ * + * In line with OpenSSL's recommendation to use a more modern algorithm instead of [`EVP_BytesToKey`](https://www.openssl.org/docs/man1.1.0/crypto/EVP_BytesToKey.html) it is recommended that + * developers derive a key and IV on + * their own using {@link scrypt} and to use {@link createDecipheriv} to create the `Decipher` object. + * @since v0.1.94 + * @deprecated Since v10.0.0 - Use {@link createDecipheriv} instead. + * @param options `stream.transform` options + */ + function createDecipher(algorithm: CipherCCMTypes, password: BinaryLike, options: CipherCCMOptions): DecipherCCM; + /** @deprecated since v10.0.0 use `createDecipheriv()` */ + function createDecipher(algorithm: CipherGCMTypes, password: BinaryLike, options?: CipherGCMOptions): DecipherGCM; + /** @deprecated since v10.0.0 use `createDecipheriv()` */ + function createDecipher(algorithm: string, password: BinaryLike, options?: stream.TransformOptions): Decipher; + /** + * Creates and returns a `Decipher` object that uses the given `algorithm`, `key`and initialization vector (`iv`). + * + * The `options` argument controls stream behavior and is optional except when a + * cipher in CCM or OCB mode (e.g. `'aes-128-ccm'`) is used. In that case, the`authTagLength` option is required and specifies the length of the + * authentication tag in bytes, see `CCM mode`. In GCM mode, the `authTagLength`option is not required but can be used to restrict accepted authentication tags + * to those with the specified length. + * For `chacha20-poly1305`, the `authTagLength` option defaults to 16 bytes. + * + * The `algorithm` is dependent on OpenSSL, examples are `'aes192'`, etc. On + * recent OpenSSL releases, `openssl list -cipher-algorithms` will + * display the available cipher algorithms. + * + * The `key` is the raw key used by the `algorithm` and `iv` is an [initialization vector](https://en.wikipedia.org/wiki/Initialization_vector). Both arguments must be `'utf8'` encoded + * strings,`Buffers`, `TypedArray`, or `DataView`s. The `key` may optionally be + * a `KeyObject` of type `secret`. If the cipher does not need + * an initialization vector, `iv` may be `null`. + * + * When passing strings for `key` or `iv`, please consider `caveats when using strings as inputs to cryptographic APIs`. + * + * Initialization vectors should be unpredictable and unique; ideally, they will be + * cryptographically random. They do not have to be secret: IVs are typically just + * added to ciphertext messages unencrypted. It may sound contradictory that + * something has to be unpredictable and unique, but does not have to be secret; + * remember that an attacker must not be able to predict ahead of time what a given + * IV will be. + * @since v0.1.94 + * @param options `stream.transform` options + */ + function createDecipheriv(algorithm: CipherCCMTypes, key: CipherKey, iv: BinaryLike, options: CipherCCMOptions): DecipherCCM; + function createDecipheriv(algorithm: CipherOCBTypes, key: CipherKey, iv: BinaryLike, options: CipherOCBOptions): DecipherOCB; + function createDecipheriv(algorithm: CipherGCMTypes, key: CipherKey, iv: BinaryLike, options?: CipherGCMOptions): DecipherGCM; + function createDecipheriv(algorithm: string, key: CipherKey, iv: BinaryLike | null, options?: stream.TransformOptions): Decipher; + /** + * Instances of the `Decipher` class are used to decrypt data. 
The class can be + * used in one of two ways: + * + * * As a `stream` that is both readable and writable, where plain encrypted + * data is written to produce unencrypted data on the readable side, or + * * Using the `decipher.update()` and `decipher.final()` methods to + * produce the unencrypted data. + * + * The {@link createDecipher} or {@link createDecipheriv} methods are + * used to create `Decipher` instances. `Decipher` objects are not to be created + * directly using the `new` keyword. + * + * Example: Using `Decipher` objects as streams: + * + * ```js + * import { Buffer } from 'buffer'; + * const { + * scryptSync, + * createDecipheriv + * } = await import('crypto'); + * + * const algorithm = 'aes-192-cbc'; + * const password = 'Password used to generate key'; + * // Key length is dependent on the algorithm. In this case for aes192, it is + * // 24 bytes (192 bits). + * // Use the async `crypto.scrypt()` instead. + * const key = scryptSync(password, 'salt', 24); + * // The IV is usually passed along with the ciphertext. + * const iv = Buffer.alloc(16, 0); // Initialization vector. + * + * const decipher = createDecipheriv(algorithm, key, iv); + * + * let decrypted = ''; + * decipher.on('readable', () => { + * while (null !== (chunk = decipher.read())) { + * decrypted += chunk.toString('utf8'); + * } + * }); + * decipher.on('end', () => { + * console.log(decrypted); + * // Prints: some clear text data + * }); + * + * // Encrypted with same algorithm, key and iv. + * const encrypted = + * 'e5f79c5915c02171eec6b212d5520d44480993d7d622a7c4c2da32f6efda0ffa'; + * decipher.write(encrypted, 'hex'); + * decipher.end(); + * ``` + * + * Example: Using `Decipher` and piped streams: + * + * ```js + * import { + * createReadStream, + * createWriteStream, + * } from 'fs'; + * import { Buffer } from 'buffer'; + * const { + * scryptSync, + * createDecipheriv + * } = await import('crypto'); + * + * const algorithm = 'aes-192-cbc'; + * const password = 'Password used to generate key'; + * // Use the async `crypto.scrypt()` instead. + * const key = scryptSync(password, 'salt', 24); + * // The IV is usually passed along with the ciphertext. + * const iv = Buffer.alloc(16, 0); // Initialization vector. + * + * const decipher = createDecipheriv(algorithm, key, iv); + * + * const input = createReadStream('test.enc'); + * const output = createWriteStream('test.js'); + * + * input.pipe(decipher).pipe(output); + * ``` + * + * Example: Using the `decipher.update()` and `decipher.final()` methods: + * + * ```js + * import { Buffer } from 'buffer'; + * const { + * scryptSync, + * createDecipheriv + * } = await import('crypto'); + * + * const algorithm = 'aes-192-cbc'; + * const password = 'Password used to generate key'; + * // Use the async `crypto.scrypt()` instead. + * const key = scryptSync(password, 'salt', 24); + * // The IV is usually passed along with the ciphertext. + * const iv = Buffer.alloc(16, 0); // Initialization vector. + * + * const decipher = createDecipheriv(algorithm, key, iv); + * + * // Encrypted using same algorithm, key and iv. + * const encrypted = + * 'e5f79c5915c02171eec6b212d5520d44480993d7d622a7c4c2da32f6efda0ffa'; + * let decrypted = decipher.update(encrypted, 'hex', 'utf8'); + * decrypted += decipher.final('utf8'); + * console.log(decrypted); + * // Prints: some clear text data + * ``` + * @since v0.1.94 + */ + class Decipher extends stream.Transform { + private constructor(); + /** + * Updates the decipher with `data`. 
If the `inputEncoding` argument is given, + * the `data`argument is a string using the specified encoding. If the `inputEncoding`argument is not given, `data` must be a `Buffer`. If `data` is a `Buffer` then `inputEncoding` is + * ignored. + * + * The `outputEncoding` specifies the output format of the enciphered + * data. If the `outputEncoding`is specified, a string using the specified encoding is returned. If no`outputEncoding` is provided, a `Buffer` is returned. + * + * The `decipher.update()` method can be called multiple times with new data until `decipher.final()` is called. Calling `decipher.update()` after `decipher.final()` will result in an error + * being thrown. + * @since v0.1.94 + * @param inputEncoding The `encoding` of the `data` string. + * @param outputEncoding The `encoding` of the return value. + */ + update(data: NodeJS.ArrayBufferView): Buffer; + update(data: string, inputEncoding: Encoding): Buffer; + update(data: NodeJS.ArrayBufferView, inputEncoding: undefined, outputEncoding: Encoding): string; + update(data: string, inputEncoding: Encoding | undefined, outputEncoding: Encoding): string; + /** + * Once the `decipher.final()` method has been called, the `Decipher` object can + * no longer be used to decrypt data. Attempts to call `decipher.final()` more + * than once will result in an error being thrown. + * @since v0.1.94 + * @param outputEncoding The `encoding` of the return value. + * @return Any remaining deciphered contents. If `outputEncoding` is specified, a string is returned. If an `outputEncoding` is not provided, a {@link Buffer} is returned. + */ + final(): Buffer; + final(outputEncoding: BufferEncoding): string; + /** + * When data has been encrypted without standard block padding, calling`decipher.setAutoPadding(false)` will disable automatic padding to prevent `decipher.final()` from checking for and + * removing padding. + * + * Turning auto padding off will only work if the input data's length is a + * multiple of the ciphers block size. + * + * The `decipher.setAutoPadding()` method must be called before `decipher.final()`. + * @since v0.7.1 + * @param [autoPadding=true] + * @return for method chaining. + */ + setAutoPadding(auto_padding?: boolean): this; + } + interface DecipherCCM extends Decipher { + setAuthTag(buffer: NodeJS.ArrayBufferView): this; + setAAD( + buffer: NodeJS.ArrayBufferView, + options: { + plaintextLength: number; + } + ): this; + } + interface DecipherGCM extends Decipher { + setAuthTag(buffer: NodeJS.ArrayBufferView): this; + setAAD( + buffer: NodeJS.ArrayBufferView, + options?: { + plaintextLength: number; + } + ): this; + } + interface DecipherOCB extends Decipher { + setAuthTag(buffer: NodeJS.ArrayBufferView): this; + setAAD( + buffer: NodeJS.ArrayBufferView, + options?: { + plaintextLength: number; + } + ): this; + } + interface PrivateKeyInput { + key: string | Buffer; + format?: KeyFormat | undefined; + type?: 'pkcs1' | 'pkcs8' | 'sec1' | undefined; + passphrase?: string | Buffer | undefined; + } + interface PublicKeyInput { + key: string | Buffer; + format?: KeyFormat | undefined; + type?: 'pkcs1' | 'spki' | undefined; + } + /** + * Asynchronously generates a new random secret key of the given `length`. The`type` will determine which validations will be performed on the `length`. 
+ * + * ```js + * const { + * generateKey + * } = await import('crypto'); + * + * generateKey('hmac', { length: 64 }, (err, key) => { + * if (err) throw err; + * console.log(key.export().toString('hex')); // 46e..........620 + * }); + * ``` + * @since v15.0.0 + * @param type The intended use of the generated secret key. Currently accepted values are `'hmac'` and `'aes'`. + */ + function generateKey( + type: 'hmac' | 'aes', + options: { + length: number; + }, + callback: (err: Error | null, key: KeyObject) => void + ): void; + /** + * Synchronously generates a new random secret key of the given `length`. The`type` will determine which validations will be performed on the `length`. + * + * ```js + * const { + * generateKeySync + * } = await import('crypto'); + * + * const key = generateKeySync('hmac', { length: 64 }); + * console.log(key.export().toString('hex')); // e89..........41e + * ``` + * @since v15.0.0 + * @param type The intended use of the generated secret key. Currently accepted values are `'hmac'` and `'aes'`. + */ + function generateKeySync( + type: 'hmac' | 'aes', + options: { + length: number; + } + ): KeyObject; + interface JsonWebKeyInput { + key: JsonWebKey; + format: 'jwk'; + } + /** + * Creates and returns a new key object containing a private key. If `key` is a + * string or `Buffer`, `format` is assumed to be `'pem'`; otherwise, `key`must be an object with the properties described above. + * + * If the private key is encrypted, a `passphrase` must be specified. The length + * of the passphrase is limited to 1024 bytes. + * @since v11.6.0 + */ + function createPrivateKey(key: PrivateKeyInput | string | Buffer | JsonWebKeyInput): KeyObject; + /** + * Creates and returns a new key object containing a public key. If `key` is a + * string or `Buffer`, `format` is assumed to be `'pem'`; if `key` is a `KeyObject`with type `'private'`, the public key is derived from the given private key; + * otherwise, `key` must be an object with the properties described above. + * + * If the format is `'pem'`, the `'key'` may also be an X.509 certificate. + * + * Because public keys can be derived from private keys, a private key may be + * passed instead of a public key. In that case, this function behaves as if {@link createPrivateKey} had been called, except that the type of the + * returned `KeyObject` will be `'public'` and that the private key cannot be + * extracted from the returned `KeyObject`. Similarly, if a `KeyObject` with type`'private'` is given, a new `KeyObject` with type `'public'` will be returned + * and it will be impossible to extract the private key from the returned object. + * @since v11.6.0 + */ + function createPublicKey(key: PublicKeyInput | string | Buffer | KeyObject | JsonWebKeyInput): KeyObject; + /** + * Creates and returns a new key object containing a secret key for symmetric + * encryption or `Hmac`. + * @since v11.6.0 + * @param encoding The string encoding when `key` is a string. + */ + function createSecretKey(key: NodeJS.ArrayBufferView): KeyObject; + function createSecretKey(key: string, encoding: BufferEncoding): KeyObject; + /** + * Creates and returns a `Sign` object that uses the given `algorithm`. Use {@link getHashes} to obtain the names of the available digest algorithms. + * Optional `options` argument controls the `stream.Writable` behavior. + * + * In some cases, a `Sign` instance can be created using the name of a signature + * algorithm, such as `'RSA-SHA256'`, instead of a digest algorithm. 
This will use + * the corresponding digest algorithm. This does not work for all signature + * algorithms, such as `'ecdsa-with-SHA256'`, so it is best to always use digest + * algorithm names. + * @since v0.1.92 + * @param options `stream.Writable` options + */ + function createSign(algorithm: string, options?: stream.WritableOptions): Sign; + type DSAEncoding = 'der' | 'ieee-p1363'; + interface SigningOptions { + /** + * @See crypto.constants.RSA_PKCS1_PADDING + */ + padding?: number | undefined; + saltLength?: number | undefined; + dsaEncoding?: DSAEncoding | undefined; + } + interface SignPrivateKeyInput extends PrivateKeyInput, SigningOptions {} + interface SignKeyObjectInput extends SigningOptions { + key: KeyObject; + } + interface VerifyPublicKeyInput extends PublicKeyInput, SigningOptions {} + interface VerifyKeyObjectInput extends SigningOptions { + key: KeyObject; + } + type KeyLike = string | Buffer | KeyObject; + /** + * The `Sign` class is a utility for generating signatures. It can be used in one + * of two ways: + * + * * As a writable `stream`, where data to be signed is written and the `sign.sign()` method is used to generate and return the signature, or + * * Using the `sign.update()` and `sign.sign()` methods to produce the + * signature. + * + * The {@link createSign} method is used to create `Sign` instances. The + * argument is the string name of the hash function to use. `Sign` objects are not + * to be created directly using the `new` keyword. + * + * Example: Using `Sign` and `Verify` objects as streams: + * + * ```js + * const { + * generateKeyPairSync, + * createSign, + * createVerify + * } = await import('crypto'); + * + * const { privateKey, publicKey } = generateKeyPairSync('ec', { + * namedCurve: 'sect239k1' + * }); + * + * const sign = createSign('SHA256'); + * sign.write('some data to sign'); + * sign.end(); + * const signature = sign.sign(privateKey, 'hex'); + * + * const verify = createVerify('SHA256'); + * verify.write('some data to sign'); + * verify.end(); + * console.log(verify.verify(publicKey, signature, 'hex')); + * // Prints: true + * ``` + * + * Example: Using the `sign.update()` and `verify.update()` methods: + * + * ```js + * const { + * generateKeyPairSync, + * createSign, + * createVerify + * } = await import('crypto'); + * + * const { privateKey, publicKey } = generateKeyPairSync('rsa', { + * modulusLength: 2048, + * }); + * + * const sign = createSign('SHA256'); + * sign.update('some data to sign'); + * sign.end(); + * const signature = sign.sign(privateKey); + * + * const verify = createVerify('SHA256'); + * verify.update('some data to sign'); + * verify.end(); + * console.log(verify.verify(publicKey, signature)); + * // Prints: true + * ``` + * @since v0.1.92 + */ + class Sign extends stream.Writable { + private constructor(); + /** + * Updates the `Sign` content with the given `data`, the encoding of which + * is given in `inputEncoding`. + * If `encoding` is not provided, and the `data` is a string, an + * encoding of `'utf8'` is enforced. If `data` is a `Buffer`, `TypedArray`, or`DataView`, then `inputEncoding` is ignored. + * + * This can be called many times with new data as it is streamed. + * @since v0.1.92 + * @param inputEncoding The `encoding` of the `data` string. + */ + update(data: BinaryLike): this; + update(data: string, inputEncoding: Encoding): this; + /** + * Calculates the signature on all the data passed through using either `sign.update()` or `sign.write()`. 
+ * + * If `privateKey` is not a `KeyObject`, this function behaves as if`privateKey` had been passed to {@link createPrivateKey}. If it is an + * object, the following additional properties can be passed: + * + * If `outputEncoding` is provided a string is returned; otherwise a `Buffer` is returned. + * + * The `Sign` object can not be again used after `sign.sign()` method has been + * called. Multiple calls to `sign.sign()` will result in an error being thrown. + * @since v0.1.92 + */ + sign(privateKey: KeyLike | SignKeyObjectInput | SignPrivateKeyInput): Buffer; + sign(privateKey: KeyLike | SignKeyObjectInput | SignPrivateKeyInput, outputFormat: BinaryToTextEncoding): string; + } + /** + * Creates and returns a `Verify` object that uses the given algorithm. + * Use {@link getHashes} to obtain an array of names of the available + * signing algorithms. Optional `options` argument controls the`stream.Writable` behavior. + * + * In some cases, a `Verify` instance can be created using the name of a signature + * algorithm, such as `'RSA-SHA256'`, instead of a digest algorithm. This will use + * the corresponding digest algorithm. This does not work for all signature + * algorithms, such as `'ecdsa-with-SHA256'`, so it is best to always use digest + * algorithm names. + * @since v0.1.92 + * @param options `stream.Writable` options + */ + function createVerify(algorithm: string, options?: stream.WritableOptions): Verify; + /** + * The `Verify` class is a utility for verifying signatures. It can be used in one + * of two ways: + * + * * As a writable `stream` where written data is used to validate against the + * supplied signature, or + * * Using the `verify.update()` and `verify.verify()` methods to verify + * the signature. + * + * The {@link createVerify} method is used to create `Verify` instances.`Verify` objects are not to be created directly using the `new` keyword. + * + * See `Sign` for examples. + * @since v0.1.92 + */ + class Verify extends stream.Writable { + private constructor(); + /** + * Updates the `Verify` content with the given `data`, the encoding of which + * is given in `inputEncoding`. + * If `inputEncoding` is not provided, and the `data` is a string, an + * encoding of `'utf8'` is enforced. If `data` is a `Buffer`, `TypedArray`, or`DataView`, then `inputEncoding` is ignored. + * + * This can be called many times with new data as it is streamed. + * @since v0.1.92 + * @param inputEncoding The `encoding` of the `data` string. + */ + update(data: BinaryLike): Verify; + update(data: string, inputEncoding: Encoding): Verify; + /** + * Verifies the provided data using the given `object` and `signature`. + * + * If `object` is not a `KeyObject`, this function behaves as if`object` had been passed to {@link createPublicKey}. If it is an + * object, the following additional properties can be passed: + * + * The `signature` argument is the previously calculated signature for the data, in + * the `signatureEncoding`. + * If a `signatureEncoding` is specified, the `signature` is expected to be a + * string; otherwise `signature` is expected to be a `Buffer`,`TypedArray`, or `DataView`. + * + * The `verify` object can not be used again after `verify.verify()` has been + * called. Multiple calls to `verify.verify()` will result in an error being + * thrown. + * + * Because public keys can be derived from private keys, a private key may + * be passed instead of a public key. 
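+ *
+ * For example, a minimal sketch (not taken verbatim from the Node.js docs) of passing
+ * explicit RSA-PSS `padding` and `saltLength` options to both `sign.sign()` and
+ * `verify.verify()`, assuming an RSA key pair generated on the spot:
+ *
+ * ```js
+ * const {
+ *   generateKeyPairSync,
+ *   createSign,
+ *   createVerify,
+ *   constants
+ * } = await import('crypto');
+ *
+ * const { privateKey, publicKey } = generateKeyPairSync('rsa', {
+ *   modulusLength: 2048,
+ * });
+ *
+ * const sign = createSign('SHA256');
+ * sign.update('some data to sign');
+ * sign.end();
+ * // Sign with PSS padding and a 32-byte salt.
+ * const signature = sign.sign({
+ *   key: privateKey,
+ *   padding: constants.RSA_PKCS1_PSS_PADDING,
+ *   saltLength: 32,
+ * });
+ *
+ * const verify = createVerify('SHA256');
+ * verify.update('some data to sign');
+ * verify.end();
+ * // The same padding and salt length are supplied again when verifying.
+ * console.log(verify.verify({
+ *   key: publicKey,
+ *   padding: constants.RSA_PKCS1_PSS_PADDING,
+ *   saltLength: 32,
+ * }, signature));
+ * // Prints: true
+ * ```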
+ * @since v0.1.92 + */ + verify(object: KeyLike | VerifyKeyObjectInput | VerifyPublicKeyInput, signature: NodeJS.ArrayBufferView): boolean; + verify(object: KeyLike | VerifyKeyObjectInput | VerifyPublicKeyInput, signature: string, signature_format?: BinaryToTextEncoding): boolean; + } + /** + * Creates a `DiffieHellman` key exchange object using the supplied `prime` and an + * optional specific `generator`. + * + * The `generator` argument can be a number, string, or `Buffer`. If`generator` is not specified, the value `2` is used. + * + * If `primeEncoding` is specified, `prime` is expected to be a string; otherwise + * a `Buffer`, `TypedArray`, or `DataView` is expected. + * + * If `generatorEncoding` is specified, `generator` is expected to be a string; + * otherwise a number, `Buffer`, `TypedArray`, or `DataView` is expected. + * @since v0.11.12 + * @param primeEncoding The `encoding` of the `prime` string. + * @param [generator=2] + * @param generatorEncoding The `encoding` of the `generator` string. + */ + function createDiffieHellman(primeLength: number, generator?: number): DiffieHellman; + function createDiffieHellman(prime: ArrayBuffer | NodeJS.ArrayBufferView, generator?: number | ArrayBuffer | NodeJS.ArrayBufferView): DiffieHellman; + function createDiffieHellman(prime: ArrayBuffer | NodeJS.ArrayBufferView, generator: string, generatorEncoding: BinaryToTextEncoding): DiffieHellman; + function createDiffieHellman(prime: string, primeEncoding: BinaryToTextEncoding, generator?: number | ArrayBuffer | NodeJS.ArrayBufferView): DiffieHellman; + function createDiffieHellman(prime: string, primeEncoding: BinaryToTextEncoding, generator: string, generatorEncoding: BinaryToTextEncoding): DiffieHellman; + /** + * The `DiffieHellman` class is a utility for creating Diffie-Hellman key + * exchanges. + * + * Instances of the `DiffieHellman` class can be created using the {@link createDiffieHellman} function. + * + * ```js + * import assert from 'assert'; + * + * const { + * createDiffieHellman + * } = await import('crypto'); + * + * // Generate Alice's keys... + * const alice = createDiffieHellman(2048); + * const aliceKey = alice.generateKeys(); + * + * // Generate Bob's keys... + * const bob = createDiffieHellman(alice.getPrime(), alice.getGenerator()); + * const bobKey = bob.generateKeys(); + * + * // Exchange and generate the secret... + * const aliceSecret = alice.computeSecret(bobKey); + * const bobSecret = bob.computeSecret(aliceKey); + * + * // OK + * assert.strictEqual(aliceSecret.toString('hex'), bobSecret.toString('hex')); + * ``` + * @since v0.5.0 + */ + class DiffieHellman { + private constructor(); + /** + * Generates private and public Diffie-Hellman key values, and returns + * the public key in the specified `encoding`. This key should be + * transferred to the other party. + * If `encoding` is provided a string is returned; otherwise a `Buffer` is returned. + * @since v0.5.0 + * @param encoding The `encoding` of the return value. + */ + generateKeys(): Buffer; + generateKeys(encoding: BinaryToTextEncoding): string; + /** + * Computes the shared secret using `otherPublicKey` as the other + * party's public key and returns the computed shared secret. The supplied + * key is interpreted using the specified `inputEncoding`, and secret is + * encoded using specified `outputEncoding`. + * If the `inputEncoding` is not + * provided, `otherPublicKey` is expected to be a `Buffer`,`TypedArray`, or `DataView`. 
+ * + * If `outputEncoding` is given a string is returned; otherwise, a `Buffer` is returned. + * @since v0.5.0 + * @param inputEncoding The `encoding` of an `otherPublicKey` string. + * @param outputEncoding The `encoding` of the return value. + */ + computeSecret(otherPublicKey: NodeJS.ArrayBufferView, inputEncoding?: null, outputEncoding?: null): Buffer; + computeSecret(otherPublicKey: string, inputEncoding: BinaryToTextEncoding, outputEncoding?: null): Buffer; + computeSecret(otherPublicKey: NodeJS.ArrayBufferView, inputEncoding: null, outputEncoding: BinaryToTextEncoding): string; + computeSecret(otherPublicKey: string, inputEncoding: BinaryToTextEncoding, outputEncoding: BinaryToTextEncoding): string; + /** + * Returns the Diffie-Hellman prime in the specified `encoding`. + * If `encoding` is provided a string is + * returned; otherwise a `Buffer` is returned. + * @since v0.5.0 + * @param encoding The `encoding` of the return value. + */ + getPrime(): Buffer; + getPrime(encoding: BinaryToTextEncoding): string; + /** + * Returns the Diffie-Hellman generator in the specified `encoding`. + * If `encoding` is provided a string is + * returned; otherwise a `Buffer` is returned. + * @since v0.5.0 + * @param encoding The `encoding` of the return value. + */ + getGenerator(): Buffer; + getGenerator(encoding: BinaryToTextEncoding): string; + /** + * Returns the Diffie-Hellman public key in the specified `encoding`. + * If `encoding` is provided a + * string is returned; otherwise a `Buffer` is returned. + * @since v0.5.0 + * @param encoding The `encoding` of the return value. + */ + getPublicKey(): Buffer; + getPublicKey(encoding: BinaryToTextEncoding): string; + /** + * Returns the Diffie-Hellman private key in the specified `encoding`. + * If `encoding` is provided a + * string is returned; otherwise a `Buffer` is returned. + * @since v0.5.0 + * @param encoding The `encoding` of the return value. + */ + getPrivateKey(): Buffer; + getPrivateKey(encoding: BinaryToTextEncoding): string; + /** + * Sets the Diffie-Hellman public key. If the `encoding` argument is provided,`publicKey` is expected + * to be a string. If no `encoding` is provided, `publicKey` is expected + * to be a `Buffer`, `TypedArray`, or `DataView`. + * @since v0.5.0 + * @param encoding The `encoding` of the `publicKey` string. + */ + setPublicKey(publicKey: NodeJS.ArrayBufferView): void; + setPublicKey(publicKey: string, encoding: BufferEncoding): void; + /** + * Sets the Diffie-Hellman private key. If the `encoding` argument is provided,`privateKey` is expected + * to be a string. If no `encoding` is provided, `privateKey` is expected + * to be a `Buffer`, `TypedArray`, or `DataView`. + * @since v0.5.0 + * @param encoding The `encoding` of the `privateKey` string. + */ + setPrivateKey(privateKey: NodeJS.ArrayBufferView): void; + setPrivateKey(privateKey: string, encoding: BufferEncoding): void; + /** + * A bit field containing any warnings and/or errors resulting from a check + * performed during initialization of the `DiffieHellman` object. + * + * The following values are valid for this property (as defined in `constants`module): + * + * * `DH_CHECK_P_NOT_SAFE_PRIME` + * * `DH_CHECK_P_NOT_PRIME` + * * `DH_UNABLE_TO_CHECK_GENERATOR` + * * `DH_NOT_SUITABLE_GENERATOR` + * @since v0.11.12 + */ + verifyError: number; + } + /** + * The `DiffieHellmanGroup` class takes a well-known modp group as its argument. + * It works the same as `DiffieHellman`, except that it does not allow changing its keys after creation. 
+ * In other words, it does not implement `setPublicKey()` or `setPrivateKey()` methods. + * + * ```js + * const { createDiffieHellmanGroup } = await import('node:crypto'); + * const dh = createDiffieHellmanGroup('modp1'); + * ``` + * The name (e.g. `'modp1'`) is taken from [RFC 2412](https://www.rfc-editor.org/rfc/rfc2412.txt) (modp1 and 2) and [RFC 3526](https://www.rfc-editor.org/rfc/rfc3526.txt): + * ```bash + * $ perl -ne 'print "$1\n" if /"(modp\d+)"/' src/node_crypto_groups.h + * modp1 # 768 bits + * modp2 # 1024 bits + * modp5 # 1536 bits + * modp14 # 2048 bits + * modp15 # etc. + * modp16 + * modp17 + * modp18 + * ``` + * @since v0.7.5 + */ + const DiffieHellmanGroup: DiffieHellmanGroupConstructor; + interface DiffieHellmanGroupConstructor { + new(name: string): DiffieHellmanGroup; + (name: string): DiffieHellmanGroup; + readonly prototype: DiffieHellmanGroup; + } + type DiffieHellmanGroup = Omit; + /** + * Creates a predefined `DiffieHellmanGroup` key exchange object. The + * supported groups are: `'modp1'`, `'modp2'`, `'modp5'` (defined in [RFC 2412](https://www.rfc-editor.org/rfc/rfc2412.txt), but see `Caveats`) and `'modp14'`, `'modp15'`,`'modp16'`, `'modp17'`, + * `'modp18'` (defined in [RFC 3526](https://www.rfc-editor.org/rfc/rfc3526.txt)). The + * returned object mimics the interface of objects created by {@link createDiffieHellman}, but will not allow changing + * the keys (with `diffieHellman.setPublicKey()`, for example). The + * advantage of using this method is that the parties do not have to + * generate nor exchange a group modulus beforehand, saving both processor + * and communication time. + * + * Example (obtaining a shared secret): + * + * ```js + * const { + * getDiffieHellman + * } = await import('crypto'); + * const alice = getDiffieHellman('modp14'); + * const bob = getDiffieHellman('modp14'); + * + * alice.generateKeys(); + * bob.generateKeys(); + * + * const aliceSecret = alice.computeSecret(bob.getPublicKey(), null, 'hex'); + * const bobSecret = bob.computeSecret(alice.getPublicKey(), null, 'hex'); + * + * // aliceSecret and bobSecret should be the same + * console.log(aliceSecret === bobSecret); + * ``` + * @since v0.7.5 + */ + function getDiffieHellman(groupName: string): DiffieHellmanGroup; + /** + * An alias for {@link getDiffieHellman} + * @since v0.9.3 + */ + function createDiffieHellmanGroup(name: string): DiffieHellmanGroup; + /** + * Provides an asynchronous Password-Based Key Derivation Function 2 (PBKDF2) + * implementation. A selected HMAC digest algorithm specified by `digest` is + * applied to derive a key of the requested byte length (`keylen`) from the`password`, `salt` and `iterations`. + * + * The supplied `callback` function is called with two arguments: `err` and`derivedKey`. If an error occurs while deriving the key, `err` will be set; + * otherwise `err` will be `null`. By default, the successfully generated`derivedKey` will be passed to the callback as a `Buffer`. An error will be + * thrown if any of the input arguments specify invalid values or types. + * + * If `digest` is `null`, `'sha1'` will be used. This behavior is deprecated, + * please specify a `digest` explicitly. + * + * The `iterations` argument must be a number set as high as possible. The + * higher the number of iterations, the more secure the derived key will be, + * but will take a longer amount of time to complete. + * + * The `salt` should be as unique as possible. It is recommended that a salt is + * random and at least 16 bytes long. 
See [NIST SP 800-132](https://nvlpubs.nist.gov/nistpubs/Legacy/SP/nistspecialpublication800-132.pdf) for details. + * + * When passing strings for `password` or `salt`, please consider `caveats when using strings as inputs to cryptographic APIs`. + * + * ```js + * const { + * pbkdf2 + * } = await import('crypto'); + * + * pbkdf2('secret', 'salt', 100000, 64, 'sha512', (err, derivedKey) => { + * if (err) throw err; + * console.log(derivedKey.toString('hex')); // '3745e48...08d59ae' + * }); + * ``` + * + * The `crypto.DEFAULT_ENCODING` property can be used to change the way the`derivedKey` is passed to the callback. This property, however, has been + * deprecated and use should be avoided. + * + * ```js + * import crypto from 'crypto'; + * crypto.DEFAULT_ENCODING = 'hex'; + * crypto.pbkdf2('secret', 'salt', 100000, 512, 'sha512', (err, derivedKey) => { + * if (err) throw err; + * console.log(derivedKey); // '3745e48...aa39b34' + * }); + * ``` + * + * An array of supported digest functions can be retrieved using {@link getHashes}. + * + * This API uses libuv's threadpool, which can have surprising and + * negative performance implications for some applications; see the `UV_THREADPOOL_SIZE` documentation for more information. + * @since v0.5.5 + */ + function pbkdf2(password: BinaryLike, salt: BinaryLike, iterations: number, keylen: number, digest: string, callback: (err: Error | null, derivedKey: Buffer) => void): void; + /** + * Provides a synchronous Password-Based Key Derivation Function 2 (PBKDF2) + * implementation. A selected HMAC digest algorithm specified by `digest` is + * applied to derive a key of the requested byte length (`keylen`) from the`password`, `salt` and `iterations`. + * + * If an error occurs an `Error` will be thrown, otherwise the derived key will be + * returned as a `Buffer`. + * + * If `digest` is `null`, `'sha1'` will be used. This behavior is deprecated, + * please specify a `digest` explicitly. + * + * The `iterations` argument must be a number set as high as possible. The + * higher the number of iterations, the more secure the derived key will be, + * but will take a longer amount of time to complete. + * + * The `salt` should be as unique as possible. It is recommended that a salt is + * random and at least 16 bytes long. See [NIST SP 800-132](https://nvlpubs.nist.gov/nistpubs/Legacy/SP/nistspecialpublication800-132.pdf) for details. + * + * When passing strings for `password` or `salt`, please consider `caveats when using strings as inputs to cryptographic APIs`. + * + * ```js + * const { + * pbkdf2Sync + * } = await import('crypto'); + * + * const key = pbkdf2Sync('secret', 'salt', 100000, 64, 'sha512'); + * console.log(key.toString('hex')); // '3745e48...08d59ae' + * ``` + * + * The `crypto.DEFAULT_ENCODING` property may be used to change the way the`derivedKey` is returned. This property, however, is deprecated and use + * should be avoided. + * + * ```js + * import crypto from 'crypto'; + * crypto.DEFAULT_ENCODING = 'hex'; + * const key = crypto.pbkdf2Sync('secret', 'salt', 100000, 512, 'sha512'); + * console.log(key); // '3745e48...aa39b34' + * ``` + * + * An array of supported digest functions can be retrieved using {@link getHashes}. + * @since v0.9.3 + */ + function pbkdf2Sync(password: BinaryLike, salt: BinaryLike, iterations: number, keylen: number, digest: string): Buffer; + /** + * Generates cryptographically strong pseudorandom data. The `size` argument + * is a number indicating the number of bytes to generate. 
+ * + * If a `callback` function is provided, the bytes are generated asynchronously + * and the `callback` function is invoked with two arguments: `err` and `buf`. + * If an error occurs, `err` will be an `Error` object; otherwise it is `null`. The`buf` argument is a `Buffer` containing the generated bytes. + * + * ```js + * // Asynchronous + * const { + * randomBytes + * } = await import('crypto'); + * + * randomBytes(256, (err, buf) => { + * if (err) throw err; + * console.log(`${buf.length} bytes of random data: ${buf.toString('hex')}`); + * }); + * ``` + * + * If the `callback` function is not provided, the random bytes are generated + * synchronously and returned as a `Buffer`. An error will be thrown if + * there is a problem generating the bytes. + * + * ```js + * // Synchronous + * const { + * randomBytes + * } = await import('crypto'); + * + * const buf = randomBytes(256); + * console.log( + * `${buf.length} bytes of random data: ${buf.toString('hex')}`); + * ``` + * + * The `crypto.randomBytes()` method will not complete until there is + * sufficient entropy available. + * This should normally never take longer than a few milliseconds. The only time + * when generating the random bytes may conceivably block for a longer period of + * time is right after boot, when the whole system is still low on entropy. + * + * This API uses libuv's threadpool, which can have surprising and + * negative performance implications for some applications; see the `UV_THREADPOOL_SIZE` documentation for more information. + * + * The asynchronous version of `crypto.randomBytes()` is carried out in a single + * threadpool request. To minimize threadpool task length variation, partition + * large `randomBytes` requests when doing so as part of fulfilling a client + * request. + * @since v0.5.8 + * @param size The number of bytes to generate. The `size` must not be larger than `2**31 - 1`. + * @return if the `callback` function is not provided. + */ + function randomBytes(size: number): Buffer; + function randomBytes(size: number, callback: (err: Error | null, buf: Buffer) => void): void; + function pseudoRandomBytes(size: number): Buffer; + function pseudoRandomBytes(size: number, callback: (err: Error | null, buf: Buffer) => void): void; + /** + * Return a random integer `n` such that `min <= n < max`. This + * implementation avoids [modulo bias](https://en.wikipedia.org/wiki/Fisher%E2%80%93Yates_shuffle#Modulo_bias). + * + * The range (`max - min`) must be less than 2^48. `min` and `max` must + * be [safe integers](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/isSafeInteger). + * + * If the `callback` function is not provided, the random integer is + * generated synchronously. + * + * ```js + * // Asynchronous + * const { + * randomInt + * } = await import('crypto'); + * + * randomInt(3, (err, n) => { + * if (err) throw err; + * console.log(`Random number chosen from (0, 1, 2): ${n}`); + * }); + * ``` + * + * ```js + * // Synchronous + * const { + * randomInt + * } = await import('crypto'); + * + * const n = randomInt(3); + * console.log(`Random number chosen from (0, 1, 2): ${n}`); + * ``` + * + * ```js + * // With `min` argument + * const { + * randomInt + * } = await import('crypto'); + * + * const n = randomInt(1, 7); + * console.log(`The dice rolled: ${n}`); + * ``` + * @since v14.10.0, v12.19.0 + * @param [min=0] Start of random range (inclusive). + * @param max End of random range (exclusive). + * @param callback `function(err, n) {}`. 
+ */ + function randomInt(max: number): number; + function randomInt(min: number, max: number): number; + function randomInt(max: number, callback: (err: Error | null, value: number) => void): void; + function randomInt(min: number, max: number, callback: (err: Error | null, value: number) => void): void; + /** + * Synchronous version of {@link randomFill}. + * + * ```js + * import { Buffer } from 'buffer'; + * const { randomFillSync } = await import('crypto'); + * + * const buf = Buffer.alloc(10); + * console.log(randomFillSync(buf).toString('hex')); + * + * randomFillSync(buf, 5); + * console.log(buf.toString('hex')); + * + * // The above is equivalent to the following: + * randomFillSync(buf, 5, 5); + * console.log(buf.toString('hex')); + * ``` + * + * Any `ArrayBuffer`, `TypedArray` or `DataView` instance may be passed as`buffer`. + * + * ```js + * import { Buffer } from 'buffer'; + * const { randomFillSync } = await import('crypto'); + * + * const a = new Uint32Array(10); + * console.log(Buffer.from(randomFillSync(a).buffer, + * a.byteOffset, a.byteLength).toString('hex')); + * + * const b = new DataView(new ArrayBuffer(10)); + * console.log(Buffer.from(randomFillSync(b).buffer, + * b.byteOffset, b.byteLength).toString('hex')); + * + * const c = new ArrayBuffer(10); + * console.log(Buffer.from(randomFillSync(c)).toString('hex')); + * ``` + * @since v7.10.0, v6.13.0 + * @param buffer Must be supplied. The size of the provided `buffer` must not be larger than `2**31 - 1`. + * @param [offset=0] + * @param [size=buffer.length - offset] + * @return The object passed as `buffer` argument. + */ + function randomFillSync(buffer: T, offset?: number, size?: number): T; + /** + * This function is similar to {@link randomBytes} but requires the first + * argument to be a `Buffer` that will be filled. It also + * requires that a callback is passed in. + * + * If the `callback` function is not provided, an error will be thrown. + * + * ```js + * import { Buffer } from 'buffer'; + * const { randomFill } = await import('crypto'); + * + * const buf = Buffer.alloc(10); + * randomFill(buf, (err, buf) => { + * if (err) throw err; + * console.log(buf.toString('hex')); + * }); + * + * randomFill(buf, 5, (err, buf) => { + * if (err) throw err; + * console.log(buf.toString('hex')); + * }); + * + * // The above is equivalent to the following: + * randomFill(buf, 5, 5, (err, buf) => { + * if (err) throw err; + * console.log(buf.toString('hex')); + * }); + * ``` + * + * Any `ArrayBuffer`, `TypedArray`, or `DataView` instance may be passed as`buffer`. + * + * While this includes instances of `Float32Array` and `Float64Array`, this + * function should not be used to generate random floating-point numbers. The + * result may contain `+Infinity`, `-Infinity`, and `NaN`, and even if the array + * contains finite numbers only, they are not drawn from a uniform random + * distribution and have no meaningful lower or upper bounds. 
+ * + * ```js + * import { Buffer } from 'buffer'; + * const { randomFill } = await import('crypto'); + * + * const a = new Uint32Array(10); + * randomFill(a, (err, buf) => { + * if (err) throw err; + * console.log(Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength) + * .toString('hex')); + * }); + * + * const b = new DataView(new ArrayBuffer(10)); + * randomFill(b, (err, buf) => { + * if (err) throw err; + * console.log(Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength) + * .toString('hex')); + * }); + * + * const c = new ArrayBuffer(10); + * randomFill(c, (err, buf) => { + * if (err) throw err; + * console.log(Buffer.from(buf).toString('hex')); + * }); + * ``` + * + * This API uses libuv's threadpool, which can have surprising and + * negative performance implications for some applications; see the `UV_THREADPOOL_SIZE` documentation for more information. + * + * The asynchronous version of `crypto.randomFill()` is carried out in a single + * threadpool request. To minimize threadpool task length variation, partition + * large `randomFill` requests when doing so as part of fulfilling a client + * request. + * @since v7.10.0, v6.13.0 + * @param buffer Must be supplied. The size of the provided `buffer` must not be larger than `2**31 - 1`. + * @param [offset=0] + * @param [size=buffer.length - offset] + * @param callback `function(err, buf) {}`. + */ + function randomFill(buffer: T, callback: (err: Error | null, buf: T) => void): void; + function randomFill(buffer: T, offset: number, callback: (err: Error | null, buf: T) => void): void; + function randomFill(buffer: T, offset: number, size: number, callback: (err: Error | null, buf: T) => void): void; + interface ScryptOptions { + cost?: number | undefined; + blockSize?: number | undefined; + parallelization?: number | undefined; + N?: number | undefined; + r?: number | undefined; + p?: number | undefined; + maxmem?: number | undefined; + } + /** + * Provides an asynchronous [scrypt](https://en.wikipedia.org/wiki/Scrypt) implementation. Scrypt is a password-based + * key derivation function that is designed to be expensive computationally and + * memory-wise in order to make brute-force attacks unrewarding. + * + * The `salt` should be as unique as possible. It is recommended that a salt is + * random and at least 16 bytes long. See [NIST SP 800-132](https://nvlpubs.nist.gov/nistpubs/Legacy/SP/nistspecialpublication800-132.pdf) for details. + * + * When passing strings for `password` or `salt`, please consider `caveats when using strings as inputs to cryptographic APIs`. + * + * The `callback` function is called with two arguments: `err` and `derivedKey`.`err` is an exception object when key derivation fails, otherwise `err` is`null`. `derivedKey` is passed to the + * callback as a `Buffer`. + * + * An exception is thrown when any of the input arguments specify invalid values + * or types. + * + * ```js + * const { + * scrypt + * } = await import('crypto'); + * + * // Using the factory defaults. + * scrypt('password', 'salt', 64, (err, derivedKey) => { + * if (err) throw err; + * console.log(derivedKey.toString('hex')); // '3745e48...08d59ae' + * }); + * // Using a custom N parameter. Must be a power of two. 
+ * scrypt('password', 'salt', 64, { N: 1024 }, (err, derivedKey) => { + * if (err) throw err; + * console.log(derivedKey.toString('hex')); // '3745e48...aa39b34' + * }); + * ``` + * @since v10.5.0 + */ + function scrypt(password: BinaryLike, salt: BinaryLike, keylen: number, callback: (err: Error | null, derivedKey: Buffer) => void): void; + function scrypt(password: BinaryLike, salt: BinaryLike, keylen: number, options: ScryptOptions, callback: (err: Error | null, derivedKey: Buffer) => void): void; + /** + * Provides a synchronous [scrypt](https://en.wikipedia.org/wiki/Scrypt) implementation. Scrypt is a password-based + * key derivation function that is designed to be expensive computationally and + * memory-wise in order to make brute-force attacks unrewarding. + * + * The `salt` should be as unique as possible. It is recommended that a salt is + * random and at least 16 bytes long. See [NIST SP 800-132](https://nvlpubs.nist.gov/nistpubs/Legacy/SP/nistspecialpublication800-132.pdf) for details. + * + * When passing strings for `password` or `salt`, please consider `caveats when using strings as inputs to cryptographic APIs`. + * + * An exception is thrown when key derivation fails, otherwise the derived key is + * returned as a `Buffer`. + * + * An exception is thrown when any of the input arguments specify invalid values + * or types. + * + * ```js + * const { + * scryptSync + * } = await import('crypto'); + * // Using the factory defaults. + * + * const key1 = scryptSync('password', 'salt', 64); + * console.log(key1.toString('hex')); // '3745e48...08d59ae' + * // Using a custom N parameter. Must be a power of two. + * const key2 = scryptSync('password', 'salt', 64, { N: 1024 }); + * console.log(key2.toString('hex')); // '3745e48...aa39b34' + * ``` + * @since v10.5.0 + */ + function scryptSync(password: BinaryLike, salt: BinaryLike, keylen: number, options?: ScryptOptions): Buffer; + interface RsaPublicKey { + key: KeyLike; + padding?: number | undefined; + } + interface RsaPrivateKey { + key: KeyLike; + passphrase?: string | undefined; + /** + * @default 'sha1' + */ + oaepHash?: string | undefined; + oaepLabel?: NodeJS.TypedArray | undefined; + padding?: number | undefined; + } + /** + * Encrypts the content of `buffer` with `key` and returns a new `Buffer` with encrypted content. The returned data can be decrypted using + * the corresponding private key, for example using {@link privateDecrypt}. + * + * If `key` is not a `KeyObject`, this function behaves as if`key` had been passed to {@link createPublicKey}. If it is an + * object, the `padding` property can be passed. Otherwise, this function uses`RSA_PKCS1_OAEP_PADDING`. + * + * Because RSA public keys can be derived from private keys, a private key may + * be passed instead of a public key. + * @since v0.11.14 + */ + function publicEncrypt(key: RsaPublicKey | RsaPrivateKey | KeyLike, buffer: NodeJS.ArrayBufferView): Buffer; + /** + * Decrypts `buffer` with `key`.`buffer` was previously encrypted using + * the corresponding private key, for example using {@link privateEncrypt}. + * + * If `key` is not a `KeyObject`, this function behaves as if`key` had been passed to {@link createPublicKey}. If it is an + * object, the `padding` property can be passed. Otherwise, this function uses`RSA_PKCS1_PADDING`. + * + * Because RSA public keys can be derived from private keys, a private key may + * be passed instead of a public key. 
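+ *
+ * For example, a minimal round-trip sketch (relying on the default `RSA_PKCS1_PADDING`
+ * on both sides) pairing {@link privateEncrypt} with `publicDecrypt`:
+ *
+ * ```js
+ * import { Buffer } from 'buffer';
+ * const {
+ *   generateKeyPairSync,
+ *   privateEncrypt,
+ *   publicDecrypt
+ * } = await import('crypto');
+ *
+ * const { privateKey, publicKey } = generateKeyPairSync('rsa', {
+ *   modulusLength: 2048,
+ * });
+ *
+ * // "Encrypt" (sign) with the private key...
+ * const encrypted = privateEncrypt(privateKey, Buffer.from('some data'));
+ *
+ * // ...and recover the data with the public key.
+ * const decrypted = publicDecrypt(publicKey, encrypted);
+ * console.log(decrypted.toString('utf8'));
+ * // Prints: some data
+ * ```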
+ * @since v1.1.0 + */ + function publicDecrypt(key: RsaPublicKey | RsaPrivateKey | KeyLike, buffer: NodeJS.ArrayBufferView): Buffer; + /** + * Decrypts `buffer` with `privateKey`. `buffer` was previously encrypted using + * the corresponding public key, for example using {@link publicEncrypt}. + * + * If `privateKey` is not a `KeyObject`, this function behaves as if`privateKey` had been passed to {@link createPrivateKey}. If it is an + * object, the `padding` property can be passed. Otherwise, this function uses`RSA_PKCS1_OAEP_PADDING`. + * @since v0.11.14 + */ + function privateDecrypt(privateKey: RsaPrivateKey | KeyLike, buffer: NodeJS.ArrayBufferView): Buffer; + /** + * Encrypts `buffer` with `privateKey`. The returned data can be decrypted using + * the corresponding public key, for example using {@link publicDecrypt}. + * + * If `privateKey` is not a `KeyObject`, this function behaves as if`privateKey` had been passed to {@link createPrivateKey}. If it is an + * object, the `padding` property can be passed. Otherwise, this function uses`RSA_PKCS1_PADDING`. + * @since v1.1.0 + */ + function privateEncrypt(privateKey: RsaPrivateKey | KeyLike, buffer: NodeJS.ArrayBufferView): Buffer; + /** + * ```js + * const { + * getCiphers + * } = await import('crypto'); + * + * console.log(getCiphers()); // ['aes-128-cbc', 'aes-128-ccm', ...] + * ``` + * @since v0.9.3 + * @return An array with the names of the supported cipher algorithms. + */ + function getCiphers(): string[]; + /** + * ```js + * const { + * getCurves + * } = await import('crypto'); + * + * console.log(getCurves()); // ['Oakley-EC2N-3', 'Oakley-EC2N-4', ...] + * ``` + * @since v2.3.0 + * @return An array with the names of the supported elliptic curves. + */ + function getCurves(): string[]; + /** + * @since v10.0.0 + * @return `1` if and only if a FIPS compliant crypto provider is currently in use, `0` otherwise. A future semver-major release may change the return type of this API to a {boolean}. + */ + function getFips(): 1 | 0; + /** + * Enables the FIPS compliant crypto provider in a FIPS-enabled Node.js build. Throws an error if FIPS mode is not available. + * @since v10.0.0 + * @param bool `true` to enable FIPS mode. + */ + function setFips(bool: boolean): void; + /** + * ```js + * const { + * getHashes + * } = await import('crypto'); + * + * console.log(getHashes()); // ['DSA', 'DSA-SHA', 'DSA-SHA1', ...] + * ``` + * @since v0.9.3 + * @return An array of the names of the supported hash algorithms, such as `'RSA-SHA256'`. Hash algorithms are also called "digest" algorithms. + */ + function getHashes(): string[]; + /** + * The `ECDH` class is a utility for creating Elliptic Curve Diffie-Hellman (ECDH) + * key exchanges. + * + * Instances of the `ECDH` class can be created using the {@link createECDH} function. + * + * ```js + * import assert from 'assert'; + * + * const { + * createECDH + * } = await import('crypto'); + * + * // Generate Alice's keys... + * const alice = createECDH('secp521r1'); + * const aliceKey = alice.generateKeys(); + * + * // Generate Bob's keys... + * const bob = createECDH('secp521r1'); + * const bobKey = bob.generateKeys(); + * + * // Exchange and generate the secret... 
+ * const aliceSecret = alice.computeSecret(bobKey); + * const bobSecret = bob.computeSecret(aliceKey); + * + * assert.strictEqual(aliceSecret.toString('hex'), bobSecret.toString('hex')); + * // OK + * ``` + * @since v0.11.14 + */ + class ECDH { + private constructor(); + /** + * Converts the EC Diffie-Hellman public key specified by `key` and `curve` to the + * format specified by `format`. The `format` argument specifies point encoding + * and can be `'compressed'`, `'uncompressed'` or `'hybrid'`. The supplied key is + * interpreted using the specified `inputEncoding`, and the returned key is encoded + * using the specified `outputEncoding`. + * + * Use {@link getCurves} to obtain a list of available curve names. + * On recent OpenSSL releases, `openssl ecparam -list_curves` will also display + * the name and description of each available elliptic curve. + * + * If `format` is not specified the point will be returned in `'uncompressed'`format. + * + * If the `inputEncoding` is not provided, `key` is expected to be a `Buffer`,`TypedArray`, or `DataView`. + * + * Example (uncompressing a key): + * + * ```js + * const { + * createECDH, + * ECDH + * } = await import('crypto'); + * + * const ecdh = createECDH('secp256k1'); + * ecdh.generateKeys(); + * + * const compressedKey = ecdh.getPublicKey('hex', 'compressed'); + * + * const uncompressedKey = ECDH.convertKey(compressedKey, + * 'secp256k1', + * 'hex', + * 'hex', + * 'uncompressed'); + * + * // The converted key and the uncompressed public key should be the same + * console.log(uncompressedKey === ecdh.getPublicKey('hex')); + * ``` + * @since v10.0.0 + * @param inputEncoding The `encoding` of the `key` string. + * @param outputEncoding The `encoding` of the return value. + * @param [format='uncompressed'] + */ + static convertKey( + key: BinaryLike, + curve: string, + inputEncoding?: BinaryToTextEncoding, + outputEncoding?: 'latin1' | 'hex' | 'base64' | 'base64url', + format?: 'uncompressed' | 'compressed' | 'hybrid' + ): Buffer | string; + /** + * Generates private and public EC Diffie-Hellman key values, and returns + * the public key in the specified `format` and `encoding`. This key should be + * transferred to the other party. + * + * The `format` argument specifies point encoding and can be `'compressed'` or`'uncompressed'`. If `format` is not specified, the point will be returned in`'uncompressed'` format. + * + * If `encoding` is provided a string is returned; otherwise a `Buffer` is returned. + * @since v0.11.14 + * @param encoding The `encoding` of the return value. + * @param [format='uncompressed'] + */ + generateKeys(): Buffer; + generateKeys(encoding: BinaryToTextEncoding, format?: ECDHKeyFormat): string; + /** + * Computes the shared secret using `otherPublicKey` as the other + * party's public key and returns the computed shared secret. The supplied + * key is interpreted using specified `inputEncoding`, and the returned secret + * is encoded using the specified `outputEncoding`. + * If the `inputEncoding` is not + * provided, `otherPublicKey` is expected to be a `Buffer`, `TypedArray`, or`DataView`. + * + * If `outputEncoding` is given a string will be returned; otherwise a `Buffer` is returned. + * + * `ecdh.computeSecret` will throw an`ERR_CRYPTO_ECDH_INVALID_PUBLIC_KEY` error when `otherPublicKey`lies outside of the elliptic curve. Since `otherPublicKey` is + * usually supplied from a remote user over an insecure network, + * be sure to handle this exception accordingly. 
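+ *
+ * For example, a minimal handling sketch (here `otherPublicKey` stands in for a
+ * hex-encoded public key received from the remote party):
+ *
+ * ```js
+ * const { createECDH } = await import('crypto');
+ *
+ * const ecdh = createECDH('secp256k1');
+ * ecdh.generateKeys();
+ *
+ * let secret;
+ * try {
+ *   // `otherPublicKey` is untrusted input from the other side of the exchange.
+ *   secret = ecdh.computeSecret(otherPublicKey, 'hex', 'hex');
+ * } catch (err) {
+ *   if (err.code === 'ERR_CRYPTO_ECDH_INVALID_PUBLIC_KEY') {
+ *     // Reject the peer's key rather than crashing.
+ *   } else {
+ *     throw err;
+ *   }
+ * }
+ * ```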
+ * @since v0.11.14 + * @param inputEncoding The `encoding` of the `otherPublicKey` string. + * @param outputEncoding The `encoding` of the return value. + */ + computeSecret(otherPublicKey: NodeJS.ArrayBufferView): Buffer; + computeSecret(otherPublicKey: string, inputEncoding: BinaryToTextEncoding): Buffer; + computeSecret(otherPublicKey: NodeJS.ArrayBufferView, outputEncoding: BinaryToTextEncoding): string; + computeSecret(otherPublicKey: string, inputEncoding: BinaryToTextEncoding, outputEncoding: BinaryToTextEncoding): string; + /** + * If `encoding` is specified, a string is returned; otherwise a `Buffer` is + * returned. + * @since v0.11.14 + * @param encoding The `encoding` of the return value. + * @return The EC Diffie-Hellman in the specified `encoding`. + */ + getPrivateKey(): Buffer; + getPrivateKey(encoding: BinaryToTextEncoding): string; + /** + * The `format` argument specifies point encoding and can be `'compressed'` or`'uncompressed'`. If `format` is not specified the point will be returned in`'uncompressed'` format. + * + * If `encoding` is specified, a string is returned; otherwise a `Buffer` is + * returned. + * @since v0.11.14 + * @param [encoding] The `encoding` of the return value. + * @param [format='uncompressed'] + * @return The EC Diffie-Hellman public key in the specified `encoding` and `format`. + */ + getPublicKey(encoding?: null, format?: ECDHKeyFormat): Buffer; + getPublicKey(encoding: BinaryToTextEncoding, format?: ECDHKeyFormat): string; + /** + * Sets the EC Diffie-Hellman private key. + * If `encoding` is provided, `privateKey` is expected + * to be a string; otherwise `privateKey` is expected to be a `Buffer`,`TypedArray`, or `DataView`. + * + * If `privateKey` is not valid for the curve specified when the `ECDH` object was + * created, an error is thrown. Upon setting the private key, the associated + * public point (key) is also generated and set in the `ECDH` object. + * @since v0.11.14 + * @param encoding The `encoding` of the `privateKey` string. + */ + setPrivateKey(privateKey: NodeJS.ArrayBufferView): void; + setPrivateKey(privateKey: string, encoding: BinaryToTextEncoding): void; + } + /** + * Creates an Elliptic Curve Diffie-Hellman (`ECDH`) key exchange object using a + * predefined curve specified by the `curveName` string. Use {@link getCurves} to obtain a list of available curve names. On recent + * OpenSSL releases, `openssl ecparam -list_curves` will also display the name + * and description of each available elliptic curve. + * @since v0.11.14 + */ + function createECDH(curveName: string): ECDH; + /** + * This function is based on a constant-time algorithm. + * Returns true if `a` is equal to `b`, without leaking timing information that + * would allow an attacker to guess one of the values. This is suitable for + * comparing HMAC digests or secret values like authentication cookies or [capability urls](https://www.w3.org/TR/capability-urls/). + * + * `a` and `b` must both be `Buffer`s, `TypedArray`s, or `DataView`s, and they + * must have the same byte length. An error is thrown if `a` and `b` have + * different byte lengths. + * + * If at least one of `a` and `b` is a `TypedArray` with more than one byte per + * entry, such as `Uint16Array`, the result will be computed using the platform + * byte order. + * + * Use of `crypto.timingSafeEqual` does not guarantee that the _surrounding_ code + * is timing-safe. Care should be taken to ensure that the surrounding code does + * not introduce timing vulnerabilities. 
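+ *
+ * For example, a minimal sketch comparing HMAC digests (the `received` value is a
+ * stand-in for a signature supplied by the other party, e.g. from a request header):
+ *
+ * ```js
+ * import { Buffer } from 'buffer';
+ * const {
+ *   createHmac,
+ *   timingSafeEqual
+ * } = await import('crypto');
+ *
+ * const secret = 'a shared secret';
+ * const expected = createHmac('sha256', secret).update('the message').digest();
+ *
+ * // In practice `received` comes from the other party.
+ * const received = Buffer.from(expected);
+ *
+ * // timingSafeEqual() throws if the byte lengths differ, so check that first.
+ * const valid = received.length === expected.length &&
+ *   timingSafeEqual(received, expected);
+ * console.log(valid);
+ * // Prints: true
+ * ```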
+ * @since v6.6.0 + */ + function timingSafeEqual(a: NodeJS.ArrayBufferView, b: NodeJS.ArrayBufferView): boolean; + /** @deprecated since v10.0.0 */ + const DEFAULT_ENCODING: BufferEncoding; + type KeyType = 'rsa' | 'rsa-pss' | 'dsa' | 'ec' | 'ed25519' | 'ed448' | 'x25519' | 'x448'; + type KeyFormat = 'pem' | 'der'; + interface BasePrivateKeyEncodingOptions { + format: T; + cipher?: string | undefined; + passphrase?: string | undefined; + } + interface KeyPairKeyObjectResult { + publicKey: KeyObject; + privateKey: KeyObject; + } + interface ED25519KeyPairKeyObjectOptions {} + interface ED448KeyPairKeyObjectOptions {} + interface X25519KeyPairKeyObjectOptions {} + interface X448KeyPairKeyObjectOptions {} + interface ECKeyPairKeyObjectOptions { + /** + * Name of the curve to use + */ + namedCurve: string; + } + interface RSAKeyPairKeyObjectOptions { + /** + * Key size in bits + */ + modulusLength: number; + /** + * Public exponent + * @default 0x10001 + */ + publicExponent?: number | undefined; + } + interface RSAPSSKeyPairKeyObjectOptions { + /** + * Key size in bits + */ + modulusLength: number; + /** + * Public exponent + * @default 0x10001 + */ + publicExponent?: number | undefined; + /** + * Name of the message digest + */ + hashAlgorithm?: string; + /** + * Name of the message digest used by MGF1 + */ + mgf1HashAlgorithm?: string; + /** + * Minimal salt length in bytes + */ + saltLength?: string; + } + interface DSAKeyPairKeyObjectOptions { + /** + * Key size in bits + */ + modulusLength: number; + /** + * Size of q in bits + */ + divisorLength: number; + } + interface RSAKeyPairOptions { + /** + * Key size in bits + */ + modulusLength: number; + /** + * Public exponent + * @default 0x10001 + */ + publicExponent?: number | undefined; + publicKeyEncoding: { + type: 'pkcs1' | 'spki'; + format: PubF; + }; + privateKeyEncoding: BasePrivateKeyEncodingOptions & { + type: 'pkcs1' | 'pkcs8'; + }; + } + interface RSAPSSKeyPairOptions { + /** + * Key size in bits + */ + modulusLength: number; + /** + * Public exponent + * @default 0x10001 + */ + publicExponent?: number | undefined; + /** + * Name of the message digest + */ + hashAlgorithm?: string; + /** + * Name of the message digest used by MGF1 + */ + mgf1HashAlgorithm?: string; + /** + * Minimal salt length in bytes + */ + saltLength?: string; + publicKeyEncoding: { + type: 'spki'; + format: PubF; + }; + privateKeyEncoding: BasePrivateKeyEncodingOptions & { + type: 'pkcs8'; + }; + } + interface DSAKeyPairOptions { + /** + * Key size in bits + */ + modulusLength: number; + /** + * Size of q in bits + */ + divisorLength: number; + publicKeyEncoding: { + type: 'spki'; + format: PubF; + }; + privateKeyEncoding: BasePrivateKeyEncodingOptions & { + type: 'pkcs8'; + }; + } + interface ECKeyPairOptions { + /** + * Name of the curve to use. 
+ */ + namedCurve: string; + publicKeyEncoding: { + type: 'pkcs1' | 'spki'; + format: PubF; + }; + privateKeyEncoding: BasePrivateKeyEncodingOptions & { + type: 'sec1' | 'pkcs8'; + }; + } + interface ED25519KeyPairOptions { + publicKeyEncoding: { + type: 'spki'; + format: PubF; + }; + privateKeyEncoding: BasePrivateKeyEncodingOptions & { + type: 'pkcs8'; + }; + } + interface ED448KeyPairOptions { + publicKeyEncoding: { + type: 'spki'; + format: PubF; + }; + privateKeyEncoding: BasePrivateKeyEncodingOptions & { + type: 'pkcs8'; + }; + } + interface X25519KeyPairOptions { + publicKeyEncoding: { + type: 'spki'; + format: PubF; + }; + privateKeyEncoding: BasePrivateKeyEncodingOptions & { + type: 'pkcs8'; + }; + } + interface X448KeyPairOptions { + publicKeyEncoding: { + type: 'spki'; + format: PubF; + }; + privateKeyEncoding: BasePrivateKeyEncodingOptions & { + type: 'pkcs8'; + }; + } + interface KeyPairSyncResult { + publicKey: T1; + privateKey: T2; + } + /** + * Generates a new asymmetric key pair of the given `type`. RSA, RSA-PSS, DSA, EC, + * Ed25519, Ed448, X25519, X448, and DH are currently supported. + * + * If a `publicKeyEncoding` or `privateKeyEncoding` was specified, this function + * behaves as if `keyObject.export()` had been called on its result. Otherwise, + * the respective part of the key is returned as a `KeyObject`. + * + * When encoding public keys, it is recommended to use `'spki'`. When encoding + * private keys, it is recommended to use `'pkcs8'` with a strong passphrase, + * and to keep the passphrase confidential. + * + * ```js + * const { + * generateKeyPairSync + * } = await import('crypto'); + * + * const { + * publicKey, + * privateKey, + * } = generateKeyPairSync('rsa', { + * modulusLength: 4096, + * publicKeyEncoding: { + * type: 'spki', + * format: 'pem' + * }, + * privateKeyEncoding: { + * type: 'pkcs8', + * format: 'pem', + * cipher: 'aes-256-cbc', + * passphrase: 'top secret' + * } + * }); + * ``` + * + * The return value `{ publicKey, privateKey }` represents the generated key pair. + * When PEM encoding was selected, the respective key will be a string, otherwise + * it will be a buffer containing the data encoded as DER. + * @since v10.12.0 + * @param type Must be `'rsa'`, `'rsa-pss'`, `'dsa'`, `'ec'`, `'ed25519'`, `'ed448'`, `'x25519'`, `'x448'`, or `'dh'`. 
+ */
+ function generateKeyPairSync(type: 'rsa', options: RSAKeyPairOptions<'pem', 'pem'>): KeyPairSyncResult<string, string>;
+ function generateKeyPairSync(type: 'rsa', options: RSAKeyPairOptions<'pem', 'der'>): KeyPairSyncResult<string, Buffer>;
+ function generateKeyPairSync(type: 'rsa', options: RSAKeyPairOptions<'der', 'pem'>): KeyPairSyncResult<Buffer, string>;
+ function generateKeyPairSync(type: 'rsa', options: RSAKeyPairOptions<'der', 'der'>): KeyPairSyncResult<Buffer, Buffer>;
+ function generateKeyPairSync(type: 'rsa', options: RSAKeyPairKeyObjectOptions): KeyPairKeyObjectResult;
+ function generateKeyPairSync(type: 'rsa-pss', options: RSAPSSKeyPairOptions<'pem', 'pem'>): KeyPairSyncResult<string, string>;
+ function generateKeyPairSync(type: 'rsa-pss', options: RSAPSSKeyPairOptions<'pem', 'der'>): KeyPairSyncResult<string, Buffer>;
+ function generateKeyPairSync(type: 'rsa-pss', options: RSAPSSKeyPairOptions<'der', 'pem'>): KeyPairSyncResult<Buffer, string>;
+ function generateKeyPairSync(type: 'rsa-pss', options: RSAPSSKeyPairOptions<'der', 'der'>): KeyPairSyncResult<Buffer, Buffer>;
+ function generateKeyPairSync(type: 'rsa-pss', options: RSAPSSKeyPairKeyObjectOptions): KeyPairKeyObjectResult;
+ function generateKeyPairSync(type: 'dsa', options: DSAKeyPairOptions<'pem', 'pem'>): KeyPairSyncResult<string, string>;
+ function generateKeyPairSync(type: 'dsa', options: DSAKeyPairOptions<'pem', 'der'>): KeyPairSyncResult<string, Buffer>;
+ function generateKeyPairSync(type: 'dsa', options: DSAKeyPairOptions<'der', 'pem'>): KeyPairSyncResult<Buffer, string>;
+ function generateKeyPairSync(type: 'dsa', options: DSAKeyPairOptions<'der', 'der'>): KeyPairSyncResult<Buffer, Buffer>;
+ function generateKeyPairSync(type: 'dsa', options: DSAKeyPairKeyObjectOptions): KeyPairKeyObjectResult;
+ function generateKeyPairSync(type: 'ec', options: ECKeyPairOptions<'pem', 'pem'>): KeyPairSyncResult<string, string>;
+ function generateKeyPairSync(type: 'ec', options: ECKeyPairOptions<'pem', 'der'>): KeyPairSyncResult<string, Buffer>;
+ function generateKeyPairSync(type: 'ec', options: ECKeyPairOptions<'der', 'pem'>): KeyPairSyncResult<Buffer, string>;
+ function generateKeyPairSync(type: 'ec', options: ECKeyPairOptions<'der', 'der'>): KeyPairSyncResult<Buffer, Buffer>;
+ function generateKeyPairSync(type: 'ec', options: ECKeyPairKeyObjectOptions): KeyPairKeyObjectResult;
+ function generateKeyPairSync(type: 'ed25519', options: ED25519KeyPairOptions<'pem', 'pem'>): KeyPairSyncResult<string, string>;
+ function generateKeyPairSync(type: 'ed25519', options: ED25519KeyPairOptions<'pem', 'der'>): KeyPairSyncResult<string, Buffer>;
+ function generateKeyPairSync(type: 'ed25519', options: ED25519KeyPairOptions<'der', 'pem'>): KeyPairSyncResult<Buffer, string>;
+ function generateKeyPairSync(type: 'ed25519', options: ED25519KeyPairOptions<'der', 'der'>): KeyPairSyncResult<Buffer, Buffer>;
+ function generateKeyPairSync(type: 'ed25519', options?: ED25519KeyPairKeyObjectOptions): KeyPairKeyObjectResult;
+ function generateKeyPairSync(type: 'ed448', options: ED448KeyPairOptions<'pem', 'pem'>): KeyPairSyncResult<string, string>;
+ function generateKeyPairSync(type: 'ed448', options: ED448KeyPairOptions<'pem', 'der'>): KeyPairSyncResult<string, Buffer>;
+ function generateKeyPairSync(type: 'ed448', options: ED448KeyPairOptions<'der', 'pem'>): KeyPairSyncResult<Buffer, string>;
+ function generateKeyPairSync(type: 'ed448', options: ED448KeyPairOptions<'der', 'der'>): KeyPairSyncResult<Buffer, Buffer>;
+ function generateKeyPairSync(type: 'ed448', options?: ED448KeyPairKeyObjectOptions): KeyPairKeyObjectResult;
+ function generateKeyPairSync(type: 'x25519', options: X25519KeyPairOptions<'pem', 'pem'>): KeyPairSyncResult<string, string>;
+ function generateKeyPairSync(type: 'x25519', options: X25519KeyPairOptions<'pem', 'der'>): KeyPairSyncResult<string, Buffer>;
+ function generateKeyPairSync(type: 'x25519', options:
X25519KeyPairOptions<'der', 'pem'>): KeyPairSyncResult; + function generateKeyPairSync(type: 'x25519', options: X25519KeyPairOptions<'der', 'der'>): KeyPairSyncResult; + function generateKeyPairSync(type: 'x25519', options?: X25519KeyPairKeyObjectOptions): KeyPairKeyObjectResult; + function generateKeyPairSync(type: 'x448', options: X448KeyPairOptions<'pem', 'pem'>): KeyPairSyncResult; + function generateKeyPairSync(type: 'x448', options: X448KeyPairOptions<'pem', 'der'>): KeyPairSyncResult; + function generateKeyPairSync(type: 'x448', options: X448KeyPairOptions<'der', 'pem'>): KeyPairSyncResult; + function generateKeyPairSync(type: 'x448', options: X448KeyPairOptions<'der', 'der'>): KeyPairSyncResult; + function generateKeyPairSync(type: 'x448', options?: X448KeyPairKeyObjectOptions): KeyPairKeyObjectResult; + /** + * Generates a new asymmetric key pair of the given `type`. RSA, RSA-PSS, DSA, EC, + * Ed25519, Ed448, X25519, X448, and DH are currently supported. + * + * If a `publicKeyEncoding` or `privateKeyEncoding` was specified, this function + * behaves as if `keyObject.export()` had been called on its result. Otherwise, + * the respective part of the key is returned as a `KeyObject`. + * + * It is recommended to encode public keys as `'spki'` and private keys as`'pkcs8'` with encryption for long-term storage: + * + * ```js + * const { + * generateKeyPair + * } = await import('crypto'); + * + * generateKeyPair('rsa', { + * modulusLength: 4096, + * publicKeyEncoding: { + * type: 'spki', + * format: 'pem' + * }, + * privateKeyEncoding: { + * type: 'pkcs8', + * format: 'pem', + * cipher: 'aes-256-cbc', + * passphrase: 'top secret' + * } + * }, (err, publicKey, privateKey) => { + * // Handle errors and use the generated key pair. + * }); + * ``` + * + * On completion, `callback` will be called with `err` set to `undefined` and`publicKey` / `privateKey` representing the generated key pair. + * + * If this method is invoked as its `util.promisify()` ed version, it returns + * a `Promise` for an `Object` with `publicKey` and `privateKey` properties. + * @since v10.12.0 + * @param type Must be `'rsa'`, `'rsa-pss'`, `'dsa'`, `'ec'`, `'ed25519'`, `'ed448'`, `'x25519'`, `'x448'`, or `'dh'`. 
+ */ + function generateKeyPair(type: 'rsa', options: RSAKeyPairOptions<'pem', 'pem'>, callback: (err: Error | null, publicKey: string, privateKey: string) => void): void; + function generateKeyPair(type: 'rsa', options: RSAKeyPairOptions<'pem', 'der'>, callback: (err: Error | null, publicKey: string, privateKey: Buffer) => void): void; + function generateKeyPair(type: 'rsa', options: RSAKeyPairOptions<'der', 'pem'>, callback: (err: Error | null, publicKey: Buffer, privateKey: string) => void): void; + function generateKeyPair(type: 'rsa', options: RSAKeyPairOptions<'der', 'der'>, callback: (err: Error | null, publicKey: Buffer, privateKey: Buffer) => void): void; + function generateKeyPair(type: 'rsa', options: RSAKeyPairKeyObjectOptions, callback: (err: Error | null, publicKey: KeyObject, privateKey: KeyObject) => void): void; + function generateKeyPair(type: 'rsa-pss', options: RSAPSSKeyPairOptions<'pem', 'pem'>, callback: (err: Error | null, publicKey: string, privateKey: string) => void): void; + function generateKeyPair(type: 'rsa-pss', options: RSAPSSKeyPairOptions<'pem', 'der'>, callback: (err: Error | null, publicKey: string, privateKey: Buffer) => void): void; + function generateKeyPair(type: 'rsa-pss', options: RSAPSSKeyPairOptions<'der', 'pem'>, callback: (err: Error | null, publicKey: Buffer, privateKey: string) => void): void; + function generateKeyPair(type: 'rsa-pss', options: RSAPSSKeyPairOptions<'der', 'der'>, callback: (err: Error | null, publicKey: Buffer, privateKey: Buffer) => void): void; + function generateKeyPair(type: 'rsa-pss', options: RSAPSSKeyPairKeyObjectOptions, callback: (err: Error | null, publicKey: KeyObject, privateKey: KeyObject) => void): void; + function generateKeyPair(type: 'dsa', options: DSAKeyPairOptions<'pem', 'pem'>, callback: (err: Error | null, publicKey: string, privateKey: string) => void): void; + function generateKeyPair(type: 'dsa', options: DSAKeyPairOptions<'pem', 'der'>, callback: (err: Error | null, publicKey: string, privateKey: Buffer) => void): void; + function generateKeyPair(type: 'dsa', options: DSAKeyPairOptions<'der', 'pem'>, callback: (err: Error | null, publicKey: Buffer, privateKey: string) => void): void; + function generateKeyPair(type: 'dsa', options: DSAKeyPairOptions<'der', 'der'>, callback: (err: Error | null, publicKey: Buffer, privateKey: Buffer) => void): void; + function generateKeyPair(type: 'dsa', options: DSAKeyPairKeyObjectOptions, callback: (err: Error | null, publicKey: KeyObject, privateKey: KeyObject) => void): void; + function generateKeyPair(type: 'ec', options: ECKeyPairOptions<'pem', 'pem'>, callback: (err: Error | null, publicKey: string, privateKey: string) => void): void; + function generateKeyPair(type: 'ec', options: ECKeyPairOptions<'pem', 'der'>, callback: (err: Error | null, publicKey: string, privateKey: Buffer) => void): void; + function generateKeyPair(type: 'ec', options: ECKeyPairOptions<'der', 'pem'>, callback: (err: Error | null, publicKey: Buffer, privateKey: string) => void): void; + function generateKeyPair(type: 'ec', options: ECKeyPairOptions<'der', 'der'>, callback: (err: Error | null, publicKey: Buffer, privateKey: Buffer) => void): void; + function generateKeyPair(type: 'ec', options: ECKeyPairKeyObjectOptions, callback: (err: Error | null, publicKey: KeyObject, privateKey: KeyObject) => void): void; + function generateKeyPair(type: 'ed25519', options: ED25519KeyPairOptions<'pem', 'pem'>, callback: (err: Error | null, publicKey: string, privateKey: string) => void): void; + 
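As a hedged illustration of the callback overloads declared above (not part of the patched file itself): when no publicKeyEncoding/privateKeyEncoding is given, the callback receives KeyObject instances rather than PEM strings or DER buffers. The curve name and logged properties below are example choices only.

import { generateKeyPair } from 'node:crypto';

// Sketch only: ECKeyPairKeyObjectOptions form, so the callback gets KeyObjects.
generateKeyPair('ec', { namedCurve: 'P-256' }, (err, publicKey, privateKey) => {
  if (err) throw err;
  console.log(publicKey.asymmetricKeyType); // 'ec'
  console.log(privateKey.type);             // 'private'
});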
function generateKeyPair(type: 'ed25519', options: ED25519KeyPairOptions<'pem', 'der'>, callback: (err: Error | null, publicKey: string, privateKey: Buffer) => void): void; + function generateKeyPair(type: 'ed25519', options: ED25519KeyPairOptions<'der', 'pem'>, callback: (err: Error | null, publicKey: Buffer, privateKey: string) => void): void; + function generateKeyPair(type: 'ed25519', options: ED25519KeyPairOptions<'der', 'der'>, callback: (err: Error | null, publicKey: Buffer, privateKey: Buffer) => void): void; + function generateKeyPair(type: 'ed25519', options: ED25519KeyPairKeyObjectOptions | undefined, callback: (err: Error | null, publicKey: KeyObject, privateKey: KeyObject) => void): void; + function generateKeyPair(type: 'ed448', options: ED448KeyPairOptions<'pem', 'pem'>, callback: (err: Error | null, publicKey: string, privateKey: string) => void): void; + function generateKeyPair(type: 'ed448', options: ED448KeyPairOptions<'pem', 'der'>, callback: (err: Error | null, publicKey: string, privateKey: Buffer) => void): void; + function generateKeyPair(type: 'ed448', options: ED448KeyPairOptions<'der', 'pem'>, callback: (err: Error | null, publicKey: Buffer, privateKey: string) => void): void; + function generateKeyPair(type: 'ed448', options: ED448KeyPairOptions<'der', 'der'>, callback: (err: Error | null, publicKey: Buffer, privateKey: Buffer) => void): void; + function generateKeyPair(type: 'ed448', options: ED448KeyPairKeyObjectOptions | undefined, callback: (err: Error | null, publicKey: KeyObject, privateKey: KeyObject) => void): void; + function generateKeyPair(type: 'x25519', options: X25519KeyPairOptions<'pem', 'pem'>, callback: (err: Error | null, publicKey: string, privateKey: string) => void): void; + function generateKeyPair(type: 'x25519', options: X25519KeyPairOptions<'pem', 'der'>, callback: (err: Error | null, publicKey: string, privateKey: Buffer) => void): void; + function generateKeyPair(type: 'x25519', options: X25519KeyPairOptions<'der', 'pem'>, callback: (err: Error | null, publicKey: Buffer, privateKey: string) => void): void; + function generateKeyPair(type: 'x25519', options: X25519KeyPairOptions<'der', 'der'>, callback: (err: Error | null, publicKey: Buffer, privateKey: Buffer) => void): void; + function generateKeyPair(type: 'x25519', options: X25519KeyPairKeyObjectOptions | undefined, callback: (err: Error | null, publicKey: KeyObject, privateKey: KeyObject) => void): void; + function generateKeyPair(type: 'x448', options: X448KeyPairOptions<'pem', 'pem'>, callback: (err: Error | null, publicKey: string, privateKey: string) => void): void; + function generateKeyPair(type: 'x448', options: X448KeyPairOptions<'pem', 'der'>, callback: (err: Error | null, publicKey: string, privateKey: Buffer) => void): void; + function generateKeyPair(type: 'x448', options: X448KeyPairOptions<'der', 'pem'>, callback: (err: Error | null, publicKey: Buffer, privateKey: string) => void): void; + function generateKeyPair(type: 'x448', options: X448KeyPairOptions<'der', 'der'>, callback: (err: Error | null, publicKey: Buffer, privateKey: Buffer) => void): void; + function generateKeyPair(type: 'x448', options: X448KeyPairKeyObjectOptions | undefined, callback: (err: Error | null, publicKey: KeyObject, privateKey: KeyObject) => void): void; + namespace generateKeyPair { + function __promisify__( + type: 'rsa', + options: RSAKeyPairOptions<'pem', 'pem'> + ): Promise<{ + publicKey: string; + privateKey: string; + }>; + function __promisify__( + type: 'rsa', + options: 
RSAKeyPairOptions<'pem', 'der'> + ): Promise<{ + publicKey: string; + privateKey: Buffer; + }>; + function __promisify__( + type: 'rsa', + options: RSAKeyPairOptions<'der', 'pem'> + ): Promise<{ + publicKey: Buffer; + privateKey: string; + }>; + function __promisify__( + type: 'rsa', + options: RSAKeyPairOptions<'der', 'der'> + ): Promise<{ + publicKey: Buffer; + privateKey: Buffer; + }>; + function __promisify__(type: 'rsa', options: RSAKeyPairKeyObjectOptions): Promise; + function __promisify__( + type: 'rsa-pss', + options: RSAPSSKeyPairOptions<'pem', 'pem'> + ): Promise<{ + publicKey: string; + privateKey: string; + }>; + function __promisify__( + type: 'rsa-pss', + options: RSAPSSKeyPairOptions<'pem', 'der'> + ): Promise<{ + publicKey: string; + privateKey: Buffer; + }>; + function __promisify__( + type: 'rsa-pss', + options: RSAPSSKeyPairOptions<'der', 'pem'> + ): Promise<{ + publicKey: Buffer; + privateKey: string; + }>; + function __promisify__( + type: 'rsa-pss', + options: RSAPSSKeyPairOptions<'der', 'der'> + ): Promise<{ + publicKey: Buffer; + privateKey: Buffer; + }>; + function __promisify__(type: 'rsa-pss', options: RSAPSSKeyPairKeyObjectOptions): Promise; + function __promisify__( + type: 'dsa', + options: DSAKeyPairOptions<'pem', 'pem'> + ): Promise<{ + publicKey: string; + privateKey: string; + }>; + function __promisify__( + type: 'dsa', + options: DSAKeyPairOptions<'pem', 'der'> + ): Promise<{ + publicKey: string; + privateKey: Buffer; + }>; + function __promisify__( + type: 'dsa', + options: DSAKeyPairOptions<'der', 'pem'> + ): Promise<{ + publicKey: Buffer; + privateKey: string; + }>; + function __promisify__( + type: 'dsa', + options: DSAKeyPairOptions<'der', 'der'> + ): Promise<{ + publicKey: Buffer; + privateKey: Buffer; + }>; + function __promisify__(type: 'dsa', options: DSAKeyPairKeyObjectOptions): Promise; + function __promisify__( + type: 'ec', + options: ECKeyPairOptions<'pem', 'pem'> + ): Promise<{ + publicKey: string; + privateKey: string; + }>; + function __promisify__( + type: 'ec', + options: ECKeyPairOptions<'pem', 'der'> + ): Promise<{ + publicKey: string; + privateKey: Buffer; + }>; + function __promisify__( + type: 'ec', + options: ECKeyPairOptions<'der', 'pem'> + ): Promise<{ + publicKey: Buffer; + privateKey: string; + }>; + function __promisify__( + type: 'ec', + options: ECKeyPairOptions<'der', 'der'> + ): Promise<{ + publicKey: Buffer; + privateKey: Buffer; + }>; + function __promisify__(type: 'ec', options: ECKeyPairKeyObjectOptions): Promise; + function __promisify__( + type: 'ed25519', + options: ED25519KeyPairOptions<'pem', 'pem'> + ): Promise<{ + publicKey: string; + privateKey: string; + }>; + function __promisify__( + type: 'ed25519', + options: ED25519KeyPairOptions<'pem', 'der'> + ): Promise<{ + publicKey: string; + privateKey: Buffer; + }>; + function __promisify__( + type: 'ed25519', + options: ED25519KeyPairOptions<'der', 'pem'> + ): Promise<{ + publicKey: Buffer; + privateKey: string; + }>; + function __promisify__( + type: 'ed25519', + options: ED25519KeyPairOptions<'der', 'der'> + ): Promise<{ + publicKey: Buffer; + privateKey: Buffer; + }>; + function __promisify__(type: 'ed25519', options?: ED25519KeyPairKeyObjectOptions): Promise; + function __promisify__( + type: 'ed448', + options: ED448KeyPairOptions<'pem', 'pem'> + ): Promise<{ + publicKey: string; + privateKey: string; + }>; + function __promisify__( + type: 'ed448', + options: ED448KeyPairOptions<'pem', 'der'> + ): Promise<{ + publicKey: string; + privateKey: Buffer; + }>; 
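The __promisify__ overloads in this namespace are what give util.promisify(generateKeyPair) its typed result. A minimal sketch of that usage, assuming an RSA key with example parameters (modulus length and encodings are illustrative, not mandated by the declarations):

import { generateKeyPair } from 'node:crypto';
import { promisify } from 'node:util';

// Sketch only: promisified form resolves with { publicKey, privateKey }.
const generateKeyPairAsync = promisify(generateKeyPair);

async function makeRsaPem() {
  const { publicKey, privateKey } = await generateKeyPairAsync('rsa', {
    modulusLength: 2048,
    publicKeyEncoding: { type: 'spki', format: 'pem' },
    privateKeyEncoding: { type: 'pkcs8', format: 'pem' },
  });
  console.log(typeof publicKey, typeof privateKey); // 'string' 'string'
}

makeRsaPem().catch(console.error);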
+ function __promisify__( + type: 'ed448', + options: ED448KeyPairOptions<'der', 'pem'> + ): Promise<{ + publicKey: Buffer; + privateKey: string; + }>; + function __promisify__( + type: 'ed448', + options: ED448KeyPairOptions<'der', 'der'> + ): Promise<{ + publicKey: Buffer; + privateKey: Buffer; + }>; + function __promisify__(type: 'ed448', options?: ED448KeyPairKeyObjectOptions): Promise; + function __promisify__( + type: 'x25519', + options: X25519KeyPairOptions<'pem', 'pem'> + ): Promise<{ + publicKey: string; + privateKey: string; + }>; + function __promisify__( + type: 'x25519', + options: X25519KeyPairOptions<'pem', 'der'> + ): Promise<{ + publicKey: string; + privateKey: Buffer; + }>; + function __promisify__( + type: 'x25519', + options: X25519KeyPairOptions<'der', 'pem'> + ): Promise<{ + publicKey: Buffer; + privateKey: string; + }>; + function __promisify__( + type: 'x25519', + options: X25519KeyPairOptions<'der', 'der'> + ): Promise<{ + publicKey: Buffer; + privateKey: Buffer; + }>; + function __promisify__(type: 'x25519', options?: X25519KeyPairKeyObjectOptions): Promise; + function __promisify__( + type: 'x448', + options: X448KeyPairOptions<'pem', 'pem'> + ): Promise<{ + publicKey: string; + privateKey: string; + }>; + function __promisify__( + type: 'x448', + options: X448KeyPairOptions<'pem', 'der'> + ): Promise<{ + publicKey: string; + privateKey: Buffer; + }>; + function __promisify__( + type: 'x448', + options: X448KeyPairOptions<'der', 'pem'> + ): Promise<{ + publicKey: Buffer; + privateKey: string; + }>; + function __promisify__( + type: 'x448', + options: X448KeyPairOptions<'der', 'der'> + ): Promise<{ + publicKey: Buffer; + privateKey: Buffer; + }>; + function __promisify__(type: 'x448', options?: X448KeyPairKeyObjectOptions): Promise; + } + /** + * Calculates and returns the signature for `data` using the given private key and + * algorithm. If `algorithm` is `null` or `undefined`, then the algorithm is + * dependent upon the key type (especially Ed25519 and Ed448). + * + * If `key` is not a `KeyObject`, this function behaves as if `key` had been + * passed to {@link createPrivateKey}. If it is an object, the following + * additional properties can be passed: + * + * If the `callback` function is provided this function uses libuv's threadpool. + * @since v12.0.0 + */ + function sign(algorithm: string | null | undefined, data: NodeJS.ArrayBufferView, key: KeyLike | SignKeyObjectInput | SignPrivateKeyInput): Buffer; + function sign( + algorithm: string | null | undefined, + data: NodeJS.ArrayBufferView, + key: KeyLike | SignKeyObjectInput | SignPrivateKeyInput, + callback: (error: Error | null, data: Buffer) => void + ): void; + /** + * Verifies the given signature for `data` using the given key and algorithm. If`algorithm` is `null` or `undefined`, then the algorithm is dependent upon the + * key type (especially Ed25519 and Ed448). + * + * If `key` is not a `KeyObject`, this function behaves as if `key` had been + * passed to {@link createPublicKey}. If it is an object, the following + * additional properties can be passed: + * + * The `signature` argument is the previously calculated signature for the `data`. + * + * Because public keys can be derived from private keys, a private key or a public + * key may be passed for `key`. + * + * If the `callback` function is provided this function uses libuv's threadpool. 
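A brief sketch of the one-shot sign()/verify() helpers described here, assuming an Ed25519 key pair where the algorithm argument is null as the documentation notes; the message content is arbitrary example data.

import { generateKeyPairSync, sign, verify } from 'node:crypto';

// Sketch only: Ed25519 keys infer the digest, so algorithm is passed as null.
const { publicKey, privateKey } = generateKeyPairSync('ed25519');
const message = Buffer.from('example payload');

const signature = sign(null, message, privateKey);
console.log(verify(null, message, publicKey, signature)); // true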
+ * @since v12.0.0 + */ + function verify(algorithm: string | null | undefined, data: NodeJS.ArrayBufferView, key: KeyLike | VerifyKeyObjectInput | VerifyPublicKeyInput, signature: NodeJS.ArrayBufferView): boolean; + function verify( + algorithm: string | null | undefined, + data: NodeJS.ArrayBufferView, + key: KeyLike | VerifyKeyObjectInput | VerifyPublicKeyInput, + signature: NodeJS.ArrayBufferView, + callback: (error: Error | null, result: boolean) => void + ): void; + /** + * Computes the Diffie-Hellman secret based on a `privateKey` and a `publicKey`. + * Both keys must have the same `asymmetricKeyType`, which must be one of `'dh'`(for Diffie-Hellman), `'ec'` (for ECDH), `'x448'`, or `'x25519'` (for ECDH-ES). + * @since v13.9.0, v12.17.0 + */ + function diffieHellman(options: { privateKey: KeyObject; publicKey: KeyObject }): Buffer; + type CipherMode = 'cbc' | 'ccm' | 'cfb' | 'ctr' | 'ecb' | 'gcm' | 'ocb' | 'ofb' | 'stream' | 'wrap' | 'xts'; + interface CipherInfoOptions { + /** + * A test key length. + */ + keyLength?: number | undefined; + /** + * A test IV length. + */ + ivLength?: number | undefined; + } + interface CipherInfo { + /** + * The name of the cipher. + */ + name: string; + /** + * The nid of the cipher. + */ + nid: number; + /** + * The block size of the cipher in bytes. + * This property is omitted when mode is 'stream'. + */ + blockSize?: number | undefined; + /** + * The expected or default initialization vector length in bytes. + * This property is omitted if the cipher does not use an initialization vector. + */ + ivLength?: number | undefined; + /** + * The expected or default key length in bytes. + */ + keyLength: number; + /** + * The cipher mode. + */ + mode: CipherMode; + } + /** + * Returns information about a given cipher. + * + * Some ciphers accept variable length keys and initialization vectors. By default, + * the `crypto.getCipherInfo()` method will return the default values for these + * ciphers. To test if a given key length or iv length is acceptable for given + * cipher, use the `keyLength` and `ivLength` options. If the given values are + * unacceptable, `undefined` will be returned. + * @since v15.0.0 + * @param nameOrNid The name or nid of the cipher to query. + */ + function getCipherInfo(nameOrNid: string | number, options?: CipherInfoOptions): CipherInfo | undefined; + /** + * HKDF is a simple key derivation function defined in RFC 5869\. The given `ikm`,`salt` and `info` are used with the `digest` to derive a key of `keylen` bytes. + * + * The supplied `callback` function is called with two arguments: `err` and`derivedKey`. If an errors occurs while deriving the key, `err` will be set; + * otherwise `err` will be `null`. The successfully generated `derivedKey` will + * be passed to the callback as an [ArrayBuffer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer). An error will be thrown if any + * of the input arguments specify invalid values or types. + * + * ```js + * import { Buffer } from 'buffer'; + * const { + * hkdf + * } = await import('crypto'); + * + * hkdf('sha512', 'key', 'salt', 'info', 64, (err, derivedKey) => { + * if (err) throw err; + * console.log(Buffer.from(derivedKey).toString('hex')); // '24156e2...5391653' + * }); + * ``` + * @since v15.0.0 + * @param digest The digest algorithm to use. + * @param ikm The input keying material. It must be at least one byte in length. + * @param salt The salt value. Must be provided but can be zero-length. 
+ * @param info Additional info value. Must be provided but can be zero-length, and cannot be more than 1024 bytes. + * @param keylen The length of the key to generate. Must be greater than 0. The maximum allowable value is `255` times the number of bytes produced by the selected digest function (e.g. `sha512` + * generates 64-byte hashes, making the maximum HKDF output 16320 bytes). + */ + function hkdf(digest: string, irm: BinaryLike | KeyObject, salt: BinaryLike, info: BinaryLike, keylen: number, callback: (err: Error | null, derivedKey: ArrayBuffer) => void): void; + /** + * Provides a synchronous HKDF key derivation function as defined in RFC 5869\. The + * given `ikm`, `salt` and `info` are used with the `digest` to derive a key of`keylen` bytes. + * + * The successfully generated `derivedKey` will be returned as an [ArrayBuffer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer). + * + * An error will be thrown if any of the input arguments specify invalid values or + * types, or if the derived key cannot be generated. + * + * ```js + * import { Buffer } from 'buffer'; + * const { + * hkdfSync + * } = await import('crypto'); + * + * const derivedKey = hkdfSync('sha512', 'key', 'salt', 'info', 64); + * console.log(Buffer.from(derivedKey).toString('hex')); // '24156e2...5391653' + * ``` + * @since v15.0.0 + * @param digest The digest algorithm to use. + * @param ikm The input keying material. It must be at least one byte in length. + * @param salt The salt value. Must be provided but can be zero-length. + * @param info Additional info value. Must be provided but can be zero-length, and cannot be more than 1024 bytes. + * @param keylen The length of the key to generate. Must be greater than 0. The maximum allowable value is `255` times the number of bytes produced by the selected digest function (e.g. `sha512` + * generates 64-byte hashes, making the maximum HKDF output 16320 bytes). + */ + function hkdfSync(digest: string, ikm: BinaryLike | KeyObject, salt: BinaryLike, info: BinaryLike, keylen: number): ArrayBuffer; + interface SecureHeapUsage { + /** + * The total allocated secure heap size as specified using the `--secure-heap=n` command-line flag. + */ + total: number; + /** + * The minimum allocation from the secure heap as specified using the `--secure-heap-min` command-line flag. + */ + min: number; + /** + * The total number of bytes currently allocated from the secure heap. + */ + used: number; + /** + * The calculated ratio of `used` to `total` allocated bytes. + */ + utilization: number; + } + /** + * @since v15.6.0 + */ + function secureHeapUsed(): SecureHeapUsage; + interface RandomUUIDOptions { + /** + * By default, to improve performance, + * Node.js will pre-emptively generate and persistently cache enough + * random data to generate up to 128 random UUIDs. To generate a UUID + * without using the cache, set `disableEntropyCache` to `true`. + * + * @default `false` + */ + disableEntropyCache?: boolean | undefined; + } + /** + * Generates a random [RFC 4122](https://www.rfc-editor.org/rfc/rfc4122.txt) version 4 UUID. The UUID is generated using a + * cryptographic pseudorandom number generator. 
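As a hedged sketch of the synchronous HKDF helper declared above (hkdfSync), with purely illustrative key, salt, and info values:

import { hkdfSync } from 'node:crypto';

// Sketch only: derive 32 bytes; the returned value is an ArrayBuffer.
const derived = hkdfSync('sha256', 'input-keying-material', 'some-salt', 'app-info', 32);
console.log(Buffer.from(derived).toString('hex').length); // 64 hex characters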
+ * @since v15.6.0, v14.17.0 + */ + function randomUUID(options?: RandomUUIDOptions): string; + interface X509CheckOptions { + /** + * @default 'always' + */ + subject: 'always' | 'never'; + /** + * @default true + */ + wildcards: boolean; + /** + * @default true + */ + partialWildcards: boolean; + /** + * @default false + */ + multiLabelWildcards: boolean; + /** + * @default false + */ + singleLabelSubdomains: boolean; + } + /** + * Encapsulates an X509 certificate and provides read-only access to + * its information. + * + * ```js + * const { X509Certificate } = await import('crypto'); + * + * const x509 = new X509Certificate('{... pem encoded cert ...}'); + * + * console.log(x509.subject); + * ``` + * @since v15.6.0 + */ + class X509Certificate { + /** + * Will be \`true\` if this is a Certificate Authority (CA) certificate. + * @since v15.6.0 + */ + readonly ca: boolean; + /** + * The SHA-1 fingerprint of this certificate. + * + * Because SHA-1 is cryptographically broken and because the security of SHA-1 is + * significantly worse than that of algorithms that are commonly used to sign + * certificates, consider using `x509.fingerprint256` instead. + * @since v15.6.0 + */ + readonly fingerprint: string; + /** + * The SHA-256 fingerprint of this certificate. + * @since v15.6.0 + */ + readonly fingerprint256: string; + /** + * The SHA-512 fingerprint of this certificate. + * @since v16.14.0 + */ + readonly fingerprint512: string; + /** + * The complete subject of this certificate. + * @since v15.6.0 + */ + readonly subject: string; + /** + * The subject alternative name specified for this certificate or `undefined` + * if not available. + * @since v15.6.0 + */ + readonly subjectAltName: string | undefined; + /** + * The information access content of this certificate or `undefined` if not + * available. + * @since v15.6.0 + */ + readonly infoAccess: string | undefined; + /** + * An array detailing the key usages for this certificate. + * @since v15.6.0 + */ + readonly keyUsage: string[]; + /** + * The issuer identification included in this certificate. + * @since v15.6.0 + */ + readonly issuer: string; + /** + * The issuer certificate or `undefined` if the issuer certificate is not + * available. + * @since v15.9.0 + */ + readonly issuerCertificate?: X509Certificate | undefined; + /** + * The public key `KeyObject` for this certificate. + * @since v15.6.0 + */ + readonly publicKey: KeyObject; + /** + * A `Buffer` containing the DER encoding of this certificate. + * @since v15.6.0 + */ + readonly raw: Buffer; + /** + * The serial number of this certificate. + * + * Serial numbers are assigned by certificate authorities and do not uniquely + * identify certificates. Consider using `x509.fingerprint256` as a unique + * identifier instead. + * @since v15.6.0 + */ + readonly serialNumber: string; + /** + * The date/time from which this certificate is considered valid. + * @since v15.6.0 + */ + readonly validFrom: string; + /** + * The date/time until which this certificate is considered valid. + * @since v15.6.0 + */ + readonly validTo: string; + constructor(buffer: BinaryLike); + /** + * Checks whether the certificate matches the given email address. + * + * If the `'subject'` option is undefined or set to `'default'`, the certificate + * subject is only considered if the subject alternative name extension either does + * not exist or does not contain any email addresses. 
+ * + * If the `'subject'` option is set to `'always'` and if the subject alternative + * name extension either does not exist or does not contain a matching email + * address, the certificate subject is considered. + * + * If the `'subject'` option is set to `'never'`, the certificate subject is never + * considered, even if the certificate contains no subject alternative names. + * @since v15.6.0 + * @return Returns `email` if the certificate matches, `undefined` if it does not. + */ + checkEmail(email: string, options?: Pick): string | undefined; + /** + * Checks whether the certificate matches the given host name. + * + * If the certificate matches the given host name, the matching subject name is + * returned. The returned name might be an exact match (e.g., `foo.example.com`) + * or it might contain wildcards (e.g., `*.example.com`). Because host name + * comparisons are case-insensitive, the returned subject name might also differ + * from the given `name` in capitalization. + * + * If the `'subject'` option is undefined or set to `'default'`, the certificate + * subject is only considered if the subject alternative name extension either does + * not exist or does not contain any DNS names. This behavior is consistent with [RFC 2818](https://www.rfc-editor.org/rfc/rfc2818.txt) ("HTTP Over TLS"). + * + * If the `'subject'` option is set to `'always'` and if the subject alternative + * name extension either does not exist or does not contain a matching DNS name, + * the certificate subject is considered. + * + * If the `'subject'` option is set to `'never'`, the certificate subject is never + * considered, even if the certificate contains no subject alternative names. + * @since v15.6.0 + * @return Returns a subject name that matches `name`, or `undefined` if no subject name matches `name`. + */ + checkHost(name: string, options?: X509CheckOptions): string | undefined; + /** + * Checks whether the certificate matches the given IP address (IPv4 or IPv6). + * + * Only [RFC 5280](https://www.rfc-editor.org/rfc/rfc5280.txt) `iPAddress` subject alternative names are considered, and they + * must match the given `ip` address exactly. Other subject alternative names as + * well as the subject field of the certificate are ignored. + * @since v15.6.0 + * @return Returns `ip` if the certificate matches, `undefined` if it does not. + */ + checkIP(ip: string): string | undefined; + /** + * Checks whether this certificate was issued by the given `otherCert`. + * @since v15.6.0 + */ + checkIssued(otherCert: X509Certificate): boolean; + /** + * Checks whether the public key for this certificate is consistent with + * the given private key. + * @since v15.6.0 + * @param privateKey A private key. + */ + checkPrivateKey(privateKey: KeyObject): boolean; + /** + * There is no standard JSON encoding for X509 certificates. The`toJSON()` method returns a string containing the PEM encoded + * certificate. + * @since v15.6.0 + */ + toJSON(): string; + /** + * Returns information about this certificate using the legacy `certificate object` encoding. + * @since v15.6.0 + */ + toLegacyObject(): PeerCertificate; + /** + * Returns the PEM-encoded certificate. + * @since v15.6.0 + */ + toString(): string; + /** + * Verifies that this certificate was signed by the given public key. + * Does not perform any other validation checks on the certificate. + * @since v15.6.0 + * @param publicKey A public key. 
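A minimal sketch of reading and checking a certificate with the X509Certificate class declared here; 'cert.pem' and 'example.com' are hypothetical placeholders, and any PEM- or DER-encoded certificate can be passed to the constructor.

import { X509Certificate } from 'node:crypto';
import { readFileSync } from 'node:fs';

// Sketch only: inspect a certificate and run the host/signature checks above.
const x509 = new X509Certificate(readFileSync('cert.pem'));

console.log(x509.subject);
console.log(x509.fingerprint256);
console.log(x509.checkHost('example.com')); // matching subject name or undefined
console.log(x509.verify(x509.publicKey));   // true only for a self-signed cert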
+ */ + verify(publicKey: KeyObject): boolean; + } + type LargeNumberLike = NodeJS.ArrayBufferView | SharedArrayBuffer | ArrayBuffer | bigint; + interface GeneratePrimeOptions { + add?: LargeNumberLike | undefined; + rem?: LargeNumberLike | undefined; + /** + * @default false + */ + safe?: boolean | undefined; + bigint?: boolean | undefined; + } + interface GeneratePrimeOptionsBigInt extends GeneratePrimeOptions { + bigint: true; + } + interface GeneratePrimeOptionsArrayBuffer extends GeneratePrimeOptions { + bigint?: false | undefined; + } + /** + * Generates a pseudorandom prime of `size` bits. + * + * If `options.safe` is `true`, the prime will be a safe prime -- that is,`(prime - 1) / 2` will also be a prime. + * + * The `options.add` and `options.rem` parameters can be used to enforce additional + * requirements, e.g., for Diffie-Hellman: + * + * * If `options.add` and `options.rem` are both set, the prime will satisfy the + * condition that `prime % add = rem`. + * * If only `options.add` is set and `options.safe` is not `true`, the prime will + * satisfy the condition that `prime % add = 1`. + * * If only `options.add` is set and `options.safe` is set to `true`, the prime + * will instead satisfy the condition that `prime % add = 3`. This is necessary + * because `prime % add = 1` for `options.add > 2` would contradict the condition + * enforced by `options.safe`. + * * `options.rem` is ignored if `options.add` is not given. + * + * Both `options.add` and `options.rem` must be encoded as big-endian sequences + * if given as an `ArrayBuffer`, `SharedArrayBuffer`, `TypedArray`, `Buffer`, or`DataView`. + * + * By default, the prime is encoded as a big-endian sequence of octets + * in an [ArrayBuffer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer). If the `bigint` option is `true`, then a + * [bigint](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/BigInt) is provided. + * @since v15.8.0 + * @param size The size (in bits) of the prime to generate. + */ + function generatePrime(size: number, callback: (err: Error | null, prime: ArrayBuffer) => void): void; + function generatePrime(size: number, options: GeneratePrimeOptionsBigInt, callback: (err: Error | null, prime: bigint) => void): void; + function generatePrime(size: number, options: GeneratePrimeOptionsArrayBuffer, callback: (err: Error | null, prime: ArrayBuffer) => void): void; + function generatePrime(size: number, options: GeneratePrimeOptions, callback: (err: Error | null, prime: ArrayBuffer | bigint) => void): void; + /** + * Generates a pseudorandom prime of `size` bits. + * + * If `options.safe` is `true`, the prime will be a safe prime -- that is,`(prime - 1) / 2` will also be a prime. + * + * The `options.add` and `options.rem` parameters can be used to enforce additional + * requirements, e.g., for Diffie-Hellman: + * + * * If `options.add` and `options.rem` are both set, the prime will satisfy the + * condition that `prime % add = rem`. + * * If only `options.add` is set and `options.safe` is not `true`, the prime will + * satisfy the condition that `prime % add = 1`. + * * If only `options.add` is set and `options.safe` is set to `true`, the prime + * will instead satisfy the condition that `prime % add = 3`. This is necessary + * because `prime % add = 1` for `options.add > 2` would contradict the condition + * enforced by `options.safe`. + * * `options.rem` is ignored if `options.add` is not given. 
+ * + * Both `options.add` and `options.rem` must be encoded as big-endian sequences + * if given as an `ArrayBuffer`, `SharedArrayBuffer`, `TypedArray`, `Buffer`, or`DataView`. + * + * By default, the prime is encoded as a big-endian sequence of octets + * in an [ArrayBuffer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer). If the `bigint` option is `true`, then a + * [bigint](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/BigInt) is provided. + * @since v15.8.0 + * @param size The size (in bits) of the prime to generate. + */ + function generatePrimeSync(size: number): ArrayBuffer; + function generatePrimeSync(size: number, options: GeneratePrimeOptionsBigInt): bigint; + function generatePrimeSync(size: number, options: GeneratePrimeOptionsArrayBuffer): ArrayBuffer; + function generatePrimeSync(size: number, options: GeneratePrimeOptions): ArrayBuffer | bigint; + interface CheckPrimeOptions { + /** + * The number of Miller-Rabin probabilistic primality iterations to perform. + * When the value is 0 (zero), a number of checks is used that yields a false positive rate of at most 2-64 for random input. + * Care must be used when selecting a number of checks. + * Refer to the OpenSSL documentation for the BN_is_prime_ex function nchecks options for more details. + * + * @default 0 + */ + checks?: number | undefined; + } + /** + * Checks the primality of the `candidate`. + * @since v15.8.0 + * @param candidate A possible prime encoded as a sequence of big endian octets of arbitrary length. + */ + function checkPrime(value: LargeNumberLike, callback: (err: Error | null, result: boolean) => void): void; + function checkPrime(value: LargeNumberLike, options: CheckPrimeOptions, callback: (err: Error | null, result: boolean) => void): void; + /** + * Checks the primality of the `candidate`. + * @since v15.8.0 + * @param candidate A possible prime encoded as a sequence of big endian octets of arbitrary length. + * @return `true` if the candidate is a prime with an error probability less than `0.25 ** options.checks`. + */ + function checkPrimeSync(candidate: LargeNumberLike, options?: CheckPrimeOptions): boolean; + /** + * Load and set the `engine` for some or all OpenSSL functions (selected by flags). + * + * `engine` could be either an id or a path to the engine's shared library. + * + * The optional `flags` argument uses `ENGINE_METHOD_ALL` by default. + * The `flags` is a bit field taking one of or a mix of the following flags (defined in `crypto.constants`): + * + * - `crypto.constants.ENGINE_METHOD_RSA` + * - `crypto.constants.ENGINE_METHOD_DSA` + * - `crypto.constants.ENGINE_METHOD_DH` + * - `crypto.constants.ENGINE_METHOD_RAND` + * - `crypto.constants.ENGINE_METHOD_EC` + * - `crypto.constants.ENGINE_METHOD_CIPHERS` + * - `crypto.constants.ENGINE_METHOD_DIGESTS` + * - `crypto.constants.ENGINE_METHOD_PKEY_METHS` + * - `crypto.constants.ENGINE_METHOD_PKEY_ASN1_METHS` + * - `crypto.constants.ENGINE_METHOD_ALL` + * - `crypto.constants.ENGINE_METHOD_NONE` + * + * The flags below are deprecated in OpenSSL-1.1.0. + * + * - `crypto.constants.ENGINE_METHOD_ECDH` + * - `crypto.constants.ENGINE_METHOD_ECDSA` + * - `crypto.constants.ENGINE_METHOD_STORE` + * @since v0.11.11 + * @param [flags=crypto.constants.ENGINE_METHOD_ALL] + */ + function setEngine(engine: string, flags?: number): void; + /** + * A convenient alias for `crypto.webcrypto.getRandomValues()`. 
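A short sketch of the synchronous prime helpers declared above (generatePrimeSync and checkPrimeSync); the 128-bit size and the number of Miller-Rabin checks are example values only.

import { generatePrimeSync, checkPrimeSync } from 'node:crypto';

// Sketch only: generate a prime as a bigint, then re-check its primality.
const prime = generatePrimeSync(128, { bigint: true });
console.log(prime);
console.log(checkPrimeSync(prime, { checks: 10 })); // true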
+ * This implementation is not compliant with the Web Crypto spec, + * to write web-compatible code use `crypto.webcrypto.getRandomValues()` instead. + * @since v17.4.0 + * @returns Returns `typedArray`. + */ + function getRandomValues(typedArray: T): T; + /** + * A convenient alias for `crypto.webcrypto.subtle`. + * @since v17.4.0 + */ + const subtle: webcrypto.SubtleCrypto; + /** + * An implementation of the Web Crypto API standard. + * + * See the {@link https://nodejs.org/docs/latest/api/webcrypto.html Web Crypto API documentation} for details. + * @since v15.0.0 + */ + const webcrypto: webcrypto.Crypto; + namespace webcrypto { + type BufferSource = ArrayBufferView | ArrayBuffer; + type KeyFormat = 'jwk' | 'pkcs8' | 'raw' | 'spki'; + type KeyType = 'private' | 'public' | 'secret'; + type KeyUsage = 'decrypt' | 'deriveBits' | 'deriveKey' | 'encrypt' | 'sign' | 'unwrapKey' | 'verify' | 'wrapKey'; + type AlgorithmIdentifier = Algorithm | string; + type HashAlgorithmIdentifier = AlgorithmIdentifier; + type NamedCurve = string; + type BigInteger = Uint8Array; + interface AesCbcParams extends Algorithm { + iv: BufferSource; + } + interface AesCtrParams extends Algorithm { + counter: BufferSource; + length: number; + } + interface AesDerivedKeyParams extends Algorithm { + length: number; + } + interface AesGcmParams extends Algorithm { + additionalData?: BufferSource; + iv: BufferSource; + tagLength?: number; + } + interface AesKeyAlgorithm extends KeyAlgorithm { + length: number; + } + interface AesKeyGenParams extends Algorithm { + length: number; + } + interface Algorithm { + name: string; + } + interface EcKeyAlgorithm extends KeyAlgorithm { + namedCurve: NamedCurve; + } + interface EcKeyGenParams extends Algorithm { + namedCurve: NamedCurve; + } + interface EcKeyImportParams extends Algorithm { + namedCurve: NamedCurve; + } + interface EcdhKeyDeriveParams extends Algorithm { + public: CryptoKey; + } + interface EcdsaParams extends Algorithm { + hash: HashAlgorithmIdentifier; + } + interface Ed448Params extends Algorithm { + context?: BufferSource; + } + interface HkdfParams extends Algorithm { + hash: HashAlgorithmIdentifier; + info: BufferSource; + salt: BufferSource; + } + interface HmacImportParams extends Algorithm { + hash: HashAlgorithmIdentifier; + length?: number; + } + interface HmacKeyAlgorithm extends KeyAlgorithm { + hash: KeyAlgorithm; + length: number; + } + interface HmacKeyGenParams extends Algorithm { + hash: HashAlgorithmIdentifier; + length?: number; + } + interface JsonWebKey { + alg?: string; + crv?: string; + d?: string; + dp?: string; + dq?: string; + e?: string; + ext?: boolean; + k?: string; + key_ops?: string[]; + kty?: string; + n?: string; + oth?: RsaOtherPrimesInfo[]; + p?: string; + q?: string; + qi?: string; + use?: string; + x?: string; + y?: string; + } + interface KeyAlgorithm { + name: string; + } + interface Pbkdf2Params extends Algorithm { + hash: HashAlgorithmIdentifier; + iterations: number; + salt: BufferSource; + } + interface RsaHashedImportParams extends Algorithm { + hash: HashAlgorithmIdentifier; + } + interface RsaHashedKeyAlgorithm extends RsaKeyAlgorithm { + hash: KeyAlgorithm; + } + interface RsaHashedKeyGenParams extends RsaKeyGenParams { + hash: HashAlgorithmIdentifier; + } + interface RsaKeyAlgorithm extends KeyAlgorithm { + modulusLength: number; + publicExponent: BigInteger; + } + interface RsaKeyGenParams extends Algorithm { + modulusLength: number; + publicExponent: BigInteger; + } + interface RsaOaepParams extends Algorithm { + 
label?: BufferSource; + } + interface RsaOtherPrimesInfo { + d?: string; + r?: string; + t?: string; + } + interface RsaPssParams extends Algorithm { + saltLength: number; + } + /** + * Calling `require('node:crypto').webcrypto` returns an instance of the `Crypto` class. + * `Crypto` is a singleton that provides access to the remainder of the crypto API. + * @since v15.0.0 + */ + interface Crypto { + /** + * Provides access to the `SubtleCrypto` API. + * @since v15.0.0 + */ + readonly subtle: SubtleCrypto; + /** + * Generates cryptographically strong random values. + * The given `typedArray` is filled with random values, and a reference to `typedArray` is returned. + * + * The given `typedArray` must be an integer-based instance of {@link NodeJS.TypedArray}, i.e. `Float32Array` and `Float64Array` are not accepted. + * + * An error will be thrown if the given `typedArray` is larger than 65,536 bytes. + * @since v15.0.0 + */ + getRandomValues>(typedArray: T): T; + /** + * Generates a random {@link https://www.rfc-editor.org/rfc/rfc4122.txt RFC 4122} version 4 UUID. + * The UUID is generated using a cryptographic pseudorandom number generator. + * @since v16.7.0 + */ + randomUUID(): string; + CryptoKey: CryptoKeyConstructor; + } + // This constructor throws ILLEGAL_CONSTRUCTOR so it should not be newable. + interface CryptoKeyConstructor { + /** Illegal constructor */ + (_: { readonly _: unique symbol }): never; // Allows instanceof to work but not be callable by the user. + readonly length: 0; + readonly name: 'CryptoKey'; + readonly prototype: CryptoKey; + } + /** + * @since v15.0.0 + */ + interface CryptoKey { + /** + * An object detailing the algorithm for which the key can be used along with additional algorithm-specific parameters. + * @since v15.0.0 + */ + readonly algorithm: KeyAlgorithm; + /** + * When `true`, the {@link CryptoKey} can be extracted using either `subtleCrypto.exportKey()` or `subtleCrypto.wrapKey()`. + * @since v15.0.0 + */ + readonly extractable: boolean; + /** + * A string identifying whether the key is a symmetric (`'secret'`) or asymmetric (`'private'` or `'public'`) key. + * @since v15.0.0 + */ + readonly type: KeyType; + /** + * An array of strings identifying the operations for which the key may be used. + * + * The possible usages are: + * - `'encrypt'` - The key may be used to encrypt data. + * - `'decrypt'` - The key may be used to decrypt data. + * - `'sign'` - The key may be used to generate digital signatures. + * - `'verify'` - The key may be used to verify digital signatures. + * - `'deriveKey'` - The key may be used to derive a new key. + * - `'deriveBits'` - The key may be used to derive bits. + * - `'wrapKey'` - The key may be used to wrap another key. + * - `'unwrapKey'` - The key may be used to unwrap another key. + * + * Valid key usages depend on the key algorithm (identified by `cryptokey.algorithm.name`). + * @since v15.0.0 + */ + readonly usages: KeyUsage[]; + } + /** + * The `CryptoKeyPair` is a simple dictionary object with `publicKey` and `privateKey` properties, representing an asymmetric key pair. + * @since v15.0.0 + */ + interface CryptoKeyPair { + /** + * A {@link CryptoKey} whose type will be `'private'`. + * @since v15.0.0 + */ + privateKey: CryptoKey; + /** + * A {@link CryptoKey} whose type will be `'public'`. 
+ * @since v15.0.0 + */ + publicKey: CryptoKey; + } + /** + * @since v15.0.0 + */ + interface SubtleCrypto { + /** + * Using the method and parameters specified in `algorithm` and the keying material provided by `key`, + * `subtle.decrypt()` attempts to decipher the provided `data`. If successful, + * the returned promise will be resolved with an `` containing the plaintext result. + * + * The algorithms currently supported include: + * + * - `'RSA-OAEP'` + * - `'AES-CTR'` + * - `'AES-CBC'` + * - `'AES-GCM'` + * @since v15.0.0 + */ + decrypt(algorithm: AlgorithmIdentifier | RsaOaepParams | AesCtrParams | AesCbcParams | AesGcmParams, key: CryptoKey, data: BufferSource): Promise; + /** + * Using the method and parameters specified in `algorithm` and the keying material provided by `baseKey`, + * `subtle.deriveBits()` attempts to generate `length` bits. + * The Node.js implementation requires that when `length` is a number it must be multiple of `8`. + * When `length` is `null` the maximum number of bits for a given algorithm is generated. This is allowed + * for the `'ECDH'`, `'X25519'`, and `'X448'` algorithms. + * If successful, the returned promise will be resolved with an `` containing the generated data. + * + * The algorithms currently supported include: + * + * - `'ECDH'` + * - `'X25519'` + * - `'X448'` + * - `'HKDF'` + * - `'PBKDF2'` + * @since v15.0.0 + */ + deriveBits(algorithm: EcdhKeyDeriveParams, baseKey: CryptoKey, length: number | null): Promise; + deriveBits(algorithm: AlgorithmIdentifier | HkdfParams | Pbkdf2Params, baseKey: CryptoKey, length: number): Promise; + /** + * Using the method and parameters specified in `algorithm`, and the keying material provided by `baseKey`, + * `subtle.deriveKey()` attempts to generate a new ` based on the method and parameters in `derivedKeyAlgorithm`. + * + * Calling `subtle.deriveKey()` is equivalent to calling `subtle.deriveBits()` to generate raw keying material, + * then passing the result into the `subtle.importKey()` method using the `deriveKeyAlgorithm`, `extractable`, and `keyUsages` parameters as input. + * + * The algorithms currently supported include: + * + * - `'ECDH'` + * - `'X25519'` + * - `'X448'` + * - `'HKDF'` + * - `'PBKDF2'` + * @param keyUsages See {@link https://nodejs.org/docs/latest/api/webcrypto.html#cryptokeyusages Key usages}. + * @since v15.0.0 + */ + deriveKey( + algorithm: AlgorithmIdentifier | EcdhKeyDeriveParams | HkdfParams | Pbkdf2Params, + baseKey: CryptoKey, + derivedKeyAlgorithm: AlgorithmIdentifier | AesDerivedKeyParams | HmacImportParams | HkdfParams | Pbkdf2Params, + extractable: boolean, + keyUsages: ReadonlyArray + ): Promise; + /** + * Using the method identified by `algorithm`, `subtle.digest()` attempts to generate a digest of `data`. + * If successful, the returned promise is resolved with an `` containing the computed digest. + * + * If `algorithm` is provided as a ``, it must be one of: + * + * - `'SHA-1'` + * - `'SHA-256'` + * - `'SHA-384'` + * - `'SHA-512'` + * + * If `algorithm` is provided as an ``, it must have a `name` property whose value is one of the above. + * @since v15.0.0 + */ + digest(algorithm: AlgorithmIdentifier, data: BufferSource): Promise; + /** + * Using the method and parameters specified by `algorithm` and the keying material provided by `key`, + * `subtle.encrypt()` attempts to encipher `data`. If successful, + * the returned promise is resolved with an `` containing the encrypted result. 
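A minimal sketch of subtle.digest() as declared in this interface, hashing an arbitrary example string with SHA-256:

import { webcrypto } from 'node:crypto';

// Sketch only: digest() resolves with an ArrayBuffer containing the hash.
async function digestExample() {
  const data = new TextEncoder().encode('example data');
  const hash = await webcrypto.subtle.digest('SHA-256', data);
  console.log(Buffer.from(hash).toString('hex'));
}

digestExample().catch(console.error);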
+ * + * The algorithms currently supported include: + * + * - `'RSA-OAEP'` + * - `'AES-CTR'` + * - `'AES-CBC'` + * - `'AES-GCM'` + * @since v15.0.0 + */ + encrypt(algorithm: AlgorithmIdentifier | RsaOaepParams | AesCtrParams | AesCbcParams | AesGcmParams, key: CryptoKey, data: BufferSource): Promise; + /** + * Exports the given key into the specified format, if supported. + * + * If the `` is not extractable, the returned promise will reject. + * + * When `format` is either `'pkcs8'` or `'spki'` and the export is successful, + * the returned promise will be resolved with an `` containing the exported key data. + * + * When `format` is `'jwk'` and the export is successful, the returned promise will be resolved with a + * JavaScript object conforming to the {@link https://tools.ietf.org/html/rfc7517 JSON Web Key} specification. + * @param format Must be one of `'raw'`, `'pkcs8'`, `'spki'`, or `'jwk'`. + * @returns `` containing ``. + * @since v15.0.0 + */ + exportKey(format: 'jwk', key: CryptoKey): Promise; + exportKey(format: Exclude, key: CryptoKey): Promise; + /** + * Using the method and parameters provided in `algorithm`, + * `subtle.generateKey()` attempts to generate new keying material. + * Depending the method used, the method may generate either a single `` or a ``. + * + * The `` (public and private key) generating algorithms supported include: + * + * - `'RSASSA-PKCS1-v1_5'` + * - `'RSA-PSS'` + * - `'RSA-OAEP'` + * - `'ECDSA'` + * - `'Ed25519'` + * - `'Ed448'` + * - `'ECDH'` + * - `'X25519'` + * - `'X448'` + * The `` (secret key) generating algorithms supported include: + * + * - `'HMAC'` + * - `'AES-CTR'` + * - `'AES-CBC'` + * - `'AES-GCM'` + * - `'AES-KW'` + * @param keyUsages See {@link https://nodejs.org/docs/latest/api/webcrypto.html#cryptokeyusages Key usages}. + * @since v15.0.0 + */ + generateKey(algorithm: RsaHashedKeyGenParams | EcKeyGenParams, extractable: boolean, keyUsages: ReadonlyArray): Promise; + generateKey(algorithm: AesKeyGenParams | HmacKeyGenParams | Pbkdf2Params, extractable: boolean, keyUsages: ReadonlyArray): Promise; + generateKey(algorithm: AlgorithmIdentifier, extractable: boolean, keyUsages: KeyUsage[]): Promise; + /** + * The `subtle.importKey()` method attempts to interpret the provided `keyData` as the given `format` + * to create a `` instance using the provided `algorithm`, `extractable`, and `keyUsages` arguments. + * If the import is successful, the returned promise will be resolved with the created ``. + * + * If importing a `'PBKDF2'` key, `extractable` must be `false`. + * @param format Must be one of `'raw'`, `'pkcs8'`, `'spki'`, or `'jwk'`. + * @param keyUsages See {@link https://nodejs.org/docs/latest/api/webcrypto.html#cryptokeyusages Key usages}. + * @since v15.0.0 + */ + importKey( + format: 'jwk', + keyData: JsonWebKey, + algorithm: AlgorithmIdentifier | RsaHashedImportParams | EcKeyImportParams | HmacImportParams | AesKeyAlgorithm, + extractable: boolean, + keyUsages: ReadonlyArray + ): Promise; + importKey( + format: Exclude, + keyData: BufferSource, + algorithm: AlgorithmIdentifier | RsaHashedImportParams | EcKeyImportParams | HmacImportParams | AesKeyAlgorithm, + extractable: boolean, + keyUsages: KeyUsage[] + ): Promise; + /** + * Using the method and parameters given by `algorithm` and the keying material provided by `key`, + * `subtle.sign()` attempts to generate a cryptographic signature of `data`. If successful, + * the returned promise is resolved with an `` containing the generated signature. 
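A hedged sketch tying together generateKey(), encrypt(), and decrypt() from the declarations above in an AES-GCM round trip; the 256-bit key length, 12-byte IV, and message are example choices.

import { webcrypto } from 'node:crypto';

// Sketch only: generate an AES-GCM key, encrypt, then decrypt the result.
async function aesGcmRoundTrip() {
  const key = await webcrypto.subtle.generateKey({ name: 'AES-GCM', length: 256 }, true, ['encrypt', 'decrypt']);
  const iv = webcrypto.getRandomValues(new Uint8Array(12));
  const plaintext = new TextEncoder().encode('secret message');

  const ciphertext = await webcrypto.subtle.encrypt({ name: 'AES-GCM', iv }, key, plaintext);
  const decrypted = await webcrypto.subtle.decrypt({ name: 'AES-GCM', iv }, key, ciphertext);
  console.log(new TextDecoder().decode(decrypted)); // 'secret message'
}

aesGcmRoundTrip().catch(console.error);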
+ * + * The algorithms currently supported include: + * + * - `'RSASSA-PKCS1-v1_5'` + * - `'RSA-PSS'` + * - `'ECDSA'` + * - `'Ed25519'` + * - `'Ed448'` + * - `'HMAC'` + * @since v15.0.0 + */ + sign(algorithm: AlgorithmIdentifier | RsaPssParams | EcdsaParams | Ed448Params, key: CryptoKey, data: BufferSource): Promise; + /** + * In cryptography, "wrapping a key" refers to exporting and then encrypting the keying material. + * The `subtle.unwrapKey()` method attempts to decrypt a wrapped key and create a `` instance. + * It is equivalent to calling `subtle.decrypt()` first on the encrypted key data (using the `wrappedKey`, `unwrapAlgo`, and `unwrappingKey` arguments as input) + * then passing the results in to the `subtle.importKey()` method using the `unwrappedKeyAlgo`, `extractable`, and `keyUsages` arguments as inputs. + * If successful, the returned promise is resolved with a `` object. + * + * The wrapping algorithms currently supported include: + * + * - `'RSA-OAEP'` + * - `'AES-CTR'` + * - `'AES-CBC'` + * - `'AES-GCM'` + * - `'AES-KW'` + * + * The unwrapped key algorithms supported include: + * + * - `'RSASSA-PKCS1-v1_5'` + * - `'RSA-PSS'` + * - `'RSA-OAEP'` + * - `'ECDSA'` + * - `'Ed25519'` + * - `'Ed448'` + * - `'ECDH'` + * - `'X25519'` + * - `'X448'` + * - `'HMAC'` + * - `'AES-CTR'` + * - `'AES-CBC'` + * - `'AES-GCM'` + * - `'AES-KW'` + * @param format Must be one of `'raw'`, `'pkcs8'`, `'spki'`, or `'jwk'`. + * @param keyUsages See {@link https://nodejs.org/docs/latest/api/webcrypto.html#cryptokeyusages Key usages}. + * @since v15.0.0 + */ + unwrapKey( + format: KeyFormat, + wrappedKey: BufferSource, + unwrappingKey: CryptoKey, + unwrapAlgorithm: AlgorithmIdentifier | RsaOaepParams | AesCtrParams | AesCbcParams | AesGcmParams, + unwrappedKeyAlgorithm: AlgorithmIdentifier | RsaHashedImportParams | EcKeyImportParams | HmacImportParams | AesKeyAlgorithm, + extractable: boolean, + keyUsages: KeyUsage[] + ): Promise; + /** + * Using the method and parameters given in `algorithm` and the keying material provided by `key`, + * `subtle.verify()` attempts to verify that `signature` is a valid cryptographic signature of `data`. + * The returned promise is resolved with either `true` or `false`. + * + * The algorithms currently supported include: + * + * - `'RSASSA-PKCS1-v1_5'` + * - `'RSA-PSS'` + * - `'ECDSA'` + * - `'Ed25519'` + * - `'Ed448'` + * - `'HMAC'` + * @since v15.0.0 + */ + verify(algorithm: AlgorithmIdentifier | RsaPssParams | EcdsaParams | Ed448Params, key: CryptoKey, signature: BufferSource, data: BufferSource): Promise; + /** + * In cryptography, "wrapping a key" refers to exporting and then encrypting the keying material. + * The `subtle.wrapKey()` method exports the keying material into the format identified by `format`, + * then encrypts it using the method and parameters specified by `wrapAlgo` and the keying material provided by `wrappingKey`. + * It is the equivalent to calling `subtle.exportKey()` using `format` and `key` as the arguments, + * then passing the result to the `subtle.encrypt()` method using `wrappingKey` and `wrapAlgo` as inputs. + * If successful, the returned promise will be resolved with an `` containing the encrypted key data. + * + * The wrapping algorithms currently supported include: + * + * - `'RSA-OAEP'` + * - `'AES-CTR'` + * - `'AES-CBC'` + * - `'AES-GCM'` + * - `'AES-KW'` + * @param format Must be one of `'raw'`, `'pkcs8'`, `'spki'`, or `'jwk'`. 
+ * @since v15.0.0 + */ + wrapKey(format: KeyFormat, key: CryptoKey, wrappingKey: CryptoKey, wrapAlgorithm: AlgorithmIdentifier | RsaOaepParams | AesCtrParams | AesCbcParams | AesGcmParams): Promise; + } + } +} +declare module 'node:crypto' { + export * from 'crypto'; +} diff --git a/node_modules/@types/node/ts4.8/dgram.d.ts b/node_modules/@types/node/ts4.8/dgram.d.ts new file mode 100755 index 0000000..247328d --- /dev/null +++ b/node_modules/@types/node/ts4.8/dgram.d.ts @@ -0,0 +1,545 @@ +/** + * The `dgram` module provides an implementation of UDP datagram sockets. + * + * ```js + * import dgram from 'dgram'; + * + * const server = dgram.createSocket('udp4'); + * + * server.on('error', (err) => { + * console.log(`server error:\n${err.stack}`); + * server.close(); + * }); + * + * server.on('message', (msg, rinfo) => { + * console.log(`server got: ${msg} from ${rinfo.address}:${rinfo.port}`); + * }); + * + * server.on('listening', () => { + * const address = server.address(); + * console.log(`server listening ${address.address}:${address.port}`); + * }); + * + * server.bind(41234); + * // Prints: server listening 0.0.0.0:41234 + * ``` + * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/dgram.js) + */ +declare module 'dgram' { + import { AddressInfo } from 'node:net'; + import * as dns from 'node:dns'; + import { EventEmitter, Abortable } from 'node:events'; + interface RemoteInfo { + address: string; + family: 'IPv4' | 'IPv6'; + port: number; + size: number; + } + interface BindOptions { + port?: number | undefined; + address?: string | undefined; + exclusive?: boolean | undefined; + fd?: number | undefined; + } + type SocketType = 'udp4' | 'udp6'; + interface SocketOptions extends Abortable { + type: SocketType; + reuseAddr?: boolean | undefined; + /** + * @default false + */ + ipv6Only?: boolean | undefined; + recvBufferSize?: number | undefined; + sendBufferSize?: number | undefined; + lookup?: ((hostname: string, options: dns.LookupOneOptions, callback: (err: NodeJS.ErrnoException | null, address: string, family: number) => void) => void) | undefined; + } + /** + * Creates a `dgram.Socket` object. Once the socket is created, calling `socket.bind()` will instruct the socket to begin listening for datagram + * messages. When `address` and `port` are not passed to `socket.bind()` the + * method will bind the socket to the "all interfaces" address on a random port + * (it does the right thing for both `udp4` and `udp6` sockets). The bound address + * and port can be retrieved using `socket.address().address` and `socket.address().port`. + * + * If the `signal` option is enabled, calling `.abort()` on the corresponding`AbortController` is similar to calling `.close()` on the socket: + * + * ```js + * const controller = new AbortController(); + * const { signal } = controller; + * const server = dgram.createSocket({ type: 'udp4', signal }); + * server.on('message', (msg, rinfo) => { + * console.log(`server got: ${msg} from ${rinfo.address}:${rinfo.port}`); + * }); + * // Later, when you want to close the server. + * controller.abort(); + * ``` + * @since v0.11.13 + * @param options Available options are: + * @param callback Attached as a listener for `'message'` events. Optional. + */ + function createSocket(type: SocketType, callback?: (msg: Buffer, rinfo: RemoteInfo) => void): Socket; + function createSocket(options: SocketOptions, callback?: (msg: Buffer, rinfo: RemoteInfo) => void): Socket; + /** + * Encapsulates the datagram functionality. 
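As a companion to the UDP server example in the createSocket() documentation above, a hedged client sketch; port 41234 and the message text mirror that example and are otherwise arbitrary.

import dgram from 'node:dgram';

// Sketch only: send one datagram to the example server, then close the socket.
const client = dgram.createSocket('udp4');
client.send('hello from client', 41234, '127.0.0.1', (err) => {
  if (err) console.error(err);
  client.close();
});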
+ * + * New instances of `dgram.Socket` are created using {@link createSocket}. + * The `new` keyword is not to be used to create `dgram.Socket` instances. + * @since v0.1.99 + */ + class Socket extends EventEmitter { + /** + * Tells the kernel to join a multicast group at the given `multicastAddress` and`multicastInterface` using the `IP_ADD_MEMBERSHIP` socket option. If the`multicastInterface` argument is not + * specified, the operating system will choose + * one interface and will add membership to it. To add membership to every + * available interface, call `addMembership` multiple times, once per interface. + * + * When called on an unbound socket, this method will implicitly bind to a random + * port, listening on all interfaces. + * + * When sharing a UDP socket across multiple `cluster` workers, the`socket.addMembership()` function must be called only once or an`EADDRINUSE` error will occur: + * + * ```js + * import cluster from 'cluster'; + * import dgram from 'dgram'; + * + * if (cluster.isPrimary) { + * cluster.fork(); // Works ok. + * cluster.fork(); // Fails with EADDRINUSE. + * } else { + * const s = dgram.createSocket('udp4'); + * s.bind(1234, () => { + * s.addMembership('224.0.0.114'); + * }); + * } + * ``` + * @since v0.6.9 + */ + addMembership(multicastAddress: string, multicastInterface?: string): void; + /** + * Returns an object containing the address information for a socket. + * For UDP sockets, this object will contain `address`, `family` and `port`properties. + * + * This method throws `EBADF` if called on an unbound socket. + * @since v0.1.99 + */ + address(): AddressInfo; + /** + * For UDP sockets, causes the `dgram.Socket` to listen for datagram + * messages on a named `port` and optional `address`. If `port` is not + * specified or is `0`, the operating system will attempt to bind to a + * random port. If `address` is not specified, the operating system will + * attempt to listen on all addresses. Once binding is complete, a`'listening'` event is emitted and the optional `callback` function is + * called. + * + * Specifying both a `'listening'` event listener and passing a`callback` to the `socket.bind()` method is not harmful but not very + * useful. + * + * A bound datagram socket keeps the Node.js process running to receive + * datagram messages. + * + * If binding fails, an `'error'` event is generated. In rare case (e.g. + * attempting to bind with a closed socket), an `Error` may be thrown. + * + * Example of a UDP server listening on port 41234: + * + * ```js + * import dgram from 'dgram'; + * + * const server = dgram.createSocket('udp4'); + * + * server.on('error', (err) => { + * console.log(`server error:\n${err.stack}`); + * server.close(); + * }); + * + * server.on('message', (msg, rinfo) => { + * console.log(`server got: ${msg} from ${rinfo.address}:${rinfo.port}`); + * }); + * + * server.on('listening', () => { + * const address = server.address(); + * console.log(`server listening ${address.address}:${address.port}`); + * }); + * + * server.bind(41234); + * // Prints: server listening 0.0.0.0:41234 + * ``` + * @since v0.1.99 + * @param callback with no parameters. Called when binding is complete. + */ + bind(port?: number, address?: string, callback?: () => void): this; + bind(port?: number, callback?: () => void): this; + bind(callback?: () => void): this; + bind(options: BindOptions, callback?: () => void): this; + /** + * Close the underlying socket and stop listening for data on it. 
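A minimal sketch of the `bind()`/`addMembership()` flow documented above, assuming an arbitrary port (41234) and an organisation-local multicast group (239.1.2.3); illustrative only, not part of the vendored typings:

```ts
import * as dgram from 'node:dgram';

const socket = dgram.createSocket({ type: 'udp4', reuseAddr: true });

socket.on('listening', () => {
  const { address, port } = socket.address();
  console.log(`listening on ${address}:${port}`);
});

socket.on('message', (msg, rinfo) => {
  console.log(`got ${msg.length} bytes from ${rinfo.address}:${rinfo.port}`);
});

// bind() also accepts a BindOptions object; the callback fires once binding completes.
socket.bind({ port: 41234, exclusive: false }, () => {
  // Join a multicast group once the socket is bound.
  socket.addMembership('239.1.2.3');
});
```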
If a callback is + * provided, it is added as a listener for the `'close'` event. + * @since v0.1.99 + * @param callback Called when the socket has been closed. + */ + close(callback?: () => void): this; + /** + * Associates the `dgram.Socket` to a remote address and port. Every + * message sent by this handle is automatically sent to that destination. Also, + * the socket will only receive messages from that remote peer. + * Trying to call `connect()` on an already connected socket will result + * in an `ERR_SOCKET_DGRAM_IS_CONNECTED` exception. If `address` is not + * provided, `'127.0.0.1'` (for `udp4` sockets) or `'::1'` (for `udp6` sockets) + * will be used by default. Once the connection is complete, a `'connect'` event + * is emitted and the optional `callback` function is called. In case of failure, + * the `callback` is called or, failing this, an `'error'` event is emitted. + * @since v12.0.0 + * @param callback Called when the connection is completed or on error. + */ + connect(port: number, address?: string, callback?: () => void): void; + connect(port: number, callback: () => void): void; + /** + * A synchronous function that disassociates a connected `dgram.Socket` from + * its remote address. Trying to call `disconnect()` on an unbound or already + * disconnected socket will result in an `ERR_SOCKET_DGRAM_NOT_CONNECTED` exception. + * @since v12.0.0 + */ + disconnect(): void; + /** + * Instructs the kernel to leave a multicast group at `multicastAddress` using the`IP_DROP_MEMBERSHIP` socket option. This method is automatically called by the + * kernel when the socket is closed or the process terminates, so most apps will + * never have reason to call this. + * + * If `multicastInterface` is not specified, the operating system will attempt to + * drop membership on all valid interfaces. + * @since v0.6.9 + */ + dropMembership(multicastAddress: string, multicastInterface?: string): void; + /** + * This method throws `ERR_SOCKET_BUFFER_SIZE` if called on an unbound socket. + * @since v8.7.0 + * @return the `SO_RCVBUF` socket receive buffer size in bytes. + */ + getRecvBufferSize(): number; + /** + * This method throws `ERR_SOCKET_BUFFER_SIZE` if called on an unbound socket. + * @since v8.7.0 + * @return the `SO_SNDBUF` socket send buffer size in bytes. + */ + getSendBufferSize(): number; + /** + * By default, binding a socket will cause it to block the Node.js process from + * exiting as long as the socket is open. The `socket.unref()` method can be used + * to exclude the socket from the reference counting that keeps the Node.js + * process active. The `socket.ref()` method adds the socket back to the reference + * counting and restores the default behavior. + * + * Calling `socket.ref()` multiples times will have no additional effect. + * + * The `socket.ref()` method returns a reference to the socket so calls can be + * chained. + * @since v0.9.1 + */ + ref(): this; + /** + * Returns an object containing the `address`, `family`, and `port` of the remote + * endpoint. This method throws an `ERR_SOCKET_DGRAM_NOT_CONNECTED` exception + * if the socket is not connected. + * @since v12.0.0 + */ + remoteAddress(): AddressInfo; + /** + * Broadcasts a datagram on the socket. + * For connectionless sockets, the destination `port` and `address` must be + * specified. Connected sockets, on the other hand, will use their associated + * remote endpoint, so the `port` and `address` arguments must not be set. + * + * The `msg` argument contains the message to be sent. 
+ * Depending on its type, different behavior can apply. If `msg` is a `Buffer`, + * any `TypedArray` or a `DataView`, + * the `offset` and `length` specify the offset within the `Buffer` where the + * message begins and the number of bytes in the message, respectively. + * If `msg` is a `String`, then it is automatically converted to a `Buffer`with `'utf8'` encoding. With messages that + * contain multi-byte characters, `offset` and `length` will be calculated with + * respect to `byte length` and not the character position. + * If `msg` is an array, `offset` and `length` must not be specified. + * + * The `address` argument is a string. If the value of `address` is a host name, + * DNS will be used to resolve the address of the host. If `address` is not + * provided or otherwise nullish, `'127.0.0.1'` (for `udp4` sockets) or `'::1'`(for `udp6` sockets) will be used by default. + * + * If the socket has not been previously bound with a call to `bind`, the socket + * is assigned a random port number and is bound to the "all interfaces" address + * (`'0.0.0.0'` for `udp4` sockets, `'::0'` for `udp6` sockets.) + * + * An optional `callback` function may be specified to as a way of reporting + * DNS errors or for determining when it is safe to reuse the `buf` object. + * DNS lookups delay the time to send for at least one tick of the + * Node.js event loop. + * + * The only way to know for sure that the datagram has been sent is by using a`callback`. If an error occurs and a `callback` is given, the error will be + * passed as the first argument to the `callback`. If a `callback` is not given, + * the error is emitted as an `'error'` event on the `socket` object. + * + * Offset and length are optional but both _must_ be set if either are used. + * They are supported only when the first argument is a `Buffer`, a `TypedArray`, + * or a `DataView`. + * + * This method throws `ERR_SOCKET_BAD_PORT` if called on an unbound socket. + * + * Example of sending a UDP packet to a port on `localhost`; + * + * ```js + * import dgram from 'dgram'; + * import { Buffer } from 'buffer'; + * + * const message = Buffer.from('Some bytes'); + * const client = dgram.createSocket('udp4'); + * client.send(message, 41234, 'localhost', (err) => { + * client.close(); + * }); + * ``` + * + * Example of sending a UDP packet composed of multiple buffers to a port on`127.0.0.1`; + * + * ```js + * import dgram from 'dgram'; + * import { Buffer } from 'buffer'; + * + * const buf1 = Buffer.from('Some '); + * const buf2 = Buffer.from('bytes'); + * const client = dgram.createSocket('udp4'); + * client.send([buf1, buf2], 41234, (err) => { + * client.close(); + * }); + * ``` + * + * Sending multiple buffers might be faster or slower depending on the + * application and operating system. Run benchmarks to + * determine the optimal strategy on a case-by-case basis. Generally speaking, + * however, sending multiple buffers is faster. + * + * Example of sending a UDP packet using a socket connected to a port on`localhost`: + * + * ```js + * import dgram from 'dgram'; + * import { Buffer } from 'buffer'; + * + * const message = Buffer.from('Some bytes'); + * const client = dgram.createSocket('udp4'); + * client.connect(41234, 'localhost', (err) => { + * client.send(message, (err) => { + * client.close(); + * }); + * }); + * ``` + * @since v0.1.99 + * @param msg Message to be sent. + * @param offset Offset in the buffer where the message starts. + * @param length Number of bytes in the message. + * @param port Destination port. 
+ * @param address Destination host name or IP address. + * @param callback Called when the message has been sent. + */ + send(msg: string | Uint8Array | ReadonlyArray, port?: number, address?: string, callback?: (error: Error | null, bytes: number) => void): void; + send(msg: string | Uint8Array | ReadonlyArray, port?: number, callback?: (error: Error | null, bytes: number) => void): void; + send(msg: string | Uint8Array | ReadonlyArray, callback?: (error: Error | null, bytes: number) => void): void; + send(msg: string | Uint8Array, offset: number, length: number, port?: number, address?: string, callback?: (error: Error | null, bytes: number) => void): void; + send(msg: string | Uint8Array, offset: number, length: number, port?: number, callback?: (error: Error | null, bytes: number) => void): void; + send(msg: string | Uint8Array, offset: number, length: number, callback?: (error: Error | null, bytes: number) => void): void; + /** + * Sets or clears the `SO_BROADCAST` socket option. When set to `true`, UDP + * packets may be sent to a local interface's broadcast address. + * + * This method throws `EBADF` if called on an unbound socket. + * @since v0.6.9 + */ + setBroadcast(flag: boolean): void; + /** + * _All references to scope in this section are referring to [IPv6 Zone Indices](https://en.wikipedia.org/wiki/IPv6_address#Scoped_literal_IPv6_addresses), which are defined by [RFC + * 4007](https://tools.ietf.org/html/rfc4007). In string form, an IP_ + * _with a scope index is written as `'IP%scope'` where scope is an interface name_ + * _or interface number._ + * + * Sets the default outgoing multicast interface of the socket to a chosen + * interface or back to system interface selection. The `multicastInterface` must + * be a valid string representation of an IP from the socket's family. + * + * For IPv4 sockets, this should be the IP configured for the desired physical + * interface. All packets sent to multicast on the socket will be sent on the + * interface determined by the most recent successful use of this call. + * + * For IPv6 sockets, `multicastInterface` should include a scope to indicate the + * interface as in the examples that follow. In IPv6, individual `send` calls can + * also use explicit scope in addresses, so only packets sent to a multicast + * address without specifying an explicit scope are affected by the most recent + * successful use of this call. + * + * This method throws `EBADF` if called on an unbound socket. + * + * #### Example: IPv6 outgoing multicast interface + * + * On most systems, where scope format uses the interface name: + * + * ```js + * const socket = dgram.createSocket('udp6'); + * + * socket.bind(1234, () => { + * socket.setMulticastInterface('::%eth1'); + * }); + * ``` + * + * On Windows, where scope format uses an interface number: + * + * ```js + * const socket = dgram.createSocket('udp6'); + * + * socket.bind(1234, () => { + * socket.setMulticastInterface('::%2'); + * }); + * ``` + * + * #### Example: IPv4 outgoing multicast interface + * + * All systems use an IP of the host on the desired physical interface: + * + * ```js + * const socket = dgram.createSocket('udp4'); + * + * socket.bind(1234, () => { + * socket.setMulticastInterface('10.0.0.2'); + * }); + * ``` + * @since v8.6.0 + */ + setMulticastInterface(multicastInterface: string): void; + /** + * Sets or clears the `IP_MULTICAST_LOOP` socket option. When set to `true`, + * multicast packets will also be received on the local interface. 
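A minimal sketch of the `send()` overloads and `setBroadcast()` described above; the port, addresses, and payload are arbitrary placeholders, and the snippet is not part of the patch:

```ts
import * as dgram from 'node:dgram';
import { Buffer } from 'node:buffer';

const client = dgram.createSocket('udp4');
const payload = Buffer.from('HEADERbody-of-the-message');

client.bind(0, () => {
  // Send only a slice of the buffer: offset 6 onward ("body-of-the-message").
  client.send(payload, 6, payload.length - 6, 41234, '127.0.0.1', (err) => {
    if (err) console.error('send failed:', err);
  });

  // SO_BROADCAST must be enabled before sending to a broadcast address.
  client.setBroadcast(true);
  client.send('hello subnet', 41234, '255.255.255.255', (err) => {
    if (err) console.error('broadcast failed:', err);
    client.close();
  });
});
```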
+ * + * This method throws `EBADF` if called on an unbound socket. + * @since v0.3.8 + */ + setMulticastLoopback(flag: boolean): boolean; + /** + * Sets the `IP_MULTICAST_TTL` socket option. While TTL generally stands for + * "Time to Live", in this context it specifies the number of IP hops that a + * packet is allowed to travel through, specifically for multicast traffic. Each + * router or gateway that forwards a packet decrements the TTL. If the TTL is + * decremented to 0 by a router, it will not be forwarded. + * + * The `ttl` argument may be between 0 and 255\. The default on most systems is `1`. + * + * This method throws `EBADF` if called on an unbound socket. + * @since v0.3.8 + */ + setMulticastTTL(ttl: number): number; + /** + * Sets the `SO_RCVBUF` socket option. Sets the maximum socket receive buffer + * in bytes. + * + * This method throws `ERR_SOCKET_BUFFER_SIZE` if called on an unbound socket. + * @since v8.7.0 + */ + setRecvBufferSize(size: number): void; + /** + * Sets the `SO_SNDBUF` socket option. Sets the maximum socket send buffer + * in bytes. + * + * This method throws `ERR_SOCKET_BUFFER_SIZE` if called on an unbound socket. + * @since v8.7.0 + */ + setSendBufferSize(size: number): void; + /** + * Sets the `IP_TTL` socket option. While TTL generally stands for "Time to Live", + * in this context it specifies the number of IP hops that a packet is allowed to + * travel through. Each router or gateway that forwards a packet decrements the + * TTL. If the TTL is decremented to 0 by a router, it will not be forwarded. + * Changing TTL values is typically done for network probes or when multicasting. + * + * The `ttl` argument may be between 1 and 255\. The default on most systems + * is 64. + * + * This method throws `EBADF` if called on an unbound socket. + * @since v0.1.101 + */ + setTTL(ttl: number): number; + /** + * By default, binding a socket will cause it to block the Node.js process from + * exiting as long as the socket is open. The `socket.unref()` method can be used + * to exclude the socket from the reference counting that keeps the Node.js + * process active, allowing the process to exit even if the socket is still + * listening. + * + * Calling `socket.unref()` multiple times will have no addition effect. + * + * The `socket.unref()` method returns a reference to the socket so calls can be + * chained. + * @since v0.9.1 + */ + unref(): this; + /** + * Tells the kernel to join a source-specific multicast channel at the given`sourceAddress` and `groupAddress`, using the `multicastInterface` with the`IP_ADD_SOURCE_MEMBERSHIP` socket + * option. If the `multicastInterface` argument + * is not specified, the operating system will choose one interface and will add + * membership to it. To add membership to every available interface, call`socket.addSourceSpecificMembership()` multiple times, once per interface. + * + * When called on an unbound socket, this method will implicitly bind to a random + * port, listening on all interfaces. + * @since v13.1.0, v12.16.0 + */ + addSourceSpecificMembership(sourceAddress: string, groupAddress: string, multicastInterface?: string): void; + /** + * Instructs the kernel to leave a source-specific multicast channel at the given`sourceAddress` and `groupAddress` using the `IP_DROP_SOURCE_MEMBERSHIP`socket option. This method is + * automatically called by the kernel when the + * socket is closed or the process terminates, so most apps will never have + * reason to call this. 
+ * + * If `multicastInterface` is not specified, the operating system will attempt to + * drop membership on all valid interfaces. + * @since v13.1.0, v12.16.0 + */ + dropSourceSpecificMembership(sourceAddress: string, groupAddress: string, multicastInterface?: string): void; + /** + * events.EventEmitter + * 1. close + * 2. connect + * 3. error + * 4. listening + * 5. message + */ + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: 'close', listener: () => void): this; + addListener(event: 'connect', listener: () => void): this; + addListener(event: 'error', listener: (err: Error) => void): this; + addListener(event: 'listening', listener: () => void): this; + addListener(event: 'message', listener: (msg: Buffer, rinfo: RemoteInfo) => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: 'close'): boolean; + emit(event: 'connect'): boolean; + emit(event: 'error', err: Error): boolean; + emit(event: 'listening'): boolean; + emit(event: 'message', msg: Buffer, rinfo: RemoteInfo): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: 'close', listener: () => void): this; + on(event: 'connect', listener: () => void): this; + on(event: 'error', listener: (err: Error) => void): this; + on(event: 'listening', listener: () => void): this; + on(event: 'message', listener: (msg: Buffer, rinfo: RemoteInfo) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: 'close', listener: () => void): this; + once(event: 'connect', listener: () => void): this; + once(event: 'error', listener: (err: Error) => void): this; + once(event: 'listening', listener: () => void): this; + once(event: 'message', listener: (msg: Buffer, rinfo: RemoteInfo) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: 'close', listener: () => void): this; + prependListener(event: 'connect', listener: () => void): this; + prependListener(event: 'error', listener: (err: Error) => void): this; + prependListener(event: 'listening', listener: () => void): this; + prependListener(event: 'message', listener: (msg: Buffer, rinfo: RemoteInfo) => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: 'close', listener: () => void): this; + prependOnceListener(event: 'connect', listener: () => void): this; + prependOnceListener(event: 'error', listener: (err: Error) => void): this; + prependOnceListener(event: 'listening', listener: () => void): this; + prependOnceListener(event: 'message', listener: (msg: Buffer, rinfo: RemoteInfo) => void): this; + } +} +declare module 'node:dgram' { + export * from 'dgram'; +} diff --git a/node_modules/@types/node/ts4.8/diagnostics_channel.d.ts b/node_modules/@types/node/ts4.8/diagnostics_channel.d.ts new file mode 100755 index 0000000..a87ba8c --- /dev/null +++ b/node_modules/@types/node/ts4.8/diagnostics_channel.d.ts @@ -0,0 +1,153 @@ +/** + * The `diagnostics_channel` module provides an API to create named channels + * to report arbitrary message data for diagnostics purposes. + * + * It can be accessed using: + * + * ```js + * import diagnostics_channel from 'diagnostics_channel'; + * ``` + * + * It is intended that a module writer wanting to report diagnostics messages + * will create one or many top-level channels to report messages through. 
+ * Channels may also be acquired at runtime but it is not encouraged + * due to the additional overhead of doing so. Channels may be exported for + * convenience, but as long as the name is known it can be acquired anywhere. + * + * If you intend for your module to produce diagnostics data for others to + * consume it is recommended that you include documentation of what named + * channels are used along with the shape of the message data. Channel names + * should generally include the module name to avoid collisions with data from + * other modules. + * @experimental + * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/diagnostics_channel.js) + */ +declare module 'diagnostics_channel' { + /** + * Check if there are active subscribers to the named channel. This is helpful if + * the message you want to send might be expensive to prepare. + * + * This API is optional but helpful when trying to publish messages from very + * performance-sensitive code. + * + * ```js + * import diagnostics_channel from 'diagnostics_channel'; + * + * if (diagnostics_channel.hasSubscribers('my-channel')) { + * // There are subscribers, prepare and publish message + * } + * ``` + * @since v15.1.0, v14.17.0 + * @param name The channel name + * @return If there are active subscribers + */ + function hasSubscribers(name: string): boolean; + /** + * This is the primary entry-point for anyone wanting to interact with a named + * channel. It produces a channel object which is optimized to reduce overhead at + * publish time as much as possible. + * + * ```js + * import diagnostics_channel from 'diagnostics_channel'; + * + * const channel = diagnostics_channel.channel('my-channel'); + * ``` + * @since v15.1.0, v14.17.0 + * @param name The channel name + * @return The named channel object + */ + function channel(name: string): Channel; + type ChannelListener = (message: unknown, name: string) => void; + /** + * The class `Channel` represents an individual named channel within the data + * pipeline. It is use to track subscribers and to publish messages when there + * are subscribers present. It exists as a separate object to avoid channel + * lookups at publish time, enabling very fast publish speeds and allowing + * for heavy use while incurring very minimal cost. Channels are created with {@link channel}, constructing a channel directly + * with `new Channel(name)` is not supported. + * @since v15.1.0, v14.17.0 + */ + class Channel { + readonly name: string; + /** + * Check if there are active subscribers to this channel. This is helpful if + * the message you want to send might be expensive to prepare. + * + * This API is optional but helpful when trying to publish messages from very + * performance-sensitive code. + * + * ```js + * import diagnostics_channel from 'diagnostics_channel'; + * + * const channel = diagnostics_channel.channel('my-channel'); + * + * if (channel.hasSubscribers) { + * // There are subscribers, prepare and publish message + * } + * ``` + * @since v15.1.0, v14.17.0 + */ + readonly hasSubscribers: boolean; + private constructor(name: string); + /** + * Publish a message to any subscribers to the channel. This will + * trigger message handlers synchronously so they will execute within + * the same context. 
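A minimal sketch of the `channel()` / `hasSubscribers` / `publish()` pattern described above (with a `subscribe()` call, documented just below); the channel name `'my-module:request'` is a placeholder and the snippet is not part of the vendored typings:

```ts
import diagnostics_channel from 'node:diagnostics_channel';

const channel = diagnostics_channel.channel('my-module:request');

// A subscriber registered elsewhere in the process.
channel.subscribe((message, name) => {
  console.log(`received on ${name}:`, message);
});

function handleRequest(url: string): void {
  // Skip building the message object when nobody is listening.
  if (channel.hasSubscribers) {
    channel.publish({ url, startedAt: Date.now() });
  }
}

handleRequest('/healthz');
```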
+ * + * ```js + * import diagnostics_channel from 'diagnostics_channel'; + * + * const channel = diagnostics_channel.channel('my-channel'); + * + * channel.publish({ + * some: 'message' + * }); + * ``` + * @since v15.1.0, v14.17.0 + * @param message The message to send to the channel subscribers + */ + publish(message: unknown): void; + /** + * Register a message handler to subscribe to this channel. This message handler + * will be run synchronously whenever a message is published to the channel. Any + * errors thrown in the message handler will trigger an `'uncaughtException'`. + * + * ```js + * import diagnostics_channel from 'diagnostics_channel'; + * + * const channel = diagnostics_channel.channel('my-channel'); + * + * channel.subscribe((message, name) => { + * // Received data + * }); + * ``` + * @since v15.1.0, v14.17.0 + * @param onMessage The handler to receive channel messages + */ + subscribe(onMessage: ChannelListener): void; + /** + * Remove a message handler previously registered to this channel with `channel.subscribe(onMessage)`. + * + * ```js + * import diagnostics_channel from 'diagnostics_channel'; + * + * const channel = diagnostics_channel.channel('my-channel'); + * + * function onMessage(message, name) { + * // Received data + * } + * + * channel.subscribe(onMessage); + * + * channel.unsubscribe(onMessage); + * ``` + * @since v15.1.0, v14.17.0 + * @param onMessage The previous subscribed handler to remove + * @return `true` if the handler was found, `false` otherwise. + */ + unsubscribe(onMessage: ChannelListener): void; + } +} +declare module 'node:diagnostics_channel' { + export * from 'diagnostics_channel'; +} diff --git a/node_modules/@types/node/ts4.8/dns.d.ts b/node_modules/@types/node/ts4.8/dns.d.ts new file mode 100755 index 0000000..305367b --- /dev/null +++ b/node_modules/@types/node/ts4.8/dns.d.ts @@ -0,0 +1,659 @@ +/** + * The `dns` module enables name resolution. For example, use it to look up IP + * addresses of host names. + * + * Although named for the [Domain Name System (DNS)](https://en.wikipedia.org/wiki/Domain_Name_System), it does not always use the + * DNS protocol for lookups. {@link lookup} uses the operating system + * facilities to perform name resolution. It may not need to perform any network + * communication. To perform name resolution the way other applications on the same + * system do, use {@link lookup}. + * + * ```js + * const dns = require('dns'); + * + * dns.lookup('example.org', (err, address, family) => { + * console.log('address: %j family: IPv%s', address, family); + * }); + * // address: "93.184.216.34" family: IPv4 + * ``` + * + * All other functions in the `dns` module connect to an actual DNS server to + * perform name resolution. They will always use the network to perform DNS + * queries. These functions do not use the same set of configuration files used by {@link lookup} (e.g. `/etc/hosts`). Use these functions to always perform + * DNS queries, bypassing other name-resolution facilities. + * + * ```js + * const dns = require('dns'); + * + * dns.resolve4('archive.org', (err, addresses) => { + * if (err) throw err; + * + * console.log(`addresses: ${JSON.stringify(addresses)}`); + * + * addresses.forEach((a) => { + * dns.reverse(a, (err, hostnames) => { + * if (err) { + * throw err; + * } + * console.log(`reverse for ${a}: ${JSON.stringify(hostnames)}`); + * }); + * }); + * }); + * ``` + * + * See the `Implementation considerations section` for more information. 
+ * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/dns.js) + */ +declare module 'dns' { + import * as dnsPromises from 'node:dns/promises'; + // Supported getaddrinfo flags. + export const ADDRCONFIG: number; + export const V4MAPPED: number; + /** + * If `dns.V4MAPPED` is specified, return resolved IPv6 addresses as + * well as IPv4 mapped IPv6 addresses. + */ + export const ALL: number; + export interface LookupOptions { + family?: number | undefined; + hints?: number | undefined; + all?: boolean | undefined; + /** + * @default true + */ + verbatim?: boolean | undefined; + } + export interface LookupOneOptions extends LookupOptions { + all?: false | undefined; + } + export interface LookupAllOptions extends LookupOptions { + all: true; + } + export interface LookupAddress { + address: string; + family: number; + } + /** + * Resolves a host name (e.g. `'nodejs.org'`) into the first found A (IPv4) or + * AAAA (IPv6) record. All `option` properties are optional. If `options` is an + * integer, then it must be `4` or `6` – if `options` is not provided, then IPv4 + * and IPv6 addresses are both returned if found. + * + * With the `all` option set to `true`, the arguments for `callback` change to`(err, addresses)`, with `addresses` being an array of objects with the + * properties `address` and `family`. + * + * On error, `err` is an `Error` object, where `err.code` is the error code. + * Keep in mind that `err.code` will be set to `'ENOTFOUND'` not only when + * the host name does not exist but also when the lookup fails in other ways + * such as no available file descriptors. + * + * `dns.lookup()` does not necessarily have anything to do with the DNS protocol. + * The implementation uses an operating system facility that can associate names + * with addresses, and vice versa. This implementation can have subtle but + * important consequences on the behavior of any Node.js program. Please take some + * time to consult the `Implementation considerations section` before using`dns.lookup()`. + * + * Example usage: + * + * ```js + * const dns = require('dns'); + * const options = { + * family: 6, + * hints: dns.ADDRCONFIG | dns.V4MAPPED, + * }; + * dns.lookup('example.com', options, (err, address, family) => + * console.log('address: %j family: IPv%s', address, family)); + * // address: "2606:2800:220:1:248:1893:25c8:1946" family: IPv6 + * + * // When options.all is true, the result will be an Array. + * options.all = true; + * dns.lookup('example.com', options, (err, addresses) => + * console.log('addresses: %j', addresses)); + * // addresses: [{"address":"2606:2800:220:1:248:1893:25c8:1946","family":6}] + * ``` + * + * If this method is invoked as its `util.promisify()` ed version, and `all`is not set to `true`, it returns a `Promise` for an `Object` with `address` and`family` properties. 
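A minimal sketch of `dns.lookup()` with the `all` option, plus `dns.lookupService()` (documented just below); the host name and port are placeholders and the snippet is illustrative only:

```ts
import * as dns from 'node:dns';

const options: dns.LookupAllOptions = { all: true };

dns.lookup('example.com', options, (err, addresses) => {
  if (err) throw err;
  // With `all: true` the callback receives an array of { address, family }.
  for (const { address, family } of addresses) {
    console.log(`address: ${address} (IPv${family})`);
  }
});

// Reverse-map a local address and port to a host name and service name.
dns.lookupService('127.0.0.1', 22, (err, hostname, service) => {
  if (err) throw err;
  console.log(hostname, service); // e.g. "localhost ssh"
});
```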
+ * @since v0.1.90 + */ + export function lookup(hostname: string, family: number, callback: (err: NodeJS.ErrnoException | null, address: string, family: number) => void): void; + export function lookup(hostname: string, options: LookupOneOptions, callback: (err: NodeJS.ErrnoException | null, address: string, family: number) => void): void; + export function lookup(hostname: string, options: LookupAllOptions, callback: (err: NodeJS.ErrnoException | null, addresses: LookupAddress[]) => void): void; + export function lookup(hostname: string, options: LookupOptions, callback: (err: NodeJS.ErrnoException | null, address: string | LookupAddress[], family: number) => void): void; + export function lookup(hostname: string, callback: (err: NodeJS.ErrnoException | null, address: string, family: number) => void): void; + export namespace lookup { + function __promisify__(hostname: string, options: LookupAllOptions): Promise; + function __promisify__(hostname: string, options?: LookupOneOptions | number): Promise; + function __promisify__(hostname: string, options: LookupOptions): Promise; + } + /** + * Resolves the given `address` and `port` into a host name and service using + * the operating system's underlying `getnameinfo` implementation. + * + * If `address` is not a valid IP address, a `TypeError` will be thrown. + * The `port` will be coerced to a number. If it is not a legal port, a `TypeError`will be thrown. + * + * On an error, `err` is an `Error` object, where `err.code` is the error code. + * + * ```js + * const dns = require('dns'); + * dns.lookupService('127.0.0.1', 22, (err, hostname, service) => { + * console.log(hostname, service); + * // Prints: localhost ssh + * }); + * ``` + * + * If this method is invoked as its `util.promisify()` ed version, it returns a`Promise` for an `Object` with `hostname` and `service` properties. + * @since v0.11.14 + */ + export function lookupService(address: string, port: number, callback: (err: NodeJS.ErrnoException | null, hostname: string, service: string) => void): void; + export namespace lookupService { + function __promisify__( + address: string, + port: number + ): Promise<{ + hostname: string; + service: string; + }>; + } + export interface ResolveOptions { + ttl: boolean; + } + export interface ResolveWithTtlOptions extends ResolveOptions { + ttl: true; + } + export interface RecordWithTtl { + address: string; + ttl: number; + } + /** @deprecated Use `AnyARecord` or `AnyAaaaRecord` instead. 
*/ + export type AnyRecordWithTtl = AnyARecord | AnyAaaaRecord; + export interface AnyARecord extends RecordWithTtl { + type: 'A'; + } + export interface AnyAaaaRecord extends RecordWithTtl { + type: 'AAAA'; + } + export interface CaaRecord { + critial: number; + issue?: string | undefined; + issuewild?: string | undefined; + iodef?: string | undefined; + contactemail?: string | undefined; + contactphone?: string | undefined; + } + export interface MxRecord { + priority: number; + exchange: string; + } + export interface AnyMxRecord extends MxRecord { + type: 'MX'; + } + export interface NaptrRecord { + flags: string; + service: string; + regexp: string; + replacement: string; + order: number; + preference: number; + } + export interface AnyNaptrRecord extends NaptrRecord { + type: 'NAPTR'; + } + export interface SoaRecord { + nsname: string; + hostmaster: string; + serial: number; + refresh: number; + retry: number; + expire: number; + minttl: number; + } + export interface AnySoaRecord extends SoaRecord { + type: 'SOA'; + } + export interface SrvRecord { + priority: number; + weight: number; + port: number; + name: string; + } + export interface AnySrvRecord extends SrvRecord { + type: 'SRV'; + } + export interface AnyTxtRecord { + type: 'TXT'; + entries: string[]; + } + export interface AnyNsRecord { + type: 'NS'; + value: string; + } + export interface AnyPtrRecord { + type: 'PTR'; + value: string; + } + export interface AnyCnameRecord { + type: 'CNAME'; + value: string; + } + export type AnyRecord = AnyARecord | AnyAaaaRecord | AnyCnameRecord | AnyMxRecord | AnyNaptrRecord | AnyNsRecord | AnyPtrRecord | AnySoaRecord | AnySrvRecord | AnyTxtRecord; + /** + * Uses the DNS protocol to resolve a host name (e.g. `'nodejs.org'`) into an array + * of the resource records. The `callback` function has arguments`(err, records)`. When successful, `records` will be an array of resource + * records. The type and structure of individual results varies based on `rrtype`: + * + * + * + * On error, `err` is an `Error` object, where `err.code` is one of the `DNS error codes`. + * @since v0.1.27 + * @param hostname Host name to resolve. + * @param [rrtype='A'] Resource record type. 
+ */ + export function resolve(hostname: string, callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void): void; + export function resolve(hostname: string, rrtype: 'A', callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void): void; + export function resolve(hostname: string, rrtype: 'AAAA', callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void): void; + export function resolve(hostname: string, rrtype: 'ANY', callback: (err: NodeJS.ErrnoException | null, addresses: AnyRecord[]) => void): void; + export function resolve(hostname: string, rrtype: 'CNAME', callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void): void; + export function resolve(hostname: string, rrtype: 'MX', callback: (err: NodeJS.ErrnoException | null, addresses: MxRecord[]) => void): void; + export function resolve(hostname: string, rrtype: 'NAPTR', callback: (err: NodeJS.ErrnoException | null, addresses: NaptrRecord[]) => void): void; + export function resolve(hostname: string, rrtype: 'NS', callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void): void; + export function resolve(hostname: string, rrtype: 'PTR', callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void): void; + export function resolve(hostname: string, rrtype: 'SOA', callback: (err: NodeJS.ErrnoException | null, addresses: SoaRecord) => void): void; + export function resolve(hostname: string, rrtype: 'SRV', callback: (err: NodeJS.ErrnoException | null, addresses: SrvRecord[]) => void): void; + export function resolve(hostname: string, rrtype: 'TXT', callback: (err: NodeJS.ErrnoException | null, addresses: string[][]) => void): void; + export function resolve( + hostname: string, + rrtype: string, + callback: (err: NodeJS.ErrnoException | null, addresses: string[] | MxRecord[] | NaptrRecord[] | SoaRecord | SrvRecord[] | string[][] | AnyRecord[]) => void + ): void; + export namespace resolve { + function __promisify__(hostname: string, rrtype?: 'A' | 'AAAA' | 'CNAME' | 'NS' | 'PTR'): Promise; + function __promisify__(hostname: string, rrtype: 'ANY'): Promise; + function __promisify__(hostname: string, rrtype: 'MX'): Promise; + function __promisify__(hostname: string, rrtype: 'NAPTR'): Promise; + function __promisify__(hostname: string, rrtype: 'SOA'): Promise; + function __promisify__(hostname: string, rrtype: 'SRV'): Promise; + function __promisify__(hostname: string, rrtype: 'TXT'): Promise; + function __promisify__(hostname: string, rrtype: string): Promise; + } + /** + * Uses the DNS protocol to resolve a IPv4 addresses (`A` records) for the`hostname`. The `addresses` argument passed to the `callback` function + * will contain an array of IPv4 addresses (e.g.`['74.125.79.104', '74.125.79.105', '74.125.79.106']`). + * @since v0.1.16 + * @param hostname Host name to resolve. 
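A minimal sketch of the rrtype-specific `resolve()` overloads above, showing how the callback's result type follows the record type; `example.com` is a placeholder and the snippet is not part of the patch:

```ts
import * as dns from 'node:dns';

dns.resolve('example.com', 'MX', (err, records) => {
  if (err) throw err;
  // For 'MX', records is MxRecord[]: { priority, exchange }.
  for (const { priority, exchange } of records) {
    console.log(`MX ${priority} ${exchange}`);
  }
});

dns.resolve('example.com', 'TXT', (err, records) => {
  if (err) throw err;
  // For 'TXT', records is string[][]; join the chunks of each record.
  records.forEach((chunks) => console.log(chunks.join('')));
});
```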
+ */ + export function resolve4(hostname: string, callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void): void; + export function resolve4(hostname: string, options: ResolveWithTtlOptions, callback: (err: NodeJS.ErrnoException | null, addresses: RecordWithTtl[]) => void): void; + export function resolve4(hostname: string, options: ResolveOptions, callback: (err: NodeJS.ErrnoException | null, addresses: string[] | RecordWithTtl[]) => void): void; + export namespace resolve4 { + function __promisify__(hostname: string): Promise; + function __promisify__(hostname: string, options: ResolveWithTtlOptions): Promise; + function __promisify__(hostname: string, options?: ResolveOptions): Promise; + } + /** + * Uses the DNS protocol to resolve a IPv6 addresses (`AAAA` records) for the`hostname`. The `addresses` argument passed to the `callback` function + * will contain an array of IPv6 addresses. + * @since v0.1.16 + * @param hostname Host name to resolve. + */ + export function resolve6(hostname: string, callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void): void; + export function resolve6(hostname: string, options: ResolveWithTtlOptions, callback: (err: NodeJS.ErrnoException | null, addresses: RecordWithTtl[]) => void): void; + export function resolve6(hostname: string, options: ResolveOptions, callback: (err: NodeJS.ErrnoException | null, addresses: string[] | RecordWithTtl[]) => void): void; + export namespace resolve6 { + function __promisify__(hostname: string): Promise; + function __promisify__(hostname: string, options: ResolveWithTtlOptions): Promise; + function __promisify__(hostname: string, options?: ResolveOptions): Promise; + } + /** + * Uses the DNS protocol to resolve `CNAME` records for the `hostname`. The`addresses` argument passed to the `callback` function + * will contain an array of canonical name records available for the `hostname`(e.g. `['bar.example.com']`). + * @since v0.3.2 + */ + export function resolveCname(hostname: string, callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void): void; + export namespace resolveCname { + function __promisify__(hostname: string): Promise; + } + /** + * Uses the DNS protocol to resolve `CAA` records for the `hostname`. The`addresses` argument passed to the `callback` function + * will contain an array of certification authority authorization records + * available for the `hostname` (e.g. `[{critical: 0, iodef: 'mailto:pki@example.com'}, {critical: 128, issue: 'pki.example.com'}]`). + * @since v15.0.0, v14.17.0 + */ + export function resolveCaa(hostname: string, callback: (err: NodeJS.ErrnoException | null, records: CaaRecord[]) => void): void; + export namespace resolveCaa { + function __promisify__(hostname: string): Promise; + } + /** + * Uses the DNS protocol to resolve mail exchange records (`MX` records) for the`hostname`. The `addresses` argument passed to the `callback` function will + * contain an array of objects containing both a `priority` and `exchange`property (e.g. `[{priority: 10, exchange: 'mx.example.com'}, ...]`). + * @since v0.1.27 + */ + export function resolveMx(hostname: string, callback: (err: NodeJS.ErrnoException | null, addresses: MxRecord[]) => void): void; + export namespace resolveMx { + function __promisify__(hostname: string): Promise; + } + /** + * Uses the DNS protocol to resolve regular expression based records (`NAPTR`records) for the `hostname`. 
The `addresses` argument passed to the `callback`function will contain an array of + * objects with the following properties: + * + * * `flags` + * * `service` + * * `regexp` + * * `replacement` + * * `order` + * * `preference` + * + * ```js + * { + * flags: 's', + * service: 'SIP+D2U', + * regexp: '', + * replacement: '_sip._udp.example.com', + * order: 30, + * preference: 100 + * } + * ``` + * @since v0.9.12 + */ + export function resolveNaptr(hostname: string, callback: (err: NodeJS.ErrnoException | null, addresses: NaptrRecord[]) => void): void; + export namespace resolveNaptr { + function __promisify__(hostname: string): Promise; + } + /** + * Uses the DNS protocol to resolve name server records (`NS` records) for the`hostname`. The `addresses` argument passed to the `callback` function will + * contain an array of name server records available for `hostname`(e.g. `['ns1.example.com', 'ns2.example.com']`). + * @since v0.1.90 + */ + export function resolveNs(hostname: string, callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void): void; + export namespace resolveNs { + function __promisify__(hostname: string): Promise; + } + /** + * Uses the DNS protocol to resolve pointer records (`PTR` records) for the`hostname`. The `addresses` argument passed to the `callback` function will + * be an array of strings containing the reply records. + * @since v6.0.0 + */ + export function resolvePtr(hostname: string, callback: (err: NodeJS.ErrnoException | null, addresses: string[]) => void): void; + export namespace resolvePtr { + function __promisify__(hostname: string): Promise; + } + /** + * Uses the DNS protocol to resolve a start of authority record (`SOA` record) for + * the `hostname`. The `address` argument passed to the `callback` function will + * be an object with the following properties: + * + * * `nsname` + * * `hostmaster` + * * `serial` + * * `refresh` + * * `retry` + * * `expire` + * * `minttl` + * + * ```js + * { + * nsname: 'ns.example.com', + * hostmaster: 'root.example.com', + * serial: 2013101809, + * refresh: 10000, + * retry: 2400, + * expire: 604800, + * minttl: 3600 + * } + * ``` + * @since v0.11.10 + */ + export function resolveSoa(hostname: string, callback: (err: NodeJS.ErrnoException | null, address: SoaRecord) => void): void; + export namespace resolveSoa { + function __promisify__(hostname: string): Promise; + } + /** + * Uses the DNS protocol to resolve service records (`SRV` records) for the`hostname`. The `addresses` argument passed to the `callback` function will + * be an array of objects with the following properties: + * + * * `priority` + * * `weight` + * * `port` + * * `name` + * + * ```js + * { + * priority: 10, + * weight: 5, + * port: 21223, + * name: 'service.example.com' + * } + * ``` + * @since v0.1.27 + */ + export function resolveSrv(hostname: string, callback: (err: NodeJS.ErrnoException | null, addresses: SrvRecord[]) => void): void; + export namespace resolveSrv { + function __promisify__(hostname: string): Promise; + } + /** + * Uses the DNS protocol to resolve text queries (`TXT` records) for the`hostname`. The `records` argument passed to the `callback` function is a + * two-dimensional array of the text records available for `hostname` (e.g.`[ ['v=spf1 ip4:0.0.0.0 ', '~all' ] ]`). Each sub-array contains TXT chunks of + * one record. Depending on the use case, these could be either joined together or + * treated separately. 
+ * @since v0.1.27 + */ + export function resolveTxt(hostname: string, callback: (err: NodeJS.ErrnoException | null, addresses: string[][]) => void): void; + export namespace resolveTxt { + function __promisify__(hostname: string): Promise; + } + /** + * Uses the DNS protocol to resolve all records (also known as `ANY` or `*` query). + * The `ret` argument passed to the `callback` function will be an array containing + * various types of records. Each object has a property `type` that indicates the + * type of the current record. And depending on the `type`, additional properties + * will be present on the object: + * + * + * + * Here is an example of the `ret` object passed to the callback: + * + * ```js + * [ { type: 'A', address: '127.0.0.1', ttl: 299 }, + * { type: 'CNAME', value: 'example.com' }, + * { type: 'MX', exchange: 'alt4.aspmx.l.example.com', priority: 50 }, + * { type: 'NS', value: 'ns1.example.com' }, + * { type: 'TXT', entries: [ 'v=spf1 include:_spf.example.com ~all' ] }, + * { type: 'SOA', + * nsname: 'ns1.example.com', + * hostmaster: 'admin.example.com', + * serial: 156696742, + * refresh: 900, + * retry: 900, + * expire: 1800, + * minttl: 60 } ] + * ``` + * + * DNS server operators may choose not to respond to `ANY`queries. It may be better to call individual methods like {@link resolve4},{@link resolveMx}, and so on. For more details, see [RFC + * 8482](https://tools.ietf.org/html/rfc8482). + */ + export function resolveAny(hostname: string, callback: (err: NodeJS.ErrnoException | null, addresses: AnyRecord[]) => void): void; + export namespace resolveAny { + function __promisify__(hostname: string): Promise; + } + /** + * Performs a reverse DNS query that resolves an IPv4 or IPv6 address to an + * array of host names. + * + * On error, `err` is an `Error` object, where `err.code` is + * one of the `DNS error codes`. + * @since v0.1.16 + */ + export function reverse(ip: string, callback: (err: NodeJS.ErrnoException | null, hostnames: string[]) => void): void; + /** + * Sets the IP address and port of servers to be used when performing DNS + * resolution. The `servers` argument is an array of [RFC 5952](https://tools.ietf.org/html/rfc5952#section-6) formatted + * addresses. If the port is the IANA default DNS port (53) it can be omitted. + * + * ```js + * dns.setServers([ + * '4.4.4.4', + * '[2001:4860:4860::8888]', + * '4.4.4.4:1053', + * '[2001:4860:4860::8888]:1053', + * ]); + * ``` + * + * An error will be thrown if an invalid address is provided. + * + * The `dns.setServers()` method must not be called while a DNS query is in + * progress. + * + * The {@link setServers} method affects only {@link resolve},`dns.resolve*()` and {@link reverse} (and specifically _not_ {@link lookup}). + * + * This method works much like [resolve.conf](https://man7.org/linux/man-pages/man5/resolv.conf.5.html). + * That is, if attempting to resolve with the first server provided results in a`NOTFOUND` error, the `resolve()` method will _not_ attempt to resolve with + * subsequent servers provided. Fallback DNS servers will only be used if the + * earlier ones time out or result in some other error. + * @since v0.11.3 + * @param servers array of `RFC 5952` formatted addresses + */ + export function setServers(servers: ReadonlyArray): void; + /** + * Returns an array of IP address strings, formatted according to [RFC 5952](https://tools.ietf.org/html/rfc5952#section-6), + * that are currently configured for DNS resolution. 
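A minimal sketch of `reverse()` and `setServers()` as described above; the server and query addresses are placeholders and the snippet is illustrative only:

```ts
import * as dns from 'node:dns';

// Point the resolver at specific DNS servers before any query is in flight.
dns.setServers(['1.1.1.1', '[2606:4700:4700::1111]']);

dns.reverse('8.8.8.8', (err, hostnames) => {
  if (err) throw err;
  console.log('PTR records:', hostnames); // e.g. [ 'dns.google' ]
});
```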
A string will include a port + * section if a custom port is used. + * + * ```js + * [ + * '4.4.4.4', + * '2001:4860:4860::8888', + * '4.4.4.4:1053', + * '[2001:4860:4860::8888]:1053', + * ] + * ``` + * @since v0.11.3 + */ + export function getServers(): string[]; + /** + * Set the default value of `verbatim` in {@link lookup} and `dnsPromises.lookup()`. The value could be: + * + * * `ipv4first`: sets default `verbatim` `false`. + * * `verbatim`: sets default `verbatim` `true`. + * + * The default is `ipv4first` and {@link setDefaultResultOrder} have higher + * priority than `--dns-result-order`. When using `worker threads`,{@link setDefaultResultOrder} from the main thread won't affect the default + * dns orders in workers. + * @since v16.4.0, v14.18.0 + * @param order must be `'ipv4first'` or `'verbatim'`. + */ + export function setDefaultResultOrder(order: 'ipv4first' | 'verbatim'): void; + // Error codes + export const NODATA: string; + export const FORMERR: string; + export const SERVFAIL: string; + export const NOTFOUND: string; + export const NOTIMP: string; + export const REFUSED: string; + export const BADQUERY: string; + export const BADNAME: string; + export const BADFAMILY: string; + export const BADRESP: string; + export const CONNREFUSED: string; + export const TIMEOUT: string; + export const EOF: string; + export const FILE: string; + export const NOMEM: string; + export const DESTRUCTION: string; + export const BADSTR: string; + export const BADFLAGS: string; + export const NONAME: string; + export const BADHINTS: string; + export const NOTINITIALIZED: string; + export const LOADIPHLPAPI: string; + export const ADDRGETNETWORKPARAMS: string; + export const CANCELLED: string; + export interface ResolverOptions { + timeout?: number | undefined; + /** + * @default 4 + */ + tries?: number; + } + /** + * An independent resolver for DNS requests. + * + * Creating a new resolver uses the default server settings. Setting + * the servers used for a resolver using `resolver.setServers()` does not affect + * other resolvers: + * + * ```js + * const { Resolver } = require('dns'); + * const resolver = new Resolver(); + * resolver.setServers(['4.4.4.4']); + * + * // This request will use the server at 4.4.4.4, independent of global settings. + * resolver.resolve4('example.org', (err, addresses) => { + * // ... + * }); + * ``` + * + * The following methods from the `dns` module are available: + * + * * `resolver.getServers()` + * * `resolver.resolve()` + * * `resolver.resolve4()` + * * `resolver.resolve6()` + * * `resolver.resolveAny()` + * * `resolver.resolveCaa()` + * * `resolver.resolveCname()` + * * `resolver.resolveMx()` + * * `resolver.resolveNaptr()` + * * `resolver.resolveNs()` + * * `resolver.resolvePtr()` + * * `resolver.resolveSoa()` + * * `resolver.resolveSrv()` + * * `resolver.resolveTxt()` + * * `resolver.reverse()` + * * `resolver.setServers()` + * @since v8.3.0 + */ + export class Resolver { + constructor(options?: ResolverOptions); + /** + * Cancel all outstanding DNS queries made by this resolver. The corresponding + * callbacks will be called with an error with code `ECANCELLED`. 
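A minimal sketch of an independent `Resolver` with the `timeout`/`tries` options and `cancel()` described above; the server address and timings are placeholders and the snippet is not part of the vendored typings:

```ts
import { Resolver } from 'node:dns';

const resolver = new Resolver({ timeout: 2000, tries: 2 });
resolver.setServers(['9.9.9.9']);

resolver.resolve4('example.org', (err, addresses) => {
  if (err) {
    console.error(`lookup failed: ${err.code}`);
    return;
  }
  console.log('A records:', addresses);
});

// If the process needs to shut down early, abort outstanding queries;
// their callbacks receive an error with code ECANCELLED.
setTimeout(() => resolver.cancel(), 5000).unref();
```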
+ * @since v8.3.0 + */ + cancel(): void; + getServers: typeof getServers; + resolve: typeof resolve; + resolve4: typeof resolve4; + resolve6: typeof resolve6; + resolveAny: typeof resolveAny; + resolveCname: typeof resolveCname; + resolveMx: typeof resolveMx; + resolveNaptr: typeof resolveNaptr; + resolveNs: typeof resolveNs; + resolvePtr: typeof resolvePtr; + resolveSoa: typeof resolveSoa; + resolveSrv: typeof resolveSrv; + resolveTxt: typeof resolveTxt; + reverse: typeof reverse; + /** + * The resolver instance will send its requests from the specified IP address. + * This allows programs to specify outbound interfaces when used on multi-homed + * systems. + * + * If a v4 or v6 address is not specified, it is set to the default, and the + * operating system will choose a local address automatically. + * + * The resolver will use the v4 local address when making requests to IPv4 DNS + * servers, and the v6 local address when making requests to IPv6 DNS servers. + * The `rrtype` of resolution requests has no impact on the local address used. + * @since v15.1.0, v14.17.0 + * @param [ipv4='0.0.0.0'] A string representation of an IPv4 address. + * @param [ipv6='::0'] A string representation of an IPv6 address. + */ + setLocalAddress(ipv4?: string, ipv6?: string): void; + setServers: typeof setServers; + } + export { dnsPromises as promises }; +} +declare module 'node:dns' { + export * from 'dns'; +} diff --git a/node_modules/@types/node/ts4.8/dns/promises.d.ts b/node_modules/@types/node/ts4.8/dns/promises.d.ts new file mode 100755 index 0000000..77cd807 --- /dev/null +++ b/node_modules/@types/node/ts4.8/dns/promises.d.ts @@ -0,0 +1,370 @@ +/** + * The `dns.promises` API provides an alternative set of asynchronous DNS methods + * that return `Promise` objects rather than using callbacks. The API is accessible + * via `require('dns').promises` or `require('dns/promises')`. + * @since v10.6.0 + */ +declare module 'dns/promises' { + import { + LookupAddress, + LookupOneOptions, + LookupAllOptions, + LookupOptions, + AnyRecord, + CaaRecord, + MxRecord, + NaptrRecord, + SoaRecord, + SrvRecord, + ResolveWithTtlOptions, + RecordWithTtl, + ResolveOptions, + ResolverOptions, + } from 'node:dns'; + /** + * Returns an array of IP address strings, formatted according to [RFC 5952](https://tools.ietf.org/html/rfc5952#section-6), + * that are currently configured for DNS resolution. A string will include a port + * section if a custom port is used. + * + * ```js + * [ + * '4.4.4.4', + * '2001:4860:4860::8888', + * '4.4.4.4:1053', + * '[2001:4860:4860::8888]:1053', + * ] + * ``` + * @since v10.6.0 + */ + function getServers(): string[]; + /** + * Resolves a host name (e.g. `'nodejs.org'`) into the first found A (IPv4) or + * AAAA (IPv6) record. All `option` properties are optional. If `options` is an + * integer, then it must be `4` or `6` – if `options` is not provided, then IPv4 + * and IPv6 addresses are both returned if found. + * + * With the `all` option set to `true`, the `Promise` is resolved with `addresses`being an array of objects with the properties `address` and `family`. + * + * On error, the `Promise` is rejected with an `Error` object, where `err.code`is the error code. + * Keep in mind that `err.code` will be set to `'ENOTFOUND'` not only when + * the host name does not exist but also when the lookup fails in other ways + * such as no available file descriptors. + * + * `dnsPromises.lookup()` does not necessarily have anything to do with the DNS + * protocol. 
The implementation uses an operating system facility that can + * associate names with addresses, and vice versa. This implementation can have + * subtle but important consequences on the behavior of any Node.js program. Please + * take some time to consult the `Implementation considerations section` before + * using `dnsPromises.lookup()`. + * + * Example usage: + * + * ```js + * const dns = require('dns'); + * const dnsPromises = dns.promises; + * const options = { + * family: 6, + * hints: dns.ADDRCONFIG | dns.V4MAPPED, + * }; + * + * dnsPromises.lookup('example.com', options).then((result) => { + * console.log('address: %j family: IPv%s', result.address, result.family); + * // address: "2606:2800:220:1:248:1893:25c8:1946" family: IPv6 + * }); + * + * // When options.all is true, the result will be an Array. + * options.all = true; + * dnsPromises.lookup('example.com', options).then((result) => { + * console.log('addresses: %j', result); + * // addresses: [{"address":"2606:2800:220:1:248:1893:25c8:1946","family":6}] + * }); + * ``` + * @since v10.6.0 + */ + function lookup(hostname: string, family: number): Promise; + function lookup(hostname: string, options: LookupOneOptions): Promise; + function lookup(hostname: string, options: LookupAllOptions): Promise; + function lookup(hostname: string, options: LookupOptions): Promise; + function lookup(hostname: string): Promise; + /** + * Resolves the given `address` and `port` into a host name and service using + * the operating system's underlying `getnameinfo` implementation. + * + * If `address` is not a valid IP address, a `TypeError` will be thrown. + * The `port` will be coerced to a number. If it is not a legal port, a `TypeError`will be thrown. + * + * On error, the `Promise` is rejected with an `Error` object, where `err.code`is the error code. + * + * ```js + * const dnsPromises = require('dns').promises; + * dnsPromises.lookupService('127.0.0.1', 22).then((result) => { + * console.log(result.hostname, result.service); + * // Prints: localhost ssh + * }); + * ``` + * @since v10.6.0 + */ + function lookupService( + address: string, + port: number + ): Promise<{ + hostname: string; + service: string; + }>; + /** + * Uses the DNS protocol to resolve a host name (e.g. `'nodejs.org'`) into an array + * of the resource records. When successful, the `Promise` is resolved with an + * array of resource records. The type and structure of individual results vary + * based on `rrtype`: + * + * + * + * On error, the `Promise` is rejected with an `Error` object, where `err.code`is one of the `DNS error codes`. + * @since v10.6.0 + * @param hostname Host name to resolve. + * @param [rrtype='A'] Resource record type. 
+ */ + function resolve(hostname: string): Promise; + function resolve(hostname: string, rrtype: 'A'): Promise; + function resolve(hostname: string, rrtype: 'AAAA'): Promise; + function resolve(hostname: string, rrtype: 'ANY'): Promise; + function resolve(hostname: string, rrtype: 'CAA'): Promise; + function resolve(hostname: string, rrtype: 'CNAME'): Promise; + function resolve(hostname: string, rrtype: 'MX'): Promise; + function resolve(hostname: string, rrtype: 'NAPTR'): Promise; + function resolve(hostname: string, rrtype: 'NS'): Promise; + function resolve(hostname: string, rrtype: 'PTR'): Promise; + function resolve(hostname: string, rrtype: 'SOA'): Promise; + function resolve(hostname: string, rrtype: 'SRV'): Promise; + function resolve(hostname: string, rrtype: 'TXT'): Promise; + function resolve(hostname: string, rrtype: string): Promise; + /** + * Uses the DNS protocol to resolve IPv4 addresses (`A` records) for the`hostname`. On success, the `Promise` is resolved with an array of IPv4 + * addresses (e.g. `['74.125.79.104', '74.125.79.105', '74.125.79.106']`). + * @since v10.6.0 + * @param hostname Host name to resolve. + */ + function resolve4(hostname: string): Promise; + function resolve4(hostname: string, options: ResolveWithTtlOptions): Promise; + function resolve4(hostname: string, options: ResolveOptions): Promise; + /** + * Uses the DNS protocol to resolve IPv6 addresses (`AAAA` records) for the`hostname`. On success, the `Promise` is resolved with an array of IPv6 + * addresses. + * @since v10.6.0 + * @param hostname Host name to resolve. + */ + function resolve6(hostname: string): Promise; + function resolve6(hostname: string, options: ResolveWithTtlOptions): Promise; + function resolve6(hostname: string, options: ResolveOptions): Promise; + /** + * Uses the DNS protocol to resolve all records (also known as `ANY` or `*` query). + * On success, the `Promise` is resolved with an array containing various types of + * records. Each object has a property `type` that indicates the type of the + * current record. And depending on the `type`, additional properties will be + * present on the object: + * + * + * + * Here is an example of the result object: + * + * ```js + * [ { type: 'A', address: '127.0.0.1', ttl: 299 }, + * { type: 'CNAME', value: 'example.com' }, + * { type: 'MX', exchange: 'alt4.aspmx.l.example.com', priority: 50 }, + * { type: 'NS', value: 'ns1.example.com' }, + * { type: 'TXT', entries: [ 'v=spf1 include:_spf.example.com ~all' ] }, + * { type: 'SOA', + * nsname: 'ns1.example.com', + * hostmaster: 'admin.example.com', + * serial: 156696742, + * refresh: 900, + * retry: 900, + * expire: 1800, + * minttl: 60 } ] + * ``` + * @since v10.6.0 + */ + function resolveAny(hostname: string): Promise; + /** + * Uses the DNS protocol to resolve `CAA` records for the `hostname`. On success, + * the `Promise` is resolved with an array of objects containing available + * certification authority authorization records available for the `hostname`(e.g. `[{critical: 0, iodef: 'mailto:pki@example.com'},{critical: 128, issue: 'pki.example.com'}]`). + * @since v15.0.0, v14.17.0 + */ + function resolveCaa(hostname: string): Promise; + /** + * Uses the DNS protocol to resolve `CNAME` records for the `hostname`. On success, + * the `Promise` is resolved with an array of canonical name records available for + * the `hostname` (e.g. `['bar.example.com']`). 
+ * @since v10.6.0 + */ + function resolveCname(hostname: string): Promise; + /** + * Uses the DNS protocol to resolve mail exchange records (`MX` records) for the`hostname`. On success, the `Promise` is resolved with an array of objects + * containing both a `priority` and `exchange` property (e.g.`[{priority: 10, exchange: 'mx.example.com'}, ...]`). + * @since v10.6.0 + */ + function resolveMx(hostname: string): Promise; + /** + * Uses the DNS protocol to resolve regular expression based records (`NAPTR`records) for the `hostname`. On success, the `Promise` is resolved with an array + * of objects with the following properties: + * + * * `flags` + * * `service` + * * `regexp` + * * `replacement` + * * `order` + * * `preference` + * + * ```js + * { + * flags: 's', + * service: 'SIP+D2U', + * regexp: '', + * replacement: '_sip._udp.example.com', + * order: 30, + * preference: 100 + * } + * ``` + * @since v10.6.0 + */ + function resolveNaptr(hostname: string): Promise; + /** + * Uses the DNS protocol to resolve name server records (`NS` records) for the`hostname`. On success, the `Promise` is resolved with an array of name server + * records available for `hostname` (e.g.`['ns1.example.com', 'ns2.example.com']`). + * @since v10.6.0 + */ + function resolveNs(hostname: string): Promise; + /** + * Uses the DNS protocol to resolve pointer records (`PTR` records) for the`hostname`. On success, the `Promise` is resolved with an array of strings + * containing the reply records. + * @since v10.6.0 + */ + function resolvePtr(hostname: string): Promise; + /** + * Uses the DNS protocol to resolve a start of authority record (`SOA` record) for + * the `hostname`. On success, the `Promise` is resolved with an object with the + * following properties: + * + * * `nsname` + * * `hostmaster` + * * `serial` + * * `refresh` + * * `retry` + * * `expire` + * * `minttl` + * + * ```js + * { + * nsname: 'ns.example.com', + * hostmaster: 'root.example.com', + * serial: 2013101809, + * refresh: 10000, + * retry: 2400, + * expire: 604800, + * minttl: 3600 + * } + * ``` + * @since v10.6.0 + */ + function resolveSoa(hostname: string): Promise; + /** + * Uses the DNS protocol to resolve service records (`SRV` records) for the`hostname`. On success, the `Promise` is resolved with an array of objects with + * the following properties: + * + * * `priority` + * * `weight` + * * `port` + * * `name` + * + * ```js + * { + * priority: 10, + * weight: 5, + * port: 21223, + * name: 'service.example.com' + * } + * ``` + * @since v10.6.0 + */ + function resolveSrv(hostname: string): Promise; + /** + * Uses the DNS protocol to resolve text queries (`TXT` records) for the`hostname`. On success, the `Promise` is resolved with a two-dimensional array + * of the text records available for `hostname` (e.g.`[ ['v=spf1 ip4:0.0.0.0 ', '~all' ] ]`). Each sub-array contains TXT chunks of + * one record. Depending on the use case, these could be either joined together or + * treated separately. + * @since v10.6.0 + */ + function resolveTxt(hostname: string): Promise; + /** + * Performs a reverse DNS query that resolves an IPv4 or IPv6 address to an + * array of host names. + * + * On error, the `Promise` is rejected with an `Error` object, where `err.code`is one of the `DNS error codes`. + * @since v10.6.0 + */ + function reverse(ip: string): Promise; + /** + * Sets the IP address and port of servers to be used when performing DNS + * resolution. 
The `servers` argument is an array of [RFC 5952](https://tools.ietf.org/html/rfc5952#section-6) formatted + * addresses. If the port is the IANA default DNS port (53) it can be omitted. + * + * ```js + * dnsPromises.setServers([ + * '4.4.4.4', + * '[2001:4860:4860::8888]', + * '4.4.4.4:1053', + * '[2001:4860:4860::8888]:1053', + * ]); + * ``` + * + * An error will be thrown if an invalid address is provided. + * + * The `dnsPromises.setServers()` method must not be called while a DNS query is in + * progress. + * + * This method works much like [resolve.conf](https://man7.org/linux/man-pages/man5/resolv.conf.5.html). + * That is, if attempting to resolve with the first server provided results in a`NOTFOUND` error, the `resolve()` method will _not_ attempt to resolve with + * subsequent servers provided. Fallback DNS servers will only be used if the + * earlier ones time out or result in some other error. + * @since v10.6.0 + * @param servers array of `RFC 5952` formatted addresses + */ + function setServers(servers: ReadonlyArray): void; + /** + * Set the default value of `verbatim` in `dns.lookup()` and `dnsPromises.lookup()`. The value could be: + * + * * `ipv4first`: sets default `verbatim` `false`. + * * `verbatim`: sets default `verbatim` `true`. + * + * The default is `ipv4first` and `dnsPromises.setDefaultResultOrder()` have + * higher priority than `--dns-result-order`. When using `worker threads`,`dnsPromises.setDefaultResultOrder()` from the main thread won't affect the + * default dns orders in workers. + * @since v16.4.0, v14.18.0 + * @param order must be `'ipv4first'` or `'verbatim'`. + */ + function setDefaultResultOrder(order: 'ipv4first' | 'verbatim'): void; + class Resolver { + constructor(options?: ResolverOptions); + cancel(): void; + getServers: typeof getServers; + resolve: typeof resolve; + resolve4: typeof resolve4; + resolve6: typeof resolve6; + resolveAny: typeof resolveAny; + resolveCname: typeof resolveCname; + resolveMx: typeof resolveMx; + resolveNaptr: typeof resolveNaptr; + resolveNs: typeof resolveNs; + resolvePtr: typeof resolvePtr; + resolveSoa: typeof resolveSoa; + resolveSrv: typeof resolveSrv; + resolveTxt: typeof resolveTxt; + reverse: typeof reverse; + setLocalAddress(ipv4?: string, ipv6?: string): void; + setServers: typeof setServers; + } +} +declare module 'node:dns/promises' { + export * from 'dns/promises'; +} diff --git a/node_modules/@types/node/ts4.8/dom-events.d.ts b/node_modules/@types/node/ts4.8/dom-events.d.ts new file mode 100755 index 0000000..b9c1c3a --- /dev/null +++ b/node_modules/@types/node/ts4.8/dom-events.d.ts @@ -0,0 +1,126 @@ +export {}; // Don't export anything! + +//// DOM-like Events +// NB: The Event / EventTarget / EventListener implementations below were copied +// from lib.dom.d.ts, then edited to reflect Node's documentation at +// https://nodejs.org/api/events.html#class-eventtarget. +// Please read that link to understand important implementation differences. + +// This conditional type will be the existing global Event in a browser, or +// the copy below in a Node environment. +type __Event = typeof globalThis extends { onmessage: any, Event: any } +? {} +: { + /** This is not used in Node.js and is provided purely for completeness. */ + readonly bubbles: boolean; + /** Alias for event.stopPropagation(). This is not used in Node.js and is provided purely for completeness. 
*/ + cancelBubble: () => void; + /** True if the event was created with the cancelable option */ + readonly cancelable: boolean; + /** This is not used in Node.js and is provided purely for completeness. */ + readonly composed: boolean; + /** Returns an array containing the current EventTarget as the only entry or empty if the event is not being dispatched. This is not used in Node.js and is provided purely for completeness. */ + composedPath(): [EventTarget?] + /** Alias for event.target. */ + readonly currentTarget: EventTarget | null; + /** Is true if cancelable is true and event.preventDefault() has been called. */ + readonly defaultPrevented: boolean; + /** This is not used in Node.js and is provided purely for completeness. */ + readonly eventPhase: 0 | 2; + /** The `AbortSignal` "abort" event is emitted with `isTrusted` set to `true`. The value is `false` in all other cases. */ + readonly isTrusted: boolean; + /** Sets the `defaultPrevented` property to `true` if `cancelable` is `true`. */ + preventDefault(): void; + /** This is not used in Node.js and is provided purely for completeness. */ + returnValue: boolean; + /** Alias for event.target. */ + readonly srcElement: EventTarget | null; + /** Stops the invocation of event listeners after the current one completes. */ + stopImmediatePropagation(): void; + /** This is not used in Node.js and is provided purely for completeness. */ + stopPropagation(): void; + /** The `EventTarget` dispatching the event */ + readonly target: EventTarget | null; + /** The millisecond timestamp when the Event object was created. */ + readonly timeStamp: number; + /** Returns the type of event, e.g. "click", "hashchange", or "submit". */ + readonly type: string; +}; + +// See comment above explaining conditional type +type __EventTarget = typeof globalThis extends { onmessage: any, EventTarget: any } +? {} +: { + /** + * Adds a new handler for the `type` event. Any given `listener` is added only once per `type` and per `capture` option value. + * + * If the `once` option is true, the `listener` is removed after the next time a `type` event is dispatched. + * + * The `capture` option is not used by Node.js in any functional way other than tracking registered event listeners per the `EventTarget` specification. + * Specifically, the `capture` option is used as part of the key when registering a `listener`. + * Any individual `listener` may be added once with `capture = false`, and once with `capture = true`. + */ + addEventListener( + type: string, + listener: EventListener | EventListenerObject, + options?: AddEventListenerOptions | boolean, + ): void; + /** Dispatches a synthetic event event to target and returns true if either event's cancelable attribute value is false or its preventDefault() method was not invoked, and false otherwise. */ + dispatchEvent(event: Event): boolean; + /** Removes the event listener in target's event listener list with the same type, callback, and options. */ + removeEventListener( + type: string, + listener: EventListener | EventListenerObject, + options?: EventListenerOptions | boolean, + ): void; +}; + +interface EventInit { + bubbles?: boolean; + cancelable?: boolean; + composed?: boolean; +} + +interface EventListenerOptions { + /** Not directly used by Node.js. Added for API completeness. Default: `false`. */ + capture?: boolean; +} + +interface AddEventListenerOptions extends EventListenerOptions { + /** When `true`, the listener is automatically removed when it is first invoked. Default: `false`. 
*/ + once?: boolean; + /** When `true`, serves as a hint that the listener will not call the `Event` object's `preventDefault()` method. Default: false. */ + passive?: boolean; +} + +interface EventListener { + (evt: Event): void; +} + +interface EventListenerObject { + handleEvent(object: Event): void; +} + +import {} from 'events'; // Make this an ambient declaration +declare global { + /** An event which takes place in the DOM. */ + interface Event extends __Event {} + var Event: typeof globalThis extends { onmessage: any, Event: infer T } + ? T + : { + prototype: __Event; + new (type: string, eventInitDict?: EventInit): __Event; + }; + + /** + * EventTarget is a DOM interface implemented by objects that can + * receive events and may have listeners for them. + */ + interface EventTarget extends __EventTarget {} + var EventTarget: typeof globalThis extends { onmessage: any, EventTarget: infer T } + ? T + : { + prototype: __EventTarget; + new (): __EventTarget; + }; +} diff --git a/node_modules/@types/node/ts4.8/domain.d.ts b/node_modules/@types/node/ts4.8/domain.d.ts new file mode 100755 index 0000000..fafe68a --- /dev/null +++ b/node_modules/@types/node/ts4.8/domain.d.ts @@ -0,0 +1,170 @@ +/** + * **This module is pending deprecation.** Once a replacement API has been + * finalized, this module will be fully deprecated. Most developers should + * **not** have cause to use this module. Users who absolutely must have + * the functionality that domains provide may rely on it for the time being + * but should expect to have to migrate to a different solution + * in the future. + * + * Domains provide a way to handle multiple different IO operations as a + * single group. If any of the event emitters or callbacks registered to a + * domain emit an `'error'` event, or throw an error, then the domain object + * will be notified, rather than losing the context of the error in the`process.on('uncaughtException')` handler, or causing the program to + * exit immediately with an error code. + * @deprecated Since v1.4.2 - Deprecated + * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/domain.js) + */ +declare module 'domain' { + import EventEmitter = require('node:events'); + /** + * The `Domain` class encapsulates the functionality of routing errors and + * uncaught exceptions to the active `Domain` object. + * + * To handle the errors that it catches, listen to its `'error'` event. + */ + class Domain extends EventEmitter { + /** + * An array of timers and event emitters that have been explicitly added + * to the domain. + */ + members: Array; + /** + * The `enter()` method is plumbing used by the `run()`, `bind()`, and`intercept()` methods to set the active domain. It sets `domain.active` and`process.domain` to the domain, and implicitly + * pushes the domain onto the domain + * stack managed by the domain module (see {@link exit} for details on the + * domain stack). The call to `enter()` delimits the beginning of a chain of + * asynchronous calls and I/O operations bound to a domain. + * + * Calling `enter()` changes only the active domain, and does not alter the domain + * itself. `enter()` and `exit()` can be called an arbitrary number of times on a + * single domain. + */ + enter(): void; + /** + * The `exit()` method exits the current domain, popping it off the domain stack. + * Any time execution is going to switch to the context of a different chain of + * asynchronous calls, it's important to ensure that the current domain is exited. 
+ * The call to `exit()` delimits either the end of or an interruption to the chain + * of asynchronous calls and I/O operations bound to a domain. + * + * If there are multiple, nested domains bound to the current execution context,`exit()` will exit any domains nested within this domain. + * + * Calling `exit()` changes only the active domain, and does not alter the domain + * itself. `enter()` and `exit()` can be called an arbitrary number of times on a + * single domain. + */ + exit(): void; + /** + * Run the supplied function in the context of the domain, implicitly + * binding all event emitters, timers, and lowlevel requests that are + * created in that context. Optionally, arguments can be passed to + * the function. + * + * This is the most basic way to use a domain. + * + * ```js + * const domain = require('domain'); + * const fs = require('fs'); + * const d = domain.create(); + * d.on('error', (er) => { + * console.error('Caught error!', er); + * }); + * d.run(() => { + * process.nextTick(() => { + * setTimeout(() => { // Simulating some various async stuff + * fs.open('non-existent file', 'r', (er, fd) => { + * if (er) throw er; + * // proceed... + * }); + * }, 100); + * }); + * }); + * ``` + * + * In this example, the `d.on('error')` handler will be triggered, rather + * than crashing the program. + */ + run(fn: (...args: any[]) => T, ...args: any[]): T; + /** + * Explicitly adds an emitter to the domain. If any event handlers called by + * the emitter throw an error, or if the emitter emits an `'error'` event, it + * will be routed to the domain's `'error'` event, just like with implicit + * binding. + * + * This also works with timers that are returned from `setInterval()` and `setTimeout()`. If their callback function throws, it will be caught by + * the domain `'error'` handler. + * + * If the Timer or `EventEmitter` was already bound to a domain, it is removed + * from that one, and bound to this one instead. + * @param emitter emitter or timer to be added to the domain + */ + add(emitter: EventEmitter | NodeJS.Timer): void; + /** + * The opposite of {@link add}. Removes domain handling from the + * specified emitter. + * @param emitter emitter or timer to be removed from the domain + */ + remove(emitter: EventEmitter | NodeJS.Timer): void; + /** + * The returned function will be a wrapper around the supplied callback + * function. When the returned function is called, any errors that are + * thrown will be routed to the domain's `'error'` event. + * + * ```js + * const d = domain.create(); + * + * function readSomeFile(filename, cb) { + * fs.readFile(filename, 'utf8', d.bind((er, data) => { + * // If this throws, it will also be passed to the domain. + * return cb(er, data ? JSON.parse(data) : null); + * })); + * } + * + * d.on('error', (er) => { + * // An error occurred somewhere. If we throw it now, it will crash the program + * // with the normal line number and stack message. + * }); + * ``` + * @param callback The callback function + * @return The bound function + */ + bind(callback: T): T; + /** + * This method is almost identical to {@link bind}. However, in + * addition to catching thrown errors, it will also intercept `Error` objects sent as the first argument to the function. + * + * In this way, the common `if (err) return callback(err);` pattern can be replaced + * with a single error handler in a single place. 
+ * + * ```js + * const d = domain.create(); + * + * function readSomeFile(filename, cb) { + * fs.readFile(filename, 'utf8', d.intercept((data) => { + * // Note, the first argument is never passed to the + * // callback since it is assumed to be the 'Error' argument + * // and thus intercepted by the domain. + * + * // If this throws, it will also be passed to the domain + * // so the error-handling logic can be moved to the 'error' + * // event on the domain instead of being repeated throughout + * // the program. + * return cb(null, JSON.parse(data)); + * })); + * } + * + * d.on('error', (er) => { + * // An error occurred somewhere. If we throw it now, it will crash the program + * // with the normal line number and stack message. + * }); + * ``` + * @param callback The callback function + * @return The intercepted function + */ + intercept(callback: T): T; + } + function create(): Domain; +} +declare module 'node:domain' { + export * from 'domain'; +} diff --git a/node_modules/@types/node/ts4.8/events.d.ts b/node_modules/@types/node/ts4.8/events.d.ts new file mode 100755 index 0000000..4633df1 --- /dev/null +++ b/node_modules/@types/node/ts4.8/events.d.ts @@ -0,0 +1,678 @@ +/** + * Much of the Node.js core API is built around an idiomatic asynchronous + * event-driven architecture in which certain kinds of objects (called "emitters") + * emit named events that cause `Function` objects ("listeners") to be called. + * + * For instance: a `net.Server` object emits an event each time a peer + * connects to it; a `fs.ReadStream` emits an event when the file is opened; + * a `stream` emits an event whenever data is available to be read. + * + * All objects that emit events are instances of the `EventEmitter` class. These + * objects expose an `eventEmitter.on()` function that allows one or more + * functions to be attached to named events emitted by the object. Typically, + * event names are camel-cased strings but any valid JavaScript property key + * can be used. + * + * When the `EventEmitter` object emits an event, all of the functions attached + * to that specific event are called _synchronously_. Any values returned by the + * called listeners are _ignored_ and discarded. + * + * The following example shows a simple `EventEmitter` instance with a single + * listener. The `eventEmitter.on()` method is used to register listeners, while + * the `eventEmitter.emit()` method is used to trigger the event. + * + * ```js + * const EventEmitter = require('events'); + * + * class MyEmitter extends EventEmitter {} + * + * const myEmitter = new MyEmitter(); + * myEmitter.on('event', () => { + * console.log('an event occurred!'); + * }); + * myEmitter.emit('event'); + * ``` + * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/events.js) + */ +declare module 'events' { + // NOTE: This class is in the docs but is **not actually exported** by Node. + // If https://github.com/nodejs/node/issues/39903 gets resolved and Node + // actually starts exporting the class, uncomment below. + + // import { EventListener, EventListenerObject } from '__dom-events'; + // /** The NodeEventTarget is a Node.js-specific extension to EventTarget that emulates a subset of the EventEmitter API. */ + // interface NodeEventTarget extends EventTarget { + // /** + // * Node.js-specific extension to the `EventTarget` class that emulates the equivalent `EventEmitter` API. + // * The only difference between `addListener()` and `addEventListener()` is that addListener() will return a reference to the EventTarget. 
+ // */ + // addListener(type: string, listener: EventListener | EventListenerObject, options?: { once: boolean }): this; + // /** Node.js-specific extension to the `EventTarget` class that returns an array of event `type` names for which event listeners are registered. */ + // eventNames(): string[]; + // /** Node.js-specific extension to the `EventTarget` class that returns the number of event listeners registered for the `type`. */ + // listenerCount(type: string): number; + // /** Node.js-specific alias for `eventTarget.removeListener()`. */ + // off(type: string, listener: EventListener | EventListenerObject): this; + // /** Node.js-specific alias for `eventTarget.addListener()`. */ + // on(type: string, listener: EventListener | EventListenerObject, options?: { once: boolean }): this; + // /** Node.js-specific extension to the `EventTarget` class that adds a `once` listener for the given event `type`. This is equivalent to calling `on` with the `once` option set to `true`. */ + // once(type: string, listener: EventListener | EventListenerObject): this; + // /** + // * Node.js-specific extension to the `EventTarget` class. + // * If `type` is specified, removes all registered listeners for `type`, + // * otherwise removes all registered listeners. + // */ + // removeAllListeners(type: string): this; + // /** + // * Node.js-specific extension to the `EventTarget` class that removes the listener for the given `type`. + // * The only difference between `removeListener()` and `removeEventListener()` is that `removeListener()` will return a reference to the `EventTarget`. + // */ + // removeListener(type: string, listener: EventListener | EventListenerObject): this; + // } + + interface EventEmitterOptions { + /** + * Enables automatic capturing of promise rejection. + */ + captureRejections?: boolean | undefined; + } + // Any EventTarget with a Node-style `once` function + interface _NodeEventTarget { + once(eventName: string | symbol, listener: (...args: any[]) => void): this; + } + // Any EventTarget with a DOM-style `addEventListener` + interface _DOMEventTarget { + addEventListener( + eventName: string, + listener: (...args: any[]) => void, + opts?: { + once: boolean; + } + ): any; + } + interface StaticEventEmitterOptions { + signal?: AbortSignal | undefined; + } + interface EventEmitter extends NodeJS.EventEmitter {} + /** + * The `EventEmitter` class is defined and exposed by the `events` module: + * + * ```js + * const EventEmitter = require('events'); + * ``` + * + * All `EventEmitter`s emit the event `'newListener'` when new listeners are + * added and `'removeListener'` when existing listeners are removed. + * + * It supports the following option: + * @since v0.1.26 + */ + class EventEmitter { + constructor(options?: EventEmitterOptions); + /** + * Creates a `Promise` that is fulfilled when the `EventEmitter` emits the given + * event or that is rejected if the `EventEmitter` emits `'error'` while waiting. + * The `Promise` will resolve with an array of all the arguments emitted to the + * given event. + * + * This method is intentionally generic and works with the web platform [EventTarget](https://dom.spec.whatwg.org/#interface-eventtarget) interface, which has no special`'error'` event + * semantics and does not listen to the `'error'` event. 
+ * + * ```js + * const { once, EventEmitter } = require('events'); + * + * async function run() { + * const ee = new EventEmitter(); + * + * process.nextTick(() => { + * ee.emit('myevent', 42); + * }); + * + * const [value] = await once(ee, 'myevent'); + * console.log(value); + * + * const err = new Error('kaboom'); + * process.nextTick(() => { + * ee.emit('error', err); + * }); + * + * try { + * await once(ee, 'myevent'); + * } catch (err) { + * console.log('error happened', err); + * } + * } + * + * run(); + * ``` + * + * The special handling of the `'error'` event is only used when `events.once()`is used to wait for another event. If `events.once()` is used to wait for the + * '`error'` event itself, then it is treated as any other kind of event without + * special handling: + * + * ```js + * const { EventEmitter, once } = require('events'); + * + * const ee = new EventEmitter(); + * + * once(ee, 'error') + * .then(([err]) => console.log('ok', err.message)) + * .catch((err) => console.log('error', err.message)); + * + * ee.emit('error', new Error('boom')); + * + * // Prints: ok boom + * ``` + * + * An `AbortSignal` can be used to cancel waiting for the event: + * + * ```js + * const { EventEmitter, once } = require('events'); + * + * const ee = new EventEmitter(); + * const ac = new AbortController(); + * + * async function foo(emitter, event, signal) { + * try { + * await once(emitter, event, { signal }); + * console.log('event emitted!'); + * } catch (error) { + * if (error.name === 'AbortError') { + * console.error('Waiting for the event was canceled!'); + * } else { + * console.error('There was an error', error.message); + * } + * } + * } + * + * foo(ee, 'foo', ac.signal); + * ac.abort(); // Abort waiting for the event + * ee.emit('foo'); // Prints: Waiting for the event was canceled! + * ``` + * @since v11.13.0, v10.16.0 + */ + static once(emitter: _NodeEventTarget, eventName: string | symbol, options?: StaticEventEmitterOptions): Promise; + static once(emitter: _DOMEventTarget, eventName: string, options?: StaticEventEmitterOptions): Promise; + /** + * ```js + * const { on, EventEmitter } = require('events'); + * + * (async () => { + * const ee = new EventEmitter(); + * + * // Emit later on + * process.nextTick(() => { + * ee.emit('foo', 'bar'); + * ee.emit('foo', 42); + * }); + * + * for await (const event of on(ee, 'foo')) { + * // The execution of this inner block is synchronous and it + * // processes one event at a time (even with await). Do not use + * // if concurrent execution is required. + * console.log(event); // prints ['bar'] [42] + * } + * // Unreachable here + * })(); + * ``` + * + * Returns an `AsyncIterator` that iterates `eventName` events. It will throw + * if the `EventEmitter` emits `'error'`. It removes all listeners when + * exiting the loop. The `value` returned by each iteration is an array + * composed of the emitted event arguments. + * + * An `AbortSignal` can be used to cancel waiting on events: + * + * ```js + * const { on, EventEmitter } = require('events'); + * const ac = new AbortController(); + * + * (async () => { + * const ee = new EventEmitter(); + * + * // Emit later on + * process.nextTick(() => { + * ee.emit('foo', 'bar'); + * ee.emit('foo', 42); + * }); + * + * for await (const event of on(ee, 'foo', { signal: ac.signal })) { + * // The execution of this inner block is synchronous and it + * // processes one event at a time (even with await). Do not use + * // if concurrent execution is required. 
+ * console.log(event); // prints ['bar'] [42] + * } + * // Unreachable here + * })(); + * + * process.nextTick(() => ac.abort()); + * ``` + * @since v13.6.0, v12.16.0 + * @param eventName The name of the event being listened for + * @return that iterates `eventName` events emitted by the `emitter` + */ + static on(emitter: NodeJS.EventEmitter, eventName: string, options?: StaticEventEmitterOptions): AsyncIterableIterator; + /** + * A class method that returns the number of listeners for the given `eventName`registered on the given `emitter`. + * + * ```js + * const { EventEmitter, listenerCount } = require('events'); + * const myEmitter = new EventEmitter(); + * myEmitter.on('event', () => {}); + * myEmitter.on('event', () => {}); + * console.log(listenerCount(myEmitter, 'event')); + * // Prints: 2 + * ``` + * @since v0.9.12 + * @deprecated Since v3.2.0 - Use `listenerCount` instead. + * @param emitter The emitter to query + * @param eventName The event name + */ + static listenerCount(emitter: NodeJS.EventEmitter, eventName: string | symbol): number; + /** + * Returns a copy of the array of listeners for the event named `eventName`. + * + * For `EventEmitter`s this behaves exactly the same as calling `.listeners` on + * the emitter. + * + * For `EventTarget`s this is the only way to get the event listeners for the + * event target. This is useful for debugging and diagnostic purposes. + * + * ```js + * const { getEventListeners, EventEmitter } = require('events'); + * + * { + * const ee = new EventEmitter(); + * const listener = () => console.log('Events are fun'); + * ee.on('foo', listener); + * getEventListeners(ee, 'foo'); // [listener] + * } + * { + * const et = new EventTarget(); + * const listener = () => console.log('Events are fun'); + * et.addEventListener('foo', listener); + * getEventListeners(et, 'foo'); // [listener] + * } + * ``` + * @since v15.2.0, v14.17.0 + */ + static getEventListeners(emitter: _DOMEventTarget | NodeJS.EventEmitter, name: string | symbol): Function[]; + /** + * ```js + * const { + * setMaxListeners, + * EventEmitter + * } = require('events'); + * + * const target = new EventTarget(); + * const emitter = new EventEmitter(); + * + * setMaxListeners(5, target, emitter); + * ``` + * @since v15.4.0 + * @param n A non-negative number. The maximum number of listeners per `EventTarget` event. + * @param eventsTargets Zero or more {EventTarget} or {EventEmitter} instances. If none are specified, `n` is set as the default max for all newly created {EventTarget} and {EventEmitter} + * objects. + */ + static setMaxListeners(n?: number, ...eventTargets: Array<_DOMEventTarget | NodeJS.EventEmitter>): void; + /** + * This symbol shall be used to install a listener for only monitoring `'error'` + * events. Listeners installed using this symbol are called before the regular + * `'error'` listeners are called. + * + * Installing a listener using this symbol does not change the behavior once an + * `'error'` event is emitted, therefore the process will still crash if no + * regular `'error'` listener is installed. + */ + static readonly errorMonitor: unique symbol; + static readonly captureRejectionSymbol: unique symbol; + /** + * Sets or gets the default captureRejection value for all emitters. 
+ */ + // TODO: These should be described using static getter/setter pairs: + static captureRejections: boolean; + static defaultMaxListeners: number; + } + import internal = require('node:events'); + namespace EventEmitter { + // Should just be `export { EventEmitter }`, but that doesn't work in TypeScript 3.4 + export { internal as EventEmitter }; + export interface Abortable { + /** + * When provided the corresponding `AbortController` can be used to cancel an asynchronous action. + */ + signal?: AbortSignal | undefined; + } + } + global { + namespace NodeJS { + interface EventEmitter { + /** + * Alias for `emitter.on(eventName, listener)`. + * @since v0.1.26 + */ + addListener(eventName: string | symbol, listener: (...args: any[]) => void): this; + /** + * Adds the `listener` function to the end of the listeners array for the + * event named `eventName`. No checks are made to see if the `listener` has + * already been added. Multiple calls passing the same combination of `eventName`and `listener` will result in the `listener` being added, and called, multiple + * times. + * + * ```js + * server.on('connection', (stream) => { + * console.log('someone connected!'); + * }); + * ``` + * + * Returns a reference to the `EventEmitter`, so that calls can be chained. + * + * By default, event listeners are invoked in the order they are added. The`emitter.prependListener()` method can be used as an alternative to add the + * event listener to the beginning of the listeners array. + * + * ```js + * const myEE = new EventEmitter(); + * myEE.on('foo', () => console.log('a')); + * myEE.prependListener('foo', () => console.log('b')); + * myEE.emit('foo'); + * // Prints: + * // b + * // a + * ``` + * @since v0.1.101 + * @param eventName The name of the event. + * @param listener The callback function + */ + on(eventName: string | symbol, listener: (...args: any[]) => void): this; + /** + * Adds a **one-time**`listener` function for the event named `eventName`. The + * next time `eventName` is triggered, this listener is removed and then invoked. + * + * ```js + * server.once('connection', (stream) => { + * console.log('Ah, we have our first user!'); + * }); + * ``` + * + * Returns a reference to the `EventEmitter`, so that calls can be chained. + * + * By default, event listeners are invoked in the order they are added. The`emitter.prependOnceListener()` method can be used as an alternative to add the + * event listener to the beginning of the listeners array. + * + * ```js + * const myEE = new EventEmitter(); + * myEE.once('foo', () => console.log('a')); + * myEE.prependOnceListener('foo', () => console.log('b')); + * myEE.emit('foo'); + * // Prints: + * // b + * // a + * ``` + * @since v0.3.0 + * @param eventName The name of the event. + * @param listener The callback function + */ + once(eventName: string | symbol, listener: (...args: any[]) => void): this; + /** + * Removes the specified `listener` from the listener array for the event named`eventName`. + * + * ```js + * const callback = (stream) => { + * console.log('someone connected!'); + * }; + * server.on('connection', callback); + * // ... + * server.removeListener('connection', callback); + * ``` + * + * `removeListener()` will remove, at most, one instance of a listener from the + * listener array. If any single listener has been added multiple times to the + * listener array for the specified `eventName`, then `removeListener()` must be + * called multiple times to remove each instance. 
+ * + * Once an event is emitted, all listeners attached to it at the + * time of emitting are called in order. This implies that any`removeListener()` or `removeAllListeners()` calls _after_ emitting and _before_ the last listener finishes execution + * will not remove them from`emit()` in progress. Subsequent events behave as expected. + * + * ```js + * const myEmitter = new MyEmitter(); + * + * const callbackA = () => { + * console.log('A'); + * myEmitter.removeListener('event', callbackB); + * }; + * + * const callbackB = () => { + * console.log('B'); + * }; + * + * myEmitter.on('event', callbackA); + * + * myEmitter.on('event', callbackB); + * + * // callbackA removes listener callbackB but it will still be called. + * // Internal listener array at time of emit [callbackA, callbackB] + * myEmitter.emit('event'); + * // Prints: + * // A + * // B + * + * // callbackB is now removed. + * // Internal listener array [callbackA] + * myEmitter.emit('event'); + * // Prints: + * // A + * ``` + * + * Because listeners are managed using an internal array, calling this will + * change the position indices of any listener registered _after_ the listener + * being removed. This will not impact the order in which listeners are called, + * but it means that any copies of the listener array as returned by + * the `emitter.listeners()` method will need to be recreated. + * + * When a single function has been added as a handler multiple times for a single + * event (as in the example below), `removeListener()` will remove the most + * recently added instance. In the example the `once('ping')`listener is removed: + * + * ```js + * const ee = new EventEmitter(); + * + * function pong() { + * console.log('pong'); + * } + * + * ee.on('ping', pong); + * ee.once('ping', pong); + * ee.removeListener('ping', pong); + * + * ee.emit('ping'); + * ee.emit('ping'); + * ``` + * + * Returns a reference to the `EventEmitter`, so that calls can be chained. + * @since v0.1.26 + */ + removeListener(eventName: string | symbol, listener: (...args: any[]) => void): this; + /** + * Alias for `emitter.removeListener()`. + * @since v10.0.0 + */ + off(eventName: string | symbol, listener: (...args: any[]) => void): this; + /** + * Removes all listeners, or those of the specified `eventName`. + * + * It is bad practice to remove listeners added elsewhere in the code, + * particularly when the `EventEmitter` instance was created by some other + * component or module (e.g. sockets or file streams). + * + * Returns a reference to the `EventEmitter`, so that calls can be chained. + * @since v0.1.26 + */ + removeAllListeners(event?: string | symbol): this; + /** + * By default `EventEmitter`s will print a warning if more than `10` listeners are + * added for a particular event. This is a useful default that helps finding + * memory leaks. The `emitter.setMaxListeners()` method allows the limit to be + * modified for this specific `EventEmitter` instance. The value can be set to`Infinity` (or `0`) to indicate an unlimited number of listeners. + * + * Returns a reference to the `EventEmitter`, so that calls can be chained. + * @since v0.3.5 + */ + setMaxListeners(n: number): this; + /** + * Returns the current max listener value for the `EventEmitter` which is either + * set by `emitter.setMaxListeners(n)` or defaults to {@link defaultMaxListeners}. + * @since v1.0.0 + */ + getMaxListeners(): number; + /** + * Returns a copy of the array of listeners for the event named `eventName`. 
+ * + * ```js + * server.on('connection', (stream) => { + * console.log('someone connected!'); + * }); + * console.log(util.inspect(server.listeners('connection'))); + * // Prints: [ [Function] ] + * ``` + * @since v0.1.26 + */ + listeners(eventName: string | symbol): Function[]; + /** + * Returns a copy of the array of listeners for the event named `eventName`, + * including any wrappers (such as those created by `.once()`). + * + * ```js + * const emitter = new EventEmitter(); + * emitter.once('log', () => console.log('log once')); + * + * // Returns a new Array with a function `onceWrapper` which has a property + * // `listener` which contains the original listener bound above + * const listeners = emitter.rawListeners('log'); + * const logFnWrapper = listeners[0]; + * + * // Logs "log once" to the console and does not unbind the `once` event + * logFnWrapper.listener(); + * + * // Logs "log once" to the console and removes the listener + * logFnWrapper(); + * + * emitter.on('log', () => console.log('log persistently')); + * // Will return a new Array with a single function bound by `.on()` above + * const newListeners = emitter.rawListeners('log'); + * + * // Logs "log persistently" twice + * newListeners[0](); + * emitter.emit('log'); + * ``` + * @since v9.4.0 + */ + rawListeners(eventName: string | symbol): Function[]; + /** + * Synchronously calls each of the listeners registered for the event named`eventName`, in the order they were registered, passing the supplied arguments + * to each. + * + * Returns `true` if the event had listeners, `false` otherwise. + * + * ```js + * const EventEmitter = require('events'); + * const myEmitter = new EventEmitter(); + * + * // First listener + * myEmitter.on('event', function firstListener() { + * console.log('Helloooo! first listener'); + * }); + * // Second listener + * myEmitter.on('event', function secondListener(arg1, arg2) { + * console.log(`event with parameters ${arg1}, ${arg2} in second listener`); + * }); + * // Third listener + * myEmitter.on('event', function thirdListener(...args) { + * const parameters = args.join(', '); + * console.log(`event with parameters ${parameters} in third listener`); + * }); + * + * console.log(myEmitter.listeners('event')); + * + * myEmitter.emit('event', 1, 2, 3, 4, 5); + * + * // Prints: + * // [ + * // [Function: firstListener], + * // [Function: secondListener], + * // [Function: thirdListener] + * // ] + * // Helloooo! first listener + * // event with parameters 1, 2 in second listener + * // event with parameters 1, 2, 3, 4, 5 in third listener + * ``` + * @since v0.1.26 + */ + emit(eventName: string | symbol, ...args: any[]): boolean; + /** + * Returns the number of listeners listening to the event named `eventName`. + * @since v3.2.0 + * @param eventName The name of the event being listened for + */ + listenerCount(eventName: string | symbol): number; + /** + * Adds the `listener` function to the _beginning_ of the listeners array for the + * event named `eventName`. No checks are made to see if the `listener` has + * already been added. Multiple calls passing the same combination of `eventName`and `listener` will result in the `listener` being added, and called, multiple + * times. + * + * ```js + * server.prependListener('connection', (stream) => { + * console.log('someone connected!'); + * }); + * ``` + * + * Returns a reference to the `EventEmitter`, so that calls can be chained. + * @since v6.0.0 + * @param eventName The name of the event. 
+ * @param listener The callback function + */ + prependListener(eventName: string | symbol, listener: (...args: any[]) => void): this; + /** + * Adds a **one-time**`listener` function for the event named `eventName` to the _beginning_ of the listeners array. The next time `eventName` is triggered, this + * listener is removed, and then invoked. + * + * ```js + * server.prependOnceListener('connection', (stream) => { + * console.log('Ah, we have our first user!'); + * }); + * ``` + * + * Returns a reference to the `EventEmitter`, so that calls can be chained. + * @since v6.0.0 + * @param eventName The name of the event. + * @param listener The callback function + */ + prependOnceListener(eventName: string | symbol, listener: (...args: any[]) => void): this; + /** + * Returns an array listing the events for which the emitter has registered + * listeners. The values in the array are strings or `Symbol`s. + * + * ```js + * const EventEmitter = require('events'); + * const myEE = new EventEmitter(); + * myEE.on('foo', () => {}); + * myEE.on('bar', () => {}); + * + * const sym = Symbol('symbol'); + * myEE.on(sym, () => {}); + * + * console.log(myEE.eventNames()); + * // Prints: [ 'foo', 'bar', Symbol(symbol) ] + * ``` + * @since v6.0.0 + */ + eventNames(): Array; + } + } + } + export = EventEmitter; +} +declare module 'node:events' { + import events = require('events'); + export = events; +} diff --git a/node_modules/@types/node/ts4.8/fs.d.ts b/node_modules/@types/node/ts4.8/fs.d.ts new file mode 100755 index 0000000..75c53fb --- /dev/null +++ b/node_modules/@types/node/ts4.8/fs.d.ts @@ -0,0 +1,3872 @@ +/** + * The `fs` module enables interacting with the file system in a + * way modeled on standard POSIX functions. + * + * To use the promise-based APIs: + * + * ```js + * import * as fs from 'fs/promises'; + * ``` + * + * To use the callback and sync APIs: + * + * ```js + * import * as fs from 'fs'; + * ``` + * + * All file system operations have synchronous, callback, and promise-based + * forms, and are accessible using both CommonJS syntax and ES6 Modules (ESM). + * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/fs.js) + */ +declare module 'fs' { + import * as stream from 'node:stream'; + import { Abortable, EventEmitter } from 'node:events'; + import { URL } from 'node:url'; + import * as promises from 'node:fs/promises'; + export { promises }; + /** + * Valid types for path values in "fs". 
+ */ + export type PathLike = string | Buffer | URL; + export type PathOrFileDescriptor = PathLike | number; + export type TimeLike = string | number | Date; + export type NoParamCallback = (err: NodeJS.ErrnoException | null) => void; + export type BufferEncodingOption = + | 'buffer' + | { + encoding: 'buffer'; + }; + export interface ObjectEncodingOptions { + encoding?: BufferEncoding | null | undefined; + } + export type EncodingOption = ObjectEncodingOptions | BufferEncoding | undefined | null; + export type OpenMode = number | string; + export type Mode = number | string; + export interface StatsBase { + isFile(): boolean; + isDirectory(): boolean; + isBlockDevice(): boolean; + isCharacterDevice(): boolean; + isSymbolicLink(): boolean; + isFIFO(): boolean; + isSocket(): boolean; + dev: T; + ino: T; + mode: T; + nlink: T; + uid: T; + gid: T; + rdev: T; + size: T; + blksize: T; + blocks: T; + atimeMs: T; + mtimeMs: T; + ctimeMs: T; + birthtimeMs: T; + atime: Date; + mtime: Date; + ctime: Date; + birthtime: Date; + } + export interface Stats extends StatsBase {} + /** + * A `fs.Stats` object provides information about a file. + * + * Objects returned from {@link stat}, {@link lstat} and {@link fstat} and + * their synchronous counterparts are of this type. + * If `bigint` in the `options` passed to those methods is true, the numeric values + * will be `bigint` instead of `number`, and the object will contain additional + * nanosecond-precision properties suffixed with `Ns`. + * + * ```console + * Stats { + * dev: 2114, + * ino: 48064969, + * mode: 33188, + * nlink: 1, + * uid: 85, + * gid: 100, + * rdev: 0, + * size: 527, + * blksize: 4096, + * blocks: 8, + * atimeMs: 1318289051000.1, + * mtimeMs: 1318289051000.1, + * ctimeMs: 1318289051000.1, + * birthtimeMs: 1318289051000.1, + * atime: Mon, 10 Oct 2011 23:24:11 GMT, + * mtime: Mon, 10 Oct 2011 23:24:11 GMT, + * ctime: Mon, 10 Oct 2011 23:24:11 GMT, + * birthtime: Mon, 10 Oct 2011 23:24:11 GMT } + * ``` + * + * `bigint` version: + * + * ```console + * BigIntStats { + * dev: 2114n, + * ino: 48064969n, + * mode: 33188n, + * nlink: 1n, + * uid: 85n, + * gid: 100n, + * rdev: 0n, + * size: 527n, + * blksize: 4096n, + * blocks: 8n, + * atimeMs: 1318289051000n, + * mtimeMs: 1318289051000n, + * ctimeMs: 1318289051000n, + * birthtimeMs: 1318289051000n, + * atimeNs: 1318289051000000000n, + * mtimeNs: 1318289051000000000n, + * ctimeNs: 1318289051000000000n, + * birthtimeNs: 1318289051000000000n, + * atime: Mon, 10 Oct 2011 23:24:11 GMT, + * mtime: Mon, 10 Oct 2011 23:24:11 GMT, + * ctime: Mon, 10 Oct 2011 23:24:11 GMT, + * birthtime: Mon, 10 Oct 2011 23:24:11 GMT } + * ``` + * @since v0.1.21 + */ + export class Stats {} + /** + * A representation of a directory entry, which can be a file or a subdirectory + * within the directory, as returned by reading from an `fs.Dir`. The + * directory entry is a combination of the file name and file type pairs. + * + * Additionally, when {@link readdir} or {@link readdirSync} is called with + * the `withFileTypes` option set to `true`, the resulting array is filled with `fs.Dirent` objects, rather than strings or `Buffer` s. + * @since v10.10.0 + */ + export class Dirent { + /** + * Returns `true` if the `fs.Dirent` object describes a regular file. + * @since v10.10.0 + */ + isFile(): boolean; + /** + * Returns `true` if the `fs.Dirent` object describes a file system + * directory. + * @since v10.10.0 + */ + isDirectory(): boolean; + /** + * Returns `true` if the `fs.Dirent` object describes a block device. 
+ * @since v10.10.0 + */ + isBlockDevice(): boolean; + /** + * Returns `true` if the `fs.Dirent` object describes a character device. + * @since v10.10.0 + */ + isCharacterDevice(): boolean; + /** + * Returns `true` if the `fs.Dirent` object describes a symbolic link. + * @since v10.10.0 + */ + isSymbolicLink(): boolean; + /** + * Returns `true` if the `fs.Dirent` object describes a first-in-first-out + * (FIFO) pipe. + * @since v10.10.0 + */ + isFIFO(): boolean; + /** + * Returns `true` if the `fs.Dirent` object describes a socket. + * @since v10.10.0 + */ + isSocket(): boolean; + /** + * The file name that this `fs.Dirent` object refers to. The type of this + * value is determined by the `options.encoding` passed to {@link readdir} or {@link readdirSync}. + * @since v10.10.0 + */ + name: string; + } + /** + * A class representing a directory stream. + * + * Created by {@link opendir}, {@link opendirSync}, or `fsPromises.opendir()`. + * + * ```js + * import { opendir } from 'fs/promises'; + * + * try { + * const dir = await opendir('./'); + * for await (const dirent of dir) + * console.log(dirent.name); + * } catch (err) { + * console.error(err); + * } + * ``` + * + * When using the async iterator, the `fs.Dir` object will be automatically + * closed after the iterator exits. + * @since v12.12.0 + */ + export class Dir implements AsyncIterable { + /** + * The read-only path of this directory as was provided to {@link opendir},{@link opendirSync}, or `fsPromises.opendir()`. + * @since v12.12.0 + */ + readonly path: string; + /** + * Asynchronously iterates over the directory via `readdir(3)` until all entries have been read. + */ + [Symbol.asyncIterator](): AsyncIterableIterator; + /** + * Asynchronously close the directory's underlying resource handle. + * Subsequent reads will result in errors. + * + * A promise is returned that will be resolved after the resource has been + * closed. + * @since v12.12.0 + */ + close(): Promise; + close(cb: NoParamCallback): void; + /** + * Synchronously close the directory's underlying resource handle. + * Subsequent reads will result in errors. + * @since v12.12.0 + */ + closeSync(): void; + /** + * Asynchronously read the next directory entry via [`readdir(3)`](http://man7.org/linux/man-pages/man3/readdir.3.html) as an `fs.Dirent`. + * + * A promise is returned that will be resolved with an `fs.Dirent`, or `null`if there are no more directory entries to read. + * + * Directory entries returned by this function are in no particular order as + * provided by the operating system's underlying directory mechanisms. + * Entries added or removed while iterating over the directory might not be + * included in the iteration results. + * @since v12.12.0 + * @return containing {fs.Dirent|null} + */ + read(): Promise; + read(cb: (err: NodeJS.ErrnoException | null, dirEnt: Dirent | null) => void): void; + /** + * Synchronously read the next directory entry as an `fs.Dirent`. See the + * POSIX [`readdir(3)`](http://man7.org/linux/man-pages/man3/readdir.3.html) documentation for more detail. + * + * If there are no more directory entries to read, `null` will be returned. + * + * Directory entries returned by this function are in no particular order as + * provided by the operating system's underlying directory mechanisms. + * Entries added or removed while iterating over the directory might not be + * included in the iteration results. 
+ * @since v12.12.0 + */ + readSync(): Dirent | null; + } + /** + * Class: fs.StatWatcher + * @since v14.3.0, v12.20.0 + * Extends `EventEmitter` + * A successful call to {@link watchFile} method will return a new fs.StatWatcher object. + */ + export interface StatWatcher extends EventEmitter { + /** + * When called, requests that the Node.js event loop _not_ exit so long as the `fs.StatWatcher` is active. Calling `watcher.ref()` multiple times will have + * no effect. + * + * By default, all `fs.StatWatcher` objects are "ref'ed", making it normally + * unnecessary to call `watcher.ref()` unless `watcher.unref()` had been + * called previously. + * @since v14.3.0, v12.20.0 + */ + ref(): this; + /** + * When called, the active `fs.StatWatcher` object will not require the Node.js + * event loop to remain active. If there is no other activity keeping the + * event loop running, the process may exit before the `fs.StatWatcher` object's + * callback is invoked. Calling `watcher.unref()` multiple times will have + * no effect. + * @since v14.3.0, v12.20.0 + */ + unref(): this; + } + export interface FSWatcher extends EventEmitter { + /** + * Stop watching for changes on the given `fs.FSWatcher`. Once stopped, the `fs.FSWatcher` object is no longer usable. + * @since v0.5.8 + */ + close(): void; + /** + * events.EventEmitter + * 1. change + * 2. error + */ + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: 'change', listener: (eventType: string, filename: string | Buffer) => void): this; + addListener(event: 'error', listener: (error: Error) => void): this; + addListener(event: 'close', listener: () => void): this; + on(event: string, listener: (...args: any[]) => void): this; + on(event: 'change', listener: (eventType: string, filename: string | Buffer) => void): this; + on(event: 'error', listener: (error: Error) => void): this; + on(event: 'close', listener: () => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: 'change', listener: (eventType: string, filename: string | Buffer) => void): this; + once(event: 'error', listener: (error: Error) => void): this; + once(event: 'close', listener: () => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: 'change', listener: (eventType: string, filename: string | Buffer) => void): this; + prependListener(event: 'error', listener: (error: Error) => void): this; + prependListener(event: 'close', listener: () => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: 'change', listener: (eventType: string, filename: string | Buffer) => void): this; + prependOnceListener(event: 'error', listener: (error: Error) => void): this; + prependOnceListener(event: 'close', listener: () => void): this; + } + /** + * Instances of `fs.ReadStream` are created and returned using the {@link createReadStream} function. + * @since v0.1.93 + */ + export class ReadStream extends stream.Readable { + close(callback?: (err?: NodeJS.ErrnoException | null) => void): void; + /** + * The number of bytes that have been read so far. + * @since v6.4.0 + */ + bytesRead: number; + /** + * The path to the file the stream is reading from as specified in the first + * argument to `fs.createReadStream()`. If `path` is passed as a string, then`readStream.path` will be a string. If `path` is passed as a `Buffer`, then`readStream.path` will be a + * `Buffer`. 
If `fd` is specified, then`readStream.path` will be `undefined`. + * @since v0.1.93 + */ + path: string | Buffer; + /** + * This property is `true` if the underlying file has not been opened yet, + * i.e. before the `'ready'` event is emitted. + * @since v11.2.0, v10.16.0 + */ + pending: boolean; + /** + * events.EventEmitter + * 1. open + * 2. close + * 3. ready + */ + addListener(event: 'close', listener: () => void): this; + addListener(event: 'data', listener: (chunk: Buffer | string) => void): this; + addListener(event: 'end', listener: () => void): this; + addListener(event: 'error', listener: (err: Error) => void): this; + addListener(event: 'open', listener: (fd: number) => void): this; + addListener(event: 'pause', listener: () => void): this; + addListener(event: 'readable', listener: () => void): this; + addListener(event: 'ready', listener: () => void): this; + addListener(event: 'resume', listener: () => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + on(event: 'close', listener: () => void): this; + on(event: 'data', listener: (chunk: Buffer | string) => void): this; + on(event: 'end', listener: () => void): this; + on(event: 'error', listener: (err: Error) => void): this; + on(event: 'open', listener: (fd: number) => void): this; + on(event: 'pause', listener: () => void): this; + on(event: 'readable', listener: () => void): this; + on(event: 'ready', listener: () => void): this; + on(event: 'resume', listener: () => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: 'close', listener: () => void): this; + once(event: 'data', listener: (chunk: Buffer | string) => void): this; + once(event: 'end', listener: () => void): this; + once(event: 'error', listener: (err: Error) => void): this; + once(event: 'open', listener: (fd: number) => void): this; + once(event: 'pause', listener: () => void): this; + once(event: 'readable', listener: () => void): this; + once(event: 'ready', listener: () => void): this; + once(event: 'resume', listener: () => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: 'close', listener: () => void): this; + prependListener(event: 'data', listener: (chunk: Buffer | string) => void): this; + prependListener(event: 'end', listener: () => void): this; + prependListener(event: 'error', listener: (err: Error) => void): this; + prependListener(event: 'open', listener: (fd: number) => void): this; + prependListener(event: 'pause', listener: () => void): this; + prependListener(event: 'readable', listener: () => void): this; + prependListener(event: 'ready', listener: () => void): this; + prependListener(event: 'resume', listener: () => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener(event: 'close', listener: () => void): this; + prependOnceListener(event: 'data', listener: (chunk: Buffer | string) => void): this; + prependOnceListener(event: 'end', listener: () => void): this; + prependOnceListener(event: 'error', listener: (err: Error) => void): this; + prependOnceListener(event: 'open', listener: (fd: number) => void): this; + prependOnceListener(event: 'pause', listener: () => void): this; + prependOnceListener(event: 'readable', listener: () => void): this; + prependOnceListener(event: 'ready', listener: () => void): this; + prependOnceListener(event: 'resume', listener: () => void): this; + prependOnceListener(event: string | 
symbol, listener: (...args: any[]) => void): this; + } + /** + * * Extends `stream.Writable` + * + * Instances of `fs.WriteStream` are created and returned using the {@link createWriteStream} function. + * @since v0.1.93 + */ + export class WriteStream extends stream.Writable { + /** + * Closes `writeStream`. Optionally accepts a + * callback that will be executed once the `writeStream`is closed. + * @since v0.9.4 + */ + close(callback?: (err?: NodeJS.ErrnoException | null) => void): void; + /** + * The number of bytes written so far. Does not include data that is still queued + * for writing. + * @since v0.4.7 + */ + bytesWritten: number; + /** + * The path to the file the stream is writing to as specified in the first + * argument to {@link createWriteStream}. If `path` is passed as a string, then`writeStream.path` will be a string. If `path` is passed as a `Buffer`, then`writeStream.path` will be a + * `Buffer`. + * @since v0.1.93 + */ + path: string | Buffer; + /** + * This property is `true` if the underlying file has not been opened yet, + * i.e. before the `'ready'` event is emitted. + * @since v11.2.0 + */ + pending: boolean; + /** + * events.EventEmitter + * 1. open + * 2. close + * 3. ready + */ + addListener(event: 'close', listener: () => void): this; + addListener(event: 'drain', listener: () => void): this; + addListener(event: 'error', listener: (err: Error) => void): this; + addListener(event: 'finish', listener: () => void): this; + addListener(event: 'open', listener: (fd: number) => void): this; + addListener(event: 'pipe', listener: (src: stream.Readable) => void): this; + addListener(event: 'ready', listener: () => void): this; + addListener(event: 'unpipe', listener: (src: stream.Readable) => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + on(event: 'close', listener: () => void): this; + on(event: 'drain', listener: () => void): this; + on(event: 'error', listener: (err: Error) => void): this; + on(event: 'finish', listener: () => void): this; + on(event: 'open', listener: (fd: number) => void): this; + on(event: 'pipe', listener: (src: stream.Readable) => void): this; + on(event: 'ready', listener: () => void): this; + on(event: 'unpipe', listener: (src: stream.Readable) => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: 'close', listener: () => void): this; + once(event: 'drain', listener: () => void): this; + once(event: 'error', listener: (err: Error) => void): this; + once(event: 'finish', listener: () => void): this; + once(event: 'open', listener: (fd: number) => void): this; + once(event: 'pipe', listener: (src: stream.Readable) => void): this; + once(event: 'ready', listener: () => void): this; + once(event: 'unpipe', listener: (src: stream.Readable) => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: 'close', listener: () => void): this; + prependListener(event: 'drain', listener: () => void): this; + prependListener(event: 'error', listener: (err: Error) => void): this; + prependListener(event: 'finish', listener: () => void): this; + prependListener(event: 'open', listener: (fd: number) => void): this; + prependListener(event: 'pipe', listener: (src: stream.Readable) => void): this; + prependListener(event: 'ready', listener: () => void): this; + prependListener(event: 'unpipe', listener: (src: stream.Readable) => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) 
=> void): this; + prependOnceListener(event: 'close', listener: () => void): this; + prependOnceListener(event: 'drain', listener: () => void): this; + prependOnceListener(event: 'error', listener: (err: Error) => void): this; + prependOnceListener(event: 'finish', listener: () => void): this; + prependOnceListener(event: 'open', listener: (fd: number) => void): this; + prependOnceListener(event: 'pipe', listener: (src: stream.Readable) => void): this; + prependOnceListener(event: 'ready', listener: () => void): this; + prependOnceListener(event: 'unpipe', listener: (src: stream.Readable) => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + /** + * Asynchronously rename file at `oldPath` to the pathname provided + * as `newPath`. In the case that `newPath` already exists, it will + * be overwritten. If there is a directory at `newPath`, an error will + * be raised instead. No arguments other than a possible exception are + * given to the completion callback. + * + * See also: [`rename(2)`](http://man7.org/linux/man-pages/man2/rename.2.html). + * + * ```js + * import { rename } from 'fs'; + * + * rename('oldFile.txt', 'newFile.txt', (err) => { + * if (err) throw err; + * console.log('Rename complete!'); + * }); + * ``` + * @since v0.0.2 + */ + export function rename(oldPath: PathLike, newPath: PathLike, callback: NoParamCallback): void; + export namespace rename { + /** + * Asynchronous rename(2) - Change the name or location of a file or directory. + * @param oldPath A path to a file. If a URL is provided, it must use the `file:` protocol. + * URL support is _experimental_. + * @param newPath A path to a file. If a URL is provided, it must use the `file:` protocol. + * URL support is _experimental_. + */ + function __promisify__(oldPath: PathLike, newPath: PathLike): Promise; + } + /** + * Renames the file from `oldPath` to `newPath`. Returns `undefined`. + * + * See the POSIX [`rename(2)`](http://man7.org/linux/man-pages/man2/rename.2.html) documentation for more details. + * @since v0.1.21 + */ + export function renameSync(oldPath: PathLike, newPath: PathLike): void; + /** + * Truncates the file. No arguments other than a possible exception are + * given to the completion callback. A file descriptor can also be passed as the + * first argument. In this case, `fs.ftruncate()` is called. + * + * ```js + * import { truncate } from 'fs'; + * // Assuming that 'path/file.txt' is a regular file. + * truncate('path/file.txt', (err) => { + * if (err) throw err; + * console.log('path/file.txt was truncated'); + * }); + * ``` + * + * Passing a file descriptor is deprecated and may result in an error being thrown + * in the future. + * + * See the POSIX [`truncate(2)`](http://man7.org/linux/man-pages/man2/truncate.2.html) documentation for more details. + * @since v0.8.6 + * @param [len=0] + */ + export function truncate(path: PathLike, len: number | undefined | null, callback: NoParamCallback): void; + /** + * Asynchronous truncate(2) - Truncate a file to a specified length. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + export function truncate(path: PathLike, callback: NoParamCallback): void; + export namespace truncate { + /** + * Asynchronous truncate(2) - Truncate a file to a specified length. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param len If not specified, defaults to `0`. 
+ */ + function __promisify__(path: PathLike, len?: number | null): Promise; + } + /** + * Truncates the file. Returns `undefined`. A file descriptor can also be + * passed as the first argument. In this case, `fs.ftruncateSync()` is called. + * + * Passing a file descriptor is deprecated and may result in an error being thrown + * in the future. + * @since v0.8.6 + * @param [len=0] + */ + export function truncateSync(path: PathLike, len?: number | null): void; + /** + * Truncates the file descriptor. No arguments other than a possible exception are + * given to the completion callback. + * + * See the POSIX [`ftruncate(2)`](http://man7.org/linux/man-pages/man2/ftruncate.2.html) documentation for more detail. + * + * If the file referred to by the file descriptor was larger than `len` bytes, only + * the first `len` bytes will be retained in the file. + * + * For example, the following program retains only the first four bytes of the + * file: + * + * ```js + * import { open, close, ftruncate } from 'fs'; + * + * function closeFd(fd) { + * close(fd, (err) => { + * if (err) throw err; + * }); + * } + * + * open('temp.txt', 'r+', (err, fd) => { + * if (err) throw err; + * + * try { + * ftruncate(fd, 4, (err) => { + * closeFd(fd); + * if (err) throw err; + * }); + * } catch (err) { + * closeFd(fd); + * if (err) throw err; + * } + * }); + * ``` + * + * If the file previously was shorter than `len` bytes, it is extended, and the + * extended part is filled with null bytes (`'\0'`): + * + * If `len` is negative then `0` will be used. + * @since v0.8.6 + * @param [len=0] + */ + export function ftruncate(fd: number, len: number | undefined | null, callback: NoParamCallback): void; + /** + * Asynchronous ftruncate(2) - Truncate a file to a specified length. + * @param fd A file descriptor. + */ + export function ftruncate(fd: number, callback: NoParamCallback): void; + export namespace ftruncate { + /** + * Asynchronous ftruncate(2) - Truncate a file to a specified length. + * @param fd A file descriptor. + * @param len If not specified, defaults to `0`. + */ + function __promisify__(fd: number, len?: number | null): Promise; + } + /** + * Truncates the file descriptor. Returns `undefined`. + * + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link ftruncate}. + * @since v0.8.6 + * @param [len=0] + */ + export function ftruncateSync(fd: number, len?: number | null): void; + /** + * Asynchronously changes owner and group of a file. No arguments other than a + * possible exception are given to the completion callback. + * + * See the POSIX [`chown(2)`](http://man7.org/linux/man-pages/man2/chown.2.html) documentation for more detail. + * @since v0.1.97 + */ + export function chown(path: PathLike, uid: number, gid: number, callback: NoParamCallback): void; + export namespace chown { + /** + * Asynchronous chown(2) - Change ownership of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + function __promisify__(path: PathLike, uid: number, gid: number): Promise; + } + /** + * Synchronously changes owner and group of a file. Returns `undefined`. + * This is the synchronous version of {@link chown}. + * + * See the POSIX [`chown(2)`](http://man7.org/linux/man-pages/man2/chown.2.html) documentation for more detail. + * @since v0.1.97 + */ + export function chownSync(path: PathLike, uid: number, gid: number): void; + /** + * Sets the owner of the file. 
No arguments other than a possible exception are + * given to the completion callback. + * + * See the POSIX [`fchown(2)`](http://man7.org/linux/man-pages/man2/fchown.2.html) documentation for more detail. + * @since v0.4.7 + */ + export function fchown(fd: number, uid: number, gid: number, callback: NoParamCallback): void; + export namespace fchown { + /** + * Asynchronous fchown(2) - Change ownership of a file. + * @param fd A file descriptor. + */ + function __promisify__(fd: number, uid: number, gid: number): Promise; + } + /** + * Sets the owner of the file. Returns `undefined`. + * + * See the POSIX [`fchown(2)`](http://man7.org/linux/man-pages/man2/fchown.2.html) documentation for more detail. + * @since v0.4.7 + * @param uid The file's new owner's user id. + * @param gid The file's new group's group id. + */ + export function fchownSync(fd: number, uid: number, gid: number): void; + /** + * Set the owner of the symbolic link. No arguments other than a possible + * exception are given to the completion callback. + * + * See the POSIX [`lchown(2)`](http://man7.org/linux/man-pages/man2/lchown.2.html) documentation for more detail. + */ + export function lchown(path: PathLike, uid: number, gid: number, callback: NoParamCallback): void; + export namespace lchown { + /** + * Asynchronous lchown(2) - Change ownership of a file. Does not dereference symbolic links. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + function __promisify__(path: PathLike, uid: number, gid: number): Promise; + } + /** + * Set the owner for the path. Returns `undefined`. + * + * See the POSIX [`lchown(2)`](http://man7.org/linux/man-pages/man2/lchown.2.html) documentation for more details. + * @param uid The file's new owner's user id. + * @param gid The file's new group's group id. + */ + export function lchownSync(path: PathLike, uid: number, gid: number): void; + /** + * Changes the access and modification times of a file in the same way as {@link utimes}, with the difference that if the path refers to a symbolic + * link, then the link is not dereferenced: instead, the timestamps of the + * symbolic link itself are changed. + * + * No arguments other than a possible exception are given to the completion + * callback. + * @since v14.5.0, v12.19.0 + */ + export function lutimes(path: PathLike, atime: TimeLike, mtime: TimeLike, callback: NoParamCallback): void; + export namespace lutimes { + /** + * Changes the access and modification times of a file in the same way as `fsPromises.utimes()`, + * with the difference that if the path refers to a symbolic link, then the link is not + * dereferenced: instead, the timestamps of the symbolic link itself are changed. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param atime The last access time. If a string is provided, it will be coerced to number. + * @param mtime The last modified time. If a string is provided, it will be coerced to number. + */ + function __promisify__(path: PathLike, atime: TimeLike, mtime: TimeLike): Promise; + } + /** + * Change the file system timestamps of the symbolic link referenced by `path`. + * Returns `undefined`, or throws an exception when parameters are incorrect or + * the operation fails. This is the synchronous version of {@link lutimes}. + * @since v14.5.0, v12.19.0 + */ + export function lutimesSync(path: PathLike, atime: TimeLike, mtime: TimeLike): void; + /** + * Asynchronously changes the permissions of a file. 
No arguments other than a + * possible exception are given to the completion callback. + * + * See the POSIX [`chmod(2)`](http://man7.org/linux/man-pages/man2/chmod.2.html) documentation for more detail. + * + * ```js + * import { chmod } from 'fs'; + * + * chmod('my_file.txt', 0o775, (err) => { + * if (err) throw err; + * console.log('The permissions for file "my_file.txt" have been changed!'); + * }); + * ``` + * @since v0.1.30 + */ + export function chmod(path: PathLike, mode: Mode, callback: NoParamCallback): void; + export namespace chmod { + /** + * Asynchronous chmod(2) - Change permissions of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param mode A file mode. If a string is passed, it is parsed as an octal integer. + */ + function __promisify__(path: PathLike, mode: Mode): Promise; + } + /** + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link chmod}. + * + * See the POSIX [`chmod(2)`](http://man7.org/linux/man-pages/man2/chmod.2.html) documentation for more detail. + * @since v0.6.7 + */ + export function chmodSync(path: PathLike, mode: Mode): void; + /** + * Sets the permissions on the file. No arguments other than a possible exception + * are given to the completion callback. + * + * See the POSIX [`fchmod(2)`](http://man7.org/linux/man-pages/man2/fchmod.2.html) documentation for more detail. + * @since v0.4.7 + */ + export function fchmod(fd: number, mode: Mode, callback: NoParamCallback): void; + export namespace fchmod { + /** + * Asynchronous fchmod(2) - Change permissions of a file. + * @param fd A file descriptor. + * @param mode A file mode. If a string is passed, it is parsed as an octal integer. + */ + function __promisify__(fd: number, mode: Mode): Promise; + } + /** + * Sets the permissions on the file. Returns `undefined`. + * + * See the POSIX [`fchmod(2)`](http://man7.org/linux/man-pages/man2/fchmod.2.html) documentation for more detail. + * @since v0.4.7 + */ + export function fchmodSync(fd: number, mode: Mode): void; + /** + * Changes the permissions on a symbolic link. No arguments other than a possible + * exception are given to the completion callback. + * + * This method is only implemented on macOS. + * + * See the POSIX [`lchmod(2)`](https://www.freebsd.org/cgi/man.cgi?query=lchmod&sektion=2) documentation for more detail. + * @deprecated Since v0.4.7 + */ + export function lchmod(path: PathLike, mode: Mode, callback: NoParamCallback): void; + /** @deprecated */ + export namespace lchmod { + /** + * Asynchronous lchmod(2) - Change permissions of a file. Does not dereference symbolic links. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param mode A file mode. If a string is passed, it is parsed as an octal integer. + */ + function __promisify__(path: PathLike, mode: Mode): Promise; + } + /** + * Changes the permissions on a symbolic link. Returns `undefined`. + * + * This method is only implemented on macOS. + * + * See the POSIX [`lchmod(2)`](https://www.freebsd.org/cgi/man.cgi?query=lchmod&sektion=2) documentation for more detail. + * @deprecated Since v0.4.7 + */ + export function lchmodSync(path: PathLike, mode: Mode): void; + /** + * Asynchronous [`stat(2)`](http://man7.org/linux/man-pages/man2/stat.2.html). The callback gets two arguments `(err, stats)` where`stats` is an `fs.Stats` object. + * + * In case of an error, the `err.code` will be one of `Common System Errors`. 
+ * + * Using `fs.stat()` to check for the existence of a file before calling`fs.open()`, `fs.readFile()` or `fs.writeFile()` is not recommended. + * Instead, user code should open/read/write the file directly and handle the + * error raised if the file is not available. + * + * To check if a file exists without manipulating it afterwards, {@link access} is recommended. + * + * For example, given the following directory structure: + * + * ```text + * - txtDir + * -- file.txt + * - app.js + * ``` + * + * The next program will check for the stats of the given paths: + * + * ```js + * import { stat } from 'fs'; + * + * const pathsToCheck = ['./txtDir', './txtDir/file.txt']; + * + * for (let i = 0; i < pathsToCheck.length; i++) { + * stat(pathsToCheck[i], (err, stats) => { + * console.log(stats.isDirectory()); + * console.log(stats); + * }); + * } + * ``` + * + * The resulting output will resemble: + * + * ```console + * true + * Stats { + * dev: 16777220, + * mode: 16877, + * nlink: 3, + * uid: 501, + * gid: 20, + * rdev: 0, + * blksize: 4096, + * ino: 14214262, + * size: 96, + * blocks: 0, + * atimeMs: 1561174653071.963, + * mtimeMs: 1561174614583.3518, + * ctimeMs: 1561174626623.5366, + * birthtimeMs: 1561174126937.2893, + * atime: 2019-06-22T03:37:33.072Z, + * mtime: 2019-06-22T03:36:54.583Z, + * ctime: 2019-06-22T03:37:06.624Z, + * birthtime: 2019-06-22T03:28:46.937Z + * } + * false + * Stats { + * dev: 16777220, + * mode: 33188, + * nlink: 1, + * uid: 501, + * gid: 20, + * rdev: 0, + * blksize: 4096, + * ino: 14214074, + * size: 8, + * blocks: 8, + * atimeMs: 1561174616618.8555, + * mtimeMs: 1561174614584, + * ctimeMs: 1561174614583.8145, + * birthtimeMs: 1561174007710.7478, + * atime: 2019-06-22T03:36:56.619Z, + * mtime: 2019-06-22T03:36:54.584Z, + * ctime: 2019-06-22T03:36:54.584Z, + * birthtime: 2019-06-22T03:26:47.711Z + * } + * ``` + * @since v0.0.2 + */ + export function stat(path: PathLike, callback: (err: NodeJS.ErrnoException | null, stats: Stats) => void): void; + export function stat( + path: PathLike, + options: + | (StatOptions & { + bigint?: false | undefined; + }) + | undefined, + callback: (err: NodeJS.ErrnoException | null, stats: Stats) => void + ): void; + export function stat( + path: PathLike, + options: StatOptions & { + bigint: true; + }, + callback: (err: NodeJS.ErrnoException | null, stats: BigIntStats) => void + ): void; + export function stat(path: PathLike, options: StatOptions | undefined, callback: (err: NodeJS.ErrnoException | null, stats: Stats | BigIntStats) => void): void; + export namespace stat { + /** + * Asynchronous stat(2) - Get file status. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. 
+ */ + function __promisify__( + path: PathLike, + options?: StatOptions & { + bigint?: false | undefined; + } + ): Promise; + function __promisify__( + path: PathLike, + options: StatOptions & { + bigint: true; + } + ): Promise; + function __promisify__(path: PathLike, options?: StatOptions): Promise; + } + export interface StatSyncFn extends Function { + (path: PathLike, options?: undefined): Stats; + ( + path: PathLike, + options?: StatSyncOptions & { + bigint?: false | undefined; + throwIfNoEntry: false; + } + ): Stats | undefined; + ( + path: PathLike, + options: StatSyncOptions & { + bigint: true; + throwIfNoEntry: false; + } + ): BigIntStats | undefined; + ( + path: PathLike, + options?: StatSyncOptions & { + bigint?: false | undefined; + } + ): Stats; + ( + path: PathLike, + options: StatSyncOptions & { + bigint: true; + } + ): BigIntStats; + ( + path: PathLike, + options: StatSyncOptions & { + bigint: boolean; + throwIfNoEntry?: false | undefined; + } + ): Stats | BigIntStats; + (path: PathLike, options?: StatSyncOptions): Stats | BigIntStats | undefined; + } + /** + * Synchronous stat(2) - Get file status. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + export const statSync: StatSyncFn; + /** + * Invokes the callback with the `fs.Stats` for the file descriptor. + * + * See the POSIX [`fstat(2)`](http://man7.org/linux/man-pages/man2/fstat.2.html) documentation for more detail. + * @since v0.1.95 + */ + export function fstat(fd: number, callback: (err: NodeJS.ErrnoException | null, stats: Stats) => void): void; + export function fstat( + fd: number, + options: + | (StatOptions & { + bigint?: false | undefined; + }) + | undefined, + callback: (err: NodeJS.ErrnoException | null, stats: Stats) => void + ): void; + export function fstat( + fd: number, + options: StatOptions & { + bigint: true; + }, + callback: (err: NodeJS.ErrnoException | null, stats: BigIntStats) => void + ): void; + export function fstat(fd: number, options: StatOptions | undefined, callback: (err: NodeJS.ErrnoException | null, stats: Stats | BigIntStats) => void): void; + export namespace fstat { + /** + * Asynchronous fstat(2) - Get file status. + * @param fd A file descriptor. + */ + function __promisify__( + fd: number, + options?: StatOptions & { + bigint?: false | undefined; + } + ): Promise; + function __promisify__( + fd: number, + options: StatOptions & { + bigint: true; + } + ): Promise; + function __promisify__(fd: number, options?: StatOptions): Promise; + } + /** + * Retrieves the `fs.Stats` for the file descriptor. + * + * See the POSIX [`fstat(2)`](http://man7.org/linux/man-pages/man2/fstat.2.html) documentation for more detail. + * @since v0.1.95 + */ + export function fstatSync( + fd: number, + options?: StatOptions & { + bigint?: false | undefined; + } + ): Stats; + export function fstatSync( + fd: number, + options: StatOptions & { + bigint: true; + } + ): BigIntStats; + export function fstatSync(fd: number, options?: StatOptions): Stats | BigIntStats; + /** + * Retrieves the `fs.Stats` for the symbolic link referred to by the path. + * The callback gets two arguments `(err, stats)` where `stats` is a `fs.Stats` object. `lstat()` is identical to `stat()`, except that if `path` is a symbolic + * link, then the link itself is stat-ed, not the file that it refers to. + * + * See the POSIX [`lstat(2)`](http://man7.org/linux/man-pages/man2/lstat.2.html) documentation for more details. 
+ * @since v0.1.30 + */ + export function lstat(path: PathLike, callback: (err: NodeJS.ErrnoException | null, stats: Stats) => void): void; + export function lstat( + path: PathLike, + options: + | (StatOptions & { + bigint?: false | undefined; + }) + | undefined, + callback: (err: NodeJS.ErrnoException | null, stats: Stats) => void + ): void; + export function lstat( + path: PathLike, + options: StatOptions & { + bigint: true; + }, + callback: (err: NodeJS.ErrnoException | null, stats: BigIntStats) => void + ): void; + export function lstat(path: PathLike, options: StatOptions | undefined, callback: (err: NodeJS.ErrnoException | null, stats: Stats | BigIntStats) => void): void; + export namespace lstat { + /** + * Asynchronous lstat(2) - Get file status. Does not dereference symbolic links. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + function __promisify__( + path: PathLike, + options?: StatOptions & { + bigint?: false | undefined; + } + ): Promise<Stats>; + function __promisify__( + path: PathLike, + options: StatOptions & { + bigint: true; + } + ): Promise<BigIntStats>; + function __promisify__(path: PathLike, options?: StatOptions): Promise<Stats | BigIntStats>; + } + /** + * Synchronous lstat(2) - Get file status. Does not dereference symbolic links. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + export const lstatSync: StatSyncFn; + /** + * Creates a new link from the `existingPath` to the `newPath`. See the POSIX [`link(2)`](http://man7.org/linux/man-pages/man2/link.2.html) documentation for more detail. No arguments other than + * a possible + * exception are given to the completion callback. + * @since v0.1.31 + */ + export function link(existingPath: PathLike, newPath: PathLike, callback: NoParamCallback): void; + export namespace link { + /** + * Asynchronous link(2) - Create a new link (also known as a hard link) to an existing file. + * @param existingPath A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param newPath A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + function __promisify__(existingPath: PathLike, newPath: PathLike): Promise<void>; + } + /** + * Creates a new link from the `existingPath` to the `newPath`. See the POSIX [`link(2)`](http://man7.org/linux/man-pages/man2/link.2.html) documentation for more detail. Returns `undefined`. + * @since v0.1.31 + */ + export function linkSync(existingPath: PathLike, newPath: PathLike): void; + /** + * Creates the link called `path` pointing to `target`. No arguments other than a + * possible exception are given to the completion callback. + * + * See the POSIX [`symlink(2)`](http://man7.org/linux/man-pages/man2/symlink.2.html) documentation for more details. + * + * The `type` argument is only available on Windows and ignored on other platforms. + * It can be set to `'dir'`, `'file'`, or `'junction'`. If the `type` argument is + * not set, Node.js will autodetect `target` type and use `'file'` or `'dir'`. If + * the `target` does not exist, `'file'` will be used. Windows junction points + * require the destination path to be absolute. When using `'junction'`, the`target` argument will automatically be normalized to absolute path. + * + * Relative targets are relative to the link’s parent directory.
+ * + * ```js + * import { symlink } from 'fs'; + * + * symlink('./mew', './mewtwo', callback); + * ``` + * + * The above example creates a symbolic link `mewtwo` which points to `mew` in the + * same directory: + * + * ```bash + * $ tree . + * . + * ├── mew + * └── mewtwo -> ./mew + * ``` + * @since v0.1.31 + */ + export function symlink(target: PathLike, path: PathLike, type: symlink.Type | undefined | null, callback: NoParamCallback): void; + /** + * Asynchronous symlink(2) - Create a new symbolic link to an existing file. + * @param target A path to an existing file. If a URL is provided, it must use the `file:` protocol. + * @param path A path to the new symlink. If a URL is provided, it must use the `file:` protocol. + */ + export function symlink(target: PathLike, path: PathLike, callback: NoParamCallback): void; + export namespace symlink { + /** + * Asynchronous symlink(2) - Create a new symbolic link to an existing file. + * @param target A path to an existing file. If a URL is provided, it must use the `file:` protocol. + * @param path A path to the new symlink. If a URL is provided, it must use the `file:` protocol. + * @param type May be set to `'dir'`, `'file'`, or `'junction'` (default is `'file'`) and is only available on Windows (ignored on other platforms). + * When using `'junction'`, the `target` argument will automatically be normalized to an absolute path. + */ + function __promisify__(target: PathLike, path: PathLike, type?: string | null): Promise; + type Type = 'dir' | 'file' | 'junction'; + } + /** + * Returns `undefined`. + * + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link symlink}. + * @since v0.1.31 + */ + export function symlinkSync(target: PathLike, path: PathLike, type?: symlink.Type | null): void; + /** + * Reads the contents of the symbolic link referred to by `path`. The callback gets + * two arguments `(err, linkString)`. + * + * See the POSIX [`readlink(2)`](http://man7.org/linux/man-pages/man2/readlink.2.html) documentation for more details. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use for + * the link path passed to the callback. If the `encoding` is set to `'buffer'`, + * the link path returned will be passed as a `Buffer` object. + * @since v0.1.31 + */ + export function readlink(path: PathLike, options: EncodingOption, callback: (err: NodeJS.ErrnoException | null, linkString: string) => void): void; + /** + * Asynchronous readlink(2) - read value of a symbolic link. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function readlink(path: PathLike, options: BufferEncodingOption, callback: (err: NodeJS.ErrnoException | null, linkString: Buffer) => void): void; + /** + * Asynchronous readlink(2) - read value of a symbolic link. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function readlink(path: PathLike, options: EncodingOption, callback: (err: NodeJS.ErrnoException | null, linkString: string | Buffer) => void): void; + /** + * Asynchronous readlink(2) - read value of a symbolic link. 
+ * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + export function readlink(path: PathLike, callback: (err: NodeJS.ErrnoException | null, linkString: string) => void): void; + export namespace readlink { + /** + * Asynchronous readlink(2) - read value of a symbolic link. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__(path: PathLike, options?: EncodingOption): Promise<string>; + /** + * Asynchronous readlink(2) - read value of a symbolic link. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__(path: PathLike, options: BufferEncodingOption): Promise<Buffer>; + /** + * Asynchronous readlink(2) - read value of a symbolic link. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__(path: PathLike, options?: EncodingOption): Promise<string | Buffer>; + } + /** + * Returns the symbolic link's string value. + * + * See the POSIX [`readlink(2)`](http://man7.org/linux/man-pages/man2/readlink.2.html) documentation for more details. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use for + * the link path returned. If the `encoding` is set to `'buffer'`, + * the link path returned will be passed as a `Buffer` object. + * @since v0.1.31 + */ + export function readlinkSync(path: PathLike, options?: EncodingOption): string; + /** + * Synchronous readlink(2) - read value of a symbolic link. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function readlinkSync(path: PathLike, options: BufferEncodingOption): Buffer; + /** + * Synchronous readlink(2) - read value of a symbolic link. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function readlinkSync(path: PathLike, options?: EncodingOption): string | Buffer; + /** + * Asynchronously computes the canonical pathname by resolving `.`, `..` and + * symbolic links. + * + * A canonical pathname is not necessarily unique. Hard links and bind mounts can + * expose a file system entity through many pathnames. + * + * This function behaves like [`realpath(3)`](http://man7.org/linux/man-pages/man3/realpath.3.html), with some exceptions: + * + * 1. No case conversion is performed on case-insensitive file systems. + * 2. The maximum number of symbolic links is platform-independent and generally + * (much) higher than what the native [`realpath(3)`](http://man7.org/linux/man-pages/man3/realpath.3.html) implementation supports. + * + * The `callback` gets two arguments `(err, resolvedPath)`.
May use `process.cwd`to resolve relative paths. + * + * Only paths that can be converted to UTF8 strings are supported. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use for + * the path passed to the callback. If the `encoding` is set to `'buffer'`, + * the path returned will be passed as a `Buffer` object. + * + * If `path` resolves to a socket or a pipe, the function will return a system + * dependent name for that object. + * @since v0.1.31 + */ + export function realpath(path: PathLike, options: EncodingOption, callback: (err: NodeJS.ErrnoException | null, resolvedPath: string) => void): void; + /** + * Asynchronous realpath(3) - return the canonicalized absolute pathname. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function realpath(path: PathLike, options: BufferEncodingOption, callback: (err: NodeJS.ErrnoException | null, resolvedPath: Buffer) => void): void; + /** + * Asynchronous realpath(3) - return the canonicalized absolute pathname. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function realpath(path: PathLike, options: EncodingOption, callback: (err: NodeJS.ErrnoException | null, resolvedPath: string | Buffer) => void): void; + /** + * Asynchronous realpath(3) - return the canonicalized absolute pathname. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + export function realpath(path: PathLike, callback: (err: NodeJS.ErrnoException | null, resolvedPath: string) => void): void; + export namespace realpath { + /** + * Asynchronous realpath(3) - return the canonicalized absolute pathname. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__(path: PathLike, options?: EncodingOption): Promise; + /** + * Asynchronous realpath(3) - return the canonicalized absolute pathname. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__(path: PathLike, options: BufferEncodingOption): Promise; + /** + * Asynchronous realpath(3) - return the canonicalized absolute pathname. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__(path: PathLike, options?: EncodingOption): Promise; + /** + * Asynchronous [`realpath(3)`](http://man7.org/linux/man-pages/man3/realpath.3.html). + * + * The `callback` gets two arguments `(err, resolvedPath)`. + * + * Only paths that can be converted to UTF8 strings are supported. 
+ * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use for + * the path passed to the callback. If the `encoding` is set to `'buffer'`, + * the path returned will be passed as a `Buffer` object. + * + * On Linux, when Node.js is linked against musl libc, the procfs file system must + * be mounted on `/proc` in order for this function to work. Glibc does not have + * this restriction. + * @since v9.2.0 + */ + function native(path: PathLike, options: EncodingOption, callback: (err: NodeJS.ErrnoException | null, resolvedPath: string) => void): void; + function native(path: PathLike, options: BufferEncodingOption, callback: (err: NodeJS.ErrnoException | null, resolvedPath: Buffer) => void): void; + function native(path: PathLike, options: EncodingOption, callback: (err: NodeJS.ErrnoException | null, resolvedPath: string | Buffer) => void): void; + function native(path: PathLike, callback: (err: NodeJS.ErrnoException | null, resolvedPath: string) => void): void; + } + /** + * Returns the resolved pathname. + * + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link realpath}. + * @since v0.1.31 + */ + export function realpathSync(path: PathLike, options?: EncodingOption): string; + /** + * Synchronous realpath(3) - return the canonicalized absolute pathname. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function realpathSync(path: PathLike, options: BufferEncodingOption): Buffer; + /** + * Synchronous realpath(3) - return the canonicalized absolute pathname. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function realpathSync(path: PathLike, options?: EncodingOption): string | Buffer; + export namespace realpathSync { + function native(path: PathLike, options?: EncodingOption): string; + function native(path: PathLike, options: BufferEncodingOption): Buffer; + function native(path: PathLike, options?: EncodingOption): string | Buffer; + } + /** + * Asynchronously removes a file or symbolic link. No arguments other than a + * possible exception are given to the completion callback. + * + * ```js + * import { unlink } from 'fs'; + * // Assuming that 'path/file.txt' is a regular file. + * unlink('path/file.txt', (err) => { + * if (err) throw err; + * console.log('path/file.txt was deleted'); + * }); + * ``` + * + * `fs.unlink()` will not work on a directory, empty or otherwise. To remove a + * directory, use {@link rmdir}. + * + * See the POSIX [`unlink(2)`](http://man7.org/linux/man-pages/man2/unlink.2.html) documentation for more details. + * @since v0.0.2 + */ + export function unlink(path: PathLike, callback: NoParamCallback): void; + export namespace unlink { + /** + * Asynchronous unlink(2) - delete a name and possibly the file it refers to. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + function __promisify__(path: PathLike): Promise; + } + /** + * Synchronous [`unlink(2)`](http://man7.org/linux/man-pages/man2/unlink.2.html). Returns `undefined`. 
+ * @since v0.1.21 + */ + export function unlinkSync(path: PathLike): void; + export interface RmDirOptions { + /** + * If an `EBUSY`, `EMFILE`, `ENFILE`, `ENOTEMPTY`, or + * `EPERM` error is encountered, Node.js will retry the operation with a linear + * backoff wait of `retryDelay` ms longer on each try. This option represents the + * number of retries. This option is ignored if the `recursive` option is not + * `true`. + * @default 0 + */ + maxRetries?: number | undefined; + /** + * @deprecated since v14.14.0 In future versions of Node.js and will trigger a warning + * `fs.rmdir(path, { recursive: true })` will throw if `path` does not exist or is a file. + * Use `fs.rm(path, { recursive: true, force: true })` instead. + * + * If `true`, perform a recursive directory removal. In + * recursive mode, operations are retried on failure. + * @default false + */ + recursive?: boolean | undefined; + /** + * The amount of time in milliseconds to wait between retries. + * This option is ignored if the `recursive` option is not `true`. + * @default 100 + */ + retryDelay?: number | undefined; + } + /** + * Asynchronous [`rmdir(2)`](http://man7.org/linux/man-pages/man2/rmdir.2.html). No arguments other than a possible exception are given + * to the completion callback. + * + * Using `fs.rmdir()` on a file (not a directory) results in an `ENOENT` error on + * Windows and an `ENOTDIR` error on POSIX. + * + * To get a behavior similar to the `rm -rf` Unix command, use {@link rm} with options `{ recursive: true, force: true }`. + * @since v0.0.2 + */ + export function rmdir(path: PathLike, callback: NoParamCallback): void; + export function rmdir(path: PathLike, options: RmDirOptions, callback: NoParamCallback): void; + export namespace rmdir { + /** + * Asynchronous rmdir(2) - delete a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + function __promisify__(path: PathLike, options?: RmDirOptions): Promise; + } + /** + * Synchronous [`rmdir(2)`](http://man7.org/linux/man-pages/man2/rmdir.2.html). Returns `undefined`. + * + * Using `fs.rmdirSync()` on a file (not a directory) results in an `ENOENT` error + * on Windows and an `ENOTDIR` error on POSIX. + * + * To get a behavior similar to the `rm -rf` Unix command, use {@link rmSync} with options `{ recursive: true, force: true }`. + * @since v0.1.21 + */ + export function rmdirSync(path: PathLike, options?: RmDirOptions): void; + export interface RmOptions { + /** + * When `true`, exceptions will be ignored if `path` does not exist. + * @default false + */ + force?: boolean | undefined; + /** + * If an `EBUSY`, `EMFILE`, `ENFILE`, `ENOTEMPTY`, or + * `EPERM` error is encountered, Node.js will retry the operation with a linear + * backoff wait of `retryDelay` ms longer on each try. This option represents the + * number of retries. This option is ignored if the `recursive` option is not + * `true`. + * @default 0 + */ + maxRetries?: number | undefined; + /** + * If `true`, perform a recursive directory removal. In + * recursive mode, operations are retried on failure. + * @default false + */ + recursive?: boolean | undefined; + /** + * The amount of time in milliseconds to wait between retries. + * This option is ignored if the `recursive` option is not `true`. + * @default 100 + */ + retryDelay?: number | undefined; + } + /** + * Asynchronously removes files and directories (modeled on the standard POSIX `rm`utility). 
No arguments other than a possible exception are given to the + * completion callback. + * @since v14.14.0 + */ + export function rm(path: PathLike, callback: NoParamCallback): void; + export function rm(path: PathLike, options: RmOptions, callback: NoParamCallback): void; + export namespace rm { + /** + * Asynchronously removes files and directories (modeled on the standard POSIX `rm` utility). + */ + function __promisify__(path: PathLike, options?: RmOptions): Promise; + } + /** + * Synchronously removes files and directories (modeled on the standard POSIX `rm`utility). Returns `undefined`. + * @since v14.14.0 + */ + export function rmSync(path: PathLike, options?: RmOptions): void; + export interface MakeDirectoryOptions { + /** + * Indicates whether parent folders should be created. + * If a folder was created, the path to the first created folder will be returned. + * @default false + */ + recursive?: boolean | undefined; + /** + * A file mode. If a string is passed, it is parsed as an octal integer. If not specified + * @default 0o777 + */ + mode?: Mode | undefined; + } + /** + * Asynchronously creates a directory. + * + * The callback is given a possible exception and, if `recursive` is `true`, the + * first directory path created, `(err[, path])`.`path` can still be `undefined` when `recursive` is `true`, if no directory was + * created. + * + * The optional `options` argument can be an integer specifying `mode` (permission + * and sticky bits), or an object with a `mode` property and a `recursive`property indicating whether parent directories should be created. Calling`fs.mkdir()` when `path` is a directory that + * exists results in an error only + * when `recursive` is false. + * + * ```js + * import { mkdir } from 'fs'; + * + * // Creates /tmp/a/apple, regardless of whether `/tmp` and /tmp/a exist. + * mkdir('/tmp/a/apple', { recursive: true }, (err) => { + * if (err) throw err; + * }); + * ``` + * + * On Windows, using `fs.mkdir()` on the root directory even with recursion will + * result in an error: + * + * ```js + * import { mkdir } from 'fs'; + * + * mkdir('/', { recursive: true }, (err) => { + * // => [Error: EPERM: operation not permitted, mkdir 'C:\'] + * }); + * ``` + * + * See the POSIX [`mkdir(2)`](http://man7.org/linux/man-pages/man2/mkdir.2.html) documentation for more details. + * @since v0.1.8 + */ + export function mkdir( + path: PathLike, + options: MakeDirectoryOptions & { + recursive: true; + }, + callback: (err: NodeJS.ErrnoException | null, path?: string) => void + ): void; + /** + * Asynchronous mkdir(2) - create a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options Either the file mode, or an object optionally specifying the file mode and whether parent folders + * should be created. If a string is passed, it is parsed as an octal integer. If not specified, defaults to `0o777`. + */ + export function mkdir( + path: PathLike, + options: + | Mode + | (MakeDirectoryOptions & { + recursive?: false | undefined; + }) + | null + | undefined, + callback: NoParamCallback + ): void; + /** + * Asynchronous mkdir(2) - create a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options Either the file mode, or an object optionally specifying the file mode and whether parent folders + * should be created. If a string is passed, it is parsed as an octal integer. If not specified, defaults to `0o777`. 
+ */ + export function mkdir(path: PathLike, options: Mode | MakeDirectoryOptions | null | undefined, callback: (err: NodeJS.ErrnoException | null, path?: string) => void): void; + /** + * Asynchronous mkdir(2) - create a directory with a mode of `0o777`. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + export function mkdir(path: PathLike, callback: NoParamCallback): void; + export namespace mkdir { + /** + * Asynchronous mkdir(2) - create a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options Either the file mode, or an object optionally specifying the file mode and whether parent folders + * should be created. If a string is passed, it is parsed as an octal integer. If not specified, defaults to `0o777`. + */ + function __promisify__( + path: PathLike, + options: MakeDirectoryOptions & { + recursive: true; + } + ): Promise<string | undefined>; + /** + * Asynchronous mkdir(2) - create a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options Either the file mode, or an object optionally specifying the file mode and whether parent folders + * should be created. If a string is passed, it is parsed as an octal integer. If not specified, defaults to `0o777`. + */ + function __promisify__( + path: PathLike, + options?: + | Mode + | (MakeDirectoryOptions & { + recursive?: false | undefined; + }) + | null + ): Promise<void>; + /** + * Asynchronous mkdir(2) - create a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options Either the file mode, or an object optionally specifying the file mode and whether parent folders + * should be created. If a string is passed, it is parsed as an octal integer. If not specified, defaults to `0o777`. + */ + function __promisify__(path: PathLike, options?: Mode | MakeDirectoryOptions | null): Promise<string | undefined>; + } + /** + * Synchronously creates a directory. Returns `undefined`, or if `recursive` is`true`, the first directory path created. + * This is the synchronous version of {@link mkdir}. + * + * See the POSIX [`mkdir(2)`](http://man7.org/linux/man-pages/man2/mkdir.2.html) documentation for more details. + * @since v0.1.21 + */ + export function mkdirSync( + path: PathLike, + options: MakeDirectoryOptions & { + recursive: true; + } + ): string | undefined; + /** + * Synchronous mkdir(2) - create a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options Either the file mode, or an object optionally specifying the file mode and whether parent folders + * should be created. If a string is passed, it is parsed as an octal integer. If not specified, defaults to `0o777`. + */ + export function mkdirSync( + path: PathLike, + options?: + | Mode + | (MakeDirectoryOptions & { + recursive?: false | undefined; + }) + | null + ): void; + /** + * Synchronous mkdir(2) - create a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options Either the file mode, or an object optionally specifying the file mode and whether parent folders + * should be created. If a string is passed, it is parsed as an octal integer. If not specified, defaults to `0o777`. + */ + export function mkdirSync(path: PathLike, options?: Mode | MakeDirectoryOptions | null): string | undefined; + /** + * Creates a unique temporary directory.
+ * + * Generates six random characters to be appended behind a required`prefix` to create a unique temporary directory. Due to platform + * inconsistencies, avoid trailing `X` characters in `prefix`. Some platforms, + * notably the BSDs, can return more than six random characters, and replace + * trailing `X` characters in `prefix` with random characters. + * + * The created directory path is passed as a string to the callback's second + * parameter. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use. + * + * ```js + * import { mkdtemp } from 'fs'; + * + * mkdtemp(path.join(os.tmpdir(), 'foo-'), (err, directory) => { + * if (err) throw err; + * console.log(directory); + * // Prints: /tmp/foo-itXde2 or C:\Users\...\AppData\Local\Temp\foo-itXde2 + * }); + * ``` + * + * The `fs.mkdtemp()` method will append the six randomly selected characters + * directly to the `prefix` string. For instance, given a directory `/tmp`, if the + * intention is to create a temporary directory _within_`/tmp`, the `prefix`must end with a trailing platform-specific path separator + * (`require('path').sep`). + * + * ```js + * import { tmpdir } from 'os'; + * import { mkdtemp } from 'fs'; + * + * // The parent directory for the new temporary directory + * const tmpDir = tmpdir(); + * + * // This method is *INCORRECT*: + * mkdtemp(tmpDir, (err, directory) => { + * if (err) throw err; + * console.log(directory); + * // Will print something similar to `/tmpabc123`. + * // A new temporary directory is created at the file system root + * // rather than *within* the /tmp directory. + * }); + * + * // This method is *CORRECT*: + * import { sep } from 'path'; + * mkdtemp(`${tmpDir}${sep}`, (err, directory) => { + * if (err) throw err; + * console.log(directory); + * // Will print something similar to `/tmp/abc123`. + * // A new temporary directory is created within + * // the /tmp directory. + * }); + * ``` + * @since v5.10.0 + */ + export function mkdtemp(prefix: string, options: EncodingOption, callback: (err: NodeJS.ErrnoException | null, folder: string) => void): void; + /** + * Asynchronously creates a unique temporary directory. + * Generates six random characters to be appended behind a required prefix to create a unique temporary directory. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function mkdtemp( + prefix: string, + options: + | 'buffer' + | { + encoding: 'buffer'; + }, + callback: (err: NodeJS.ErrnoException | null, folder: Buffer) => void + ): void; + /** + * Asynchronously creates a unique temporary directory. + * Generates six random characters to be appended behind a required prefix to create a unique temporary directory. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function mkdtemp(prefix: string, options: EncodingOption, callback: (err: NodeJS.ErrnoException | null, folder: string | Buffer) => void): void; + /** + * Asynchronously creates a unique temporary directory. + * Generates six random characters to be appended behind a required prefix to create a unique temporary directory. 
+ */ + export function mkdtemp(prefix: string, callback: (err: NodeJS.ErrnoException | null, folder: string) => void): void; + export namespace mkdtemp { + /** + * Asynchronously creates a unique temporary directory. + * Generates six random characters to be appended behind a required prefix to create a unique temporary directory. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__(prefix: string, options?: EncodingOption): Promise; + /** + * Asynchronously creates a unique temporary directory. + * Generates six random characters to be appended behind a required prefix to create a unique temporary directory. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__(prefix: string, options: BufferEncodingOption): Promise; + /** + * Asynchronously creates a unique temporary directory. + * Generates six random characters to be appended behind a required prefix to create a unique temporary directory. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__(prefix: string, options?: EncodingOption): Promise; + } + /** + * Returns the created directory path. + * + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link mkdtemp}. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use. + * @since v5.10.0 + */ + export function mkdtempSync(prefix: string, options?: EncodingOption): string; + /** + * Synchronously creates a unique temporary directory. + * Generates six random characters to be appended behind a required prefix to create a unique temporary directory. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function mkdtempSync(prefix: string, options: BufferEncodingOption): Buffer; + /** + * Synchronously creates a unique temporary directory. + * Generates six random characters to be appended behind a required prefix to create a unique temporary directory. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function mkdtempSync(prefix: string, options?: EncodingOption): string | Buffer; + /** + * Reads the contents of a directory. The callback gets two arguments `(err, files)`where `files` is an array of the names of the files in the directory excluding`'.'` and `'..'`. + * + * See the POSIX [`readdir(3)`](http://man7.org/linux/man-pages/man3/readdir.3.html) documentation for more details. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use for + * the filenames passed to the callback. If the `encoding` is set to `'buffer'`, + * the filenames returned will be passed as `Buffer` objects. + * + * If `options.withFileTypes` is set to `true`, the `files` array will contain `fs.Dirent` objects. 
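A short sketch of the `withFileTypes` behaviour described above; `'/tmp'` is only an example path, and the `Dirent` methods used are those declared elsewhere in this file.

```js
import { readdir } from 'fs';

// With `withFileTypes: true` the callback receives fs.Dirent objects
// instead of plain filename strings.
readdir('/tmp', { withFileTypes: true }, (err, entries) => {
  if (err) throw err;
  for (const entry of entries) {
    console.log(entry.name, entry.isDirectory() ? '(dir)' : '(file)');
  }
});
```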
+ * @since v0.1.8 + */ + export function readdir( + path: PathLike, + options: + | { + encoding: BufferEncoding | null; + withFileTypes?: false | undefined; + } + | BufferEncoding + | undefined + | null, + callback: (err: NodeJS.ErrnoException | null, files: string[]) => void + ): void; + /** + * Asynchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function readdir( + path: PathLike, + options: + | { + encoding: 'buffer'; + withFileTypes?: false | undefined; + } + | 'buffer', + callback: (err: NodeJS.ErrnoException | null, files: Buffer[]) => void + ): void; + /** + * Asynchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function readdir( + path: PathLike, + options: + | (ObjectEncodingOptions & { + withFileTypes?: false | undefined; + }) + | BufferEncoding + | undefined + | null, + callback: (err: NodeJS.ErrnoException | null, files: string[] | Buffer[]) => void + ): void; + /** + * Asynchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + export function readdir(path: PathLike, callback: (err: NodeJS.ErrnoException | null, files: string[]) => void): void; + /** + * Asynchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options If called with `withFileTypes: true` the result data will be an array of Dirent. + */ + export function readdir( + path: PathLike, + options: ObjectEncodingOptions & { + withFileTypes: true; + }, + callback: (err: NodeJS.ErrnoException | null, files: Dirent[]) => void + ): void; + export namespace readdir { + /** + * Asynchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__( + path: PathLike, + options?: + | { + encoding: BufferEncoding | null; + withFileTypes?: false | undefined; + } + | BufferEncoding + | null + ): Promise; + /** + * Asynchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__( + path: PathLike, + options: + | 'buffer' + | { + encoding: 'buffer'; + withFileTypes?: false | undefined; + } + ): Promise; + /** + * Asynchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function __promisify__( + path: PathLike, + options?: + | (ObjectEncodingOptions & { + withFileTypes?: false | undefined; + }) + | BufferEncoding + | null + ): Promise; + /** + * Asynchronous readdir(3) - read a directory. 
+ * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options If called with `withFileTypes: true` the result data will be an array of Dirent + */ + function __promisify__( + path: PathLike, + options: ObjectEncodingOptions & { + withFileTypes: true; + } + ): Promise; + } + /** + * Reads the contents of the directory. + * + * See the POSIX [`readdir(3)`](http://man7.org/linux/man-pages/man3/readdir.3.html) documentation for more details. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use for + * the filenames returned. If the `encoding` is set to `'buffer'`, + * the filenames returned will be passed as `Buffer` objects. + * + * If `options.withFileTypes` is set to `true`, the result will contain `fs.Dirent` objects. + * @since v0.1.21 + */ + export function readdirSync( + path: PathLike, + options?: + | { + encoding: BufferEncoding | null; + withFileTypes?: false | undefined; + } + | BufferEncoding + | null + ): string[]; + /** + * Synchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function readdirSync( + path: PathLike, + options: + | { + encoding: 'buffer'; + withFileTypes?: false | undefined; + } + | 'buffer' + ): Buffer[]; + /** + * Synchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + export function readdirSync( + path: PathLike, + options?: + | (ObjectEncodingOptions & { + withFileTypes?: false | undefined; + }) + | BufferEncoding + | null + ): string[] | Buffer[]; + /** + * Synchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options If called with `withFileTypes: true` the result data will be an array of Dirent. + */ + export function readdirSync( + path: PathLike, + options: ObjectEncodingOptions & { + withFileTypes: true; + } + ): Dirent[]; + /** + * Closes the file descriptor. No arguments other than a possible exception are + * given to the completion callback. + * + * Calling `fs.close()` on any file descriptor (`fd`) that is currently in use + * through any other `fs` operation may lead to undefined behavior. + * + * See the POSIX [`close(2)`](http://man7.org/linux/man-pages/man2/close.2.html) documentation for more detail. + * @since v0.0.2 + */ + export function close(fd: number, callback?: NoParamCallback): void; + export namespace close { + /** + * Asynchronous close(2) - close a file descriptor. + * @param fd A file descriptor. + */ + function __promisify__(fd: number): Promise; + } + /** + * Closes the file descriptor. Returns `undefined`. + * + * Calling `fs.closeSync()` on any file descriptor (`fd`) that is currently in use + * through any other `fs` operation may lead to undefined behavior. + * + * See the POSIX [`close(2)`](http://man7.org/linux/man-pages/man2/close.2.html) documentation for more detail. + * @since v0.1.21 + */ + export function closeSync(fd: number): void; + /** + * Asynchronous file open. 
See the POSIX [`open(2)`](http://man7.org/linux/man-pages/man2/open.2.html) documentation for more details. + * + * `mode` sets the file mode (permission and sticky bits), but only if the file was + * created. On Windows, only the write permission can be manipulated; see {@link chmod}. + * + * The callback gets two arguments `(err, fd)`. + * + * Some characters (`< > : " / \ | ? *`) are reserved under Windows as documented + * by [Naming Files, Paths, and Namespaces](https://docs.microsoft.com/en-us/windows/desktop/FileIO/naming-a-file). Under NTFS, if the filename contains + * a colon, Node.js will open a file system stream, as described by [this MSDN page](https://docs.microsoft.com/en-us/windows/desktop/FileIO/using-streams). + * + * Functions based on `fs.open()` exhibit this behavior as well:`fs.writeFile()`, `fs.readFile()`, etc. + * @since v0.0.2 + * @param [flags='r'] See `support of file system `flags``. + * @param [mode=0o666] + */ + export function open(path: PathLike, flags: OpenMode | undefined, mode: Mode | undefined | null, callback: (err: NodeJS.ErrnoException | null, fd: number) => void): void; + /** + * Asynchronous open(2) - open and possibly create a file. If the file is created, its mode will be `0o666`. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param [flags='r'] See `support of file system `flags``. + */ + export function open(path: PathLike, flags: OpenMode | undefined, callback: (err: NodeJS.ErrnoException | null, fd: number) => void): void; + /** + * Asynchronous open(2) - open and possibly create a file. If the file is created, its mode will be `0o666`. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + */ + export function open(path: PathLike, callback: (err: NodeJS.ErrnoException | null, fd: number) => void): void; + + export namespace open { + /** + * Asynchronous open(2) - open and possibly create a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param mode A file mode. If a string is passed, it is parsed as an octal integer. If not supplied, defaults to `0o666`. + */ + function __promisify__(path: PathLike, flags: OpenMode, mode?: Mode | null): Promise; + } + /** + * Returns an integer representing the file descriptor. + * + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link open}. + * @since v0.1.21 + * @param [flags='r'] + * @param [mode=0o666] + */ + export function openSync(path: PathLike, flags: OpenMode, mode?: Mode | null): number; + /** + * Change the file system timestamps of the object referenced by `path`. + * + * The `atime` and `mtime` arguments follow these rules: + * + * * Values can be either numbers representing Unix epoch time in seconds,`Date`s, or a numeric string like `'123456789.0'`. + * * If the value can not be converted to a number, or is `NaN`, `Infinity` or`-Infinity`, an `Error` will be thrown. + * @since v0.4.2 + */ + export function utimes(path: PathLike, atime: TimeLike, mtime: TimeLike, callback: NoParamCallback): void; + export namespace utimes { + /** + * Asynchronously change file timestamps of the file referenced by the supplied path. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param atime The last access time. If a string is provided, it will be coerced to number. + * @param mtime The last modified time. If a string is provided, it will be coerced to number. 
+ */ + function __promisify__(path: PathLike, atime: TimeLike, mtime: TimeLike): Promise; + } + /** + * Returns `undefined`. + * + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link utimes}. + * @since v0.4.2 + */ + export function utimesSync(path: PathLike, atime: TimeLike, mtime: TimeLike): void; + /** + * Change the file system timestamps of the object referenced by the supplied file + * descriptor. See {@link utimes}. + * @since v0.4.2 + */ + export function futimes(fd: number, atime: TimeLike, mtime: TimeLike, callback: NoParamCallback): void; + export namespace futimes { + /** + * Asynchronously change file timestamps of the file referenced by the supplied file descriptor. + * @param fd A file descriptor. + * @param atime The last access time. If a string is provided, it will be coerced to number. + * @param mtime The last modified time. If a string is provided, it will be coerced to number. + */ + function __promisify__(fd: number, atime: TimeLike, mtime: TimeLike): Promise; + } + /** + * Synchronous version of {@link futimes}. Returns `undefined`. + * @since v0.4.2 + */ + export function futimesSync(fd: number, atime: TimeLike, mtime: TimeLike): void; + /** + * Request that all data for the open file descriptor is flushed to the storage + * device. The specific implementation is operating system and device specific. + * Refer to the POSIX [`fsync(2)`](http://man7.org/linux/man-pages/man2/fsync.2.html) documentation for more detail. No arguments other + * than a possible exception are given to the completion callback. + * @since v0.1.96 + */ + export function fsync(fd: number, callback: NoParamCallback): void; + export namespace fsync { + /** + * Asynchronous fsync(2) - synchronize a file's in-core state with the underlying storage device. + * @param fd A file descriptor. + */ + function __promisify__(fd: number): Promise; + } + /** + * Request that all data for the open file descriptor is flushed to the storage + * device. The specific implementation is operating system and device specific. + * Refer to the POSIX [`fsync(2)`](http://man7.org/linux/man-pages/man2/fsync.2.html) documentation for more detail. Returns `undefined`. + * @since v0.1.96 + */ + export function fsyncSync(fd: number): void; + /** + * Write `buffer` to the file specified by `fd`. + * + * `offset` determines the part of the buffer to be written, and `length` is + * an integer specifying the number of bytes to write. + * + * `position` refers to the offset from the beginning of the file where this data + * should be written. If `typeof position !== 'number'`, the data will be written + * at the current position. See [`pwrite(2)`](http://man7.org/linux/man-pages/man2/pwrite.2.html). + * + * The callback will be given three arguments `(err, bytesWritten, buffer)` where`bytesWritten` specifies how many _bytes_ were written from `buffer`. + * + * If this method is invoked as its `util.promisify()` ed version, it returns + * a promise for an `Object` with `bytesWritten` and `buffer` properties. + * + * It is unsafe to use `fs.write()` multiple times on the same file without waiting + * for the callback. For this scenario, {@link createWriteStream} is + * recommended. + * + * On Linux, positional writes don't work when the file is opened in append mode. + * The kernel ignores the position argument and always appends the data to + * the end of the file. 
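A minimal sketch of the buffer form of `fs.write()` described above, writing through a descriptor opened in append mode; the filename and message are placeholders.

```js
import { open, write, close } from 'fs';
import { Buffer } from 'buffer';

open('log.txt', 'a', (err, fd) => {
  if (err) throw err;
  const buf = Buffer.from('one line of output\n');
  // offset 0, write the whole buffer; position null = current position
  write(fd, buf, 0, buf.length, null, (err, bytesWritten) => {
    if (err) throw err;
    console.log(`wrote ${bytesWritten} bytes`);
    close(fd, (err) => { if (err) throw err; });
  });
});
```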
+ * @since v0.0.2 + */ + export function write( + fd: number, + buffer: TBuffer, + offset: number | undefined | null, + length: number | undefined | null, + position: number | undefined | null, + callback: (err: NodeJS.ErrnoException | null, written: number, buffer: TBuffer) => void + ): void; + /** + * Asynchronously writes `buffer` to the file referenced by the supplied file descriptor. + * @param fd A file descriptor. + * @param offset The part of the buffer to be written. If not supplied, defaults to `0`. + * @param length The number of bytes to write. If not supplied, defaults to `buffer.length - offset`. + */ + export function write( + fd: number, + buffer: TBuffer, + offset: number | undefined | null, + length: number | undefined | null, + callback: (err: NodeJS.ErrnoException | null, written: number, buffer: TBuffer) => void + ): void; + /** + * Asynchronously writes `buffer` to the file referenced by the supplied file descriptor. + * @param fd A file descriptor. + * @param offset The part of the buffer to be written. If not supplied, defaults to `0`. + */ + export function write( + fd: number, + buffer: TBuffer, + offset: number | undefined | null, + callback: (err: NodeJS.ErrnoException | null, written: number, buffer: TBuffer) => void + ): void; + /** + * Asynchronously writes `buffer` to the file referenced by the supplied file descriptor. + * @param fd A file descriptor. + */ + export function write(fd: number, buffer: TBuffer, callback: (err: NodeJS.ErrnoException | null, written: number, buffer: TBuffer) => void): void; + /** + * Asynchronously writes `string` to the file referenced by the supplied file descriptor. + * @param fd A file descriptor. + * @param string A string to write. + * @param position The offset from the beginning of the file where this data should be written. If not supplied, defaults to the current position. + * @param encoding The expected string encoding. + */ + export function write( + fd: number, + string: string, + position: number | undefined | null, + encoding: BufferEncoding | undefined | null, + callback: (err: NodeJS.ErrnoException | null, written: number, str: string) => void + ): void; + /** + * Asynchronously writes `string` to the file referenced by the supplied file descriptor. + * @param fd A file descriptor. + * @param string A string to write. + * @param position The offset from the beginning of the file where this data should be written. If not supplied, defaults to the current position. + */ + export function write(fd: number, string: string, position: number | undefined | null, callback: (err: NodeJS.ErrnoException | null, written: number, str: string) => void): void; + /** + * Asynchronously writes `string` to the file referenced by the supplied file descriptor. + * @param fd A file descriptor. + * @param string A string to write. + */ + export function write(fd: number, string: string, callback: (err: NodeJS.ErrnoException | null, written: number, str: string) => void): void; + export namespace write { + /** + * Asynchronously writes `buffer` to the file referenced by the supplied file descriptor. + * @param fd A file descriptor. + * @param offset The part of the buffer to be written. If not supplied, defaults to `0`. + * @param length The number of bytes to write. If not supplied, defaults to `buffer.length - offset`. + * @param position The offset from the beginning of the file where this data should be written. If not supplied, defaults to the current position. 
+ */ + function __promisify__( + fd: number, + buffer?: TBuffer, + offset?: number, + length?: number, + position?: number | null + ): Promise<{ + bytesWritten: number; + buffer: TBuffer; + }>; + /** + * Asynchronously writes `string` to the file referenced by the supplied file descriptor. + * @param fd A file descriptor. + * @param string A string to write. + * @param position The offset from the beginning of the file where this data should be written. If not supplied, defaults to the current position. + * @param encoding The expected string encoding. + */ + function __promisify__( + fd: number, + string: string, + position?: number | null, + encoding?: BufferEncoding | null + ): Promise<{ + bytesWritten: number; + buffer: string; + }>; + } + /** + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link write}. + * @since v0.1.21 + * @return The number of bytes written. + */ + export function writeSync(fd: number, buffer: NodeJS.ArrayBufferView, offset?: number | null, length?: number | null, position?: number | null): number; + /** + * Synchronously writes `string` to the file referenced by the supplied file descriptor, returning the number of bytes written. + * @param fd A file descriptor. + * @param string A string to write. + * @param position The offset from the beginning of the file where this data should be written. If not supplied, defaults to the current position. + * @param encoding The expected string encoding. + */ + export function writeSync(fd: number, string: string, position?: number | null, encoding?: BufferEncoding | null): number; + export type ReadPosition = number | bigint; + export interface ReadSyncOptions { + /** + * @default 0 + */ + offset?: number | undefined; + /** + * @default `length of buffer` + */ + length?: number | undefined; + /** + * @default null + */ + position?: ReadPosition | null | undefined; + } + export interface ReadAsyncOptions extends ReadSyncOptions { + buffer?: TBuffer; + } + /** + * Read data from the file specified by `fd`. + * + * The callback is given the three arguments, `(err, bytesRead, buffer)`. + * + * If the file is not modified concurrently, the end-of-file is reached when the + * number of bytes read is zero. + * + * If this method is invoked as its `util.promisify()` ed version, it returns + * a promise for an `Object` with `bytesRead` and `buffer` properties. + * @since v0.0.2 + * @param buffer The buffer that the data will be written to. + * @param offset The position in `buffer` to write the data to. + * @param length The number of bytes to read. + * @param position Specifies where to begin reading from in the file. If `position` is `null` or `-1 `, data will be read from the current file position, and the file position will be updated. If + * `position` is an integer, the file position will be unchanged. + */ + export function read( + fd: number, + buffer: TBuffer, + offset: number, + length: number, + position: ReadPosition | null, + callback: (err: NodeJS.ErrnoException | null, bytesRead: number, buffer: TBuffer) => void + ): void; + /** + * Similar to the above `fs.read` function, this version takes an optional `options` object. 
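A sketch of the positional form of `fs.read()` declared above, reading the first 64 bytes of a file into a pre-allocated buffer; `'data.bin'` is a placeholder path.

```js
import { open, read, close } from 'fs';
import { Buffer } from 'buffer';

open('data.bin', 'r', (err, fd) => {
  if (err) throw err;
  const buf = Buffer.alloc(64);
  // offset 0 in buf, read up to 64 bytes starting at file position 0
  read(fd, buf, 0, 64, 0, (err, bytesRead, buffer) => {
    if (err) throw err;
    console.log(`read ${bytesRead} bytes`, buffer.subarray(0, bytesRead));
    close(fd, (err) => { if (err) throw err; });
  });
});
```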
+ * If not otherwise specified in an `options` object, + * `buffer` defaults to `Buffer.alloc(16384)`, + * `offset` defaults to `0`, + * `length` defaults to `buffer.byteLength`, `- offset` as of Node 17.6.0 + * `position` defaults to `null` + * @since v12.17.0, 13.11.0 + */ + export function read( + fd: number, + options: ReadAsyncOptions, + callback: (err: NodeJS.ErrnoException | null, bytesRead: number, buffer: TBuffer) => void + ): void; + export function read(fd: number, callback: (err: NodeJS.ErrnoException | null, bytesRead: number, buffer: NodeJS.ArrayBufferView) => void): void; + export namespace read { + /** + * @param fd A file descriptor. + * @param buffer The buffer that the data will be written to. + * @param offset The offset in the buffer at which to start writing. + * @param length The number of bytes to read. + * @param position The offset from the beginning of the file from which data should be read. If `null`, data will be read from the current position. + */ + function __promisify__( + fd: number, + buffer: TBuffer, + offset: number, + length: number, + position: number | null + ): Promise<{ + bytesRead: number; + buffer: TBuffer; + }>; + function __promisify__( + fd: number, + options: ReadAsyncOptions + ): Promise<{ + bytesRead: number; + buffer: TBuffer; + }>; + function __promisify__(fd: number): Promise<{ + bytesRead: number; + buffer: NodeJS.ArrayBufferView; + }>; + } + /** + * Returns the number of `bytesRead`. + * + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link read}. + * @since v0.1.21 + */ + export function readSync(fd: number, buffer: NodeJS.ArrayBufferView, offset: number, length: number, position: ReadPosition | null): number; + /** + * Similar to the above `fs.readSync` function, this version takes an optional `options` object. + * If no `options` object is specified, it will default with the above values. + */ + export function readSync(fd: number, buffer: NodeJS.ArrayBufferView, opts?: ReadSyncOptions): number; + /** + * Asynchronously reads the entire contents of a file. + * + * ```js + * import { readFile } from 'fs'; + * + * readFile('/etc/passwd', (err, data) => { + * if (err) throw err; + * console.log(data); + * }); + * ``` + * + * The callback is passed two arguments `(err, data)`, where `data` is the + * contents of the file. + * + * If no encoding is specified, then the raw buffer is returned. + * + * If `options` is a string, then it specifies the encoding: + * + * ```js + * import { readFile } from 'fs'; + * + * readFile('/etc/passwd', 'utf8', callback); + * ``` + * + * When the path is a directory, the behavior of `fs.readFile()` and {@link readFileSync} is platform-specific. On macOS, Linux, and Windows, an + * error will be returned. On FreeBSD, a representation of the directory's contents + * will be returned. + * + * ```js + * import { readFile } from 'fs'; + * + * // macOS, Linux, and Windows + * readFile('', (err, data) => { + * // => [Error: EISDIR: illegal operation on a directory, read ] + * }); + * + * // FreeBSD + * readFile('', (err, data) => { + * // => null, + * }); + * ``` + * + * It is possible to abort an ongoing request using an `AbortSignal`. If a + * request is aborted the callback is called with an `AbortError`: + * + * ```js + * import { readFile } from 'fs'; + * + * const controller = new AbortController(); + * const signal = controller.signal; + * readFile(fileInfo[0].name, { signal }, (err, buf) => { + * // ... 
+ * }); + * // When you want to abort the request + * controller.abort(); + * ``` + * + * The `fs.readFile()` function buffers the entire file. To minimize memory costs, + * when possible prefer streaming via `fs.createReadStream()`. + * + * Aborting an ongoing request does not abort individual operating + * system requests but rather the internal buffering `fs.readFile` performs. + * @since v0.1.29 + * @param path filename or file descriptor + */ + export function readFile( + path: PathOrFileDescriptor, + options: + | ({ + encoding?: null | undefined; + flag?: string | undefined; + } & Abortable) + | undefined + | null, + callback: (err: NodeJS.ErrnoException | null, data: Buffer) => void + ): void; + /** + * Asynchronously reads the entire contents of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param options Either the encoding for the result, or an object that contains the encoding and an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + export function readFile( + path: PathOrFileDescriptor, + options: + | ({ + encoding: BufferEncoding; + flag?: string | undefined; + } & Abortable) + | BufferEncoding, + callback: (err: NodeJS.ErrnoException | null, data: string) => void + ): void; + /** + * Asynchronously reads the entire contents of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param options Either the encoding for the result, or an object that contains the encoding and an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + export function readFile( + path: PathOrFileDescriptor, + options: + | (ObjectEncodingOptions & { + flag?: string | undefined; + } & Abortable) + | BufferEncoding + | undefined + | null, + callback: (err: NodeJS.ErrnoException | null, data: string | Buffer) => void + ): void; + /** + * Asynchronously reads the entire contents of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + */ + export function readFile(path: PathOrFileDescriptor, callback: (err: NodeJS.ErrnoException | null, data: Buffer) => void): void; + export namespace readFile { + /** + * Asynchronously reads the entire contents of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param options An object that may contain an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + function __promisify__( + path: PathOrFileDescriptor, + options?: { + encoding?: null | undefined; + flag?: string | undefined; + } | null + ): Promise; + /** + * Asynchronously reads the entire contents of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * URL support is _experimental_. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param options Either the encoding for the result, or an object that contains the encoding and an optional flag. + * If a flag is not provided, it defaults to `'r'`. 
+ */ + function __promisify__( + path: PathOrFileDescriptor, + options: + | { + encoding: BufferEncoding; + flag?: string | undefined; + } + | BufferEncoding + ): Promise; + /** + * Asynchronously reads the entire contents of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * URL support is _experimental_. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param options Either the encoding for the result, or an object that contains the encoding and an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + function __promisify__( + path: PathOrFileDescriptor, + options?: + | (ObjectEncodingOptions & { + flag?: string | undefined; + }) + | BufferEncoding + | null + ): Promise; + } + /** + * Returns the contents of the `path`. + * + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link readFile}. + * + * If the `encoding` option is specified then this function returns a + * string. Otherwise it returns a buffer. + * + * Similar to {@link readFile}, when the path is a directory, the behavior of`fs.readFileSync()` is platform-specific. + * + * ```js + * import { readFileSync } from 'fs'; + * + * // macOS, Linux, and Windows + * readFileSync(''); + * // => [Error: EISDIR: illegal operation on a directory, read ] + * + * // FreeBSD + * readFileSync(''); // => + * ``` + * @since v0.1.8 + * @param path filename or file descriptor + */ + export function readFileSync( + path: PathOrFileDescriptor, + options?: { + encoding?: null | undefined; + flag?: string | undefined; + } | null + ): Buffer; + /** + * Synchronously reads the entire contents of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param options Either the encoding for the result, or an object that contains the encoding and an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + export function readFileSync( + path: PathOrFileDescriptor, + options: + | { + encoding: BufferEncoding; + flag?: string | undefined; + } + | BufferEncoding + ): string; + /** + * Synchronously reads the entire contents of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param options Either the encoding for the result, or an object that contains the encoding and an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + export function readFileSync( + path: PathOrFileDescriptor, + options?: + | (ObjectEncodingOptions & { + flag?: string | undefined; + }) + | BufferEncoding + | null + ): string | Buffer; + export type WriteFileOptions = + | (ObjectEncodingOptions & + Abortable & { + mode?: Mode | undefined; + flag?: string | undefined; + }) + | BufferEncoding + | null; + /** + * When `file` is a filename, asynchronously writes data to the file, replacing the + * file if it already exists. `data` can be a string or a buffer. + * + * When `file` is a file descriptor, the behavior is similar to calling`fs.write()` directly (which is recommended). See the notes below on using + * a file descriptor. + * + * The `encoding` option is ignored if `data` is a buffer. + * + * The `mode` option only affects the newly created file. See {@link open} for more details. 
+ * + * ```js + * import { writeFile } from 'fs'; + * import { Buffer } from 'buffer'; + * + * const data = new Uint8Array(Buffer.from('Hello Node.js')); + * writeFile('message.txt', data, (err) => { + * if (err) throw err; + * console.log('The file has been saved!'); + * }); + * ``` + * + * If `options` is a string, then it specifies the encoding: + * + * ```js + * import { writeFile } from 'fs'; + * + * writeFile('message.txt', 'Hello Node.js', 'utf8', callback); + * ``` + * + * It is unsafe to use `fs.writeFile()` multiple times on the same file without + * waiting for the callback. For this scenario, {@link createWriteStream} is + * recommended. + * + * Similarly to `fs.readFile` \- `fs.writeFile` is a convenience method that + * performs multiple `write` calls internally to write the buffer passed to it. + * For performance sensitive code consider using {@link createWriteStream}. + * + * It is possible to use an `AbortSignal` to cancel an `fs.writeFile()`. + * Cancelation is "best effort", and some amount of data is likely still + * to be written. + * + * ```js + * import { writeFile } from 'fs'; + * import { Buffer } from 'buffer'; + * + * const controller = new AbortController(); + * const { signal } = controller; + * const data = new Uint8Array(Buffer.from('Hello Node.js')); + * writeFile('message.txt', data, { signal }, (err) => { + * // When a request is aborted - the callback is called with an AbortError + * }); + * // When the request should be aborted + * controller.abort(); + * ``` + * + * Aborting an ongoing request does not abort individual operating + * system requests but rather the internal buffering `fs.writeFile` performs. + * @since v0.1.29 + * @param file filename or file descriptor + */ + export function writeFile(file: PathOrFileDescriptor, data: string | NodeJS.ArrayBufferView, options: WriteFileOptions, callback: NoParamCallback): void; + /** + * Asynchronously writes data to a file, replacing the file if it already exists. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param data The data to write. If something other than a Buffer or Uint8Array is provided, the value is coerced to a string. + */ + export function writeFile(path: PathOrFileDescriptor, data: string | NodeJS.ArrayBufferView, callback: NoParamCallback): void; + export namespace writeFile { + /** + * Asynchronously writes data to a file, replacing the file if it already exists. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * URL support is _experimental_. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param data The data to write. If something other than a Buffer or Uint8Array is provided, the value is coerced to a string. + * @param options Either the encoding for the file, or an object optionally specifying the encoding, file mode, and flag. + * If `encoding` is not supplied, the default of `'utf8'` is used. + * If `mode` is not supplied, the default of `0o666` is used. + * If `mode` is a string, it is parsed as an octal integer. + * If `flag` is not supplied, the default of `'w'` is used. + */ + function __promisify__(path: PathOrFileDescriptor, data: string | NodeJS.ArrayBufferView, options?: WriteFileOptions): Promise; + } + /** + * Returns `undefined`. + * + * The `mode` option only affects the newly created file. See {@link open} for more details. 
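The `__promisify__` declarations above describe the shape produced by `util.promisify()`; a small sketch of that usage, with `'notes.txt'` as a placeholder filename.

```js
import { writeFile } from 'fs';
import { promisify } from 'util';

const writeFileAsync = promisify(writeFile);

// Resolves with undefined once the data has been written.
await writeFileAsync('notes.txt', 'hello\n', { encoding: 'utf8', flag: 'w' });
```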
+ * + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link writeFile}. + * @since v0.1.29 + * @param file filename or file descriptor + */ + export function writeFileSync(file: PathOrFileDescriptor, data: string | NodeJS.ArrayBufferView, options?: WriteFileOptions): void; + /** + * Asynchronously append data to a file, creating the file if it does not yet + * exist. `data` can be a string or a `Buffer`. + * + * The `mode` option only affects the newly created file. See {@link open} for more details. + * + * ```js + * import { appendFile } from 'fs'; + * + * appendFile('message.txt', 'data to append', (err) => { + * if (err) throw err; + * console.log('The "data to append" was appended to file!'); + * }); + * ``` + * + * If `options` is a string, then it specifies the encoding: + * + * ```js + * import { appendFile } from 'fs'; + * + * appendFile('message.txt', 'data to append', 'utf8', callback); + * ``` + * + * The `path` may be specified as a numeric file descriptor that has been opened + * for appending (using `fs.open()` or `fs.openSync()`). The file descriptor will + * not be closed automatically. + * + * ```js + * import { open, close, appendFile } from 'fs'; + * + * function closeFd(fd) { + * close(fd, (err) => { + * if (err) throw err; + * }); + * } + * + * open('message.txt', 'a', (err, fd) => { + * if (err) throw err; + * + * try { + * appendFile(fd, 'data to append', 'utf8', (err) => { + * closeFd(fd); + * if (err) throw err; + * }); + * } catch (err) { + * closeFd(fd); + * throw err; + * } + * }); + * ``` + * @since v0.6.7 + * @param path filename or file descriptor + */ + export function appendFile(path: PathOrFileDescriptor, data: string | Uint8Array, options: WriteFileOptions, callback: NoParamCallback): void; + /** + * Asynchronously append data to a file, creating the file if it does not exist. + * @param file A path to a file. If a URL is provided, it must use the `file:` protocol. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param data The data to write. If something other than a Buffer or Uint8Array is provided, the value is coerced to a string. + */ + export function appendFile(file: PathOrFileDescriptor, data: string | Uint8Array, callback: NoParamCallback): void; + export namespace appendFile { + /** + * Asynchronously append data to a file, creating the file if it does not exist. + * @param file A path to a file. If a URL is provided, it must use the `file:` protocol. + * URL support is _experimental_. + * If a file descriptor is provided, the underlying file will _not_ be closed automatically. + * @param data The data to write. If something other than a Buffer or Uint8Array is provided, the value is coerced to a string. + * @param options Either the encoding for the file, or an object optionally specifying the encoding, file mode, and flag. + * If `encoding` is not supplied, the default of `'utf8'` is used. + * If `mode` is not supplied, the default of `0o666` is used. + * If `mode` is a string, it is parsed as an octal integer. + * If `flag` is not supplied, the default of `'a'` is used. + */ + function __promisify__(file: PathOrFileDescriptor, data: string | Uint8Array, options?: WriteFileOptions): Promise; + } + /** + * Synchronously append data to a file, creating the file if it does not yet + * exist. `data` can be a string or a `Buffer`. + * + * The `mode` option only affects the newly created file. See {@link open} for more details. 
+ * + * ```js + * import { appendFileSync } from 'fs'; + * + * try { + * appendFileSync('message.txt', 'data to append'); + * console.log('The "data to append" was appended to file!'); + * } catch (err) { + * // Handle the error + * } + * ``` + * + * If `options` is a string, then it specifies the encoding: + * + * ```js + * import { appendFileSync } from 'fs'; + * + * appendFileSync('message.txt', 'data to append', 'utf8'); + * ``` + * + * The `path` may be specified as a numeric file descriptor that has been opened + * for appending (using `fs.open()` or `fs.openSync()`). The file descriptor will + * not be closed automatically. + * + * ```js + * import { openSync, closeSync, appendFileSync } from 'fs'; + * + * let fd; + * + * try { + * fd = openSync('message.txt', 'a'); + * appendFileSync(fd, 'data to append', 'utf8'); + * } catch (err) { + * // Handle the error + * } finally { + * if (fd !== undefined) + * closeSync(fd); + * } + * ``` + * @since v0.6.7 + * @param path filename or file descriptor + */ + export function appendFileSync(path: PathOrFileDescriptor, data: string | Uint8Array, options?: WriteFileOptions): void; + /** + * Watch for changes on `filename`. The callback `listener` will be called each + * time the file is accessed. + * + * The `options` argument may be omitted. If provided, it should be an object. The`options` object may contain a boolean named `persistent` that indicates + * whether the process should continue to run as long as files are being watched. + * The `options` object may specify an `interval` property indicating how often the + * target should be polled in milliseconds. + * + * The `listener` gets two arguments the current stat object and the previous + * stat object: + * + * ```js + * import { watchFile } from 'fs'; + * + * watchFile('message.text', (curr, prev) => { + * console.log(`the current mtime is: ${curr.mtime}`); + * console.log(`the previous mtime was: ${prev.mtime}`); + * }); + * ``` + * + * These stat objects are instances of `fs.Stat`. If the `bigint` option is `true`, + * the numeric values in these objects are specified as `BigInt`s. + * + * To be notified when the file was modified, not just accessed, it is necessary + * to compare `curr.mtimeMs` and `prev.mtimeMs`. + * + * When an `fs.watchFile` operation results in an `ENOENT` error, it + * will invoke the listener once, with all the fields zeroed (or, for dates, the + * Unix Epoch). If the file is created later on, the listener will be called + * again, with the latest stat objects. This is a change in functionality since + * v0.10. + * + * Using {@link watch} is more efficient than `fs.watchFile` and`fs.unwatchFile`. `fs.watch` should be used instead of `fs.watchFile` and`fs.unwatchFile` when possible. + * + * When a file being watched by `fs.watchFile()` disappears and reappears, + * then the contents of `previous` in the second callback event (the file's + * reappearance) will be the same as the contents of `previous` in the first + * callback event (its disappearance). + * + * This happens when: + * + * * the file is deleted, followed by a restore + * * the file is renamed and then renamed a second time back to its original name + * @since v0.1.31 + */ + export interface WatchFileOptions { + bigint?: boolean | undefined; + persistent?: boolean | undefined; + interval?: number | undefined; + } + /** + * Watch for changes on `filename`. The callback `listener` will be called each + * time the file is accessed. + * + * The `options` argument may be omitted. 
If provided, it should be an object. The`options` object may contain a boolean named `persistent` that indicates + * whether the process should continue to run as long as files are being watched. + * The `options` object may specify an `interval` property indicating how often the + * target should be polled in milliseconds. + * + * The `listener` gets two arguments the current stat object and the previous + * stat object: + * + * ```js + * import { watchFile } from 'fs'; + * + * watchFile('message.text', (curr, prev) => { + * console.log(`the current mtime is: ${curr.mtime}`); + * console.log(`the previous mtime was: ${prev.mtime}`); + * }); + * ``` + * + * These stat objects are instances of `fs.Stat`. If the `bigint` option is `true`, + * the numeric values in these objects are specified as `BigInt`s. + * + * To be notified when the file was modified, not just accessed, it is necessary + * to compare `curr.mtimeMs` and `prev.mtimeMs`. + * + * When an `fs.watchFile` operation results in an `ENOENT` error, it + * will invoke the listener once, with all the fields zeroed (or, for dates, the + * Unix Epoch). If the file is created later on, the listener will be called + * again, with the latest stat objects. This is a change in functionality since + * v0.10. + * + * Using {@link watch} is more efficient than `fs.watchFile` and`fs.unwatchFile`. `fs.watch` should be used instead of `fs.watchFile` and`fs.unwatchFile` when possible. + * + * When a file being watched by `fs.watchFile()` disappears and reappears, + * then the contents of `previous` in the second callback event (the file's + * reappearance) will be the same as the contents of `previous` in the first + * callback event (its disappearance). + * + * This happens when: + * + * * the file is deleted, followed by a restore + * * the file is renamed and then renamed a second time back to its original name + * @since v0.1.31 + */ + export function watchFile( + filename: PathLike, + options: + | (WatchFileOptions & { + bigint?: false | undefined; + }) + | undefined, + listener: (curr: Stats, prev: Stats) => void + ): StatWatcher; + export function watchFile( + filename: PathLike, + options: + | (WatchFileOptions & { + bigint: true; + }) + | undefined, + listener: (curr: BigIntStats, prev: BigIntStats) => void + ): StatWatcher; + /** + * Watch for changes on `filename`. The callback `listener` will be called each time the file is accessed. + * @param filename A path to a file or directory. If a URL is provided, it must use the `file:` protocol. + */ + export function watchFile(filename: PathLike, listener: (curr: Stats, prev: Stats) => void): StatWatcher; + /** + * Stop watching for changes on `filename`. If `listener` is specified, only that + * particular listener is removed. Otherwise, _all_ listeners are removed, + * effectively stopping watching of `filename`. + * + * Calling `fs.unwatchFile()` with a filename that is not being watched is a + * no-op, not an error. + * + * Using {@link watch} is more efficient than `fs.watchFile()` and`fs.unwatchFile()`. `fs.watch()` should be used instead of `fs.watchFile()`and `fs.unwatchFile()` when possible. 
+ * @since v0.1.31 + * @param listener Optional, a listener previously attached using `fs.watchFile()` + */ + export function unwatchFile(filename: PathLike, listener?: (curr: Stats, prev: Stats) => void): void; + export interface WatchOptions extends Abortable { + encoding?: BufferEncoding | 'buffer' | undefined; + persistent?: boolean | undefined; + recursive?: boolean | undefined; + } + export type WatchEventType = 'rename' | 'change'; + export type WatchListener = (event: WatchEventType, filename: T) => void; + /** + * Watch for changes on `filename`, where `filename` is either a file or a + * directory. + * + * The second argument is optional. If `options` is provided as a string, it + * specifies the `encoding`. Otherwise `options` should be passed as an object. + * + * The listener callback gets two arguments `(eventType, filename)`. `eventType`is either `'rename'` or `'change'`, and `filename` is the name of the file + * which triggered the event. + * + * On most platforms, `'rename'` is emitted whenever a filename appears or + * disappears in the directory. + * + * The listener callback is attached to the `'change'` event fired by `fs.FSWatcher`, but it is not the same thing as the `'change'` value of`eventType`. + * + * If a `signal` is passed, aborting the corresponding AbortController will close + * the returned `fs.FSWatcher`. + * @since v0.5.10 + * @param listener + */ + export function watch( + filename: PathLike, + options: + | (WatchOptions & { + encoding: 'buffer'; + }) + | 'buffer', + listener?: WatchListener + ): FSWatcher; + /** + * Watch for changes on `filename`, where `filename` is either a file or a directory, returning an `FSWatcher`. + * @param filename A path to a file or directory. If a URL is provided, it must use the `file:` protocol. + * @param options Either the encoding for the filename provided to the listener, or an object optionally specifying encoding, persistent, and recursive options. + * If `encoding` is not supplied, the default of `'utf8'` is used. + * If `persistent` is not supplied, the default of `true` is used. + * If `recursive` is not supplied, the default of `false` is used. + */ + export function watch(filename: PathLike, options?: WatchOptions | BufferEncoding | null, listener?: WatchListener): FSWatcher; + /** + * Watch for changes on `filename`, where `filename` is either a file or a directory, returning an `FSWatcher`. + * @param filename A path to a file or directory. If a URL is provided, it must use the `file:` protocol. + * @param options Either the encoding for the filename provided to the listener, or an object optionally specifying encoding, persistent, and recursive options. + * If `encoding` is not supplied, the default of `'utf8'` is used. + * If `persistent` is not supplied, the default of `true` is used. + * If `recursive` is not supplied, the default of `false` is used. + */ + export function watch(filename: PathLike, options: WatchOptions | string, listener?: WatchListener): FSWatcher; + /** + * Watch for changes on `filename`, where `filename` is either a file or a directory, returning an `FSWatcher`. + * @param filename A path to a file or directory. If a URL is provided, it must use the `file:` protocol. + */ + export function watch(filename: PathLike, listener?: WatchListener): FSWatcher; + /** + * Test whether or not the given path exists by checking with the file system. 
+ * Then call the `callback` argument with either true or false: + * + * ```js + * import { exists } from 'fs'; + * + * exists('/etc/passwd', (e) => { + * console.log(e ? 'it exists' : 'no passwd!'); + * }); + * ``` + * + * **The parameters for this callback are not consistent with other Node.js** + * **callbacks.** Normally, the first parameter to a Node.js callback is an `err`parameter, optionally followed by other parameters. The `fs.exists()` callback + * has only one boolean parameter. This is one reason `fs.access()` is recommended + * instead of `fs.exists()`. + * + * Using `fs.exists()` to check for the existence of a file before calling`fs.open()`, `fs.readFile()` or `fs.writeFile()` is not recommended. Doing + * so introduces a race condition, since other processes may change the file's + * state between the two calls. Instead, user code should open/read/write the + * file directly and handle the error raised if the file does not exist. + * + * **write (NOT RECOMMENDED)** + * + * ```js + * import { exists, open, close } from 'fs'; + * + * exists('myfile', (e) => { + * if (e) { + * console.error('myfile already exists'); + * } else { + * open('myfile', 'wx', (err, fd) => { + * if (err) throw err; + * + * try { + * writeMyData(fd); + * } finally { + * close(fd, (err) => { + * if (err) throw err; + * }); + * } + * }); + * } + * }); + * ``` + * + * **write (RECOMMENDED)** + * + * ```js + * import { open, close } from 'fs'; + * open('myfile', 'wx', (err, fd) => { + * if (err) { + * if (err.code === 'EEXIST') { + * console.error('myfile already exists'); + * return; + * } + * + * throw err; + * } + * + * try { + * writeMyData(fd); + * } finally { + * close(fd, (err) => { + * if (err) throw err; + * }); + * } + * }); + * ``` + * + * **read (NOT RECOMMENDED)** + * + * ```js + * import { open, close, exists } from 'fs'; + * + * exists('myfile', (e) => { + * if (e) { + * open('myfile', 'r', (err, fd) => { + * if (err) throw err; + * + * try { + * readMyData(fd); + * } finally { + * close(fd, (err) => { + * if (err) throw err; + * }); + * } + * }); + * } else { + * console.error('myfile does not exist'); + * } + * }); + * ``` + * + * **read (RECOMMENDED)** + * + * ```js + * import { open, close } from 'fs'; + * + * open('myfile', 'r', (err, fd) => { + * if (err) { + * if (err.code === 'ENOENT') { + * console.error('myfile does not exist'); + * return; + * } + * + * throw err; + * } + * + * try { + * readMyData(fd); + * } finally { + * close(fd, (err) => { + * if (err) throw err; + * }); + * } + * }); + * ``` + * + * The "not recommended" examples above check for existence and then use the + * file; the "recommended" examples are better because they use the file directly + * and handle the error, if any. + * + * In general, check for the existence of a file only if the file won’t be + * used directly, for example when its existence is a signal from another + * process. + * @since v0.0.2 + * @deprecated Since v1.0.0 - Use {@link stat} or {@link access} instead. + */ + export function exists(path: PathLike, callback: (exists: boolean) => void): void; + /** @deprecated */ + export namespace exists { + /** + * @param path A path to a file or directory. If a URL is provided, it must use the `file:` protocol. + * URL support is _experimental_. + */ + function __promisify__(path: PathLike): Promise; + } + /** + * Returns `true` if the path exists, `false` otherwise. + * + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link exists}. 
+ * + * `fs.exists()` is deprecated, but `fs.existsSync()` is not. The `callback`parameter to `fs.exists()` accepts parameters that are inconsistent with other + * Node.js callbacks. `fs.existsSync()` does not use a callback. + * + * ```js + * import { existsSync } from 'fs'; + * + * if (existsSync('/etc/passwd')) + * console.log('The path exists.'); + * ``` + * @since v0.1.21 + */ + export function existsSync(path: PathLike): boolean; + export namespace constants { + // File Access Constants + /** Constant for fs.access(). File is visible to the calling process. */ + const F_OK: number; + /** Constant for fs.access(). File can be read by the calling process. */ + const R_OK: number; + /** Constant for fs.access(). File can be written by the calling process. */ + const W_OK: number; + /** Constant for fs.access(). File can be executed by the calling process. */ + const X_OK: number; + // File Copy Constants + /** Constant for fs.copyFile. Flag indicating the destination file should not be overwritten if it already exists. */ + const COPYFILE_EXCL: number; + /** + * Constant for fs.copyFile. copy operation will attempt to create a copy-on-write reflink. + * If the underlying platform does not support copy-on-write, then a fallback copy mechanism is used. + */ + const COPYFILE_FICLONE: number; + /** + * Constant for fs.copyFile. Copy operation will attempt to create a copy-on-write reflink. + * If the underlying platform does not support copy-on-write, then the operation will fail with an error. + */ + const COPYFILE_FICLONE_FORCE: number; + // File Open Constants + /** Constant for fs.open(). Flag indicating to open a file for read-only access. */ + const O_RDONLY: number; + /** Constant for fs.open(). Flag indicating to open a file for write-only access. */ + const O_WRONLY: number; + /** Constant for fs.open(). Flag indicating to open a file for read-write access. */ + const O_RDWR: number; + /** Constant for fs.open(). Flag indicating to create the file if it does not already exist. */ + const O_CREAT: number; + /** Constant for fs.open(). Flag indicating that opening a file should fail if the O_CREAT flag is set and the file already exists. */ + const O_EXCL: number; + /** + * Constant for fs.open(). Flag indicating that if path identifies a terminal device, + * opening the path shall not cause that terminal to become the controlling terminal for the process + * (if the process does not already have one). + */ + const O_NOCTTY: number; + /** Constant for fs.open(). Flag indicating that if the file exists and is a regular file, and the file is opened successfully for write access, its length shall be truncated to zero. */ + const O_TRUNC: number; + /** Constant for fs.open(). Flag indicating that data will be appended to the end of the file. */ + const O_APPEND: number; + /** Constant for fs.open(). Flag indicating that the open should fail if the path is not a directory. */ + const O_DIRECTORY: number; + /** + * constant for fs.open(). + * Flag indicating reading accesses to the file system will no longer result in + * an update to the atime information associated with the file. + * This flag is available on Linux operating systems only. + */ + const O_NOATIME: number; + /** Constant for fs.open(). Flag indicating that the open should fail if the path is a symbolic link. */ + const O_NOFOLLOW: number; + /** Constant for fs.open(). Flag indicating that the file is opened for synchronous I/O. */ + const O_SYNC: number; + /** Constant for fs.open(). 
Flag indicating that the file is opened for synchronous I/O with write operations waiting for data integrity. */ + const O_DSYNC: number; + /** Constant for fs.open(). Flag indicating to open the symbolic link itself rather than the resource it is pointing to. */ + const O_SYMLINK: number; + /** Constant for fs.open(). When set, an attempt will be made to minimize caching effects of file I/O. */ + const O_DIRECT: number; + /** Constant for fs.open(). Flag indicating to open the file in nonblocking mode when possible. */ + const O_NONBLOCK: number; + // File Type Constants + /** Constant for fs.Stats mode property for determining a file's type. Bit mask used to extract the file type code. */ + const S_IFMT: number; + /** Constant for fs.Stats mode property for determining a file's type. File type constant for a regular file. */ + const S_IFREG: number; + /** Constant for fs.Stats mode property for determining a file's type. File type constant for a directory. */ + const S_IFDIR: number; + /** Constant for fs.Stats mode property for determining a file's type. File type constant for a character-oriented device file. */ + const S_IFCHR: number; + /** Constant for fs.Stats mode property for determining a file's type. File type constant for a block-oriented device file. */ + const S_IFBLK: number; + /** Constant for fs.Stats mode property for determining a file's type. File type constant for a FIFO/pipe. */ + const S_IFIFO: number; + /** Constant for fs.Stats mode property for determining a file's type. File type constant for a symbolic link. */ + const S_IFLNK: number; + /** Constant for fs.Stats mode property for determining a file's type. File type constant for a socket. */ + const S_IFSOCK: number; + // File Mode Constants + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable, writable and executable by owner. */ + const S_IRWXU: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable by owner. */ + const S_IRUSR: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating writable by owner. */ + const S_IWUSR: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating executable by owner. */ + const S_IXUSR: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable, writable and executable by group. */ + const S_IRWXG: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable by group. */ + const S_IRGRP: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating writable by group. */ + const S_IWGRP: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating executable by group. */ + const S_IXGRP: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable, writable and executable by others. */ + const S_IRWXO: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable by others. */ + const S_IROTH: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating writable by others. 
*/ + const S_IWOTH: number; + /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating executable by others. */ + const S_IXOTH: number; + /** + * When set, a memory file mapping is used to access the file. This flag + * is available on Windows operating systems only. On other operating systems, + * this flag is ignored. + */ + const UV_FS_O_FILEMAP: number; + } + /** + * Tests a user's permissions for the file or directory specified by `path`. + * The `mode` argument is an optional integer that specifies the accessibility + * checks to be performed. `mode` should be either the value `fs.constants.F_OK`or a mask consisting of the bitwise OR of any of `fs.constants.R_OK`,`fs.constants.W_OK`, and `fs.constants.X_OK` + * (e.g.`fs.constants.W_OK | fs.constants.R_OK`). Check `File access constants` for + * possible values of `mode`. + * + * The final argument, `callback`, is a callback function that is invoked with + * a possible error argument. If any of the accessibility checks fail, the error + * argument will be an `Error` object. The following examples check if`package.json` exists, and if it is readable or writable. + * + * ```js + * import { access, constants } from 'fs'; + * + * const file = 'package.json'; + * + * // Check if the file exists in the current directory. + * access(file, constants.F_OK, (err) => { + * console.log(`${file} ${err ? 'does not exist' : 'exists'}`); + * }); + * + * // Check if the file is readable. + * access(file, constants.R_OK, (err) => { + * console.log(`${file} ${err ? 'is not readable' : 'is readable'}`); + * }); + * + * // Check if the file is writable. + * access(file, constants.W_OK, (err) => { + * console.log(`${file} ${err ? 'is not writable' : 'is writable'}`); + * }); + * + * // Check if the file is readable and writable. + * access(file, constants.R_OK | constants.W_OK, (err) => { + * console.log(`${file} ${err ? 'is not' : 'is'} readable and writable`); + * }); + * ``` + * + * Do not use `fs.access()` to check for the accessibility of a file before calling`fs.open()`, `fs.readFile()` or `fs.writeFile()`. Doing + * so introduces a race condition, since other processes may change the file's + * state between the two calls. Instead, user code should open/read/write the + * file directly and handle the error raised if the file is not accessible. 
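The `fs.constants` file-type and file-mode constants declared above are documented without a usage example; a small sketch of masking `stats.mode` might look like this (the target path is arbitrary, and some `S_I*` permission constants may be unavailable on Windows).

```js
import { stat, constants } from 'fs';

stat('/etc/passwd', (err, stats) => {
  if (err) throw err;

  // Mask the mode with S_IFMT to isolate the file-type bits, then compare
  // against the type constants (S_IFREG, S_IFDIR, ...). In everyday code
  // stats.isFile() / stats.isDirectory() do the same check.
  const type = stats.mode & constants.S_IFMT;
  console.log(type === constants.S_IFREG ? 'regular file' : 'something else');

  // Permission bits can be tested the same way.
  if (stats.mode & constants.S_IRUSR) {
    console.log('readable by owner');
  }
});
```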
+ * + * **write (NOT RECOMMENDED)** + * + * ```js + * import { access, open, close } from 'fs'; + * + * access('myfile', (err) => { + * if (!err) { + * console.error('myfile already exists'); + * return; + * } + * + * open('myfile', 'wx', (err, fd) => { + * if (err) throw err; + * + * try { + * writeMyData(fd); + * } finally { + * close(fd, (err) => { + * if (err) throw err; + * }); + * } + * }); + * }); + * ``` + * + * **write (RECOMMENDED)** + * + * ```js + * import { open, close } from 'fs'; + * + * open('myfile', 'wx', (err, fd) => { + * if (err) { + * if (err.code === 'EEXIST') { + * console.error('myfile already exists'); + * return; + * } + * + * throw err; + * } + * + * try { + * writeMyData(fd); + * } finally { + * close(fd, (err) => { + * if (err) throw err; + * }); + * } + * }); + * ``` + * + * **read (NOT RECOMMENDED)** + * + * ```js + * import { access, open, close } from 'fs'; + * access('myfile', (err) => { + * if (err) { + * if (err.code === 'ENOENT') { + * console.error('myfile does not exist'); + * return; + * } + * + * throw err; + * } + * + * open('myfile', 'r', (err, fd) => { + * if (err) throw err; + * + * try { + * readMyData(fd); + * } finally { + * close(fd, (err) => { + * if (err) throw err; + * }); + * } + * }); + * }); + * ``` + * + * **read (RECOMMENDED)** + * + * ```js + * import { open, close } from 'fs'; + * + * open('myfile', 'r', (err, fd) => { + * if (err) { + * if (err.code === 'ENOENT') { + * console.error('myfile does not exist'); + * return; + * } + * + * throw err; + * } + * + * try { + * readMyData(fd); + * } finally { + * close(fd, (err) => { + * if (err) throw err; + * }); + * } + * }); + * ``` + * + * The "not recommended" examples above check for accessibility and then use the + * file; the "recommended" examples are better because they use the file directly + * and handle the error, if any. + * + * In general, check for the accessibility of a file only if the file will not be + * used directly, for example when its accessibility is a signal from another + * process. + * + * On Windows, access-control policies (ACLs) on a directory may limit access to + * a file or directory. The `fs.access()` function, however, does not check the + * ACL and therefore may report that a path is accessible even if the ACL restricts + * the user from reading or writing to it. + * @since v0.11.15 + * @param [mode=fs.constants.F_OK] + */ + export function access(path: PathLike, mode: number | undefined, callback: NoParamCallback): void; + /** + * Asynchronously tests a user's permissions for the file specified by path. + * @param path A path to a file or directory. If a URL is provided, it must use the `file:` protocol. + */ + export function access(path: PathLike, callback: NoParamCallback): void; + export namespace access { + /** + * Asynchronously tests a user's permissions for the file specified by path. + * @param path A path to a file or directory. If a URL is provided, it must use the `file:` protocol. + * URL support is _experimental_. + */ + function __promisify__(path: PathLike, mode?: number): Promise; + } + /** + * Synchronously tests a user's permissions for the file or directory specified + * by `path`. The `mode` argument is an optional integer that specifies the + * accessibility checks to be performed. `mode` should be either the value`fs.constants.F_OK` or a mask consisting of the bitwise OR of any of`fs.constants.R_OK`, `fs.constants.W_OK`, and + * `fs.constants.X_OK` (e.g.`fs.constants.W_OK | fs.constants.R_OK`). 
Check `File access constants` for + * possible values of `mode`. + * + * If any of the accessibility checks fail, an `Error` will be thrown. Otherwise, + * the method will return `undefined`. + * + * ```js + * import { accessSync, constants } from 'fs'; + * + * try { + * accessSync('etc/passwd', constants.R_OK | constants.W_OK); + * console.log('can read/write'); + * } catch (err) { + * console.error('no access!'); + * } + * ``` + * @since v0.11.15 + * @param [mode=fs.constants.F_OK] + */ + export function accessSync(path: PathLike, mode?: number): void; + interface StreamOptions { + flags?: string | undefined; + encoding?: BufferEncoding | undefined; + fd?: number | promises.FileHandle | undefined; + mode?: number | undefined; + autoClose?: boolean | undefined; + /** + * @default false + */ + emitClose?: boolean | undefined; + start?: number | undefined; + highWaterMark?: number | undefined; + } + interface ReadStreamOptions extends StreamOptions { + end?: number | undefined; + } + /** + * Unlike the 16 kb default `highWaterMark` for a `stream.Readable`, the stream + * returned by this method has a default `highWaterMark` of 64 kb. + * + * `options` can include `start` and `end` values to read a range of bytes from + * the file instead of the entire file. Both `start` and `end` are inclusive and + * start counting at 0, allowed values are in the + * \[0, [`Number.MAX_SAFE_INTEGER`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/MAX_SAFE_INTEGER)\] range. If `fd` is specified and `start` is + * omitted or `undefined`, `fs.createReadStream()` reads sequentially from the + * current file position. The `encoding` can be any one of those accepted by `Buffer`. + * + * If `fd` is specified, `ReadStream` will ignore the `path` argument and will use + * the specified file descriptor. This means that no `'open'` event will be + * emitted. `fd` should be blocking; non-blocking `fd`s should be passed to `net.Socket`. + * + * If `fd` points to a character device that only supports blocking reads + * (such as keyboard or sound card), read operations do not finish until data is + * available. This can prevent the process from exiting and the stream from + * closing naturally. + * + * By default, the stream will emit a `'close'` event after it has been + * destroyed. Set the `emitClose` option to `false` to change this behavior. + * + * By providing the `fs` option, it is possible to override the corresponding `fs`implementations for `open`, `read`, and `close`. When providing the `fs` option, + * an override for `read` is required. If no `fd` is provided, an override for`open` is also required. If `autoClose` is `true`, an override for `close` is + * also required. + * + * ```js + * import { createReadStream } from 'fs'; + * + * // Create a stream from some character device. + * const stream = createReadStream('/dev/input/event0'); + * setTimeout(() => { + * stream.close(); // This may not close the stream. + * // Artificially marking end-of-stream, as if the underlying resource had + * // indicated end-of-file by itself, allows the stream to close. + * // This does not cancel pending read operations, and if there is such an + * // operation, the process may still not be able to exit successfully + * // until it finishes. + * stream.push(null); + * stream.read(0); + * }, 100); + * ``` + * + * If `autoClose` is false, then the file descriptor won't be closed, even if + * there's an error. 
It is the application's responsibility to close it and make + * sure there's no file descriptor leak. If `autoClose` is set to true (default + * behavior), on `'error'` or `'end'` the file descriptor will be closed + * automatically. + * + * `mode` sets the file mode (permission and sticky bits), but only if the + * file was created. + * + * An example to read the last 10 bytes of a file which is 100 bytes long: + * + * ```js + * import { createReadStream } from 'fs'; + * + * createReadStream('sample.txt', { start: 90, end: 99 }); + * ``` + * + * If `options` is a string, then it specifies the encoding. + * @since v0.1.31 + */ + export function createReadStream(path: PathLike, options?: BufferEncoding | ReadStreamOptions): ReadStream; + /** + * `options` may also include a `start` option to allow writing data at some + * position past the beginning of the file, allowed values are in the + * \[0, [`Number.MAX_SAFE_INTEGER`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/MAX_SAFE_INTEGER)\] range. Modifying a file rather than + * replacing it may require the `flags` option to be set to `r+` rather than the + * default `w`. The `encoding` can be any one of those accepted by `Buffer`. + * + * If `autoClose` is set to true (default behavior) on `'error'` or `'finish'`the file descriptor will be closed automatically. If `autoClose` is false, + * then the file descriptor won't be closed, even if there's an error. + * It is the application's responsibility to close it and make sure there's no + * file descriptor leak. + * + * By default, the stream will emit a `'close'` event after it has been + * destroyed. Set the `emitClose` option to `false` to change this behavior. + * + * By providing the `fs` option it is possible to override the corresponding `fs`implementations for `open`, `write`, `writev` and `close`. Overriding `write()`without `writev()` can reduce + * performance as some optimizations (`_writev()`) + * will be disabled. When providing the `fs` option, overrides for at least one of`write` and `writev` are required. If no `fd` option is supplied, an override + * for `open` is also required. If `autoClose` is `true`, an override for `close`is also required. + * + * Like `fs.ReadStream`, if `fd` is specified, `fs.WriteStream` will ignore the`path` argument and will use the specified file descriptor. This means that no`'open'` event will be + * emitted. `fd` should be blocking; non-blocking `fd`s + * should be passed to `net.Socket`. + * + * If `options` is a string, then it specifies the encoding. + * @since v0.1.31 + */ + export function createWriteStream(path: PathLike, options?: BufferEncoding | StreamOptions): WriteStream; + /** + * Forces all currently queued I/O operations associated with the file to the + * operating system's synchronized I/O completion state. Refer to the POSIX [`fdatasync(2)`](http://man7.org/linux/man-pages/man2/fdatasync.2.html) documentation for details. No arguments other + * than a possible + * exception are given to the completion callback. + * @since v0.1.96 + */ + export function fdatasync(fd: number, callback: NoParamCallback): void; + export namespace fdatasync { + /** + * Asynchronous fdatasync(2) - synchronize a file's in-core state with storage device. + * @param fd A file descriptor. + */ + function __promisify__(fd: number): Promise; + } + /** + * Forces all currently queued I/O operations associated with the file to the + * operating system's synchronized I/O completion state. 
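Unlike `fs.createReadStream()`, the `fs.createWriteStream()` documentation above carries no example; a minimal append-mode sketch, with an illustrative file name, might look like this.

```js
import { createWriteStream } from 'fs';

// Append mode ('a') creates the file if it does not exist; with autoClose
// left at its default, the descriptor is closed when the stream finishes.
const out = createWriteStream('app.log', { flags: 'a' });
out.write('first line\n');
out.end('last line\n');
out.on('close', () => console.log('stream closed'));
```

As noted above, overwriting bytes in place rather than replacing the file would instead use `flags: 'r+'` together with a `start` offset.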
Refer to the POSIX [`fdatasync(2)`](http://man7.org/linux/man-pages/man2/fdatasync.2.html) documentation for details. Returns `undefined`. + * @since v0.1.96 + */ + export function fdatasyncSync(fd: number): void; + /** + * Asynchronously copies `src` to `dest`. By default, `dest` is overwritten if it + * already exists. No arguments other than a possible exception are given to the + * callback function. Node.js makes no guarantees about the atomicity of the copy + * operation. If an error occurs after the destination file has been opened for + * writing, Node.js will attempt to remove the destination. + * + * `mode` is an optional integer that specifies the behavior + * of the copy operation. It is possible to create a mask consisting of the bitwise + * OR of two or more values (e.g.`fs.constants.COPYFILE_EXCL | fs.constants.COPYFILE_FICLONE`). + * + * * `fs.constants.COPYFILE_EXCL`: The copy operation will fail if `dest` already + * exists. + * * `fs.constants.COPYFILE_FICLONE`: The copy operation will attempt to create a + * copy-on-write reflink. If the platform does not support copy-on-write, then a + * fallback copy mechanism is used. + * * `fs.constants.COPYFILE_FICLONE_FORCE`: The copy operation will attempt to + * create a copy-on-write reflink. If the platform does not support + * copy-on-write, then the operation will fail. + * + * ```js + * import { copyFile, constants } from 'fs'; + * + * function callback(err) { + * if (err) throw err; + * console.log('source.txt was copied to destination.txt'); + * } + * + * // destination.txt will be created or overwritten by default. + * copyFile('source.txt', 'destination.txt', callback); + * + * // By using COPYFILE_EXCL, the operation will fail if destination.txt exists. + * copyFile('source.txt', 'destination.txt', constants.COPYFILE_EXCL, callback); + * ``` + * @since v8.5.0 + * @param src source filename to copy + * @param dest destination filename of the copy operation + * @param [mode=0] modifiers for copy operation. + */ + export function copyFile(src: PathLike, dest: PathLike, callback: NoParamCallback): void; + export function copyFile(src: PathLike, dest: PathLike, mode: number, callback: NoParamCallback): void; + export namespace copyFile { + function __promisify__(src: PathLike, dst: PathLike, mode?: number): Promise; + } + /** + * Synchronously copies `src` to `dest`. By default, `dest` is overwritten if it + * already exists. Returns `undefined`. Node.js makes no guarantees about the + * atomicity of the copy operation. If an error occurs after the destination file + * has been opened for writing, Node.js will attempt to remove the destination. + * + * `mode` is an optional integer that specifies the behavior + * of the copy operation. It is possible to create a mask consisting of the bitwise + * OR of two or more values (e.g.`fs.constants.COPYFILE_EXCL | fs.constants.COPYFILE_FICLONE`). + * + * * `fs.constants.COPYFILE_EXCL`: The copy operation will fail if `dest` already + * exists. + * * `fs.constants.COPYFILE_FICLONE`: The copy operation will attempt to create a + * copy-on-write reflink. If the platform does not support copy-on-write, then a + * fallback copy mechanism is used. + * * `fs.constants.COPYFILE_FICLONE_FORCE`: The copy operation will attempt to + * create a copy-on-write reflink. If the platform does not support + * copy-on-write, then the operation will fail. + * + * ```js + * import { copyFileSync, constants } from 'fs'; + * + * // destination.txt will be created or overwritten by default. 
+ * copyFileSync('source.txt', 'destination.txt'); + * console.log('source.txt was copied to destination.txt'); + * + * // By using COPYFILE_EXCL, the operation will fail if destination.txt exists. + * copyFileSync('source.txt', 'destination.txt', constants.COPYFILE_EXCL); + * ``` + * @since v8.5.0 + * @param src source filename to copy + * @param dest destination filename of the copy operation + * @param [mode=0] modifiers for copy operation. + */ + export function copyFileSync(src: PathLike, dest: PathLike, mode?: number): void; + /** + * Write an array of `ArrayBufferView`s to the file specified by `fd` using`writev()`. + * + * `position` is the offset from the beginning of the file where this data + * should be written. If `typeof position !== 'number'`, the data will be written + * at the current position. + * + * The callback will be given three arguments: `err`, `bytesWritten`, and`buffers`. `bytesWritten` is how many bytes were written from `buffers`. + * + * If this method is `util.promisify()` ed, it returns a promise for an`Object` with `bytesWritten` and `buffers` properties. + * + * It is unsafe to use `fs.writev()` multiple times on the same file without + * waiting for the callback. For this scenario, use {@link createWriteStream}. + * + * On Linux, positional writes don't work when the file is opened in append mode. + * The kernel ignores the position argument and always appends the data to + * the end of the file. + * @since v12.9.0 + */ + export function writev(fd: number, buffers: ReadonlyArray, cb: (err: NodeJS.ErrnoException | null, bytesWritten: number, buffers: NodeJS.ArrayBufferView[]) => void): void; + export function writev( + fd: number, + buffers: ReadonlyArray, + position: number, + cb: (err: NodeJS.ErrnoException | null, bytesWritten: number, buffers: NodeJS.ArrayBufferView[]) => void + ): void; + export interface WriteVResult { + bytesWritten: number; + buffers: NodeJS.ArrayBufferView[]; + } + export namespace writev { + function __promisify__(fd: number, buffers: ReadonlyArray, position?: number): Promise; + } + /** + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link writev}. + * @since v12.9.0 + * @return The number of bytes written. + */ + export function writevSync(fd: number, buffers: ReadonlyArray, position?: number): number; + /** + * Read from a file specified by `fd` and write to an array of `ArrayBufferView`s + * using `readv()`. + * + * `position` is the offset from the beginning of the file from where data + * should be read. If `typeof position !== 'number'`, the data will be read + * from the current position. + * + * The callback will be given three arguments: `err`, `bytesRead`, and`buffers`. `bytesRead` is how many bytes were read from the file. + * + * If this method is invoked as its `util.promisify()` ed version, it returns + * a promise for an `Object` with `bytesRead` and `buffers` properties. 
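`fs.writev()` and `fs.readv()` above are described but not demonstrated; a minimal gather-write sketch, with an illustrative file name, might look like this (`readv()` is the mirror-image scatter read).

```js
import { open, writev, close } from 'fs';
import { Buffer } from 'buffer';

// Gather two buffers into a single write at position 0 of the file.
open('vectors.bin', 'w+', (err, fd) => {
  if (err) throw err;

  const buffers = [Buffer.from('hello '), Buffer.from('world\n')];
  writev(fd, buffers, 0, (err, bytesWritten) => {
    if (err) throw err;
    console.log(`wrote ${bytesWritten} bytes`);
    close(fd, (err) => {
      if (err) throw err;
    });
  });
});
```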
+ * @since v13.13.0, v12.17.0 + */ + export function readv(fd: number, buffers: ReadonlyArray, cb: (err: NodeJS.ErrnoException | null, bytesRead: number, buffers: NodeJS.ArrayBufferView[]) => void): void; + export function readv( + fd: number, + buffers: ReadonlyArray, + position: number, + cb: (err: NodeJS.ErrnoException | null, bytesRead: number, buffers: NodeJS.ArrayBufferView[]) => void + ): void; + export interface ReadVResult { + bytesRead: number; + buffers: NodeJS.ArrayBufferView[]; + } + export namespace readv { + function __promisify__(fd: number, buffers: ReadonlyArray, position?: number): Promise; + } + /** + * For detailed information, see the documentation of the asynchronous version of + * this API: {@link readv}. + * @since v13.13.0, v12.17.0 + * @return The number of bytes read. + */ + export function readvSync(fd: number, buffers: ReadonlyArray, position?: number): number; + export interface OpenDirOptions { + encoding?: BufferEncoding | undefined; + /** + * Number of directory entries that are buffered + * internally when reading from the directory. Higher values lead to better + * performance but higher memory usage. + * @default 32 + */ + bufferSize?: number | undefined; + } + /** + * Synchronously open a directory. See [`opendir(3)`](http://man7.org/linux/man-pages/man3/opendir.3.html). + * + * Creates an `fs.Dir`, which contains all further functions for reading from + * and cleaning up the directory. + * + * The `encoding` option sets the encoding for the `path` while opening the + * directory and subsequent read operations. + * @since v12.12.0 + */ + export function opendirSync(path: PathLike, options?: OpenDirOptions): Dir; + /** + * Asynchronously open a directory. See the POSIX [`opendir(3)`](http://man7.org/linux/man-pages/man3/opendir.3.html) documentation for + * more details. + * + * Creates an `fs.Dir`, which contains all further functions for reading from + * and cleaning up the directory. + * + * The `encoding` option sets the encoding for the `path` while opening the + * directory and subsequent read operations. + * @since v12.12.0 + */ + export function opendir(path: PathLike, cb: (err: NodeJS.ErrnoException | null, dir: Dir) => void): void; + export function opendir(path: PathLike, options: OpenDirOptions, cb: (err: NodeJS.ErrnoException | null, dir: Dir) => void): void; + export namespace opendir { + function __promisify__(path: PathLike, options?: OpenDirOptions): Promise; + } + export interface BigIntStats extends StatsBase { + atimeNs: bigint; + mtimeNs: bigint; + ctimeNs: bigint; + birthtimeNs: bigint; + } + export interface BigIntOptions { + bigint: true; + } + export interface StatOptions { + bigint?: boolean | undefined; + } + export interface StatSyncOptions extends StatOptions { + throwIfNoEntry?: boolean | undefined; + } + interface CopyOptionsBase { + /** + * Dereference symlinks + * @default false + */ + dereference?: boolean; + /** + * When `force` is `false`, and the destination + * exists, throw an error. + * @default false + */ + errorOnExist?: boolean; + /** + * Overwrite existing file or directory. _The copy + * operation will ignore errors if you set this to false and the destination + * exists. Use the `errorOnExist` option to change this behavior. + * @default true + */ + force?: boolean; + /** + * When `true` timestamps from `src` will + * be preserved. + * @default false + */ + preserveTimestamps?: boolean; + /** + * Copy directories recursively. 
+ * @default false + */ + recursive?: boolean; + /** + * When true, path resolution for symlinks will be skipped + * @default false + */ + verbatimSymlinks?: boolean; + } + export interface CopyOptions extends CopyOptionsBase { + /** + * Function to filter copied files/directories. Return + * `true` to copy the item, `false` to ignore it. + */ + filter?(source: string, destination: string): boolean | Promise; + } + export interface CopySyncOptions extends CopyOptionsBase { + /** + * Function to filter copied files/directories. Return + * `true` to copy the item, `false` to ignore it. + */ + filter?(source: string, destination: string): boolean; + } + /** + * Asynchronously copies the entire directory structure from `src` to `dest`, + * including subdirectories and files. + * + * When copying a directory to another directory, globs are not supported and + * behavior is similar to `cp dir1/ dir2/`. + * @since v16.7.0 + * @experimental + * @param src source path to copy. + * @param dest destination path to copy to. + */ + export function cp(source: string | URL, destination: string | URL, callback: (err: NodeJS.ErrnoException | null) => void): void; + export function cp(source: string | URL, destination: string | URL, opts: CopyOptions, callback: (err: NodeJS.ErrnoException | null) => void): void; + /** + * Synchronously copies the entire directory structure from `src` to `dest`, + * including subdirectories and files. + * + * When copying a directory to another directory, globs are not supported and + * behavior is similar to `cp dir1/ dir2/`. + * @since v16.7.0 + * @experimental + * @param src source path to copy. + * @param dest destination path to copy to. + */ + export function cpSync(source: string | URL, destination: string | URL, opts?: CopySyncOptions): void; +} +declare module 'node:fs' { + export * from 'fs'; +} diff --git a/node_modules/@types/node/ts4.8/fs/promises.d.ts b/node_modules/@types/node/ts4.8/fs/promises.d.ts new file mode 100755 index 0000000..aca2fd5 --- /dev/null +++ b/node_modules/@types/node/ts4.8/fs/promises.d.ts @@ -0,0 +1,1138 @@ +/** + * The `fs/promises` API provides asynchronous file system methods that return + * promises. + * + * The promise APIs use the underlying Node.js threadpool to perform file + * system operations off the event loop thread. These operations are not + * synchronized or threadsafe. Care must be taken when performing multiple + * concurrent modifications on the same file or data corruption may occur. 
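The module comment above warns that the promise APIs are not synchronized or threadsafe; a minimal sketch of what that means in practice, using an illustrative file name, is to await each modification before starting the next rather than letting writes race.

```js
import { writeFile, appendFile } from 'fs/promises';

// Racy: unsynchronized writes to the same path may interleave or clobber
// each other.
// await Promise.all([appendFile('out.txt', 'aaa'), appendFile('out.txt', 'bbb')]);

// Ordered: each modification completes before the next one starts.
await writeFile('out.txt', 'header\n');
await appendFile('out.txt', 'first entry\n');
await appendFile('out.txt', 'second entry\n');
```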
+ * @since v10.0.0 + */ +declare module 'fs/promises' { + import { Abortable } from 'node:events'; + import { Stream } from 'node:stream'; + import { ReadableStream } from 'node:stream/web'; + import { + BigIntStats, + BufferEncodingOption, + constants as fsConstants, + CopyOptions, + Dir, + Dirent, + MakeDirectoryOptions, + Mode, + ObjectEncodingOptions, + OpenDirOptions, + OpenMode, + PathLike, + ReadStream, + ReadVResult, + RmDirOptions, + RmOptions, + StatOptions, + Stats, + TimeLike, + WatchEventType, + WatchOptions, + WriteStream, + WriteVResult, + } from 'node:fs'; + import { Interface as ReadlineInterface } from 'node:readline'; + + interface FileChangeInfo { + eventType: WatchEventType; + filename: T; + } + interface FlagAndOpenMode { + mode?: Mode | undefined; + flag?: OpenMode | undefined; + } + interface FileReadResult { + bytesRead: number; + buffer: T; + } + interface FileReadOptions { + /** + * @default `Buffer.alloc(0xffff)` + */ + buffer?: T; + /** + * @default 0 + */ + offset?: number | null; + /** + * @default `buffer.byteLength` + */ + length?: number | null; + position?: number | null; + } + interface CreateReadStreamOptions { + encoding?: BufferEncoding | null | undefined; + autoClose?: boolean | undefined; + emitClose?: boolean | undefined; + start?: number | undefined; + end?: number | undefined; + highWaterMark?: number | undefined; + } + interface CreateWriteStreamOptions { + encoding?: BufferEncoding | null | undefined; + autoClose?: boolean | undefined; + emitClose?: boolean | undefined; + start?: number | undefined; + } + // TODO: Add `EventEmitter` close + interface FileHandle { + /** + * The numeric file descriptor managed by the {FileHandle} object. + * @since v10.0.0 + */ + readonly fd: number; + /** + * Alias of `filehandle.writeFile()`. + * + * When operating on file handles, the mode cannot be changed from what it was set + * to with `fsPromises.open()`. Therefore, this is equivalent to `filehandle.writeFile()`. + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + appendFile(data: string | Uint8Array, options?: (ObjectEncodingOptions & FlagAndOpenMode) | BufferEncoding | null): Promise; + /** + * Changes the ownership of the file. A wrapper for [`chown(2)`](http://man7.org/linux/man-pages/man2/chown.2.html). + * @since v10.0.0 + * @param uid The file's new owner's user id. + * @param gid The file's new group's group id. + * @return Fulfills with `undefined` upon success. + */ + chown(uid: number, gid: number): Promise; + /** + * Modifies the permissions on the file. See [`chmod(2)`](http://man7.org/linux/man-pages/man2/chmod.2.html). + * @since v10.0.0 + * @param mode the file mode bit mask. + * @return Fulfills with `undefined` upon success. + */ + chmod(mode: Mode): Promise; + /** + * Unlike the 16 kb default `highWaterMark` for a `stream.Readable`, the stream + * returned by this method has a default `highWaterMark` of 64 kb. + * + * `options` can include `start` and `end` values to read a range of bytes from + * the file instead of the entire file. Both `start` and `end` are inclusive and + * start counting at 0, allowed values are in the + * \[0, [`Number.MAX_SAFE_INTEGER`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/MAX_SAFE_INTEGER)\] range. If `start` is + * omitted or `undefined`, `filehandle.createReadStream()` reads sequentially from + * the current file position. The `encoding` can be any one of those accepted by `Buffer`. 
+ * + * If the `FileHandle` points to a character device that only supports blocking + * reads (such as keyboard or sound card), read operations do not finish until data + * is available. This can prevent the process from exiting and the stream from + * closing naturally. + * + * By default, the stream will emit a `'close'` event after it has been + * destroyed. Set the `emitClose` option to `false` to change this behavior. + * + * ```js + * import { open } from 'fs/promises'; + * + * const fd = await open('/dev/input/event0'); + * // Create a stream from some character device. + * const stream = fd.createReadStream(); + * setTimeout(() => { + * stream.close(); // This may not close the stream. + * // Artificially marking end-of-stream, as if the underlying resource had + * // indicated end-of-file by itself, allows the stream to close. + * // This does not cancel pending read operations, and if there is such an + * // operation, the process may still not be able to exit successfully + * // until it finishes. + * stream.push(null); + * stream.read(0); + * }, 100); + * ``` + * + * If `autoClose` is false, then the file descriptor won't be closed, even if + * there's an error. It is the application's responsibility to close it and make + * sure there's no file descriptor leak. If `autoClose` is set to true (default + * behavior), on `'error'` or `'end'` the file descriptor will be closed + * automatically. + * + * An example to read the last 10 bytes of a file which is 100 bytes long: + * + * ```js + * import { open } from 'fs/promises'; + * + * const fd = await open('sample.txt'); + * fd.createReadStream({ start: 90, end: 99 }); + * ``` + * @since v16.11.0 + */ + createReadStream(options?: CreateReadStreamOptions): ReadStream; + /** + * `options` may also include a `start` option to allow writing data at some + * position past the beginning of the file, allowed values are in the + * \[0, [`Number.MAX_SAFE_INTEGER`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/MAX_SAFE_INTEGER)\] range. Modifying a file rather than + * replacing it may require the `flags` `open` option to be set to `r+` rather than + * the default `r`. The `encoding` can be any one of those accepted by `Buffer`. + * + * If `autoClose` is set to true (default behavior) on `'error'` or `'finish'`the file descriptor will be closed automatically. If `autoClose` is false, + * then the file descriptor won't be closed, even if there's an error. + * It is the application's responsibility to close it and make sure there's no + * file descriptor leak. + * + * By default, the stream will emit a `'close'` event after it has been + * destroyed. Set the `emitClose` option to `false` to change this behavior. + * @since v16.11.0 + */ + createWriteStream(options?: CreateWriteStreamOptions): WriteStream; + /** + * Forces all currently queued I/O operations associated with the file to the + * operating system's synchronized I/O completion state. Refer to the POSIX [`fdatasync(2)`](http://man7.org/linux/man-pages/man2/fdatasync.2.html) documentation for details. + * + * Unlike `filehandle.sync` this method does not flush modified metadata. + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + datasync(): Promise; + /** + * Request that all data for the open file descriptor is flushed to the storage + * device. The specific implementation is operating system and device specific. 
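`filehandle.datasync()` above has no example; a minimal journaling-style sketch (file name illustrative) might look like this.

```js
import { open } from 'fs/promises';

// Append a record, then flush the file data to the storage device before
// acknowledging it. datasync() does not flush modified metadata; sync() would.
const handle = await open('journal.log', 'a');
await handle.appendFile('checkpoint\n');
await handle.datasync();
await handle.close();
```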
+ * Refer to the POSIX [`fsync(2)`](http://man7.org/linux/man-pages/man2/fsync.2.html) documentation for more detail. + * @since v10.0.0 + * @return Fufills with `undefined` upon success. + */ + sync(): Promise; + /** + * Reads data from the file and stores that in the given buffer. + * + * If the file is not modified concurrently, the end-of-file is reached when the + * number of bytes read is zero. + * @since v10.0.0 + * @param buffer A buffer that will be filled with the file data read. + * @param offset The location in the buffer at which to start filling. + * @param length The number of bytes to read. + * @param position The location where to begin reading data from the file. If `null`, data will be read from the current file position, and the position will be updated. If `position` is an + * integer, the current file position will remain unchanged. + * @return Fulfills upon success with an object with two properties: + */ + read(buffer: T, offset?: number | null, length?: number | null, position?: number | null): Promise>; + read(options?: FileReadOptions): Promise>; + /** + * Returns a `ReadableStream` that may be used to read the files data. + * + * An error will be thrown if this method is called more than once or is called after the `FileHandle` is closed + * or closing. + * + * ```js + * import { open } from 'node:fs/promises'; + * + * const file = await open('./some/file/to/read'); + * + * for await (const chunk of file.readableWebStream()) + * console.log(chunk); + * + * await file.close(); + * ``` + * + * While the `ReadableStream` will read the file to completion, it will not close the `FileHandle` automatically. User code must still call the `fileHandle.close()` method. + * + * @since v17.0.0 + * @experimental + */ + readableWebStream(): ReadableStream; + /** + * Asynchronously reads the entire contents of a file. + * + * If `options` is a string, then it specifies the `encoding`. + * + * The `FileHandle` has to support reading. + * + * If one or more `filehandle.read()` calls are made on a file handle and then a`filehandle.readFile()` call is made, the data will be read from the current + * position till the end of the file. It doesn't always read from the beginning + * of the file. + * @since v10.0.0 + * @return Fulfills upon a successful read with the contents of the file. If no encoding is specified (using `options.encoding`), the data is returned as a {Buffer} object. Otherwise, the + * data will be a string. + */ + readFile( + options?: { + encoding?: null | undefined; + flag?: OpenMode | undefined; + } | null + ): Promise; + /** + * Asynchronously reads the entire contents of a file. The underlying file will _not_ be closed automatically. + * The `FileHandle` must have been opened for reading. + * @param options An object that may contain an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + readFile( + options: + | { + encoding: BufferEncoding; + flag?: OpenMode | undefined; + } + | BufferEncoding + ): Promise; + /** + * Asynchronously reads the entire contents of a file. The underlying file will _not_ be closed automatically. + * The `FileHandle` must have been opened for reading. + * @param options An object that may contain an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + readFile( + options?: + | (ObjectEncodingOptions & { + flag?: OpenMode | undefined; + }) + | BufferEncoding + | null + ): Promise; + /** + * Convenience method to create a `readline` interface and stream over the file. 
For example: + * + * ```js + * import { open } from 'node:fs/promises'; + * + * const file = await open('./some/file/to/read'); + * + * for await (const line of file.readLines()) { + * console.log(line); + * } + * ``` + * + * @since v18.11.0 + * @param options See `filehandle.createReadStream()` for the options. + */ + readLines(options?: CreateReadStreamOptions): ReadlineInterface; + /** + * @since v10.0.0 + * @return Fulfills with an {fs.Stats} for the file. + */ + stat( + opts?: StatOptions & { + bigint?: false | undefined; + } + ): Promise; + stat( + opts: StatOptions & { + bigint: true; + } + ): Promise; + stat(opts?: StatOptions): Promise; + /** + * Truncates the file. + * + * If the file was larger than `len` bytes, only the first `len` bytes will be + * retained in the file. + * + * The following example retains only the first four bytes of the file: + * + * ```js + * import { open } from 'fs/promises'; + * + * let filehandle = null; + * try { + * filehandle = await open('temp.txt', 'r+'); + * await filehandle.truncate(4); + * } finally { + * await filehandle?.close(); + * } + * ``` + * + * If the file previously was shorter than `len` bytes, it is extended, and the + * extended part is filled with null bytes (`'\0'`): + * + * If `len` is negative then `0` will be used. + * @since v10.0.0 + * @param [len=0] + * @return Fulfills with `undefined` upon success. + */ + truncate(len?: number): Promise; + /** + * Change the file system timestamps of the object referenced by the `FileHandle` then resolves the promise with no arguments upon success. + * @since v10.0.0 + */ + utimes(atime: TimeLike, mtime: TimeLike): Promise; + /** + * Asynchronously writes data to a file, replacing the file if it already exists.`data` can be a string, a buffer, an + * [AsyncIterable](https://tc39.github.io/ecma262/#sec-asynciterable-interface) or + * [Iterable](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols#The_iterable_protocol) object. + * The promise is resolved with no arguments upon success. + * + * If `options` is a string, then it specifies the `encoding`. + * + * The `FileHandle` has to support writing. + * + * It is unsafe to use `filehandle.writeFile()` multiple times on the same file + * without waiting for the promise to be resolved (or rejected). + * + * If one or more `filehandle.write()` calls are made on a file handle and then a`filehandle.writeFile()` call is made, the data will be written from the + * current position till the end of the file. It doesn't always write from the + * beginning of the file. + * @since v10.0.0 + */ + writeFile(data: string | Uint8Array, options?: (ObjectEncodingOptions & FlagAndOpenMode & Abortable) | BufferEncoding | null): Promise; + /** + * Write `buffer` to the file. + * + * The promise is resolved with an object containing two properties: + * + * It is unsafe to use `filehandle.write()` multiple times on the same file + * without waiting for the promise to be resolved (or rejected). For this + * scenario, use `filehandle.createWriteStream()`. + * + * On Linux, positional writes do not work when the file is opened in append mode. + * The kernel ignores the position argument and always appends the data to + * the end of the file. + * @since v10.0.0 + * @param [offset=0] The start position from within `buffer` where the data to write begins. + * @param [length=buffer.byteLength - offset] The number of bytes from `buffer` to write. 
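A minimal sketch of the `filehandle.write(buffer, ...)` overload documented above, using an illustrative file name:

```js
import { open } from 'fs/promises';
import { Buffer } from 'buffer';

const handle = await open('greeting.bin', 'w');

// Write 5 bytes taken from the buffer (offset 0, length 5) at position 0
// of the file; the result reports how many bytes actually landed.
const { bytesWritten } = await handle.write(Buffer.from('hello'), 0, 5, 0);
console.log(`wrote ${bytesWritten} bytes`);

await handle.close();
```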
+ * @param position The offset from the beginning of the file where the data from `buffer` should be written. If `position` is not a `number`, the data will be written at the current position. + * See the POSIX pwrite(2) documentation for more detail. + */ + write( + buffer: TBuffer, + offset?: number | null, + length?: number | null, + position?: number | null + ): Promise<{ + bytesWritten: number; + buffer: TBuffer; + }>; + write( + data: string, + position?: number | null, + encoding?: BufferEncoding | null + ): Promise<{ + bytesWritten: number; + buffer: string; + }>; + /** + * Write an array of [ArrayBufferView](https://developer.mozilla.org/en-US/docs/Web/API/ArrayBufferView) s to the file. + * + * The promise is resolved with an object containing a two properties: + * + * It is unsafe to call `writev()` multiple times on the same file without waiting + * for the promise to be resolved (or rejected). + * + * On Linux, positional writes don't work when the file is opened in append mode. + * The kernel ignores the position argument and always appends the data to + * the end of the file. + * @since v12.9.0 + * @param position The offset from the beginning of the file where the data from `buffers` should be written. If `position` is not a `number`, the data will be written at the current + * position. + */ + writev(buffers: ReadonlyArray, position?: number): Promise; + /** + * Read from a file and write to an array of [ArrayBufferView](https://developer.mozilla.org/en-US/docs/Web/API/ArrayBufferView) s + * @since v13.13.0, v12.17.0 + * @param position The offset from the beginning of the file where the data should be read from. If `position` is not a `number`, the data will be read from the current position. + * @return Fulfills upon success an object containing two properties: + */ + readv(buffers: ReadonlyArray, position?: number): Promise; + /** + * Closes the file handle after waiting for any pending operation on the handle to + * complete. + * + * ```js + * import { open } from 'fs/promises'; + * + * let filehandle; + * try { + * filehandle = await open('thefile.txt', 'r'); + * } finally { + * await filehandle?.close(); + * } + * ``` + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + close(): Promise; + } + + const constants: typeof fsConstants; + + /** + * Tests a user's permissions for the file or directory specified by `path`. + * The `mode` argument is an optional integer that specifies the accessibility + * checks to be performed. `mode` should be either the value `fs.constants.F_OK`or a mask consisting of the bitwise OR of any of `fs.constants.R_OK`,`fs.constants.W_OK`, and `fs.constants.X_OK` + * (e.g.`fs.constants.W_OK | fs.constants.R_OK`). Check `File access constants` for + * possible values of `mode`. + * + * If the accessibility check is successful, the promise is resolved with no + * value. If any of the accessibility checks fail, the promise is rejected + * with an [Error](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Error) object. The following example checks if the file`/etc/passwd` can be read and + * written by the current process. 
+ * + * ```js + * import { access } from 'fs/promises'; + * import { constants } from 'fs'; + * + * try { + * await access('/etc/passwd', constants.R_OK | constants.W_OK); + * console.log('can access'); + * } catch { + * console.error('cannot access'); + * } + * ``` + * + * Using `fsPromises.access()` to check for the accessibility of a file before + * calling `fsPromises.open()` is not recommended. Doing so introduces a race + * condition, since other processes may change the file's state between the two + * calls. Instead, user code should open/read/write the file directly and handle + * the error raised if the file is not accessible. + * @since v10.0.0 + * @param [mode=fs.constants.F_OK] + * @return Fulfills with `undefined` upon success. + */ + function access(path: PathLike, mode?: number): Promise; + /** + * Asynchronously copies `src` to `dest`. By default, `dest` is overwritten if it + * already exists. + * + * No guarantees are made about the atomicity of the copy operation. If an + * error occurs after the destination file has been opened for writing, an attempt + * will be made to remove the destination. + * + * ```js + * import { constants } from 'fs'; + * import { copyFile } from 'fs/promises'; + * + * try { + * await copyFile('source.txt', 'destination.txt'); + * console.log('source.txt was copied to destination.txt'); + * } catch { + * console.log('The file could not be copied'); + * } + * + * // By using COPYFILE_EXCL, the operation will fail if destination.txt exists. + * try { + * await copyFile('source.txt', 'destination.txt', constants.COPYFILE_EXCL); + * console.log('source.txt was copied to destination.txt'); + * } catch { + * console.log('The file could not be copied'); + * } + * ``` + * @since v10.0.0 + * @param src source filename to copy + * @param dest destination filename of the copy operation + * @param [mode=0] Optional modifiers that specify the behavior of the copy operation. It is possible to create a mask consisting of the bitwise OR of two or more values (e.g. + * `fs.constants.COPYFILE_EXCL | fs.constants.COPYFILE_FICLONE`) + * @return Fulfills with `undefined` upon success. + */ + function copyFile(src: PathLike, dest: PathLike, mode?: number): Promise; + /** + * Opens a `FileHandle`. + * + * Refer to the POSIX [`open(2)`](http://man7.org/linux/man-pages/man2/open.2.html) documentation for more detail. + * + * Some characters (`< > : " / \ | ? *`) are reserved under Windows as documented + * by [Naming Files, Paths, and Namespaces](https://docs.microsoft.com/en-us/windows/desktop/FileIO/naming-a-file). Under NTFS, if the filename contains + * a colon, Node.js will open a file system stream, as described by [this MSDN page](https://docs.microsoft.com/en-us/windows/desktop/FileIO/using-streams). + * @since v10.0.0 + * @param [flags='r'] See `support of file system `flags``. + * @param [mode=0o666] Sets the file mode (permission and sticky bits) if the file is created. + * @return Fulfills with a {FileHandle} object. + */ + function open(path: PathLike, flags?: string | number, mode?: Mode): Promise; + /** + * Renames `oldPath` to `newPath`. + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + function rename(oldPath: PathLike, newPath: PathLike): Promise; + /** + * Truncates (shortens or extends the length) of the content at `path` to `len`bytes. + * @since v10.0.0 + * @param [len=0] + * @return Fulfills with `undefined` upon success. 
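`fsPromises.rename()` and `fsPromises.truncate()` above carry no examples; a minimal sketch with illustrative file names:

```js
import { rename, truncate } from 'fs/promises';

await rename('draft.txt', 'final.txt'); // move the file to its new name
await truncate('final.txt', 100);       // keep only the first 100 bytes
```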
+ */ + function truncate(path: PathLike, len?: number): Promise; + /** + * Removes the directory identified by `path`. + * + * Using `fsPromises.rmdir()` on a file (not a directory) results in the + * promise being rejected with an `ENOENT` error on Windows and an `ENOTDIR`error on POSIX. + * + * To get a behavior similar to the `rm -rf` Unix command, use `fsPromises.rm()` with options `{ recursive: true, force: true }`. + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + function rmdir(path: PathLike, options?: RmDirOptions): Promise; + /** + * Removes files and directories (modeled on the standard POSIX `rm` utility). + * @since v14.14.0 + * @return Fulfills with `undefined` upon success. + */ + function rm(path: PathLike, options?: RmOptions): Promise; + /** + * Asynchronously creates a directory. + * + * The optional `options` argument can be an integer specifying `mode` (permission + * and sticky bits), or an object with a `mode` property and a `recursive`property indicating whether parent directories should be created. Calling`fsPromises.mkdir()` when `path` is a directory + * that exists results in a + * rejection only when `recursive` is false. + * @since v10.0.0 + * @return Upon success, fulfills with `undefined` if `recursive` is `false`, or the first directory path created if `recursive` is `true`. + */ + function mkdir( + path: PathLike, + options: MakeDirectoryOptions & { + recursive: true; + } + ): Promise; + /** + * Asynchronous mkdir(2) - create a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options Either the file mode, or an object optionally specifying the file mode and whether parent folders + * should be created. If a string is passed, it is parsed as an octal integer. If not specified, defaults to `0o777`. + */ + function mkdir( + path: PathLike, + options?: + | Mode + | (MakeDirectoryOptions & { + recursive?: false | undefined; + }) + | null + ): Promise; + /** + * Asynchronous mkdir(2) - create a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options Either the file mode, or an object optionally specifying the file mode and whether parent folders + * should be created. If a string is passed, it is parsed as an octal integer. If not specified, defaults to `0o777`. + */ + function mkdir(path: PathLike, options?: Mode | MakeDirectoryOptions | null): Promise; + /** + * Reads the contents of a directory. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use for + * the filenames. If the `encoding` is set to `'buffer'`, the filenames returned + * will be passed as `Buffer` objects. + * + * If `options.withFileTypes` is set to `true`, the resolved array will contain `fs.Dirent` objects. + * + * ```js + * import { readdir } from 'fs/promises'; + * + * try { + * const files = await readdir(path); + * for (const file of files) + * console.log(file); + * } catch (err) { + * console.error(err); + * } + * ``` + * @since v10.0.0 + * @return Fulfills with an array of the names of the files in the directory excluding `'.'` and `'..'`. + */ + function readdir( + path: PathLike, + options?: + | (ObjectEncodingOptions & { + withFileTypes?: false | undefined; + }) + | BufferEncoding + | null + ): Promise; + /** + * Asynchronous readdir(3) - read a directory. + * @param path A path to a file. 
If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function readdir( + path: PathLike, + options: + | { + encoding: 'buffer'; + withFileTypes?: false | undefined; + } + | 'buffer' + ): Promise; + /** + * Asynchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function readdir( + path: PathLike, + options?: + | (ObjectEncodingOptions & { + withFileTypes?: false | undefined; + }) + | BufferEncoding + | null + ): Promise; + /** + * Asynchronous readdir(3) - read a directory. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options If called with `withFileTypes: true` the result data will be an array of Dirent. + */ + function readdir( + path: PathLike, + options: ObjectEncodingOptions & { + withFileTypes: true; + } + ): Promise; + /** + * Reads the contents of the symbolic link referred to by `path`. See the POSIX [`readlink(2)`](http://man7.org/linux/man-pages/man2/readlink.2.html) documentation for more detail. The promise is + * resolved with the`linkString` upon success. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use for + * the link path returned. If the `encoding` is set to `'buffer'`, the link path + * returned will be passed as a `Buffer` object. + * @since v10.0.0 + * @return Fulfills with the `linkString` upon success. + */ + function readlink(path: PathLike, options?: ObjectEncodingOptions | BufferEncoding | null): Promise; + /** + * Asynchronous readlink(2) - read value of a symbolic link. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function readlink(path: PathLike, options: BufferEncodingOption): Promise; + /** + * Asynchronous readlink(2) - read value of a symbolic link. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function readlink(path: PathLike, options?: ObjectEncodingOptions | string | null): Promise; + /** + * Creates a symbolic link. + * + * The `type` argument is only used on Windows platforms and can be one of `'dir'`,`'file'`, or `'junction'`. Windows junction points require the destination path + * to be absolute. When using `'junction'`, the `target` argument will + * automatically be normalized to absolute path. + * @since v10.0.0 + * @param [type='file'] + * @return Fulfills with `undefined` upon success. + */ + function symlink(target: PathLike, path: PathLike, type?: string | null): Promise; + /** + * Equivalent to `fsPromises.stat()` unless `path` refers to a symbolic link, + * in which case the link itself is stat-ed, not the file that it refers to. + * Refer to the POSIX [`lstat(2)`](http://man7.org/linux/man-pages/man2/lstat.2.html) document for more detail. 
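The `withFileTypes` form of `fsPromises.readdir()` declared above resolves with `Dirent` objects; a minimal sketch of using them:

```js
import { readdir } from 'fs/promises';

// List entries of the current directory together with their types.
const entries = await readdir('.', { withFileTypes: true });
for (const entry of entries) {
  console.log(`${entry.isDirectory() ? 'dir ' : 'file'}  ${entry.name}`);
}
```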
+ * @since v10.0.0 + * @return Fulfills with the {fs.Stats} object for the given symbolic link `path`. + */ + function lstat( + path: PathLike, + opts?: StatOptions & { + bigint?: false | undefined; + } + ): Promise; + function lstat( + path: PathLike, + opts: StatOptions & { + bigint: true; + } + ): Promise; + function lstat(path: PathLike, opts?: StatOptions): Promise; + /** + * @since v10.0.0 + * @return Fulfills with the {fs.Stats} object for the given `path`. + */ + function stat( + path: PathLike, + opts?: StatOptions & { + bigint?: false | undefined; + } + ): Promise; + function stat( + path: PathLike, + opts: StatOptions & { + bigint: true; + } + ): Promise; + function stat(path: PathLike, opts?: StatOptions): Promise; + /** + * Creates a new link from the `existingPath` to the `newPath`. See the POSIX [`link(2)`](http://man7.org/linux/man-pages/man2/link.2.html) documentation for more detail. + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + function link(existingPath: PathLike, newPath: PathLike): Promise; + /** + * If `path` refers to a symbolic link, then the link is removed without affecting + * the file or directory to which that link refers. If the `path` refers to a file + * path that is not a symbolic link, the file is deleted. See the POSIX [`unlink(2)`](http://man7.org/linux/man-pages/man2/unlink.2.html) documentation for more detail. + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + function unlink(path: PathLike): Promise; + /** + * Changes the permissions of a file. + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + function chmod(path: PathLike, mode: Mode): Promise; + /** + * Changes the permissions on a symbolic link. + * + * This method is only implemented on macOS. + * @deprecated Since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + function lchmod(path: PathLike, mode: Mode): Promise; + /** + * Changes the ownership on a symbolic link. + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + function lchown(path: PathLike, uid: number, gid: number): Promise; + /** + * Changes the access and modification times of a file in the same way as `fsPromises.utimes()`, with the difference that if the path refers to a + * symbolic link, then the link is not dereferenced: instead, the timestamps of + * the symbolic link itself are changed. + * @since v14.5.0, v12.19.0 + * @return Fulfills with `undefined` upon success. + */ + function lutimes(path: PathLike, atime: TimeLike, mtime: TimeLike): Promise; + /** + * Changes the ownership of a file. + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + function chown(path: PathLike, uid: number, gid: number): Promise; + /** + * Change the file system timestamps of the object referenced by `path`. + * + * The `atime` and `mtime` arguments follow these rules: + * + * * Values can be either numbers representing Unix epoch time, `Date`s, or a + * numeric string like `'123456789.0'`. + * * If the value can not be converted to a number, or is `NaN`, `Infinity` or`-Infinity`, an `Error` will be thrown. + * @since v10.0.0 + * @return Fulfills with `undefined` upon success. + */ + function utimes(path: PathLike, atime: TimeLike, mtime: TimeLike): Promise; + /** + * Determines the actual location of `path` using the same semantics as the`fs.realpath.native()` function. + * + * Only paths that can be converted to UTF8 strings are supported. 
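A minimal sketch of `fsPromises.stat()` as declared above (the path is illustrative):

```js
import { stat } from 'fs/promises';

const info = await stat('package.json');
console.log(`${info.size} bytes, modified ${info.mtime.toISOString()}`);
console.log(info.isFile() ? 'regular file' : 'not a regular file');
```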
+ * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use for + * the path. If the `encoding` is set to `'buffer'`, the path returned will be + * passed as a `Buffer` object. + * + * On Linux, when Node.js is linked against musl libc, the procfs file system must + * be mounted on `/proc` in order for this function to work. Glibc does not have + * this restriction. + * @since v10.0.0 + * @return Fulfills with the resolved path upon success. + */ + function realpath(path: PathLike, options?: ObjectEncodingOptions | BufferEncoding | null): Promise; + /** + * Asynchronous realpath(3) - return the canonicalized absolute pathname. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function realpath(path: PathLike, options: BufferEncodingOption): Promise; + /** + * Asynchronous realpath(3) - return the canonicalized absolute pathname. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function realpath(path: PathLike, options?: ObjectEncodingOptions | BufferEncoding | null): Promise; + /** + * Creates a unique temporary directory. A unique directory name is generated by + * appending six random characters to the end of the provided `prefix`. Due to + * platform inconsistencies, avoid trailing `X` characters in `prefix`. Some + * platforms, notably the BSDs, can return more than six random characters, and + * replace trailing `X` characters in `prefix` with random characters. + * + * The optional `options` argument can be a string specifying an encoding, or an + * object with an `encoding` property specifying the character encoding to use. + * + * ```js + * import { mkdtemp } from 'fs/promises'; + * + * try { + * await mkdtemp(path.join(os.tmpdir(), 'foo-')); + * } catch (err) { + * console.error(err); + * } + * ``` + * + * The `fsPromises.mkdtemp()` method will append the six randomly selected + * characters directly to the `prefix` string. For instance, given a directory`/tmp`, if the intention is to create a temporary directory _within_`/tmp`, the`prefix` must end with a trailing + * platform-specific path separator + * (`require('path').sep`). + * @since v10.0.0 + * @return Fulfills with a string containing the filesystem path of the newly created temporary directory. + */ + function mkdtemp(prefix: string, options?: ObjectEncodingOptions | BufferEncoding | null): Promise; + /** + * Asynchronously creates a unique temporary directory. + * Generates six random characters to be appended behind a required `prefix` to create a unique temporary directory. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. + */ + function mkdtemp(prefix: string, options: BufferEncodingOption): Promise; + /** + * Asynchronously creates a unique temporary directory. + * Generates six random characters to be appended behind a required `prefix` to create a unique temporary directory. + * @param options The encoding (or an object specifying the encoding), used as the encoding of the result. If not provided, `'utf8'` is used. 
+ */ + function mkdtemp(prefix: string, options?: ObjectEncodingOptions | BufferEncoding | null): Promise; + /** + * Asynchronously writes data to a file, replacing the file if it already exists.`data` can be a string, a buffer, an + * [AsyncIterable](https://tc39.github.io/ecma262/#sec-asynciterable-interface) or + * [Iterable](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols#The_iterable_protocol) object. + * + * The `encoding` option is ignored if `data` is a buffer. + * + * If `options` is a string, then it specifies the encoding. + * + * The `mode` option only affects the newly created file. See `fs.open()` for more details. + * + * Any specified `FileHandle` has to support writing. + * + * It is unsafe to use `fsPromises.writeFile()` multiple times on the same file + * without waiting for the promise to be settled. + * + * Similarly to `fsPromises.readFile` \- `fsPromises.writeFile` is a convenience + * method that performs multiple `write` calls internally to write the buffer + * passed to it. For performance sensitive code consider using `fs.createWriteStream()` or `filehandle.createWriteStream()`. + * + * It is possible to use an `AbortSignal` to cancel an `fsPromises.writeFile()`. + * Cancelation is "best effort", and some amount of data is likely still + * to be written. + * + * ```js + * import { writeFile } from 'fs/promises'; + * import { Buffer } from 'buffer'; + * + * try { + * const controller = new AbortController(); + * const { signal } = controller; + * const data = new Uint8Array(Buffer.from('Hello Node.js')); + * const promise = writeFile('message.txt', data, { signal }); + * + * // Abort the request before the promise settles. + * controller.abort(); + * + * await promise; + * } catch (err) { + * // When a request is aborted - err is an AbortError + * console.error(err); + * } + * ``` + * + * Aborting an ongoing request does not abort individual operating + * system requests but rather the internal buffering `fs.writeFile` performs. + * @since v10.0.0 + * @param file filename or `FileHandle` + * @return Fulfills with `undefined` upon success. + */ + function writeFile( + file: PathLike | FileHandle, + data: string | NodeJS.ArrayBufferView | Iterable | AsyncIterable | Stream, + options?: + | (ObjectEncodingOptions & { + mode?: Mode | undefined; + flag?: OpenMode | undefined; + } & Abortable) + | BufferEncoding + | null + ): Promise; + /** + * Asynchronously append data to a file, creating the file if it does not yet + * exist. `data` can be a string or a `Buffer`. + * + * If `options` is a string, then it specifies the `encoding`. + * + * The `mode` option only affects the newly created file. See `fs.open()` for more details. + * + * The `path` may be specified as a `FileHandle` that has been opened + * for appending (using `fsPromises.open()`). + * @since v10.0.0 + * @param path filename or {FileHandle} + * @return Fulfills with `undefined` upon success. + */ + function appendFile(path: PathLike | FileHandle, data: string | Uint8Array, options?: (ObjectEncodingOptions & FlagAndOpenMode) | BufferEncoding | null): Promise; + /** + * Asynchronously reads the entire contents of a file. + * + * If no encoding is specified (using `options.encoding`), the data is returned + * as a `Buffer` object. Otherwise, the data will be a string. + * + * If `options` is a string, then it specifies the encoding. + * + * When the `path` is a directory, the behavior of `fsPromises.readFile()` is + * platform-specific. 
On macOS, Linux, and Windows, the promise will be rejected + * with an error. On FreeBSD, a representation of the directory's contents will be + * returned. + * + * It is possible to abort an ongoing `readFile` using an `AbortSignal`. If a + * request is aborted the promise returned is rejected with an `AbortError`: + * + * ```js + * import { readFile } from 'fs/promises'; + * + * try { + * const controller = new AbortController(); + * const { signal } = controller; + * const promise = readFile(fileName, { signal }); + * + * // Abort the request before the promise settles. + * controller.abort(); + * + * await promise; + * } catch (err) { + * // When a request is aborted - err is an AbortError + * console.error(err); + * } + * ``` + * + * Aborting an ongoing request does not abort individual operating + * system requests but rather the internal buffering `fs.readFile` performs. + * + * Any specified `FileHandle` has to support reading. + * @since v10.0.0 + * @param path filename or `FileHandle` + * @return Fulfills with the contents of the file. + */ + function readFile( + path: PathLike | FileHandle, + options?: + | ({ + encoding?: null | undefined; + flag?: OpenMode | undefined; + } & Abortable) + | null + ): Promise; + /** + * Asynchronously reads the entire contents of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * If a `FileHandle` is provided, the underlying file will _not_ be closed automatically. + * @param options An object that may contain an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + function readFile( + path: PathLike | FileHandle, + options: + | ({ + encoding: BufferEncoding; + flag?: OpenMode | undefined; + } & Abortable) + | BufferEncoding + ): Promise; + /** + * Asynchronously reads the entire contents of a file. + * @param path A path to a file. If a URL is provided, it must use the `file:` protocol. + * If a `FileHandle` is provided, the underlying file will _not_ be closed automatically. + * @param options An object that may contain an optional flag. + * If a flag is not provided, it defaults to `'r'`. + */ + function readFile( + path: PathLike | FileHandle, + options?: + | (ObjectEncodingOptions & + Abortable & { + flag?: OpenMode | undefined; + }) + | BufferEncoding + | null + ): Promise; + /** + * Asynchronously open a directory for iterative scanning. See the POSIX [`opendir(3)`](http://man7.org/linux/man-pages/man3/opendir.3.html) documentation for more detail. + * + * Creates an `fs.Dir`, which contains all further functions for reading from + * and cleaning up the directory. + * + * The `encoding` option sets the encoding for the `path` while opening the + * directory and subsequent read operations. + * + * Example using async iteration: + * + * ```js + * import { opendir } from 'fs/promises'; + * + * try { + * const dir = await opendir('./'); + * for await (const dirent of dir) + * console.log(dirent.name); + * } catch (err) { + * console.error(err); + * } + * ``` + * + * When using the async iterator, the `fs.Dir` object will be automatically + * closed after the iterator exits. + * @since v12.12.0 + * @return Fulfills with an {fs.Dir}. + */ + function opendir(path: PathLike, options?: OpenDirOptions): Promise; + /** + * Returns an async iterator that watches for changes on `filename`, where `filename`is either a file or a directory. 
+ * + * ```js + * const { watch } = require('fs/promises'); + * + * const ac = new AbortController(); + * const { signal } = ac; + * setTimeout(() => ac.abort(), 10000); + * + * (async () => { + * try { + * const watcher = watch(__filename, { signal }); + * for await (const event of watcher) + * console.log(event); + * } catch (err) { + * if (err.name === 'AbortError') + * return; + * throw err; + * } + * })(); + * ``` + * + * On most platforms, `'rename'` is emitted whenever a filename appears or + * disappears in the directory. + * + * All the `caveats` for `fs.watch()` also apply to `fsPromises.watch()`. + * @since v15.9.0, v14.18.0 + * @return of objects with the properties: + */ + function watch( + filename: PathLike, + options: + | (WatchOptions & { + encoding: 'buffer'; + }) + | 'buffer' + ): AsyncIterable>; + /** + * Watch for changes on `filename`, where `filename` is either a file or a directory, returning an `FSWatcher`. + * @param filename A path to a file or directory. If a URL is provided, it must use the `file:` protocol. + * @param options Either the encoding for the filename provided to the listener, or an object optionally specifying encoding, persistent, and recursive options. + * If `encoding` is not supplied, the default of `'utf8'` is used. + * If `persistent` is not supplied, the default of `true` is used. + * If `recursive` is not supplied, the default of `false` is used. + */ + function watch(filename: PathLike, options?: WatchOptions | BufferEncoding): AsyncIterable>; + /** + * Watch for changes on `filename`, where `filename` is either a file or a directory, returning an `FSWatcher`. + * @param filename A path to a file or directory. If a URL is provided, it must use the `file:` protocol. + * @param options Either the encoding for the filename provided to the listener, or an object optionally specifying encoding, persistent, and recursive options. + * If `encoding` is not supplied, the default of `'utf8'` is used. + * If `persistent` is not supplied, the default of `true` is used. + * If `recursive` is not supplied, the default of `false` is used. + */ + function watch(filename: PathLike, options: WatchOptions | string): AsyncIterable> | AsyncIterable>; + /** + * Asynchronously copies the entire directory structure from `src` to `dest`, + * including subdirectories and files. + * + * When copying a directory to another directory, globs are not supported and + * behavior is similar to `cp dir1/ dir2/`. + * @since v16.7.0 + * @experimental + * @param src source path to copy. + * @param dest destination path to copy to. + * @return Fulfills with `undefined` upon success. 
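+ *
+ * A usage sketch (illustrative, not part of the upstream documentation; it
+ * assumes a `src/` directory exists and uses the `recursive` flag from `CopyOptions`):
+ *
+ * ```js
+ * import { cp } from 'fs/promises';
+ *
+ * // Copy a directory tree; `recursive: true` is required when `src` is a directory.
+ * await cp('src', 'dest', { recursive: true });
+ * ```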
+ */ + function cp(source: string | URL, destination: string | URL, opts?: CopyOptions): Promise; +} +declare module 'node:fs/promises' { + export * from 'fs/promises'; +} diff --git a/node_modules/@types/node/ts4.8/globals.d.ts b/node_modules/@types/node/ts4.8/globals.d.ts new file mode 100755 index 0000000..f401d95 --- /dev/null +++ b/node_modules/@types/node/ts4.8/globals.d.ts @@ -0,0 +1,294 @@ +// Declare "static" methods in Error +interface ErrorConstructor { + /** Create .stack property on a target object */ + captureStackTrace(targetObject: object, constructorOpt?: Function): void; + + /** + * Optional override for formatting stack traces + * + * @see https://v8.dev/docs/stack-trace-api#customizing-stack-traces + */ + prepareStackTrace?: ((err: Error, stackTraces: NodeJS.CallSite[]) => any) | undefined; + + stackTraceLimit: number; +} + +/*-----------------------------------------------* + * * + * GLOBAL * + * * + ------------------------------------------------*/ + +// For backwards compability +interface NodeRequire extends NodeJS.Require { } +interface RequireResolve extends NodeJS.RequireResolve { } +interface NodeModule extends NodeJS.Module { } + +declare var process: NodeJS.Process; +declare var console: Console; + +declare var __filename: string; +declare var __dirname: string; + +declare var require: NodeRequire; +declare var module: NodeModule; + +// Same as module.exports +declare var exports: any; + +/** + * Only available if `--expose-gc` is passed to the process. + */ +declare var gc: undefined | (() => void); + +//#region borrowed +// from https://github.com/microsoft/TypeScript/blob/38da7c600c83e7b31193a62495239a0fe478cb67/lib/lib.webworker.d.ts#L633 until moved to separate lib +/** A controller object that allows you to abort one or more DOM requests as and when desired. */ +interface AbortController { + /** + * Returns the AbortSignal object associated with this object. + */ + + readonly signal: AbortSignal; + /** + * Invoking this method will set this object's AbortSignal's aborted flag and signal to any observers that the associated activity is to be aborted. + */ + abort(): void; +} + +/** A signal object that allows you to communicate with a DOM request (such as a Fetch) and abort it if required via an AbortController object. */ +interface AbortSignal extends EventTarget { + /** + * Returns true if this AbortSignal's AbortController has signaled to abort, and false otherwise. + */ + readonly aborted: boolean; +} + +declare var AbortController: { + prototype: AbortController; + new(): AbortController; +}; + +declare var AbortSignal: { + prototype: AbortSignal; + new(): AbortSignal; + // TODO: Add abort() static +}; +//#endregion borrowed + +//#region ArrayLike.at() +interface RelativeIndexable { + /** + * Takes an integer value and returns the item at that index, + * allowing for positive and negative integers. + * Negative integers count back from the last item in the array. 
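+ *
+ * For example (illustrative, not part of the upstream documentation):
+ *
+ * ```js
+ * const values = [10, 20, 30];
+ * console.log(values.at(-1)); // 30, the last element
+ * console.log(values.at(5));  // undefined, index out of range
+ * ```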
+ */ + at(index: number): T | undefined; +} +interface String extends RelativeIndexable {} +interface Array extends RelativeIndexable {} +interface Int8Array extends RelativeIndexable {} +interface Uint8Array extends RelativeIndexable {} +interface Uint8ClampedArray extends RelativeIndexable {} +interface Int16Array extends RelativeIndexable {} +interface Uint16Array extends RelativeIndexable {} +interface Int32Array extends RelativeIndexable {} +interface Uint32Array extends RelativeIndexable {} +interface Float32Array extends RelativeIndexable {} +interface Float64Array extends RelativeIndexable {} +interface BigInt64Array extends RelativeIndexable {} +interface BigUint64Array extends RelativeIndexable {} +//#endregion ArrayLike.at() end + +/** + * @since v17.0.0 + * + * Creates a deep clone of an object. + */ +declare function structuredClone( + value: T, + transfer?: { transfer: ReadonlyArray }, +): T; + +/*----------------------------------------------* +* * +* GLOBAL INTERFACES * +* * +*-----------------------------------------------*/ +declare namespace NodeJS { + interface CallSite { + /** + * Value of "this" + */ + getThis(): unknown; + + /** + * Type of "this" as a string. + * This is the name of the function stored in the constructor field of + * "this", if available. Otherwise the object's [[Class]] internal + * property. + */ + getTypeName(): string | null; + + /** + * Current function + */ + getFunction(): Function | undefined; + + /** + * Name of the current function, typically its name property. + * If a name property is not available an attempt will be made to try + * to infer a name from the function's context. + */ + getFunctionName(): string | null; + + /** + * Name of the property [of "this" or one of its prototypes] that holds + * the current function + */ + getMethodName(): string | null; + + /** + * Name of the script [if this function was defined in a script] + */ + getFileName(): string | null; + + /** + * Current line number [if this function was defined in a script] + */ + getLineNumber(): number | null; + + /** + * Current column number [if this function was defined in a script] + */ + getColumnNumber(): number | null; + + /** + * A call site object representing the location where eval was called + * [if this function was created using a call to eval] + */ + getEvalOrigin(): string | undefined; + + /** + * Is this a toplevel invocation, that is, is "this" the global object? + */ + isToplevel(): boolean; + + /** + * Does this call take place in code defined by a call to eval? + */ + isEval(): boolean; + + /** + * Is this call in native V8 code? + */ + isNative(): boolean; + + /** + * Is this a constructor call? 
+ */ + isConstructor(): boolean; + } + + interface ErrnoException extends Error { + errno?: number | undefined; + code?: string | undefined; + path?: string | undefined; + syscall?: string | undefined; + } + + interface ReadableStream extends EventEmitter { + readable: boolean; + read(size?: number): string | Buffer; + setEncoding(encoding: BufferEncoding): this; + pause(): this; + resume(): this; + isPaused(): boolean; + pipe(destination: T, options?: { end?: boolean | undefined; }): T; + unpipe(destination?: WritableStream): this; + unshift(chunk: string | Uint8Array, encoding?: BufferEncoding): void; + wrap(oldStream: ReadableStream): this; + [Symbol.asyncIterator](): AsyncIterableIterator; + } + + interface WritableStream extends EventEmitter { + writable: boolean; + write(buffer: Uint8Array | string, cb?: (err?: Error | null) => void): boolean; + write(str: string, encoding?: BufferEncoding, cb?: (err?: Error | null) => void): boolean; + end(cb?: () => void): this; + end(data: string | Uint8Array, cb?: () => void): this; + end(str: string, encoding?: BufferEncoding, cb?: () => void): this; + } + + interface ReadWriteStream extends ReadableStream, WritableStream { } + + interface RefCounted { + ref(): this; + unref(): this; + } + + type TypedArray = + | Uint8Array + | Uint8ClampedArray + | Uint16Array + | Uint32Array + | Int8Array + | Int16Array + | Int32Array + | BigUint64Array + | BigInt64Array + | Float32Array + | Float64Array; + type ArrayBufferView = TypedArray | DataView; + + interface Require { + (id: string): any; + resolve: RequireResolve; + cache: Dict; + /** + * @deprecated + */ + extensions: RequireExtensions; + main: Module | undefined; + } + + interface RequireResolve { + (id: string, options?: { paths?: string[] | undefined; }): string; + paths(request: string): string[] | null; + } + + interface RequireExtensions extends Dict<(m: Module, filename: string) => any> { + '.js': (m: Module, filename: string) => any; + '.json': (m: Module, filename: string) => any; + '.node': (m: Module, filename: string) => any; + } + interface Module { + /** + * `true` if the module is running during the Node.js preload + */ + isPreloading: boolean; + exports: any; + require: Require; + id: string; + filename: string; + loaded: boolean; + /** @deprecated since v14.6.0 Please use `require.main` and `module.children` instead. */ + parent: Module | null | undefined; + children: Module[]; + /** + * @since v11.14.0 + * + * The directory name of the module. This is usually the same as the path.dirname() of the module.id. + */ + path: string; + paths: string[]; + } + + interface Dict { + [key: string]: T | undefined; + } + + interface ReadOnlyDict { + readonly [key: string]: T | undefined; + } +} diff --git a/node_modules/@types/node/ts4.8/globals.global.d.ts b/node_modules/@types/node/ts4.8/globals.global.d.ts new file mode 100755 index 0000000..ef1198c --- /dev/null +++ b/node_modules/@types/node/ts4.8/globals.global.d.ts @@ -0,0 +1 @@ +declare var global: typeof globalThis; diff --git a/node_modules/@types/node/ts4.8/http.d.ts b/node_modules/@types/node/ts4.8/http.d.ts new file mode 100755 index 0000000..9876c4f --- /dev/null +++ b/node_modules/@types/node/ts4.8/http.d.ts @@ -0,0 +1,1607 @@ +/** + * To use the HTTP server and client one must `require('http')`. + * + * The HTTP interfaces in Node.js are designed to support many features + * of the protocol which have been traditionally difficult to use. + * In particular, large, possibly chunk-encoded, messages. 
The interface is + * careful to never buffer entire requests or responses, so the + * user is able to stream data. + * + * HTTP message headers are represented by an object like this: + * + * ```js + * { 'content-length': '123', + * 'content-type': 'text/plain', + * 'connection': 'keep-alive', + * 'host': 'example.com', + * 'accept': '*' } + * ``` + * + * Keys are lowercased. Values are not modified. + * + * In order to support the full spectrum of possible HTTP applications, the Node.js + * HTTP API is very low-level. It deals with stream handling and message + * parsing only. It parses a message into headers and body but it does not + * parse the actual headers or the body. + * + * See `message.headers` for details on how duplicate headers are handled. + * + * The raw headers as they were received are retained in the `rawHeaders`property, which is an array of `[key, value, key2, value2, ...]`. For + * example, the previous message header object might have a `rawHeaders`list like the following: + * + * ```js + * [ 'ConTent-Length', '123456', + * 'content-LENGTH', '123', + * 'content-type', 'text/plain', + * 'CONNECTION', 'keep-alive', + * 'Host', 'example.com', + * 'accepT', '*' ] + * ``` + * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/http.js) + */ +declare module 'http' { + import * as stream from 'node:stream'; + import { URL } from 'node:url'; + import { TcpSocketConnectOpts, Socket, Server as NetServer, LookupFunction } from 'node:net'; + // incoming headers will never contain number + interface IncomingHttpHeaders extends NodeJS.Dict { + accept?: string | undefined; + 'accept-language'?: string | undefined; + 'accept-patch'?: string | undefined; + 'accept-ranges'?: string | undefined; + 'access-control-allow-credentials'?: string | undefined; + 'access-control-allow-headers'?: string | undefined; + 'access-control-allow-methods'?: string | undefined; + 'access-control-allow-origin'?: string | undefined; + 'access-control-expose-headers'?: string | undefined; + 'access-control-max-age'?: string | undefined; + 'access-control-request-headers'?: string | undefined; + 'access-control-request-method'?: string | undefined; + age?: string | undefined; + allow?: string | undefined; + 'alt-svc'?: string | undefined; + authorization?: string | undefined; + 'cache-control'?: string | undefined; + connection?: string | undefined; + 'content-disposition'?: string | undefined; + 'content-encoding'?: string | undefined; + 'content-language'?: string | undefined; + 'content-length'?: string | undefined; + 'content-location'?: string | undefined; + 'content-range'?: string | undefined; + 'content-type'?: string | undefined; + cookie?: string | undefined; + date?: string | undefined; + etag?: string | undefined; + expect?: string | undefined; + expires?: string | undefined; + forwarded?: string | undefined; + from?: string | undefined; + host?: string | undefined; + 'if-match'?: string | undefined; + 'if-modified-since'?: string | undefined; + 'if-none-match'?: string | undefined; + 'if-unmodified-since'?: string | undefined; + 'last-modified'?: string | undefined; + location?: string | undefined; + origin?: string | undefined; + pragma?: string | undefined; + 'proxy-authenticate'?: string | undefined; + 'proxy-authorization'?: string | undefined; + 'public-key-pins'?: string | undefined; + range?: string | undefined; + referer?: string | undefined; + 'retry-after'?: string | undefined; + 'sec-websocket-accept'?: string | undefined; + 'sec-websocket-extensions'?: string | undefined; 
+ 'sec-websocket-key'?: string | undefined; + 'sec-websocket-protocol'?: string | undefined; + 'sec-websocket-version'?: string | undefined; + 'set-cookie'?: string[] | undefined; + 'strict-transport-security'?: string | undefined; + tk?: string | undefined; + trailer?: string | undefined; + 'transfer-encoding'?: string | undefined; + upgrade?: string | undefined; + 'user-agent'?: string | undefined; + vary?: string | undefined; + via?: string | undefined; + warning?: string | undefined; + 'www-authenticate'?: string | undefined; + } + // outgoing headers allows numbers (as they are converted internally to strings) + type OutgoingHttpHeader = number | string | string[]; + interface OutgoingHttpHeaders extends NodeJS.Dict {} + interface ClientRequestArgs { + signal?: AbortSignal | undefined; + protocol?: string | null | undefined; + host?: string | null | undefined; + hostname?: string | null | undefined; + family?: number | undefined; + port?: number | string | null | undefined; + defaultPort?: number | string | undefined; + localAddress?: string | undefined; + socketPath?: string | undefined; + /** + * @default 8192 + */ + maxHeaderSize?: number | undefined; + method?: string | undefined; + path?: string | null | undefined; + headers?: OutgoingHttpHeaders | undefined; + auth?: string | null | undefined; + agent?: Agent | boolean | undefined; + _defaultAgent?: Agent | undefined; + timeout?: number | undefined; + setHost?: boolean | undefined; + // https://github.com/nodejs/node/blob/master/lib/_http_client.js#L278 + createConnection?: + | ((options: ClientRequestArgs, oncreate: (err: Error, socket: Socket) => void) => Socket) + | undefined; + lookup?: LookupFunction | undefined; + } + interface ServerOptions< + Request extends typeof IncomingMessage = typeof IncomingMessage, + Response extends typeof ServerResponse = typeof ServerResponse, + > { + IncomingMessage?: Request | undefined; + ServerResponse?: Response | undefined; + /** + * Optionally overrides the value of + * `--max-http-header-size` for requests received by this server, i.e. + * the maximum length of request headers in bytes. + * @default 8192 + */ + maxHeaderSize?: number | undefined; + /** + * Use an insecure HTTP parser that accepts invalid HTTP headers when true. + * Using the insecure parser should be avoided. + * See --insecure-http-parser for more information. + * @default false + */ + insecureHTTPParser?: boolean | undefined; + /** + * If set to `true`, it disables the use of Nagle's algorithm immediately after a new incoming connection is received. + * @default false + * @since v16.5.0 + */ + noDelay?: boolean | undefined; + /** + * If set to `true`, it enables keep-alive functionality on the socket immediately after a new incoming connection is received, + * similarly on what is done in `socket.setKeepAlive([enable][, initialDelay])`. + * @default false + * @since v16.5.0 + */ + keepAlive?: boolean | undefined; + /** + * If set to a positive number, it sets the initial delay before the first keepalive probe is sent on an idle socket. 
+ * @default 0 + * @since v16.5.0 + */ + keepAliveInitialDelay?: number | undefined; + } + type RequestListener< + Request extends typeof IncomingMessage = typeof IncomingMessage, + Response extends typeof ServerResponse = typeof ServerResponse, + > = (req: InstanceType, res: InstanceType & { req: InstanceType }) => void; + /** + * @since v0.1.17 + */ + class Server< + Request extends typeof IncomingMessage = typeof IncomingMessage, + Response extends typeof ServerResponse = typeof ServerResponse, + > extends NetServer { + constructor(requestListener?: RequestListener); + constructor(options: ServerOptions, requestListener?: RequestListener); + /** + * Sets the timeout value for sockets, and emits a `'timeout'` event on + * the Server object, passing the socket as an argument, if a timeout + * occurs. + * + * If there is a `'timeout'` event listener on the Server object, then it + * will be called with the timed-out socket as an argument. + * + * By default, the Server does not timeout sockets. However, if a callback + * is assigned to the Server's `'timeout'` event, timeouts must be handled + * explicitly. + * @since v0.9.12 + * @param [msecs=0 (no timeout)] + */ + setTimeout(msecs?: number, callback?: () => void): this; + setTimeout(callback: () => void): this; + /** + * Limits maximum incoming headers count. If set to 0, no limit will be applied. + * @since v0.7.0 + */ + maxHeadersCount: number | null; + /** + * The maximum number of requests socket can handle + * before closing keep alive connection. + * + * A value of `0` will disable the limit. + * + * When the limit is reached it will set the `Connection` header value to `close`, + * but will not actually close the connection, subsequent requests sent + * after the limit is reached will get `503 Service Unavailable` as a response. + * @since v16.10.0 + */ + maxRequestsPerSocket: number | null; + /** + * The number of milliseconds of inactivity before a socket is presumed + * to have timed out. + * + * A value of `0` will disable the timeout behavior on incoming connections. + * + * The socket timeout logic is set up on connection, so changing this + * value only affects new connections to the server, not any existing connections. + * @since v0.9.12 + */ + timeout: number; + /** + * Limit the amount of time the parser will wait to receive the complete HTTP + * headers. + * + * If the timeout expires, the server responds with status 408 without + * forwarding the request to the request listener and then closes the connection. + * + * It must be set to a non-zero value (e.g. 120 seconds) to protect against + * potential Denial-of-Service attacks in case the server is deployed without a + * reverse proxy in front. + * @since v11.3.0, v10.14.0 + */ + headersTimeout: number; + /** + * The number of milliseconds of inactivity a server needs to wait for additional + * incoming data, after it has finished writing the last response, before a socket + * will be destroyed. If the server receives new data before the keep-alive + * timeout has fired, it will reset the regular inactivity timeout, i.e.,`server.timeout`. + * + * A value of `0` will disable the keep-alive timeout behavior on incoming + * connections. + * A value of `0` makes the http server behave similarly to Node.js versions prior + * to 8.0.0, which did not have a keep-alive timeout. + * + * The socket timeout logic is set up on connection, so changing this value only + * affects new connections to the server, not any existing connections. 
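+ *
+ * A usage sketch (illustrative, not part of the upstream documentation;
+ * the port number is arbitrary):
+ *
+ * ```js
+ * const http = require('http');
+ *
+ * const server = http.createServer((req, res) => res.end('ok'));
+ * // Keep idle keep-alive sockets open for 10 seconds instead of the 5-second default.
+ * server.keepAliveTimeout = 10000;
+ * server.listen(3000);
+ * ```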
+ * @since v8.0.0 + */ + keepAliveTimeout: number; + /** + * Sets the timeout value in milliseconds for receiving the entire request from + * the client. + * + * If the timeout expires, the server responds with status 408 without + * forwarding the request to the request listener and then closes the connection. + * + * It must be set to a non-zero value (e.g. 120 seconds) to protect against + * potential Denial-of-Service attacks in case the server is deployed without a + * reverse proxy in front. + * @since v14.11.0 + */ + requestTimeout: number; + /** + * Closes all connections connected to this server. + * @since v18.2.0 + */ + closeAllConnections(): void; + /** + * Closes all connections connected to this server which are not sending a request or waiting for a response. + * @since v18.2.0 + */ + closeIdleConnections(): void; + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: 'close', listener: () => void): this; + addListener(event: 'connection', listener: (socket: Socket) => void): this; + addListener(event: 'error', listener: (err: Error) => void): this; + addListener(event: 'listening', listener: () => void): this; + addListener(event: 'checkContinue', listener: RequestListener): this; + addListener(event: 'checkExpectation', listener: RequestListener): this; + addListener(event: 'clientError', listener: (err: Error, socket: stream.Duplex) => void): this; + addListener( + event: 'connect', + listener: (req: InstanceType, socket: stream.Duplex, head: Buffer) => void, + ): this; + addListener(event: 'request', listener: RequestListener): this; + addListener( + event: 'upgrade', + listener: (req: InstanceType, socket: stream.Duplex, head: Buffer) => void, + ): this; + emit(event: string, ...args: any[]): boolean; + emit(event: 'close'): boolean; + emit(event: 'connection', socket: Socket): boolean; + emit(event: 'error', err: Error): boolean; + emit(event: 'listening'): boolean; + emit( + event: 'checkContinue', + req: InstanceType, + res: InstanceType & { req: InstanceType }, + ): boolean; + emit( + event: 'checkExpectation', + req: InstanceType, + res: InstanceType & { req: InstanceType }, + ): boolean; + emit(event: 'clientError', err: Error, socket: stream.Duplex): boolean; + emit(event: 'connect', req: InstanceType, socket: stream.Duplex, head: Buffer): boolean; + emit( + event: 'request', + req: InstanceType, + res: InstanceType & { req: InstanceType }, + ): boolean; + emit(event: 'upgrade', req: InstanceType, socket: stream.Duplex, head: Buffer): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: 'close', listener: () => void): this; + on(event: 'connection', listener: (socket: Socket) => void): this; + on(event: 'error', listener: (err: Error) => void): this; + on(event: 'listening', listener: () => void): this; + on(event: 'checkContinue', listener: RequestListener): this; + on(event: 'checkExpectation', listener: RequestListener): this; + on(event: 'clientError', listener: (err: Error, socket: stream.Duplex) => void): this; + on(event: 'connect', listener: (req: InstanceType, socket: stream.Duplex, head: Buffer) => void): this; + on(event: 'request', listener: RequestListener): this; + on(event: 'upgrade', listener: (req: InstanceType, socket: stream.Duplex, head: Buffer) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: 'close', listener: () => void): this; + once(event: 'connection', listener: (socket: Socket) => void): this; + once(event: 'error', 
listener: (err: Error) => void): this; + once(event: 'listening', listener: () => void): this; + once(event: 'checkContinue', listener: RequestListener): this; + once(event: 'checkExpectation', listener: RequestListener): this; + once(event: 'clientError', listener: (err: Error, socket: stream.Duplex) => void): this; + once( + event: 'connect', + listener: (req: InstanceType, socket: stream.Duplex, head: Buffer) => void, + ): this; + once(event: 'request', listener: RequestListener): this; + once( + event: 'upgrade', + listener: (req: InstanceType, socket: stream.Duplex, head: Buffer) => void, + ): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: 'close', listener: () => void): this; + prependListener(event: 'connection', listener: (socket: Socket) => void): this; + prependListener(event: 'error', listener: (err: Error) => void): this; + prependListener(event: 'listening', listener: () => void): this; + prependListener(event: 'checkContinue', listener: RequestListener): this; + prependListener(event: 'checkExpectation', listener: RequestListener): this; + prependListener(event: 'clientError', listener: (err: Error, socket: stream.Duplex) => void): this; + prependListener( + event: 'connect', + listener: (req: InstanceType, socket: stream.Duplex, head: Buffer) => void, + ): this; + prependListener(event: 'request', listener: RequestListener): this; + prependListener( + event: 'upgrade', + listener: (req: InstanceType, socket: stream.Duplex, head: Buffer) => void, + ): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: 'close', listener: () => void): this; + prependOnceListener(event: 'connection', listener: (socket: Socket) => void): this; + prependOnceListener(event: 'error', listener: (err: Error) => void): this; + prependOnceListener(event: 'listening', listener: () => void): this; + prependOnceListener(event: 'checkContinue', listener: RequestListener): this; + prependOnceListener(event: 'checkExpectation', listener: RequestListener): this; + prependOnceListener(event: 'clientError', listener: (err: Error, socket: stream.Duplex) => void): this; + prependOnceListener( + event: 'connect', + listener: (req: InstanceType, socket: stream.Duplex, head: Buffer) => void, + ): this; + prependOnceListener(event: 'request', listener: RequestListener): this; + prependOnceListener( + event: 'upgrade', + listener: (req: InstanceType, socket: stream.Duplex, head: Buffer) => void, + ): this; + } + /** + * This class serves as the parent class of {@link ClientRequest} and {@link ServerResponse}. It is an abstract of outgoing message from + * the perspective of the participants of HTTP transaction. + * @since v0.1.17 + */ + class OutgoingMessage extends stream.Writable { + readonly req: Request; + chunkedEncoding: boolean; + shouldKeepAlive: boolean; + useChunkedEncodingByDefault: boolean; + sendDate: boolean; + /** + * @deprecated Use `writableEnded` instead. + */ + finished: boolean; + /** + * Read-only. `true` if the headers were sent, otherwise `false`. + * @since v0.9.3 + */ + readonly headersSent: boolean; + /** + * Aliases of `outgoingMessage.socket` + * @since v0.3.0 + * @deprecated Since v15.12.0,v14.17.1 - Use `socket` instead. + */ + readonly connection: Socket | null; + /** + * Reference to the underlying socket. Usually, users will not want to access + * this property. + * + * After calling `outgoingMessage.end()`, this property will be nulled. 
+ * @since v0.3.0 + */ + readonly socket: Socket | null; + constructor(); + /** + * Once a socket is associated with the message and is connected,`socket.setTimeout()` will be called with `msecs` as the first parameter. + * @since v0.9.12 + * @param callback Optional function to be called when a timeout occurs. Same as binding to the `timeout` event. + */ + setTimeout(msecs: number, callback?: () => void): this; + /** + * Sets a single header value for the header object. + * @since v0.4.0 + * @param name Header name + * @param value Header value + */ + setHeader(name: string, value: number | string | ReadonlyArray): this; + /** + * Gets the value of HTTP header with the given name. If such a name doesn't + * exist in message, it will be `undefined`. + * @since v0.4.0 + * @param name Name of header + */ + getHeader(name: string): number | string | string[] | undefined; + /** + * Returns a shallow copy of the current outgoing headers. Since a shallow + * copy is used, array values may be mutated without additional calls to + * various header-related HTTP module methods. The keys of the returned + * object are the header names and the values are the respective header + * values. All header names are lowercase. + * + * The object returned by the `outgoingMessage.getHeaders()` method does + * not prototypically inherit from the JavaScript Object. This means that + * typical Object methods such as `obj.toString()`, `obj.hasOwnProperty()`, + * and others are not defined and will not work. + * + * ```js + * outgoingMessage.setHeader('Foo', 'bar'); + * outgoingMessage.setHeader('Set-Cookie', ['foo=bar', 'bar=baz']); + * + * const headers = outgoingMessage.getHeaders(); + * // headers === { foo: 'bar', 'set-cookie': ['foo=bar', 'bar=baz'] } + * ``` + * @since v7.7.0 + */ + getHeaders(): OutgoingHttpHeaders; + /** + * Returns an array of names of headers of the outgoing outgoingMessage. All + * names are lowercase. + * @since v7.7.0 + */ + getHeaderNames(): string[]; + /** + * Returns `true` if the header identified by `name` is currently set in the + * outgoing headers. The header name is case-insensitive. + * + * ```js + * const hasContentType = outgoingMessage.hasHeader('content-type'); + * ``` + * @since v7.7.0 + */ + hasHeader(name: string): boolean; + /** + * Removes a header that is queued for implicit sending. + * + * ```js + * outgoingMessage.removeHeader('Content-Encoding'); + * ``` + * @since v0.4.0 + * @param name Header name + */ + removeHeader(name: string): void; + /** + * Adds HTTP trailers (headers but at the end of the message) to the message. + * + * Trailers are **only** be emitted if the message is chunked encoded. If not, + * the trailer will be silently discarded. + * + * HTTP requires the `Trailer` header to be sent to emit trailers, + * with a list of header fields in its value, e.g. + * + * ```js + * message.writeHead(200, { 'Content-Type': 'text/plain', + * 'Trailer': 'Content-MD5' }); + * message.write(fileData); + * message.addTrailers({ 'Content-MD5': '7895bf4b8828b55ceaf47747b4bca667' }); + * message.end(); + * ``` + * + * Attempting to set a header field name or value that contains invalid characters + * will result in a `TypeError` being thrown. 
+ * @since v0.3.0 + */ + addTrailers(headers: OutgoingHttpHeaders | ReadonlyArray<[string, string]>): void; + /** + * Compulsorily flushes the message headers + * + * For efficiency reason, Node.js normally buffers the message headers + * until `outgoingMessage.end()` is called or the first chunk of message data + * is written. It then tries to pack the headers and data into a single TCP + * packet. + * + * It is usually desired (it saves a TCP round-trip), but not when the first + * data is not sent until possibly much later. `outgoingMessage.flushHeaders()`bypasses the optimization and kickstarts the request. + * @since v1.6.0 + */ + flushHeaders(): void; + } + /** + * This object is created internally by an HTTP server, not by the user. It is + * passed as the second parameter to the `'request'` event. + * @since v0.1.17 + */ + class ServerResponse extends OutgoingMessage { + /** + * When using implicit headers (not calling `response.writeHead()` explicitly), + * this property controls the status code that will be sent to the client when + * the headers get flushed. + * + * ```js + * response.statusCode = 404; + * ``` + * + * After response header was sent to the client, this property indicates the + * status code which was sent out. + * @since v0.4.0 + */ + statusCode: number; + /** + * When using implicit headers (not calling `response.writeHead()` explicitly), + * this property controls the status message that will be sent to the client when + * the headers get flushed. If this is left as `undefined` then the standard + * message for the status code will be used. + * + * ```js + * response.statusMessage = 'Not found'; + * ``` + * + * After response header was sent to the client, this property indicates the + * status message which was sent out. + * @since v0.11.8 + */ + statusMessage: string; + constructor(req: Request); + assignSocket(socket: Socket): void; + detachSocket(socket: Socket): void; + /** + * Sends an HTTP/1.1 100 Continue message to the client, indicating that + * the request body should be sent. See the `'checkContinue'` event on`Server`. + * @since v0.3.0 + */ + writeContinue(callback?: () => void): void; + /** + * Sends an HTTP/1.1 103 Early Hints message to the client with a Link header, + * indicating that the user agent can preload/preconnect the linked resources. + * The `hints` is an object containing the values of headers to be sent with + * early hints message. The optional `callback` argument will be called when + * the response message has been written. + * + * Example: + * + * ```js + * const earlyHintsLink = '; rel=preload; as=style'; + * response.writeEarlyHints({ + * 'link': earlyHintsLink, + * }); + * + * const earlyHintsLinks = [ + * '; rel=preload; as=style', + * '; rel=preload; as=script', + * ]; + * response.writeEarlyHints({ + * 'link': earlyHintsLinks, + * 'x-trace-id': 'id for diagnostics' + * }); + * + * const earlyHintsCallback = () => console.log('early hints message sent'); + * response.writeEarlyHints({ + * 'link': earlyHintsLinks + * }, earlyHintsCallback); + * ``` + * + * @since v18.11.0 + * @param hints An object containing the values of headers + * @param callback Will be called when the response message has been written + */ + writeEarlyHints(hints: Record, callback?: () => void): void; + /** + * Sends a response header to the request. The status code is a 3-digit HTTP + * status code, like `404`. The last argument, `headers`, are the response headers. 
+ * Optionally one can give a human-readable `statusMessage` as the second + * argument. + * + * `headers` may be an `Array` where the keys and values are in the same list. + * It is _not_ a list of tuples. So, the even-numbered offsets are key values, + * and the odd-numbered offsets are the associated values. The array is in the same + * format as `request.rawHeaders`. + * + * Returns a reference to the `ServerResponse`, so that calls can be chained. + * + * ```js + * const body = 'hello world'; + * response + * .writeHead(200, { + * 'Content-Length': Buffer.byteLength(body), + * 'Content-Type': 'text/plain' + * }) + * .end(body); + * ``` + * + * This method must only be called once on a message and it must + * be called before `response.end()` is called. + * + * If `response.write()` or `response.end()` are called before calling + * this, the implicit/mutable headers will be calculated and call this function. + * + * When headers have been set with `response.setHeader()`, they will be merged + * with any headers passed to `response.writeHead()`, with the headers passed + * to `response.writeHead()` given precedence. + * + * If this method is called and `response.setHeader()` has not been called, + * it will directly write the supplied header values onto the network channel + * without caching internally, and the `response.getHeader()` on the header + * will not yield the expected result. If progressive population of headers is + * desired with potential future retrieval and modification, use `response.setHeader()` instead. + * + * ```js + * // Returns content-type = text/plain + * const server = http.createServer((req, res) => { + * res.setHeader('Content-Type', 'text/html'); + * res.setHeader('X-Foo', 'bar'); + * res.writeHead(200, { 'Content-Type': 'text/plain' }); + * res.end('ok'); + * }); + * ``` + * + * `Content-Length` is given in bytes, not characters. Use `Buffer.byteLength()` to determine the length of the body in bytes. Node.js + * does not check whether `Content-Length` and the length of the body which has + * been transmitted are equal or not. + * + * Attempting to set a header field name or value that contains invalid characters + * will result in a `TypeError` being thrown. + * @since v0.1.30 + */ + writeHead( + statusCode: number, + statusMessage?: string, + headers?: OutgoingHttpHeaders | OutgoingHttpHeader[], + ): this; + writeHead(statusCode: number, headers?: OutgoingHttpHeaders | OutgoingHttpHeader[]): this; + /** + * Sends an HTTP/1.1 102 Processing message to the client, indicating that + * the request body should be sent. + * @since v10.0.0 + */ + writeProcessing(): void; + } + interface InformationEvent { + statusCode: number; + statusMessage: string; + httpVersion: string; + httpVersionMajor: number; + httpVersionMinor: number; + headers: IncomingHttpHeaders; + rawHeaders: string[]; + } + /** + * This object is created internally and returned from {@link request}. It + * represents an _in-progress_ request whose header has already been queued. The + * header is still mutable using the `setHeader(name, value)`,`getHeader(name)`, `removeHeader(name)` API. The actual header will + * be sent along with the first data chunk or when calling `request.end()`. + * + * To get the response, add a listener for `'response'` to the request object.`'response'` will be emitted from the request object when the response + * headers have been received. The `'response'` event is executed with one + * argument which is an instance of {@link IncomingMessage}. 
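+ *
+ * A usage sketch (illustrative, not part of the upstream documentation;
+ * the URL is a placeholder):
+ *
+ * ```js
+ * const http = require('http');
+ *
+ * const req = http.request('http://example.com/', (res) => {
+ *   console.log(`status: ${res.statusCode}`);
+ *   res.on('data', (chunk) => console.log(`received ${chunk.length} bytes`));
+ *   res.on('end', () => console.log('response complete'));
+ * });
+ * req.on('error', (err) => console.error(err));
+ * req.end();
+ * ```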
+ * + * During the `'response'` event, one can add listeners to the + * response object; particularly to listen for the `'data'` event. + * + * If no `'response'` handler is added, then the response will be + * entirely discarded. However, if a `'response'` event handler is added, + * then the data from the response object **must** be consumed, either by + * calling `response.read()` whenever there is a `'readable'` event, or + * by adding a `'data'` handler, or by calling the `.resume()` method. + * Until the data is consumed, the `'end'` event will not fire. Also, until + * the data is read it will consume memory that can eventually lead to a + * 'process out of memory' error. + * + * For backward compatibility, `res` will only emit `'error'` if there is an`'error'` listener registered. + * + * Node.js does not check whether Content-Length and the length of the + * body which has been transmitted are equal or not. + * @since v0.1.17 + */ + class ClientRequest extends OutgoingMessage { + /** + * The `request.aborted` property will be `true` if the request has + * been aborted. + * @since v0.11.14 + * @deprecated Since v17.0.0,v16.12.0 - Check `destroyed` instead. + */ + aborted: boolean; + /** + * The request host. + * @since v14.5.0, v12.19.0 + */ + host: string; + /** + * The request protocol. + * @since v14.5.0, v12.19.0 + */ + protocol: string; + /** + * When sending request through a keep-alive enabled agent, the underlying socket + * might be reused. But if server closes connection at unfortunate time, client + * may run into a 'ECONNRESET' error. + * + * ```js + * const http = require('http'); + * + * // Server has a 5 seconds keep-alive timeout by default + * http + * .createServer((req, res) => { + * res.write('hello\n'); + * res.end(); + * }) + * .listen(3000); + * + * setInterval(() => { + * // Adapting a keep-alive agent + * http.get('http://localhost:3000', { agent }, (res) => { + * res.on('data', (data) => { + * // Do nothing + * }); + * }); + * }, 5000); // Sending request on 5s interval so it's easy to hit idle timeout + * ``` + * + * By marking a request whether it reused socket or not, we can do + * automatic error retry base on it. + * + * ```js + * const http = require('http'); + * const agent = new http.Agent({ keepAlive: true }); + * + * function retriableRequest() { + * const req = http + * .get('http://localhost:3000', { agent }, (res) => { + * // ... + * }) + * .on('error', (err) => { + * // Check if retry is needed + * if (req.reusedSocket && err.code === 'ECONNRESET') { + * retriableRequest(); + * } + * }); + * } + * + * retriableRequest(); + * ``` + * @since v13.0.0, v12.16.0 + */ + reusedSocket: boolean; + /** + * Limits maximum response headers count. If set to 0, no limit will be applied. + */ + maxHeadersCount: number; + constructor(url: string | URL | ClientRequestArgs, cb?: (res: IncomingMessage) => void); + /** + * The request method. + * @since v0.1.97 + */ + method: string; + /** + * The request path. + * @since v0.4.0 + */ + path: string; + /** + * Marks the request as aborting. Calling this will cause remaining data + * in the response to be dropped and the socket to be destroyed. + * @since v0.3.8 + * @deprecated Since v14.1.0,v13.14.0 - Use `destroy` instead. + */ + abort(): void; + onSocket(socket: Socket): void; + /** + * Once a socket is assigned to this request and is connected `socket.setTimeout()` will be called. + * @since v0.5.9 + * @param timeout Milliseconds before a request times out. 
+ * @param callback Optional function to be called when a timeout occurs. Same as binding to the `'timeout'` event. + */ + setTimeout(timeout: number, callback?: () => void): this; + /** + * Once a socket is assigned to this request and is connected `socket.setNoDelay()` will be called. + * @since v0.5.9 + */ + setNoDelay(noDelay?: boolean): void; + /** + * Once a socket is assigned to this request and is connected `socket.setKeepAlive()` will be called. + * @since v0.5.9 + */ + setSocketKeepAlive(enable?: boolean, initialDelay?: number): void; + /** + * Returns an array containing the unique names of the current outgoing raw + * headers. Header names are returned with their exact casing being set. + * + * ```js + * request.setHeader('Foo', 'bar'); + * request.setHeader('Set-Cookie', ['foo=bar', 'bar=baz']); + * + * const headerNames = request.getRawHeaderNames(); + * // headerNames === ['Foo', 'Set-Cookie'] + * ``` + * @since v15.13.0, v14.17.0 + */ + getRawHeaderNames(): string[]; + /** + * @deprecated + */ + addListener(event: 'abort', listener: () => void): this; + addListener( + event: 'connect', + listener: (response: IncomingMessage, socket: Socket, head: Buffer) => void, + ): this; + addListener(event: 'continue', listener: () => void): this; + addListener(event: 'information', listener: (info: InformationEvent) => void): this; + addListener(event: 'response', listener: (response: IncomingMessage) => void): this; + addListener(event: 'socket', listener: (socket: Socket) => void): this; + addListener(event: 'timeout', listener: () => void): this; + addListener( + event: 'upgrade', + listener: (response: IncomingMessage, socket: Socket, head: Buffer) => void, + ): this; + addListener(event: 'close', listener: () => void): this; + addListener(event: 'drain', listener: () => void): this; + addListener(event: 'error', listener: (err: Error) => void): this; + addListener(event: 'finish', listener: () => void): this; + addListener(event: 'pipe', listener: (src: stream.Readable) => void): this; + addListener(event: 'unpipe', listener: (src: stream.Readable) => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + /** + * @deprecated + */ + on(event: 'abort', listener: () => void): this; + on(event: 'connect', listener: (response: IncomingMessage, socket: Socket, head: Buffer) => void): this; + on(event: 'continue', listener: () => void): this; + on(event: 'information', listener: (info: InformationEvent) => void): this; + on(event: 'response', listener: (response: IncomingMessage) => void): this; + on(event: 'socket', listener: (socket: Socket) => void): this; + on(event: 'timeout', listener: () => void): this; + on(event: 'upgrade', listener: (response: IncomingMessage, socket: Socket, head: Buffer) => void): this; + on(event: 'close', listener: () => void): this; + on(event: 'drain', listener: () => void): this; + on(event: 'error', listener: (err: Error) => void): this; + on(event: 'finish', listener: () => void): this; + on(event: 'pipe', listener: (src: stream.Readable) => void): this; + on(event: 'unpipe', listener: (src: stream.Readable) => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + /** + * @deprecated + */ + once(event: 'abort', listener: () => void): this; + once(event: 'connect', listener: (response: IncomingMessage, socket: Socket, head: Buffer) => void): this; + once(event: 'continue', listener: () => void): this; + once(event: 'information', listener: (info: InformationEvent) => void): this; 
+ once(event: 'response', listener: (response: IncomingMessage) => void): this; + once(event: 'socket', listener: (socket: Socket) => void): this; + once(event: 'timeout', listener: () => void): this; + once(event: 'upgrade', listener: (response: IncomingMessage, socket: Socket, head: Buffer) => void): this; + once(event: 'close', listener: () => void): this; + once(event: 'drain', listener: () => void): this; + once(event: 'error', listener: (err: Error) => void): this; + once(event: 'finish', listener: () => void): this; + once(event: 'pipe', listener: (src: stream.Readable) => void): this; + once(event: 'unpipe', listener: (src: stream.Readable) => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + /** + * @deprecated + */ + prependListener(event: 'abort', listener: () => void): this; + prependListener( + event: 'connect', + listener: (response: IncomingMessage, socket: Socket, head: Buffer) => void, + ): this; + prependListener(event: 'continue', listener: () => void): this; + prependListener(event: 'information', listener: (info: InformationEvent) => void): this; + prependListener(event: 'response', listener: (response: IncomingMessage) => void): this; + prependListener(event: 'socket', listener: (socket: Socket) => void): this; + prependListener(event: 'timeout', listener: () => void): this; + prependListener( + event: 'upgrade', + listener: (response: IncomingMessage, socket: Socket, head: Buffer) => void, + ): this; + prependListener(event: 'close', listener: () => void): this; + prependListener(event: 'drain', listener: () => void): this; + prependListener(event: 'error', listener: (err: Error) => void): this; + prependListener(event: 'finish', listener: () => void): this; + prependListener(event: 'pipe', listener: (src: stream.Readable) => void): this; + prependListener(event: 'unpipe', listener: (src: stream.Readable) => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + /** + * @deprecated + */ + prependOnceListener(event: 'abort', listener: () => void): this; + prependOnceListener( + event: 'connect', + listener: (response: IncomingMessage, socket: Socket, head: Buffer) => void, + ): this; + prependOnceListener(event: 'continue', listener: () => void): this; + prependOnceListener(event: 'information', listener: (info: InformationEvent) => void): this; + prependOnceListener(event: 'response', listener: (response: IncomingMessage) => void): this; + prependOnceListener(event: 'socket', listener: (socket: Socket) => void): this; + prependOnceListener(event: 'timeout', listener: () => void): this; + prependOnceListener( + event: 'upgrade', + listener: (response: IncomingMessage, socket: Socket, head: Buffer) => void, + ): this; + prependOnceListener(event: 'close', listener: () => void): this; + prependOnceListener(event: 'drain', listener: () => void): this; + prependOnceListener(event: 'error', listener: (err: Error) => void): this; + prependOnceListener(event: 'finish', listener: () => void): this; + prependOnceListener(event: 'pipe', listener: (src: stream.Readable) => void): this; + prependOnceListener(event: 'unpipe', listener: (src: stream.Readable) => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + /** + * An `IncomingMessage` object is created by {@link Server} or {@link ClientRequest} and passed as the first argument to the `'request'` and `'response'` event respectively. 
It may be used to + * access response + * status, headers and data. + * + * Different from its `socket` value which is a subclass of `stream.Duplex`, the`IncomingMessage` itself extends `stream.Readable` and is created separately to + * parse and emit the incoming HTTP headers and payload, as the underlying socket + * may be reused multiple times in case of keep-alive. + * @since v0.1.17 + */ + class IncomingMessage extends stream.Readable { + constructor(socket: Socket); + /** + * The `message.aborted` property will be `true` if the request has + * been aborted. + * @since v10.1.0 + * @deprecated Since v17.0.0,v16.12.0 - Check `message.destroyed` from stream.Readable. + */ + aborted: boolean; + /** + * In case of server request, the HTTP version sent by the client. In the case of + * client response, the HTTP version of the connected-to server. + * Probably either `'1.1'` or `'1.0'`. + * + * Also `message.httpVersionMajor` is the first integer and`message.httpVersionMinor` is the second. + * @since v0.1.1 + */ + httpVersion: string; + httpVersionMajor: number; + httpVersionMinor: number; + /** + * The `message.complete` property will be `true` if a complete HTTP message has + * been received and successfully parsed. + * + * This property is particularly useful as a means of determining if a client or + * server fully transmitted a message before a connection was terminated: + * + * ```js + * const req = http.request({ + * host: '127.0.0.1', + * port: 8080, + * method: 'POST' + * }, (res) => { + * res.resume(); + * res.on('end', () => { + * if (!res.complete) + * console.error( + * 'The connection was terminated while the message was still being sent'); + * }); + * }); + * ``` + * @since v0.3.0 + */ + complete: boolean; + /** + * Alias for `message.socket`. + * @since v0.1.90 + * @deprecated Since v16.0.0 - Use `socket`. + */ + connection: Socket; + /** + * The `net.Socket` object associated with the connection. + * + * With HTTPS support, use `request.socket.getPeerCertificate()` to obtain the + * client's authentication details. + * + * This property is guaranteed to be an instance of the `net.Socket` class, + * a subclass of `stream.Duplex`, unless the user specified a socket + * type other than `net.Socket` or internally nulled. + * @since v0.3.0 + */ + socket: Socket; + /** + * The request/response headers object. + * + * Key-value pairs of header names and values. Header names are lower-cased. + * + * ```js + * // Prints something like: + * // + * // { 'user-agent': 'curl/7.22.0', + * // host: '127.0.0.1:8000', + * // accept: '*' } + * console.log(request.getHeaders()); + * ``` + * + * Duplicates in raw headers are handled in the following ways, depending on the + * header name: + * + * * Duplicates of `age`, `authorization`, `content-length`, `content-type`,`etag`, `expires`, `from`, `host`, `if-modified-since`, `if-unmodified-since`,`last-modified`, `location`, + * `max-forwards`, `proxy-authorization`, `referer`,`retry-after`, `server`, or `user-agent` are discarded. + * * `set-cookie` is always an array. Duplicates are added to the array. + * * For duplicate `cookie` headers, the values are joined together with '; '. + * * For all other headers, the values are joined together with ', '. + * @since v0.1.5 + */ + headers: IncomingHttpHeaders; + /** + * The raw request/response headers list exactly as they were received. + * + * The keys and values are in the same list. It is _not_ a + * list of tuples. 
So, the even-numbered offsets are key values, and the + * odd-numbered offsets are the associated values. + * + * Header names are not lowercased, and duplicates are not merged. + * + * ```js + * // Prints something like: + * // + * // [ 'user-agent', + * // 'this is invalid because there can be only one', + * // 'User-Agent', + * // 'curl/7.22.0', + * // 'Host', + * // '127.0.0.1:8000', + * // 'ACCEPT', + * // '*' ] + * console.log(request.rawHeaders); + * ``` + * @since v0.11.6 + */ + rawHeaders: string[]; + /** + * The request/response trailers object. Only populated at the `'end'` event. + * @since v0.3.0 + */ + trailers: NodeJS.Dict; + /** + * The raw request/response trailer keys and values exactly as they were + * received. Only populated at the `'end'` event. + * @since v0.11.6 + */ + rawTrailers: string[]; + /** + * Calls `message.socket.setTimeout(msecs, callback)`. + * @since v0.5.9 + */ + setTimeout(msecs: number, callback?: () => void): this; + /** + * **Only valid for request obtained from {@link Server}.** + * + * The request method as a string. Read only. Examples: `'GET'`, `'DELETE'`. + * @since v0.1.1 + */ + method?: string | undefined; + /** + * **Only valid for request obtained from {@link Server}.** + * + * Request URL string. This contains only the URL that is present in the actual + * HTTP request. Take the following request: + * + * ```http + * GET /status?name=ryan HTTP/1.1 + * Accept: text/plain + * ``` + * + * To parse the URL into its parts: + * + * ```js + * new URL(request.url, `http://${request.getHeaders().host}`); + * ``` + * + * When `request.url` is `'/status?name=ryan'` and`request.getHeaders().host` is `'localhost:3000'`: + * + * ```console + * $ node + * > new URL(request.url, `http://${request.getHeaders().host}`) + * URL { + * href: 'http://localhost:3000/status?name=ryan', + * origin: 'http://localhost:3000', + * protocol: 'http:', + * username: '', + * password: '', + * host: 'localhost:3000', + * hostname: 'localhost', + * port: '3000', + * pathname: '/status', + * search: '?name=ryan', + * searchParams: URLSearchParams { 'name' => 'ryan' }, + * hash: '' + * } + * ``` + * @since v0.1.90 + */ + url?: string | undefined; + /** + * **Only valid for response obtained from {@link ClientRequest}.** + * + * The 3-digit HTTP response status code. E.G. `404`. + * @since v0.1.1 + */ + statusCode?: number | undefined; + /** + * **Only valid for response obtained from {@link ClientRequest}.** + * + * The HTTP response status message (reason phrase). E.G. `OK` or `Internal Server Error`. + * @since v0.11.10 + */ + statusMessage?: string | undefined; + /** + * Calls `destroy()` on the socket that received the `IncomingMessage`. If `error`is provided, an `'error'` event is emitted on the socket and `error` is passed + * as an argument to any listeners on the event. + * @since v0.3.0 + */ + destroy(error?: Error): this; + } + interface AgentOptions extends Partial { + /** + * Keep sockets around in a pool to be used by other requests in the future. Default = false + */ + keepAlive?: boolean | undefined; + /** + * When using HTTP KeepAlive, how often to send TCP KeepAlive packets over sockets being kept alive. Default = 1000. + * Only relevant if keepAlive is set to true. + */ + keepAliveMsecs?: number | undefined; + /** + * Maximum number of sockets to allow per host. Default for Node 0.10 is 5, default for Node 0.12 is Infinity + */ + maxSockets?: number | undefined; + /** + * Maximum number of sockets allowed for all hosts in total. 
Each request will use a new socket until the maximum is reached. Default: Infinity. + */ + maxTotalSockets?: number | undefined; + /** + * Maximum number of sockets to leave open in a free state. Only relevant if keepAlive is set to true. Default = 256. + */ + maxFreeSockets?: number | undefined; + /** + * Socket timeout in milliseconds. This will set the timeout after the socket is connected. + */ + timeout?: number | undefined; + /** + * Scheduling strategy to apply when picking the next free socket to use. + * @default `lifo` + */ + scheduling?: 'fifo' | 'lifo' | undefined; + } + /** + * An `Agent` is responsible for managing connection persistence + * and reuse for HTTP clients. It maintains a queue of pending requests + * for a given host and port, reusing a single socket connection for each + * until the queue is empty, at which time the socket is either destroyed + * or put into a pool where it is kept to be used again for requests to the + * same host and port. Whether it is destroyed or pooled depends on the`keepAlive` `option`. + * + * Pooled connections have TCP Keep-Alive enabled for them, but servers may + * still close idle connections, in which case they will be removed from the + * pool and a new connection will be made when a new HTTP request is made for + * that host and port. Servers may also refuse to allow multiple requests + * over the same connection, in which case the connection will have to be + * remade for every request and cannot be pooled. The `Agent` will still make + * the requests to that server, but each one will occur over a new connection. + * + * When a connection is closed by the client or the server, it is removed + * from the pool. Any unused sockets in the pool will be unrefed so as not + * to keep the Node.js process running when there are no outstanding requests. + * (see `socket.unref()`). + * + * It is good practice, to `destroy()` an `Agent` instance when it is no + * longer in use, because unused sockets consume OS resources. + * + * Sockets are removed from an agent when the socket emits either + * a `'close'` event or an `'agentRemove'` event. When intending to keep one + * HTTP request open for a long time without keeping it in the agent, something + * like the following may be done: + * + * ```js + * http.get(options, (res) => { + * // Do stuff + * }).on('socket', (socket) => { + * socket.emit('agentRemove'); + * }); + * ``` + * + * An agent may also be used for an individual request. By providing`{agent: false}` as an option to the `http.get()` or `http.request()`functions, a one-time use `Agent` with default options + * will be used + * for the client connection. + * + * `agent:false`: + * + * ```js + * http.get({ + * hostname: 'localhost', + * port: 80, + * path: '/', + * agent: false // Create a new agent just for this one request + * }, (res) => { + * // Do stuff with response + * }); + * ``` + * @since v0.3.4 + */ + class Agent { + /** + * By default set to 256\. For agents with `keepAlive` enabled, this + * sets the maximum number of sockets that will be left open in the free + * state. + * @since v0.11.7 + */ + maxFreeSockets: number; + /** + * By default set to `Infinity`. Determines how many concurrent sockets the agent + * can have open per origin. Origin is the returned value of `agent.getName()`. + * @since v0.3.6 + */ + maxSockets: number; + /** + * By default set to `Infinity`. Determines how many concurrent sockets the agent + * can have open. Unlike `maxSockets`, this parameter applies across all origins. 
+ * @since v14.5.0, v12.19.0 + */ + maxTotalSockets: number; + /** + * An object which contains arrays of sockets currently awaiting use by + * the agent when `keepAlive` is enabled. Do not modify. + * + * Sockets in the `freeSockets` list will be automatically destroyed and + * removed from the array on `'timeout'`. + * @since v0.11.4 + */ + readonly freeSockets: NodeJS.ReadOnlyDict; + /** + * An object which contains arrays of sockets currently in use by the + * agent. Do not modify. + * @since v0.3.6 + */ + readonly sockets: NodeJS.ReadOnlyDict; + /** + * An object which contains queues of requests that have not yet been assigned to + * sockets. Do not modify. + * @since v0.5.9 + */ + readonly requests: NodeJS.ReadOnlyDict; + constructor(opts?: AgentOptions); + /** + * Destroy any sockets that are currently in use by the agent. + * + * It is usually not necessary to do this. However, if using an + * agent with `keepAlive` enabled, then it is best to explicitly shut down + * the agent when it is no longer needed. Otherwise, + * sockets might stay open for quite a long time before the server + * terminates them. + * @since v0.11.4 + */ + destroy(): void; + } + const METHODS: string[]; + const STATUS_CODES: { + [errorCode: number]: string | undefined; + [errorCode: string]: string | undefined; + }; + /** + * Returns a new instance of {@link Server}. + * + * The `requestListener` is a function which is automatically + * added to the `'request'` event. + * @since v0.1.13 + */ + function createServer< + Request extends typeof IncomingMessage = typeof IncomingMessage, + Response extends typeof ServerResponse = typeof ServerResponse, + >(requestListener?: RequestListener): Server; + function createServer< + Request extends typeof IncomingMessage = typeof IncomingMessage, + Response extends typeof ServerResponse = typeof ServerResponse, + >( + options: ServerOptions, + requestListener?: RequestListener, + ): Server; + // although RequestOptions are passed as ClientRequestArgs to ClientRequest directly, + // create interface RequestOptions would make the naming more clear to developers + interface RequestOptions extends ClientRequestArgs {} + /** + * `options` in `socket.connect()` are also supported. + * + * Node.js maintains several connections per server to make HTTP requests. + * This function allows one to transparently issue requests. + * + * `url` can be a string or a `URL` object. If `url` is a + * string, it is automatically parsed with `new URL()`. If it is a `URL` object, it will be automatically converted to an ordinary `options` object. + * + * If both `url` and `options` are specified, the objects are merged, with the`options` properties taking precedence. + * + * The optional `callback` parameter will be added as a one-time listener for + * the `'response'` event. + * + * `http.request()` returns an instance of the {@link ClientRequest} class. The `ClientRequest` instance is a writable stream. If one needs to + * upload a file with a POST request, then write to the `ClientRequest` object. + * + * ```js + * const http = require('http'); + * + * const postData = JSON.stringify({ + * 'msg': 'Hello World!' 
+ * }); + * + * const options = { + * hostname: 'www.google.com', + * port: 80, + * path: '/upload', + * method: 'POST', + * headers: { + * 'Content-Type': 'application/json', + * 'Content-Length': Buffer.byteLength(postData) + * } + * }; + * + * const req = http.request(options, (res) => { + * console.log(`STATUS: ${res.statusCode}`); + * console.log(`HEADERS: ${JSON.stringify(res.headers)}`); + * res.setEncoding('utf8'); + * res.on('data', (chunk) => { + * console.log(`BODY: ${chunk}`); + * }); + * res.on('end', () => { + * console.log('No more data in response.'); + * }); + * }); + * + * req.on('error', (e) => { + * console.error(`problem with request: ${e.message}`); + * }); + * + * // Write data to request body + * req.write(postData); + * req.end(); + * ``` + * + * In the example `req.end()` was called. With `http.request()` one + * must always call `req.end()` to signify the end of the request - + * even if there is no data being written to the request body. + * + * If any error is encountered during the request (be that with DNS resolution, + * TCP level errors, or actual HTTP parse errors) an `'error'` event is emitted + * on the returned request object. As with all `'error'` events, if no listeners + * are registered the error will be thrown. + * + * There are a few special headers that should be noted. + * + * * Sending a 'Connection: keep-alive' will notify Node.js that the connection to + * the server should be persisted until the next request. + * * Sending a 'Content-Length' header will disable the default chunked encoding. + * * Sending an 'Expect' header will immediately send the request headers. + * Usually, when sending 'Expect: 100-continue', both a timeout and a listener + * for the `'continue'` event should be set. See RFC 2616 Section 8.2.3 for more + * information. + * * Sending an Authorization header will override using the `auth` option + * to compute basic authentication. + * + * Example using a `URL` as `options`: + * + * ```js + * const options = new URL('http://abc:xyz@example.com'); + * + * const req = http.request(options, (res) => { + * // ... + * }); + * ``` + * + * In a successful request, the following events will be emitted in the following + * order: + * + * * `'socket'` + * * `'response'` + * * `'data'` any number of times, on the `res` object + * (`'data'` will not be emitted at all if the response body is empty, for + * instance, in most redirects) + * * `'end'` on the `res` object + * * `'close'` + * + * In the case of a connection error, the following events will be emitted: + * + * * `'socket'` + * * `'error'` + * * `'close'` + * + * In the case of a premature connection close before the response is received, + * the following events will be emitted in the following order: + * + * * `'socket'` + * * `'error'` with an error with message `'Error: socket hang up'` and code`'ECONNRESET'` + * * `'close'` + * + * In the case of a premature connection close after the response is received, + * the following events will be emitted in the following order: + * + * * `'socket'` + * * `'response'` + * * `'data'` any number of times, on the `res` object + * * (connection closed here) + * * `'aborted'` on the `res` object + * * `'error'` on the `res` object with an error with message`'Error: aborted'` and code `'ECONNRESET'`. 
+ * * `'close'` + * * `'close'` on the `res` object + * + * If `req.destroy()` is called before a socket is assigned, the following + * events will be emitted in the following order: + * + * * (`req.destroy()` called here) + * * `'error'` with an error with message `'Error: socket hang up'` and code`'ECONNRESET'` + * * `'close'` + * + * If `req.destroy()` is called before the connection succeeds, the following + * events will be emitted in the following order: + * + * * `'socket'` + * * (`req.destroy()` called here) + * * `'error'` with an error with message `'Error: socket hang up'` and code`'ECONNRESET'` + * * `'close'` + * + * If `req.destroy()` is called after the response is received, the following + * events will be emitted in the following order: + * + * * `'socket'` + * * `'response'` + * * `'data'` any number of times, on the `res` object + * * (`req.destroy()` called here) + * * `'aborted'` on the `res` object + * * `'error'` on the `res` object with an error with message`'Error: aborted'` and code `'ECONNRESET'`. + * * `'close'` + * * `'close'` on the `res` object + * + * If `req.abort()` is called before a socket is assigned, the following + * events will be emitted in the following order: + * + * * (`req.abort()` called here) + * * `'abort'` + * * `'close'` + * + * If `req.abort()` is called before the connection succeeds, the following + * events will be emitted in the following order: + * + * * `'socket'` + * * (`req.abort()` called here) + * * `'abort'` + * * `'error'` with an error with message `'Error: socket hang up'` and code`'ECONNRESET'` + * * `'close'` + * + * If `req.abort()` is called after the response is received, the following + * events will be emitted in the following order: + * + * * `'socket'` + * * `'response'` + * * `'data'` any number of times, on the `res` object + * * (`req.abort()` called here) + * * `'abort'` + * * `'aborted'` on the `res` object + * * `'error'` on the `res` object with an error with message`'Error: aborted'` and code `'ECONNRESET'`. + * * `'close'` + * * `'close'` on the `res` object + * + * Setting the `timeout` option or using the `setTimeout()` function will + * not abort the request or do anything besides add a `'timeout'` event. + * + * Passing an `AbortSignal` and then calling `abort` on the corresponding`AbortController` will behave the same way as calling `.destroy()` on the + * request itself. + * @since v0.3.6 + */ + function request(options: RequestOptions | string | URL, callback?: (res: IncomingMessage) => void): ClientRequest; + function request( + url: string | URL, + options: RequestOptions, + callback?: (res: IncomingMessage) => void, + ): ClientRequest; + /** + * Since most requests are GET requests without bodies, Node.js provides this + * convenience method. The only difference between this method and {@link request} is that it sets the method to GET and calls `req.end()`automatically. The callback must take care to consume the + * response + * data for reasons stated in {@link ClientRequest} section. + * + * The `callback` is invoked with a single argument that is an instance of {@link IncomingMessage}. + * + * JSON fetching example: + * + * ```js + * http.get('http://localhost:8000/', (res) => { + * const { statusCode } = res; + * const contentType = res.headers['content-type']; + * + * let error; + * // Any 2xx status code signals a successful response but + * // here we're only checking for 200. 
+ * if (statusCode !== 200) { + * error = new Error('Request Failed.\n' + + * `Status Code: ${statusCode}`); + * } else if (!/^application\/json/.test(contentType)) { + * error = new Error('Invalid content-type.\n' + + * `Expected application/json but received ${contentType}`); + * } + * if (error) { + * console.error(error.message); + * // Consume response data to free up memory + * res.resume(); + * return; + * } + * + * res.setEncoding('utf8'); + * let rawData = ''; + * res.on('data', (chunk) => { rawData += chunk; }); + * res.on('end', () => { + * try { + * const parsedData = JSON.parse(rawData); + * console.log(parsedData); + * } catch (e) { + * console.error(e.message); + * } + * }); + * }).on('error', (e) => { + * console.error(`Got error: ${e.message}`); + * }); + * + * // Create a local server to receive data from + * const server = http.createServer((req, res) => { + * res.writeHead(200, { 'Content-Type': 'application/json' }); + * res.end(JSON.stringify({ + * data: 'Hello World!' + * })); + * }); + * + * server.listen(8000); + * ``` + * @since v0.3.6 + * @param options Accepts the same `options` as {@link request}, with the `method` always set to `GET`. Properties that are inherited from the prototype are ignored. + */ + function get(options: RequestOptions | string | URL, callback?: (res: IncomingMessage) => void): ClientRequest; + function get(url: string | URL, options: RequestOptions, callback?: (res: IncomingMessage) => void): ClientRequest; + + /** + * Performs the low-level validations on the provided name that are done when `res.setHeader(name, value)` is called. + * Passing illegal value as name will result in a TypeError being thrown, identified by `code: 'ERR_INVALID_HTTP_TOKEN'`. + * @param name Header name + * @since v14.3.0 + */ + function validateHeaderName(name: string): void; + /** + * Performs the low-level validations on the provided value that are done when `res.setHeader(name, value)` is called. + * Passing illegal value as value will result in a TypeError being thrown. + * - Undefined value error is identified by `code: 'ERR_HTTP_INVALID_HEADER_VALUE'`. + * - Invalid value character error is identified by `code: 'ERR_INVALID_CHAR'`. + * @param name Header name + * @param value Header value + * @since v14.3.0 + */ + function validateHeaderValue(name: string, value: string): void; + + let globalAgent: Agent; + /** + * Read-only property specifying the maximum allowed size of HTTP headers in bytes. + * Defaults to 16KB. Configurable using the `--max-http-header-size` CLI option. + */ + const maxHeaderSize: number; +} +declare module 'node:http' { + export * from 'http'; +} diff --git a/node_modules/@types/node/ts4.8/http2.d.ts b/node_modules/@types/node/ts4.8/http2.d.ts new file mode 100755 index 0000000..0e36826 --- /dev/null +++ b/node_modules/@types/node/ts4.8/http2.d.ts @@ -0,0 +1,2134 @@ +/** + * The `http2` module provides an implementation of the [HTTP/2](https://tools.ietf.org/html/rfc7540) protocol. 
It + * can be accessed using: + * + * ```js + * const http2 = require('http2'); + * ``` + * @since v8.4.0 + * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/http2.js) + */ +declare module 'http2' { + import EventEmitter = require('node:events'); + import * as fs from 'node:fs'; + import * as net from 'node:net'; + import * as stream from 'node:stream'; + import * as tls from 'node:tls'; + import * as url from 'node:url'; + import { IncomingHttpHeaders as Http1IncomingHttpHeaders, OutgoingHttpHeaders, IncomingMessage, ServerResponse } from 'node:http'; + export { OutgoingHttpHeaders } from 'node:http'; + export interface IncomingHttpStatusHeader { + ':status'?: number | undefined; + } + export interface IncomingHttpHeaders extends Http1IncomingHttpHeaders { + ':path'?: string | undefined; + ':method'?: string | undefined; + ':authority'?: string | undefined; + ':scheme'?: string | undefined; + } + // Http2Stream + export interface StreamPriorityOptions { + exclusive?: boolean | undefined; + parent?: number | undefined; + weight?: number | undefined; + silent?: boolean | undefined; + } + export interface StreamState { + localWindowSize?: number | undefined; + state?: number | undefined; + localClose?: number | undefined; + remoteClose?: number | undefined; + sumDependencyWeight?: number | undefined; + weight?: number | undefined; + } + export interface ServerStreamResponseOptions { + endStream?: boolean | undefined; + waitForTrailers?: boolean | undefined; + } + export interface StatOptions { + offset: number; + length: number; + } + export interface ServerStreamFileResponseOptions { + statCheck?(stats: fs.Stats, headers: OutgoingHttpHeaders, statOptions: StatOptions): void | boolean; + waitForTrailers?: boolean | undefined; + offset?: number | undefined; + length?: number | undefined; + } + export interface ServerStreamFileResponseOptionsWithError extends ServerStreamFileResponseOptions { + onError?(err: NodeJS.ErrnoException): void; + } + export interface Http2Stream extends stream.Duplex { + /** + * Set to `true` if the `Http2Stream` instance was aborted abnormally. When set, + * the `'aborted'` event will have been emitted. + * @since v8.4.0 + */ + readonly aborted: boolean; + /** + * This property shows the number of characters currently buffered to be written. + * See `net.Socket.bufferSize` for details. + * @since v11.2.0, v10.16.0 + */ + readonly bufferSize: number; + /** + * Set to `true` if the `Http2Stream` instance has been closed. + * @since v9.4.0 + */ + readonly closed: boolean; + /** + * Set to `true` if the `Http2Stream` instance has been destroyed and is no longer + * usable. + * @since v8.4.0 + */ + readonly destroyed: boolean; + /** + * Set to `true` if the `END_STREAM` flag was set in the request or response + * HEADERS frame received, indicating that no additional data should be received + * and the readable side of the `Http2Stream` will be closed. + * @since v10.11.0 + */ + readonly endAfterHeaders: boolean; + /** + * The numeric stream identifier of this `Http2Stream` instance. Set to `undefined`if the stream identifier has not yet been assigned. + * @since v8.4.0 + */ + readonly id?: number | undefined; + /** + * Set to `true` if the `Http2Stream` instance has not yet been assigned a + * numeric stream identifier. 
+ * @since v9.4.0 + */ + readonly pending: boolean; + /** + * Set to the `RST_STREAM` `error code` reported when the `Http2Stream` is + * destroyed after either receiving an `RST_STREAM` frame from the connected peer, + * calling `http2stream.close()`, or `http2stream.destroy()`. Will be`undefined` if the `Http2Stream` has not been closed. + * @since v8.4.0 + */ + readonly rstCode: number; + /** + * An object containing the outbound headers sent for this `Http2Stream`. + * @since v9.5.0 + */ + readonly sentHeaders: OutgoingHttpHeaders; + /** + * An array of objects containing the outbound informational (additional) headers + * sent for this `Http2Stream`. + * @since v9.5.0 + */ + readonly sentInfoHeaders?: OutgoingHttpHeaders[] | undefined; + /** + * An object containing the outbound trailers sent for this `HttpStream`. + * @since v9.5.0 + */ + readonly sentTrailers?: OutgoingHttpHeaders | undefined; + /** + * A reference to the `Http2Session` instance that owns this `Http2Stream`. The + * value will be `undefined` after the `Http2Stream` instance is destroyed. + * @since v8.4.0 + */ + readonly session: Http2Session; + /** + * Provides miscellaneous information about the current state of the`Http2Stream`. + * + * A current state of this `Http2Stream`. + * @since v8.4.0 + */ + readonly state: StreamState; + /** + * Closes the `Http2Stream` instance by sending an `RST_STREAM` frame to the + * connected HTTP/2 peer. + * @since v8.4.0 + * @param [code=http2.constants.NGHTTP2_NO_ERROR] Unsigned 32-bit integer identifying the error code. + * @param callback An optional function registered to listen for the `'close'` event. + */ + close(code?: number, callback?: () => void): void; + /** + * Updates the priority for this `Http2Stream` instance. + * @since v8.4.0 + */ + priority(options: StreamPriorityOptions): void; + /** + * ```js + * const http2 = require('http2'); + * const client = http2.connect('http://example.org:8000'); + * const { NGHTTP2_CANCEL } = http2.constants; + * const req = client.request({ ':path': '/' }); + * + * // Cancel the stream if there's no activity after 5 seconds + * req.setTimeout(5000, () => req.close(NGHTTP2_CANCEL)); + * ``` + * @since v8.4.0 + */ + setTimeout(msecs: number, callback?: () => void): void; + /** + * Sends a trailing `HEADERS` frame to the connected HTTP/2 peer. This method + * will cause the `Http2Stream` to be immediately closed and must only be + * called after the `'wantTrailers'` event has been emitted. When sending a + * request or sending a response, the `options.waitForTrailers` option must be set + * in order to keep the `Http2Stream` open after the final `DATA` frame so that + * trailers can be sent. + * + * ```js + * const http2 = require('http2'); + * const server = http2.createServer(); + * server.on('stream', (stream) => { + * stream.respond(undefined, { waitForTrailers: true }); + * stream.on('wantTrailers', () => { + * stream.sendTrailers({ xyz: 'abc' }); + * }); + * stream.end('Hello World'); + * }); + * ``` + * + * The HTTP/1 specification forbids trailers from containing HTTP/2 pseudo-header + * fields (e.g. `':method'`, `':path'`, etc). 
+ * @since v10.0.0 + */ + sendTrailers(headers: OutgoingHttpHeaders): void; + addListener(event: 'aborted', listener: () => void): this; + addListener(event: 'close', listener: () => void): this; + addListener(event: 'data', listener: (chunk: Buffer | string) => void): this; + addListener(event: 'drain', listener: () => void): this; + addListener(event: 'end', listener: () => void): this; + addListener(event: 'error', listener: (err: Error) => void): this; + addListener(event: 'finish', listener: () => void): this; + addListener(event: 'frameError', listener: (frameType: number, errorCode: number) => void): this; + addListener(event: 'pipe', listener: (src: stream.Readable) => void): this; + addListener(event: 'unpipe', listener: (src: stream.Readable) => void): this; + addListener(event: 'streamClosed', listener: (code: number) => void): this; + addListener(event: 'timeout', listener: () => void): this; + addListener(event: 'trailers', listener: (trailers: IncomingHttpHeaders, flags: number) => void): this; + addListener(event: 'wantTrailers', listener: () => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: 'aborted'): boolean; + emit(event: 'close'): boolean; + emit(event: 'data', chunk: Buffer | string): boolean; + emit(event: 'drain'): boolean; + emit(event: 'end'): boolean; + emit(event: 'error', err: Error): boolean; + emit(event: 'finish'): boolean; + emit(event: 'frameError', frameType: number, errorCode: number): boolean; + emit(event: 'pipe', src: stream.Readable): boolean; + emit(event: 'unpipe', src: stream.Readable): boolean; + emit(event: 'streamClosed', code: number): boolean; + emit(event: 'timeout'): boolean; + emit(event: 'trailers', trailers: IncomingHttpHeaders, flags: number): boolean; + emit(event: 'wantTrailers'): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: 'aborted', listener: () => void): this; + on(event: 'close', listener: () => void): this; + on(event: 'data', listener: (chunk: Buffer | string) => void): this; + on(event: 'drain', listener: () => void): this; + on(event: 'end', listener: () => void): this; + on(event: 'error', listener: (err: Error) => void): this; + on(event: 'finish', listener: () => void): this; + on(event: 'frameError', listener: (frameType: number, errorCode: number) => void): this; + on(event: 'pipe', listener: (src: stream.Readable) => void): this; + on(event: 'unpipe', listener: (src: stream.Readable) => void): this; + on(event: 'streamClosed', listener: (code: number) => void): this; + on(event: 'timeout', listener: () => void): this; + on(event: 'trailers', listener: (trailers: IncomingHttpHeaders, flags: number) => void): this; + on(event: 'wantTrailers', listener: () => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: 'aborted', listener: () => void): this; + once(event: 'close', listener: () => void): this; + once(event: 'data', listener: (chunk: Buffer | string) => void): this; + once(event: 'drain', listener: () => void): this; + once(event: 'end', listener: () => void): this; + once(event: 'error', listener: (err: Error) => void): this; + once(event: 'finish', listener: () => void): this; + once(event: 'frameError', listener: (frameType: number, errorCode: number) => void): this; + once(event: 'pipe', listener: (src: stream.Readable) => void): this; + once(event: 'unpipe', listener: (src: stream.Readable) => void): this; + once(event: 'streamClosed', listener: (code: number) => void): 
this; + once(event: 'timeout', listener: () => void): this; + once(event: 'trailers', listener: (trailers: IncomingHttpHeaders, flags: number) => void): this; + once(event: 'wantTrailers', listener: () => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: 'aborted', listener: () => void): this; + prependListener(event: 'close', listener: () => void): this; + prependListener(event: 'data', listener: (chunk: Buffer | string) => void): this; + prependListener(event: 'drain', listener: () => void): this; + prependListener(event: 'end', listener: () => void): this; + prependListener(event: 'error', listener: (err: Error) => void): this; + prependListener(event: 'finish', listener: () => void): this; + prependListener(event: 'frameError', listener: (frameType: number, errorCode: number) => void): this; + prependListener(event: 'pipe', listener: (src: stream.Readable) => void): this; + prependListener(event: 'unpipe', listener: (src: stream.Readable) => void): this; + prependListener(event: 'streamClosed', listener: (code: number) => void): this; + prependListener(event: 'timeout', listener: () => void): this; + prependListener(event: 'trailers', listener: (trailers: IncomingHttpHeaders, flags: number) => void): this; + prependListener(event: 'wantTrailers', listener: () => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener(event: 'aborted', listener: () => void): this; + prependOnceListener(event: 'close', listener: () => void): this; + prependOnceListener(event: 'data', listener: (chunk: Buffer | string) => void): this; + prependOnceListener(event: 'drain', listener: () => void): this; + prependOnceListener(event: 'end', listener: () => void): this; + prependOnceListener(event: 'error', listener: (err: Error) => void): this; + prependOnceListener(event: 'finish', listener: () => void): this; + prependOnceListener(event: 'frameError', listener: (frameType: number, errorCode: number) => void): this; + prependOnceListener(event: 'pipe', listener: (src: stream.Readable) => void): this; + prependOnceListener(event: 'unpipe', listener: (src: stream.Readable) => void): this; + prependOnceListener(event: 'streamClosed', listener: (code: number) => void): this; + prependOnceListener(event: 'timeout', listener: () => void): this; + prependOnceListener(event: 'trailers', listener: (trailers: IncomingHttpHeaders, flags: number) => void): this; + prependOnceListener(event: 'wantTrailers', listener: () => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + export interface ClientHttp2Stream extends Http2Stream { + addListener(event: 'continue', listener: () => {}): this; + addListener(event: 'headers', listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void): this; + addListener(event: 'push', listener: (headers: IncomingHttpHeaders, flags: number) => void): this; + addListener(event: 'response', listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: 'continue'): boolean; + emit(event: 'headers', headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number): boolean; + emit(event: 'push', headers: IncomingHttpHeaders, flags: number): boolean; + emit(event: 'response', headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number): 
boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: 'continue', listener: () => {}): this; + on(event: 'headers', listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void): this; + on(event: 'push', listener: (headers: IncomingHttpHeaders, flags: number) => void): this; + on(event: 'response', listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: 'continue', listener: () => {}): this; + once(event: 'headers', listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void): this; + once(event: 'push', listener: (headers: IncomingHttpHeaders, flags: number) => void): this; + once(event: 'response', listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: 'continue', listener: () => {}): this; + prependListener(event: 'headers', listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void): this; + prependListener(event: 'push', listener: (headers: IncomingHttpHeaders, flags: number) => void): this; + prependListener(event: 'response', listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener(event: 'continue', listener: () => {}): this; + prependOnceListener(event: 'headers', listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void): this; + prependOnceListener(event: 'push', listener: (headers: IncomingHttpHeaders, flags: number) => void): this; + prependOnceListener(event: 'response', listener: (headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + export interface ServerHttp2Stream extends Http2Stream { + /** + * True if headers were sent, false otherwise (read-only). + * @since v8.4.0 + */ + readonly headersSent: boolean; + /** + * Read-only property mapped to the `SETTINGS_ENABLE_PUSH` flag of the remote + * client's most recent `SETTINGS` frame. Will be `true` if the remote peer + * accepts push streams, `false` otherwise. Settings are the same for every`Http2Stream` in the same `Http2Session`. + * @since v8.4.0 + */ + readonly pushAllowed: boolean; + /** + * Sends an additional informational `HEADERS` frame to the connected HTTP/2 peer. + * @since v8.4.0 + */ + additionalHeaders(headers: OutgoingHttpHeaders): void; + /** + * Initiates a push stream. The callback is invoked with the new `Http2Stream`instance created for the push stream passed as the second argument, or an`Error` passed as the first argument. + * + * ```js + * const http2 = require('http2'); + * const server = http2.createServer(); + * server.on('stream', (stream) => { + * stream.respond({ ':status': 200 }); + * stream.pushStream({ ':path': '/' }, (err, pushStream, headers) => { + * if (err) throw err; + * pushStream.respond({ ':status': 200 }); + * pushStream.end('some pushed data'); + * }); + * stream.end('some data'); + * }); + * ``` + * + * Setting the weight of a push stream is not allowed in the `HEADERS` frame. 
Pass + * a `weight` value to `http2stream.priority` with the `silent` option set to`true` to enable server-side bandwidth balancing between concurrent streams. + * + * Calling `http2stream.pushStream()` from within a pushed stream is not permitted + * and will throw an error. + * @since v8.4.0 + * @param callback Callback that is called once the push stream has been initiated. + */ + pushStream(headers: OutgoingHttpHeaders, callback?: (err: Error | null, pushStream: ServerHttp2Stream, headers: OutgoingHttpHeaders) => void): void; + pushStream(headers: OutgoingHttpHeaders, options?: StreamPriorityOptions, callback?: (err: Error | null, pushStream: ServerHttp2Stream, headers: OutgoingHttpHeaders) => void): void; + /** + * ```js + * const http2 = require('http2'); + * const server = http2.createServer(); + * server.on('stream', (stream) => { + * stream.respond({ ':status': 200 }); + * stream.end('some data'); + * }); + * ``` + * + * When the `options.waitForTrailers` option is set, the `'wantTrailers'` event + * will be emitted immediately after queuing the last chunk of payload data to be + * sent. The `http2stream.sendTrailers()` method can then be used to sent trailing + * header fields to the peer. + * + * When `options.waitForTrailers` is set, the `Http2Stream` will not automatically + * close when the final `DATA` frame is transmitted. User code must call either`http2stream.sendTrailers()` or `http2stream.close()` to close the`Http2Stream`. + * + * ```js + * const http2 = require('http2'); + * const server = http2.createServer(); + * server.on('stream', (stream) => { + * stream.respond({ ':status': 200 }, { waitForTrailers: true }); + * stream.on('wantTrailers', () => { + * stream.sendTrailers({ ABC: 'some value to send' }); + * }); + * stream.end('some data'); + * }); + * ``` + * @since v8.4.0 + */ + respond(headers?: OutgoingHttpHeaders, options?: ServerStreamResponseOptions): void; + /** + * Initiates a response whose data is read from the given file descriptor. No + * validation is performed on the given file descriptor. If an error occurs while + * attempting to read data using the file descriptor, the `Http2Stream` will be + * closed using an `RST_STREAM` frame using the standard `INTERNAL_ERROR` code. + * + * When used, the `Http2Stream` object's `Duplex` interface will be closed + * automatically. + * + * ```js + * const http2 = require('http2'); + * const fs = require('fs'); + * + * const server = http2.createServer(); + * server.on('stream', (stream) => { + * const fd = fs.openSync('/some/file', 'r'); + * + * const stat = fs.fstatSync(fd); + * const headers = { + * 'content-length': stat.size, + * 'last-modified': stat.mtime.toUTCString(), + * 'content-type': 'text/plain; charset=utf-8' + * }; + * stream.respondWithFD(fd, headers); + * stream.on('close', () => fs.closeSync(fd)); + * }); + * ``` + * + * The optional `options.statCheck` function may be specified to give user code + * an opportunity to set additional content headers based on the `fs.Stat` details + * of the given fd. If the `statCheck` function is provided, the`http2stream.respondWithFD()` method will perform an `fs.fstat()` call to + * collect details on the provided file descriptor. + * + * The `offset` and `length` options may be used to limit the response to a + * specific range subset. This can be used, for instance, to support HTTP Range + * requests. + * + * The file descriptor or `FileHandle` is not closed when the stream is closed, + * so it will need to be closed manually once it is no longer needed. 
+ * Using the same file descriptor concurrently for multiple streams + * is not supported and may result in data loss. Re-using a file descriptor + * after a stream has finished is supported. + * + * When the `options.waitForTrailers` option is set, the `'wantTrailers'` event + * will be emitted immediately after queuing the last chunk of payload data to be + * sent. The `http2stream.sendTrailers()` method can then be used to sent trailing + * header fields to the peer. + * + * When `options.waitForTrailers` is set, the `Http2Stream` will not automatically + * close when the final `DATA` frame is transmitted. User code _must_ call either`http2stream.sendTrailers()` or `http2stream.close()` to close the`Http2Stream`. + * + * ```js + * const http2 = require('http2'); + * const fs = require('fs'); + * + * const server = http2.createServer(); + * server.on('stream', (stream) => { + * const fd = fs.openSync('/some/file', 'r'); + * + * const stat = fs.fstatSync(fd); + * const headers = { + * 'content-length': stat.size, + * 'last-modified': stat.mtime.toUTCString(), + * 'content-type': 'text/plain; charset=utf-8' + * }; + * stream.respondWithFD(fd, headers, { waitForTrailers: true }); + * stream.on('wantTrailers', () => { + * stream.sendTrailers({ ABC: 'some value to send' }); + * }); + * + * stream.on('close', () => fs.closeSync(fd)); + * }); + * ``` + * @since v8.4.0 + * @param fd A readable file descriptor. + */ + respondWithFD(fd: number | fs.promises.FileHandle, headers?: OutgoingHttpHeaders, options?: ServerStreamFileResponseOptions): void; + /** + * Sends a regular file as the response. The `path` must specify a regular file + * or an `'error'` event will be emitted on the `Http2Stream` object. + * + * When used, the `Http2Stream` object's `Duplex` interface will be closed + * automatically. + * + * The optional `options.statCheck` function may be specified to give user code + * an opportunity to set additional content headers based on the `fs.Stat` details + * of the given file: + * + * If an error occurs while attempting to read the file data, the `Http2Stream`will be closed using an `RST_STREAM` frame using the standard `INTERNAL_ERROR`code. If the `onError` callback is + * defined, then it will be called. Otherwise + * the stream will be destroyed. + * + * Example using a file path: + * + * ```js + * const http2 = require('http2'); + * const server = http2.createServer(); + * server.on('stream', (stream) => { + * function statCheck(stat, headers) { + * headers['last-modified'] = stat.mtime.toUTCString(); + * } + * + * function onError(err) { + * // stream.respond() can throw if the stream has been destroyed by + * // the other side. + * try { + * if (err.code === 'ENOENT') { + * stream.respond({ ':status': 404 }); + * } else { + * stream.respond({ ':status': 500 }); + * } + * } catch (err) { + * // Perform actual error handling. + * console.log(err); + * } + * stream.end(); + * } + * + * stream.respondWithFile('/some/file', + * { 'content-type': 'text/plain; charset=utf-8' }, + * { statCheck, onError }); + * }); + * ``` + * + * The `options.statCheck` function may also be used to cancel the send operation + * by returning `false`. For instance, a conditional request may check the stat + * results to determine if the file has been modified to return an appropriate`304` response: + * + * ```js + * const http2 = require('http2'); + * const server = http2.createServer(); + * server.on('stream', (stream) => { + * function statCheck(stat, headers) { + * // Check the stat here... 
+ * stream.respond({ ':status': 304 }); + * return false; // Cancel the send operation + * } + * stream.respondWithFile('/some/file', + * { 'content-type': 'text/plain; charset=utf-8' }, + * { statCheck }); + * }); + * ``` + * + * The `content-length` header field will be automatically set. + * + * The `offset` and `length` options may be used to limit the response to a + * specific range subset. This can be used, for instance, to support HTTP Range + * requests. + * + * The `options.onError` function may also be used to handle all the errors + * that could happen before the delivery of the file is initiated. The + * default behavior is to destroy the stream. + * + * When the `options.waitForTrailers` option is set, the `'wantTrailers'` event + * will be emitted immediately after queuing the last chunk of payload data to be + * sent. The `http2stream.sendTrailers()` method can then be used to sent trailing + * header fields to the peer. + * + * When `options.waitForTrailers` is set, the `Http2Stream` will not automatically + * close when the final `DATA` frame is transmitted. User code must call either`http2stream.sendTrailers()` or `http2stream.close()` to close the`Http2Stream`. + * + * ```js + * const http2 = require('http2'); + * const server = http2.createServer(); + * server.on('stream', (stream) => { + * stream.respondWithFile('/some/file', + * { 'content-type': 'text/plain; charset=utf-8' }, + * { waitForTrailers: true }); + * stream.on('wantTrailers', () => { + * stream.sendTrailers({ ABC: 'some value to send' }); + * }); + * }); + * ``` + * @since v8.4.0 + */ + respondWithFile(path: string, headers?: OutgoingHttpHeaders, options?: ServerStreamFileResponseOptionsWithError): void; + } + // Http2Session + export interface Settings { + headerTableSize?: number | undefined; + enablePush?: boolean | undefined; + initialWindowSize?: number | undefined; + maxFrameSize?: number | undefined; + maxConcurrentStreams?: number | undefined; + maxHeaderListSize?: number | undefined; + enableConnectProtocol?: boolean | undefined; + } + export interface ClientSessionRequestOptions { + endStream?: boolean | undefined; + exclusive?: boolean | undefined; + parent?: number | undefined; + weight?: number | undefined; + waitForTrailers?: boolean | undefined; + signal?: AbortSignal | undefined; + } + export interface SessionState { + effectiveLocalWindowSize?: number | undefined; + effectiveRecvDataLength?: number | undefined; + nextStreamID?: number | undefined; + localWindowSize?: number | undefined; + lastProcStreamID?: number | undefined; + remoteWindowSize?: number | undefined; + outboundQueueSize?: number | undefined; + deflateDynamicTableSize?: number | undefined; + inflateDynamicTableSize?: number | undefined; + } + export interface Http2Session extends EventEmitter { + /** + * Value will be `undefined` if the `Http2Session` is not yet connected to a + * socket, `h2c` if the `Http2Session` is not connected to a `TLSSocket`, or + * will return the value of the connected `TLSSocket`'s own `alpnProtocol`property. + * @since v9.4.0 + */ + readonly alpnProtocol?: string | undefined; + /** + * Will be `true` if this `Http2Session` instance has been closed, otherwise`false`. + * @since v9.4.0 + */ + readonly closed: boolean; + /** + * Will be `true` if this `Http2Session` instance is still connecting, will be set + * to `false` before emitting `connect` event and/or calling the `http2.connect`callback. 
+ * @since v10.0.0 + */ + readonly connecting: boolean; + /** + * Will be `true` if this `Http2Session` instance has been destroyed and must no + * longer be used, otherwise `false`. + * @since v8.4.0 + */ + readonly destroyed: boolean; + /** + * Value is `undefined` if the `Http2Session` session socket has not yet been + * connected, `true` if the `Http2Session` is connected with a `TLSSocket`, + * and `false` if the `Http2Session` is connected to any other kind of socket + * or stream. + * @since v9.4.0 + */ + readonly encrypted?: boolean | undefined; + /** + * A prototype-less object describing the current local settings of this`Http2Session`. The local settings are local to _this_`Http2Session` instance. + * @since v8.4.0 + */ + readonly localSettings: Settings; + /** + * If the `Http2Session` is connected to a `TLSSocket`, the `originSet` property + * will return an `Array` of origins for which the `Http2Session` may be + * considered authoritative. + * + * The `originSet` property is only available when using a secure TLS connection. + * @since v9.4.0 + */ + readonly originSet?: string[] | undefined; + /** + * Indicates whether the `Http2Session` is currently waiting for acknowledgment of + * a sent `SETTINGS` frame. Will be `true` after calling the`http2session.settings()` method. Will be `false` once all sent `SETTINGS`frames have been acknowledged. + * @since v8.4.0 + */ + readonly pendingSettingsAck: boolean; + /** + * A prototype-less object describing the current remote settings of this`Http2Session`. The remote settings are set by the _connected_ HTTP/2 peer. + * @since v8.4.0 + */ + readonly remoteSettings: Settings; + /** + * Returns a `Proxy` object that acts as a `net.Socket` (or `tls.TLSSocket`) but + * limits available methods to ones safe to use with HTTP/2. + * + * `destroy`, `emit`, `end`, `pause`, `read`, `resume`, and `write` will throw + * an error with code `ERR_HTTP2_NO_SOCKET_MANIPULATION`. See `Http2Session and Sockets` for more information. + * + * `setTimeout` method will be called on this `Http2Session`. + * + * All other interactions will be routed directly to the socket. + * @since v8.4.0 + */ + readonly socket: net.Socket | tls.TLSSocket; + /** + * Provides miscellaneous information about the current state of the`Http2Session`. + * + * An object describing the current status of this `Http2Session`. + * @since v8.4.0 + */ + readonly state: SessionState; + /** + * The `http2session.type` will be equal to`http2.constants.NGHTTP2_SESSION_SERVER` if this `Http2Session` instance is a + * server, and `http2.constants.NGHTTP2_SESSION_CLIENT` if the instance is a + * client. + * @since v8.4.0 + */ + readonly type: number; + /** + * Gracefully closes the `Http2Session`, allowing any existing streams to + * complete on their own and preventing new `Http2Stream` instances from being + * created. Once closed, `http2session.destroy()`_might_ be called if there + * are no open `Http2Stream` instances. + * + * If specified, the `callback` function is registered as a handler for the`'close'` event. + * @since v9.4.0 + */ + close(callback?: () => void): void; + /** + * Immediately terminates the `Http2Session` and the associated `net.Socket` or`tls.TLSSocket`. + * + * Once destroyed, the `Http2Session` will emit the `'close'` event. If `error`is not undefined, an `'error'` event will be emitted immediately before the`'close'` event. + * + * If there are any remaining open `Http2Streams` associated with the`Http2Session`, those will also be destroyed. 
+ * @since v8.4.0 + * @param error An `Error` object if the `Http2Session` is being destroyed due to an error. + * @param code The HTTP/2 error code to send in the final `GOAWAY` frame. If unspecified, and `error` is not undefined, the default is `INTERNAL_ERROR`, otherwise defaults to `NO_ERROR`. + */ + destroy(error?: Error, code?: number): void; + /** + * Transmits a `GOAWAY` frame to the connected peer _without_ shutting down the`Http2Session`. + * @since v9.4.0 + * @param code An HTTP/2 error code + * @param lastStreamID The numeric ID of the last processed `Http2Stream` + * @param opaqueData A `TypedArray` or `DataView` instance containing additional data to be carried within the `GOAWAY` frame. + */ + goaway(code?: number, lastStreamID?: number, opaqueData?: NodeJS.ArrayBufferView): void; + /** + * Sends a `PING` frame to the connected HTTP/2 peer. A `callback` function must + * be provided. The method will return `true` if the `PING` was sent, `false`otherwise. + * + * The maximum number of outstanding (unacknowledged) pings is determined by the`maxOutstandingPings` configuration option. The default maximum is 10. + * + * If provided, the `payload` must be a `Buffer`, `TypedArray`, or `DataView`containing 8 bytes of data that will be transmitted with the `PING` and + * returned with the ping acknowledgment. + * + * The callback will be invoked with three arguments: an error argument that will + * be `null` if the `PING` was successfully acknowledged, a `duration` argument + * that reports the number of milliseconds elapsed since the ping was sent and the + * acknowledgment was received, and a `Buffer` containing the 8-byte `PING`payload. + * + * ```js + * session.ping(Buffer.from('abcdefgh'), (err, duration, payload) => { + * if (!err) { + * console.log(`Ping acknowledged in ${duration} milliseconds`); + * console.log(`With payload '${payload.toString()}'`); + * } + * }); + * ``` + * + * If the `payload` argument is not specified, the default payload will be the + * 64-bit timestamp (little endian) marking the start of the `PING` duration. + * @since v8.9.3 + * @param payload Optional ping payload. + */ + ping(callback: (err: Error | null, duration: number, payload: Buffer) => void): boolean; + ping(payload: NodeJS.ArrayBufferView, callback: (err: Error | null, duration: number, payload: Buffer) => void): boolean; + /** + * Calls `ref()` on this `Http2Session`instance's underlying `net.Socket`. + * @since v9.4.0 + */ + ref(): void; + /** + * Sets the local endpoint's window size. + * The `windowSize` is the total window size to set, not + * the delta. + * + * ```js + * const http2 = require('http2'); + * + * const server = http2.createServer(); + * const expectedWindowSize = 2 ** 20; + * server.on('connect', (session) => { + * + * // Set local window size to be 2 ** 20 + * session.setLocalWindowSize(expectedWindowSize); + * }); + * ``` + * @since v15.3.0, v14.18.0 + */ + setLocalWindowSize(windowSize: number): void; + /** + * Used to set a callback function that is called when there is no activity on + * the `Http2Session` after `msecs` milliseconds. The given `callback` is + * registered as a listener on the `'timeout'` event. + * @since v8.4.0 + */ + setTimeout(msecs: number, callback?: () => void): void; + /** + * Updates the current local settings for this `Http2Session` and sends a new`SETTINGS` frame to the connected HTTP/2 peer. 
+ * + * Once called, the `http2session.pendingSettingsAck` property will be `true`while the session is waiting for the remote peer to acknowledge the new + * settings. + * + * The new settings will not become effective until the `SETTINGS` acknowledgment + * is received and the `'localSettings'` event is emitted. It is possible to send + * multiple `SETTINGS` frames while acknowledgment is still pending. + * @since v8.4.0 + * @param callback Callback that is called once the session is connected or right away if the session is already connected. + */ + settings(settings: Settings, callback?: (err: Error | null, settings: Settings, duration: number) => void): void; + /** + * Calls `unref()` on this `Http2Session`instance's underlying `net.Socket`. + * @since v9.4.0 + */ + unref(): void; + addListener(event: 'close', listener: () => void): this; + addListener(event: 'error', listener: (err: Error) => void): this; + addListener(event: 'frameError', listener: (frameType: number, errorCode: number, streamID: number) => void): this; + addListener(event: 'goaway', listener: (errorCode: number, lastStreamID: number, opaqueData: Buffer) => void): this; + addListener(event: 'localSettings', listener: (settings: Settings) => void): this; + addListener(event: 'ping', listener: () => void): this; + addListener(event: 'remoteSettings', listener: (settings: Settings) => void): this; + addListener(event: 'timeout', listener: () => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: 'close'): boolean; + emit(event: 'error', err: Error): boolean; + emit(event: 'frameError', frameType: number, errorCode: number, streamID: number): boolean; + emit(event: 'goaway', errorCode: number, lastStreamID: number, opaqueData: Buffer): boolean; + emit(event: 'localSettings', settings: Settings): boolean; + emit(event: 'ping'): boolean; + emit(event: 'remoteSettings', settings: Settings): boolean; + emit(event: 'timeout'): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: 'close', listener: () => void): this; + on(event: 'error', listener: (err: Error) => void): this; + on(event: 'frameError', listener: (frameType: number, errorCode: number, streamID: number) => void): this; + on(event: 'goaway', listener: (errorCode: number, lastStreamID: number, opaqueData: Buffer) => void): this; + on(event: 'localSettings', listener: (settings: Settings) => void): this; + on(event: 'ping', listener: () => void): this; + on(event: 'remoteSettings', listener: (settings: Settings) => void): this; + on(event: 'timeout', listener: () => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: 'close', listener: () => void): this; + once(event: 'error', listener: (err: Error) => void): this; + once(event: 'frameError', listener: (frameType: number, errorCode: number, streamID: number) => void): this; + once(event: 'goaway', listener: (errorCode: number, lastStreamID: number, opaqueData: Buffer) => void): this; + once(event: 'localSettings', listener: (settings: Settings) => void): this; + once(event: 'ping', listener: () => void): this; + once(event: 'remoteSettings', listener: (settings: Settings) => void): this; + once(event: 'timeout', listener: () => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: 'close', listener: () => void): this; + prependListener(event: 'error', listener: (err: Error) => void): this; + prependListener(event: 'frameError', 
listener: (frameType: number, errorCode: number, streamID: number) => void): this; + prependListener(event: 'goaway', listener: (errorCode: number, lastStreamID: number, opaqueData: Buffer) => void): this; + prependListener(event: 'localSettings', listener: (settings: Settings) => void): this; + prependListener(event: 'ping', listener: () => void): this; + prependListener(event: 'remoteSettings', listener: (settings: Settings) => void): this; + prependListener(event: 'timeout', listener: () => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener(event: 'close', listener: () => void): this; + prependOnceListener(event: 'error', listener: (err: Error) => void): this; + prependOnceListener(event: 'frameError', listener: (frameType: number, errorCode: number, streamID: number) => void): this; + prependOnceListener(event: 'goaway', listener: (errorCode: number, lastStreamID: number, opaqueData: Buffer) => void): this; + prependOnceListener(event: 'localSettings', listener: (settings: Settings) => void): this; + prependOnceListener(event: 'ping', listener: () => void): this; + prependOnceListener(event: 'remoteSettings', listener: (settings: Settings) => void): this; + prependOnceListener(event: 'timeout', listener: () => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + export interface ClientHttp2Session extends Http2Session { + /** + * For HTTP/2 Client `Http2Session` instances only, the `http2session.request()`creates and returns an `Http2Stream` instance that can be used to send an + * HTTP/2 request to the connected server. + * + * When a `ClientHttp2Session` is first created, the socket may not yet be + * connected. if `clienthttp2session.request()` is called during this time, the + * actual request will be deferred until the socket is ready to go. + * If the `session` is closed before the actual request be executed, an`ERR_HTTP2_GOAWAY_SESSION` is thrown. + * + * This method is only available if `http2session.type` is equal to`http2.constants.NGHTTP2_SESSION_CLIENT`. + * + * ```js + * const http2 = require('http2'); + * const clientSession = http2.connect('https://localhost:1234'); + * const { + * HTTP2_HEADER_PATH, + * HTTP2_HEADER_STATUS + * } = http2.constants; + * + * const req = clientSession.request({ [HTTP2_HEADER_PATH]: '/' }); + * req.on('response', (headers) => { + * console.log(headers[HTTP2_HEADER_STATUS]); + * req.on('data', (chunk) => { // .. }); + * req.on('end', () => { // .. }); + * }); + * ``` + * + * When the `options.waitForTrailers` option is set, the `'wantTrailers'` event + * is emitted immediately after queuing the last chunk of payload data to be sent. + * The `http2stream.sendTrailers()` method can then be called to send trailing + * headers to the peer. + * + * When `options.waitForTrailers` is set, the `Http2Stream` will not automatically + * close when the final `DATA` frame is transmitted. User code must call either`http2stream.sendTrailers()` or `http2stream.close()` to close the`Http2Stream`. + * + * When `options.signal` is set with an `AbortSignal` and then `abort` on the + * corresponding `AbortController` is called, the request will emit an `'error'`event with an `AbortError` error. 
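The `waitForTrailers` and `signal` behaviour described above can be exercised with a short client sketch (not part of the typings; the origin, path and trailer name are illustrative).

```js
const http2 = require('http2');

const client = http2.connect('https://localhost:8443'); // illustrative origin
const controller = new AbortController();

const req = client.request(
  { ':method': 'POST', ':path': '/upload' },
  { waitForTrailers: true, signal: controller.signal },
);

// With waitForTrailers the stream stays open after the final DATA frame until
// sendTrailers() (or close()) is called from the 'wantTrailers' handler.
req.on('wantTrailers', () => {
  req.sendTrailers({ 'content-md5': 'deadbeef' }); // illustrative trailer
});

req.on('error', (err) => console.error('request failed:', err.name)); // AbortError when aborted
req.end('payload');

// Abort the request if it takes longer than five seconds.
setTimeout(() => controller.abort(), 5000).unref();
```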
+ * + * The `:method` and `:path` pseudo-headers are not specified within `headers`, + * they respectively default to: + * + * * `:method` \= `'GET'` + * * `:path` \= `/` + * @since v8.4.0 + */ + request(headers?: OutgoingHttpHeaders, options?: ClientSessionRequestOptions): ClientHttp2Stream; + addListener(event: 'altsvc', listener: (alt: string, origin: string, stream: number) => void): this; + addListener(event: 'origin', listener: (origins: string[]) => void): this; + addListener(event: 'connect', listener: (session: ClientHttp2Session, socket: net.Socket | tls.TLSSocket) => void): this; + addListener(event: 'stream', listener: (stream: ClientHttp2Stream, headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: 'altsvc', alt: string, origin: string, stream: number): boolean; + emit(event: 'origin', origins: ReadonlyArray): boolean; + emit(event: 'connect', session: ClientHttp2Session, socket: net.Socket | tls.TLSSocket): boolean; + emit(event: 'stream', stream: ClientHttp2Stream, headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: 'altsvc', listener: (alt: string, origin: string, stream: number) => void): this; + on(event: 'origin', listener: (origins: string[]) => void): this; + on(event: 'connect', listener: (session: ClientHttp2Session, socket: net.Socket | tls.TLSSocket) => void): this; + on(event: 'stream', listener: (stream: ClientHttp2Stream, headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: 'altsvc', listener: (alt: string, origin: string, stream: number) => void): this; + once(event: 'origin', listener: (origins: string[]) => void): this; + once(event: 'connect', listener: (session: ClientHttp2Session, socket: net.Socket | tls.TLSSocket) => void): this; + once(event: 'stream', listener: (stream: ClientHttp2Stream, headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: 'altsvc', listener: (alt: string, origin: string, stream: number) => void): this; + prependListener(event: 'origin', listener: (origins: string[]) => void): this; + prependListener(event: 'connect', listener: (session: ClientHttp2Session, socket: net.Socket | tls.TLSSocket) => void): this; + prependListener(event: 'stream', listener: (stream: ClientHttp2Stream, headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener(event: 'altsvc', listener: (alt: string, origin: string, stream: number) => void): this; + prependOnceListener(event: 'origin', listener: (origins: string[]) => void): this; + prependOnceListener(event: 'connect', listener: (session: ClientHttp2Session, socket: net.Socket | tls.TLSSocket) => void): this; + prependOnceListener(event: 'stream', listener: (stream: ClientHttp2Stream, headers: IncomingHttpHeaders & IncomingHttpStatusHeader, flags: number) => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + export interface AlternativeServiceOptions { + origin: number | string | url.URL; + } + export interface ServerHttp2Session extends 
Http2Session { + readonly server: Http2Server | Http2SecureServer; + /** + * Submits an `ALTSVC` frame (as defined by [RFC 7838](https://tools.ietf.org/html/rfc7838)) to the connected client. + * + * ```js + * const http2 = require('http2'); + * + * const server = http2.createServer(); + * server.on('session', (session) => { + * // Set altsvc for origin https://example.org:80 + * session.altsvc('h2=":8000"', 'https://example.org:80'); + * }); + * + * server.on('stream', (stream) => { + * // Set altsvc for a specific stream + * stream.session.altsvc('h2=":8000"', stream.id); + * }); + * ``` + * + * Sending an `ALTSVC` frame with a specific stream ID indicates that the alternate + * service is associated with the origin of the given `Http2Stream`. + * + * The `alt` and origin string _must_ contain only ASCII bytes and are + * strictly interpreted as a sequence of ASCII bytes. The special value `'clear'`may be passed to clear any previously set alternative service for a given + * domain. + * + * When a string is passed for the `originOrStream` argument, it will be parsed as + * a URL and the origin will be derived. For instance, the origin for the + * HTTP URL `'https://example.org/foo/bar'` is the ASCII string`'https://example.org'`. An error will be thrown if either the given string + * cannot be parsed as a URL or if a valid origin cannot be derived. + * + * A `URL` object, or any object with an `origin` property, may be passed as`originOrStream`, in which case the value of the `origin` property will be + * used. The value of the `origin` property _must_ be a properly serialized + * ASCII origin. + * @since v9.4.0 + * @param alt A description of the alternative service configuration as defined by `RFC 7838`. + * @param originOrStream Either a URL string specifying the origin (or an `Object` with an `origin` property) or the numeric identifier of an active `Http2Stream` as given by the + * `http2stream.id` property. + */ + altsvc(alt: string, originOrStream: number | string | url.URL | AlternativeServiceOptions): void; + /** + * Submits an `ORIGIN` frame (as defined by [RFC 8336](https://tools.ietf.org/html/rfc8336)) to the connected client + * to advertise the set of origins for which the server is capable of providing + * authoritative responses. + * + * ```js + * const http2 = require('http2'); + * const options = getSecureOptionsSomehow(); + * const server = http2.createSecureServer(options); + * server.on('stream', (stream) => { + * stream.respond(); + * stream.end('ok'); + * }); + * server.on('session', (session) => { + * session.origin('https://example.com', 'https://example.org'); + * }); + * ``` + * + * When a string is passed as an `origin`, it will be parsed as a URL and the + * origin will be derived. For instance, the origin for the HTTP URL`'https://example.org/foo/bar'` is the ASCII string`'https://example.org'`. An error will be thrown if either the given + * string + * cannot be parsed as a URL or if a valid origin cannot be derived. + * + * A `URL` object, or any object with an `origin` property, may be passed as + * an `origin`, in which case the value of the `origin` property will be + * used. The value of the `origin` property _must_ be a properly serialized + * ASCII origin. 
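The server-side `altsvc()`/`origin()` methods above have client-side counterparts in the `'altsvc'` and `'origin'` events declared on `ClientHttp2Session`. A minimal listener sketch (the origin is illustrative):

```js
const http2 = require('http2');

const client = http2.connect('https://example.org'); // illustrative

// Fired when the server sends an ALTSVC frame (RFC 7838).
client.on('altsvc', (alt, origin, streamId) => {
  console.log(`alternative service for ${origin} (stream ${streamId}): ${alt}`);
});

// Fired when the server sends an ORIGIN frame (RFC 8336); TLS sessions only.
client.on('origin', (origins) => {
  console.log('session is now authoritative for:', origins);
  console.log('full origin set:', client.originSet);
});
```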
+ * + * Alternatively, the `origins` option may be used when creating a new HTTP/2 + * server using the `http2.createSecureServer()` method: + * + * ```js + * const http2 = require('http2'); + * const options = getSecureOptionsSomehow(); + * options.origins = ['https://example.com', 'https://example.org']; + * const server = http2.createSecureServer(options); + * server.on('stream', (stream) => { + * stream.respond(); + * stream.end('ok'); + * }); + * ``` + * @since v10.12.0 + * @param origins One or more URL Strings passed as separate arguments. + */ + origin( + ...origins: Array< + | string + | url.URL + | { + origin: string; + } + > + ): void; + addListener(event: 'connect', listener: (session: ServerHttp2Session, socket: net.Socket | tls.TLSSocket) => void): this; + addListener(event: 'stream', listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: 'connect', session: ServerHttp2Session, socket: net.Socket | tls.TLSSocket): boolean; + emit(event: 'stream', stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: 'connect', listener: (session: ServerHttp2Session, socket: net.Socket | tls.TLSSocket) => void): this; + on(event: 'stream', listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: 'connect', listener: (session: ServerHttp2Session, socket: net.Socket | tls.TLSSocket) => void): this; + once(event: 'stream', listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: 'connect', listener: (session: ServerHttp2Session, socket: net.Socket | tls.TLSSocket) => void): this; + prependListener(event: 'stream', listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener(event: 'connect', listener: (session: ServerHttp2Session, socket: net.Socket | tls.TLSSocket) => void): this; + prependOnceListener(event: 'stream', listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + // Http2Server + export interface SessionOptions { + maxDeflateDynamicTableSize?: number | undefined; + maxSessionMemory?: number | undefined; + maxHeaderListPairs?: number | undefined; + maxOutstandingPings?: number | undefined; + maxSendHeaderBlockLength?: number | undefined; + paddingStrategy?: number | undefined; + peerMaxConcurrentStreams?: number | undefined; + settings?: Settings | undefined; + /** + * Specifies a timeout in milliseconds that + * a server should wait when an [`'unknownProtocol'`][] is emitted. If the + * socket has not been destroyed by that time the server will destroy it. 
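The options interfaces that follow are easier to read next to a concrete call. A sketch of `createSecureServer()` using several of them; the key/cert paths and all numeric values are placeholders, not recommendations.

```js
const http2 = require('http2');
const fs = require('fs');

// 'key.pem' / 'cert.pem' are placeholders for real TLS material.
const server = http2.createSecureServer({
  key: fs.readFileSync('key.pem'),
  cert: fs.readFileSync('cert.pem'),
  allowHTTP1: true,              // SecureServerOptions: fall back to HTTP/1.1
  maxOutstandingPings: 2,        // SessionOptions
  unknownProtocolTimeout: 5000,  // drop sockets that never negotiate a protocol
  settings: {
    enablePush: false,
    initialWindowSize: 2 ** 16,
  },
});

server.listen(8443);
```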
+ * @default 100000 + */ + unknownProtocolTimeout?: number | undefined; + selectPadding?(frameLen: number, maxFrameLen: number): number; + createConnection?(authority: url.URL, option: SessionOptions): stream.Duplex; + } + export interface ClientSessionOptions extends SessionOptions { + maxReservedRemoteStreams?: number | undefined; + createConnection?: ((authority: url.URL, option: SessionOptions) => stream.Duplex) | undefined; + protocol?: 'http:' | 'https:' | undefined; + } + export interface ServerSessionOptions extends SessionOptions { + Http1IncomingMessage?: typeof IncomingMessage | undefined; + Http1ServerResponse?: typeof ServerResponse | undefined; + Http2ServerRequest?: typeof Http2ServerRequest | undefined; + Http2ServerResponse?: typeof Http2ServerResponse | undefined; + } + export interface SecureClientSessionOptions extends ClientSessionOptions, tls.ConnectionOptions {} + export interface SecureServerSessionOptions extends ServerSessionOptions, tls.TlsOptions {} + export interface ServerOptions extends ServerSessionOptions {} + export interface SecureServerOptions extends SecureServerSessionOptions { + allowHTTP1?: boolean | undefined; + origins?: string[] | undefined; + } + interface HTTP2ServerCommon { + setTimeout(msec?: number, callback?: () => void): this; + /** + * Throws ERR_HTTP2_INVALID_SETTING_VALUE for invalid settings values. + * Throws ERR_INVALID_ARG_TYPE for invalid settings argument. + */ + updateSettings(settings: Settings): void; + } + export interface Http2Server extends net.Server, HTTP2ServerCommon { + addListener(event: 'checkContinue', listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; + addListener(event: 'request', listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; + addListener(event: 'session', listener: (session: ServerHttp2Session) => void): this; + addListener(event: 'sessionError', listener: (err: Error) => void): this; + addListener(event: 'stream', listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void): this; + addListener(event: 'timeout', listener: () => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: 'checkContinue', request: Http2ServerRequest, response: Http2ServerResponse): boolean; + emit(event: 'request', request: Http2ServerRequest, response: Http2ServerResponse): boolean; + emit(event: 'session', session: ServerHttp2Session): boolean; + emit(event: 'sessionError', err: Error): boolean; + emit(event: 'stream', stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number): boolean; + emit(event: 'timeout'): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: 'checkContinue', listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; + on(event: 'request', listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; + on(event: 'session', listener: (session: ServerHttp2Session) => void): this; + on(event: 'sessionError', listener: (err: Error) => void): this; + on(event: 'stream', listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void): this; + on(event: 'timeout', listener: () => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: 'checkContinue', listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; + once(event: 'request', listener: (request: Http2ServerRequest, 
response: Http2ServerResponse) => void): this; + once(event: 'session', listener: (session: ServerHttp2Session) => void): this; + once(event: 'sessionError', listener: (err: Error) => void): this; + once(event: 'stream', listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void): this; + once(event: 'timeout', listener: () => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: 'checkContinue', listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; + prependListener(event: 'request', listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; + prependListener(event: 'session', listener: (session: ServerHttp2Session) => void): this; + prependListener(event: 'sessionError', listener: (err: Error) => void): this; + prependListener(event: 'stream', listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void): this; + prependListener(event: 'timeout', listener: () => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener(event: 'checkContinue', listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; + prependOnceListener(event: 'request', listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; + prependOnceListener(event: 'session', listener: (session: ServerHttp2Session) => void): this; + prependOnceListener(event: 'sessionError', listener: (err: Error) => void): this; + prependOnceListener(event: 'stream', listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void): this; + prependOnceListener(event: 'timeout', listener: () => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + export interface Http2SecureServer extends tls.Server, HTTP2ServerCommon { + addListener(event: 'checkContinue', listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; + addListener(event: 'request', listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; + addListener(event: 'session', listener: (session: ServerHttp2Session) => void): this; + addListener(event: 'sessionError', listener: (err: Error) => void): this; + addListener(event: 'stream', listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void): this; + addListener(event: 'timeout', listener: () => void): this; + addListener(event: 'unknownProtocol', listener: (socket: tls.TLSSocket) => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: 'checkContinue', request: Http2ServerRequest, response: Http2ServerResponse): boolean; + emit(event: 'request', request: Http2ServerRequest, response: Http2ServerResponse): boolean; + emit(event: 'session', session: ServerHttp2Session): boolean; + emit(event: 'sessionError', err: Error): boolean; + emit(event: 'stream', stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number): boolean; + emit(event: 'timeout'): boolean; + emit(event: 'unknownProtocol', socket: tls.TLSSocket): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: 'checkContinue', listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; + on(event: 'request', listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; + 
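A short sketch of wiring the server events declared here (port, timeout and messages are illustrative):

```js
const http2 = require('http2');

const server = http2.createServer();

// Core API: handle streams and their header objects directly.
server.on('stream', (stream, headers) => {
  stream.respond({ ':status': 200, 'content-type': 'text/plain; charset=utf-8' });
  stream.end(`you asked for ${headers[':path']}`);
});

// Keep one misbehaving session from taking the whole server down.
server.on('sessionError', (err) => console.error('session error:', err.message));

server.setTimeout(120_000);
server.on('timeout', () => console.log('server timeout fired'));

server.listen(8000);
```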
on(event: 'session', listener: (session: ServerHttp2Session) => void): this; + on(event: 'sessionError', listener: (err: Error) => void): this; + on(event: 'stream', listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void): this; + on(event: 'timeout', listener: () => void): this; + on(event: 'unknownProtocol', listener: (socket: tls.TLSSocket) => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: 'checkContinue', listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; + once(event: 'request', listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; + once(event: 'session', listener: (session: ServerHttp2Session) => void): this; + once(event: 'sessionError', listener: (err: Error) => void): this; + once(event: 'stream', listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void): this; + once(event: 'timeout', listener: () => void): this; + once(event: 'unknownProtocol', listener: (socket: tls.TLSSocket) => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: 'checkContinue', listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; + prependListener(event: 'request', listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; + prependListener(event: 'session', listener: (session: ServerHttp2Session) => void): this; + prependListener(event: 'sessionError', listener: (err: Error) => void): this; + prependListener(event: 'stream', listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void): this; + prependListener(event: 'timeout', listener: () => void): this; + prependListener(event: 'unknownProtocol', listener: (socket: tls.TLSSocket) => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener(event: 'checkContinue', listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; + prependOnceListener(event: 'request', listener: (request: Http2ServerRequest, response: Http2ServerResponse) => void): this; + prependOnceListener(event: 'session', listener: (session: ServerHttp2Session) => void): this; + prependOnceListener(event: 'sessionError', listener: (err: Error) => void): this; + prependOnceListener(event: 'stream', listener: (stream: ServerHttp2Stream, headers: IncomingHttpHeaders, flags: number) => void): this; + prependOnceListener(event: 'timeout', listener: () => void): this; + prependOnceListener(event: 'unknownProtocol', listener: (socket: tls.TLSSocket) => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + /** + * A `Http2ServerRequest` object is created by {@link Server} or {@link SecureServer} and passed as the first argument to the `'request'` event. It may be used to access a request status, + * headers, and + * data. + * @since v8.4.0 + */ + export class Http2ServerRequest extends stream.Readable { + constructor(stream: ServerHttp2Stream, headers: IncomingHttpHeaders, options: stream.ReadableOptions, rawHeaders: ReadonlyArray); + /** + * The `request.aborted` property will be `true` if the request has + * been aborted. + * @since v10.1.0 + */ + readonly aborted: boolean; + /** + * The request authority pseudo header field. 
Because HTTP/2 allows requests + * to set either `:authority` or `host`, this value is derived from`req.headers[':authority']` if present. Otherwise, it is derived from`req.headers['host']`. + * @since v8.4.0 + */ + readonly authority: string; + /** + * See `request.socket`. + * @since v8.4.0 + * @deprecated Since v13.0.0 - Use `socket`. + */ + readonly connection: net.Socket | tls.TLSSocket; + /** + * The `request.complete` property will be `true` if the request has + * been completed, aborted, or destroyed. + * @since v12.10.0 + */ + readonly complete: boolean; + /** + * The request/response headers object. + * + * Key-value pairs of header names and values. Header names are lower-cased. + * + * ```js + * // Prints something like: + * // + * // { 'user-agent': 'curl/7.22.0', + * // host: '127.0.0.1:8000', + * // accept: '*' } + * console.log(request.headers); + * ``` + * + * See `HTTP/2 Headers Object`. + * + * In HTTP/2, the request path, host name, protocol, and method are represented as + * special headers prefixed with the `:` character (e.g. `':path'`). These special + * headers will be included in the `request.headers` object. Care must be taken not + * to inadvertently modify these special headers or errors may occur. For instance, + * removing all headers from the request will cause errors to occur: + * + * ```js + * removeAllHeaders(request.headers); + * assert(request.url); // Fails because the :path header has been removed + * ``` + * @since v8.4.0 + */ + readonly headers: IncomingHttpHeaders; + /** + * In case of server request, the HTTP version sent by the client. In the case of + * client response, the HTTP version of the connected-to server. Returns`'2.0'`. + * + * Also `message.httpVersionMajor` is the first integer and`message.httpVersionMinor` is the second. + * @since v8.4.0 + */ + readonly httpVersion: string; + readonly httpVersionMinor: number; + readonly httpVersionMajor: number; + /** + * The request method as a string. Read-only. Examples: `'GET'`, `'DELETE'`. + * @since v8.4.0 + */ + readonly method: string; + /** + * The raw request/response headers list exactly as they were received. + * + * The keys and values are in the same list. It is _not_ a + * list of tuples. So, the even-numbered offsets are key values, and the + * odd-numbered offsets are the associated values. + * + * Header names are not lowercased, and duplicates are not merged. + * + * ```js + * // Prints something like: + * // + * // [ 'user-agent', + * // 'this is invalid because there can be only one', + * // 'User-Agent', + * // 'curl/7.22.0', + * // 'Host', + * // '127.0.0.1:8000', + * // 'ACCEPT', + * // '*' ] + * console.log(request.rawHeaders); + * ``` + * @since v8.4.0 + */ + readonly rawHeaders: string[]; + /** + * The raw request/response trailer keys and values exactly as they were + * received. Only populated at the `'end'` event. + * @since v8.4.0 + */ + readonly rawTrailers: string[]; + /** + * The request scheme pseudo header field indicating the scheme + * portion of the target URL. + * @since v8.4.0 + */ + readonly scheme: string; + /** + * Returns a `Proxy` object that acts as a `net.Socket` (or `tls.TLSSocket`) but + * applies getters, setters, and methods based on HTTP/2 logic. + * + * `destroyed`, `readable`, and `writable` properties will be retrieved from and + * set on `request.stream`. + * + * `destroy`, `emit`, `end`, `on` and `once` methods will be called on`request.stream`. + * + * `setTimeout` method will be called on `request.stream.session`. 
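The compatibility-layer request properties documented in this class map directly onto the HTTP/2 pseudo-headers; a small sketch of reading them (port 8000 is illustrative):

```js
const http2 = require('http2');

// Compatibility API: req is an Http2ServerRequest, res an Http2ServerResponse.
const server = http2.createServer((req, res) => {
  console.log(req.method);     // e.g. 'GET'
  console.log(req.scheme);     // 'https' behind createSecureServer(), 'http' here
  console.log(req.authority);  // taken from :authority, falling back to host
  console.log(req.url);        // e.g. '/status?name=ryan'
  console.log(req.headers['user-agent']);

  res.end('logged');
});

server.listen(8000);
```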
+ * + * `pause`, `read`, `resume`, and `write` will throw an error with code`ERR_HTTP2_NO_SOCKET_MANIPULATION`. See `Http2Session and Sockets` for + * more information. + * + * All other interactions will be routed directly to the socket. With TLS support, + * use `request.socket.getPeerCertificate()` to obtain the client's + * authentication details. + * @since v8.4.0 + */ + readonly socket: net.Socket | tls.TLSSocket; + /** + * The `Http2Stream` object backing the request. + * @since v8.4.0 + */ + readonly stream: ServerHttp2Stream; + /** + * The request/response trailers object. Only populated at the `'end'` event. + * @since v8.4.0 + */ + readonly trailers: IncomingHttpHeaders; + /** + * Request URL string. This contains only the URL that is present in the actual + * HTTP request. If the request is: + * + * ```http + * GET /status?name=ryan HTTP/1.1 + * Accept: text/plain + * ``` + * + * Then `request.url` will be: + * + * ```js + * '/status?name=ryan' + * ``` + * + * To parse the url into its parts, `new URL()` can be used: + * + * ```console + * $ node + * > new URL('/status?name=ryan', 'http://example.com') + * URL { + * href: 'http://example.com/status?name=ryan', + * origin: 'http://example.com', + * protocol: 'http:', + * username: '', + * password: '', + * host: 'example.com', + * hostname: 'example.com', + * port: '', + * pathname: '/status', + * search: '?name=ryan', + * searchParams: URLSearchParams { 'name' => 'ryan' }, + * hash: '' + * } + * ``` + * @since v8.4.0 + */ + url: string; + /** + * Sets the `Http2Stream`'s timeout value to `msecs`. If a callback is + * provided, then it is added as a listener on the `'timeout'` event on + * the response object. + * + * If no `'timeout'` listener is added to the request, the response, or + * the server, then `Http2Stream` s are destroyed when they time out. If a + * handler is assigned to the request, the response, or the server's `'timeout'`events, timed out sockets must be handled explicitly. 
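A sketch of the per-request timeout behaviour described above; the 30-second value and 408 status are illustrative choices.

```js
const http2 = require('http2');

const server = http2.createServer((req, res) => {
  // With a 'timeout' handler attached, the Http2Stream is NOT destroyed
  // automatically; the handler has to deal with the stream itself.
  req.setTimeout(30_000, () => {
    if (!res.writableEnded) {
      res.statusCode = 408;
      res.end('request timed out');
    }
  });

  req.on('end', () => {
    if (!res.writableEnded) res.end('done');
  });
  req.resume(); // drain the body so 'end' can fire
});

server.listen(8000);
```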
+ * @since v8.4.0 + */ + setTimeout(msecs: number, callback?: () => void): void; + read(size?: number): Buffer | string | null; + addListener(event: 'aborted', listener: (hadError: boolean, code: number) => void): this; + addListener(event: 'close', listener: () => void): this; + addListener(event: 'data', listener: (chunk: Buffer | string) => void): this; + addListener(event: 'end', listener: () => void): this; + addListener(event: 'readable', listener: () => void): this; + addListener(event: 'error', listener: (err: Error) => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: 'aborted', hadError: boolean, code: number): boolean; + emit(event: 'close'): boolean; + emit(event: 'data', chunk: Buffer | string): boolean; + emit(event: 'end'): boolean; + emit(event: 'readable'): boolean; + emit(event: 'error', err: Error): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: 'aborted', listener: (hadError: boolean, code: number) => void): this; + on(event: 'close', listener: () => void): this; + on(event: 'data', listener: (chunk: Buffer | string) => void): this; + on(event: 'end', listener: () => void): this; + on(event: 'readable', listener: () => void): this; + on(event: 'error', listener: (err: Error) => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: 'aborted', listener: (hadError: boolean, code: number) => void): this; + once(event: 'close', listener: () => void): this; + once(event: 'data', listener: (chunk: Buffer | string) => void): this; + once(event: 'end', listener: () => void): this; + once(event: 'readable', listener: () => void): this; + once(event: 'error', listener: (err: Error) => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: 'aborted', listener: (hadError: boolean, code: number) => void): this; + prependListener(event: 'close', listener: () => void): this; + prependListener(event: 'data', listener: (chunk: Buffer | string) => void): this; + prependListener(event: 'end', listener: () => void): this; + prependListener(event: 'readable', listener: () => void): this; + prependListener(event: 'error', listener: (err: Error) => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener(event: 'aborted', listener: (hadError: boolean, code: number) => void): this; + prependOnceListener(event: 'close', listener: () => void): this; + prependOnceListener(event: 'data', listener: (chunk: Buffer | string) => void): this; + prependOnceListener(event: 'end', listener: () => void): this; + prependOnceListener(event: 'readable', listener: () => void): this; + prependOnceListener(event: 'error', listener: (err: Error) => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + /** + * This object is created internally by an HTTP server, not by the user. It is + * passed as the second parameter to the `'request'` event. + * @since v8.4.0 + */ + export class Http2ServerResponse extends stream.Writable { + constructor(stream: ServerHttp2Stream); + /** + * See `response.socket`. + * @since v8.4.0 + * @deprecated Since v13.0.0 - Use `socket`. + */ + readonly connection: net.Socket | tls.TLSSocket; + /** + * Boolean value that indicates whether the response has completed. Starts + * as `false`. After `response.end()` executes, the value will be `true`. 
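The `'data'`, `'end'` and `'aborted'` events declared above are the usual way to consume a request body through the compatibility layer; a minimal sketch (port illustrative):

```js
const http2 = require('http2');

const server = http2.createServer((req, res) => {
  req.setEncoding('utf8');

  let body = '';
  req.on('data', (chunk) => { body += chunk; });

  // hadError / code mirror the listener signature declared above.
  req.on('aborted', (hadError, code) => {
    console.log('client went away', { hadError, code });
  });

  req.on('end', () => {
    res.writeHead(200, { 'content-type': 'text/plain; charset=utf-8' });
    res.end(`received ${Buffer.byteLength(body)} bytes`);
  });
});

server.listen(8000);
```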
+ * @since v8.4.0 + * @deprecated Since v13.4.0,v12.16.0 - Use `writableEnded`. + */ + readonly finished: boolean; + /** + * True if headers were sent, false otherwise (read-only). + * @since v8.4.0 + */ + readonly headersSent: boolean; + /** + * A reference to the original HTTP2 request object. + * @since v15.7.0 + */ + readonly req: Http2ServerRequest; + /** + * Returns a `Proxy` object that acts as a `net.Socket` (or `tls.TLSSocket`) but + * applies getters, setters, and methods based on HTTP/2 logic. + * + * `destroyed`, `readable`, and `writable` properties will be retrieved from and + * set on `response.stream`. + * + * `destroy`, `emit`, `end`, `on` and `once` methods will be called on`response.stream`. + * + * `setTimeout` method will be called on `response.stream.session`. + * + * `pause`, `read`, `resume`, and `write` will throw an error with code`ERR_HTTP2_NO_SOCKET_MANIPULATION`. See `Http2Session and Sockets` for + * more information. + * + * All other interactions will be routed directly to the socket. + * + * ```js + * const http2 = require('http2'); + * const server = http2.createServer((req, res) => { + * const ip = req.socket.remoteAddress; + * const port = req.socket.remotePort; + * res.end(`Your IP address is ${ip} and your source port is ${port}.`); + * }).listen(3000); + * ``` + * @since v8.4.0 + */ + readonly socket: net.Socket | tls.TLSSocket; + /** + * The `Http2Stream` object backing the response. + * @since v8.4.0 + */ + readonly stream: ServerHttp2Stream; + /** + * When true, the Date header will be automatically generated and sent in + * the response if it is not already present in the headers. Defaults to true. + * + * This should only be disabled for testing; HTTP requires the Date header + * in responses. + * @since v8.4.0 + */ + sendDate: boolean; + /** + * When using implicit headers (not calling `response.writeHead()` explicitly), + * this property controls the status code that will be sent to the client when + * the headers get flushed. + * + * ```js + * response.statusCode = 404; + * ``` + * + * After response header was sent to the client, this property indicates the + * status code which was sent out. + * @since v8.4.0 + */ + statusCode: number; + /** + * Status message is not supported by HTTP/2 (RFC 7540 8.1.2.4). It returns + * an empty string. + * @since v8.4.0 + */ + statusMessage: ''; + /** + * This method adds HTTP trailing headers (a header but at the end of the + * message) to the response. + * + * Attempting to set a header field name or value that contains invalid characters + * will result in a `TypeError` being thrown. + * @since v8.4.0 + */ + addTrailers(trailers: OutgoingHttpHeaders): void; + /** + * This method signals to the server that all of the response headers and body + * have been sent; that server should consider this message complete. + * The method, `response.end()`, MUST be called on each response. + * + * If `data` is specified, it is equivalent to calling `response.write(data, encoding)` followed by `response.end(callback)`. + * + * If `callback` is specified, it will be called when the response stream + * is finished. + * @since v8.4.0 + */ + end(callback?: () => void): this; + end(data: string | Uint8Array, callback?: () => void): this; + end(data: string | Uint8Array, encoding: BufferEncoding, callback?: () => void): this; + /** + * Reads out a header that has already been queued but not sent to the client. + * The name is case-insensitive. 
+ * + * ```js + * const contentType = response.getHeader('content-type'); + * ``` + * @since v8.4.0 + */ + getHeader(name: string): string; + /** + * Returns an array containing the unique names of the current outgoing headers. + * All header names are lowercase. + * + * ```js + * response.setHeader('Foo', 'bar'); + * response.setHeader('Set-Cookie', ['foo=bar', 'bar=baz']); + * + * const headerNames = response.getHeaderNames(); + * // headerNames === ['foo', 'set-cookie'] + * ``` + * @since v8.4.0 + */ + getHeaderNames(): string[]; + /** + * Returns a shallow copy of the current outgoing headers. Since a shallow copy + * is used, array values may be mutated without additional calls to various + * header-related http module methods. The keys of the returned object are the + * header names and the values are the respective header values. All header names + * are lowercase. + * + * The object returned by the `response.getHeaders()` method _does not_ prototypically inherit from the JavaScript `Object`. This means that typical `Object` methods such as `obj.toString()`, + * `obj.hasOwnProperty()`, and others + * are not defined and _will not work_. + * + * ```js + * response.setHeader('Foo', 'bar'); + * response.setHeader('Set-Cookie', ['foo=bar', 'bar=baz']); + * + * const headers = response.getHeaders(); + * // headers === { foo: 'bar', 'set-cookie': ['foo=bar', 'bar=baz'] } + * ``` + * @since v8.4.0 + */ + getHeaders(): OutgoingHttpHeaders; + /** + * Returns `true` if the header identified by `name` is currently set in the + * outgoing headers. The header name matching is case-insensitive. + * + * ```js + * const hasContentType = response.hasHeader('content-type'); + * ``` + * @since v8.4.0 + */ + hasHeader(name: string): boolean; + /** + * Removes a header that has been queued for implicit sending. + * + * ```js + * response.removeHeader('Content-Encoding'); + * ``` + * @since v8.4.0 + */ + removeHeader(name: string): void; + /** + * Sets a single header value for implicit headers. If this header already exists + * in the to-be-sent headers, its value will be replaced. Use an array of strings + * here to send multiple headers with the same name. + * + * ```js + * response.setHeader('Content-Type', 'text/html; charset=utf-8'); + * ``` + * + * or + * + * ```js + * response.setHeader('Set-Cookie', ['type=ninja', 'language=javascript']); + * ``` + * + * Attempting to set a header field name or value that contains invalid characters + * will result in a `TypeError` being thrown. + * + * When headers have been set with `response.setHeader()`, they will be merged + * with any headers passed to `response.writeHead()`, with the headers passed + * to `response.writeHead()` given precedence. + * + * ```js + * // Returns content-type = text/plain + * const server = http2.createServer((req, res) => { + * res.setHeader('Content-Type', 'text/html; charset=utf-8'); + * res.setHeader('X-Foo', 'bar'); + * res.writeHead(200, { 'Content-Type': 'text/plain; charset=utf-8' }); + * res.end('ok'); + * }); + * ``` + * @since v8.4.0 + */ + setHeader(name: string, value: number | string | ReadonlyArray<string>): void; + /** + * Sets the `Http2Stream`'s timeout value to `msecs`. If a callback is + * provided, then it is added as a listener on the `'timeout'` event on + * the response object. + * + * If no `'timeout'` listener is added to the request, the response, or + * the server, then `Http2Stream`s are destroyed when they time out. If a + * handler is assigned to the request, the response, or the server's `'timeout'` events, timed out sockets must be handled explicitly. + * @since v8.4.0 + */ + setTimeout(msecs: number, callback?: () => void): void; + /** + * If this method is called and `response.writeHead()` has not been called, + * it will switch to implicit header mode and flush the implicit headers. + * + * This sends a chunk of the response body. This method may + * be called multiple times to provide successive parts of the body. + * + * In the `http` module, the response body is omitted when the + * request is a HEAD request. Similarly, the `204` and `304` responses _must not_ include a message body. + * + * `chunk` can be a string or a buffer. If `chunk` is a string, + * the second parameter specifies how to encode it into a byte stream. + * By default the `encoding` is `'utf8'`. `callback` will be called when this chunk + * of data is flushed. + * + * This is the raw HTTP body and has nothing to do with higher-level multi-part + * body encodings that may be used. + * + * The first time `response.write()` is called, it will send the buffered + * header information and the first chunk of the body to the client. The second + * time `response.write()` is called, Node.js assumes data will be streamed, + * and sends the new data separately. That is, the response is buffered up to the + * first chunk of the body. + * + * Returns `true` if the entire data was flushed successfully to the kernel + * buffer. Returns `false` if all or part of the data was queued in user memory. `'drain'` will be emitted when the buffer is free again. + * @since v8.4.0 + */ + write(chunk: string | Uint8Array, callback?: (err: Error) => void): boolean; + write(chunk: string | Uint8Array, encoding: BufferEncoding, callback?: (err: Error) => void): boolean; + /** + * Sends a status `100 Continue` to the client, indicating that the request body + * should be sent. See the `'checkContinue'` event on `Http2Server` and `Http2SecureServer`. + * @since v8.4.0 + */ + writeContinue(): void; + /** + * Sends a status `103 Early Hints` to the client with a Link header, + * indicating that the user agent can preload/preconnect the linked resources. + * The `hints` is an object containing the values of headers to be sent with + * early hints message. + * + * Example: + * + * ```js + * const earlyHintsLink = '</styles.css>; rel=preload; as=style'; + * response.writeEarlyHints({ + * 'link': earlyHintsLink, + * }); + * + * const earlyHintsLinks = [ + * '</styles.css>; rel=preload; as=style', + * '</scripts.js>; rel=preload; as=script', + * ]; + * response.writeEarlyHints({ + * 'link': earlyHintsLinks, + * 'x-trace-id': 'id for diagnostics' + * }); + * ``` + * + * @since v18.11.0 + * @param hints An object containing the values of headers + */ + writeEarlyHints(hints: Record<string, string | string[]>): void; + /** + * Sends a response header to the request. The status code is a 3-digit HTTP + * status code, like `404`. The last argument, `headers`, are the response headers. + * + * Returns a reference to the `Http2ServerResponse`, so that calls can be chained. + * + * For compatibility with `HTTP/1`, a human-readable `statusMessage` may be + * passed as the second argument. However, because the `statusMessage` has no + * meaning within HTTP/2, the argument will have no effect and a process warning + * will be emitted.
+ * + * ```js + * const body = 'hello world'; + * response.writeHead(200, { + * 'Content-Length': Buffer.byteLength(body), + * 'Content-Type': 'text/plain; charset=utf-8', + * }); + * ``` + * + * `Content-Length` is given in bytes not characters. The`Buffer.byteLength()` API may be used to determine the number of bytes in a + * given encoding. On outbound messages, Node.js does not check if Content-Length + * and the length of the body being transmitted are equal or not. However, when + * receiving messages, Node.js will automatically reject messages when the`Content-Length` does not match the actual payload size. + * + * This method may be called at most one time on a message before `response.end()` is called. + * + * If `response.write()` or `response.end()` are called before calling + * this, the implicit/mutable headers will be calculated and call this function. + * + * When headers have been set with `response.setHeader()`, they will be merged + * with any headers passed to `response.writeHead()`, with the headers passed + * to `response.writeHead()` given precedence. + * + * ```js + * // Returns content-type = text/plain + * const server = http2.createServer((req, res) => { + * res.setHeader('Content-Type', 'text/html; charset=utf-8'); + * res.setHeader('X-Foo', 'bar'); + * res.writeHead(200, { 'Content-Type': 'text/plain; charset=utf-8' }); + * res.end('ok'); + * }); + * ``` + * + * Attempting to set a header field name or value that contains invalid characters + * will result in a `TypeError` being thrown. + * @since v8.4.0 + */ + writeHead(statusCode: number, headers?: OutgoingHttpHeaders): this; + writeHead(statusCode: number, statusMessage: string, headers?: OutgoingHttpHeaders): this; + /** + * Call `http2stream.pushStream()` with the given headers, and wrap the + * given `Http2Stream` on a newly created `Http2ServerResponse` as the callback + * parameter if successful. When `Http2ServerRequest` is closed, the callback is + * called with an error `ERR_HTTP2_INVALID_STREAM`. 
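`createPushResponse()` has no inline example; a minimal server-push sketch follows (the paths and content are illustrative, and the client must have push enabled):

```js
const http2 = require('http2');

const server = http2.createServer((req, res) => {
  if (req.url === '/') {
    // Push /style.css alongside the main document.
    res.createPushResponse({ ':path': '/style.css' }, (err, pushRes) => {
      if (err) return; // e.g. push disabled, or the request stream already closed
      pushRes.writeHead(200, { 'content-type': 'text/css' });
      pushRes.end('body { color: rebeccapurple; }');
    });
    res.setHeader('content-type', 'text/html; charset=utf-8');
    res.end('<link rel="stylesheet" href="/style.css">');
  } else {
    res.end('nothing to push here');
  }
});

server.listen(8000);
```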
+ * @since v8.4.0 + * @param headers An object describing the headers + * @param callback Called once `http2stream.pushStream()` is finished, or either when the attempt to create the pushed `Http2Stream` has failed or has been rejected, or the state of + * `Http2ServerRequest` is closed prior to calling the `http2stream.pushStream()` method + */ + createPushResponse(headers: OutgoingHttpHeaders, callback: (err: Error | null, res: Http2ServerResponse) => void): void; + addListener(event: 'close', listener: () => void): this; + addListener(event: 'drain', listener: () => void): this; + addListener(event: 'error', listener: (error: Error) => void): this; + addListener(event: 'finish', listener: () => void): this; + addListener(event: 'pipe', listener: (src: stream.Readable) => void): this; + addListener(event: 'unpipe', listener: (src: stream.Readable) => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: 'close'): boolean; + emit(event: 'drain'): boolean; + emit(event: 'error', error: Error): boolean; + emit(event: 'finish'): boolean; + emit(event: 'pipe', src: stream.Readable): boolean; + emit(event: 'unpipe', src: stream.Readable): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: 'close', listener: () => void): this; + on(event: 'drain', listener: () => void): this; + on(event: 'error', listener: (error: Error) => void): this; + on(event: 'finish', listener: () => void): this; + on(event: 'pipe', listener: (src: stream.Readable) => void): this; + on(event: 'unpipe', listener: (src: stream.Readable) => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: 'close', listener: () => void): this; + once(event: 'drain', listener: () => void): this; + once(event: 'error', listener: (error: Error) => void): this; + once(event: 'finish', listener: () => void): this; + once(event: 'pipe', listener: (src: stream.Readable) => void): this; + once(event: 'unpipe', listener: (src: stream.Readable) => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: 'close', listener: () => void): this; + prependListener(event: 'drain', listener: () => void): this; + prependListener(event: 'error', listener: (error: Error) => void): this; + prependListener(event: 'finish', listener: () => void): this; + prependListener(event: 'pipe', listener: (src: stream.Readable) => void): this; + prependListener(event: 'unpipe', listener: (src: stream.Readable) => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener(event: 'close', listener: () => void): this; + prependOnceListener(event: 'drain', listener: () => void): this; + prependOnceListener(event: 'error', listener: (error: Error) => void): this; + prependOnceListener(event: 'finish', listener: () => void): this; + prependOnceListener(event: 'pipe', listener: (src: stream.Readable) => void): this; + prependOnceListener(event: 'unpipe', listener: (src: stream.Readable) => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + export namespace constants { + const NGHTTP2_SESSION_SERVER: number; + const NGHTTP2_SESSION_CLIENT: number; + const NGHTTP2_STREAM_STATE_IDLE: number; + const NGHTTP2_STREAM_STATE_OPEN: number; + const NGHTTP2_STREAM_STATE_RESERVED_LOCAL: number; + const NGHTTP2_STREAM_STATE_RESERVED_REMOTE: number; + const NGHTTP2_STREAM_STATE_HALF_CLOSED_LOCAL: 
number; + const NGHTTP2_STREAM_STATE_HALF_CLOSED_REMOTE: number; + const NGHTTP2_STREAM_STATE_CLOSED: number; + const NGHTTP2_NO_ERROR: number; + const NGHTTP2_PROTOCOL_ERROR: number; + const NGHTTP2_INTERNAL_ERROR: number; + const NGHTTP2_FLOW_CONTROL_ERROR: number; + const NGHTTP2_SETTINGS_TIMEOUT: number; + const NGHTTP2_STREAM_CLOSED: number; + const NGHTTP2_FRAME_SIZE_ERROR: number; + const NGHTTP2_REFUSED_STREAM: number; + const NGHTTP2_CANCEL: number; + const NGHTTP2_COMPRESSION_ERROR: number; + const NGHTTP2_CONNECT_ERROR: number; + const NGHTTP2_ENHANCE_YOUR_CALM: number; + const NGHTTP2_INADEQUATE_SECURITY: number; + const NGHTTP2_HTTP_1_1_REQUIRED: number; + const NGHTTP2_ERR_FRAME_SIZE_ERROR: number; + const NGHTTP2_FLAG_NONE: number; + const NGHTTP2_FLAG_END_STREAM: number; + const NGHTTP2_FLAG_END_HEADERS: number; + const NGHTTP2_FLAG_ACK: number; + const NGHTTP2_FLAG_PADDED: number; + const NGHTTP2_FLAG_PRIORITY: number; + const DEFAULT_SETTINGS_HEADER_TABLE_SIZE: number; + const DEFAULT_SETTINGS_ENABLE_PUSH: number; + const DEFAULT_SETTINGS_INITIAL_WINDOW_SIZE: number; + const DEFAULT_SETTINGS_MAX_FRAME_SIZE: number; + const MAX_MAX_FRAME_SIZE: number; + const MIN_MAX_FRAME_SIZE: number; + const MAX_INITIAL_WINDOW_SIZE: number; + const NGHTTP2_DEFAULT_WEIGHT: number; + const NGHTTP2_SETTINGS_HEADER_TABLE_SIZE: number; + const NGHTTP2_SETTINGS_ENABLE_PUSH: number; + const NGHTTP2_SETTINGS_MAX_CONCURRENT_STREAMS: number; + const NGHTTP2_SETTINGS_INITIAL_WINDOW_SIZE: number; + const NGHTTP2_SETTINGS_MAX_FRAME_SIZE: number; + const NGHTTP2_SETTINGS_MAX_HEADER_LIST_SIZE: number; + const PADDING_STRATEGY_NONE: number; + const PADDING_STRATEGY_MAX: number; + const PADDING_STRATEGY_CALLBACK: number; + const HTTP2_HEADER_STATUS: string; + const HTTP2_HEADER_METHOD: string; + const HTTP2_HEADER_AUTHORITY: string; + const HTTP2_HEADER_SCHEME: string; + const HTTP2_HEADER_PATH: string; + const HTTP2_HEADER_ACCEPT_CHARSET: string; + const HTTP2_HEADER_ACCEPT_ENCODING: string; + const HTTP2_HEADER_ACCEPT_LANGUAGE: string; + const HTTP2_HEADER_ACCEPT_RANGES: string; + const HTTP2_HEADER_ACCEPT: string; + const HTTP2_HEADER_ACCESS_CONTROL_ALLOW_ORIGIN: string; + const HTTP2_HEADER_AGE: string; + const HTTP2_HEADER_ALLOW: string; + const HTTP2_HEADER_AUTHORIZATION: string; + const HTTP2_HEADER_CACHE_CONTROL: string; + const HTTP2_HEADER_CONNECTION: string; + const HTTP2_HEADER_CONTENT_DISPOSITION: string; + const HTTP2_HEADER_CONTENT_ENCODING: string; + const HTTP2_HEADER_CONTENT_LANGUAGE: string; + const HTTP2_HEADER_CONTENT_LENGTH: string; + const HTTP2_HEADER_CONTENT_LOCATION: string; + const HTTP2_HEADER_CONTENT_MD5: string; + const HTTP2_HEADER_CONTENT_RANGE: string; + const HTTP2_HEADER_CONTENT_TYPE: string; + const HTTP2_HEADER_COOKIE: string; + const HTTP2_HEADER_DATE: string; + const HTTP2_HEADER_ETAG: string; + const HTTP2_HEADER_EXPECT: string; + const HTTP2_HEADER_EXPIRES: string; + const HTTP2_HEADER_FROM: string; + const HTTP2_HEADER_HOST: string; + const HTTP2_HEADER_IF_MATCH: string; + const HTTP2_HEADER_IF_MODIFIED_SINCE: string; + const HTTP2_HEADER_IF_NONE_MATCH: string; + const HTTP2_HEADER_IF_RANGE: string; + const HTTP2_HEADER_IF_UNMODIFIED_SINCE: string; + const HTTP2_HEADER_LAST_MODIFIED: string; + const HTTP2_HEADER_LINK: string; + const HTTP2_HEADER_LOCATION: string; + const HTTP2_HEADER_MAX_FORWARDS: string; + const HTTP2_HEADER_PREFER: string; + const HTTP2_HEADER_PROXY_AUTHENTICATE: string; + const HTTP2_HEADER_PROXY_AUTHORIZATION: string; + const 
HTTP2_HEADER_RANGE: string; + const HTTP2_HEADER_REFERER: string; + const HTTP2_HEADER_REFRESH: string; + const HTTP2_HEADER_RETRY_AFTER: string; + const HTTP2_HEADER_SERVER: string; + const HTTP2_HEADER_SET_COOKIE: string; + const HTTP2_HEADER_STRICT_TRANSPORT_SECURITY: string; + const HTTP2_HEADER_TRANSFER_ENCODING: string; + const HTTP2_HEADER_TE: string; + const HTTP2_HEADER_UPGRADE: string; + const HTTP2_HEADER_USER_AGENT: string; + const HTTP2_HEADER_VARY: string; + const HTTP2_HEADER_VIA: string; + const HTTP2_HEADER_WWW_AUTHENTICATE: string; + const HTTP2_HEADER_HTTP2_SETTINGS: string; + const HTTP2_HEADER_KEEP_ALIVE: string; + const HTTP2_HEADER_PROXY_CONNECTION: string; + const HTTP2_METHOD_ACL: string; + const HTTP2_METHOD_BASELINE_CONTROL: string; + const HTTP2_METHOD_BIND: string; + const HTTP2_METHOD_CHECKIN: string; + const HTTP2_METHOD_CHECKOUT: string; + const HTTP2_METHOD_CONNECT: string; + const HTTP2_METHOD_COPY: string; + const HTTP2_METHOD_DELETE: string; + const HTTP2_METHOD_GET: string; + const HTTP2_METHOD_HEAD: string; + const HTTP2_METHOD_LABEL: string; + const HTTP2_METHOD_LINK: string; + const HTTP2_METHOD_LOCK: string; + const HTTP2_METHOD_MERGE: string; + const HTTP2_METHOD_MKACTIVITY: string; + const HTTP2_METHOD_MKCALENDAR: string; + const HTTP2_METHOD_MKCOL: string; + const HTTP2_METHOD_MKREDIRECTREF: string; + const HTTP2_METHOD_MKWORKSPACE: string; + const HTTP2_METHOD_MOVE: string; + const HTTP2_METHOD_OPTIONS: string; + const HTTP2_METHOD_ORDERPATCH: string; + const HTTP2_METHOD_PATCH: string; + const HTTP2_METHOD_POST: string; + const HTTP2_METHOD_PRI: string; + const HTTP2_METHOD_PROPFIND: string; + const HTTP2_METHOD_PROPPATCH: string; + const HTTP2_METHOD_PUT: string; + const HTTP2_METHOD_REBIND: string; + const HTTP2_METHOD_REPORT: string; + const HTTP2_METHOD_SEARCH: string; + const HTTP2_METHOD_TRACE: string; + const HTTP2_METHOD_UNBIND: string; + const HTTP2_METHOD_UNCHECKOUT: string; + const HTTP2_METHOD_UNLINK: string; + const HTTP2_METHOD_UNLOCK: string; + const HTTP2_METHOD_UPDATE: string; + const HTTP2_METHOD_UPDATEREDIRECTREF: string; + const HTTP2_METHOD_VERSION_CONTROL: string; + const HTTP_STATUS_CONTINUE: number; + const HTTP_STATUS_SWITCHING_PROTOCOLS: number; + const HTTP_STATUS_PROCESSING: number; + const HTTP_STATUS_OK: number; + const HTTP_STATUS_CREATED: number; + const HTTP_STATUS_ACCEPTED: number; + const HTTP_STATUS_NON_AUTHORITATIVE_INFORMATION: number; + const HTTP_STATUS_NO_CONTENT: number; + const HTTP_STATUS_RESET_CONTENT: number; + const HTTP_STATUS_PARTIAL_CONTENT: number; + const HTTP_STATUS_MULTI_STATUS: number; + const HTTP_STATUS_ALREADY_REPORTED: number; + const HTTP_STATUS_IM_USED: number; + const HTTP_STATUS_MULTIPLE_CHOICES: number; + const HTTP_STATUS_MOVED_PERMANENTLY: number; + const HTTP_STATUS_FOUND: number; + const HTTP_STATUS_SEE_OTHER: number; + const HTTP_STATUS_NOT_MODIFIED: number; + const HTTP_STATUS_USE_PROXY: number; + const HTTP_STATUS_TEMPORARY_REDIRECT: number; + const HTTP_STATUS_PERMANENT_REDIRECT: number; + const HTTP_STATUS_BAD_REQUEST: number; + const HTTP_STATUS_UNAUTHORIZED: number; + const HTTP_STATUS_PAYMENT_REQUIRED: number; + const HTTP_STATUS_FORBIDDEN: number; + const HTTP_STATUS_NOT_FOUND: number; + const HTTP_STATUS_METHOD_NOT_ALLOWED: number; + const HTTP_STATUS_NOT_ACCEPTABLE: number; + const HTTP_STATUS_PROXY_AUTHENTICATION_REQUIRED: number; + const HTTP_STATUS_REQUEST_TIMEOUT: number; + const HTTP_STATUS_CONFLICT: number; + const HTTP_STATUS_GONE: number; + const 
HTTP_STATUS_LENGTH_REQUIRED: number; + const HTTP_STATUS_PRECONDITION_FAILED: number; + const HTTP_STATUS_PAYLOAD_TOO_LARGE: number; + const HTTP_STATUS_URI_TOO_LONG: number; + const HTTP_STATUS_UNSUPPORTED_MEDIA_TYPE: number; + const HTTP_STATUS_RANGE_NOT_SATISFIABLE: number; + const HTTP_STATUS_EXPECTATION_FAILED: number; + const HTTP_STATUS_TEAPOT: number; + const HTTP_STATUS_MISDIRECTED_REQUEST: number; + const HTTP_STATUS_UNPROCESSABLE_ENTITY: number; + const HTTP_STATUS_LOCKED: number; + const HTTP_STATUS_FAILED_DEPENDENCY: number; + const HTTP_STATUS_UNORDERED_COLLECTION: number; + const HTTP_STATUS_UPGRADE_REQUIRED: number; + const HTTP_STATUS_PRECONDITION_REQUIRED: number; + const HTTP_STATUS_TOO_MANY_REQUESTS: number; + const HTTP_STATUS_REQUEST_HEADER_FIELDS_TOO_LARGE: number; + const HTTP_STATUS_UNAVAILABLE_FOR_LEGAL_REASONS: number; + const HTTP_STATUS_INTERNAL_SERVER_ERROR: number; + const HTTP_STATUS_NOT_IMPLEMENTED: number; + const HTTP_STATUS_BAD_GATEWAY: number; + const HTTP_STATUS_SERVICE_UNAVAILABLE: number; + const HTTP_STATUS_GATEWAY_TIMEOUT: number; + const HTTP_STATUS_HTTP_VERSION_NOT_SUPPORTED: number; + const HTTP_STATUS_VARIANT_ALSO_NEGOTIATES: number; + const HTTP_STATUS_INSUFFICIENT_STORAGE: number; + const HTTP_STATUS_LOOP_DETECTED: number; + const HTTP_STATUS_BANDWIDTH_LIMIT_EXCEEDED: number; + const HTTP_STATUS_NOT_EXTENDED: number; + const HTTP_STATUS_NETWORK_AUTHENTICATION_REQUIRED: number; + } + /** + * This symbol can be set as a property on the HTTP/2 headers object with + * an array value in order to provide a list of headers considered sensitive. + */ + export const sensitiveHeaders: symbol; + /** + * Returns an object containing the default settings for an `Http2Session`instance. This method returns a new object instance every time it is called + * so instances returned may be safely modified for use. + * @since v8.4.0 + */ + export function getDefaultSettings(): Settings; + /** + * Returns a `Buffer` instance containing serialized representation of the given + * HTTP/2 settings as specified in the [HTTP/2](https://tools.ietf.org/html/rfc7540) specification. This is intended + * for use with the `HTTP2-Settings` header field. + * + * ```js + * const http2 = require('http2'); + * + * const packed = http2.getPackedSettings({ enablePush: false }); + * + * console.log(packed.toString('base64')); + * // Prints: AAIAAAAA + * ``` + * @since v8.4.0 + */ + export function getPackedSettings(settings: Settings): Buffer; + /** + * Returns a `HTTP/2 Settings Object` containing the deserialized settings from + * the given `Buffer` as generated by `http2.getPackedSettings()`. + * @since v8.4.0 + * @param buf The packed settings. + */ + export function getUnpackedSettings(buf: Uint8Array): Settings; + /** + * Returns a `net.Server` instance that creates and manages `Http2Session`instances. + * + * Since there are no browsers known that support [unencrypted HTTP/2](https://http2.github.io/faq/#does-http2-require-encryption), the use of {@link createSecureServer} is necessary when + * communicating + * with browser clients. + * + * ```js + * const http2 = require('http2'); + * + * // Create an unencrypted HTTP/2 server. + * // Since there are no browsers known that support + * // unencrypted HTTP/2, the use of `http2.createSecureServer()` + * // is necessary when communicating with browser clients. 
+ * const server = http2.createServer(); + * + * server.on('stream', (stream, headers) => { + * stream.respond({ + * 'content-type': 'text/html; charset=utf-8', + * ':status': 200 + * }); + * stream.end('<h1>Hello World</h1>
'); + * }); + * + * server.listen(80); + * ``` + * @since v8.4.0 + * @param onRequestHandler See `Compatibility API` + */ + export function createServer(onRequestHandler?: (request: Http2ServerRequest, response: Http2ServerResponse) => void): Http2Server; + export function createServer(options: ServerOptions, onRequestHandler?: (request: Http2ServerRequest, response: Http2ServerResponse) => void): Http2Server; + /** + * Returns a `tls.Server` instance that creates and manages `Http2Session`instances. + * + * ```js + * const http2 = require('http2'); + * const fs = require('fs'); + * + * const options = { + * key: fs.readFileSync('server-key.pem'), + * cert: fs.readFileSync('server-cert.pem') + * }; + * + * // Create a secure HTTP/2 server + * const server = http2.createSecureServer(options); + * + * server.on('stream', (stream, headers) => { + * stream.respond({ + * 'content-type': 'text/html; charset=utf-8', + * ':status': 200 + * }); + * stream.end('<h1>Hello World</h1>
'); + * }); + * + * server.listen(80); + * ``` + * @since v8.4.0 + * @param onRequestHandler See `Compatibility API` + */ + export function createSecureServer(onRequestHandler?: (request: Http2ServerRequest, response: Http2ServerResponse) => void): Http2SecureServer; + export function createSecureServer(options: SecureServerOptions, onRequestHandler?: (request: Http2ServerRequest, response: Http2ServerResponse) => void): Http2SecureServer; + /** + * Returns a `ClientHttp2Session` instance. + * + * ```js + * const http2 = require('http2'); + * const client = http2.connect('https://localhost:1234'); + * + * // Use the client + * + * client.close(); + * ``` + * @since v8.4.0 + * @param authority The remote HTTP/2 server to connect to. This must be in the form of a minimal, valid URL with the `http://` or `https://` prefix, host name, and IP port (if a non-default port + * is used). Userinfo (user ID and password), path, querystring, and fragment details in the URL will be ignored. + * @param listener Will be registered as a one-time listener of the {@link 'connect'} event. + */ + export function connect(authority: string | url.URL, listener: (session: ClientHttp2Session, socket: net.Socket | tls.TLSSocket) => void): ClientHttp2Session; + export function connect( + authority: string | url.URL, + options?: ClientSessionOptions | SecureClientSessionOptions, + listener?: (session: ClientHttp2Session, socket: net.Socket | tls.TLSSocket) => void + ): ClientHttp2Session; +} +declare module 'node:http2' { + export * from 'http2'; +} diff --git a/node_modules/@types/node/ts4.8/https.d.ts b/node_modules/@types/node/ts4.8/https.d.ts new file mode 100755 index 0000000..aae4a95 --- /dev/null +++ b/node_modules/@types/node/ts4.8/https.d.ts @@ -0,0 +1,541 @@ +/** + * HTTPS is the HTTP protocol over TLS/SSL. In Node.js this is implemented as a + * separate module. + * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/https.js) + */ +declare module 'https' { + import { Duplex } from 'node:stream'; + import * as tls from 'node:tls'; + import * as http from 'node:http'; + import { URL } from 'node:url'; + type ServerOptions< + Request extends typeof http.IncomingMessage = typeof http.IncomingMessage, + Response extends typeof http.ServerResponse = typeof http.ServerResponse, + > = tls.SecureContextOptions & tls.TlsOptions & http.ServerOptions; + type RequestOptions = http.RequestOptions & + tls.SecureContextOptions & { + rejectUnauthorized?: boolean | undefined; // Defaults to true + servername?: string | undefined; // SNI TLS Extension + }; + interface AgentOptions extends http.AgentOptions, tls.ConnectionOptions { + rejectUnauthorized?: boolean | undefined; + maxCachedSessions?: number | undefined; + } + /** + * An `Agent` object for HTTPS similar to `http.Agent`. See {@link request} for more information. + * @since v0.4.5 + */ + class Agent extends http.Agent { + constructor(options?: AgentOptions); + options: AgentOptions; + } + interface Server< + Request extends typeof http.IncomingMessage = typeof http.IncomingMessage, + Response extends typeof http.ServerResponse = typeof http.ServerResponse, + > extends http.Server {} + /** + * See `http.Server` for more information. 
+ * @since v0.3.4 + */ + class Server< + Request extends typeof http.IncomingMessage = typeof http.IncomingMessage, + Response extends typeof http.ServerResponse = typeof http.ServerResponse, + > extends tls.Server { + constructor(requestListener?: http.RequestListener); + constructor( + options: ServerOptions, + requestListener?: http.RequestListener, + ); + /** + * Closes all connections connected to this server. + * @since v18.2.0 + */ + closeAllConnections(): void; + /** + * Closes all connections connected to this server which are not sending a request or waiting for a response. + * @since v18.2.0 + */ + closeIdleConnections(): void; + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: 'keylog', listener: (line: Buffer, tlsSocket: tls.TLSSocket) => void): this; + addListener( + event: 'newSession', + listener: (sessionId: Buffer, sessionData: Buffer, callback: (err: Error, resp: Buffer) => void) => void, + ): this; + addListener( + event: 'OCSPRequest', + listener: ( + certificate: Buffer, + issuer: Buffer, + callback: (err: Error | null, resp: Buffer) => void, + ) => void, + ): this; + addListener( + event: 'resumeSession', + listener: (sessionId: Buffer, callback: (err: Error, sessionData: Buffer) => void) => void, + ): this; + addListener(event: 'secureConnection', listener: (tlsSocket: tls.TLSSocket) => void): this; + addListener(event: 'tlsClientError', listener: (err: Error, tlsSocket: tls.TLSSocket) => void): this; + addListener(event: 'close', listener: () => void): this; + addListener(event: 'connection', listener: (socket: Duplex) => void): this; + addListener(event: 'error', listener: (err: Error) => void): this; + addListener(event: 'listening', listener: () => void): this; + addListener(event: 'checkContinue', listener: http.RequestListener): this; + addListener(event: 'checkExpectation', listener: http.RequestListener): this; + addListener(event: 'clientError', listener: (err: Error, socket: Duplex) => void): this; + addListener( + event: 'connect', + listener: (req: InstanceType, socket: Duplex, head: Buffer) => void, + ): this; + addListener(event: 'request', listener: http.RequestListener): this; + addListener( + event: 'upgrade', + listener: (req: InstanceType, socket: Duplex, head: Buffer) => void, + ): this; + emit(event: string, ...args: any[]): boolean; + emit(event: 'keylog', line: Buffer, tlsSocket: tls.TLSSocket): boolean; + emit( + event: 'newSession', + sessionId: Buffer, + sessionData: Buffer, + callback: (err: Error, resp: Buffer) => void, + ): boolean; + emit( + event: 'OCSPRequest', + certificate: Buffer, + issuer: Buffer, + callback: (err: Error | null, resp: Buffer) => void, + ): boolean; + emit(event: 'resumeSession', sessionId: Buffer, callback: (err: Error, sessionData: Buffer) => void): boolean; + emit(event: 'secureConnection', tlsSocket: tls.TLSSocket): boolean; + emit(event: 'tlsClientError', err: Error, tlsSocket: tls.TLSSocket): boolean; + emit(event: 'close'): boolean; + emit(event: 'connection', socket: Duplex): boolean; + emit(event: 'error', err: Error): boolean; + emit(event: 'listening'): boolean; + emit( + event: 'checkContinue', + req: InstanceType, + res: InstanceType & { req: InstanceType }, + ): boolean; + emit( + event: 'checkExpectation', + req: InstanceType, + res: InstanceType & { req: InstanceType }, + ): boolean; + emit(event: 'clientError', err: Error, socket: Duplex): boolean; + emit(event: 'connect', req: InstanceType, socket: Duplex, head: Buffer): boolean; + emit( + event: 
'request', + req: InstanceType, + res: InstanceType & { req: InstanceType }, + ): boolean; + emit(event: 'upgrade', req: InstanceType, socket: Duplex, head: Buffer): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: 'keylog', listener: (line: Buffer, tlsSocket: tls.TLSSocket) => void): this; + on( + event: 'newSession', + listener: (sessionId: Buffer, sessionData: Buffer, callback: (err: Error, resp: Buffer) => void) => void, + ): this; + on( + event: 'OCSPRequest', + listener: ( + certificate: Buffer, + issuer: Buffer, + callback: (err: Error | null, resp: Buffer) => void, + ) => void, + ): this; + on( + event: 'resumeSession', + listener: (sessionId: Buffer, callback: (err: Error, sessionData: Buffer) => void) => void, + ): this; + on(event: 'secureConnection', listener: (tlsSocket: tls.TLSSocket) => void): this; + on(event: 'tlsClientError', listener: (err: Error, tlsSocket: tls.TLSSocket) => void): this; + on(event: 'close', listener: () => void): this; + on(event: 'connection', listener: (socket: Duplex) => void): this; + on(event: 'error', listener: (err: Error) => void): this; + on(event: 'listening', listener: () => void): this; + on(event: 'checkContinue', listener: http.RequestListener): this; + on(event: 'checkExpectation', listener: http.RequestListener): this; + on(event: 'clientError', listener: (err: Error, socket: Duplex) => void): this; + on(event: 'connect', listener: (req: InstanceType, socket: Duplex, head: Buffer) => void): this; + on(event: 'request', listener: http.RequestListener): this; + on(event: 'upgrade', listener: (req: InstanceType, socket: Duplex, head: Buffer) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: 'keylog', listener: (line: Buffer, tlsSocket: tls.TLSSocket) => void): this; + once( + event: 'newSession', + listener: (sessionId: Buffer, sessionData: Buffer, callback: (err: Error, resp: Buffer) => void) => void, + ): this; + once( + event: 'OCSPRequest', + listener: ( + certificate: Buffer, + issuer: Buffer, + callback: (err: Error | null, resp: Buffer) => void, + ) => void, + ): this; + once( + event: 'resumeSession', + listener: (sessionId: Buffer, callback: (err: Error, sessionData: Buffer) => void) => void, + ): this; + once(event: 'secureConnection', listener: (tlsSocket: tls.TLSSocket) => void): this; + once(event: 'tlsClientError', listener: (err: Error, tlsSocket: tls.TLSSocket) => void): this; + once(event: 'close', listener: () => void): this; + once(event: 'connection', listener: (socket: Duplex) => void): this; + once(event: 'error', listener: (err: Error) => void): this; + once(event: 'listening', listener: () => void): this; + once(event: 'checkContinue', listener: http.RequestListener): this; + once(event: 'checkExpectation', listener: http.RequestListener): this; + once(event: 'clientError', listener: (err: Error, socket: Duplex) => void): this; + once(event: 'connect', listener: (req: InstanceType, socket: Duplex, head: Buffer) => void): this; + once(event: 'request', listener: http.RequestListener): this; + once(event: 'upgrade', listener: (req: InstanceType, socket: Duplex, head: Buffer) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: 'keylog', listener: (line: Buffer, tlsSocket: tls.TLSSocket) => void): this; + prependListener( + event: 'newSession', + listener: (sessionId: Buffer, sessionData: Buffer, callback: (err: Error, resp: Buffer) => void) => void, + ): this; + 
prependListener( + event: 'OCSPRequest', + listener: ( + certificate: Buffer, + issuer: Buffer, + callback: (err: Error | null, resp: Buffer) => void, + ) => void, + ): this; + prependListener( + event: 'resumeSession', + listener: (sessionId: Buffer, callback: (err: Error, sessionData: Buffer) => void) => void, + ): this; + prependListener(event: 'secureConnection', listener: (tlsSocket: tls.TLSSocket) => void): this; + prependListener(event: 'tlsClientError', listener: (err: Error, tlsSocket: tls.TLSSocket) => void): this; + prependListener(event: 'close', listener: () => void): this; + prependListener(event: 'connection', listener: (socket: Duplex) => void): this; + prependListener(event: 'error', listener: (err: Error) => void): this; + prependListener(event: 'listening', listener: () => void): this; + prependListener(event: 'checkContinue', listener: http.RequestListener): this; + prependListener(event: 'checkExpectation', listener: http.RequestListener): this; + prependListener(event: 'clientError', listener: (err: Error, socket: Duplex) => void): this; + prependListener( + event: 'connect', + listener: (req: InstanceType, socket: Duplex, head: Buffer) => void, + ): this; + prependListener(event: 'request', listener: http.RequestListener): this; + prependListener( + event: 'upgrade', + listener: (req: InstanceType, socket: Duplex, head: Buffer) => void, + ): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: 'keylog', listener: (line: Buffer, tlsSocket: tls.TLSSocket) => void): this; + prependOnceListener( + event: 'newSession', + listener: (sessionId: Buffer, sessionData: Buffer, callback: (err: Error, resp: Buffer) => void) => void, + ): this; + prependOnceListener( + event: 'OCSPRequest', + listener: ( + certificate: Buffer, + issuer: Buffer, + callback: (err: Error | null, resp: Buffer) => void, + ) => void, + ): this; + prependOnceListener( + event: 'resumeSession', + listener: (sessionId: Buffer, callback: (err: Error, sessionData: Buffer) => void) => void, + ): this; + prependOnceListener(event: 'secureConnection', listener: (tlsSocket: tls.TLSSocket) => void): this; + prependOnceListener(event: 'tlsClientError', listener: (err: Error, tlsSocket: tls.TLSSocket) => void): this; + prependOnceListener(event: 'close', listener: () => void): this; + prependOnceListener(event: 'connection', listener: (socket: Duplex) => void): this; + prependOnceListener(event: 'error', listener: (err: Error) => void): this; + prependOnceListener(event: 'listening', listener: () => void): this; + prependOnceListener(event: 'checkContinue', listener: http.RequestListener): this; + prependOnceListener(event: 'checkExpectation', listener: http.RequestListener): this; + prependOnceListener(event: 'clientError', listener: (err: Error, socket: Duplex) => void): this; + prependOnceListener( + event: 'connect', + listener: (req: InstanceType, socket: Duplex, head: Buffer) => void, + ): this; + prependOnceListener(event: 'request', listener: http.RequestListener): this; + prependOnceListener( + event: 'upgrade', + listener: (req: InstanceType, socket: Duplex, head: Buffer) => void, + ): this; + } + /** + * ```js + * // curl -k https://localhost:8000/ + * const https = require('https'); + * const fs = require('fs'); + * + * const options = { + * key: fs.readFileSync('test/fixtures/keys/agent2-key.pem'), + * cert: fs.readFileSync('test/fixtures/keys/agent2-cert.pem') + * }; + * + * https.createServer(options, (req, res) => { + * 
res.writeHead(200); + * res.end('hello world\n'); + * }).listen(8000); + * ``` + * + * Or + * + * ```js + * const https = require('https'); + * const fs = require('fs'); + * + * const options = { + * pfx: fs.readFileSync('test/fixtures/test_cert.pfx'), + * passphrase: 'sample' + * }; + * + * https.createServer(options, (req, res) => { + * res.writeHead(200); + * res.end('hello world\n'); + * }).listen(8000); + * ``` + * @since v0.3.4 + * @param options Accepts `options` from `createServer`, `createSecureContext` and `createServer`. + * @param requestListener A listener to be added to the `'request'` event. + */ + function createServer< + Request extends typeof http.IncomingMessage = typeof http.IncomingMessage, + Response extends typeof http.ServerResponse = typeof http.ServerResponse, + >(requestListener?: http.RequestListener): Server; + function createServer< + Request extends typeof http.IncomingMessage = typeof http.IncomingMessage, + Response extends typeof http.ServerResponse = typeof http.ServerResponse, + >( + options: ServerOptions, + requestListener?: http.RequestListener, + ): Server; + /** + * Makes a request to a secure web server. + * + * The following additional `options` from `tls.connect()` are also accepted:`ca`, `cert`, `ciphers`, `clientCertEngine`, `crl`, `dhparam`, `ecdhCurve`,`honorCipherOrder`, `key`, `passphrase`, + * `pfx`, `rejectUnauthorized`,`secureOptions`, `secureProtocol`, `servername`, `sessionIdContext`,`highWaterMark`. + * + * `options` can be an object, a string, or a `URL` object. If `options` is a + * string, it is automatically parsed with `new URL()`. If it is a `URL` object, it will be automatically converted to an ordinary `options` object. + * + * `https.request()` returns an instance of the `http.ClientRequest` class. The `ClientRequest` instance is a writable stream. If one needs to + * upload a file with a POST request, then write to the `ClientRequest` object. + * + * ```js + * const https = require('https'); + * + * const options = { + * hostname: 'encrypted.google.com', + * port: 443, + * path: '/', + * method: 'GET' + * }; + * + * const req = https.request(options, (res) => { + * console.log('statusCode:', res.statusCode); + * console.log('headers:', res.headers); + * + * res.on('data', (d) => { + * process.stdout.write(d); + * }); + * }); + * + * req.on('error', (e) => { + * console.error(e); + * }); + * req.end(); + * ``` + * + * Example using options from `tls.connect()`: + * + * ```js + * const options = { + * hostname: 'encrypted.google.com', + * port: 443, + * path: '/', + * method: 'GET', + * key: fs.readFileSync('test/fixtures/keys/agent2-key.pem'), + * cert: fs.readFileSync('test/fixtures/keys/agent2-cert.pem') + * }; + * options.agent = new https.Agent(options); + * + * const req = https.request(options, (res) => { + * // ... + * }); + * ``` + * + * Alternatively, opt out of connection pooling by not using an `Agent`. + * + * ```js + * const options = { + * hostname: 'encrypted.google.com', + * port: 443, + * path: '/', + * method: 'GET', + * key: fs.readFileSync('test/fixtures/keys/agent2-key.pem'), + * cert: fs.readFileSync('test/fixtures/keys/agent2-cert.pem'), + * agent: false + * }; + * + * const req = https.request(options, (res) => { + * // ... + * }); + * ``` + * + * Example using a `URL` as `options`: + * + * ```js + * const options = new URL('https://abc:xyz@example.com'); + * + * const req = https.request(options, (res) => { + * // ... 
+ * }); + * ``` + * + * Example pinning on certificate fingerprint, or the public key (similar to`pin-sha256`): + * + * ```js + * const tls = require('tls'); + * const https = require('https'); + * const crypto = require('crypto'); + * + * function sha256(s) { + * return crypto.createHash('sha256').update(s).digest('base64'); + * } + * const options = { + * hostname: 'github.com', + * port: 443, + * path: '/', + * method: 'GET', + * checkServerIdentity: function(host, cert) { + * // Make sure the certificate is issued to the host we are connected to + * const err = tls.checkServerIdentity(host, cert); + * if (err) { + * return err; + * } + * + * // Pin the public key, similar to HPKP pin-sha25 pinning + * const pubkey256 = 'pL1+qb9HTMRZJmuC/bB/ZI9d302BYrrqiVuRyW+DGrU='; + * if (sha256(cert.pubkey) !== pubkey256) { + * const msg = 'Certificate verification error: ' + + * `The public key of '${cert.subject.CN}' ` + + * 'does not match our pinned fingerprint'; + * return new Error(msg); + * } + * + * // Pin the exact certificate, rather than the pub key + * const cert256 = '25:FE:39:32:D9:63:8C:8A:FC:A1:9A:29:87:' + + * 'D8:3E:4C:1D:98:DB:71:E4:1A:48:03:98:EA:22:6A:BD:8B:93:16'; + * if (cert.fingerprint256 !== cert256) { + * const msg = 'Certificate verification error: ' + + * `The certificate of '${cert.subject.CN}' ` + + * 'does not match our pinned fingerprint'; + * return new Error(msg); + * } + * + * // This loop is informational only. + * // Print the certificate and public key fingerprints of all certs in the + * // chain. Its common to pin the public key of the issuer on the public + * // internet, while pinning the public key of the service in sensitive + * // environments. + * do { + * console.log('Subject Common Name:', cert.subject.CN); + * console.log(' Certificate SHA256 fingerprint:', cert.fingerprint256); + * + * hash = crypto.createHash('sha256'); + * console.log(' Public key ping-sha256:', sha256(cert.pubkey)); + * + * lastprint256 = cert.fingerprint256; + * cert = cert.issuerCertificate; + * } while (cert.fingerprint256 !== lastprint256); + * + * }, + * }; + * + * options.agent = new https.Agent(options); + * const req = https.request(options, (res) => { + * console.log('All OK. Server matched our pinned cert or public key'); + * console.log('statusCode:', res.statusCode); + * // Print the HPKP values + * console.log('headers:', res.headers['public-key-pins']); + * + * res.on('data', (d) => {}); + * }); + * + * req.on('error', (e) => { + * console.error(e.message); + * }); + * req.end(); + * ``` + * + * Outputs for example: + * + * ```text + * Subject Common Name: github.com + * Certificate SHA256 fingerprint: 25:FE:39:32:D9:63:8C:8A:FC:A1:9A:29:87:D8:3E:4C:1D:98:DB:71:E4:1A:48:03:98:EA:22:6A:BD:8B:93:16 + * Public key ping-sha256: pL1+qb9HTMRZJmuC/bB/ZI9d302BYrrqiVuRyW+DGrU= + * Subject Common Name: DigiCert SHA2 Extended Validation Server CA + * Certificate SHA256 fingerprint: 40:3E:06:2A:26:53:05:91:13:28:5B:AF:80:A0:D4:AE:42:2C:84:8C:9F:78:FA:D0:1F:C9:4B:C5:B8:7F:EF:1A + * Public key ping-sha256: RRM1dGqnDFsCJXBTHky16vi1obOlCgFFn/yOhI/y+ho= + * Subject Common Name: DigiCert High Assurance EV Root CA + * Certificate SHA256 fingerprint: 74:31:E5:F4:C3:C1:CE:46:90:77:4F:0B:61:E0:54:40:88:3B:A9:A0:1E:D0:0B:A6:AB:D7:80:6E:D3:B1:18:CF + * Public key ping-sha256: WoiWRyIOVNa9ihaBciRSC7XHjliYS9VwUGOIud4PB18= + * All OK. 
Server matched our pinned cert or public key + * statusCode: 200 + * headers: max-age=0; pin-sha256="WoiWRyIOVNa9ihaBciRSC7XHjliYS9VwUGOIud4PB18="; pin-sha256="RRM1dGqnDFsCJXBTHky16vi1obOlCgFFn/yOhI/y+ho="; + * pin-sha256="k2v657xBsOVe1PQRwOsHsw3bsGT2VzIqz5K+59sNQws="; pin-sha256="K87oWBWM9UZfyddvDfoxL+8lpNyoUB2ptGtn0fv6G2Q="; pin-sha256="IQBnNBEiFuhj+8x6X8XLgh01V9Ic5/V3IRQLNFFc7v4="; + * pin-sha256="iie1VXtL7HzAMF+/PVPR9xzT80kQxdZeJ+zduCB3uj0="; pin-sha256="LvRiGEjRqfzurezaWuj8Wie2gyHMrW5Q06LspMnox7A="; includeSubDomains + * ``` + * @since v0.3.6 + * @param options Accepts all `options` from `request`, with some differences in default values: + */ + function request( + options: RequestOptions | string | URL, + callback?: (res: http.IncomingMessage) => void, + ): http.ClientRequest; + function request( + url: string | URL, + options: RequestOptions, + callback?: (res: http.IncomingMessage) => void, + ): http.ClientRequest; + /** + * Like `http.get()` but for HTTPS. + * + * `options` can be an object, a string, or a `URL` object. If `options` is a + * string, it is automatically parsed with `new URL()`. If it is a `URL` object, it will be automatically converted to an ordinary `options` object. + * + * ```js + * const https = require('https'); + * + * https.get('https://encrypted.google.com/', (res) => { + * console.log('statusCode:', res.statusCode); + * console.log('headers:', res.headers); + * + * res.on('data', (d) => { + * process.stdout.write(d); + * }); + * + * }).on('error', (e) => { + * console.error(e); + * }); + * ``` + * @since v0.3.6 + * @param options Accepts the same `options` as {@link request}, with the `method` always set to `GET`. + */ + function get( + options: RequestOptions | string | URL, + callback?: (res: http.IncomingMessage) => void, + ): http.ClientRequest; + function get( + url: string | URL, + options: RequestOptions, + callback?: (res: http.IncomingMessage) => void, + ): http.ClientRequest; + let globalAgent: Agent; +} +declare module 'node:https' { + export * from 'https'; +} diff --git a/node_modules/@types/node/ts4.8/index.d.ts b/node_modules/@types/node/ts4.8/index.d.ts new file mode 100755 index 0000000..7c8b38c --- /dev/null +++ b/node_modules/@types/node/ts4.8/index.d.ts @@ -0,0 +1,88 @@ +/** + * License for programmatically and manually incorporated + * documentation aka. `JSDoc` from https://github.com/nodejs/node/tree/master/doc + * + * Copyright Node.js contributors. All rights reserved. + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to + * deal in the Software without restriction, including without limitation the + * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + * sell copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + * IN THE SOFTWARE. + */ + +// NOTE: These definitions support NodeJS and TypeScript 4.8 and earlier. + +// Reference required types from the default lib: +/// +/// +/// +/// + +// Base definitions for all NodeJS modules that are not specific to any version of TypeScript: +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// +/// + +/// diff --git a/node_modules/@types/node/ts4.8/inspector.d.ts b/node_modules/@types/node/ts4.8/inspector.d.ts new file mode 100755 index 0000000..eba0b55 --- /dev/null +++ b/node_modules/@types/node/ts4.8/inspector.d.ts @@ -0,0 +1,2741 @@ +// eslint-disable-next-line dt-header +// Type definitions for inspector + +// These definitions are auto-generated. +// Please see https://github.com/DefinitelyTyped/DefinitelyTyped/pull/19330 +// for more information. + +// tslint:disable:max-line-length + +/** + * The `inspector` module provides an API for interacting with the V8 inspector. + * + * It can be accessed using: + * + * ```js + * const inspector = require('inspector'); + * ``` + * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/inspector.js) + */ +declare module 'inspector' { + import EventEmitter = require('node:events'); + interface InspectorNotification { + method: string; + params: T; + } + namespace Schema { + /** + * Description of the protocol domain. + */ + interface Domain { + /** + * Domain name. + */ + name: string; + /** + * Domain version. + */ + version: string; + } + interface GetDomainsReturnType { + /** + * List of supported domains. + */ + domains: Domain[]; + } + } + namespace Runtime { + /** + * Unique script identifier. + */ + type ScriptId = string; + /** + * Unique object identifier. + */ + type RemoteObjectId = string; + /** + * Primitive value which cannot be JSON-stringified. + */ + type UnserializableValue = string; + /** + * Mirror object referencing original JavaScript object. + */ + interface RemoteObject { + /** + * Object type. + */ + type: string; + /** + * Object subtype hint. Specified for object type values only. + */ + subtype?: string | undefined; + /** + * Object class (constructor) name. Specified for object type values only. + */ + className?: string | undefined; + /** + * Remote object value in case of primitive values or JSON values (if it was requested). + */ + value?: any; + /** + * Primitive value which can not be JSON-stringified does not have value, but gets this property. + */ + unserializableValue?: UnserializableValue | undefined; + /** + * String representation of the object. + */ + description?: string | undefined; + /** + * Unique object identifier (for non-primitive values). + */ + objectId?: RemoteObjectId | undefined; + /** + * Preview containing abbreviated property values. Specified for object type values only. 
+ * @experimental + */ + preview?: ObjectPreview | undefined; + /** + * @experimental + */ + customPreview?: CustomPreview | undefined; + } + /** + * @experimental + */ + interface CustomPreview { + header: string; + hasBody: boolean; + formatterObjectId: RemoteObjectId; + bindRemoteObjectFunctionId: RemoteObjectId; + configObjectId?: RemoteObjectId | undefined; + } + /** + * Object containing abbreviated remote object value. + * @experimental + */ + interface ObjectPreview { + /** + * Object type. + */ + type: string; + /** + * Object subtype hint. Specified for object type values only. + */ + subtype?: string | undefined; + /** + * String representation of the object. + */ + description?: string | undefined; + /** + * True iff some of the properties or entries of the original object did not fit. + */ + overflow: boolean; + /** + * List of the properties. + */ + properties: PropertyPreview[]; + /** + * List of the entries. Specified for map and set subtype values only. + */ + entries?: EntryPreview[] | undefined; + } + /** + * @experimental + */ + interface PropertyPreview { + /** + * Property name. + */ + name: string; + /** + * Object type. Accessor means that the property itself is an accessor property. + */ + type: string; + /** + * User-friendly property value string. + */ + value?: string | undefined; + /** + * Nested value preview. + */ + valuePreview?: ObjectPreview | undefined; + /** + * Object subtype hint. Specified for object type values only. + */ + subtype?: string | undefined; + } + /** + * @experimental + */ + interface EntryPreview { + /** + * Preview of the key. Specified for map-like collection entries. + */ + key?: ObjectPreview | undefined; + /** + * Preview of the value. + */ + value: ObjectPreview; + } + /** + * Object property descriptor. + */ + interface PropertyDescriptor { + /** + * Property name or symbol description. + */ + name: string; + /** + * The value associated with the property. + */ + value?: RemoteObject | undefined; + /** + * True if the value associated with the property may be changed (data descriptors only). + */ + writable?: boolean | undefined; + /** + * A function which serves as a getter for the property, or undefined if there is no getter (accessor descriptors only). + */ + get?: RemoteObject | undefined; + /** + * A function which serves as a setter for the property, or undefined if there is no setter (accessor descriptors only). + */ + set?: RemoteObject | undefined; + /** + * True if the type of this property descriptor may be changed and if the property may be deleted from the corresponding object. + */ + configurable: boolean; + /** + * True if this property shows up during enumeration of the properties on the corresponding object. + */ + enumerable: boolean; + /** + * True if the result was thrown during the evaluation. + */ + wasThrown?: boolean | undefined; + /** + * True if the property is owned for the object. + */ + isOwn?: boolean | undefined; + /** + * Property symbol object, if the property is of the symbol type. + */ + symbol?: RemoteObject | undefined; + } + /** + * Object internal property descriptor. This property isn't normally visible in JavaScript code. + */ + interface InternalPropertyDescriptor { + /** + * Conventional property name. + */ + name: string; + /** + * The value associated with the property. + */ + value?: RemoteObject | undefined; + } + /** + * Represents function call argument. 
Either remote object id objectId, primitive value, unserializable primitive value or neither of (for undefined) them should be specified. + */ + interface CallArgument { + /** + * Primitive value or serializable javascript object. + */ + value?: any; + /** + * Primitive value which can not be JSON-stringified. + */ + unserializableValue?: UnserializableValue | undefined; + /** + * Remote object handle. + */ + objectId?: RemoteObjectId | undefined; + } + /** + * Id of an execution context. + */ + type ExecutionContextId = number; + /** + * Description of an isolated world. + */ + interface ExecutionContextDescription { + /** + * Unique id of the execution context. It can be used to specify in which execution context script evaluation should be performed. + */ + id: ExecutionContextId; + /** + * Execution context origin. + */ + origin: string; + /** + * Human readable name describing given context. + */ + name: string; + /** + * Embedder-specific auxiliary data. + */ + auxData?: {} | undefined; + } + /** + * Detailed information about exception (or error) that was thrown during script compilation or execution. + */ + interface ExceptionDetails { + /** + * Exception id. + */ + exceptionId: number; + /** + * Exception text, which should be used together with exception object when available. + */ + text: string; + /** + * Line number of the exception location (0-based). + */ + lineNumber: number; + /** + * Column number of the exception location (0-based). + */ + columnNumber: number; + /** + * Script ID of the exception location. + */ + scriptId?: ScriptId | undefined; + /** + * URL of the exception location, to be used when the script was not reported. + */ + url?: string | undefined; + /** + * JavaScript stack trace if available. + */ + stackTrace?: StackTrace | undefined; + /** + * Exception object if available. + */ + exception?: RemoteObject | undefined; + /** + * Identifier of the context where exception happened. + */ + executionContextId?: ExecutionContextId | undefined; + } + /** + * Number of milliseconds since epoch. + */ + type Timestamp = number; + /** + * Stack entry for runtime errors and assertions. + */ + interface CallFrame { + /** + * JavaScript function name. + */ + functionName: string; + /** + * JavaScript script id. + */ + scriptId: ScriptId; + /** + * JavaScript script name or url. + */ + url: string; + /** + * JavaScript script line number (0-based). + */ + lineNumber: number; + /** + * JavaScript script column number (0-based). + */ + columnNumber: number; + } + /** + * Call frames for assertions or error messages. + */ + interface StackTrace { + /** + * String label of this stack trace. For async traces this may be a name of the function that initiated the async call. + */ + description?: string | undefined; + /** + * JavaScript function name. + */ + callFrames: CallFrame[]; + /** + * Asynchronous JavaScript stack trace that preceded this stack, if available. + */ + parent?: StackTrace | undefined; + /** + * Asynchronous JavaScript stack trace that preceded this stack, if available. + * @experimental + */ + parentId?: StackTraceId | undefined; + } + /** + * Unique identifier of current debugger. + * @experimental + */ + type UniqueDebuggerId = string; + /** + * If debuggerId is set stack trace comes from another debugger and can be resolved there. This allows to track cross-debugger calls. See Runtime.StackTrace and Debugger.paused for usages. 
+ * @experimental + */ + interface StackTraceId { + id: string; + debuggerId?: UniqueDebuggerId | undefined; + } + interface EvaluateParameterType { + /** + * Expression to evaluate. + */ + expression: string; + /** + * Symbolic group name that can be used to release multiple objects. + */ + objectGroup?: string | undefined; + /** + * Determines whether Command Line API should be available during the evaluation. + */ + includeCommandLineAPI?: boolean | undefined; + /** + * In silent mode exceptions thrown during evaluation are not reported and do not pause execution. Overrides setPauseOnException state. + */ + silent?: boolean | undefined; + /** + * Specifies in which execution context to perform evaluation. If the parameter is omitted the evaluation will be performed in the context of the inspected page. + */ + contextId?: ExecutionContextId | undefined; + /** + * Whether the result is expected to be a JSON object that should be sent by value. + */ + returnByValue?: boolean | undefined; + /** + * Whether preview should be generated for the result. + * @experimental + */ + generatePreview?: boolean | undefined; + /** + * Whether execution should be treated as initiated by user in the UI. + */ + userGesture?: boolean | undefined; + /** + * Whether execution should await for resulting value and return once awaited promise is resolved. + */ + awaitPromise?: boolean | undefined; + } + interface AwaitPromiseParameterType { + /** + * Identifier of the promise. + */ + promiseObjectId: RemoteObjectId; + /** + * Whether the result is expected to be a JSON object that should be sent by value. + */ + returnByValue?: boolean | undefined; + /** + * Whether preview should be generated for the result. + */ + generatePreview?: boolean | undefined; + } + interface CallFunctionOnParameterType { + /** + * Declaration of the function to call. + */ + functionDeclaration: string; + /** + * Identifier of the object to call function on. Either objectId or executionContextId should be specified. + */ + objectId?: RemoteObjectId | undefined; + /** + * Call arguments. All call arguments must belong to the same JavaScript world as the target object. + */ + arguments?: CallArgument[] | undefined; + /** + * In silent mode exceptions thrown during evaluation are not reported and do not pause execution. Overrides setPauseOnException state. + */ + silent?: boolean | undefined; + /** + * Whether the result is expected to be a JSON object which should be sent by value. + */ + returnByValue?: boolean | undefined; + /** + * Whether preview should be generated for the result. + * @experimental + */ + generatePreview?: boolean | undefined; + /** + * Whether execution should be treated as initiated by user in the UI. + */ + userGesture?: boolean | undefined; + /** + * Whether execution should await for resulting value and return once awaited promise is resolved. + */ + awaitPromise?: boolean | undefined; + /** + * Specifies execution context which global object will be used to call function on. Either executionContextId or objectId should be specified. + */ + executionContextId?: ExecutionContextId | undefined; + /** + * Symbolic group name that can be used to release multiple objects. If objectGroup is not specified and objectId is, objectGroup will be inherited from object. + */ + objectGroup?: string | undefined; + } + interface GetPropertiesParameterType { + /** + * Identifier of the object to return properties for. 
+ */ + objectId: RemoteObjectId; + /** + * If true, returns properties belonging only to the element itself, not to its prototype chain. + */ + ownProperties?: boolean | undefined; + /** + * If true, returns accessor properties (with getter/setter) only; internal properties are not returned either. + * @experimental + */ + accessorPropertiesOnly?: boolean | undefined; + /** + * Whether preview should be generated for the results. + * @experimental + */ + generatePreview?: boolean | undefined; + } + interface ReleaseObjectParameterType { + /** + * Identifier of the object to release. + */ + objectId: RemoteObjectId; + } + interface ReleaseObjectGroupParameterType { + /** + * Symbolic object group name. + */ + objectGroup: string; + } + interface SetCustomObjectFormatterEnabledParameterType { + enabled: boolean; + } + interface CompileScriptParameterType { + /** + * Expression to compile. + */ + expression: string; + /** + * Source url to be set for the script. + */ + sourceURL: string; + /** + * Specifies whether the compiled script should be persisted. + */ + persistScript: boolean; + /** + * Specifies in which execution context to perform script run. If the parameter is omitted the evaluation will be performed in the context of the inspected page. + */ + executionContextId?: ExecutionContextId | undefined; + } + interface RunScriptParameterType { + /** + * Id of the script to run. + */ + scriptId: ScriptId; + /** + * Specifies in which execution context to perform script run. If the parameter is omitted the evaluation will be performed in the context of the inspected page. + */ + executionContextId?: ExecutionContextId | undefined; + /** + * Symbolic group name that can be used to release multiple objects. + */ + objectGroup?: string | undefined; + /** + * In silent mode exceptions thrown during evaluation are not reported and do not pause execution. Overrides setPauseOnException state. + */ + silent?: boolean | undefined; + /** + * Determines whether Command Line API should be available during the evaluation. + */ + includeCommandLineAPI?: boolean | undefined; + /** + * Whether the result is expected to be a JSON object which should be sent by value. + */ + returnByValue?: boolean | undefined; + /** + * Whether preview should be generated for the result. + */ + generatePreview?: boolean | undefined; + /** + * Whether execution should await for resulting value and return once awaited promise is resolved. + */ + awaitPromise?: boolean | undefined; + } + interface QueryObjectsParameterType { + /** + * Identifier of the prototype to return objects for. + */ + prototypeObjectId: RemoteObjectId; + } + interface GlobalLexicalScopeNamesParameterType { + /** + * Specifies in which execution context to lookup global scope variables. + */ + executionContextId?: ExecutionContextId | undefined; + } + interface EvaluateReturnType { + /** + * Evaluation result. + */ + result: RemoteObject; + /** + * Exception details. + */ + exceptionDetails?: ExceptionDetails | undefined; + } + interface AwaitPromiseReturnType { + /** + * Promise result. Will contain rejected value if promise was rejected. + */ + result: RemoteObject; + /** + * Exception details if stack strace is available. + */ + exceptionDetails?: ExceptionDetails | undefined; + } + interface CallFunctionOnReturnType { + /** + * Call result. + */ + result: RemoteObject; + /** + * Exception details. + */ + exceptionDetails?: ExceptionDetails | undefined; + } + interface GetPropertiesReturnType { + /** + * Object properties. 
+ */ + result: PropertyDescriptor[]; + /** + * Internal object properties (only of the element itself). + */ + internalProperties?: InternalPropertyDescriptor[] | undefined; + /** + * Exception details. + */ + exceptionDetails?: ExceptionDetails | undefined; + } + interface CompileScriptReturnType { + /** + * Id of the script. + */ + scriptId?: ScriptId | undefined; + /** + * Exception details. + */ + exceptionDetails?: ExceptionDetails | undefined; + } + interface RunScriptReturnType { + /** + * Run result. + */ + result: RemoteObject; + /** + * Exception details. + */ + exceptionDetails?: ExceptionDetails | undefined; + } + interface QueryObjectsReturnType { + /** + * Array with objects. + */ + objects: RemoteObject; + } + interface GlobalLexicalScopeNamesReturnType { + names: string[]; + } + interface ExecutionContextCreatedEventDataType { + /** + * A newly created execution context. + */ + context: ExecutionContextDescription; + } + interface ExecutionContextDestroyedEventDataType { + /** + * Id of the destroyed context + */ + executionContextId: ExecutionContextId; + } + interface ExceptionThrownEventDataType { + /** + * Timestamp of the exception. + */ + timestamp: Timestamp; + exceptionDetails: ExceptionDetails; + } + interface ExceptionRevokedEventDataType { + /** + * Reason describing why exception was revoked. + */ + reason: string; + /** + * The id of revoked exception, as reported in exceptionThrown. + */ + exceptionId: number; + } + interface ConsoleAPICalledEventDataType { + /** + * Type of the call. + */ + type: string; + /** + * Call arguments. + */ + args: RemoteObject[]; + /** + * Identifier of the context where the call was made. + */ + executionContextId: ExecutionContextId; + /** + * Call timestamp. + */ + timestamp: Timestamp; + /** + * Stack trace captured when the call was made. + */ + stackTrace?: StackTrace | undefined; + /** + * Console context descriptor for calls on non-default console context (not console.*): 'anonymous#unique-logger-id' for call on unnamed context, 'name#unique-logger-id' for call on named context. + * @experimental + */ + context?: string | undefined; + } + interface InspectRequestedEventDataType { + object: RemoteObject; + hints: {}; + } + } + namespace Debugger { + /** + * Breakpoint identifier. + */ + type BreakpointId = string; + /** + * Call frame identifier. + */ + type CallFrameId = string; + /** + * Location in the source code. + */ + interface Location { + /** + * Script identifier as reported in the Debugger.scriptParsed. + */ + scriptId: Runtime.ScriptId; + /** + * Line number in the script (0-based). + */ + lineNumber: number; + /** + * Column number in the script (0-based). + */ + columnNumber?: number | undefined; + } + /** + * Location in the source code. + * @experimental + */ + interface ScriptPosition { + lineNumber: number; + columnNumber: number; + } + /** + * JavaScript call frame. Array of call frames form the call stack. + */ + interface CallFrame { + /** + * Call frame identifier. This identifier is only valid while the virtual machine is paused. + */ + callFrameId: CallFrameId; + /** + * Name of the JavaScript function called on this call frame. + */ + functionName: string; + /** + * Location in the source code. + */ + functionLocation?: Location | undefined; + /** + * Location in the source code. + */ + location: Location; + /** + * JavaScript script name or url. + */ + url: string; + /** + * Scope chain for this call frame. + */ + scopeChain: Scope[]; + /** + * this object for this call frame. 
+ */ + this: Runtime.RemoteObject; + /** + * The value being returned, if the function is at return point. + */ + returnValue?: Runtime.RemoteObject | undefined; + } + /** + * Scope description. + */ + interface Scope { + /** + * Scope type. + */ + type: string; + /** + * Object representing the scope. For global and with scopes it represents the actual object; for the rest of the scopes, it is artificial transient object enumerating scope variables as its properties. + */ + object: Runtime.RemoteObject; + name?: string | undefined; + /** + * Location in the source code where scope starts + */ + startLocation?: Location | undefined; + /** + * Location in the source code where scope ends + */ + endLocation?: Location | undefined; + } + /** + * Search match for resource. + */ + interface SearchMatch { + /** + * Line number in resource content. + */ + lineNumber: number; + /** + * Line with match content. + */ + lineContent: string; + } + interface BreakLocation { + /** + * Script identifier as reported in the Debugger.scriptParsed. + */ + scriptId: Runtime.ScriptId; + /** + * Line number in the script (0-based). + */ + lineNumber: number; + /** + * Column number in the script (0-based). + */ + columnNumber?: number | undefined; + type?: string | undefined; + } + interface SetBreakpointsActiveParameterType { + /** + * New value for breakpoints active state. + */ + active: boolean; + } + interface SetSkipAllPausesParameterType { + /** + * New value for skip pauses state. + */ + skip: boolean; + } + interface SetBreakpointByUrlParameterType { + /** + * Line number to set breakpoint at. + */ + lineNumber: number; + /** + * URL of the resources to set breakpoint on. + */ + url?: string | undefined; + /** + * Regex pattern for the URLs of the resources to set breakpoints on. Either url or urlRegex must be specified. + */ + urlRegex?: string | undefined; + /** + * Script hash of the resources to set breakpoint on. + */ + scriptHash?: string | undefined; + /** + * Offset in the line to set breakpoint at. + */ + columnNumber?: number | undefined; + /** + * Expression to use as a breakpoint condition. When specified, debugger will only stop on the breakpoint if this expression evaluates to true. + */ + condition?: string | undefined; + } + interface SetBreakpointParameterType { + /** + * Location to set breakpoint in. + */ + location: Location; + /** + * Expression to use as a breakpoint condition. When specified, debugger will only stop on the breakpoint if this expression evaluates to true. + */ + condition?: string | undefined; + } + interface RemoveBreakpointParameterType { + breakpointId: BreakpointId; + } + interface GetPossibleBreakpointsParameterType { + /** + * Start of range to search possible breakpoint locations in. + */ + start: Location; + /** + * End of range to search possible breakpoint locations in (excluding). When not specified, end of scripts is used as end of range. + */ + end?: Location | undefined; + /** + * Only consider locations which are in the same (non-nested) function as start. + */ + restrictToFunction?: boolean | undefined; + } + interface ContinueToLocationParameterType { + /** + * Location to continue to. + */ + location: Location; + targetCallFrames?: string | undefined; + } + interface PauseOnAsyncCallParameterType { + /** + * Debugger will pause when async call with given stack trace is started. 
+ */ + parentStackTraceId: Runtime.StackTraceId; + } + interface StepIntoParameterType { + /** + * Debugger will issue additional Debugger.paused notification if any async task is scheduled before next pause. + * @experimental + */ + breakOnAsyncCall?: boolean | undefined; + } + interface GetStackTraceParameterType { + stackTraceId: Runtime.StackTraceId; + } + interface SearchInContentParameterType { + /** + * Id of the script to search in. + */ + scriptId: Runtime.ScriptId; + /** + * String to search for. + */ + query: string; + /** + * If true, search is case sensitive. + */ + caseSensitive?: boolean | undefined; + /** + * If true, treats string parameter as regex. + */ + isRegex?: boolean | undefined; + } + interface SetScriptSourceParameterType { + /** + * Id of the script to edit. + */ + scriptId: Runtime.ScriptId; + /** + * New content of the script. + */ + scriptSource: string; + /** + * If true the change will not actually be applied. Dry run may be used to get result description without actually modifying the code. + */ + dryRun?: boolean | undefined; + } + interface RestartFrameParameterType { + /** + * Call frame identifier to evaluate on. + */ + callFrameId: CallFrameId; + } + interface GetScriptSourceParameterType { + /** + * Id of the script to get source for. + */ + scriptId: Runtime.ScriptId; + } + interface SetPauseOnExceptionsParameterType { + /** + * Pause on exceptions mode. + */ + state: string; + } + interface EvaluateOnCallFrameParameterType { + /** + * Call frame identifier to evaluate on. + */ + callFrameId: CallFrameId; + /** + * Expression to evaluate. + */ + expression: string; + /** + * String object group name to put result into (allows rapid releasing resulting object handles using releaseObjectGroup). + */ + objectGroup?: string | undefined; + /** + * Specifies whether command line API should be available to the evaluated expression, defaults to false. + */ + includeCommandLineAPI?: boolean | undefined; + /** + * In silent mode exceptions thrown during evaluation are not reported and do not pause execution. Overrides setPauseOnException state. + */ + silent?: boolean | undefined; + /** + * Whether the result is expected to be a JSON object that should be sent by value. + */ + returnByValue?: boolean | undefined; + /** + * Whether preview should be generated for the result. + * @experimental + */ + generatePreview?: boolean | undefined; + /** + * Whether to throw an exception if side effect cannot be ruled out during evaluation. + */ + throwOnSideEffect?: boolean | undefined; + } + interface SetVariableValueParameterType { + /** + * 0-based number of scope as was listed in scope chain. Only 'local', 'closure' and 'catch' scope types are allowed. Other scopes could be manipulated manually. + */ + scopeNumber: number; + /** + * Variable name. + */ + variableName: string; + /** + * New variable value. + */ + newValue: Runtime.CallArgument; + /** + * Id of callframe that holds variable. + */ + callFrameId: CallFrameId; + } + interface SetReturnValueParameterType { + /** + * New return value. + */ + newValue: Runtime.CallArgument; + } + interface SetAsyncCallStackDepthParameterType { + /** + * Maximum depth of async call stacks. Setting to 0 will effectively disable collecting async call stacks (default). + */ + maxDepth: number; + } + interface SetBlackboxPatternsParameterType { + /** + * Array of regexps that will be used to check script url for blackbox state. 
+ */ + patterns: string[]; + } + interface SetBlackboxedRangesParameterType { + /** + * Id of the script. + */ + scriptId: Runtime.ScriptId; + positions: ScriptPosition[]; + } + interface EnableReturnType { + /** + * Unique identifier of the debugger. + * @experimental + */ + debuggerId: Runtime.UniqueDebuggerId; + } + interface SetBreakpointByUrlReturnType { + /** + * Id of the created breakpoint for further reference. + */ + breakpointId: BreakpointId; + /** + * List of the locations this breakpoint resolved into upon addition. + */ + locations: Location[]; + } + interface SetBreakpointReturnType { + /** + * Id of the created breakpoint for further reference. + */ + breakpointId: BreakpointId; + /** + * Location this breakpoint resolved into. + */ + actualLocation: Location; + } + interface GetPossibleBreakpointsReturnType { + /** + * List of the possible breakpoint locations. + */ + locations: BreakLocation[]; + } + interface GetStackTraceReturnType { + stackTrace: Runtime.StackTrace; + } + interface SearchInContentReturnType { + /** + * List of search matches. + */ + result: SearchMatch[]; + } + interface SetScriptSourceReturnType { + /** + * New stack trace in case editing has happened while VM was stopped. + */ + callFrames?: CallFrame[] | undefined; + /** + * Whether current call stack was modified after applying the changes. + */ + stackChanged?: boolean | undefined; + /** + * Async stack trace, if any. + */ + asyncStackTrace?: Runtime.StackTrace | undefined; + /** + * Async stack trace, if any. + * @experimental + */ + asyncStackTraceId?: Runtime.StackTraceId | undefined; + /** + * Exception details if any. + */ + exceptionDetails?: Runtime.ExceptionDetails | undefined; + } + interface RestartFrameReturnType { + /** + * New stack trace. + */ + callFrames: CallFrame[]; + /** + * Async stack trace, if any. + */ + asyncStackTrace?: Runtime.StackTrace | undefined; + /** + * Async stack trace, if any. + * @experimental + */ + asyncStackTraceId?: Runtime.StackTraceId | undefined; + } + interface GetScriptSourceReturnType { + /** + * Script source. + */ + scriptSource: string; + } + interface EvaluateOnCallFrameReturnType { + /** + * Object wrapper for the evaluation result. + */ + result: Runtime.RemoteObject; + /** + * Exception details. + */ + exceptionDetails?: Runtime.ExceptionDetails | undefined; + } + interface ScriptParsedEventDataType { + /** + * Identifier of the script parsed. + */ + scriptId: Runtime.ScriptId; + /** + * URL or name of the script parsed (if any). + */ + url: string; + /** + * Line offset of the script within the resource with given URL (for script tags). + */ + startLine: number; + /** + * Column offset of the script within the resource with given URL. + */ + startColumn: number; + /** + * Last line of the script. + */ + endLine: number; + /** + * Length of the last line of the script. + */ + endColumn: number; + /** + * Specifies script creation context. + */ + executionContextId: Runtime.ExecutionContextId; + /** + * Content hash of the script. + */ + hash: string; + /** + * Embedder-specific auxiliary data. + */ + executionContextAuxData?: {} | undefined; + /** + * True, if this script is generated as a result of the live edit operation. + * @experimental + */ + isLiveEdit?: boolean | undefined; + /** + * URL of source map associated with script (if any). + */ + sourceMapURL?: string | undefined; + /** + * True, if this script has sourceURL. + */ + hasSourceURL?: boolean | undefined; + /** + * True, if this script is ES6 module. 
+ */ + isModule?: boolean | undefined; + /** + * This script length. + */ + length?: number | undefined; + /** + * JavaScript top stack frame of where the script parsed event was triggered if available. + * @experimental + */ + stackTrace?: Runtime.StackTrace | undefined; + } + interface ScriptFailedToParseEventDataType { + /** + * Identifier of the script parsed. + */ + scriptId: Runtime.ScriptId; + /** + * URL or name of the script parsed (if any). + */ + url: string; + /** + * Line offset of the script within the resource with given URL (for script tags). + */ + startLine: number; + /** + * Column offset of the script within the resource with given URL. + */ + startColumn: number; + /** + * Last line of the script. + */ + endLine: number; + /** + * Length of the last line of the script. + */ + endColumn: number; + /** + * Specifies script creation context. + */ + executionContextId: Runtime.ExecutionContextId; + /** + * Content hash of the script. + */ + hash: string; + /** + * Embedder-specific auxiliary data. + */ + executionContextAuxData?: {} | undefined; + /** + * URL of source map associated with script (if any). + */ + sourceMapURL?: string | undefined; + /** + * True, if this script has sourceURL. + */ + hasSourceURL?: boolean | undefined; + /** + * True, if this script is ES6 module. + */ + isModule?: boolean | undefined; + /** + * This script length. + */ + length?: number | undefined; + /** + * JavaScript top stack frame of where the script parsed event was triggered if available. + * @experimental + */ + stackTrace?: Runtime.StackTrace | undefined; + } + interface BreakpointResolvedEventDataType { + /** + * Breakpoint unique identifier. + */ + breakpointId: BreakpointId; + /** + * Actual breakpoint location. + */ + location: Location; + } + interface PausedEventDataType { + /** + * Call stack the virtual machine stopped on. + */ + callFrames: CallFrame[]; + /** + * Pause reason. + */ + reason: string; + /** + * Object containing break-specific auxiliary properties. + */ + data?: {} | undefined; + /** + * Hit breakpoints IDs + */ + hitBreakpoints?: string[] | undefined; + /** + * Async stack trace, if any. + */ + asyncStackTrace?: Runtime.StackTrace | undefined; + /** + * Async stack trace, if any. + * @experimental + */ + asyncStackTraceId?: Runtime.StackTraceId | undefined; + /** + * Just scheduled async call will have this stack trace as parent stack during async execution. This field is available only after Debugger.stepInto call with breakOnAsynCall flag. + * @experimental + */ + asyncCallStackTraceId?: Runtime.StackTraceId | undefined; + } + } + namespace Console { + /** + * Console message. + */ + interface ConsoleMessage { + /** + * Message source. + */ + source: string; + /** + * Message severity. + */ + level: string; + /** + * Message text. + */ + text: string; + /** + * URL of the message origin. + */ + url?: string | undefined; + /** + * Line number in the resource that generated this message (1-based). + */ + line?: number | undefined; + /** + * Column number in the resource that generated this message (1-based). + */ + column?: number | undefined; + } + interface MessageAddedEventDataType { + /** + * Console message that has been added. + */ + message: ConsoleMessage; + } + } + namespace Profiler { + /** + * Profile node. Holds callsite information, execution statistics and child nodes. + */ + interface ProfileNode { + /** + * Unique id of the node. + */ + id: number; + /** + * Function location. 
+ */ + callFrame: Runtime.CallFrame; + /** + * Number of samples where this node was on top of the call stack. + */ + hitCount?: number | undefined; + /** + * Child node ids. + */ + children?: number[] | undefined; + /** + * The reason of being not optimized. The function may be deoptimized or marked as don't optimize. + */ + deoptReason?: string | undefined; + /** + * An array of source position ticks. + */ + positionTicks?: PositionTickInfo[] | undefined; + } + /** + * Profile. + */ + interface Profile { + /** + * The list of profile nodes. First item is the root node. + */ + nodes: ProfileNode[]; + /** + * Profiling start timestamp in microseconds. + */ + startTime: number; + /** + * Profiling end timestamp in microseconds. + */ + endTime: number; + /** + * Ids of samples top nodes. + */ + samples?: number[] | undefined; + /** + * Time intervals between adjacent samples in microseconds. The first delta is relative to the profile startTime. + */ + timeDeltas?: number[] | undefined; + } + /** + * Specifies a number of samples attributed to a certain source position. + */ + interface PositionTickInfo { + /** + * Source line number (1-based). + */ + line: number; + /** + * Number of samples attributed to the source line. + */ + ticks: number; + } + /** + * Coverage data for a source range. + */ + interface CoverageRange { + /** + * JavaScript script source offset for the range start. + */ + startOffset: number; + /** + * JavaScript script source offset for the range end. + */ + endOffset: number; + /** + * Collected execution count of the source range. + */ + count: number; + } + /** + * Coverage data for a JavaScript function. + */ + interface FunctionCoverage { + /** + * JavaScript function name. + */ + functionName: string; + /** + * Source ranges inside the function with coverage data. + */ + ranges: CoverageRange[]; + /** + * Whether coverage data for this function has block granularity. + */ + isBlockCoverage: boolean; + } + /** + * Coverage data for a JavaScript script. + */ + interface ScriptCoverage { + /** + * JavaScript script id. + */ + scriptId: Runtime.ScriptId; + /** + * JavaScript script name or url. + */ + url: string; + /** + * Functions contained in the script that has coverage data. + */ + functions: FunctionCoverage[]; + } + /** + * Describes a type collected during runtime. + * @experimental + */ + interface TypeObject { + /** + * Name of a type collected with type profiling. + */ + name: string; + } + /** + * Source offset and types for a parameter or return value. + * @experimental + */ + interface TypeProfileEntry { + /** + * Source offset of the parameter or end of function for return values. + */ + offset: number; + /** + * The types for this parameter or return value. + */ + types: TypeObject[]; + } + /** + * Type profile data collected during runtime for a JavaScript script. + * @experimental + */ + interface ScriptTypeProfile { + /** + * JavaScript script id. + */ + scriptId: Runtime.ScriptId; + /** + * JavaScript script name or url. + */ + url: string; + /** + * Type profile entries for parameters and return values of the functions in the script. + */ + entries: TypeProfileEntry[]; + } + interface SetSamplingIntervalParameterType { + /** + * New sampling interval in microseconds. + */ + interval: number; + } + interface StartPreciseCoverageParameterType { + /** + * Collect accurate call counts beyond simple 'covered' or 'not covered'. + */ + callCount?: boolean | undefined; + /** + * Collect block-based coverage. 
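The CoverageRange/FunctionCoverage/ScriptCoverage shapes being declared here are what `Profiler.takePreciseCoverage` returns. A minimal sketch of collecting them over an in-process `inspector.Session` (the placeholder workload comment and the console output are illustrative, not part of the typings):

```js
// Sketch: collect precise block coverage for code running in this process.
const inspector = require('inspector');

const session = new inspector.Session();
session.connect();

session.post('Profiler.enable', () => {
  session.post('Profiler.startPreciseCoverage', { callCount: true, detailed: true }, () => {
    // ... run the code whose coverage should be measured here ...
    session.post('Profiler.takePreciseCoverage', (err, res) => {
      if (err) throw err;
      // res.result is ScriptCoverage[]: per-script URLs with FunctionCoverage ranges.
      for (const script of res.result) {
        console.log(script.url, '-', script.functions.length, 'functions covered');
      }
      session.post('Profiler.stopPreciseCoverage', () => session.disconnect());
    });
  });
});
```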
+ */ + detailed?: boolean | undefined; + } + interface StopReturnType { + /** + * Recorded profile. + */ + profile: Profile; + } + interface TakePreciseCoverageReturnType { + /** + * Coverage data for the current isolate. + */ + result: ScriptCoverage[]; + } + interface GetBestEffortCoverageReturnType { + /** + * Coverage data for the current isolate. + */ + result: ScriptCoverage[]; + } + interface TakeTypeProfileReturnType { + /** + * Type profile for all scripts since startTypeProfile() was turned on. + */ + result: ScriptTypeProfile[]; + } + interface ConsoleProfileStartedEventDataType { + id: string; + /** + * Location of console.profile(). + */ + location: Debugger.Location; + /** + * Profile title passed as an argument to console.profile(). + */ + title?: string | undefined; + } + interface ConsoleProfileFinishedEventDataType { + id: string; + /** + * Location of console.profileEnd(). + */ + location: Debugger.Location; + profile: Profile; + /** + * Profile title passed as an argument to console.profile(). + */ + title?: string | undefined; + } + } + namespace HeapProfiler { + /** + * Heap snapshot object id. + */ + type HeapSnapshotObjectId = string; + /** + * Sampling Heap Profile node. Holds callsite information, allocation statistics and child nodes. + */ + interface SamplingHeapProfileNode { + /** + * Function location. + */ + callFrame: Runtime.CallFrame; + /** + * Allocations size in bytes for the node excluding children. + */ + selfSize: number; + /** + * Child nodes. + */ + children: SamplingHeapProfileNode[]; + } + /** + * Profile. + */ + interface SamplingHeapProfile { + head: SamplingHeapProfileNode; + } + interface StartTrackingHeapObjectsParameterType { + trackAllocations?: boolean | undefined; + } + interface StopTrackingHeapObjectsParameterType { + /** + * If true 'reportHeapSnapshotProgress' events will be generated while snapshot is being taken when the tracking is stopped. + */ + reportProgress?: boolean | undefined; + } + interface TakeHeapSnapshotParameterType { + /** + * If true 'reportHeapSnapshotProgress' events will be generated while snapshot is being taken. + */ + reportProgress?: boolean | undefined; + } + interface GetObjectByHeapObjectIdParameterType { + objectId: HeapSnapshotObjectId; + /** + * Symbolic group name that can be used to release multiple objects. + */ + objectGroup?: string | undefined; + } + interface AddInspectedHeapObjectParameterType { + /** + * Heap snapshot object id to be accessible by means of $x command line API. + */ + heapObjectId: HeapSnapshotObjectId; + } + interface GetHeapObjectIdParameterType { + /** + * Identifier of the object to get heap object id for. + */ + objectId: Runtime.RemoteObjectId; + } + interface StartSamplingParameterType { + /** + * Average sample interval in bytes. Poisson distribution is used for the intervals. The default value is 32768 bytes. + */ + samplingInterval?: number | undefined; + } + interface GetObjectByHeapObjectIdReturnType { + /** + * Evaluation result. + */ + result: Runtime.RemoteObject; + } + interface GetHeapObjectIdReturnType { + /** + * Id of the heap snapshot object corresponding to the passed remote object id. + */ + heapSnapshotObjectId: HeapSnapshotObjectId; + } + interface StopSamplingReturnType { + /** + * Recorded sampling heap profile. + */ + profile: SamplingHeapProfile; + } + interface GetSamplingProfileReturnType { + /** + * Return the sampling profile being collected. 
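SamplingHeapProfile/SamplingHeapProfileNode above describe what `HeapProfiler.stopSampling` hands back. A small sketch, assuming an in-process session and an arbitrary sampling interval:

```js
// Sketch: record a sampling heap profile (allocation call sites and self sizes).
const inspector = require('inspector');

const session = new inspector.Session();
session.connect();

session.post('HeapProfiler.enable', () => {
  // samplingInterval is the average sample interval in bytes (default 32768).
  session.post('HeapProfiler.startSampling', { samplingInterval: 16384 }, () => {
    // ... allocate / exercise the code of interest here ...
    session.post('HeapProfiler.stopSampling', (err, res) => {
      if (err) throw err;
      // res.profile.head is the root SamplingHeapProfileNode; children mirror call sites.
      console.log(JSON.stringify(res.profile, null, 2));
      session.disconnect();
    });
  });
});
```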
+ */ + profile: SamplingHeapProfile; + } + interface AddHeapSnapshotChunkEventDataType { + chunk: string; + } + interface ReportHeapSnapshotProgressEventDataType { + done: number; + total: number; + finished?: boolean | undefined; + } + interface LastSeenObjectIdEventDataType { + lastSeenObjectId: number; + timestamp: number; + } + interface HeapStatsUpdateEventDataType { + /** + * An array of triplets. Each triplet describes a fragment. The first integer is the fragment index, the second integer is a total count of objects for the fragment, the third integer is a total size of the objects for the fragment. + */ + statsUpdate: number[]; + } + } + namespace NodeTracing { + interface TraceConfig { + /** + * Controls how the trace buffer stores data. + */ + recordMode?: string | undefined; + /** + * Included category filters. + */ + includedCategories: string[]; + } + interface StartParameterType { + traceConfig: TraceConfig; + } + interface GetCategoriesReturnType { + /** + * A list of supported tracing categories. + */ + categories: string[]; + } + interface DataCollectedEventDataType { + value: Array<{}>; + } + } + namespace NodeWorker { + type WorkerID = string; + /** + * Unique identifier of attached debugging session. + */ + type SessionID = string; + interface WorkerInfo { + workerId: WorkerID; + type: string; + title: string; + url: string; + } + interface SendMessageToWorkerParameterType { + message: string; + /** + * Identifier of the session. + */ + sessionId: SessionID; + } + interface EnableParameterType { + /** + * Whether to new workers should be paused until the frontend sends `Runtime.runIfWaitingForDebugger` + * message to run them. + */ + waitForDebuggerOnStart: boolean; + } + interface DetachParameterType { + sessionId: SessionID; + } + interface AttachedToWorkerEventDataType { + /** + * Identifier assigned to the session used to send/receive messages. + */ + sessionId: SessionID; + workerInfo: WorkerInfo; + waitingForDebugger: boolean; + } + interface DetachedFromWorkerEventDataType { + /** + * Detached session identifier. + */ + sessionId: SessionID; + } + interface ReceivedMessageFromWorkerEventDataType { + /** + * Identifier of a session which sends a message. + */ + sessionId: SessionID; + message: string; + } + } + namespace NodeRuntime { + interface NotifyWhenWaitingForDisconnectParameterType { + enabled: boolean; + } + } + /** + * The `inspector.Session` is used for dispatching messages to the V8 inspector + * back-end and receiving message responses and notifications. + */ + class Session extends EventEmitter { + /** + * Create a new instance of the inspector.Session class. + * The inspector session needs to be connected through session.connect() before the messages can be dispatched to the inspector backend. + */ + constructor(); + /** + * Connects a session to the inspector back-end. + * @since v8.0.0 + */ + connect(): void; + /** + * Immediately close the session. All pending message callbacks will be called + * with an error. `session.connect()` will need to be called to be able to send + * messages again. Reconnected session will lose all inspector state, such as + * enabled agents or configured breakpoints. + * @since v8.0.0 + */ + disconnect(): void; + /** + * Posts a message to the inspector back-end. `callback` will be notified when + * a response is received. `callback` is a function that accepts two optional + * arguments: error and message-specific result. 
+ * + * ```js + * session.post('Runtime.evaluate', { expression: '2 + 2' }, + * (error, { result }) => console.log(result)); + * // Output: { type: 'number', value: 4, description: '4' } + * ``` + * + * The latest version of the V8 inspector protocol is published on the [Chrome DevTools Protocol Viewer](https://chromedevtools.github.io/devtools-protocol/v8/). + * + * Node.js inspector supports all the Chrome DevTools Protocol domains declared + * by V8\. Chrome DevTools Protocol domain provides an interface for interacting + * with one of the runtime agents used to inspect the application state and listen + * to the run-time events. + * + * ## Example usage + * + * Apart from the debugger, various V8 Profilers are available through the DevTools + * protocol. + * @since v8.0.0 + */ + post(method: string, params?: {}, callback?: (err: Error | null, params?: {}) => void): void; + post(method: string, callback?: (err: Error | null, params?: {}) => void): void; + /** + * Returns supported domains. + */ + post(method: 'Schema.getDomains', callback?: (err: Error | null, params: Schema.GetDomainsReturnType) => void): void; + /** + * Evaluates expression on global object. + */ + post(method: 'Runtime.evaluate', params?: Runtime.EvaluateParameterType, callback?: (err: Error | null, params: Runtime.EvaluateReturnType) => void): void; + post(method: 'Runtime.evaluate', callback?: (err: Error | null, params: Runtime.EvaluateReturnType) => void): void; + /** + * Add handler to promise with given promise object id. + */ + post(method: 'Runtime.awaitPromise', params?: Runtime.AwaitPromiseParameterType, callback?: (err: Error | null, params: Runtime.AwaitPromiseReturnType) => void): void; + post(method: 'Runtime.awaitPromise', callback?: (err: Error | null, params: Runtime.AwaitPromiseReturnType) => void): void; + /** + * Calls function with given declaration on the given object. Object group of the result is inherited from the target object. + */ + post(method: 'Runtime.callFunctionOn', params?: Runtime.CallFunctionOnParameterType, callback?: (err: Error | null, params: Runtime.CallFunctionOnReturnType) => void): void; + post(method: 'Runtime.callFunctionOn', callback?: (err: Error | null, params: Runtime.CallFunctionOnReturnType) => void): void; + /** + * Returns properties of a given object. Object group of the result is inherited from the target object. + */ + post(method: 'Runtime.getProperties', params?: Runtime.GetPropertiesParameterType, callback?: (err: Error | null, params: Runtime.GetPropertiesReturnType) => void): void; + post(method: 'Runtime.getProperties', callback?: (err: Error | null, params: Runtime.GetPropertiesReturnType) => void): void; + /** + * Releases remote object with given id. + */ + post(method: 'Runtime.releaseObject', params?: Runtime.ReleaseObjectParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Runtime.releaseObject', callback?: (err: Error | null) => void): void; + /** + * Releases all remote objects that belong to a given group. + */ + post(method: 'Runtime.releaseObjectGroup', params?: Runtime.ReleaseObjectGroupParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Runtime.releaseObjectGroup', callback?: (err: Error | null) => void): void; + /** + * Tells inspected instance to run if it was waiting for debugger to attach. 
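Every `post()` overload declared here takes a Node-style callback, so it can be wrapped for async/await use. A sketch; the `postAsync` helper is illustrative and not part of the inspector API:

```js
// Sketch: wrap session.post() in a promise so protocol calls compose with async/await.
const inspector = require('inspector');

const session = new inspector.Session();
session.connect();

// Illustrative helper, not part of the inspector module.
const postAsync = (method, params) =>
  new Promise((resolve, reject) =>
    session.post(method, params, (err, result) => (err ? reject(err) : resolve(result))));

(async () => {
  const { result } = await postAsync('Runtime.evaluate', { expression: 'process.version' });
  console.log(result.value); // e.g. "v18.12.1"
  session.disconnect();
})();
```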
+ */ + post(method: 'Runtime.runIfWaitingForDebugger', callback?: (err: Error | null) => void): void; + /** + * Enables reporting of execution contexts creation by means of executionContextCreated event. When the reporting gets enabled the event will be sent immediately for each existing execution context. + */ + post(method: 'Runtime.enable', callback?: (err: Error | null) => void): void; + /** + * Disables reporting of execution contexts creation. + */ + post(method: 'Runtime.disable', callback?: (err: Error | null) => void): void; + /** + * Discards collected exceptions and console API calls. + */ + post(method: 'Runtime.discardConsoleEntries', callback?: (err: Error | null) => void): void; + /** + * @experimental + */ + post(method: 'Runtime.setCustomObjectFormatterEnabled', params?: Runtime.SetCustomObjectFormatterEnabledParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Runtime.setCustomObjectFormatterEnabled', callback?: (err: Error | null) => void): void; + /** + * Compiles expression. + */ + post(method: 'Runtime.compileScript', params?: Runtime.CompileScriptParameterType, callback?: (err: Error | null, params: Runtime.CompileScriptReturnType) => void): void; + post(method: 'Runtime.compileScript', callback?: (err: Error | null, params: Runtime.CompileScriptReturnType) => void): void; + /** + * Runs script with given id in a given context. + */ + post(method: 'Runtime.runScript', params?: Runtime.RunScriptParameterType, callback?: (err: Error | null, params: Runtime.RunScriptReturnType) => void): void; + post(method: 'Runtime.runScript', callback?: (err: Error | null, params: Runtime.RunScriptReturnType) => void): void; + post(method: 'Runtime.queryObjects', params?: Runtime.QueryObjectsParameterType, callback?: (err: Error | null, params: Runtime.QueryObjectsReturnType) => void): void; + post(method: 'Runtime.queryObjects', callback?: (err: Error | null, params: Runtime.QueryObjectsReturnType) => void): void; + /** + * Returns all let, const and class variables from global scope. + */ + post( + method: 'Runtime.globalLexicalScopeNames', + params?: Runtime.GlobalLexicalScopeNamesParameterType, + callback?: (err: Error | null, params: Runtime.GlobalLexicalScopeNamesReturnType) => void + ): void; + post(method: 'Runtime.globalLexicalScopeNames', callback?: (err: Error | null, params: Runtime.GlobalLexicalScopeNamesReturnType) => void): void; + /** + * Enables debugger for the given page. Clients should not assume that the debugging has been enabled until the result for this command is received. + */ + post(method: 'Debugger.enable', callback?: (err: Error | null, params: Debugger.EnableReturnType) => void): void; + /** + * Disables debugger for given page. + */ + post(method: 'Debugger.disable', callback?: (err: Error | null) => void): void; + /** + * Activates / deactivates all breakpoints on the page. + */ + post(method: 'Debugger.setBreakpointsActive', params?: Debugger.SetBreakpointsActiveParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.setBreakpointsActive', callback?: (err: Error | null) => void): void; + /** + * Makes page not interrupt on any pauses (breakpoint, exception, dom exception etc). 
+ */ + post(method: 'Debugger.setSkipAllPauses', params?: Debugger.SetSkipAllPausesParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.setSkipAllPauses', callback?: (err: Error | null) => void): void; + /** + * Sets JavaScript breakpoint at given location specified either by URL or URL regex. Once this command is issued, all existing parsed scripts will have breakpoints resolved and returned in locations property. Further matching script parsing will result in subsequent breakpointResolved events issued. This logical breakpoint will survive page reloads. + */ + post(method: 'Debugger.setBreakpointByUrl', params?: Debugger.SetBreakpointByUrlParameterType, callback?: (err: Error | null, params: Debugger.SetBreakpointByUrlReturnType) => void): void; + post(method: 'Debugger.setBreakpointByUrl', callback?: (err: Error | null, params: Debugger.SetBreakpointByUrlReturnType) => void): void; + /** + * Sets JavaScript breakpoint at a given location. + */ + post(method: 'Debugger.setBreakpoint', params?: Debugger.SetBreakpointParameterType, callback?: (err: Error | null, params: Debugger.SetBreakpointReturnType) => void): void; + post(method: 'Debugger.setBreakpoint', callback?: (err: Error | null, params: Debugger.SetBreakpointReturnType) => void): void; + /** + * Removes JavaScript breakpoint. + */ + post(method: 'Debugger.removeBreakpoint', params?: Debugger.RemoveBreakpointParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.removeBreakpoint', callback?: (err: Error | null) => void): void; + /** + * Returns possible locations for breakpoint. scriptId in start and end range locations should be the same. + */ + post( + method: 'Debugger.getPossibleBreakpoints', + params?: Debugger.GetPossibleBreakpointsParameterType, + callback?: (err: Error | null, params: Debugger.GetPossibleBreakpointsReturnType) => void + ): void; + post(method: 'Debugger.getPossibleBreakpoints', callback?: (err: Error | null, params: Debugger.GetPossibleBreakpointsReturnType) => void): void; + /** + * Continues execution until specific location is reached. + */ + post(method: 'Debugger.continueToLocation', params?: Debugger.ContinueToLocationParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.continueToLocation', callback?: (err: Error | null) => void): void; + /** + * @experimental + */ + post(method: 'Debugger.pauseOnAsyncCall', params?: Debugger.PauseOnAsyncCallParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.pauseOnAsyncCall', callback?: (err: Error | null) => void): void; + /** + * Steps over the statement. + */ + post(method: 'Debugger.stepOver', callback?: (err: Error | null) => void): void; + /** + * Steps into the function call. + */ + post(method: 'Debugger.stepInto', params?: Debugger.StepIntoParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.stepInto', callback?: (err: Error | null) => void): void; + /** + * Steps out of the function call. + */ + post(method: 'Debugger.stepOut', callback?: (err: Error | null) => void): void; + /** + * Stops on the next JavaScript statement. + */ + post(method: 'Debugger.pause', callback?: (err: Error | null) => void): void; + /** + * This method is deprecated - use Debugger.stepInto with breakOnAsyncCall and Debugger.pauseOnAsyncTask instead. Steps into next scheduled async task if any is scheduled before next pause. 
Returns success when async task is actually scheduled, returns error if no task were scheduled or another scheduleStepIntoAsync was called. + * @experimental + */ + post(method: 'Debugger.scheduleStepIntoAsync', callback?: (err: Error | null) => void): void; + /** + * Resumes JavaScript execution. + */ + post(method: 'Debugger.resume', callback?: (err: Error | null) => void): void; + /** + * Returns stack trace with given stackTraceId. + * @experimental + */ + post(method: 'Debugger.getStackTrace', params?: Debugger.GetStackTraceParameterType, callback?: (err: Error | null, params: Debugger.GetStackTraceReturnType) => void): void; + post(method: 'Debugger.getStackTrace', callback?: (err: Error | null, params: Debugger.GetStackTraceReturnType) => void): void; + /** + * Searches for given string in script content. + */ + post(method: 'Debugger.searchInContent', params?: Debugger.SearchInContentParameterType, callback?: (err: Error | null, params: Debugger.SearchInContentReturnType) => void): void; + post(method: 'Debugger.searchInContent', callback?: (err: Error | null, params: Debugger.SearchInContentReturnType) => void): void; + /** + * Edits JavaScript source live. + */ + post(method: 'Debugger.setScriptSource', params?: Debugger.SetScriptSourceParameterType, callback?: (err: Error | null, params: Debugger.SetScriptSourceReturnType) => void): void; + post(method: 'Debugger.setScriptSource', callback?: (err: Error | null, params: Debugger.SetScriptSourceReturnType) => void): void; + /** + * Restarts particular call frame from the beginning. + */ + post(method: 'Debugger.restartFrame', params?: Debugger.RestartFrameParameterType, callback?: (err: Error | null, params: Debugger.RestartFrameReturnType) => void): void; + post(method: 'Debugger.restartFrame', callback?: (err: Error | null, params: Debugger.RestartFrameReturnType) => void): void; + /** + * Returns source for the script with given id. + */ + post(method: 'Debugger.getScriptSource', params?: Debugger.GetScriptSourceParameterType, callback?: (err: Error | null, params: Debugger.GetScriptSourceReturnType) => void): void; + post(method: 'Debugger.getScriptSource', callback?: (err: Error | null, params: Debugger.GetScriptSourceReturnType) => void): void; + /** + * Defines pause on exceptions state. Can be set to stop on all exceptions, uncaught exceptions or no exceptions. Initial pause on exceptions state is none. + */ + post(method: 'Debugger.setPauseOnExceptions', params?: Debugger.SetPauseOnExceptionsParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.setPauseOnExceptions', callback?: (err: Error | null) => void): void; + /** + * Evaluates expression on a given call frame. + */ + post(method: 'Debugger.evaluateOnCallFrame', params?: Debugger.EvaluateOnCallFrameParameterType, callback?: (err: Error | null, params: Debugger.EvaluateOnCallFrameReturnType) => void): void; + post(method: 'Debugger.evaluateOnCallFrame', callback?: (err: Error | null, params: Debugger.EvaluateOnCallFrameReturnType) => void): void; + /** + * Changes value of variable in a callframe. Object-based scopes are not supported and must be mutated manually. + */ + post(method: 'Debugger.setVariableValue', params?: Debugger.SetVariableValueParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.setVariableValue', callback?: (err: Error | null) => void): void; + /** + * Changes return value in top frame. Available only at return break position. 
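`Debugger.enable` replays `scriptParsed` for every script V8 already knows about, which is enough to look a script up and fetch its text with `Debugger.getScriptSource`. A sketch, assuming the in-process session has delivered those notifications by the time the enable callback runs:

```js
// Sketch: list parsed scripts and fetch the source text of one of them.
const inspector = require('inspector');

const session = new inspector.Session();
session.connect();

const scripts = [];
session.on('Debugger.scriptParsed', ({ params }) => {
  if (params.url) scripts.push({ scriptId: params.scriptId, url: params.url });
});

session.post('Debugger.enable', () => {
  const target = scripts[0]; // pick any parsed script for the demonstration
  session.post('Debugger.getScriptSource', { scriptId: target.scriptId }, (err, res) => {
    if (err) throw err;
    console.log(target.url, '->', res.scriptSource.length, 'characters of source');
    session.post('Debugger.disable', () => session.disconnect());
  });
});
```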
+ * @experimental + */ + post(method: 'Debugger.setReturnValue', params?: Debugger.SetReturnValueParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.setReturnValue', callback?: (err: Error | null) => void): void; + /** + * Enables or disables async call stacks tracking. + */ + post(method: 'Debugger.setAsyncCallStackDepth', params?: Debugger.SetAsyncCallStackDepthParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.setAsyncCallStackDepth', callback?: (err: Error | null) => void): void; + /** + * Replace previous blackbox patterns with passed ones. Forces backend to skip stepping/pausing in scripts with url matching one of the patterns. VM will try to leave blackboxed script by performing 'step in' several times, finally resorting to 'step out' if unsuccessful. + * @experimental + */ + post(method: 'Debugger.setBlackboxPatterns', params?: Debugger.SetBlackboxPatternsParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.setBlackboxPatterns', callback?: (err: Error | null) => void): void; + /** + * Makes backend skip steps in the script in blackboxed ranges. VM will try leave blacklisted scripts by performing 'step in' several times, finally resorting to 'step out' if unsuccessful. Positions array contains positions where blackbox state is changed. First interval isn't blackboxed. Array should be sorted. + * @experimental + */ + post(method: 'Debugger.setBlackboxedRanges', params?: Debugger.SetBlackboxedRangesParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Debugger.setBlackboxedRanges', callback?: (err: Error | null) => void): void; + /** + * Enables console domain, sends the messages collected so far to the client by means of the messageAdded notification. + */ + post(method: 'Console.enable', callback?: (err: Error | null) => void): void; + /** + * Disables console domain, prevents further console messages from being reported to the client. + */ + post(method: 'Console.disable', callback?: (err: Error | null) => void): void; + /** + * Does nothing. + */ + post(method: 'Console.clearMessages', callback?: (err: Error | null) => void): void; + post(method: 'Profiler.enable', callback?: (err: Error | null) => void): void; + post(method: 'Profiler.disable', callback?: (err: Error | null) => void): void; + /** + * Changes CPU profiler sampling interval. Must be called before CPU profiles recording started. + */ + post(method: 'Profiler.setSamplingInterval', params?: Profiler.SetSamplingIntervalParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Profiler.setSamplingInterval', callback?: (err: Error | null) => void): void; + post(method: 'Profiler.start', callback?: (err: Error | null) => void): void; + post(method: 'Profiler.stop', callback?: (err: Error | null, params: Profiler.StopReturnType) => void): void; + /** + * Enable precise code coverage. Coverage data for JavaScript executed before enabling precise code coverage may be incomplete. Enabling prevents running optimized code and resets execution counters. + */ + post(method: 'Profiler.startPreciseCoverage', params?: Profiler.StartPreciseCoverageParameterType, callback?: (err: Error | null) => void): void; + post(method: 'Profiler.startPreciseCoverage', callback?: (err: Error | null) => void): void; + /** + * Disable precise code coverage. Disabling releases unnecessary execution count records and allows executing optimized code. 
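The `Profiler.start`/`Profiler.stop` pair declared above is what captures a CPU profile of the running process; the stop response carries the `Profile` shape defined earlier. A minimal sketch (the output file name is arbitrary):

```js
// Sketch: record a CPU profile of this process and write it where DevTools can load it.
const fs = require('fs');
const inspector = require('inspector');

const session = new inspector.Session();
session.connect();

session.post('Profiler.enable', () => {
  session.post('Profiler.start', () => {
    // ... run the workload to be profiled here ...
    session.post('Profiler.stop', (err, res) => {
      if (err) throw err;
      fs.writeFileSync('./profile.cpuprofile', JSON.stringify(res.profile));
      session.disconnect();
    });
  });
});
```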
+ */ + post(method: 'Profiler.stopPreciseCoverage', callback?: (err: Error | null) => void): void; + /** + * Collect coverage data for the current isolate, and resets execution counters. Precise code coverage needs to have started. + */ + post(method: 'Profiler.takePreciseCoverage', callback?: (err: Error | null, params: Profiler.TakePreciseCoverageReturnType) => void): void; + /** + * Collect coverage data for the current isolate. The coverage data may be incomplete due to garbage collection. + */ + post(method: 'Profiler.getBestEffortCoverage', callback?: (err: Error | null, params: Profiler.GetBestEffortCoverageReturnType) => void): void; + /** + * Enable type profile. + * @experimental + */ + post(method: 'Profiler.startTypeProfile', callback?: (err: Error | null) => void): void; + /** + * Disable type profile. Disabling releases type profile data collected so far. + * @experimental + */ + post(method: 'Profiler.stopTypeProfile', callback?: (err: Error | null) => void): void; + /** + * Collect type profile. + * @experimental + */ + post(method: 'Profiler.takeTypeProfile', callback?: (err: Error | null, params: Profiler.TakeTypeProfileReturnType) => void): void; + post(method: 'HeapProfiler.enable', callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.disable', callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.startTrackingHeapObjects', params?: HeapProfiler.StartTrackingHeapObjectsParameterType, callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.startTrackingHeapObjects', callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.stopTrackingHeapObjects', params?: HeapProfiler.StopTrackingHeapObjectsParameterType, callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.stopTrackingHeapObjects', callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.takeHeapSnapshot', params?: HeapProfiler.TakeHeapSnapshotParameterType, callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.takeHeapSnapshot', callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.collectGarbage', callback?: (err: Error | null) => void): void; + post( + method: 'HeapProfiler.getObjectByHeapObjectId', + params?: HeapProfiler.GetObjectByHeapObjectIdParameterType, + callback?: (err: Error | null, params: HeapProfiler.GetObjectByHeapObjectIdReturnType) => void + ): void; + post(method: 'HeapProfiler.getObjectByHeapObjectId', callback?: (err: Error | null, params: HeapProfiler.GetObjectByHeapObjectIdReturnType) => void): void; + /** + * Enables console to refer to the node with given id via $x (see Command Line API for more details $x functions). 
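`HeapProfiler.takeHeapSnapshot`, declared above, streams the snapshot back through `HeapProfiler.addHeapSnapshotChunk` events rather than in the command response. A sketch of writing it to disk (the file name is arbitrary):

```js
// Sketch: stream a heap snapshot of this process to a file.
const fs = require('fs');
const inspector = require('inspector');

const session = new inspector.Session();
const fd = fs.openSync('./node.heapsnapshot', 'w');
session.connect();

// Snapshot data arrives as a sequence of addHeapSnapshotChunk notifications.
session.on('HeapProfiler.addHeapSnapshotChunk', (m) => {
  fs.writeSync(fd, m.params.chunk);
});

session.post('HeapProfiler.takeHeapSnapshot', null, (err) => {
  session.disconnect();
  fs.closeSync(fd);
  if (err) throw err;
  console.log('heap snapshot written to node.heapsnapshot');
});
```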
+ */ + post(method: 'HeapProfiler.addInspectedHeapObject', params?: HeapProfiler.AddInspectedHeapObjectParameterType, callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.addInspectedHeapObject', callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.getHeapObjectId', params?: HeapProfiler.GetHeapObjectIdParameterType, callback?: (err: Error | null, params: HeapProfiler.GetHeapObjectIdReturnType) => void): void; + post(method: 'HeapProfiler.getHeapObjectId', callback?: (err: Error | null, params: HeapProfiler.GetHeapObjectIdReturnType) => void): void; + post(method: 'HeapProfiler.startSampling', params?: HeapProfiler.StartSamplingParameterType, callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.startSampling', callback?: (err: Error | null) => void): void; + post(method: 'HeapProfiler.stopSampling', callback?: (err: Error | null, params: HeapProfiler.StopSamplingReturnType) => void): void; + post(method: 'HeapProfiler.getSamplingProfile', callback?: (err: Error | null, params: HeapProfiler.GetSamplingProfileReturnType) => void): void; + /** + * Gets supported tracing categories. + */ + post(method: 'NodeTracing.getCategories', callback?: (err: Error | null, params: NodeTracing.GetCategoriesReturnType) => void): void; + /** + * Start trace events collection. + */ + post(method: 'NodeTracing.start', params?: NodeTracing.StartParameterType, callback?: (err: Error | null) => void): void; + post(method: 'NodeTracing.start', callback?: (err: Error | null) => void): void; + /** + * Stop trace events collection. Remaining collected events will be sent as a sequence of + * dataCollected events followed by tracingComplete event. + */ + post(method: 'NodeTracing.stop', callback?: (err: Error | null) => void): void; + /** + * Sends protocol message over session with given id. + */ + post(method: 'NodeWorker.sendMessageToWorker', params?: NodeWorker.SendMessageToWorkerParameterType, callback?: (err: Error | null) => void): void; + post(method: 'NodeWorker.sendMessageToWorker', callback?: (err: Error | null) => void): void; + /** + * Instructs the inspector to attach to running workers. Will also attach to new workers + * as they start + */ + post(method: 'NodeWorker.enable', params?: NodeWorker.EnableParameterType, callback?: (err: Error | null) => void): void; + post(method: 'NodeWorker.enable', callback?: (err: Error | null) => void): void; + /** + * Detaches from all running workers and disables attaching to new workers as they are started. + */ + post(method: 'NodeWorker.disable', callback?: (err: Error | null) => void): void; + /** + * Detached from the worker with given sessionId. + */ + post(method: 'NodeWorker.detach', params?: NodeWorker.DetachParameterType, callback?: (err: Error | null) => void): void; + post(method: 'NodeWorker.detach', callback?: (err: Error | null) => void): void; + /** + * Enable the `NodeRuntime.waitingForDisconnect`. + */ + post(method: 'NodeRuntime.notifyWhenWaitingForDisconnect', params?: NodeRuntime.NotifyWhenWaitingForDisconnectParameterType, callback?: (err: Error | null) => void): void; + post(method: 'NodeRuntime.notifyWhenWaitingForDisconnect', callback?: (err: Error | null) => void): void; + // Events + addListener(event: string, listener: (...args: any[]) => void): this; + /** + * Emitted when any notification from the V8 Inspector is received. 
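The NodeTracing domain above controls trace-event collection at runtime; remaining events are flushed as `dataCollected` messages once `NodeTracing.stop` is posted, followed by `tracingComplete`. A sketch, assuming the experimental domain is available in the running Node version and using `node.perf` as an example category:

```js
// Sketch: collect trace events for one category, then read them back on stop.
const inspector = require('inspector');

const session = new inspector.Session();
session.connect();

const events = [];
session.on('NodeTracing.dataCollected', (m) => events.push(...m.params.value));
session.on('NodeTracing.tracingComplete', () => {
  console.log('collected', events.length, 'trace events');
  session.disconnect();
});

session.post('NodeTracing.start', {
  traceConfig: { includedCategories: ['node.perf'] },
}, (err) => {
  if (err) throw err;
  // ... run the code to be traced, then stop collection ...
  setTimeout(() => session.post('NodeTracing.stop'), 1000);
});
```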
+ */ + addListener(event: 'inspectorNotification', listener: (message: InspectorNotification<{}>) => void): this; + /** + * Issued when new execution context is created. + */ + addListener(event: 'Runtime.executionContextCreated', listener: (message: InspectorNotification) => void): this; + /** + * Issued when execution context is destroyed. + */ + addListener(event: 'Runtime.executionContextDestroyed', listener: (message: InspectorNotification) => void): this; + /** + * Issued when all executionContexts were cleared in browser + */ + addListener(event: 'Runtime.executionContextsCleared', listener: () => void): this; + /** + * Issued when exception was thrown and unhandled. + */ + addListener(event: 'Runtime.exceptionThrown', listener: (message: InspectorNotification) => void): this; + /** + * Issued when unhandled exception was revoked. + */ + addListener(event: 'Runtime.exceptionRevoked', listener: (message: InspectorNotification) => void): this; + /** + * Issued when console API was called. + */ + addListener(event: 'Runtime.consoleAPICalled', listener: (message: InspectorNotification) => void): this; + /** + * Issued when object should be inspected (for example, as a result of inspect() command line API call). + */ + addListener(event: 'Runtime.inspectRequested', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine parses script. This event is also fired for all known and uncollected scripts upon enabling debugger. + */ + addListener(event: 'Debugger.scriptParsed', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine fails to parse the script. + */ + addListener(event: 'Debugger.scriptFailedToParse', listener: (message: InspectorNotification) => void): this; + /** + * Fired when breakpoint is resolved to an actual script and location. + */ + addListener(event: 'Debugger.breakpointResolved', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine stopped on breakpoint or exception or any other stop criteria. + */ + addListener(event: 'Debugger.paused', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine resumed execution. + */ + addListener(event: 'Debugger.resumed', listener: () => void): this; + /** + * Issued when new console message is added. + */ + addListener(event: 'Console.messageAdded', listener: (message: InspectorNotification) => void): this; + /** + * Sent when new profile recording is started using console.profile() call. + */ + addListener(event: 'Profiler.consoleProfileStarted', listener: (message: InspectorNotification) => void): this; + addListener(event: 'Profiler.consoleProfileFinished', listener: (message: InspectorNotification) => void): this; + addListener(event: 'HeapProfiler.addHeapSnapshotChunk', listener: (message: InspectorNotification) => void): this; + addListener(event: 'HeapProfiler.resetProfiles', listener: () => void): this; + addListener(event: 'HeapProfiler.reportHeapSnapshotProgress', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend regularly sends a current value for last seen object id and corresponding timestamp. If the were changes in the heap since last event then one or more heapStatsUpdate events will be sent before a new lastSeenObjectId event. 
+ */ + addListener(event: 'HeapProfiler.lastSeenObjectId', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend may send update for one or more fragments + */ + addListener(event: 'HeapProfiler.heapStatsUpdate', listener: (message: InspectorNotification) => void): this; + /** + * Contains an bucket of collected trace events. + */ + addListener(event: 'NodeTracing.dataCollected', listener: (message: InspectorNotification) => void): this; + /** + * Signals that tracing is stopped and there is no trace buffers pending flush, all data were + * delivered via dataCollected events. + */ + addListener(event: 'NodeTracing.tracingComplete', listener: () => void): this; + /** + * Issued when attached to a worker. + */ + addListener(event: 'NodeWorker.attachedToWorker', listener: (message: InspectorNotification) => void): this; + /** + * Issued when detached from the worker. + */ + addListener(event: 'NodeWorker.detachedFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * Notifies about a new protocol message received from the session + * (session ID is provided in attachedToWorker notification). + */ + addListener(event: 'NodeWorker.receivedMessageFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * This event is fired instead of `Runtime.executionContextDestroyed` when + * enabled. + * It is fired when the Node process finished all code execution and is + * waiting for all frontends to disconnect. + */ + addListener(event: 'NodeRuntime.waitingForDisconnect', listener: () => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: 'inspectorNotification', message: InspectorNotification<{}>): boolean; + emit(event: 'Runtime.executionContextCreated', message: InspectorNotification): boolean; + emit(event: 'Runtime.executionContextDestroyed', message: InspectorNotification): boolean; + emit(event: 'Runtime.executionContextsCleared'): boolean; + emit(event: 'Runtime.exceptionThrown', message: InspectorNotification): boolean; + emit(event: 'Runtime.exceptionRevoked', message: InspectorNotification): boolean; + emit(event: 'Runtime.consoleAPICalled', message: InspectorNotification): boolean; + emit(event: 'Runtime.inspectRequested', message: InspectorNotification): boolean; + emit(event: 'Debugger.scriptParsed', message: InspectorNotification): boolean; + emit(event: 'Debugger.scriptFailedToParse', message: InspectorNotification): boolean; + emit(event: 'Debugger.breakpointResolved', message: InspectorNotification): boolean; + emit(event: 'Debugger.paused', message: InspectorNotification): boolean; + emit(event: 'Debugger.resumed'): boolean; + emit(event: 'Console.messageAdded', message: InspectorNotification): boolean; + emit(event: 'Profiler.consoleProfileStarted', message: InspectorNotification): boolean; + emit(event: 'Profiler.consoleProfileFinished', message: InspectorNotification): boolean; + emit(event: 'HeapProfiler.addHeapSnapshotChunk', message: InspectorNotification): boolean; + emit(event: 'HeapProfiler.resetProfiles'): boolean; + emit(event: 'HeapProfiler.reportHeapSnapshotProgress', message: InspectorNotification): boolean; + emit(event: 'HeapProfiler.lastSeenObjectId', message: InspectorNotification): boolean; + emit(event: 'HeapProfiler.heapStatsUpdate', message: InspectorNotification): boolean; + emit(event: 'NodeTracing.dataCollected', message: InspectorNotification): boolean; + emit(event: 'NodeTracing.tracingComplete'): boolean; + 
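The typed `on()`/`once()`/`prepend*` overloads that follow mirror the `emit` signatures above. One concrete use: after `Runtime.enable`, every console API call in the process is reported as a `Runtime.consoleAPICalled` notification. A sketch (it writes to `process.stdout` directly so the listener does not re-trigger itself):

```js
// Sketch: mirror console API calls made by this process via Runtime.consoleAPICalled.
const inspector = require('inspector');

const session = new inspector.Session();
session.connect();

session.on('Runtime.consoleAPICalled', ({ params }) => {
  // params.type is e.g. 'log' or 'error'; params.args are RemoteObjects.
  const rendered = params.args
    .map((a) => (a.value !== undefined ? a.value : a.description))
    .join(' ');
  process.stdout.write(`[console.${params.type}] ${rendered}\n`);
});

session.post('Runtime.enable', () => {
  console.log('hello from the inspected process'); // mirrored by the listener above
});
```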
emit(event: 'NodeWorker.attachedToWorker', message: InspectorNotification): boolean; + emit(event: 'NodeWorker.detachedFromWorker', message: InspectorNotification): boolean; + emit(event: 'NodeWorker.receivedMessageFromWorker', message: InspectorNotification): boolean; + emit(event: 'NodeRuntime.waitingForDisconnect'): boolean; + on(event: string, listener: (...args: any[]) => void): this; + /** + * Emitted when any notification from the V8 Inspector is received. + */ + on(event: 'inspectorNotification', listener: (message: InspectorNotification<{}>) => void): this; + /** + * Issued when new execution context is created. + */ + on(event: 'Runtime.executionContextCreated', listener: (message: InspectorNotification) => void): this; + /** + * Issued when execution context is destroyed. + */ + on(event: 'Runtime.executionContextDestroyed', listener: (message: InspectorNotification) => void): this; + /** + * Issued when all executionContexts were cleared in browser + */ + on(event: 'Runtime.executionContextsCleared', listener: () => void): this; + /** + * Issued when exception was thrown and unhandled. + */ + on(event: 'Runtime.exceptionThrown', listener: (message: InspectorNotification) => void): this; + /** + * Issued when unhandled exception was revoked. + */ + on(event: 'Runtime.exceptionRevoked', listener: (message: InspectorNotification) => void): this; + /** + * Issued when console API was called. + */ + on(event: 'Runtime.consoleAPICalled', listener: (message: InspectorNotification) => void): this; + /** + * Issued when object should be inspected (for example, as a result of inspect() command line API call). + */ + on(event: 'Runtime.inspectRequested', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine parses script. This event is also fired for all known and uncollected scripts upon enabling debugger. + */ + on(event: 'Debugger.scriptParsed', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine fails to parse the script. + */ + on(event: 'Debugger.scriptFailedToParse', listener: (message: InspectorNotification) => void): this; + /** + * Fired when breakpoint is resolved to an actual script and location. + */ + on(event: 'Debugger.breakpointResolved', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine stopped on breakpoint or exception or any other stop criteria. + */ + on(event: 'Debugger.paused', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine resumed execution. + */ + on(event: 'Debugger.resumed', listener: () => void): this; + /** + * Issued when new console message is added. + */ + on(event: 'Console.messageAdded', listener: (message: InspectorNotification) => void): this; + /** + * Sent when new profile recording is started using console.profile() call. 
+ */ + on(event: 'Profiler.consoleProfileStarted', listener: (message: InspectorNotification) => void): this; + on(event: 'Profiler.consoleProfileFinished', listener: (message: InspectorNotification) => void): this; + on(event: 'HeapProfiler.addHeapSnapshotChunk', listener: (message: InspectorNotification) => void): this; + on(event: 'HeapProfiler.resetProfiles', listener: () => void): this; + on(event: 'HeapProfiler.reportHeapSnapshotProgress', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend regularly sends a current value for last seen object id and corresponding timestamp. If the were changes in the heap since last event then one or more heapStatsUpdate events will be sent before a new lastSeenObjectId event. + */ + on(event: 'HeapProfiler.lastSeenObjectId', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend may send update for one or more fragments + */ + on(event: 'HeapProfiler.heapStatsUpdate', listener: (message: InspectorNotification) => void): this; + /** + * Contains an bucket of collected trace events. + */ + on(event: 'NodeTracing.dataCollected', listener: (message: InspectorNotification) => void): this; + /** + * Signals that tracing is stopped and there is no trace buffers pending flush, all data were + * delivered via dataCollected events. + */ + on(event: 'NodeTracing.tracingComplete', listener: () => void): this; + /** + * Issued when attached to a worker. + */ + on(event: 'NodeWorker.attachedToWorker', listener: (message: InspectorNotification) => void): this; + /** + * Issued when detached from the worker. + */ + on(event: 'NodeWorker.detachedFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * Notifies about a new protocol message received from the session + * (session ID is provided in attachedToWorker notification). + */ + on(event: 'NodeWorker.receivedMessageFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * This event is fired instead of `Runtime.executionContextDestroyed` when + * enabled. + * It is fired when the Node process finished all code execution and is + * waiting for all frontends to disconnect. + */ + on(event: 'NodeRuntime.waitingForDisconnect', listener: () => void): this; + once(event: string, listener: (...args: any[]) => void): this; + /** + * Emitted when any notification from the V8 Inspector is received. + */ + once(event: 'inspectorNotification', listener: (message: InspectorNotification<{}>) => void): this; + /** + * Issued when new execution context is created. + */ + once(event: 'Runtime.executionContextCreated', listener: (message: InspectorNotification) => void): this; + /** + * Issued when execution context is destroyed. + */ + once(event: 'Runtime.executionContextDestroyed', listener: (message: InspectorNotification) => void): this; + /** + * Issued when all executionContexts were cleared in browser + */ + once(event: 'Runtime.executionContextsCleared', listener: () => void): this; + /** + * Issued when exception was thrown and unhandled. + */ + once(event: 'Runtime.exceptionThrown', listener: (message: InspectorNotification) => void): this; + /** + * Issued when unhandled exception was revoked. + */ + once(event: 'Runtime.exceptionRevoked', listener: (message: InspectorNotification) => void): this; + /** + * Issued when console API was called. 
+ */ + once(event: 'Runtime.consoleAPICalled', listener: (message: InspectorNotification) => void): this; + /** + * Issued when object should be inspected (for example, as a result of inspect() command line API call). + */ + once(event: 'Runtime.inspectRequested', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine parses script. This event is also fired for all known and uncollected scripts upon enabling debugger. + */ + once(event: 'Debugger.scriptParsed', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine fails to parse the script. + */ + once(event: 'Debugger.scriptFailedToParse', listener: (message: InspectorNotification) => void): this; + /** + * Fired when breakpoint is resolved to an actual script and location. + */ + once(event: 'Debugger.breakpointResolved', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine stopped on breakpoint or exception or any other stop criteria. + */ + once(event: 'Debugger.paused', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine resumed execution. + */ + once(event: 'Debugger.resumed', listener: () => void): this; + /** + * Issued when new console message is added. + */ + once(event: 'Console.messageAdded', listener: (message: InspectorNotification) => void): this; + /** + * Sent when new profile recording is started using console.profile() call. + */ + once(event: 'Profiler.consoleProfileStarted', listener: (message: InspectorNotification) => void): this; + once(event: 'Profiler.consoleProfileFinished', listener: (message: InspectorNotification) => void): this; + once(event: 'HeapProfiler.addHeapSnapshotChunk', listener: (message: InspectorNotification) => void): this; + once(event: 'HeapProfiler.resetProfiles', listener: () => void): this; + once(event: 'HeapProfiler.reportHeapSnapshotProgress', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend regularly sends a current value for last seen object id and corresponding timestamp. If the were changes in the heap since last event then one or more heapStatsUpdate events will be sent before a new lastSeenObjectId event. + */ + once(event: 'HeapProfiler.lastSeenObjectId', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend may send update for one or more fragments + */ + once(event: 'HeapProfiler.heapStatsUpdate', listener: (message: InspectorNotification) => void): this; + /** + * Contains an bucket of collected trace events. + */ + once(event: 'NodeTracing.dataCollected', listener: (message: InspectorNotification) => void): this; + /** + * Signals that tracing is stopped and there is no trace buffers pending flush, all data were + * delivered via dataCollected events. + */ + once(event: 'NodeTracing.tracingComplete', listener: () => void): this; + /** + * Issued when attached to a worker. + */ + once(event: 'NodeWorker.attachedToWorker', listener: (message: InspectorNotification) => void): this; + /** + * Issued when detached from the worker. + */ + once(event: 'NodeWorker.detachedFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * Notifies about a new protocol message received from the session + * (session ID is provided in attachedToWorker notification). 
+ */ + once(event: 'NodeWorker.receivedMessageFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * This event is fired instead of `Runtime.executionContextDestroyed` when + * enabled. + * It is fired when the Node process finished all code execution and is + * waiting for all frontends to disconnect. + */ + once(event: 'NodeRuntime.waitingForDisconnect', listener: () => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + /** + * Emitted when any notification from the V8 Inspector is received. + */ + prependListener(event: 'inspectorNotification', listener: (message: InspectorNotification<{}>) => void): this; + /** + * Issued when new execution context is created. + */ + prependListener(event: 'Runtime.executionContextCreated', listener: (message: InspectorNotification) => void): this; + /** + * Issued when execution context is destroyed. + */ + prependListener(event: 'Runtime.executionContextDestroyed', listener: (message: InspectorNotification) => void): this; + /** + * Issued when all executionContexts were cleared in browser + */ + prependListener(event: 'Runtime.executionContextsCleared', listener: () => void): this; + /** + * Issued when exception was thrown and unhandled. + */ + prependListener(event: 'Runtime.exceptionThrown', listener: (message: InspectorNotification) => void): this; + /** + * Issued when unhandled exception was revoked. + */ + prependListener(event: 'Runtime.exceptionRevoked', listener: (message: InspectorNotification) => void): this; + /** + * Issued when console API was called. + */ + prependListener(event: 'Runtime.consoleAPICalled', listener: (message: InspectorNotification) => void): this; + /** + * Issued when object should be inspected (for example, as a result of inspect() command line API call). + */ + prependListener(event: 'Runtime.inspectRequested', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine parses script. This event is also fired for all known and uncollected scripts upon enabling debugger. + */ + prependListener(event: 'Debugger.scriptParsed', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine fails to parse the script. + */ + prependListener(event: 'Debugger.scriptFailedToParse', listener: (message: InspectorNotification) => void): this; + /** + * Fired when breakpoint is resolved to an actual script and location. + */ + prependListener(event: 'Debugger.breakpointResolved', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine stopped on breakpoint or exception or any other stop criteria. + */ + prependListener(event: 'Debugger.paused', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine resumed execution. + */ + prependListener(event: 'Debugger.resumed', listener: () => void): this; + /** + * Issued when new console message is added. + */ + prependListener(event: 'Console.messageAdded', listener: (message: InspectorNotification) => void): this; + /** + * Sent when new profile recording is started using console.profile() call. 
+ */ + prependListener(event: 'Profiler.consoleProfileStarted', listener: (message: InspectorNotification) => void): this; + prependListener(event: 'Profiler.consoleProfileFinished', listener: (message: InspectorNotification) => void): this; + prependListener(event: 'HeapProfiler.addHeapSnapshotChunk', listener: (message: InspectorNotification) => void): this; + prependListener(event: 'HeapProfiler.resetProfiles', listener: () => void): this; + prependListener(event: 'HeapProfiler.reportHeapSnapshotProgress', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend regularly sends a current value for last seen object id and corresponding timestamp. If the were changes in the heap since last event then one or more heapStatsUpdate events will be sent before a new lastSeenObjectId event. + */ + prependListener(event: 'HeapProfiler.lastSeenObjectId', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend may send update for one or more fragments + */ + prependListener(event: 'HeapProfiler.heapStatsUpdate', listener: (message: InspectorNotification) => void): this; + /** + * Contains an bucket of collected trace events. + */ + prependListener(event: 'NodeTracing.dataCollected', listener: (message: InspectorNotification) => void): this; + /** + * Signals that tracing is stopped and there is no trace buffers pending flush, all data were + * delivered via dataCollected events. + */ + prependListener(event: 'NodeTracing.tracingComplete', listener: () => void): this; + /** + * Issued when attached to a worker. + */ + prependListener(event: 'NodeWorker.attachedToWorker', listener: (message: InspectorNotification) => void): this; + /** + * Issued when detached from the worker. + */ + prependListener(event: 'NodeWorker.detachedFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * Notifies about a new protocol message received from the session + * (session ID is provided in attachedToWorker notification). + */ + prependListener(event: 'NodeWorker.receivedMessageFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * This event is fired instead of `Runtime.executionContextDestroyed` when + * enabled. + * It is fired when the Node process finished all code execution and is + * waiting for all frontends to disconnect. + */ + prependListener(event: 'NodeRuntime.waitingForDisconnect', listener: () => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + /** + * Emitted when any notification from the V8 Inspector is received. + */ + prependOnceListener(event: 'inspectorNotification', listener: (message: InspectorNotification<{}>) => void): this; + /** + * Issued when new execution context is created. + */ + prependOnceListener(event: 'Runtime.executionContextCreated', listener: (message: InspectorNotification) => void): this; + /** + * Issued when execution context is destroyed. + */ + prependOnceListener(event: 'Runtime.executionContextDestroyed', listener: (message: InspectorNotification) => void): this; + /** + * Issued when all executionContexts were cleared in browser + */ + prependOnceListener(event: 'Runtime.executionContextsCleared', listener: () => void): this; + /** + * Issued when exception was thrown and unhandled. 
+ */ + prependOnceListener(event: 'Runtime.exceptionThrown', listener: (message: InspectorNotification) => void): this; + /** + * Issued when unhandled exception was revoked. + */ + prependOnceListener(event: 'Runtime.exceptionRevoked', listener: (message: InspectorNotification) => void): this; + /** + * Issued when console API was called. + */ + prependOnceListener(event: 'Runtime.consoleAPICalled', listener: (message: InspectorNotification) => void): this; + /** + * Issued when object should be inspected (for example, as a result of inspect() command line API call). + */ + prependOnceListener(event: 'Runtime.inspectRequested', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine parses script. This event is also fired for all known and uncollected scripts upon enabling debugger. + */ + prependOnceListener(event: 'Debugger.scriptParsed', listener: (message: InspectorNotification) => void): this; + /** + * Fired when virtual machine fails to parse the script. + */ + prependOnceListener(event: 'Debugger.scriptFailedToParse', listener: (message: InspectorNotification) => void): this; + /** + * Fired when breakpoint is resolved to an actual script and location. + */ + prependOnceListener(event: 'Debugger.breakpointResolved', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine stopped on breakpoint or exception or any other stop criteria. + */ + prependOnceListener(event: 'Debugger.paused', listener: (message: InspectorNotification) => void): this; + /** + * Fired when the virtual machine resumed execution. + */ + prependOnceListener(event: 'Debugger.resumed', listener: () => void): this; + /** + * Issued when new console message is added. + */ + prependOnceListener(event: 'Console.messageAdded', listener: (message: InspectorNotification) => void): this; + /** + * Sent when new profile recording is started using console.profile() call. + */ + prependOnceListener(event: 'Profiler.consoleProfileStarted', listener: (message: InspectorNotification) => void): this; + prependOnceListener(event: 'Profiler.consoleProfileFinished', listener: (message: InspectorNotification) => void): this; + prependOnceListener(event: 'HeapProfiler.addHeapSnapshotChunk', listener: (message: InspectorNotification) => void): this; + prependOnceListener(event: 'HeapProfiler.resetProfiles', listener: () => void): this; + prependOnceListener(event: 'HeapProfiler.reportHeapSnapshotProgress', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend regularly sends a current value for last seen object id and corresponding timestamp. If the were changes in the heap since last event then one or more heapStatsUpdate events will be sent before a new lastSeenObjectId event. + */ + prependOnceListener(event: 'HeapProfiler.lastSeenObjectId', listener: (message: InspectorNotification) => void): this; + /** + * If heap objects tracking has been started then backend may send update for one or more fragments + */ + prependOnceListener(event: 'HeapProfiler.heapStatsUpdate', listener: (message: InspectorNotification) => void): this; + /** + * Contains an bucket of collected trace events. + */ + prependOnceListener(event: 'NodeTracing.dataCollected', listener: (message: InspectorNotification) => void): this; + /** + * Signals that tracing is stopped and there is no trace buffers pending flush, all data were + * delivered via dataCollected events. 
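For orientation, a minimal sketch of how the `Session` event overloads declared above are typically consumed; the Debugger-domain round trip (enable, pause, resume) is illustrative only.

```ts
import * as inspector from 'node:inspector';

const session = new inspector.Session();
session.connect();

// The listener receives an InspectorNotification whose `params` carry the event payload.
session.once('Debugger.paused', ({ params }) => {
  console.log('paused at', params.callFrames[0]?.url);
  session.post('Debugger.resume');
});

session.post('Debugger.enable', (err) => {
  if (err) throw err;
  // Ask the VM to pause so the 'Debugger.paused' listener above fires.
  session.post('Debugger.pause');
});
```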
+ */ + prependOnceListener(event: 'NodeTracing.tracingComplete', listener: () => void): this; + /** + * Issued when attached to a worker. + */ + prependOnceListener(event: 'NodeWorker.attachedToWorker', listener: (message: InspectorNotification) => void): this; + /** + * Issued when detached from the worker. + */ + prependOnceListener(event: 'NodeWorker.detachedFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * Notifies about a new protocol message received from the session + * (session ID is provided in attachedToWorker notification). + */ + prependOnceListener(event: 'NodeWorker.receivedMessageFromWorker', listener: (message: InspectorNotification) => void): this; + /** + * This event is fired instead of `Runtime.executionContextDestroyed` when + * enabled. + * It is fired when the Node process finished all code execution and is + * waiting for all frontends to disconnect. + */ + prependOnceListener(event: 'NodeRuntime.waitingForDisconnect', listener: () => void): this; + } + /** + * Activate inspector on host and port. Equivalent to`node --inspect=[[host:]port]`, but can be done programmatically after node has + * started. + * + * If wait is `true`, will block until a client has connected to the inspect port + * and flow control has been passed to the debugger client. + * + * See the `security warning` regarding the `host`parameter usage. + * @param [port='what was specified on the CLI'] Port to listen on for inspector connections. Optional. + * @param [host='what was specified on the CLI'] Host to listen on for inspector connections. Optional. + * @param [wait=false] Block until a client has connected. Optional. + */ + function open(port?: number, host?: string, wait?: boolean): void; + /** + * Deactivate the inspector. Blocks until there are no active connections. + */ + function close(): void; + /** + * Return the URL of the active inspector, or `undefined` if there is none. + * + * ```console + * $ node --inspect -p 'inspector.url()' + * Debugger listening on ws://127.0.0.1:9229/166e272e-7a30-4d09-97ce-f1c012b43c34 + * For help, see: https://nodejs.org/en/docs/inspector + * ws://127.0.0.1:9229/166e272e-7a30-4d09-97ce-f1c012b43c34 + * + * $ node --inspect=localhost:3000 -p 'inspector.url()' + * Debugger listening on ws://localhost:3000/51cf8d0e-3c36-4c59-8efd-54519839e56a + * For help, see: https://nodejs.org/en/docs/inspector + * ws://localhost:3000/51cf8d0e-3c36-4c59-8efd-54519839e56a + * + * $ node -p 'inspector.url()' + * undefined + * ``` + */ + function url(): string | undefined; + /** + * Blocks until a client (existing or connected later) has sent`Runtime.runIfWaitingForDebugger` command. + * + * An exception will be thrown if there is no active inspector. + * @since v12.7.0 + */ + function waitForDebugger(): void; +} +/** + * The inspector module provides an API for interacting with the V8 inspector. + */ +declare module 'node:inspector' { + import inspector = require('inspector'); + export = inspector; +} diff --git a/node_modules/@types/node/ts4.8/module.d.ts b/node_modules/@types/node/ts4.8/module.d.ts new file mode 100755 index 0000000..d83aec9 --- /dev/null +++ b/node_modules/@types/node/ts4.8/module.d.ts @@ -0,0 +1,114 @@ +/** + * @since v0.3.7 + */ +declare module 'module' { + import { URL } from 'node:url'; + namespace Module { + /** + * The `module.syncBuiltinESMExports()` method updates all the live bindings for + * builtin `ES Modules` to match the properties of the `CommonJS` exports. 
It + * does not add or remove exported names from the `ES Modules`. + * + * ```js + * const fs = require('fs'); + * const assert = require('assert'); + * const { syncBuiltinESMExports } = require('module'); + * + * fs.readFile = newAPI; + * + * delete fs.readFileSync; + * + * function newAPI() { + * // ... + * } + * + * fs.newAPI = newAPI; + * + * syncBuiltinESMExports(); + * + * import('fs').then((esmFS) => { + * // It syncs the existing readFile property with the new value + * assert.strictEqual(esmFS.readFile, newAPI); + * // readFileSync has been deleted from the required fs + * assert.strictEqual('readFileSync' in fs, false); + * // syncBuiltinESMExports() does not remove readFileSync from esmFS + * assert.strictEqual('readFileSync' in esmFS, true); + * // syncBuiltinESMExports() does not add names + * assert.strictEqual(esmFS.newAPI, undefined); + * }); + * ``` + * @since v12.12.0 + */ + function syncBuiltinESMExports(): void; + /** + * `path` is the resolved path for the file for which a corresponding source map + * should be fetched. + * @since v13.7.0, v12.17.0 + */ + function findSourceMap(path: string, error?: Error): SourceMap; + interface SourceMapPayload { + file: string; + version: number; + sources: string[]; + sourcesContent: string[]; + names: string[]; + mappings: string; + sourceRoot: string; + } + interface SourceMapping { + generatedLine: number; + generatedColumn: number; + originalSource: string; + originalLine: number; + originalColumn: number; + } + /** + * @since v13.7.0, v12.17.0 + */ + class SourceMap { + /** + * Getter for the payload used to construct the `SourceMap` instance. + */ + readonly payload: SourceMapPayload; + constructor(payload: SourceMapPayload); + /** + * Given a line number and column number in the generated source file, returns + * an object representing the position in the original file. The object returned + * consists of the following keys: + */ + findEntry(line: number, column: number): SourceMapping; + } + } + interface Module extends NodeModule {} + class Module { + static runMain(): void; + static wrap(code: string): string; + static createRequire(path: string | URL): NodeRequire; + static builtinModules: string[]; + static Module: typeof Module; + constructor(id: string, parent?: Module); + } + global { + interface ImportMeta { + url: string; + /** + * @experimental + * This feature is only available with the `--experimental-import-meta-resolve` + * command flag enabled. + * + * Provides a module-relative resolution function scoped to each module, returning + * the URL string. + * + * @param specified The module specifier to resolve relative to `parent`. + * @param parent The absolute parent module URL to resolve from. If none + * is specified, the value of `import.meta.url` is used as the default. + */ + resolve?(specified: string, parent?: string | URL): Promise; + } + } + export = Module; +} +declare module 'node:module' { + import module = require('module'); + export = module; +} diff --git a/node_modules/@types/node/ts4.8/net.d.ts b/node_modules/@types/node/ts4.8/net.d.ts new file mode 100755 index 0000000..72cebc8 --- /dev/null +++ b/node_modules/@types/node/ts4.8/net.d.ts @@ -0,0 +1,869 @@ +/** + * > Stability: 2 - Stable + * + * The `net` module provides an asynchronous network API for creating stream-based + * TCP or `IPC` servers ({@link createServer}) and clients + * ({@link createConnection}). 
+ * + * It can be accessed using: + * + * ```js + * const net = require('net'); + * ``` + * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/net.js) + */ +declare module 'net' { + import * as stream from 'node:stream'; + import { Abortable, EventEmitter } from 'node:events'; + import * as dns from 'node:dns'; + type LookupFunction = (hostname: string, options: dns.LookupOneOptions, callback: (err: NodeJS.ErrnoException | null, address: string, family: number) => void) => void; + interface AddressInfo { + address: string; + family: string; + port: number; + } + interface SocketConstructorOpts { + fd?: number | undefined; + allowHalfOpen?: boolean | undefined; + readable?: boolean | undefined; + writable?: boolean | undefined; + signal?: AbortSignal; + } + interface OnReadOpts { + buffer: Uint8Array | (() => Uint8Array); + /** + * This function is called for every chunk of incoming data. + * Two arguments are passed to it: the number of bytes written to buffer and a reference to buffer. + * Return false from this function to implicitly pause() the socket. + */ + callback(bytesWritten: number, buf: Uint8Array): boolean; + } + interface ConnectOpts { + /** + * If specified, incoming data is stored in a single buffer and passed to the supplied callback when data arrives on the socket. + * Note: this will cause the streaming functionality to not provide any data, however events like 'error', 'end', and 'close' will + * still be emitted as normal and methods like pause() and resume() will also behave as expected. + */ + onread?: OnReadOpts | undefined; + } + interface TcpSocketConnectOpts extends ConnectOpts { + port: number; + host?: string | undefined; + localAddress?: string | undefined; + localPort?: number | undefined; + hints?: number | undefined; + family?: number | undefined; + lookup?: LookupFunction | undefined; + noDelay?: boolean | undefined; + keepAlive?: boolean | undefined; + keepAliveInitialDelay?: number | undefined; + } + interface IpcSocketConnectOpts extends ConnectOpts { + path: string; + } + type SocketConnectOpts = TcpSocketConnectOpts | IpcSocketConnectOpts; + type SocketReadyState = 'opening' | 'open' | 'readOnly' | 'writeOnly' | 'closed'; + /** + * This class is an abstraction of a TCP socket or a streaming `IPC` endpoint + * (uses named pipes on Windows, and Unix domain sockets otherwise). It is also + * an `EventEmitter`. + * + * A `net.Socket` can be created by the user and used directly to interact with + * a server. For example, it is returned by {@link createConnection}, + * so the user can use it to talk to the server. + * + * It can also be created by Node.js and passed to the user when a connection + * is received. For example, it is passed to the listeners of a `'connection'` event emitted on a {@link Server}, so the user can use + * it to interact with the client. + * @since v0.3.4 + */ + class Socket extends stream.Duplex { + constructor(options?: SocketConstructorOpts); + /** + * Sends data on the socket. The second parameter specifies the encoding in the + * case of a string. It defaults to UTF8 encoding. + * + * Returns `true` if the entire data was flushed successfully to the kernel + * buffer. Returns `false` if all or part of the data was queued in user memory.`'drain'` will be emitted when the buffer is again free. + * + * The optional `callback` parameter will be executed when the data is finally + * written out, which may not be immediately. + * + * See `Writable` stream `write()` method for more + * information. 
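A hypothetical sketch of the backpressure contract described for `socket.write()`; the `writeChunk` helper is invented for illustration.

```ts
import * as net from 'node:net';

// Hypothetical helper: respect the flush signal from write() before sending more data.
function writeChunk(socket: net.Socket, chunk: string): void {
  if (!socket.write(chunk)) {
    // Data was queued in user memory; 'drain' fires once the buffer is free again.
    socket.once('drain', () => console.log('drained, safe to write the next chunk'));
  }
}
```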
+ * @since v0.1.90 + * @param [encoding='utf8'] Only used when data is `string`. + */ + write(buffer: Uint8Array | string, cb?: (err?: Error) => void): boolean; + write(str: Uint8Array | string, encoding?: BufferEncoding, cb?: (err?: Error) => void): boolean; + /** + * Initiate a connection on a given socket. + * + * Possible signatures: + * + * * `socket.connect(options[, connectListener])` + * * `socket.connect(path[, connectListener])` for `IPC` connections. + * * `socket.connect(port[, host][, connectListener])` for TCP connections. + * * Returns: `net.Socket` The socket itself. + * + * This function is asynchronous. When the connection is established, the `'connect'` event will be emitted. If there is a problem connecting, + * instead of a `'connect'` event, an `'error'` event will be emitted with + * the error passed to the `'error'` listener. + * The last parameter `connectListener`, if supplied, will be added as a listener + * for the `'connect'` event **once**. + * + * This function should only be used for reconnecting a socket after`'close'` has been emitted or otherwise it may lead to undefined + * behavior. + */ + connect(options: SocketConnectOpts, connectionListener?: () => void): this; + connect(port: number, host: string, connectionListener?: () => void): this; + connect(port: number, connectionListener?: () => void): this; + connect(path: string, connectionListener?: () => void): this; + /** + * Set the encoding for the socket as a `Readable Stream`. See `readable.setEncoding()` for more information. + * @since v0.1.90 + * @return The socket itself. + */ + setEncoding(encoding?: BufferEncoding): this; + /** + * Pauses the reading of data. That is, `'data'` events will not be emitted. + * Useful to throttle back an upload. + * @return The socket itself. + */ + pause(): this; + /** + * Close the TCP connection by sending an RST packet and destroy the stream. + * If this TCP socket is in connecting status, it will send an RST packet + * and destroy this TCP socket once it is connected. Otherwise, it will call + * `socket.destroy` with an `ERR_SOCKET_CLOSED` Error. If this is not a TCP socket + * (for example, a pipe), calling this method will immediately throw + * an `ERR_INVALID_HANDLE_TYPE` Error. + * @since v18.3.0 + * @return The socket itself. + */ + resetAndDestroy(): this; + /** + * Resumes reading after a call to `socket.pause()`. + * @return The socket itself. + */ + resume(): this; + /** + * Sets the socket to timeout after `timeout` milliseconds of inactivity on + * the socket. By default `net.Socket` do not have a timeout. + * + * When an idle timeout is triggered the socket will receive a `'timeout'` event but the connection will not be severed. The user must manually call `socket.end()` or `socket.destroy()` to + * end the connection. + * + * ```js + * socket.setTimeout(3000); + * socket.on('timeout', () => { + * console.log('socket timeout'); + * socket.end(); + * }); + * ``` + * + * If `timeout` is 0, then the existing idle timeout is disabled. + * + * The optional `callback` parameter will be added as a one-time listener for the `'timeout'` event. + * @since v0.1.90 + * @return The socket itself. + */ + setTimeout(timeout: number, callback?: () => void): this; + /** + * Enable/disable the use of Nagle's algorithm. + * + * When a TCP connection is created, it will have Nagle's algorithm enabled. + * + * Nagle's algorithm delays data before it is sent via the network. It attempts + * to optimize throughput at the expense of latency. 
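A minimal sketch of the `socket.connect()` flow described above, assuming a server is listening on localhost:8124 (host and port are illustrative):

```ts
import * as net from 'node:net';

const socket = new net.Socket();

socket.connect({ port: 8124, host: 'localhost' }, () => {
  console.log('connected');
  socket.end('hello\r\n');
});

// On failure an 'error' event is emitted instead of 'connect'.
socket.on('error', (err) => console.error('connection failed:', err.message));
```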
+ * + * Passing `true` for `noDelay` or not passing an argument will disable Nagle's + * algorithm for the socket. Passing `false` for `noDelay` will enable Nagle's + * algorithm. + * @since v0.1.90 + * @param [noDelay=true] + * @return The socket itself. + */ + setNoDelay(noDelay?: boolean): this; + /** + * Enable/disable keep-alive functionality, and optionally set the initial + * delay before the first keepalive probe is sent on an idle socket. + * + * Set `initialDelay` (in milliseconds) to set the delay between the last + * data packet received and the first keepalive probe. Setting `0` for`initialDelay` will leave the value unchanged from the default + * (or previous) setting. + * + * Enabling the keep-alive functionality will set the following socket options: + * + * * `SO_KEEPALIVE=1` + * * `TCP_KEEPIDLE=initialDelay` + * * `TCP_KEEPCNT=10` + * * `TCP_KEEPINTVL=1` + * @since v0.1.92 + * @param [enable=false] + * @param [initialDelay=0] + * @return The socket itself. + */ + setKeepAlive(enable?: boolean, initialDelay?: number): this; + /** + * Returns the bound `address`, the address `family` name and `port` of the + * socket as reported by the operating system:`{ port: 12346, family: 'IPv4', address: '127.0.0.1' }` + * @since v0.1.90 + */ + address(): AddressInfo | {}; + /** + * Calling `unref()` on a socket will allow the program to exit if this is the only + * active socket in the event system. If the socket is already `unref`ed calling`unref()` again will have no effect. + * @since v0.9.1 + * @return The socket itself. + */ + unref(): this; + /** + * Opposite of `unref()`, calling `ref()` on a previously `unref`ed socket will _not_ let the program exit if it's the only socket left (the default behavior). + * If the socket is `ref`ed calling `ref` again will have no effect. + * @since v0.9.1 + * @return The socket itself. + */ + ref(): this; + /** + * This property shows the number of characters buffered for writing. The buffer + * may contain strings whose length after encoding is not yet known. So this number + * is only an approximation of the number of bytes in the buffer. + * + * `net.Socket` has the property that `socket.write()` always works. This is to + * help users get up and running quickly. The computer cannot always keep up + * with the amount of data that is written to a socket. The network connection + * simply might be too slow. Node.js will internally queue up the data written to a + * socket and send it out over the wire when it is possible. + * + * The consequence of this internal buffering is that memory may grow. + * Users who experience large or growing `bufferSize` should attempt to + * "throttle" the data flows in their program with `socket.pause()` and `socket.resume()`. + * @since v0.3.8 + * @deprecated Since v14.6.0 - Use `writableLength` instead. + */ + readonly bufferSize: number; + /** + * The amount of received bytes. + * @since v0.5.3 + */ + readonly bytesRead: number; + /** + * The amount of bytes sent. + * @since v0.5.3 + */ + readonly bytesWritten: number; + /** + * If `true`,`socket.connect(options[, connectListener])` was + * called and has not yet finished. It will stay `true` until the socket becomes + * connected, then it is set to `false` and the `'connect'` event is emitted. Note + * that the `socket.connect(options[, connectListener])` callback is a listener for the `'connect'` event. + * @since v6.1.0 + */ + readonly connecting: boolean; + /** + * See `writable.destroyed` for further details. 
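A small sketch combining `setNoDelay()`, `setKeepAlive()`, and `address()` as documented above; the host, port, and keep-alive delay are assumptions for illustration.

```ts
import * as net from 'node:net';

const socket = net.createConnection({ port: 443, host: 'example.org' }, () => {
  socket.setNoDelay(true);           // disable Nagle's algorithm for latency-sensitive writes
  socket.setKeepAlive(true, 60_000); // first keep-alive probe after 60 s of idle
  console.log('bound local endpoint:', socket.address());
});
```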
+ */ + readonly destroyed: boolean; + /** + * The string representation of the local IP address the remote client is + * connecting on. For example, in a server listening on `'0.0.0.0'`, if a client + * connects on `'192.168.1.1'`, the value of `socket.localAddress` would be`'192.168.1.1'`. + * @since v0.9.6 + */ + readonly localAddress?: string; + /** + * The numeric representation of the local port. For example, `80` or `21`. + * @since v0.9.6 + */ + readonly localPort?: number; + /** + * The string representation of the local IP family. `'IPv4'` or `'IPv6'`. + * @since v18.8.0 + */ + readonly localFamily?: string; + /** + * This property represents the state of the connection as a string. + * @see {https://nodejs.org/api/net.html#socketreadystate} + * @since v0.5.0 + */ + readonly readyState: SocketReadyState; + /** + * The string representation of the remote IP address. For example,`'74.125.127.100'` or `'2001:4860:a005::68'`. Value may be `undefined` if + * the socket is destroyed (for example, if the client disconnected). + * @since v0.5.10 + */ + readonly remoteAddress?: string | undefined; + /** + * The string representation of the remote IP family. `'IPv4'` or `'IPv6'`. + * @since v0.11.14 + */ + readonly remoteFamily?: string | undefined; + /** + * The numeric representation of the remote port. For example, `80` or `21`. + * @since v0.5.10 + */ + readonly remotePort?: number | undefined; + /** + * The socket timeout in milliseconds as set by socket.setTimeout(). It is undefined if a timeout has not been set. + * @since v10.7.0 + */ + readonly timeout?: number | undefined; + /** + * Half-closes the socket. i.e., it sends a FIN packet. It is possible the + * server will still send some data. + * + * See `writable.end()` for further details. + * @since v0.1.90 + * @param [encoding='utf8'] Only used when data is `string`. + * @param callback Optional callback for when the socket is finished. + * @return The socket itself. + */ + end(callback?: () => void): this; + end(buffer: Uint8Array | string, callback?: () => void): this; + end(str: Uint8Array | string, encoding?: BufferEncoding, callback?: () => void): this; + /** + * events.EventEmitter + * 1. close + * 2. connect + * 3. data + * 4. drain + * 5. end + * 6. error + * 7. lookup + * 8. ready + * 9. 
timeout + */ + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: 'close', listener: (hadError: boolean) => void): this; + addListener(event: 'connect', listener: () => void): this; + addListener(event: 'data', listener: (data: Buffer) => void): this; + addListener(event: 'drain', listener: () => void): this; + addListener(event: 'end', listener: () => void): this; + addListener(event: 'error', listener: (err: Error) => void): this; + addListener(event: 'lookup', listener: (err: Error, address: string, family: string | number, host: string) => void): this; + addListener(event: 'ready', listener: () => void): this; + addListener(event: 'timeout', listener: () => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: 'close', hadError: boolean): boolean; + emit(event: 'connect'): boolean; + emit(event: 'data', data: Buffer): boolean; + emit(event: 'drain'): boolean; + emit(event: 'end'): boolean; + emit(event: 'error', err: Error): boolean; + emit(event: 'lookup', err: Error, address: string, family: string | number, host: string): boolean; + emit(event: 'ready'): boolean; + emit(event: 'timeout'): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: 'close', listener: (hadError: boolean) => void): this; + on(event: 'connect', listener: () => void): this; + on(event: 'data', listener: (data: Buffer) => void): this; + on(event: 'drain', listener: () => void): this; + on(event: 'end', listener: () => void): this; + on(event: 'error', listener: (err: Error) => void): this; + on(event: 'lookup', listener: (err: Error, address: string, family: string | number, host: string) => void): this; + on(event: 'ready', listener: () => void): this; + on(event: 'timeout', listener: () => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: 'close', listener: (hadError: boolean) => void): this; + once(event: 'connect', listener: () => void): this; + once(event: 'data', listener: (data: Buffer) => void): this; + once(event: 'drain', listener: () => void): this; + once(event: 'end', listener: () => void): this; + once(event: 'error', listener: (err: Error) => void): this; + once(event: 'lookup', listener: (err: Error, address: string, family: string | number, host: string) => void): this; + once(event: 'ready', listener: () => void): this; + once(event: 'timeout', listener: () => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: 'close', listener: (hadError: boolean) => void): this; + prependListener(event: 'connect', listener: () => void): this; + prependListener(event: 'data', listener: (data: Buffer) => void): this; + prependListener(event: 'drain', listener: () => void): this; + prependListener(event: 'end', listener: () => void): this; + prependListener(event: 'error', listener: (err: Error) => void): this; + prependListener(event: 'lookup', listener: (err: Error, address: string, family: string | number, host: string) => void): this; + prependListener(event: 'ready', listener: () => void): this; + prependListener(event: 'timeout', listener: () => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: 'close', listener: (hadError: boolean) => void): this; + prependOnceListener(event: 'connect', listener: () => void): this; + prependOnceListener(event: 'data', listener: (data: Buffer) => void): this; + prependOnceListener(event: 'drain', 
listener: () => void): this; + prependOnceListener(event: 'end', listener: () => void): this; + prependOnceListener(event: 'error', listener: (err: Error) => void): this; + prependOnceListener(event: 'lookup', listener: (err: Error, address: string, family: string | number, host: string) => void): this; + prependOnceListener(event: 'ready', listener: () => void): this; + prependOnceListener(event: 'timeout', listener: () => void): this; + } + interface ListenOptions extends Abortable { + port?: number | undefined; + host?: string | undefined; + backlog?: number | undefined; + path?: string | undefined; + exclusive?: boolean | undefined; + readableAll?: boolean | undefined; + writableAll?: boolean | undefined; + /** + * @default false + */ + ipv6Only?: boolean | undefined; + } + interface ServerOpts { + /** + * Indicates whether half-opened TCP connections are allowed. + * @default false + */ + allowHalfOpen?: boolean | undefined; + /** + * Indicates whether the socket should be paused on incoming connections. + * @default false + */ + pauseOnConnect?: boolean | undefined; + /** + * If set to `true`, it disables the use of Nagle's algorithm immediately after a new incoming connection is received. + * @default false + * @since v16.5.0 + */ + noDelay?: boolean | undefined; + /** + * If set to `true`, it enables keep-alive functionality on the socket immediately after a new incoming connection is received, + * similarly on what is done in `socket.setKeepAlive([enable][, initialDelay])`. + * @default false + * @since v16.5.0 + */ + keepAlive?: boolean | undefined; + /** + * If set to a positive number, it sets the initial delay before the first keepalive probe is sent on an idle socket. + * @default 0 + * @since v16.5.0 + */ + keepAliveInitialDelay?: number | undefined; + } + interface DropArgument { + localAddress?: string; + localPort?: number; + localFamily?: string; + remoteAddress?: string; + remotePort?: number; + remoteFamily?: string; + } + /** + * This class is used to create a TCP or `IPC` server. + * @since v0.1.90 + */ + class Server extends EventEmitter { + constructor(connectionListener?: (socket: Socket) => void); + constructor(options?: ServerOpts, connectionListener?: (socket: Socket) => void); + /** + * Start a server listening for connections. A `net.Server` can be a TCP or + * an `IPC` server depending on what it listens to. + * + * Possible signatures: + * + * * `server.listen(handle[, backlog][, callback])` + * * `server.listen(options[, callback])` + * * `server.listen(path[, backlog][, callback])` for `IPC` servers + * * `server.listen([port[, host[, backlog]]][, callback])` for TCP servers + * + * This function is asynchronous. When the server starts listening, the `'listening'` event will be emitted. The last parameter `callback`will be added as a listener for the `'listening'` + * event. + * + * All `listen()` methods can take a `backlog` parameter to specify the maximum + * length of the queue of pending connections. The actual length will be determined + * by the OS through sysctl settings such as `tcp_max_syn_backlog` and `somaxconn`on Linux. The default value of this parameter is 511 (not 512). + * + * All {@link Socket} are set to `SO_REUSEADDR` (see [`socket(7)`](https://man7.org/linux/man-pages/man7/socket.7.html) for + * details). + * + * The `server.listen()` method can be called again if and only if there was an + * error during the first `server.listen()` call or `server.close()` has been + * called. 
Otherwise, an `ERR_SERVER_ALREADY_LISTEN` error will be thrown. + * + * One of the most common errors raised when listening is `EADDRINUSE`. + * This happens when another server is already listening on the requested`port`/`path`/`handle`. One way to handle this would be to retry + * after a certain amount of time: + * + * ```js + * server.on('error', (e) => { + * if (e.code === 'EADDRINUSE') { + * console.log('Address in use, retrying...'); + * setTimeout(() => { + * server.close(); + * server.listen(PORT, HOST); + * }, 1000); + * } + * }); + * ``` + */ + listen(port?: number, hostname?: string, backlog?: number, listeningListener?: () => void): this; + listen(port?: number, hostname?: string, listeningListener?: () => void): this; + listen(port?: number, backlog?: number, listeningListener?: () => void): this; + listen(port?: number, listeningListener?: () => void): this; + listen(path: string, backlog?: number, listeningListener?: () => void): this; + listen(path: string, listeningListener?: () => void): this; + listen(options: ListenOptions, listeningListener?: () => void): this; + listen(handle: any, backlog?: number, listeningListener?: () => void): this; + listen(handle: any, listeningListener?: () => void): this; + /** + * Stops the server from accepting new connections and keeps existing + * connections. This function is asynchronous, the server is finally closed + * when all connections are ended and the server emits a `'close'` event. + * The optional `callback` will be called once the `'close'` event occurs. Unlike + * that event, it will be called with an `Error` as its only argument if the server + * was not open when it was closed. + * @since v0.1.90 + * @param callback Called when the server is closed. + */ + close(callback?: (err?: Error) => void): this; + /** + * Returns the bound `address`, the address `family` name, and `port` of the server + * as reported by the operating system if listening on an IP socket + * (useful to find which port was assigned when getting an OS-assigned address):`{ port: 12346, family: 'IPv4', address: '127.0.0.1' }`. + * + * For a server listening on a pipe or Unix domain socket, the name is returned + * as a string. + * + * ```js + * const server = net.createServer((socket) => { + * socket.end('goodbye\n'); + * }).on('error', (err) => { + * // Handle errors here. + * throw err; + * }); + * + * // Grab an arbitrary unused port. + * server.listen(() => { + * console.log('opened server on', server.address()); + * }); + * ``` + * + * `server.address()` returns `null` before the `'listening'` event has been + * emitted or after calling `server.close()`. + * @since v0.1.90 + */ + address(): AddressInfo | string | null; + /** + * Asynchronously get the number of concurrent connections on the server. Works + * when sockets were sent to forks. + * + * Callback should take two arguments `err` and `count`. + * @since v0.9.7 + */ + getConnections(cb: (error: Error | null, count: number) => void): void; + /** + * Opposite of `unref()`, calling `ref()` on a previously `unref`ed server will _not_ let the program exit if it's the only server left (the default behavior). + * If the server is `ref`ed calling `ref()` again will have no effect. + * @since v0.9.1 + */ + ref(): this; + /** + * Calling `unref()` on a server will allow the program to exit if this is the only + * active server in the event system. If the server is already `unref`ed calling`unref()` again will have no effect. 
+ * @since v0.9.1 + */ + unref(): this; + /** + * Set this property to reject connections when the server's connection count gets + * high. + * + * It is not recommended to use this option once a socket has been sent to a child + * with `child_process.fork()`. + * @since v0.2.0 + */ + maxConnections: number; + connections: number; + /** + * Indicates whether or not the server is listening for connections. + * @since v5.7.0 + */ + listening: boolean; + /** + * events.EventEmitter + * 1. close + * 2. connection + * 3. error + * 4. listening + * 5. drop + */ + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: 'close', listener: () => void): this; + addListener(event: 'connection', listener: (socket: Socket) => void): this; + addListener(event: 'error', listener: (err: Error) => void): this; + addListener(event: 'listening', listener: () => void): this; + addListener(event: 'drop', listener: (data?: DropArgument) => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: 'close'): boolean; + emit(event: 'connection', socket: Socket): boolean; + emit(event: 'error', err: Error): boolean; + emit(event: 'listening'): boolean; + emit(event: 'drop', data?: DropArgument): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: 'close', listener: () => void): this; + on(event: 'connection', listener: (socket: Socket) => void): this; + on(event: 'error', listener: (err: Error) => void): this; + on(event: 'listening', listener: () => void): this; + on(event: 'drop', listener: (data?: DropArgument) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: 'close', listener: () => void): this; + once(event: 'connection', listener: (socket: Socket) => void): this; + once(event: 'error', listener: (err: Error) => void): this; + once(event: 'listening', listener: () => void): this; + once(event: 'drop', listener: (data?: DropArgument) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: 'close', listener: () => void): this; + prependListener(event: 'connection', listener: (socket: Socket) => void): this; + prependListener(event: 'error', listener: (err: Error) => void): this; + prependListener(event: 'listening', listener: () => void): this; + prependListener(event: 'drop', listener: (data?: DropArgument) => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: 'close', listener: () => void): this; + prependOnceListener(event: 'connection', listener: (socket: Socket) => void): this; + prependOnceListener(event: 'error', listener: (err: Error) => void): this; + prependOnceListener(event: 'listening', listener: () => void): this; + prependOnceListener(event: 'drop', listener: (data?: DropArgument) => void): this; + } + type IPVersion = 'ipv4' | 'ipv6'; + /** + * The `BlockList` object can be used with some network APIs to specify rules for + * disabling inbound or outbound access to specific IP addresses, IP ranges, or + * IP subnets. + * @since v15.0.0, v14.18.0 + */ + class BlockList { + /** + * Adds a rule to block the given IP address. + * @since v15.0.0, v14.18.0 + * @param address An IPv4 or IPv6 address. + * @param [type='ipv4'] Either `'ipv4'` or `'ipv6'`. 
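A brief sketch of `maxConnections` together with the `'drop'` event overloads declared above; the connection limit and port are illustrative.

```ts
import * as net from 'node:net';

const server = net.createServer((socket) => socket.end('hello\n'));

// Connections beyond this threshold are rejected and surfaced via 'drop'.
server.maxConnections = 100;
server.on('drop', (data) => {
  console.log('dropped connection from', data?.remoteAddress, data?.remotePort);
});

server.listen(8124);
```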
+ */ + addAddress(address: string, type?: IPVersion): void; + addAddress(address: SocketAddress): void; + /** + * Adds a rule to block a range of IP addresses from `start` (inclusive) to`end` (inclusive). + * @since v15.0.0, v14.18.0 + * @param start The starting IPv4 or IPv6 address in the range. + * @param end The ending IPv4 or IPv6 address in the range. + * @param [type='ipv4'] Either `'ipv4'` or `'ipv6'`. + */ + addRange(start: string, end: string, type?: IPVersion): void; + addRange(start: SocketAddress, end: SocketAddress): void; + /** + * Adds a rule to block a range of IP addresses specified as a subnet mask. + * @since v15.0.0, v14.18.0 + * @param net The network IPv4 or IPv6 address. + * @param prefix The number of CIDR prefix bits. For IPv4, this must be a value between `0` and `32`. For IPv6, this must be between `0` and `128`. + * @param [type='ipv4'] Either `'ipv4'` or `'ipv6'`. + */ + addSubnet(net: SocketAddress, prefix: number): void; + addSubnet(net: string, prefix: number, type?: IPVersion): void; + /** + * Returns `true` if the given IP address matches any of the rules added to the`BlockList`. + * + * ```js + * const blockList = new net.BlockList(); + * blockList.addAddress('123.123.123.123'); + * blockList.addRange('10.0.0.1', '10.0.0.10'); + * blockList.addSubnet('8592:757c:efae:4e45::', 64, 'ipv6'); + * + * console.log(blockList.check('123.123.123.123')); // Prints: true + * console.log(blockList.check('10.0.0.3')); // Prints: true + * console.log(blockList.check('222.111.111.222')); // Prints: false + * + * // IPv6 notation for IPv4 addresses works: + * console.log(blockList.check('::ffff:7b7b:7b7b', 'ipv6')); // Prints: true + * console.log(blockList.check('::ffff:123.123.123.123', 'ipv6')); // Prints: true + * ``` + * @since v15.0.0, v14.18.0 + * @param address The IP address to check + * @param [type='ipv4'] Either `'ipv4'` or `'ipv6'`. + */ + check(address: SocketAddress): boolean; + check(address: string, type?: IPVersion): boolean; + } + interface TcpNetConnectOpts extends TcpSocketConnectOpts, SocketConstructorOpts { + timeout?: number | undefined; + } + interface IpcNetConnectOpts extends IpcSocketConnectOpts, SocketConstructorOpts { + timeout?: number | undefined; + } + type NetConnectOpts = TcpNetConnectOpts | IpcNetConnectOpts; + /** + * Creates a new TCP or `IPC` server. + * + * If `allowHalfOpen` is set to `true`, when the other end of the socket + * signals the end of transmission, the server will only send back the end of + * transmission when `socket.end()` is explicitly called. For example, in the + * context of TCP, when a FIN packed is received, a FIN packed is sent + * back only when `socket.end()` is explicitly called. Until then the + * connection is half-closed (non-readable but still writable). See `'end'` event and [RFC 1122](https://tools.ietf.org/html/rfc1122) (section 4.2.2.13) for more information. + * + * If `pauseOnConnect` is set to `true`, then the socket associated with each + * incoming connection will be paused, and no data will be read from its handle. + * This allows connections to be passed between processes without any data being + * read by the original process. To begin reading data from a paused socket, call `socket.resume()`. + * + * The server can be a TCP server or an `IPC` server, depending on what it `listen()` to. 
+ * + * Here is an example of a TCP echo server which listens for connections + * on port 8124: + * + * ```js + * const net = require('net'); + * const server = net.createServer((c) => { + * // 'connection' listener. + * console.log('client connected'); + * c.on('end', () => { + * console.log('client disconnected'); + * }); + * c.write('hello\r\n'); + * c.pipe(c); + * }); + * server.on('error', (err) => { + * throw err; + * }); + * server.listen(8124, () => { + * console.log('server bound'); + * }); + * ``` + * + * Test this by using `telnet`: + * + * ```console + * $ telnet localhost 8124 + * ``` + * + * To listen on the socket `/tmp/echo.sock`: + * + * ```js + * server.listen('/tmp/echo.sock', () => { + * console.log('server bound'); + * }); + * ``` + * + * Use `nc` to connect to a Unix domain socket server: + * + * ```console + * $ nc -U /tmp/echo.sock + * ``` + * @since v0.5.0 + * @param connectionListener Automatically set as a listener for the {@link 'connection'} event. + */ + function createServer(connectionListener?: (socket: Socket) => void): Server; + function createServer(options?: ServerOpts, connectionListener?: (socket: Socket) => void): Server; + /** + * Aliases to {@link createConnection}. + * + * Possible signatures: + * + * * {@link connect} + * * {@link connect} for `IPC` connections. + * * {@link connect} for TCP connections. + */ + function connect(options: NetConnectOpts, connectionListener?: () => void): Socket; + function connect(port: number, host?: string, connectionListener?: () => void): Socket; + function connect(path: string, connectionListener?: () => void): Socket; + /** + * A factory function, which creates a new {@link Socket}, + * immediately initiates connection with `socket.connect()`, + * then returns the `net.Socket` that starts the connection. + * + * When the connection is established, a `'connect'` event will be emitted + * on the returned socket. The last parameter `connectListener`, if supplied, + * will be added as a listener for the `'connect'` event **once**. + * + * Possible signatures: + * + * * {@link createConnection} + * * {@link createConnection} for `IPC` connections. + * * {@link createConnection} for TCP connections. + * + * The {@link connect} function is an alias to this function. + */ + function createConnection(options: NetConnectOpts, connectionListener?: () => void): Socket; + function createConnection(port: number, host?: string, connectionListener?: () => void): Socket; + function createConnection(path: string, connectionListener?: () => void): Socket; + /** + * Returns `6` if `input` is an IPv6 address. Returns `4` if `input` is an IPv4 + * address in [dot-decimal notation](https://en.wikipedia.org/wiki/Dot-decimal_notation) with no leading zeroes. Otherwise, returns`0`. + * + * ```js + * net.isIP('::1'); // returns 6 + * net.isIP('127.0.0.1'); // returns 4 + * net.isIP('127.000.000.001'); // returns 0 + * net.isIP('127.0.0.1/24'); // returns 0 + * net.isIP('fhqwhgads'); // returns 0 + * ``` + * @since v0.3.0 + */ + function isIP(input: string): number; + /** + * Returns `true` if `input` is an IPv4 address in [dot-decimal notation](https://en.wikipedia.org/wiki/Dot-decimal_notation) with no + * leading zeroes. Otherwise, returns `false`. 
+ * + * ```js + * net.isIPv4('127.0.0.1'); // returns true + * net.isIPv4('127.000.000.001'); // returns false + * net.isIPv4('127.0.0.1/24'); // returns false + * net.isIPv4('fhqwhgads'); // returns false + * ``` + * @since v0.3.0 + */ + function isIPv4(input: string): boolean; + /** + * Returns `true` if `input` is an IPv6 address. Otherwise, returns `false`. + * + * ```js + * net.isIPv6('::1'); // returns true + * net.isIPv6('fhqwhgads'); // returns false + * ``` + * @since v0.3.0 + */ + function isIPv6(input: string): boolean; + interface SocketAddressInitOptions { + /** + * The network address as either an IPv4 or IPv6 string. + * @default 127.0.0.1 + */ + address?: string | undefined; + /** + * @default `'ipv4'` + */ + family?: IPVersion | undefined; + /** + * An IPv6 flow-label used only if `family` is `'ipv6'`. + * @default 0 + */ + flowlabel?: number | undefined; + /** + * An IP port. + * @default 0 + */ + port?: number | undefined; + } + /** + * @since v15.14.0, v14.18.0 + */ + class SocketAddress { + constructor(options: SocketAddressInitOptions); + /** + * @since v15.14.0, v14.18.0 + */ + readonly address: string; + /** + * Either \`'ipv4'\` or \`'ipv6'\`. + * @since v15.14.0, v14.18.0 + */ + readonly family: IPVersion; + /** + * @since v15.14.0, v14.18.0 + */ + readonly port: number; + /** + * @since v15.14.0, v14.18.0 + */ + readonly flowlabel: number; + } +} +declare module 'node:net' { + export * from 'net'; +} diff --git a/node_modules/@types/node/ts4.8/os.d.ts b/node_modules/@types/node/ts4.8/os.d.ts new file mode 100755 index 0000000..3c55599 --- /dev/null +++ b/node_modules/@types/node/ts4.8/os.d.ts @@ -0,0 +1,466 @@ +/** + * The `os` module provides operating system-related utility methods and + * properties. It can be accessed using: + * + * ```js + * const os = require('os'); + * ``` + * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/os.js) + */ +declare module 'os' { + interface CpuInfo { + model: string; + speed: number; + times: { + user: number; + nice: number; + sys: number; + idle: number; + irq: number; + }; + } + interface NetworkInterfaceBase { + address: string; + netmask: string; + mac: string; + internal: boolean; + cidr: string | null; + } + interface NetworkInterfaceInfoIPv4 extends NetworkInterfaceBase { + family: 'IPv4'; + scopeid?: undefined; + } + interface NetworkInterfaceInfoIPv6 extends NetworkInterfaceBase { + family: 'IPv6'; + scopeid: number; + } + interface UserInfo { + username: T; + uid: number; + gid: number; + shell: T; + homedir: T; + } + type NetworkInterfaceInfo = NetworkInterfaceInfoIPv4 | NetworkInterfaceInfoIPv6; + /** + * Returns the host name of the operating system as a string. + * @since v0.3.3 + */ + function hostname(): string; + /** + * Returns an array containing the 1, 5, and 15 minute load averages. + * + * The load average is a measure of system activity calculated by the operating + * system and expressed as a fractional number. + * + * The load average is a Unix-specific concept. On Windows, the return value is + * always `[0, 0, 0]`. + * @since v0.3.3 + */ + function loadavg(): number[]; + /** + * Returns the system uptime in number of seconds. + * @since v0.3.3 + */ + function uptime(): number; + /** + * Returns the amount of free system memory in bytes as an integer. + * @since v0.3.3 + */ + function freemem(): number; + /** + * Returns the total amount of system memory in bytes as an integer. 
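A short sketch using the memory and load helpers documented above (`hostname()`, `uptime()`, `freemem()`, `totalmem()`, `loadavg()`); the output formatting is illustrative.

```ts
import * as os from 'node:os';

const usedBytes = os.totalmem() - os.freemem();
console.log(`${os.hostname()} up ${Math.round(os.uptime())} s`);
console.log(`memory in use: ${(usedBytes / 1024 ** 2).toFixed(1)} MiB`);
console.log('load averages (1/5/15 min):', os.loadavg());
```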
+ * @since v0.3.3 + */ + function totalmem(): number; + /** + * Returns an array of objects containing information about each logical CPU core. + * + * The properties included on each object include: + * + * ```js + * [ + * { + * model: 'Intel(R) Core(TM) i7 CPU 860 @ 2.80GHz', + * speed: 2926, + * times: { + * user: 252020, + * nice: 0, + * sys: 30340, + * idle: 1070356870, + * irq: 0 + * } + * }, + * { + * model: 'Intel(R) Core(TM) i7 CPU 860 @ 2.80GHz', + * speed: 2926, + * times: { + * user: 306960, + * nice: 0, + * sys: 26980, + * idle: 1071569080, + * irq: 0 + * } + * }, + * { + * model: 'Intel(R) Core(TM) i7 CPU 860 @ 2.80GHz', + * speed: 2926, + * times: { + * user: 248450, + * nice: 0, + * sys: 21750, + * idle: 1070919370, + * irq: 0 + * } + * }, + * { + * model: 'Intel(R) Core(TM) i7 CPU 860 @ 2.80GHz', + * speed: 2926, + * times: { + * user: 256880, + * nice: 0, + * sys: 19430, + * idle: 1070905480, + * irq: 20 + * } + * }, + * ] + * ``` + * + * `nice` values are POSIX-only. On Windows, the `nice` values of all processors + * are always 0. + * @since v0.3.3 + */ + function cpus(): CpuInfo[]; + /** + * Returns the operating system name as returned by [`uname(3)`](https://linux.die.net/man/3/uname). For example, it + * returns `'Linux'` on Linux, `'Darwin'` on macOS, and `'Windows_NT'` on Windows. + * + * See [https://en.wikipedia.org/wiki/Uname#Examples](https://en.wikipedia.org/wiki/Uname#Examples) for additional information + * about the output of running [`uname(3)`](https://linux.die.net/man/3/uname) on various operating systems. + * @since v0.3.3 + */ + function type(): string; + /** + * Returns the operating system as a string. + * + * On POSIX systems, the operating system release is determined by calling [`uname(3)`](https://linux.die.net/man/3/uname). On Windows, `GetVersionExW()` is used. See + * [https://en.wikipedia.org/wiki/Uname#Examples](https://en.wikipedia.org/wiki/Uname#Examples) for more information. + * @since v0.3.3 + */ + function release(): string; + /** + * Returns an object containing network interfaces that have been assigned a + * network address. + * + * Each key on the returned object identifies a network interface. The associated + * value is an array of objects that each describe an assigned network address. + * + * The properties available on the assigned network address object include: + * + * ```js + * { + * lo: [ + * { + * address: '127.0.0.1', + * netmask: '255.0.0.0', + * family: 'IPv4', + * mac: '00:00:00:00:00:00', + * internal: true, + * cidr: '127.0.0.1/8' + * }, + * { + * address: '::1', + * netmask: 'ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff', + * family: 'IPv6', + * mac: '00:00:00:00:00:00', + * scopeid: 0, + * internal: true, + * cidr: '::1/128' + * } + * ], + * eth0: [ + * { + * address: '192.168.1.108', + * netmask: '255.255.255.0', + * family: 'IPv4', + * mac: '01:02:03:0a:0b:0c', + * internal: false, + * cidr: '192.168.1.108/24' + * }, + * { + * address: 'fe80::a00:27ff:fe4e:66a1', + * netmask: 'ffff:ffff:ffff:ffff::', + * family: 'IPv6', + * mac: '01:02:03:0a:0b:0c', + * scopeid: 1, + * internal: false, + * cidr: 'fe80::a00:27ff:fe4e:66a1/64' + * } + * ] + * } + * ``` + * @since v0.6.0 + */ + function networkInterfaces(): NodeJS.Dict; + /** + * Returns the string path of the current user's home directory. + * + * On POSIX, it uses the `$HOME` environment variable if defined. Otherwise it + * uses the [effective UID](https://en.wikipedia.org/wiki/User_identifier#Effective_user_ID) to look up the user's home directory. 
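A possible sketch combining `type()`, `release()`, `cpus()`, and `networkInterfaces()` as documented above; filtering to non-internal IPv4 addresses is just one plausible use.

```ts
import * as os from 'node:os';

console.log(`${os.type()} ${os.release()}, ${os.cpus().length} logical CPUs`);

// Print non-internal IPv4 addresses, one line per interface entry.
for (const [name, addrs] of Object.entries(os.networkInterfaces())) {
  for (const addr of addrs ?? []) {
    if (addr.family === 'IPv4' && !addr.internal) {
      console.log(`${name}: ${addr.address} (${addr.cidr})`);
    }
  }
}
```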
+ * + * On Windows, it uses the `USERPROFILE` environment variable if defined. + * Otherwise it uses the path to the profile directory of the current user. + * @since v2.3.0 + */ + function homedir(): string; + /** + * Returns information about the currently effective user. On POSIX platforms, + * this is typically a subset of the password file. The returned object includes + * the `username`, `uid`, `gid`, `shell`, and `homedir`. On Windows, the `uid` and`gid` fields are `-1`, and `shell` is `null`. + * + * The value of `homedir` returned by `os.userInfo()` is provided by the operating + * system. This differs from the result of `os.homedir()`, which queries + * environment variables for the home directory before falling back to the + * operating system response. + * + * Throws a `SystemError` if a user has no `username` or `homedir`. + * @since v6.0.0 + */ + function userInfo(options: { encoding: 'buffer' }): UserInfo; + function userInfo(options?: { encoding: BufferEncoding }): UserInfo; + type SignalConstants = { + [key in NodeJS.Signals]: number; + }; + namespace constants { + const UV_UDP_REUSEADDR: number; + namespace signals {} + const signals: SignalConstants; + namespace errno { + const E2BIG: number; + const EACCES: number; + const EADDRINUSE: number; + const EADDRNOTAVAIL: number; + const EAFNOSUPPORT: number; + const EAGAIN: number; + const EALREADY: number; + const EBADF: number; + const EBADMSG: number; + const EBUSY: number; + const ECANCELED: number; + const ECHILD: number; + const ECONNABORTED: number; + const ECONNREFUSED: number; + const ECONNRESET: number; + const EDEADLK: number; + const EDESTADDRREQ: number; + const EDOM: number; + const EDQUOT: number; + const EEXIST: number; + const EFAULT: number; + const EFBIG: number; + const EHOSTUNREACH: number; + const EIDRM: number; + const EILSEQ: number; + const EINPROGRESS: number; + const EINTR: number; + const EINVAL: number; + const EIO: number; + const EISCONN: number; + const EISDIR: number; + const ELOOP: number; + const EMFILE: number; + const EMLINK: number; + const EMSGSIZE: number; + const EMULTIHOP: number; + const ENAMETOOLONG: number; + const ENETDOWN: number; + const ENETRESET: number; + const ENETUNREACH: number; + const ENFILE: number; + const ENOBUFS: number; + const ENODATA: number; + const ENODEV: number; + const ENOENT: number; + const ENOEXEC: number; + const ENOLCK: number; + const ENOLINK: number; + const ENOMEM: number; + const ENOMSG: number; + const ENOPROTOOPT: number; + const ENOSPC: number; + const ENOSR: number; + const ENOSTR: number; + const ENOSYS: number; + const ENOTCONN: number; + const ENOTDIR: number; + const ENOTEMPTY: number; + const ENOTSOCK: number; + const ENOTSUP: number; + const ENOTTY: number; + const ENXIO: number; + const EOPNOTSUPP: number; + const EOVERFLOW: number; + const EPERM: number; + const EPIPE: number; + const EPROTO: number; + const EPROTONOSUPPORT: number; + const EPROTOTYPE: number; + const ERANGE: number; + const EROFS: number; + const ESPIPE: number; + const ESRCH: number; + const ESTALE: number; + const ETIME: number; + const ETIMEDOUT: number; + const ETXTBSY: number; + const EWOULDBLOCK: number; + const EXDEV: number; + const WSAEINTR: number; + const WSAEBADF: number; + const WSAEACCES: number; + const WSAEFAULT: number; + const WSAEINVAL: number; + const WSAEMFILE: number; + const WSAEWOULDBLOCK: number; + const WSAEINPROGRESS: number; + const WSAEALREADY: number; + const WSAENOTSOCK: number; + const WSAEDESTADDRREQ: number; + const WSAEMSGSIZE: number; 
+ const WSAEPROTOTYPE: number; + const WSAENOPROTOOPT: number; + const WSAEPROTONOSUPPORT: number; + const WSAESOCKTNOSUPPORT: number; + const WSAEOPNOTSUPP: number; + const WSAEPFNOSUPPORT: number; + const WSAEAFNOSUPPORT: number; + const WSAEADDRINUSE: number; + const WSAEADDRNOTAVAIL: number; + const WSAENETDOWN: number; + const WSAENETUNREACH: number; + const WSAENETRESET: number; + const WSAECONNABORTED: number; + const WSAECONNRESET: number; + const WSAENOBUFS: number; + const WSAEISCONN: number; + const WSAENOTCONN: number; + const WSAESHUTDOWN: number; + const WSAETOOMANYREFS: number; + const WSAETIMEDOUT: number; + const WSAECONNREFUSED: number; + const WSAELOOP: number; + const WSAENAMETOOLONG: number; + const WSAEHOSTDOWN: number; + const WSAEHOSTUNREACH: number; + const WSAENOTEMPTY: number; + const WSAEPROCLIM: number; + const WSAEUSERS: number; + const WSAEDQUOT: number; + const WSAESTALE: number; + const WSAEREMOTE: number; + const WSASYSNOTREADY: number; + const WSAVERNOTSUPPORTED: number; + const WSANOTINITIALISED: number; + const WSAEDISCON: number; + const WSAENOMORE: number; + const WSAECANCELLED: number; + const WSAEINVALIDPROCTABLE: number; + const WSAEINVALIDPROVIDER: number; + const WSAEPROVIDERFAILEDINIT: number; + const WSASYSCALLFAILURE: number; + const WSASERVICE_NOT_FOUND: number; + const WSATYPE_NOT_FOUND: number; + const WSA_E_NO_MORE: number; + const WSA_E_CANCELLED: number; + const WSAEREFUSED: number; + } + namespace priority { + const PRIORITY_LOW: number; + const PRIORITY_BELOW_NORMAL: number; + const PRIORITY_NORMAL: number; + const PRIORITY_ABOVE_NORMAL: number; + const PRIORITY_HIGH: number; + const PRIORITY_HIGHEST: number; + } + } + const devNull: string; + const EOL: string; + /** + * Returns the operating system CPU architecture for which the Node.js binary was + * compiled. Possible values are `'arm'`, `'arm64'`, `'ia32'`, `'mips'`,`'mipsel'`, `'ppc'`, `'ppc64'`, `'s390'`, `'s390x'`, and `'x64'`. + * + * The return value is equivalent to `process.arch`. + * @since v0.5.0 + */ + function arch(): string; + /** + * Returns a string identifying the kernel version. + * + * On POSIX systems, the operating system release is determined by calling [`uname(3)`](https://linux.die.net/man/3/uname). On Windows, `RtlGetVersion()` is used, and if it is not + * available, `GetVersionExW()` will be used. See [https://en.wikipedia.org/wiki/Uname#Examples](https://en.wikipedia.org/wiki/Uname#Examples) for more information. + * @since v13.11.0, v12.17.0 + */ + function version(): string; + /** + * Returns a string identifying the operating system platform for which + * the Node.js binary was compiled. The value is set at compile time. + * Possible values are `'aix'`, `'darwin'`, `'freebsd'`,`'linux'`,`'openbsd'`, `'sunos'`, and `'win32'`. + * + * The return value is equivalent to `process.platform`. + * + * The value `'android'` may also be returned if Node.js is built on the Android + * operating system. [Android support is experimental](https://github.com/nodejs/node/blob/HEAD/BUILDING.md#androidandroid-based-devices-eg-firefox-os). + * @since v0.5.0 + */ + function platform(): NodeJS.Platform; + /** + * Returns the machine type as a string, such as arm, aarch64, mips, mips64, ppc64, ppc64le, s390, s390x, i386, i686, x86_64. + * + * On POSIX systems, the machine type is determined by calling [`uname(3)`](https://linux.die.net/man/3/uname). + * On Windows, `RtlGetVersion()` is used, and if it is not available, `GetVersionExW()` will be used. 
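The `os` members typed in this stretch (`homedir`, the `errno`/`priority` constants, `devNull`, `EOL`, `arch`, `version`, `platform`) are plain host-introspection helpers. A minimal usage sketch, assuming nothing beyond a stock Node.js runtime with these typings installed:

```ts
import * as os from 'node:os';

// Facts about the host and the compiled binary.
console.log(os.platform(), os.arch(), os.version());
console.log('home:', os.homedir());

// Line ending and null device differ between POSIX ('\n', '/dev/null')
// and Windows ('\r\n', '\\.\nul').
console.log(JSON.stringify(os.EOL), os.devNull);

// The priority constants pair with os.getPriority()/os.setPriority().
console.log('PRIORITY_NORMAL =', os.constants.priority.PRIORITY_NORMAL);
```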
+ * See [https://en.wikipedia.org/wiki/Uname#Examples](https://en.wikipedia.org/wiki/Uname#Examples) for more information. + * @since v18.9.0 + */ + function machine(): string; + /** + * Returns the operating system's default directory for temporary files as a + * string. + * @since v0.9.9 + */ + function tmpdir(): string; + /** + * Returns a string identifying the endianness of the CPU for which the Node.js + * binary was compiled. + * + * Possible values are `'BE'` for big endian and `'LE'` for little endian. + * @since v0.9.4 + */ + function endianness(): 'BE' | 'LE'; + /** + * Returns the scheduling priority for the process specified by `pid`. If `pid` is + * not provided or is `0`, the priority of the current process is returned. + * @since v10.10.0 + * @param [pid=0] The process ID to retrieve scheduling priority for. + */ + function getPriority(pid?: number): number; + /** + * Attempts to set the scheduling priority for the process specified by `pid`. If`pid` is not provided or is `0`, the process ID of the current process is used. + * + * The `priority` input must be an integer between `-20` (high priority) and `19`(low priority). Due to differences between Unix priority levels and Windows + * priority classes, `priority` is mapped to one of six priority constants in`os.constants.priority`. When retrieving a process priority level, this range + * mapping may cause the return value to be slightly different on Windows. To avoid + * confusion, set `priority` to one of the priority constants. + * + * On Windows, setting priority to `PRIORITY_HIGHEST` requires elevated user + * privileges. Otherwise the set priority will be silently reduced to`PRIORITY_HIGH`. + * @since v10.10.0 + * @param [pid=0] The process ID to set scheduling priority for. + * @param priority The scheduling priority to assign to the process. + */ + function setPriority(priority: number): void; + function setPriority(pid: number, priority: number): void; +} +declare module 'node:os' { + export * from 'os'; +} diff --git a/node_modules/@types/node/ts4.8/path.d.ts b/node_modules/@types/node/ts4.8/path.d.ts new file mode 100755 index 0000000..1d33f79 --- /dev/null +++ b/node_modules/@types/node/ts4.8/path.d.ts @@ -0,0 +1,191 @@ +declare module 'path/posix' { + import path = require('path'); + export = path; +} +declare module 'path/win32' { + import path = require('path'); + export = path; +} +/** + * The `path` module provides utilities for working with file and directory paths. + * It can be accessed using: + * + * ```js + * const path = require('path'); + * ``` + * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/path.js) + */ +declare module 'path' { + namespace path { + /** + * A parsed path object generated by path.parse() or consumed by path.format(). 
+ */ + interface ParsedPath { + /** + * The root of the path such as '/' or 'c:\' + */ + root: string; + /** + * The full directory path such as '/home/user/dir' or 'c:\path\dir' + */ + dir: string; + /** + * The file name including extension (if any) such as 'index.html' + */ + base: string; + /** + * The file extension (if any) such as '.html' + */ + ext: string; + /** + * The file name without extension (if any) such as 'index' + */ + name: string; + } + interface FormatInputPathObject { + /** + * The root of the path such as '/' or 'c:\' + */ + root?: string | undefined; + /** + * The full directory path such as '/home/user/dir' or 'c:\path\dir' + */ + dir?: string | undefined; + /** + * The file name including extension (if any) such as 'index.html' + */ + base?: string | undefined; + /** + * The file extension (if any) such as '.html' + */ + ext?: string | undefined; + /** + * The file name without extension (if any) such as 'index' + */ + name?: string | undefined; + } + interface PlatformPath { + /** + * Normalize a string path, reducing '..' and '.' parts. + * When multiple slashes are found, they're replaced by a single one; when the path contains a trailing slash, it is preserved. On Windows backslashes are used. + * + * @param path string path to normalize. + * @throws {TypeError} if `path` is not a string. + */ + normalize(path: string): string; + /** + * Join all arguments together and normalize the resulting path. + * + * @param paths paths to join. + * @throws {TypeError} if any of the path segments is not a string. + */ + join(...paths: string[]): string; + /** + * The right-most parameter is considered {to}. Other parameters are considered an array of {from}. + * + * Starting from leftmost {from} parameter, resolves {to} to an absolute path. + * + * If {to} isn't already absolute, {from} arguments are prepended in right to left order, + * until an absolute path is found. If after using all {from} paths still no absolute path is found, + * the current working directory is used as well. The resulting path is normalized, + * and trailing slashes are removed unless the path gets resolved to the root directory. + * + * @param paths A sequence of paths or path segments. + * @throws {TypeError} if any of the arguments is not a string. + */ + resolve(...paths: string[]): string; + /** + * Determines whether {path} is an absolute path. An absolute path will always resolve to the same location, regardless of the working directory. + * + * If the given {path} is a zero-length string, `false` will be returned. + * + * @param path path to test. + * @throws {TypeError} if `path` is not a string. + */ + isAbsolute(path: string): boolean; + /** + * Solve the relative path from {from} to {to} based on the current working directory. + * At times we have two absolute paths, and we need to derive the relative path from one to the other. This is actually the reverse transform of path.resolve. + * + * @throws {TypeError} if either `from` or `to` is not a string. + */ + relative(from: string, to: string): string; + /** + * Return the directory name of a path. Similar to the Unix dirname command. + * + * @param path the path to evaluate. + * @throws {TypeError} if `path` is not a string. + */ + dirname(path: string): string; + /** + * Return the last portion of a path. Similar to the Unix basename command. + * Often used to extract the file name from a fully qualified path. + * + * @param path the path to evaluate. + * @param suffix optionally, an extension to remove from the result. 
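The `ParsedPath`/`FormatInputPathObject` shapes and the `join`/`resolve`/`basename` members above (with `parse`/`format` declared just below) describe the ordinary `path` API. A short sketch of how the pieces fit together, assuming a POSIX host for the literal outputs:

```ts
import * as path from 'node:path';

// join() normalizes while concatenating; resolve() yields an absolute path.
const joined = path.join('src', '..', 'lib', 'index.ts'); // 'lib/index.ts'
const absolute = path.resolve('lib', 'index.ts');         // anchored at process.cwd()
console.log(joined, absolute, path.isAbsolute(absolute)); // ... true

// parse() fills the ParsedPath fields; format() is its inverse.
const parts = path.parse('/home/user/dir/index.html');
console.log(parts.dir, parts.base, parts.name, parts.ext);
console.log(path.format({ dir: '/home/user/dir', name: 'index', ext: '.html' }));
```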
+ * @throws {TypeError} if `path` is not a string or if `ext` is given and is not a string. + */ + basename(path: string, suffix?: string): string; + /** + * Return the extension of the path, from the last '.' to end of string in the last portion of the path. + * If there is no '.' in the last portion of the path or the first character of it is '.', then it returns an empty string. + * + * @param path the path to evaluate. + * @throws {TypeError} if `path` is not a string. + */ + extname(path: string): string; + /** + * The platform-specific file separator. '\\' or '/'. + */ + readonly sep: '\\' | '/'; + /** + * The platform-specific file delimiter. ';' or ':'. + */ + readonly delimiter: ';' | ':'; + /** + * Returns an object from a path string - the opposite of format(). + * + * @param path path to evaluate. + * @throws {TypeError} if `path` is not a string. + */ + parse(path: string): ParsedPath; + /** + * Returns a path string from an object - the opposite of parse(). + * + * @param pathObject path to evaluate. + */ + format(pathObject: FormatInputPathObject): string; + /** + * On Windows systems only, returns an equivalent namespace-prefixed path for the given path. + * If path is not a string, path will be returned without modifications. + * This method is meaningful only on Windows system. + * On POSIX systems, the method is non-operational and always returns path without modifications. + */ + toNamespacedPath(path: string): string; + /** + * Posix specific pathing. + * Same as parent object on posix. + */ + readonly posix: PlatformPath; + /** + * Windows specific pathing. + * Same as parent object on windows + */ + readonly win32: PlatformPath; + } + } + const path: path.PlatformPath; + export = path; +} +declare module 'node:path' { + import path = require('path'); + export = path; +} +declare module 'node:path/posix' { + import path = require('path/posix'); + export = path; +} +declare module 'node:path/win32' { + import path = require('path/win32'); + export = path; +} diff --git a/node_modules/@types/node/ts4.8/perf_hooks.d.ts b/node_modules/@types/node/ts4.8/perf_hooks.d.ts new file mode 100755 index 0000000..5c0b228 --- /dev/null +++ b/node_modules/@types/node/ts4.8/perf_hooks.d.ts @@ -0,0 +1,625 @@ +/** + * This module provides an implementation of a subset of the W3C [Web Performance APIs](https://w3c.github.io/perf-timing-primer/) as well as additional APIs for + * Node.js-specific performance measurements. 
+ * + * Node.js supports the following [Web Performance APIs](https://w3c.github.io/perf-timing-primer/): + * + * * [High Resolution Time](https://www.w3.org/TR/hr-time-2) + * * [Performance Timeline](https://w3c.github.io/performance-timeline/) + * * [User Timing](https://www.w3.org/TR/user-timing/) + * + * ```js + * const { PerformanceObserver, performance } = require('perf_hooks'); + * + * const obs = new PerformanceObserver((items) => { + * console.log(items.getEntries()[0].duration); + * performance.clearMarks(); + * }); + * obs.observe({ type: 'measure' }); + * performance.measure('Start to Now'); + * + * performance.mark('A'); + * doSomeLongRunningProcess(() => { + * performance.measure('A to Now', 'A'); + * + * performance.mark('B'); + * performance.measure('A to B', 'A', 'B'); + * }); + * ``` + * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/perf_hooks.js) + */ +declare module 'perf_hooks' { + import { AsyncResource } from 'node:async_hooks'; + type EntryType = 'node' | 'mark' | 'measure' | 'gc' | 'function' | 'http2' | 'http'; + interface NodeGCPerformanceDetail { + /** + * When `performanceEntry.entryType` is equal to 'gc', `the performance.kind` property identifies + * the type of garbage collection operation that occurred. + * See perf_hooks.constants for valid values. + */ + readonly kind?: number | undefined; + /** + * When `performanceEntry.entryType` is equal to 'gc', the `performance.flags` + * property contains additional information about garbage collection operation. + * See perf_hooks.constants for valid values. + */ + readonly flags?: number | undefined; + } + /** + * @since v8.5.0 + */ + class PerformanceEntry { + protected constructor(); + /** + * The total number of milliseconds elapsed for this entry. This value will not + * be meaningful for all Performance Entry types. + * @since v8.5.0 + */ + readonly duration: number; + /** + * The name of the performance entry. + * @since v8.5.0 + */ + readonly name: string; + /** + * The high resolution millisecond timestamp marking the starting time of the + * Performance Entry. + * @since v8.5.0 + */ + readonly startTime: number; + /** + * The type of the performance entry. It may be one of: + * + * * `'node'` (Node.js only) + * * `'mark'` (available on the Web) + * * `'measure'` (available on the Web) + * * `'gc'` (Node.js only) + * * `'function'` (Node.js only) + * * `'http2'` (Node.js only) + * * `'http'` (Node.js only) + * @since v8.5.0 + */ + readonly entryType: EntryType; + /** + * Additional detail specific to the `entryType`. + * @since v16.0.0 + */ + readonly detail?: NodeGCPerformanceDetail | unknown | undefined; // TODO: Narrow this based on entry type. + toJSON(): any; + } + class PerformanceMark extends PerformanceEntry { + readonly duration: 0; + readonly entryType: 'mark'; + } + class PerformanceMeasure extends PerformanceEntry { + readonly entryType: 'measure'; + } + /** + * _This property is an extension by Node.js. It is not available in Web browsers._ + * + * Provides timing details for Node.js itself. The constructor of this class + * is not exposed to users. + * @since v8.5.0 + */ + class PerformanceNodeTiming extends PerformanceEntry { + /** + * The high resolution millisecond timestamp at which the Node.js process + * completed bootstrapping. If bootstrapping has not yet finished, the property + * has the value of -1. + * @since v8.5.0 + */ + readonly bootstrapComplete: number; + /** + * The high resolution millisecond timestamp at which the Node.js environment was + * initialized. 
+ * @since v8.5.0 + */ + readonly environment: number; + /** + * The high resolution millisecond timestamp of the amount of time the event loop + * has been idle within the event loop's event provider (e.g. `epoll_wait`). This + * does not take CPU usage into consideration. If the event loop has not yet + * started (e.g., in the first tick of the main script), the property has the + * value of 0. + * @since v14.10.0, v12.19.0 + */ + readonly idleTime: number; + /** + * The high resolution millisecond timestamp at which the Node.js event loop + * exited. If the event loop has not yet exited, the property has the value of -1\. + * It can only have a value of not -1 in a handler of the `'exit'` event. + * @since v8.5.0 + */ + readonly loopExit: number; + /** + * The high resolution millisecond timestamp at which the Node.js event loop + * started. If the event loop has not yet started (e.g., in the first tick of the + * main script), the property has the value of -1. + * @since v8.5.0 + */ + readonly loopStart: number; + /** + * The high resolution millisecond timestamp at which the V8 platform was + * initialized. + * @since v8.5.0 + */ + readonly v8Start: number; + } + interface EventLoopUtilization { + idle: number; + active: number; + utilization: number; + } + /** + * @param util1 The result of a previous call to eventLoopUtilization() + * @param util2 The result of a previous call to eventLoopUtilization() prior to util1 + */ + type EventLoopUtilityFunction = (util1?: EventLoopUtilization, util2?: EventLoopUtilization) => EventLoopUtilization; + interface MarkOptions { + /** + * Additional optional detail to include with the mark. + */ + detail?: unknown | undefined; + /** + * An optional timestamp to be used as the mark time. + * @default `performance.now()`. + */ + startTime?: number | undefined; + } + interface MeasureOptions { + /** + * Additional optional detail to include with the mark. + */ + detail?: unknown | undefined; + /** + * Duration between start and end times. + */ + duration?: number | undefined; + /** + * Timestamp to be used as the end time, or a string identifying a previously recorded mark. + */ + end?: number | string | undefined; + /** + * Timestamp to be used as the start time, or a string identifying a previously recorded mark. + */ + start?: number | string | undefined; + } + interface TimerifyOptions { + /** + * A histogram object created using + * `perf_hooks.createHistogram()` that will record runtime durations in + * nanoseconds. + */ + histogram?: RecordableHistogram | undefined; + } + interface Performance { + /** + * If name is not provided, removes all PerformanceMark objects from the Performance Timeline. + * If name is provided, removes only the named mark. + * @param name + */ + clearMarks(name?: string): void; + /** + * If name is not provided, removes all PerformanceMeasure objects from the Performance Timeline. + * If name is provided, removes only the named measure. + * @param name + * @since v16.7.0 + */ + clearMeasures(name?: string): void; + /** + * Returns a list of `PerformanceEntry` objects in chronological order with respect to `performanceEntry.startTime`. + * If you are only interested in performance entries of certain types or that have certain names, see + * `performance.getEntriesByType()` and `performance.getEntriesByName()`. 
+ * @since v16.7.0 + */ + getEntries(): PerformanceEntry[]; + /** + * Returns a list of `PerformanceEntry` objects in chronological order with respect to `performanceEntry.startTime` + * whose `performanceEntry.name` is equal to `name`, and optionally, whose `performanceEntry.entryType` is equal to `type`. + * @param name + * @param type + * @since v16.7.0 + */ + getEntriesByName(name: string, type?: EntryType): PerformanceEntry[]; + /** + * Returns a list of `PerformanceEntry` objects in chronological order with respect to `performanceEntry.startTime` + * whose `performanceEntry.entryType` is equal to `type`. + * @param type + * @since v16.7.0 + */ + getEntriesByType(type: EntryType): PerformanceEntry[]; + /** + * Creates a new PerformanceMark entry in the Performance Timeline. + * A PerformanceMark is a subclass of PerformanceEntry whose performanceEntry.entryType is always 'mark', + * and whose performanceEntry.duration is always 0. + * Performance marks are used to mark specific significant moments in the Performance Timeline. + * @param name + * @return The PerformanceMark entry that was created + */ + mark(name?: string, options?: MarkOptions): PerformanceMark; + /** + * Creates a new PerformanceMeasure entry in the Performance Timeline. + * A PerformanceMeasure is a subclass of PerformanceEntry whose performanceEntry.entryType is always 'measure', + * and whose performanceEntry.duration measures the number of milliseconds elapsed since startMark and endMark. + * + * The startMark argument may identify any existing PerformanceMark in the the Performance Timeline, or may identify + * any of the timestamp properties provided by the PerformanceNodeTiming class. If the named startMark does not exist, + * then startMark is set to timeOrigin by default. + * + * The endMark argument must identify any existing PerformanceMark in the the Performance Timeline or any of the timestamp + * properties provided by the PerformanceNodeTiming class. If the named endMark does not exist, an error will be thrown. + * @param name + * @param startMark + * @param endMark + * @return The PerformanceMeasure entry that was created + */ + measure(name: string, startMark?: string, endMark?: string): PerformanceMeasure; + measure(name: string, options: MeasureOptions): PerformanceMeasure; + /** + * An instance of the PerformanceNodeTiming class that provides performance metrics for specific Node.js operational milestones. + */ + readonly nodeTiming: PerformanceNodeTiming; + /** + * @return the current high resolution millisecond timestamp + */ + now(): number; + /** + * The timeOrigin specifies the high resolution millisecond timestamp from which all performance metric durations are measured. + */ + readonly timeOrigin: number; + /** + * Wraps a function within a new function that measures the running time of the wrapped function. + * A PerformanceObserver must be subscribed to the 'function' event type in order for the timing details to be accessed. + * @param fn + */ + timerify any>(fn: T, options?: TimerifyOptions): T; + /** + * eventLoopUtilization is similar to CPU utilization except that it is calculated using high precision wall-clock time. + * It represents the percentage of time the event loop has spent outside the event loop's event provider (e.g. epoll_wait). + * No other CPU idle time is taken into consideration. 
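The `Performance` interface above (`mark`/`measure`/`getEntries*`/`timerify`/`eventLoopUtilization`) mirrors the browser User Timing API plus Node-specific extras. A minimal sketch wiring it to the `PerformanceObserver` class declared further down:

```ts
import { performance, PerformanceObserver } from 'node:perf_hooks';

// Report every 'measure' entry as it is recorded, then stop observing.
const obs = new PerformanceObserver((list, observer) => {
  for (const entry of list.getEntries()) {
    console.log(`${entry.name}: ${entry.duration.toFixed(2)} ms`);
  }
  observer.disconnect();
});
obs.observe({ type: 'measure' });

performance.mark('work-start');
// ... the code being timed ...
performance.mark('work-end');
performance.measure('work', 'work-start', 'work-end');

// Event-loop utilization since startup: { idle, active, utilization }.
console.log(performance.eventLoopUtilization());
```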
+ */ + eventLoopUtilization: EventLoopUtilityFunction; + } + interface PerformanceObserverEntryList { + /** + * Returns a list of `PerformanceEntry` objects in chronological order + * with respect to `performanceEntry.startTime`. + * + * ```js + * const { + * performance, + * PerformanceObserver + * } = require('perf_hooks'); + * + * const obs = new PerformanceObserver((perfObserverList, observer) => { + * console.log(perfObserverList.getEntries()); + * + * * [ + * * PerformanceEntry { + * * name: 'test', + * * entryType: 'mark', + * * startTime: 81.465639, + * * duration: 0 + * * }, + * * PerformanceEntry { + * * name: 'meow', + * * entryType: 'mark', + * * startTime: 81.860064, + * * duration: 0 + * * } + * * ] + * + * + * performance.clearMarks(); + * performance.clearMeasures(); + * observer.disconnect(); + * }); + * obs.observe({ type: 'mark' }); + * + * performance.mark('test'); + * performance.mark('meow'); + * ``` + * @since v8.5.0 + */ + getEntries(): PerformanceEntry[]; + /** + * Returns a list of `PerformanceEntry` objects in chronological order + * with respect to `performanceEntry.startTime` whose `performanceEntry.name` is + * equal to `name`, and optionally, whose `performanceEntry.entryType` is equal to`type`. + * + * ```js + * const { + * performance, + * PerformanceObserver + * } = require('perf_hooks'); + * + * const obs = new PerformanceObserver((perfObserverList, observer) => { + * console.log(perfObserverList.getEntriesByName('meow')); + * + * * [ + * * PerformanceEntry { + * * name: 'meow', + * * entryType: 'mark', + * * startTime: 98.545991, + * * duration: 0 + * * } + * * ] + * + * console.log(perfObserverList.getEntriesByName('nope')); // [] + * + * console.log(perfObserverList.getEntriesByName('test', 'mark')); + * + * * [ + * * PerformanceEntry { + * * name: 'test', + * * entryType: 'mark', + * * startTime: 63.518931, + * * duration: 0 + * * } + * * ] + * + * console.log(perfObserverList.getEntriesByName('test', 'measure')); // [] + * + * performance.clearMarks(); + * performance.clearMeasures(); + * observer.disconnect(); + * }); + * obs.observe({ entryTypes: ['mark', 'measure'] }); + * + * performance.mark('test'); + * performance.mark('meow'); + * ``` + * @since v8.5.0 + */ + getEntriesByName(name: string, type?: EntryType): PerformanceEntry[]; + /** + * Returns a list of `PerformanceEntry` objects in chronological order + * with respect to `performanceEntry.startTime` whose `performanceEntry.entryType`is equal to `type`. 
+ * + * ```js + * const { + * performance, + * PerformanceObserver + * } = require('perf_hooks'); + * + * const obs = new PerformanceObserver((perfObserverList, observer) => { + * console.log(perfObserverList.getEntriesByType('mark')); + * + * * [ + * * PerformanceEntry { + * * name: 'test', + * * entryType: 'mark', + * * startTime: 55.897834, + * * duration: 0 + * * }, + * * PerformanceEntry { + * * name: 'meow', + * * entryType: 'mark', + * * startTime: 56.350146, + * * duration: 0 + * * } + * * ] + * + * performance.clearMarks(); + * performance.clearMeasures(); + * observer.disconnect(); + * }); + * obs.observe({ type: 'mark' }); + * + * performance.mark('test'); + * performance.mark('meow'); + * ``` + * @since v8.5.0 + */ + getEntriesByType(type: EntryType): PerformanceEntry[]; + } + type PerformanceObserverCallback = (list: PerformanceObserverEntryList, observer: PerformanceObserver) => void; + class PerformanceObserver extends AsyncResource { + constructor(callback: PerformanceObserverCallback); + /** + * Disconnects the `PerformanceObserver` instance from all notifications. + * @since v8.5.0 + */ + disconnect(): void; + /** + * Subscribes the `PerformanceObserver` instance to notifications of new `PerformanceEntry` instances identified either by `options.entryTypes`or `options.type`: + * + * ```js + * const { + * performance, + * PerformanceObserver + * } = require('perf_hooks'); + * + * const obs = new PerformanceObserver((list, observer) => { + * // Called once asynchronously. `list` contains three items. + * }); + * obs.observe({ type: 'mark' }); + * + * for (let n = 0; n < 3; n++) + * performance.mark(`test${n}`); + * ``` + * @since v8.5.0 + */ + observe( + options: + | { + entryTypes: ReadonlyArray; + buffered?: boolean | undefined; + } + | { + type: EntryType; + buffered?: boolean | undefined; + } + ): void; + } + namespace constants { + const NODE_PERFORMANCE_GC_MAJOR: number; + const NODE_PERFORMANCE_GC_MINOR: number; + const NODE_PERFORMANCE_GC_INCREMENTAL: number; + const NODE_PERFORMANCE_GC_WEAKCB: number; + const NODE_PERFORMANCE_GC_FLAGS_NO: number; + const NODE_PERFORMANCE_GC_FLAGS_CONSTRUCT_RETAINED: number; + const NODE_PERFORMANCE_GC_FLAGS_FORCED: number; + const NODE_PERFORMANCE_GC_FLAGS_SYNCHRONOUS_PHANTOM_PROCESSING: number; + const NODE_PERFORMANCE_GC_FLAGS_ALL_AVAILABLE_GARBAGE: number; + const NODE_PERFORMANCE_GC_FLAGS_ALL_EXTERNAL_MEMORY: number; + const NODE_PERFORMANCE_GC_FLAGS_SCHEDULE_IDLE: number; + } + const performance: Performance; + interface EventLoopMonitorOptions { + /** + * The sampling rate in milliseconds. + * Must be greater than zero. + * @default 10 + */ + resolution?: number | undefined; + } + interface Histogram { + /** + * Returns a `Map` object detailing the accumulated percentile distribution. + * @since v11.10.0 + */ + readonly percentiles: Map; + /** + * The number of times the event loop delay exceeded the maximum 1 hour event + * loop delay threshold. + * @since v11.10.0 + */ + readonly exceeds: number; + /** + * The minimum recorded event loop delay. + * @since v11.10.0 + */ + readonly min: number; + /** + * The maximum recorded event loop delay. + * @since v11.10.0 + */ + readonly max: number; + /** + * The mean of the recorded event loop delays. + * @since v11.10.0 + */ + readonly mean: number; + /** + * The standard deviation of the recorded event loop delays. + * @since v11.10.0 + */ + readonly stddev: number; + /** + * Resets the collected histogram data. 
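The `Histogram` accessors typed above, together with the `monitorEventLoopDelay()` and `createHistogram()` factories declared just after them, expose event-loop delay sampling. A small sketch (delays are reported in nanoseconds):

```ts
import { monitorEventLoopDelay, createHistogram } from 'node:perf_hooks';

// Sample event-loop delay every 20 ms.
const h = monitorEventLoopDelay({ resolution: 20 });
h.enable();

setTimeout(() => {
  h.disable();
  console.log('mean(ns):', h.mean, 'p99(ns):', h.percentile(99));

  // A RecordableHistogram accumulates values pushed in explicitly.
  const timings = createHistogram();
  timings.record(1200);
  timings.record(2400);
  console.log('max recorded:', timings.max);
}, 1000);
```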
+ * @since v11.10.0 + */ + reset(): void; + /** + * Returns the value at the given percentile. + * @since v11.10.0 + * @param percentile A percentile value in the range (0, 100]. + */ + percentile(percentile: number): number; + } + interface IntervalHistogram extends Histogram { + /** + * Enables the update interval timer. Returns `true` if the timer was + * started, `false` if it was already started. + * @since v11.10.0 + */ + enable(): boolean; + /** + * Disables the update interval timer. Returns `true` if the timer was + * stopped, `false` if it was already stopped. + * @since v11.10.0 + */ + disable(): boolean; + } + interface RecordableHistogram extends Histogram { + /** + * @since v15.9.0, v14.18.0 + * @param val The amount to record in the histogram. + */ + record(val: number | bigint): void; + /** + * Calculates the amount of time (in nanoseconds) that has passed since the + * previous call to `recordDelta()` and records that amount in the histogram. + * + * ## Examples + * @since v15.9.0, v14.18.0 + */ + recordDelta(): void; + /** + * Adds the values from other to this histogram. + * @since v17.4.0, v16.14.0 + * @param other Recordable Histogram to combine with + */ + add(other: RecordableHistogram): void; + } + /** + * _This property is an extension by Node.js. It is not available in Web browsers._ + * + * Creates an `IntervalHistogram` object that samples and reports the event loop + * delay over time. The delays will be reported in nanoseconds. + * + * Using a timer to detect approximate event loop delay works because the + * execution of timers is tied specifically to the lifecycle of the libuv + * event loop. That is, a delay in the loop will cause a delay in the execution + * of the timer, and those delays are specifically what this API is intended to + * detect. + * + * ```js + * const { monitorEventLoopDelay } = require('perf_hooks'); + * const h = monitorEventLoopDelay({ resolution: 20 }); + * h.enable(); + * // Do something. + * h.disable(); + * console.log(h.min); + * console.log(h.max); + * console.log(h.mean); + * console.log(h.stddev); + * console.log(h.percentiles); + * console.log(h.percentile(50)); + * console.log(h.percentile(99)); + * ``` + * @since v11.10.0 + */ + function monitorEventLoopDelay(options?: EventLoopMonitorOptions): IntervalHistogram; + interface CreateHistogramOptions { + /** + * The minimum recordable value. Must be an integer value greater than 0. + * @default 1 + */ + min?: number | bigint | undefined; + /** + * The maximum recordable value. Must be an integer value greater than min. + * @default Number.MAX_SAFE_INTEGER + */ + max?: number | bigint | undefined; + /** + * The number of accuracy digits. Must be a number between 1 and 5. + * @default 3 + */ + figures?: number | undefined; + } + /** + * Returns a `RecordableHistogram`. + * @since v15.9.0, v14.18.0 + */ + function createHistogram(options?: CreateHistogramOptions): RecordableHistogram; + + import { performance as _performance } from 'perf_hooks'; + global { + /** + * `performance` is a global reference for `require('perf_hooks').performance` + * https://nodejs.org/api/globals.html#performance + * @since v16.0.0 + */ + var performance: typeof globalThis extends { + onmessage: any; + performance: infer T; + } + ? 
T + : typeof _performance; + } +} +declare module 'node:perf_hooks' { + export * from 'perf_hooks'; +} diff --git a/node_modules/@types/node/ts4.8/process.d.ts b/node_modules/@types/node/ts4.8/process.d.ts new file mode 100755 index 0000000..12148f9 --- /dev/null +++ b/node_modules/@types/node/ts4.8/process.d.ts @@ -0,0 +1,1482 @@ +declare module 'process' { + import * as tty from 'node:tty'; + import { Worker } from 'node:worker_threads'; + global { + var process: NodeJS.Process; + namespace NodeJS { + // this namespace merge is here because these are specifically used + // as the type for process.stdin, process.stdout, and process.stderr. + // they can't live in tty.d.ts because we need to disambiguate the imported name. + interface ReadStream extends tty.ReadStream {} + interface WriteStream extends tty.WriteStream {} + interface MemoryUsageFn { + /** + * The `process.memoryUsage()` method iterate over each page to gather informations about memory + * usage which can be slow depending on the program memory allocations. + */ + (): MemoryUsage; + /** + * method returns an integer representing the Resident Set Size (RSS) in bytes. + */ + rss(): number; + } + interface MemoryUsage { + rss: number; + heapTotal: number; + heapUsed: number; + external: number; + arrayBuffers: number; + } + interface CpuUsage { + user: number; + system: number; + } + interface ProcessRelease { + name: string; + sourceUrl?: string | undefined; + headersUrl?: string | undefined; + libUrl?: string | undefined; + lts?: string | undefined; + } + interface ProcessVersions extends Dict { + http_parser: string; + node: string; + v8: string; + ares: string; + uv: string; + zlib: string; + modules: string; + openssl: string; + } + type Platform = 'aix' | 'android' | 'darwin' | 'freebsd' | 'haiku' | 'linux' | 'openbsd' | 'sunos' | 'win32' | 'cygwin' | 'netbsd'; + type Architecture = 'arm' | 'arm64' | 'ia32' | 'mips' | 'mipsel' | 'ppc' | 'ppc64' | 's390' | 's390x' | 'x64'; + type Signals = + | 'SIGABRT' + | 'SIGALRM' + | 'SIGBUS' + | 'SIGCHLD' + | 'SIGCONT' + | 'SIGFPE' + | 'SIGHUP' + | 'SIGILL' + | 'SIGINT' + | 'SIGIO' + | 'SIGIOT' + | 'SIGKILL' + | 'SIGPIPE' + | 'SIGPOLL' + | 'SIGPROF' + | 'SIGPWR' + | 'SIGQUIT' + | 'SIGSEGV' + | 'SIGSTKFLT' + | 'SIGSTOP' + | 'SIGSYS' + | 'SIGTERM' + | 'SIGTRAP' + | 'SIGTSTP' + | 'SIGTTIN' + | 'SIGTTOU' + | 'SIGUNUSED' + | 'SIGURG' + | 'SIGUSR1' + | 'SIGUSR2' + | 'SIGVTALRM' + | 'SIGWINCH' + | 'SIGXCPU' + | 'SIGXFSZ' + | 'SIGBREAK' + | 'SIGLOST' + | 'SIGINFO'; + type UncaughtExceptionOrigin = 'uncaughtException' | 'unhandledRejection'; + type MultipleResolveType = 'resolve' | 'reject'; + type BeforeExitListener = (code: number) => void; + type DisconnectListener = () => void; + type ExitListener = (code: number) => void; + type RejectionHandledListener = (promise: Promise) => void; + type UncaughtExceptionListener = (error: Error, origin: UncaughtExceptionOrigin) => void; + /** + * Most of the time the unhandledRejection will be an Error, but this should not be relied upon + * as *anything* can be thrown/rejected, it is therefore unsafe to assume that the value is an Error. 
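`MemoryUsageFn`, `MemoryUsage`, and `CpuUsage` above type two inexpensive process-introspection calls. A quick sketch (the global `process` is the object these declarations augment):

```ts
// The full snapshot walks every heap page; rss() alone is much cheaper.
const { rss, heapTotal, heapUsed, external, arrayBuffers } = process.memoryUsage();
console.log({ rss, heapTotal, heapUsed, external, arrayBuffers });
console.log('rss only:', process.memoryUsage.rss());

// CpuUsage values are microseconds; pass a previous reading to get a delta.
const before = process.cpuUsage();
for (let i = 0; i < 1e6; i++) {} // burn a little CPU
console.log(process.cpuUsage(before)); // { user, system } since `before`
```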
+ */ + type UnhandledRejectionListener = (reason: unknown, promise: Promise) => void; + type WarningListener = (warning: Error) => void; + type MessageListener = (message: unknown, sendHandle: unknown) => void; + type SignalsListener = (signal: Signals) => void; + type MultipleResolveListener = (type: MultipleResolveType, promise: Promise, value: unknown) => void; + type WorkerListener = (worker: Worker) => void; + interface Socket extends ReadWriteStream { + isTTY?: true | undefined; + } + // Alias for compatibility + interface ProcessEnv extends Dict { + /** + * Can be used to change the default timezone at runtime + */ + TZ?: string; + } + interface HRTime { + (time?: [number, number]): [number, number]; + bigint(): bigint; + } + interface ProcessReport { + /** + * Directory where the report is written. + * working directory of the Node.js process. + * @default '' indicating that reports are written to the current + */ + directory: string; + /** + * Filename where the report is written. + * The default value is the empty string. + * @default '' the output filename will be comprised of a timestamp, + * PID, and sequence number. + */ + filename: string; + /** + * Returns a JSON-formatted diagnostic report for the running process. + * The report's JavaScript stack trace is taken from err, if present. + */ + getReport(err?: Error): string; + /** + * If true, a diagnostic report is generated on fatal errors, + * such as out of memory errors or failed C++ assertions. + * @default false + */ + reportOnFatalError: boolean; + /** + * If true, a diagnostic report is generated when the process + * receives the signal specified by process.report.signal. + * @default false + */ + reportOnSignal: boolean; + /** + * If true, a diagnostic report is generated on uncaught exception. + * @default false + */ + reportOnUncaughtException: boolean; + /** + * The signal used to trigger the creation of a diagnostic report. + * @default 'SIGUSR2' + */ + signal: Signals; + /** + * Writes a diagnostic report to a file. If filename is not provided, the default filename + * includes the date, time, PID, and a sequence number. + * The report's JavaScript stack trace is taken from err, if present. + * + * @param fileName Name of the file where the report is written. + * This should be a relative path, that will be appended to the directory specified in + * `process.report.directory`, or the current working directory of the Node.js process, + * if unspecified. + * @param error A custom error used for reporting the JavaScript stack. + * @return Filename of the generated report. + */ + writeReport(fileName?: string): string; + writeReport(error?: Error): string; + writeReport(fileName?: string, err?: Error): string; + } + interface ResourceUsage { + fsRead: number; + fsWrite: number; + involuntaryContextSwitches: number; + ipcReceived: number; + ipcSent: number; + majorPageFault: number; + maxRSS: number; + minorPageFault: number; + sharedMemorySize: number; + signalsCount: number; + swappedOut: number; + systemCPUTime: number; + unsharedDataSize: number; + unsharedStackSize: number; + userCPUTime: number; + voluntaryContextSwitches: number; + } + interface EmitWarningOptions { + /** + * When `warning` is a `string`, `type` is the name to use for the _type_ of warning being emitted. + * + * @default 'Warning' + */ + type?: string | undefined; + /** + * A unique identifier for the warning instance being emitted. 
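The `HRTime`, `ProcessReport`, and `ResourceUsage` shapes above back `process.hrtime`, `process.report`, and `process.resourceUsage()`. A brief sketch, assuming a recent Node.js where all three are available:

```ts
// Monotonic nanosecond clock, independent of wall-clock adjustments.
const start = process.hrtime.bigint();
for (let i = 0; i < 1e6; i++) {} // placeholder work
console.log(`took ${process.hrtime.bigint() - start} ns`);

// Cumulative resource usage for the current process.
const usage = process.resourceUsage();
console.log('maxRSS:', usage.maxRSS, 'ctx switches:', usage.voluntaryContextSwitches);

// A diagnostic report can be written on demand; with no filename it lands in
// process.report.directory under a timestamp/PID-based name.
// process.report?.writeReport();
```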
+ */ + code?: string | undefined; + /** + * When `warning` is a `string`, `ctor` is an optional function used to limit the generated stack trace. + * + * @default process.emitWarning + */ + ctor?: Function | undefined; + /** + * Additional text to include with the error. + */ + detail?: string | undefined; + } + interface ProcessConfig { + readonly target_defaults: { + readonly cflags: any[]; + readonly default_configuration: string; + readonly defines: string[]; + readonly include_dirs: string[]; + readonly libraries: string[]; + }; + readonly variables: { + readonly clang: number; + readonly host_arch: string; + readonly node_install_npm: boolean; + readonly node_install_waf: boolean; + readonly node_prefix: string; + readonly node_shared_openssl: boolean; + readonly node_shared_v8: boolean; + readonly node_shared_zlib: boolean; + readonly node_use_dtrace: boolean; + readonly node_use_etw: boolean; + readonly node_use_openssl: boolean; + readonly target_arch: string; + readonly v8_no_strict_aliasing: number; + readonly v8_use_snapshot: boolean; + readonly visibility: string; + }; + } + interface Process extends EventEmitter { + /** + * The `process.stdout` property returns a stream connected to`stdout` (fd `1`). It is a `net.Socket` (which is a `Duplex` stream) unless fd `1` refers to a file, in which case it is + * a `Writable` stream. + * + * For example, to copy `process.stdin` to `process.stdout`: + * + * ```js + * import { stdin, stdout } from 'process'; + * + * stdin.pipe(stdout); + * ``` + * + * `process.stdout` differs from other Node.js streams in important ways. See `note on process I/O` for more information. + */ + stdout: WriteStream & { + fd: 1; + }; + /** + * The `process.stderr` property returns a stream connected to`stderr` (fd `2`). It is a `net.Socket` (which is a `Duplex` stream) unless fd `2` refers to a file, in which case it is + * a `Writable` stream. + * + * `process.stderr` differs from other Node.js streams in important ways. See `note on process I/O` for more information. + */ + stderr: WriteStream & { + fd: 2; + }; + /** + * The `process.stdin` property returns a stream connected to`stdin` (fd `0`). It is a `net.Socket` (which is a `Duplex` stream) unless fd `0` refers to a file, in which case it is + * a `Readable` stream. + * + * For details of how to read from `stdin` see `readable.read()`. + * + * As a `Duplex` stream, `process.stdin` can also be used in "old" mode that + * is compatible with scripts written for Node.js prior to v0.10\. + * For more information see `Stream compatibility`. + * + * In "old" streams mode the `stdin` stream is paused by default, so one + * must call `process.stdin.resume()` to read from it. Note also that calling`process.stdin.resume()` itself would switch stream to "old" mode. + */ + stdin: ReadStream & { + fd: 0; + }; + openStdin(): Socket; + /** + * The `process.argv` property returns an array containing the command-line + * arguments passed when the Node.js process was launched. The first element will + * be {@link execPath}. See `process.argv0` if access to the original value + * of `argv[0]` is needed. The second element will be the path to the JavaScript + * file being executed. The remaining elements will be any additional command-line + * arguments. 
+ * + * For example, assuming the following script for `process-args.js`: + * + * ```js + * import { argv } from 'process'; + * + * // print process.argv + * argv.forEach((val, index) => { + * console.log(`${index}: ${val}`); + * }); + * ``` + * + * Launching the Node.js process as: + * + * ```console + * $ node process-args.js one two=three four + * ``` + * + * Would generate the output: + * + * ```text + * 0: /usr/local/bin/node + * 1: /Users/mjr/work/node/process-args.js + * 2: one + * 3: two=three + * 4: four + * ``` + * @since v0.1.27 + */ + argv: string[]; + /** + * The `process.argv0` property stores a read-only copy of the original value of`argv[0]` passed when Node.js starts. + * + * ```console + * $ bash -c 'exec -a customArgv0 ./node' + * > process.argv[0] + * '/Volumes/code/external/node/out/Release/node' + * > process.argv0 + * 'customArgv0' + * ``` + * @since v6.4.0 + */ + argv0: string; + /** + * The `process.execArgv` property returns the set of Node.js-specific command-line + * options passed when the Node.js process was launched. These options do not + * appear in the array returned by the {@link argv} property, and do not + * include the Node.js executable, the name of the script, or any options following + * the script name. These options are useful in order to spawn child processes with + * the same execution environment as the parent. + * + * ```console + * $ node --harmony script.js --version + * ``` + * + * Results in `process.execArgv`: + * + * ```js + * ['--harmony'] + * ``` + * + * And `process.argv`: + * + * ```js + * ['/usr/local/bin/node', 'script.js', '--version'] + * ``` + * + * Refer to `Worker constructor` for the detailed behavior of worker + * threads with this property. + * @since v0.7.7 + */ + execArgv: string[]; + /** + * The `process.execPath` property returns the absolute pathname of the executable + * that started the Node.js process. Symbolic links, if any, are resolved. + * + * ```js + * '/usr/local/bin/node' + * ``` + * @since v0.1.100 + */ + execPath: string; + /** + * The `process.abort()` method causes the Node.js process to exit immediately and + * generate a core file. + * + * This feature is not available in `Worker` threads. + * @since v0.7.0 + */ + abort(): never; + /** + * The `process.chdir()` method changes the current working directory of the + * Node.js process or throws an exception if doing so fails (for instance, if + * the specified `directory` does not exist). + * + * ```js + * import { chdir, cwd } from 'process'; + * + * console.log(`Starting directory: ${cwd()}`); + * try { + * chdir('/tmp'); + * console.log(`New directory: ${cwd()}`); + * } catch (err) { + * console.error(`chdir: ${err}`); + * } + * ``` + * + * This feature is not available in `Worker` threads. + * @since v0.1.17 + */ + chdir(directory: string): void; + /** + * The `process.cwd()` method returns the current working directory of the Node.js + * process. + * + * ```js + * import { cwd } from 'process'; + * + * console.log(`Current directory: ${cwd()}`); + * ``` + * @since v0.1.8 + */ + cwd(): string; + /** + * The port used by the Node.js debugger when enabled. + * + * ```js + * import process from 'process'; + * + * process.debugPort = 5858; + * ``` + * @since v0.7.2 + */ + debugPort: number; + /** + * The `process.emitWarning()` method can be used to emit custom or application + * specific process warnings. These can be listened for by adding a handler to the `'warning'` event. 
+ * + * ```js + * import { emitWarning } from 'process'; + * + * // Emit a warning with a code and additional detail. + * emitWarning('Something happened!', { + * code: 'MY_WARNING', + * detail: 'This is some additional information' + * }); + * // Emits: + * // (node:56338) [MY_WARNING] Warning: Something happened! + * // This is some additional information + * ``` + * + * In this example, an `Error` object is generated internally by`process.emitWarning()` and passed through to the `'warning'` handler. + * + * ```js + * import process from 'process'; + * + * process.on('warning', (warning) => { + * console.warn(warning.name); // 'Warning' + * console.warn(warning.message); // 'Something happened!' + * console.warn(warning.code); // 'MY_WARNING' + * console.warn(warning.stack); // Stack trace + * console.warn(warning.detail); // 'This is some additional information' + * }); + * ``` + * + * If `warning` is passed as an `Error` object, the `options` argument is ignored. + * @since v8.0.0 + * @param warning The warning to emit. + */ + emitWarning(warning: string | Error, ctor?: Function): void; + emitWarning(warning: string | Error, type?: string, ctor?: Function): void; + emitWarning(warning: string | Error, type?: string, code?: string, ctor?: Function): void; + emitWarning(warning: string | Error, options?: EmitWarningOptions): void; + /** + * The `process.env` property returns an object containing the user environment. + * See [`environ(7)`](http://man7.org/linux/man-pages/man7/environ.7.html). + * + * An example of this object looks like: + * + * ```js + * { + * TERM: 'xterm-256color', + * SHELL: '/usr/local/bin/bash', + * USER: 'maciej', + * PATH: '~/.bin/:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin', + * PWD: '/Users/maciej', + * EDITOR: 'vim', + * SHLVL: '1', + * HOME: '/Users/maciej', + * LOGNAME: 'maciej', + * _: '/usr/local/bin/node' + * } + * ``` + * + * It is possible to modify this object, but such modifications will not be + * reflected outside the Node.js process, or (unless explicitly requested) + * to other `Worker` threads. + * In other words, the following example would not work: + * + * ```console + * $ node -e 'process.env.foo = "bar"' && echo $foo + * ``` + * + * While the following will: + * + * ```js + * import { env } from 'process'; + * + * env.foo = 'bar'; + * console.log(env.foo); + * ``` + * + * Assigning a property on `process.env` will implicitly convert the value + * to a string. **This behavior is deprecated.** Future versions of Node.js may + * throw an error when the value is not a string, number, or boolean. + * + * ```js + * import { env } from 'process'; + * + * env.test = null; + * console.log(env.test); + * // => 'null' + * env.test = undefined; + * console.log(env.test); + * // => 'undefined' + * ``` + * + * Use `delete` to delete a property from `process.env`. + * + * ```js + * import { env } from 'process'; + * + * env.TEST = 1; + * delete env.TEST; + * console.log(env.TEST); + * // => undefined + * ``` + * + * On Windows operating systems, environment variables are case-insensitive. + * + * ```js + * import { env } from 'process'; + * + * env.TEST = 1; + * console.log(env.test); + * // => 1 + * ``` + * + * Unless explicitly specified when creating a `Worker` instance, + * each `Worker` thread has its own copy of `process.env`, based on its + * parent thread’s `process.env`, or whatever was specified as the `env` option + * to the `Worker` constructor. 
Changes to `process.env` will not be visible + * across `Worker` threads, and only the main thread can make changes that + * are visible to the operating system or to native add-ons. + * @since v0.1.27 + */ + env: ProcessEnv; + /** + * The `process.exit()` method instructs Node.js to terminate the process + * synchronously with an exit status of `code`. If `code` is omitted, exit uses + * either the 'success' code `0` or the value of `process.exitCode` if it has been + * set. Node.js will not terminate until all the `'exit'` event listeners are + * called. + * + * To exit with a 'failure' code: + * + * ```js + * import { exit } from 'process'; + * + * exit(1); + * ``` + * + * The shell that executed Node.js should see the exit code as `1`. + * + * Calling `process.exit()` will force the process to exit as quickly as possible + * even if there are still asynchronous operations pending that have not yet + * completed fully, including I/O operations to `process.stdout` and`process.stderr`. + * + * In most situations, it is not actually necessary to call `process.exit()`explicitly. The Node.js process will exit on its own _if there is no additional_ + * _work pending_ in the event loop. The `process.exitCode` property can be set to + * tell the process which exit code to use when the process exits gracefully. + * + * For instance, the following example illustrates a _misuse_ of the`process.exit()` method that could lead to data printed to stdout being + * truncated and lost: + * + * ```js + * import { exit } from 'process'; + * + * // This is an example of what *not* to do: + * if (someConditionNotMet()) { + * printUsageToStdout(); + * exit(1); + * } + * ``` + * + * The reason this is problematic is because writes to `process.stdout` in Node.js + * are sometimes _asynchronous_ and may occur over multiple ticks of the Node.js + * event loop. Calling `process.exit()`, however, forces the process to exit _before_ those additional writes to `stdout` can be performed. + * + * Rather than calling `process.exit()` directly, the code _should_ set the`process.exitCode` and allow the process to exit naturally by avoiding + * scheduling any additional work for the event loop: + * + * ```js + * import process from 'process'; + * + * // How to properly set the exit code while letting + * // the process exit gracefully. + * if (someConditionNotMet()) { + * printUsageToStdout(); + * process.exitCode = 1; + * } + * ``` + * + * If it is necessary to terminate the Node.js process due to an error condition, + * throwing an _uncaught_ error and allowing the process to terminate accordingly + * is safer than calling `process.exit()`. + * + * In `Worker` threads, this function stops the current thread rather + * than the current process. + * @since v0.1.13 + * @param [code=0] The exit code. + */ + exit(code?: number): never; + /** + * A number which will be the process exit code, when the process either + * exits gracefully, or is exited via {@link exit} without specifying + * a code. + * + * Specifying a code to {@link exit} will override any + * previous setting of `process.exitCode`. + * @since v0.11.8 + */ + exitCode?: number | undefined; + /** + * The `process.getgid()` method returns the numerical group identity of the + * process. (See [`getgid(2)`](http://man7.org/linux/man-pages/man2/getgid.2.html).) 
+ * + * ```js + * import process from 'process'; + * + * if (process.getgid) { + * console.log(`Current gid: ${process.getgid()}`); + * } + * ``` + * + * This function is only available on POSIX platforms (i.e. not Windows or + * Android). + * @since v0.1.31 + */ + getgid?: () => number; + /** + * The `process.setgid()` method sets the group identity of the process. (See [`setgid(2)`](http://man7.org/linux/man-pages/man2/setgid.2.html).) The `id` can be passed as either a + * numeric ID or a group name + * string. If a group name is specified, this method blocks while resolving the + * associated numeric ID. + * + * ```js + * import process from 'process'; + * + * if (process.getgid && process.setgid) { + * console.log(`Current gid: ${process.getgid()}`); + * try { + * process.setgid(501); + * console.log(`New gid: ${process.getgid()}`); + * } catch (err) { + * console.log(`Failed to set gid: ${err}`); + * } + * } + * ``` + * + * This function is only available on POSIX platforms (i.e. not Windows or + * Android). + * This feature is not available in `Worker` threads. + * @since v0.1.31 + * @param id The group name or ID + */ + setgid?: (id: number | string) => void; + /** + * The `process.getuid()` method returns the numeric user identity of the process. + * (See [`getuid(2)`](http://man7.org/linux/man-pages/man2/getuid.2.html).) + * + * ```js + * import process from 'process'; + * + * if (process.getuid) { + * console.log(`Current uid: ${process.getuid()}`); + * } + * ``` + * + * This function is only available on POSIX platforms (i.e. not Windows or + * Android). + * @since v0.1.28 + */ + getuid?: () => number; + /** + * The `process.setuid(id)` method sets the user identity of the process. (See [`setuid(2)`](http://man7.org/linux/man-pages/man2/setuid.2.html).) The `id` can be passed as either a + * numeric ID or a username string. + * If a username is specified, the method blocks while resolving the associated + * numeric ID. + * + * ```js + * import process from 'process'; + * + * if (process.getuid && process.setuid) { + * console.log(`Current uid: ${process.getuid()}`); + * try { + * process.setuid(501); + * console.log(`New uid: ${process.getuid()}`); + * } catch (err) { + * console.log(`Failed to set uid: ${err}`); + * } + * } + * ``` + * + * This function is only available on POSIX platforms (i.e. not Windows or + * Android). + * This feature is not available in `Worker` threads. + * @since v0.1.28 + */ + setuid?: (id: number | string) => void; + /** + * The `process.geteuid()` method returns the numerical effective user identity of + * the process. (See [`geteuid(2)`](http://man7.org/linux/man-pages/man2/geteuid.2.html).) + * + * ```js + * import process from 'process'; + * + * if (process.geteuid) { + * console.log(`Current uid: ${process.geteuid()}`); + * } + * ``` + * + * This function is only available on POSIX platforms (i.e. not Windows or + * Android). + * @since v2.0.0 + */ + geteuid?: () => number; + /** + * The `process.seteuid()` method sets the effective user identity of the process. + * (See [`seteuid(2)`](http://man7.org/linux/man-pages/man2/seteuid.2.html).) The `id` can be passed as either a numeric ID or a username + * string. If a username is specified, the method blocks while resolving the + * associated numeric ID. 
+ * + * ```js + * import process from 'process'; + * + * if (process.geteuid && process.seteuid) { + * console.log(`Current uid: ${process.geteuid()}`); + * try { + * process.seteuid(501); + * console.log(`New uid: ${process.geteuid()}`); + * } catch (err) { + * console.log(`Failed to set uid: ${err}`); + * } + * } + * ``` + * + * This function is only available on POSIX platforms (i.e. not Windows or + * Android). + * This feature is not available in `Worker` threads. + * @since v2.0.0 + * @param id A user name or ID + */ + seteuid?: (id: number | string) => void; + /** + * The `process.getegid()` method returns the numerical effective group identity + * of the Node.js process. (See [`getegid(2)`](http://man7.org/linux/man-pages/man2/getegid.2.html).) + * + * ```js + * import process from 'process'; + * + * if (process.getegid) { + * console.log(`Current gid: ${process.getegid()}`); + * } + * ``` + * + * This function is only available on POSIX platforms (i.e. not Windows or + * Android). + * @since v2.0.0 + */ + getegid?: () => number; + /** + * The `process.setegid()` method sets the effective group identity of the process. + * (See [`setegid(2)`](http://man7.org/linux/man-pages/man2/setegid.2.html).) The `id` can be passed as either a numeric ID or a group + * name string. If a group name is specified, this method blocks while resolving + * the associated a numeric ID. + * + * ```js + * import process from 'process'; + * + * if (process.getegid && process.setegid) { + * console.log(`Current gid: ${process.getegid()}`); + * try { + * process.setegid(501); + * console.log(`New gid: ${process.getegid()}`); + * } catch (err) { + * console.log(`Failed to set gid: ${err}`); + * } + * } + * ``` + * + * This function is only available on POSIX platforms (i.e. not Windows or + * Android). + * This feature is not available in `Worker` threads. + * @since v2.0.0 + * @param id A group name or ID + */ + setegid?: (id: number | string) => void; + /** + * The `process.getgroups()` method returns an array with the supplementary group + * IDs. POSIX leaves it unspecified if the effective group ID is included but + * Node.js ensures it always is. + * + * ```js + * import process from 'process'; + * + * if (process.getgroups) { + * console.log(process.getgroups()); // [ 16, 21, 297 ] + * } + * ``` + * + * This function is only available on POSIX platforms (i.e. not Windows or + * Android). + * @since v0.9.4 + */ + getgroups?: () => number[]; + /** + * The `process.setgroups()` method sets the supplementary group IDs for the + * Node.js process. This is a privileged operation that requires the Node.js + * process to have `root` or the `CAP_SETGID` capability. + * + * The `groups` array can contain numeric group IDs, group names, or both. + * + * ```js + * import process from 'process'; + * + * if (process.getgroups && process.setgroups) { + * try { + * process.setgroups([501]); + * console.log(process.getgroups()); // new groups + * } catch (err) { + * console.log(`Failed to set groups: ${err}`); + * } + * } + * ``` + * + * This function is only available on POSIX platforms (i.e. not Windows or + * Android). + * This feature is not available in `Worker` threads. + * @since v0.9.4 + */ + setgroups?: (groups: ReadonlyArray) => void; + /** + * The `process.setUncaughtExceptionCaptureCallback()` function sets a function + * that will be invoked when an uncaught exception occurs, which will receive the + * exception value itself as its first argument. 
+ * + * If such a function is set, the `'uncaughtException'` event will + * not be emitted. If `--abort-on-uncaught-exception` was passed from the + * command line or set through `v8.setFlagsFromString()`, the process will + * not abort. Actions configured to take place on exceptions such as report + * generations will be affected too + * + * To unset the capture function,`process.setUncaughtExceptionCaptureCallback(null)` may be used. Calling this + * method with a non-`null` argument while another capture function is set will + * throw an error. + * + * Using this function is mutually exclusive with using the deprecated `domain` built-in module. + * @since v9.3.0 + */ + setUncaughtExceptionCaptureCallback(cb: ((err: Error) => void) | null): void; + /** + * Indicates whether a callback has been set using {@link setUncaughtExceptionCaptureCallback}. + * @since v9.3.0 + */ + hasUncaughtExceptionCaptureCallback(): boolean; + /** + * The `process.version` property contains the Node.js version string. + * + * ```js + * import { version } from 'process'; + * + * console.log(`Version: ${version}`); + * // Version: v14.8.0 + * ``` + * + * To get the version string without the prepended _v_, use`process.versions.node`. + * @since v0.1.3 + */ + readonly version: string; + /** + * The `process.versions` property returns an object listing the version strings of + * Node.js and its dependencies. `process.versions.modules` indicates the current + * ABI version, which is increased whenever a C++ API changes. Node.js will refuse + * to load modules that were compiled against a different module ABI version. + * + * ```js + * import { versions } from 'process'; + * + * console.log(versions); + * ``` + * + * Will generate an object similar to: + * + * ```console + * { node: '11.13.0', + * v8: '7.0.276.38-node.18', + * uv: '1.27.0', + * zlib: '1.2.11', + * brotli: '1.0.7', + * ares: '1.15.0', + * modules: '67', + * nghttp2: '1.34.0', + * napi: '4', + * llhttp: '1.1.1', + * openssl: '1.1.1b', + * cldr: '34.0', + * icu: '63.1', + * tz: '2018e', + * unicode: '11.0' } + * ``` + * @since v0.2.0 + */ + readonly versions: ProcessVersions; + /** + * The `process.config` property returns an `Object` containing the JavaScript + * representation of the configure options used to compile the current Node.js + * executable. This is the same as the `config.gypi` file that was produced when + * running the `./configure` script. + * + * An example of the possible output looks like: + * + * ```js + * { + * target_defaults: + * { cflags: [], + * default_configuration: 'Release', + * defines: [], + * include_dirs: [], + * libraries: [] }, + * variables: + * { + * host_arch: 'x64', + * napi_build_version: 5, + * node_install_npm: 'true', + * node_prefix: '', + * node_shared_cares: 'false', + * node_shared_http_parser: 'false', + * node_shared_libuv: 'false', + * node_shared_zlib: 'false', + * node_use_dtrace: 'false', + * node_use_openssl: 'true', + * node_shared_openssl: 'false', + * strict_aliasing: 'true', + * target_arch: 'x64', + * v8_use_snapshot: 1 + * } + * } + * ``` + * + * The `process.config` property is **not** read-only and there are existing + * modules in the ecosystem that are known to extend, modify, or entirely replace + * the value of `process.config`. + * + * Modifying the `process.config` property, or any child-property of the`process.config` object has been deprecated. The `process.config` will be made + * read-only in a future release. 
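A minimal, illustrative sketch of the capture-callback pair described above (`process.setUncaughtExceptionCaptureCallback()` together with `process.hasUncaughtExceptionCaptureCallback()`); this is an added usage example, not part of the vendored typings, and the error message text is arbitrary:

```js
import process from 'process';

// Install a capture callback only if none is present; while one is set,
// 'uncaughtException' listeners are not invoked.
if (!process.hasUncaughtExceptionCaptureCallback()) {
  process.setUncaughtExceptionCaptureCallback((err) => {
    console.error(`Captured uncaught exception: ${err.message}`);
  });
}

// Passing null unsets the callback again.
process.setUncaughtExceptionCaptureCallback(null);
```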
+ * @since v0.7.7 + */ + readonly config: ProcessConfig; + /** + * The `process.kill()` method sends the `signal` to the process identified by`pid`. + * + * Signal names are strings such as `'SIGINT'` or `'SIGHUP'`. See `Signal Events` and [`kill(2)`](http://man7.org/linux/man-pages/man2/kill.2.html) for more information. + * + * This method will throw an error if the target `pid` does not exist. As a special + * case, a signal of `0` can be used to test for the existence of a process. + * Windows platforms will throw an error if the `pid` is used to kill a process + * group. + * + * Even though the name of this function is `process.kill()`, it is really just a + * signal sender, like the `kill` system call. The signal sent may do something + * other than kill the target process. + * + * ```js + * import process, { kill } from 'process'; + * + * process.on('SIGHUP', () => { + * console.log('Got SIGHUP signal.'); + * }); + * + * setTimeout(() => { + * console.log('Exiting.'); + * process.exit(0); + * }, 100); + * + * kill(process.pid, 'SIGHUP'); + * ``` + * + * When `SIGUSR1` is received by a Node.js process, Node.js will start the + * debugger. See `Signal Events`. + * @since v0.0.6 + * @param pid A process ID + * @param [signal='SIGTERM'] The signal to send, either as a string or number. + */ + kill(pid: number, signal?: string | number): true; + /** + * The `process.pid` property returns the PID of the process. + * + * ```js + * import { pid } from 'process'; + * + * console.log(`This process is pid ${pid}`); + * ``` + * @since v0.1.15 + */ + readonly pid: number; + /** + * The `process.ppid` property returns the PID of the parent of the + * current process. + * + * ```js + * import { ppid } from 'process'; + * + * console.log(`The parent process is pid ${ppid}`); + * ``` + * @since v9.2.0, v8.10.0, v6.13.0 + */ + readonly ppid: number; + /** + * The `process.title` property returns the current process title (i.e. returns + * the current value of `ps`). Assigning a new value to `process.title` modifies + * the current value of `ps`. + * + * When a new value is assigned, different platforms will impose different maximum + * length restrictions on the title. Usually such restrictions are quite limited. + * For instance, on Linux and macOS, `process.title` is limited to the size of the + * binary name plus the length of the command-line arguments because setting the`process.title` overwrites the `argv` memory of the process. Node.js v0.8 + * allowed for longer process title strings by also overwriting the `environ`memory but that was potentially insecure and confusing in some (rather obscure) + * cases. + * + * Assigning a value to `process.title` might not result in an accurate label + * within process manager applications such as macOS Activity Monitor or Windows + * Services Manager. + * @since v0.1.104 + */ + title: string; + /** + * The operating system CPU architecture for which the Node.js binary was compiled. + * Possible values are: `'arm'`, `'arm64'`, `'ia32'`, `'mips'`,`'mipsel'`, `'ppc'`,`'ppc64'`, `'s390'`, `'s390x'`, and `'x64'`. + * + * ```js + * import { arch } from 'process'; + * + * console.log(`This processor architecture is ${arch}`); + * ``` + * @since v0.5.0 + */ + readonly arch: Architecture; + /** + * The `process.platform` property returns a string identifying the operating + * system platform for which the Node.js binary was compiled. 
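The `process.title` description above has no accompanying snippet, so here is a small hedged sketch that also touches `pid`, `ppid` and `arch`; `'my-worker'` is an arbitrary example value, and the effective maximum title length is platform-dependent as noted above:

```js
import process from 'process';

console.log(`pid ${process.pid}, parent pid ${process.ppid}, arch ${process.arch}`);

console.log(`Old title: ${process.title}`);
process.title = 'my-worker'; // what `ps` reports may be truncated on some platforms
console.log(`New title: ${process.title}`);
```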
+ * + * Currently possible values are: + * + * * `'aix'` + * * `'darwin'` + * * `'freebsd'` + * * `'linux'` + * * `'openbsd'` + * * `'sunos'` + * * `'win32'` + * + * ```js + * import { platform } from 'process'; + * + * console.log(`This platform is ${platform}`); + * ``` + * + * The value `'android'` may also be returned if the Node.js is built on the + * Android operating system. However, Android support in Node.js [is experimental](https://github.com/nodejs/node/blob/HEAD/BUILDING.md#androidandroid-based-devices-eg-firefox-os). + * @since v0.1.16 + */ + readonly platform: Platform; + /** + * The `process.mainModule` property provides an alternative way of retrieving `require.main`. The difference is that if the main module changes at + * runtime, `require.main` may still refer to the original main module in + * modules that were required before the change occurred. Generally, it's + * safe to assume that the two refer to the same module. + * + * As with `require.main`, `process.mainModule` will be `undefined` if there + * is no entry script. + * @since v0.1.17 + * @deprecated Since v14.0.0 - Use `main` instead. + */ + mainModule?: Module | undefined; + memoryUsage: MemoryUsageFn; + /** + * The `process.cpuUsage()` method returns the user and system CPU time usage of + * the current process, in an object with properties `user` and `system`, whose + * values are microsecond values (millionth of a second). These values measure time + * spent in user and system code respectively, and may end up being greater than + * actual elapsed time if multiple CPU cores are performing work for this process. + * + * The result of a previous call to `process.cpuUsage()` can be passed as the + * argument to the function, to get a diff reading. + * + * ```js + * import { cpuUsage } from 'process'; + * + * const startUsage = cpuUsage(); + * // { user: 38579, system: 6986 } + * + * // spin the CPU for 500 milliseconds + * const now = Date.now(); + * while (Date.now() - now < 500); + * + * console.log(cpuUsage(startUsage)); + * // { user: 514883, system: 11226 } + * ``` + * @since v6.1.0 + * @param previousValue A previous return value from calling `process.cpuUsage()` + */ + cpuUsage(previousValue?: CpuUsage): CpuUsage; + /** + * `process.nextTick()` adds `callback` to the "next tick queue". This queue is + * fully drained after the current operation on the JavaScript stack runs to + * completion and before the event loop is allowed to continue. It's possible to + * create an infinite loop if one were to recursively call `process.nextTick()`. + * See the [Event Loop](https://nodejs.org/en/docs/guides/event-loop-timers-and-nexttick/#process-nexttick) guide for more background. + * + * ```js + * import { nextTick } from 'process'; + * + * console.log('start'); + * nextTick(() => { + * console.log('nextTick callback'); + * }); + * console.log('scheduled'); + * // Output: + * // start + * // scheduled + * // nextTick callback + * ``` + * + * This is important when developing APIs in order to give users the opportunity + * to assign event handlers _after_ an object has been constructed but before any + * I/O has occurred: + * + * ```js + * import { nextTick } from 'process'; + * + * function MyThing(options) { + * this.setupOptions(options); + * + * nextTick(() => { + * this.startDoingStuff(); + * }); + * } + * + * const thing = new MyThing(); + * thing.getReadyForStuff(); + * + * // thing.startDoingStuff() gets called now, not before. 
+ * ``` + * + * It is very important for APIs to be either 100% synchronous or 100% + * asynchronous. Consider this example: + * + * ```js + * // WARNING! DO NOT USE! BAD UNSAFE HAZARD! + * function maybeSync(arg, cb) { + * if (arg) { + * cb(); + * return; + * } + * + * fs.stat('file', cb); + * } + * ``` + * + * This API is hazardous because in the following case: + * + * ```js + * const maybeTrue = Math.random() > 0.5; + * + * maybeSync(maybeTrue, () => { + * foo(); + * }); + * + * bar(); + * ``` + * + * It is not clear whether `foo()` or `bar()` will be called first. + * + * The following approach is much better: + * + * ```js + * import { nextTick } from 'process'; + * + * function definitelyAsync(arg, cb) { + * if (arg) { + * nextTick(cb); + * return; + * } + * + * fs.stat('file', cb); + * } + * ``` + * @since v0.1.26 + * @param args Additional arguments to pass when invoking the `callback` + */ + nextTick(callback: Function, ...args: any[]): void; + /** + * The `process.release` property returns an `Object` containing metadata related + * to the current release, including URLs for the source tarball and headers-only + * tarball. + * + * `process.release` contains the following properties: + * + * ```js + * { + * name: 'node', + * lts: 'Erbium', + * sourceUrl: 'https://nodejs.org/download/release/v12.18.1/node-v12.18.1.tar.gz', + * headersUrl: 'https://nodejs.org/download/release/v12.18.1/node-v12.18.1-headers.tar.gz', + * libUrl: 'https://nodejs.org/download/release/v12.18.1/win-x64/node.lib' + * } + * ``` + * + * In custom builds from non-release versions of the source tree, only the`name` property may be present. The additional properties should not be + * relied upon to exist. + * @since v3.0.0 + */ + readonly release: ProcessRelease; + features: { + inspector: boolean; + debug: boolean; + uv: boolean; + ipv6: boolean; + tls_alpn: boolean; + tls_sni: boolean; + tls_ocsp: boolean; + tls: boolean; + }; + /** + * `process.umask()` returns the Node.js process's file mode creation mask. Child + * processes inherit the mask from the parent process. + * @since v0.1.19 + * @deprecated Calling `process.umask()` with no argument causes the process-wide umask to be written twice. This introduces a race condition between threads, and is a potential * + * security vulnerability. There is no safe, cross-platform alternative API. + */ + umask(): number; + /** + * Can only be set if not in worker thread. + */ + umask(mask: string | number): number; + /** + * The `process.uptime()` method returns the number of seconds the current Node.js + * process has been running. + * + * The return value includes fractions of a second. Use `Math.floor()` to get whole + * seconds. + * @since v0.5.0 + */ + uptime(): number; + hrtime: HRTime; + /** + * If Node.js is spawned with an IPC channel, the `process.send()` method can be + * used to send messages to the parent process. Messages will be received as a `'message'` event on the parent's `ChildProcess` object. + * + * If Node.js was not spawned with an IPC channel, `process.send` will be`undefined`. + * + * The message goes through serialization and parsing. The resulting message might + * not be the same as what is originally sent. 
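Because `process.send()` as described above exists only when the process was started with an IPC channel, a guarded usage sketch may help; the message shape here is arbitrary and purely illustrative:

```js
import process from 'process';

// process.send is defined only when this script was spawned with an IPC
// channel, e.g. via child_process.fork(); otherwise it is undefined.
if (process.send) {
  process.send({ status: 'ready', uptimeSeconds: process.uptime() });
} else {
  console.log('No IPC channel; process.send is undefined.');
}
```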
+ * @since v0.5.9 + * @param options used to parameterize the sending of certain types of handles.`options` supports the following properties: + */ + send?( + message: any, + sendHandle?: any, + options?: { + swallowErrors?: boolean | undefined; + }, + callback?: (error: Error | null) => void + ): boolean; + /** + * If the Node.js process is spawned with an IPC channel (see the `Child Process` and `Cluster` documentation), the `process.disconnect()` method will close the + * IPC channel to the parent process, allowing the child process to exit gracefully + * once there are no other connections keeping it alive. + * + * The effect of calling `process.disconnect()` is the same as calling `ChildProcess.disconnect()` from the parent process. + * + * If the Node.js process was not spawned with an IPC channel,`process.disconnect()` will be `undefined`. + * @since v0.7.2 + */ + disconnect(): void; + /** + * If the Node.js process is spawned with an IPC channel (see the `Child Process` and `Cluster` documentation), the `process.connected` property will return`true` so long as the IPC + * channel is connected and will return `false` after`process.disconnect()` is called. + * + * Once `process.connected` is `false`, it is no longer possible to send messages + * over the IPC channel using `process.send()`. + * @since v0.7.2 + */ + connected: boolean; + /** + * The `process.allowedNodeEnvironmentFlags` property is a special, + * read-only `Set` of flags allowable within the `NODE_OPTIONS` environment variable. + * + * `process.allowedNodeEnvironmentFlags` extends `Set`, but overrides`Set.prototype.has` to recognize several different possible flag + * representations. `process.allowedNodeEnvironmentFlags.has()` will + * return `true` in the following cases: + * + * * Flags may omit leading single (`-`) or double (`--`) dashes; e.g.,`inspect-brk` for `--inspect-brk`, or `r` for `-r`. + * * Flags passed through to V8 (as listed in `--v8-options`) may replace + * one or more _non-leading_ dashes for an underscore, or vice-versa; + * e.g., `--perf_basic_prof`, `--perf-basic-prof`, `--perf_basic-prof`, + * etc. + * * Flags may contain one or more equals (`=`) characters; all + * characters after and including the first equals will be ignored; + * e.g., `--stack-trace-limit=100`. + * * Flags _must_ be allowable within `NODE_OPTIONS`. + * + * When iterating over `process.allowedNodeEnvironmentFlags`, flags will + * appear only _once_; each will begin with one or more dashes. Flags + * passed through to V8 will contain underscores instead of non-leading + * dashes: + * + * ```js + * import { allowedNodeEnvironmentFlags } from 'process'; + * + * allowedNodeEnvironmentFlags.forEach((flag) => { + * // -r + * // --inspect-brk + * // --abort_on_uncaught_exception + * // ... + * }); + * ``` + * + * The methods `add()`, `clear()`, and `delete()` of`process.allowedNodeEnvironmentFlags` do nothing, and will fail + * silently. + * + * If Node.js was compiled _without_ `NODE_OPTIONS` support (shown in {@link config}), `process.allowedNodeEnvironmentFlags` will + * contain what _would have_ been allowable. + * @since v10.10.0 + */ + allowedNodeEnvironmentFlags: ReadonlySet; + /** + * `process.report` is an object whose methods are used to generate diagnostic + * reports for the current process. Additional documentation is available in the `report documentation`. 
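To complement the `forEach` example above, a short sketch of the relaxed `has()` matching that the `allowedNodeEnvironmentFlags` description lists; the expected `true` results assume a standard build with `NODE_OPTIONS` support:

```js
import { allowedNodeEnvironmentFlags } from 'process';

// Leading dashes may be omitted and '=value' suffixes are ignored, per the
// matching rules described above.
console.log(allowedNodeEnvironmentFlags.has('--inspect-brk'));           // expected: true
console.log(allowedNodeEnvironmentFlags.has('inspect-brk'));             // expected: true
console.log(allowedNodeEnvironmentFlags.has('--stack-trace-limit=100')); // expected: true
```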
+ * @since v11.8.0 + */ + report?: ProcessReport | undefined; + /** + * ```js + * import { resourceUsage } from 'process'; + * + * console.log(resourceUsage()); + * /* + * Will output: + * { + * userCPUTime: 82872, + * systemCPUTime: 4143, + * maxRSS: 33164, + * sharedMemorySize: 0, + * unsharedDataSize: 0, + * unsharedStackSize: 0, + * minorPageFault: 2469, + * majorPageFault: 0, + * swappedOut: 0, + * fsRead: 0, + * fsWrite: 8, + * ipcSent: 0, + * ipcReceived: 0, + * signalsCount: 0, + * voluntaryContextSwitches: 79, + * involuntaryContextSwitches: 1 + * } + * + * ``` + * @since v12.6.0 + * @return the resource usage for the current process. All of these values come from the `uv_getrusage` call which returns a [`uv_rusage_t` struct][uv_rusage_t]. + */ + resourceUsage(): ResourceUsage; + /** + * The `process.traceDeprecation` property indicates whether the`--trace-deprecation` flag is set on the current Node.js process. See the + * documentation for the `'warning' event` and the `emitWarning() method` for more information about this + * flag's behavior. + * @since v0.8.0 + */ + traceDeprecation: boolean; + /* EventEmitter */ + addListener(event: 'beforeExit', listener: BeforeExitListener): this; + addListener(event: 'disconnect', listener: DisconnectListener): this; + addListener(event: 'exit', listener: ExitListener): this; + addListener(event: 'rejectionHandled', listener: RejectionHandledListener): this; + addListener(event: 'uncaughtException', listener: UncaughtExceptionListener): this; + addListener(event: 'uncaughtExceptionMonitor', listener: UncaughtExceptionListener): this; + addListener(event: 'unhandledRejection', listener: UnhandledRejectionListener): this; + addListener(event: 'warning', listener: WarningListener): this; + addListener(event: 'message', listener: MessageListener): this; + addListener(event: Signals, listener: SignalsListener): this; + addListener(event: 'multipleResolves', listener: MultipleResolveListener): this; + addListener(event: 'worker', listener: WorkerListener): this; + emit(event: 'beforeExit', code: number): boolean; + emit(event: 'disconnect'): boolean; + emit(event: 'exit', code: number): boolean; + emit(event: 'rejectionHandled', promise: Promise): boolean; + emit(event: 'uncaughtException', error: Error): boolean; + emit(event: 'uncaughtExceptionMonitor', error: Error): boolean; + emit(event: 'unhandledRejection', reason: unknown, promise: Promise): boolean; + emit(event: 'warning', warning: Error): boolean; + emit(event: 'message', message: unknown, sendHandle: unknown): this; + emit(event: Signals, signal?: Signals): boolean; + emit(event: 'multipleResolves', type: MultipleResolveType, promise: Promise, value: unknown): this; + emit(event: 'worker', listener: WorkerListener): this; + on(event: 'beforeExit', listener: BeforeExitListener): this; + on(event: 'disconnect', listener: DisconnectListener): this; + on(event: 'exit', listener: ExitListener): this; + on(event: 'rejectionHandled', listener: RejectionHandledListener): this; + on(event: 'uncaughtException', listener: UncaughtExceptionListener): this; + on(event: 'uncaughtExceptionMonitor', listener: UncaughtExceptionListener): this; + on(event: 'unhandledRejection', listener: UnhandledRejectionListener): this; + on(event: 'warning', listener: WarningListener): this; + on(event: 'message', listener: MessageListener): this; + on(event: Signals, listener: SignalsListener): this; + on(event: 'multipleResolves', listener: MultipleResolveListener): this; + on(event: 'worker', listener: 
WorkerListener): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: 'beforeExit', listener: BeforeExitListener): this; + once(event: 'disconnect', listener: DisconnectListener): this; + once(event: 'exit', listener: ExitListener): this; + once(event: 'rejectionHandled', listener: RejectionHandledListener): this; + once(event: 'uncaughtException', listener: UncaughtExceptionListener): this; + once(event: 'uncaughtExceptionMonitor', listener: UncaughtExceptionListener): this; + once(event: 'unhandledRejection', listener: UnhandledRejectionListener): this; + once(event: 'warning', listener: WarningListener): this; + once(event: 'message', listener: MessageListener): this; + once(event: Signals, listener: SignalsListener): this; + once(event: 'multipleResolves', listener: MultipleResolveListener): this; + once(event: 'worker', listener: WorkerListener): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: 'beforeExit', listener: BeforeExitListener): this; + prependListener(event: 'disconnect', listener: DisconnectListener): this; + prependListener(event: 'exit', listener: ExitListener): this; + prependListener(event: 'rejectionHandled', listener: RejectionHandledListener): this; + prependListener(event: 'uncaughtException', listener: UncaughtExceptionListener): this; + prependListener(event: 'uncaughtExceptionMonitor', listener: UncaughtExceptionListener): this; + prependListener(event: 'unhandledRejection', listener: UnhandledRejectionListener): this; + prependListener(event: 'warning', listener: WarningListener): this; + prependListener(event: 'message', listener: MessageListener): this; + prependListener(event: Signals, listener: SignalsListener): this; + prependListener(event: 'multipleResolves', listener: MultipleResolveListener): this; + prependListener(event: 'worker', listener: WorkerListener): this; + prependOnceListener(event: 'beforeExit', listener: BeforeExitListener): this; + prependOnceListener(event: 'disconnect', listener: DisconnectListener): this; + prependOnceListener(event: 'exit', listener: ExitListener): this; + prependOnceListener(event: 'rejectionHandled', listener: RejectionHandledListener): this; + prependOnceListener(event: 'uncaughtException', listener: UncaughtExceptionListener): this; + prependOnceListener(event: 'uncaughtExceptionMonitor', listener: UncaughtExceptionListener): this; + prependOnceListener(event: 'unhandledRejection', listener: UnhandledRejectionListener): this; + prependOnceListener(event: 'warning', listener: WarningListener): this; + prependOnceListener(event: 'message', listener: MessageListener): this; + prependOnceListener(event: Signals, listener: SignalsListener): this; + prependOnceListener(event: 'multipleResolves', listener: MultipleResolveListener): this; + prependOnceListener(event: 'worker', listener: WorkerListener): this; + listeners(event: 'beforeExit'): BeforeExitListener[]; + listeners(event: 'disconnect'): DisconnectListener[]; + listeners(event: 'exit'): ExitListener[]; + listeners(event: 'rejectionHandled'): RejectionHandledListener[]; + listeners(event: 'uncaughtException'): UncaughtExceptionListener[]; + listeners(event: 'uncaughtExceptionMonitor'): UncaughtExceptionListener[]; + listeners(event: 'unhandledRejection'): UnhandledRejectionListener[]; + listeners(event: 'warning'): WarningListener[]; + listeners(event: 'message'): MessageListener[]; + listeners(event: Signals): SignalsListener[]; + listeners(event: 'multipleResolves'): 
MultipleResolveListener[]; + listeners(event: 'worker'): WorkerListener[]; + } + } + } + export = process; +} +declare module 'node:process' { + import process = require('process'); + export = process; +} diff --git a/node_modules/@types/node/ts4.8/punycode.d.ts b/node_modules/@types/node/ts4.8/punycode.d.ts new file mode 100755 index 0000000..87ebbb9 --- /dev/null +++ b/node_modules/@types/node/ts4.8/punycode.d.ts @@ -0,0 +1,117 @@ +/** + * **The version of the punycode module bundled in Node.js is being deprecated.**In a future major version of Node.js this module will be removed. Users + * currently depending on the `punycode` module should switch to using the + * userland-provided [Punycode.js](https://github.com/bestiejs/punycode.js) module instead. For punycode-based URL + * encoding, see `url.domainToASCII` or, more generally, the `WHATWG URL API`. + * + * The `punycode` module is a bundled version of the [Punycode.js](https://github.com/bestiejs/punycode.js) module. It + * can be accessed using: + * + * ```js + * const punycode = require('punycode'); + * ``` + * + * [Punycode](https://tools.ietf.org/html/rfc3492) is a character encoding scheme defined by RFC 3492 that is + * primarily intended for use in Internationalized Domain Names. Because host + * names in URLs are limited to ASCII characters only, Domain Names that contain + * non-ASCII characters must be converted into ASCII using the Punycode scheme. + * For instance, the Japanese character that translates into the English word,`'example'` is `'例'`. The Internationalized Domain Name, `'例.com'` (equivalent + * to `'example.com'`) is represented by Punycode as the ASCII string`'xn--fsq.com'`. + * + * The `punycode` module provides a simple implementation of the Punycode standard. + * + * The `punycode` module is a third-party dependency used by Node.js and + * made available to developers as a convenience. Fixes or other modifications to + * the module must be directed to the [Punycode.js](https://github.com/bestiejs/punycode.js) project. + * @deprecated Since v7.0.0 - Deprecated + * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/punycode.js) + */ +declare module 'punycode' { + /** + * The `punycode.decode()` method converts a [Punycode](https://tools.ietf.org/html/rfc3492) string of ASCII-only + * characters to the equivalent string of Unicode codepoints. + * + * ```js + * punycode.decode('maana-pta'); // 'mañana' + * punycode.decode('--dqo34k'); // '☃-⌘' + * ``` + * @since v0.5.1 + */ + function decode(string: string): string; + /** + * The `punycode.encode()` method converts a string of Unicode codepoints to a [Punycode](https://tools.ietf.org/html/rfc3492) string of ASCII-only characters. + * + * ```js + * punycode.encode('mañana'); // 'maana-pta' + * punycode.encode('☃-⌘'); // '--dqo34k' + * ``` + * @since v0.5.1 + */ + function encode(string: string): string; + /** + * The `punycode.toUnicode()` method converts a string representing a domain name + * containing [Punycode](https://tools.ietf.org/html/rfc3492) encoded characters into Unicode. Only the [Punycode](https://tools.ietf.org/html/rfc3492) encoded parts of the domain name are be + * converted. 
+ * + * ```js + * // decode domain names + * punycode.toUnicode('xn--maana-pta.com'); // 'mañana.com' + * punycode.toUnicode('xn----dqo34k.com'); // '☃-⌘.com' + * punycode.toUnicode('example.com'); // 'example.com' + * ``` + * @since v0.6.1 + */ + function toUnicode(domain: string): string; + /** + * The `punycode.toASCII()` method converts a Unicode string representing an + * Internationalized Domain Name to [Punycode](https://tools.ietf.org/html/rfc3492). Only the non-ASCII parts of the + * domain name will be converted. Calling `punycode.toASCII()` on a string that + * already only contains ASCII characters will have no effect. + * + * ```js + * // encode domain names + * punycode.toASCII('mañana.com'); // 'xn--maana-pta.com' + * punycode.toASCII('☃-⌘.com'); // 'xn----dqo34k.com' + * punycode.toASCII('example.com'); // 'example.com' + * ``` + * @since v0.6.1 + */ + function toASCII(domain: string): string; + /** + * @deprecated since v7.0.0 + * The version of the punycode module bundled in Node.js is being deprecated. + * In a future major version of Node.js this module will be removed. + * Users currently depending on the punycode module should switch to using + * the userland-provided Punycode.js module instead. + */ + const ucs2: ucs2; + interface ucs2 { + /** + * @deprecated since v7.0.0 + * The version of the punycode module bundled in Node.js is being deprecated. + * In a future major version of Node.js this module will be removed. + * Users currently depending on the punycode module should switch to using + * the userland-provided Punycode.js module instead. + */ + decode(string: string): number[]; + /** + * @deprecated since v7.0.0 + * The version of the punycode module bundled in Node.js is being deprecated. + * In a future major version of Node.js this module will be removed. + * Users currently depending on the punycode module should switch to using + * the userland-provided Punycode.js module instead. + */ + encode(codePoints: ReadonlyArray): string; + } + /** + * @deprecated since v7.0.0 + * The version of the punycode module bundled in Node.js is being deprecated. + * In a future major version of Node.js this module will be removed. + * Users currently depending on the punycode module should switch to using + * the userland-provided Punycode.js module instead. + */ + const version: string; +} +declare module 'node:punycode' { + export * from 'punycode'; +} diff --git a/node_modules/@types/node/ts4.8/querystring.d.ts b/node_modules/@types/node/ts4.8/querystring.d.ts new file mode 100755 index 0000000..e118547 --- /dev/null +++ b/node_modules/@types/node/ts4.8/querystring.d.ts @@ -0,0 +1,131 @@ +/** + * The `querystring` module provides utilities for parsing and formatting URL + * query strings. It can be accessed using: + * + * ```js + * const querystring = require('querystring'); + * ``` + * + * `querystring` is more performant than `URLSearchParams` but is not a + * standardized API. Use `URLSearchParams` when performance is not critical + * or when compatibility with browser code is desirable. 
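Given the performance/compatibility trade-off stated above, a brief illustrative comparison of the two APIs on the same query string (the query values are arbitrary):

```js
const querystring = require('querystring');

const qs = 'foo=bar&abc=xyz&abc=123';

// Legacy, non-standard but fast:
console.log(querystring.parse(qs));
// { foo: 'bar', abc: [ 'xyz', '123' ] }

// WHATWG standard, also available in browsers:
const params = new URLSearchParams(qs);
console.log(params.getAll('abc'));
// [ 'xyz', '123' ]
```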
+ * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/querystring.js) + */ +declare module 'querystring' { + interface StringifyOptions { + encodeURIComponent?: ((str: string) => string) | undefined; + } + interface ParseOptions { + maxKeys?: number | undefined; + decodeURIComponent?: ((str: string) => string) | undefined; + } + interface ParsedUrlQuery extends NodeJS.Dict {} + interface ParsedUrlQueryInput extends NodeJS.Dict | ReadonlyArray | ReadonlyArray | null> {} + /** + * The `querystring.stringify()` method produces a URL query string from a + * given `obj` by iterating through the object's "own properties". + * + * It serializes the following types of values passed in `obj`:[string](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#String_type) | + * [number](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#Number_type) | + * [bigint](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/BigInt) | + * [boolean](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#Boolean_type) | + * [string\[\]](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#String_type) | + * [number\[\]](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#Number_type) | + * [bigint\[\]](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/BigInt) | + * [boolean\[\]](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#Boolean_type) The numeric values must be finite. Any other input values will be coerced to + * empty strings. + * + * ```js + * querystring.stringify({ foo: 'bar', baz: ['qux', 'quux'], corge: '' }); + * // Returns 'foo=bar&baz=qux&baz=quux&corge=' + * + * querystring.stringify({ foo: 'bar', baz: 'qux' }, ';', ':'); + * // Returns 'foo:bar;baz:qux' + * ``` + * + * By default, characters requiring percent-encoding within the query string will + * be encoded as UTF-8\. If an alternative encoding is required, then an alternative`encodeURIComponent` option will need to be specified: + * + * ```js + * // Assuming gbkEncodeURIComponent function already exists, + * + * querystring.stringify({ w: '中文', foo: 'bar' }, null, null, + * { encodeURIComponent: gbkEncodeURIComponent }); + * ``` + * @since v0.1.25 + * @param obj The object to serialize into a URL query string + * @param [sep='&'] The substring used to delimit key and value pairs in the query string. + * @param [eq='='] . The substring used to delimit keys and values in the query string. + */ + function stringify(obj?: ParsedUrlQueryInput, sep?: string, eq?: string, options?: StringifyOptions): string; + /** + * The `querystring.parse()` method parses a URL query string (`str`) into a + * collection of key and value pairs. + * + * For example, the query string `'foo=bar&abc=xyz&abc=123'` is parsed into: + * + * ```js + * { + * foo: 'bar', + * abc: ['xyz', '123'] + * } + * ``` + * + * The object returned by the `querystring.parse()` method _does not_prototypically inherit from the JavaScript `Object`. This means that typical`Object` methods such as `obj.toString()`, + * `obj.hasOwnProperty()`, and others + * are not defined and _will not work_. + * + * By default, percent-encoded characters within the query string will be assumed + * to use UTF-8 encoding. If an alternative character encoding is used, then an + * alternative `decodeURIComponent` option will need to be specified: + * + * ```js + * // Assuming gbkDecodeURIComponent function already exists... 
+ * + * querystring.parse('w=%D6%D0%CE%C4&foo=bar', null, null, + * { decodeURIComponent: gbkDecodeURIComponent }); + * ``` + * @since v0.1.25 + * @param str The URL query string to parse + * @param [sep='&'] The substring used to delimit key and value pairs in the query string. + * @param [eq='='] . The substring used to delimit keys and values in the query string. + */ + function parse(str: string, sep?: string, eq?: string, options?: ParseOptions): ParsedUrlQuery; + /** + * The querystring.encode() function is an alias for querystring.stringify(). + */ + const encode: typeof stringify; + /** + * The querystring.decode() function is an alias for querystring.parse(). + */ + const decode: typeof parse; + /** + * The `querystring.escape()` method performs URL percent-encoding on the given`str` in a manner that is optimized for the specific requirements of URL + * query strings. + * + * The `querystring.escape()` method is used by `querystring.stringify()` and is + * generally not expected to be used directly. It is exported primarily to allow + * application code to provide a replacement percent-encoding implementation if + * necessary by assigning `querystring.escape` to an alternative function. + * @since v0.1.25 + */ + function escape(str: string): string; + /** + * The `querystring.unescape()` method performs decoding of URL percent-encoded + * characters on the given `str`. + * + * The `querystring.unescape()` method is used by `querystring.parse()` and is + * generally not expected to be used directly. It is exported primarily to allow + * application code to provide a replacement decoding implementation if + * necessary by assigning `querystring.unescape` to an alternative function. + * + * By default, the `querystring.unescape()` method will attempt to use the + * JavaScript built-in `decodeURIComponent()` method to decode. If that fails, + * a safer equivalent that does not throw on malformed URLs will be used. + * @since v0.1.25 + */ + function unescape(str: string): string; +} +declare module 'node:querystring' { + export * from 'querystring'; +} diff --git a/node_modules/@types/node/ts4.8/readline.d.ts b/node_modules/@types/node/ts4.8/readline.d.ts new file mode 100755 index 0000000..6ab64ac --- /dev/null +++ b/node_modules/@types/node/ts4.8/readline.d.ts @@ -0,0 +1,653 @@ +/** + * The `readline` module provides an interface for reading data from a `Readable` stream (such as `process.stdin`) one line at a time. + * + * To use the promise-based APIs: + * + * ```js + * import * as readline from 'node:readline/promises'; + * ``` + * + * To use the callback and sync APIs: + * + * ```js + * import * as readline from 'node:readline'; + * ``` + * + * The following simple example illustrates the basic use of the `readline` module. + * + * ```js + * import * as readline from 'node:readline/promises'; + * import { stdin as input, stdout as output } from 'node:process'; + * + * const rl = readline.createInterface({ input, output }); + * + * const answer = await rl.question('What do you think of Node.js? '); + * + * console.log(`Thank you for your valuable feedback: ${answer}`); + * + * rl.close(); + * ``` + * + * Once this code is invoked, the Node.js application will not terminate until the`readline.Interface` is closed because the interface waits for data to be + * received on the `input` stream. 
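The module overview above shows the promise-based `question()`; for contrast, a minimal sketch of the same interaction through the callback API that the overview also mentions (illustrative only):

```js
const readline = require('node:readline');
const { stdin: input, stdout: output } = require('node:process');

const rl = readline.createInterface({ input, output });

rl.question('What do you think of Node.js? ', (answer) => {
  console.log(`Thank you for your valuable feedback: ${answer}`);
  rl.close();
});
```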
+ * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/readline.js) + */ +declare module 'readline' { + import { Abortable, EventEmitter } from 'node:events'; + import * as promises from 'node:readline/promises'; + + export { promises }; + export interface Key { + sequence?: string | undefined; + name?: string | undefined; + ctrl?: boolean | undefined; + meta?: boolean | undefined; + shift?: boolean | undefined; + } + /** + * Instances of the `readline.Interface` class are constructed using the`readline.createInterface()` method. Every instance is associated with a + * single `input` `Readable` stream and a single `output` `Writable` stream. + * The `output` stream is used to print prompts for user input that arrives on, + * and is read from, the `input` stream. + * @since v0.1.104 + */ + export class Interface extends EventEmitter { + readonly terminal: boolean; + /** + * The current input data being processed by node. + * + * This can be used when collecting input from a TTY stream to retrieve the + * current value that has been processed thus far, prior to the `line` event + * being emitted. Once the `line` event has been emitted, this property will + * be an empty string. + * + * Be aware that modifying the value during the instance runtime may have + * unintended consequences if `rl.cursor` is not also controlled. + * + * **If not using a TTY stream for input, use the `'line'` event.** + * + * One possible use case would be as follows: + * + * ```js + * const values = ['lorem ipsum', 'dolor sit amet']; + * const rl = readline.createInterface(process.stdin); + * const showResults = debounce(() => { + * console.log( + * '\n', + * values.filter((val) => val.startsWith(rl.line)).join(' ') + * ); + * }, 300); + * process.stdin.on('keypress', (c, k) => { + * showResults(); + * }); + * ``` + * @since v0.1.98 + */ + readonly line: string; + /** + * The cursor position relative to `rl.line`. + * + * This will track where the current cursor lands in the input string, when + * reading input from a TTY stream. The position of cursor determines the + * portion of the input string that will be modified as input is processed, + * as well as the column where the terminal caret will be rendered. + * @since v0.1.98 + */ + readonly cursor: number; + /** + * NOTE: According to the documentation: + * + * > Instances of the `readline.Interface` class are constructed using the + * > `readline.createInterface()` method. + * + * @see https://nodejs.org/dist/latest-v10.x/docs/api/readline.html#readline_class_interface + */ + protected constructor(input: NodeJS.ReadableStream, output?: NodeJS.WritableStream, completer?: Completer | AsyncCompleter, terminal?: boolean); + /** + * NOTE: According to the documentation: + * + * > Instances of the `readline.Interface` class are constructed using the + * > `readline.createInterface()` method. + * + * @see https://nodejs.org/dist/latest-v10.x/docs/api/readline.html#readline_class_interface + */ + protected constructor(options: ReadLineOptions); + /** + * The `rl.getPrompt()` method returns the current prompt used by `rl.prompt()`. + * @since v15.3.0 + * @return the current prompt string + */ + getPrompt(): string; + /** + * The `rl.setPrompt()` method sets the prompt that will be written to `output`whenever `rl.prompt()` is called. 
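`rl.getPrompt()` and `rl.setPrompt()` are described above without a snippet, so a brief sketch of how they interact with `rl.prompt()` (documented just below) follows; `'query> '` is an arbitrary prompt string:

```js
const readline = require('node:readline');

const rl = readline.createInterface({
  input: process.stdin,
  output: process.stdout,
});

rl.setPrompt('query> ');     // used by every subsequent rl.prompt() call
console.log(rl.getPrompt()); // 'query> '
rl.prompt();

rl.on('line', (line) => {
  console.log(`Received: ${line}`);
  rl.prompt();
});
```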
+ * @since v0.1.98 + */ + setPrompt(prompt: string): void; + /** + * The `rl.prompt()` method writes the `readline.Interface` instances configured`prompt` to a new line in `output` in order to provide a user with a new + * location at which to provide input. + * + * When called, `rl.prompt()` will resume the `input` stream if it has been + * paused. + * + * If the `readline.Interface` was created with `output` set to `null` or`undefined` the prompt is not written. + * @since v0.1.98 + * @param preserveCursor If `true`, prevents the cursor placement from being reset to `0`. + */ + prompt(preserveCursor?: boolean): void; + /** + * The `rl.question()` method displays the `query` by writing it to the `output`, + * waits for user input to be provided on `input`, then invokes the `callback`function passing the provided input as the first argument. + * + * When called, `rl.question()` will resume the `input` stream if it has been + * paused. + * + * If the `readline.Interface` was created with `output` set to `null` or`undefined` the `query` is not written. + * + * The `callback` function passed to `rl.question()` does not follow the typical + * pattern of accepting an `Error` object or `null` as the first argument. + * The `callback` is called with the provided answer as the only argument. + * + * Example usage: + * + * ```js + * rl.question('What is your favorite food? ', (answer) => { + * console.log(`Oh, so your favorite food is ${answer}`); + * }); + * ``` + * + * Using an `AbortController` to cancel a question. + * + * ```js + * const ac = new AbortController(); + * const signal = ac.signal; + * + * rl.question('What is your favorite food? ', { signal }, (answer) => { + * console.log(`Oh, so your favorite food is ${answer}`); + * }); + * + * signal.addEventListener('abort', () => { + * console.log('The food question timed out'); + * }, { once: true }); + * + * setTimeout(() => ac.abort(), 10000); + * ``` + * + * If this method is invoked as it's util.promisify()ed version, it returns a + * Promise that fulfills with the answer. If the question is canceled using + * an `AbortController` it will reject with an `AbortError`. + * + * ```js + * const util = require('util'); + * const question = util.promisify(rl.question).bind(rl); + * + * async function questionExample() { + * try { + * const answer = await question('What is you favorite food? '); + * console.log(`Oh, so your favorite food is ${answer}`); + * } catch (err) { + * console.error('Question rejected', err); + * } + * } + * questionExample(); + * ``` + * @since v0.3.3 + * @param query A statement or query to write to `output`, prepended to the prompt. + * @param callback A callback function that is invoked with the user's input in response to the `query`. + */ + question(query: string, callback: (answer: string) => void): void; + question(query: string, options: Abortable, callback: (answer: string) => void): void; + /** + * The `rl.pause()` method pauses the `input` stream, allowing it to be resumed + * later if necessary. + * + * Calling `rl.pause()` does not immediately pause other events (including`'line'`) from being emitted by the `readline.Interface` instance. + * @since v0.3.4 + */ + pause(): this; + /** + * The `rl.resume()` method resumes the `input` stream if it has been paused. + * @since v0.3.4 + */ + resume(): this; + /** + * The `rl.close()` method closes the `readline.Interface` instance and + * relinquishes control over the `input` and `output` streams. When called, + * the `'close'` event will be emitted. 
+ * + * Calling `rl.close()` does not immediately stop other events (including `'line'`) + * from being emitted by the `readline.Interface` instance. + * @since v0.1.98 + */ + close(): void; + /** + * The `rl.write()` method will write either `data` or a key sequence identified + * by `key` to the `output`. The `key` argument is supported only if `output` is + * a `TTY` text terminal. See `TTY keybindings` for a list of key + * combinations. + * + * If `key` is specified, `data` is ignored. + * + * When called, `rl.write()` will resume the `input` stream if it has been + * paused. + * + * If the `readline.Interface` was created with `output` set to `null` or`undefined` the `data` and `key` are not written. + * + * ```js + * rl.write('Delete this!'); + * // Simulate Ctrl+U to delete the line written previously + * rl.write(null, { ctrl: true, name: 'u' }); + * ``` + * + * The `rl.write()` method will write the data to the `readline` `Interface`'s`input`_as if it were provided by the user_. + * @since v0.1.98 + */ + write(data: string | Buffer, key?: Key): void; + write(data: undefined | null | string | Buffer, key: Key): void; + /** + * Returns the real position of the cursor in relation to the input + * prompt + string. Long input (wrapping) strings, as well as multiple + * line prompts are included in the calculations. + * @since v13.5.0, v12.16.0 + */ + getCursorPos(): CursorPos; + /** + * events.EventEmitter + * 1. close + * 2. line + * 3. pause + * 4. resume + * 5. SIGCONT + * 6. SIGINT + * 7. SIGTSTP + * 8. history + */ + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: 'close', listener: () => void): this; + addListener(event: 'line', listener: (input: string) => void): this; + addListener(event: 'pause', listener: () => void): this; + addListener(event: 'resume', listener: () => void): this; + addListener(event: 'SIGCONT', listener: () => void): this; + addListener(event: 'SIGINT', listener: () => void): this; + addListener(event: 'SIGTSTP', listener: () => void): this; + addListener(event: 'history', listener: (history: string[]) => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: 'close'): boolean; + emit(event: 'line', input: string): boolean; + emit(event: 'pause'): boolean; + emit(event: 'resume'): boolean; + emit(event: 'SIGCONT'): boolean; + emit(event: 'SIGINT'): boolean; + emit(event: 'SIGTSTP'): boolean; + emit(event: 'history', history: string[]): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: 'close', listener: () => void): this; + on(event: 'line', listener: (input: string) => void): this; + on(event: 'pause', listener: () => void): this; + on(event: 'resume', listener: () => void): this; + on(event: 'SIGCONT', listener: () => void): this; + on(event: 'SIGINT', listener: () => void): this; + on(event: 'SIGTSTP', listener: () => void): this; + on(event: 'history', listener: (history: string[]) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: 'close', listener: () => void): this; + once(event: 'line', listener: (input: string) => void): this; + once(event: 'pause', listener: () => void): this; + once(event: 'resume', listener: () => void): this; + once(event: 'SIGCONT', listener: () => void): this; + once(event: 'SIGINT', listener: () => void): this; + once(event: 'SIGTSTP', listener: () => void): this; + once(event: 'history', listener: (history: string[]) => void): this; + prependListener(event: string, listener: 
(...args: any[]) => void): this; + prependListener(event: 'close', listener: () => void): this; + prependListener(event: 'line', listener: (input: string) => void): this; + prependListener(event: 'pause', listener: () => void): this; + prependListener(event: 'resume', listener: () => void): this; + prependListener(event: 'SIGCONT', listener: () => void): this; + prependListener(event: 'SIGINT', listener: () => void): this; + prependListener(event: 'SIGTSTP', listener: () => void): this; + prependListener(event: 'history', listener: (history: string[]) => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: 'close', listener: () => void): this; + prependOnceListener(event: 'line', listener: (input: string) => void): this; + prependOnceListener(event: 'pause', listener: () => void): this; + prependOnceListener(event: 'resume', listener: () => void): this; + prependOnceListener(event: 'SIGCONT', listener: () => void): this; + prependOnceListener(event: 'SIGINT', listener: () => void): this; + prependOnceListener(event: 'SIGTSTP', listener: () => void): this; + prependOnceListener(event: 'history', listener: (history: string[]) => void): this; + [Symbol.asyncIterator](): AsyncIterableIterator; + } + export type ReadLine = Interface; // type forwarded for backwards compatibility + export type Completer = (line: string) => CompleterResult; + export type AsyncCompleter = (line: string, callback: (err?: null | Error, result?: CompleterResult) => void) => void; + export type CompleterResult = [string[], string]; + export interface ReadLineOptions { + input: NodeJS.ReadableStream; + output?: NodeJS.WritableStream | undefined; + completer?: Completer | AsyncCompleter | undefined; + terminal?: boolean | undefined; + /** + * Initial list of history lines. This option makes sense + * only if `terminal` is set to `true` by the user or by an internal `output` + * check, otherwise the history caching mechanism is not initialized at all. + * @default [] + */ + history?: string[] | undefined; + historySize?: number | undefined; + prompt?: string | undefined; + crlfDelay?: number | undefined; + /** + * If `true`, when a new input line added + * to the history list duplicates an older one, this removes the older line + * from the list. + * @default false + */ + removeHistoryDuplicates?: boolean | undefined; + escapeCodeTimeout?: number | undefined; + tabSize?: number | undefined; + } + /** + * The `readline.createInterface()` method creates a new `readline.Interface`instance. + * + * ```js + * const readline = require('readline'); + * const rl = readline.createInterface({ + * input: process.stdin, + * output: process.stdout + * }); + * ``` + * + * Once the `readline.Interface` instance is created, the most common case is to + * listen for the `'line'` event: + * + * ```js + * rl.on('line', (line) => { + * console.log(`Received: ${line}`); + * }); + * ``` + * + * If `terminal` is `true` for this instance then the `output` stream will get + * the best compatibility if it defines an `output.columns` property and emits + * a `'resize'` event on the `output` if or when the columns ever change + * (`process.stdout` does this automatically when it is a TTY). + * + * When creating a `readline.Interface` using `stdin` as input, the program + * will not terminate until it receives `EOF` (Ctrl+D on + * Linux/macOS, Ctrl+Z followed by Return on + * Windows). 
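The `history` and `removeHistoryDuplicates` fields of `ReadLineOptions` above have no example in these typings; a hedged sketch follows (the seeded history entries are arbitrary):

```js
const readline = require('node:readline');

const rl = readline.createInterface({
  input: process.stdin,
  output: process.stdout,
  terminal: true,                 // history caching requires a terminal
  history: ['status', 'help'],    // initial history lines
  removeHistoryDuplicates: true,  // drop older duplicates of re-entered lines
});

rl.on('history', (history) => {
  console.log(`History is now: ${history.join(', ')}`);
});
```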
+ * If you want your application to exit without waiting for user input, you can `unref()` the standard input stream: + * + * ```js + * process.stdin.unref(); + * ``` + * @since v0.1.98 + */ + export function createInterface(input: NodeJS.ReadableStream, output?: NodeJS.WritableStream, completer?: Completer | AsyncCompleter, terminal?: boolean): Interface; + export function createInterface(options: ReadLineOptions): Interface; + /** + * The `readline.emitKeypressEvents()` method causes the given `Readable` stream to begin emitting `'keypress'` events corresponding to received input. + * + * Optionally, `interface` specifies a `readline.Interface` instance for which + * autocompletion is disabled when copy-pasted input is detected. + * + * If the `stream` is a `TTY`, then it must be in raw mode. + * + * This is automatically called by any readline instance on its `input` if the`input` is a terminal. Closing the `readline` instance does not stop + * the `input` from emitting `'keypress'` events. + * + * ```js + * readline.emitKeypressEvents(process.stdin); + * if (process.stdin.isTTY) + * process.stdin.setRawMode(true); + * ``` + * + * ## Example: Tiny CLI + * + * The following example illustrates the use of `readline.Interface` class to + * implement a small command-line interface: + * + * ```js + * const readline = require('readline'); + * const rl = readline.createInterface({ + * input: process.stdin, + * output: process.stdout, + * prompt: 'OHAI> ' + * }); + * + * rl.prompt(); + * + * rl.on('line', (line) => { + * switch (line.trim()) { + * case 'hello': + * console.log('world!'); + * break; + * default: + * console.log(`Say what? I might have heard '${line.trim()}'`); + * break; + * } + * rl.prompt(); + * }).on('close', () => { + * console.log('Have a great day!'); + * process.exit(0); + * }); + * ``` + * + * ## Example: Read file stream line-by-Line + * + * A common use case for `readline` is to consume an input file one line at a + * time. The easiest way to do so is leveraging the `fs.ReadStream` API as + * well as a `for await...of` loop: + * + * ```js + * const fs = require('fs'); + * const readline = require('readline'); + * + * async function processLineByLine() { + * const fileStream = fs.createReadStream('input.txt'); + * + * const rl = readline.createInterface({ + * input: fileStream, + * crlfDelay: Infinity + * }); + * // Note: we use the crlfDelay option to recognize all instances of CR LF + * // ('\r\n') in input.txt as a single line break. + * + * for await (const line of rl) { + * // Each line in input.txt will be successively available here as `line`. + * console.log(`Line from file: ${line}`); + * } + * } + * + * processLineByLine(); + * ``` + * + * Alternatively, one could use the `'line'` event: + * + * ```js + * const fs = require('fs'); + * const readline = require('readline'); + * + * const rl = readline.createInterface({ + * input: fs.createReadStream('sample.txt'), + * crlfDelay: Infinity + * }); + * + * rl.on('line', (line) => { + * console.log(`Line from file: ${line}`); + * }); + * ``` + * + * Currently, `for await...of` loop can be a bit slower. 
If `async` / `await`flow and speed are both essential, a mixed approach can be applied: + * + * ```js + * const { once } = require('events'); + * const { createReadStream } = require('fs'); + * const { createInterface } = require('readline'); + * + * (async function processLineByLine() { + * try { + * const rl = createInterface({ + * input: createReadStream('big-file.txt'), + * crlfDelay: Infinity + * }); + * + * rl.on('line', (line) => { + * // Process the line. + * }); + * + * await once(rl, 'close'); + * + * console.log('File processed.'); + * } catch (err) { + * console.error(err); + * } + * })(); + * ``` + * @since v0.7.7 + */ + export function emitKeypressEvents(stream: NodeJS.ReadableStream, readlineInterface?: Interface): void; + export type Direction = -1 | 0 | 1; + export interface CursorPos { + rows: number; + cols: number; + } + /** + * The `readline.clearLine()` method clears current line of given `TTY` stream + * in a specified direction identified by `dir`. + * @since v0.7.7 + * @param callback Invoked once the operation completes. + * @return `false` if `stream` wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`. + */ + export function clearLine(stream: NodeJS.WritableStream, dir: Direction, callback?: () => void): boolean; + /** + * The `readline.clearScreenDown()` method clears the given `TTY` stream from + * the current position of the cursor down. + * @since v0.7.7 + * @param callback Invoked once the operation completes. + * @return `false` if `stream` wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`. + */ + export function clearScreenDown(stream: NodeJS.WritableStream, callback?: () => void): boolean; + /** + * The `readline.cursorTo()` method moves cursor to the specified position in a + * given `TTY` `stream`. + * @since v0.7.7 + * @param callback Invoked once the operation completes. + * @return `false` if `stream` wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`. + */ + export function cursorTo(stream: NodeJS.WritableStream, x: number, y?: number, callback?: () => void): boolean; + /** + * The `readline.moveCursor()` method moves the cursor _relative_ to its current + * position in a given `TTY` `stream`. + * + * ## Example: Tiny CLI + * + * The following example illustrates the use of `readline.Interface` class to + * implement a small command-line interface: + * + * ```js + * const readline = require('readline'); + * const rl = readline.createInterface({ + * input: process.stdin, + * output: process.stdout, + * prompt: 'OHAI> ' + * }); + * + * rl.prompt(); + * + * rl.on('line', (line) => { + * switch (line.trim()) { + * case 'hello': + * console.log('world!'); + * break; + * default: + * console.log(`Say what? I might have heard '${line.trim()}'`); + * break; + * } + * rl.prompt(); + * }).on('close', () => { + * console.log('Have a great day!'); + * process.exit(0); + * }); + * ``` + * + * ## Example: Read file stream line-by-Line + * + * A common use case for `readline` is to consume an input file one line at a + * time. 
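The cursor helpers documented above (`readline.cursorTo()`, `readline.clearLine()`, `readline.clearScreenDown()`) appear here only alongside the generic CLI example, so a small illustrative sketch of redrawing a line in place on a TTY follows; the counter and interval values are arbitrary:

```js
const readline = require('node:readline');

// Redraw a progress counter in place: move the cursor to column 0, clear the
// whole line (dir = 0), then write the updated text.
let n = 0;
const timer = setInterval(() => {
  readline.cursorTo(process.stdout, 0);
  readline.clearLine(process.stdout, 0);
  process.stdout.write(`progress: ${++n}/5`);
  if (n === 5) {
    clearInterval(timer);
    process.stdout.write('\n');
  }
}, 200);
```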
The easiest way to do so is leveraging the `fs.ReadStream` API as + * well as a `for await...of` loop: + * + * ```js + * const fs = require('fs'); + * const readline = require('readline'); + * + * async function processLineByLine() { + * const fileStream = fs.createReadStream('input.txt'); + * + * const rl = readline.createInterface({ + * input: fileStream, + * crlfDelay: Infinity + * }); + * // Note: we use the crlfDelay option to recognize all instances of CR LF + * // ('\r\n') in input.txt as a single line break. + * + * for await (const line of rl) { + * // Each line in input.txt will be successively available here as `line`. + * console.log(`Line from file: ${line}`); + * } + * } + * + * processLineByLine(); + * ``` + * + * Alternatively, one could use the `'line'` event: + * + * ```js + * const fs = require('fs'); + * const readline = require('readline'); + * + * const rl = readline.createInterface({ + * input: fs.createReadStream('sample.txt'), + * crlfDelay: Infinity + * }); + * + * rl.on('line', (line) => { + * console.log(`Line from file: ${line}`); + * }); + * ``` + * + * Currently, `for await...of` loop can be a bit slower. If `async` / `await`flow and speed are both essential, a mixed approach can be applied: + * + * ```js + * const { once } = require('events'); + * const { createReadStream } = require('fs'); + * const { createInterface } = require('readline'); + * + * (async function processLineByLine() { + * try { + * const rl = createInterface({ + * input: createReadStream('big-file.txt'), + * crlfDelay: Infinity + * }); + * + * rl.on('line', (line) => { + * // Process the line. + * }); + * + * await once(rl, 'close'); + * + * console.log('File processed.'); + * } catch (err) { + * console.error(err); + * } + * })(); + * ``` + * @since v0.7.7 + * @param callback Invoked once the operation completes. + * @return `false` if `stream` wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`. + */ + export function moveCursor(stream: NodeJS.WritableStream, dx: number, dy: number, callback?: () => void): boolean; +} +declare module 'node:readline' { + export * from 'readline'; +} diff --git a/node_modules/@types/node/ts4.8/readline/promises.d.ts b/node_modules/@types/node/ts4.8/readline/promises.d.ts new file mode 100755 index 0000000..8f9f06f --- /dev/null +++ b/node_modules/@types/node/ts4.8/readline/promises.d.ts @@ -0,0 +1,143 @@ +/** + * The `readline/promise` module provides an API for reading lines of input from a Readable stream one line at a time. + * + * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/readline/promises.js) + * @since v17.0.0 + */ +declare module 'readline/promises' { + import { Interface as _Interface, ReadLineOptions, Completer, AsyncCompleter, Direction } from 'node:readline'; + import { Abortable } from 'node:events'; + + class Interface extends _Interface { + /** + * The rl.question() method displays the query by writing it to the output, waits for user input to be provided on input, + * then invokes the callback function passing the provided input as the first argument. + * + * When called, rl.question() will resume the input stream if it has been paused. + * + * If the readlinePromises.Interface was created with output set to null or undefined the query is not written. + * + * If the question is called after rl.close(), it returns a rejected promise. + * + * Example usage: + * + * ```js + * const answer = await rl.question('What is your favorite food? 
'); + * console.log(`Oh, so your favorite food is ${answer}`); + * ``` + * + * Using an AbortSignal to cancel a question. + * + * ```js + * const signal = AbortSignal.timeout(10_000); + * + * signal.addEventListener('abort', () => { + * console.log('The food question timed out'); + * }, { once: true }); + * + * const answer = await rl.question('What is your favorite food? ', { signal }); + * console.log(`Oh, so your favorite food is ${answer}`); + * ``` + * + * @since v17.0.0 + * @param query A statement or query to write to output, prepended to the prompt. + */ + question(query: string): Promise; + question(query: string, options: Abortable): Promise; + } + + class Readline { + /** + * @param stream A TTY stream. + */ + constructor(stream: NodeJS.WritableStream, options?: { autoCommit?: boolean }); + /** + * The `rl.clearLine()` method adds to the internal list of pending action an action that clears current line of the associated `stream` in a specified direction identified by `dir`. + * Call `rl.commit()` to see the effect of this method, unless `autoCommit: true` was passed to the constructor. + */ + clearLine(dir: Direction): this; + /** + * The `rl.clearScreenDown()` method adds to the internal list of pending action an action that clears the associated `stream` from the current position of the cursor down. + * Call `rl.commit()` to see the effect of this method, unless `autoCommit: true` was passed to the constructor. + */ + clearScreenDown(): this; + /** + * The `rl.commit()` method sends all the pending actions to the associated `stream` and clears the internal list of pending actions. + */ + commit(): Promise; + /** + * The `rl.cursorTo()` method adds to the internal list of pending action an action that moves cursor to the specified position in the associated `stream`. + * Call `rl.commit()` to see the effect of this method, unless `autoCommit: true` was passed to the constructor. + */ + cursorTo(x: number, y?: number): this; + /** + * The `rl.moveCursor()` method adds to the internal list of pending action an action that moves the cursor relative to its current position in the associated `stream`. + * Call `rl.commit()` to see the effect of this method, unless autoCommit: true was passed to the constructor. + */ + moveCursor(dx: number, dy: number): this; + /** + * The `rl.rollback()` method clears the internal list of pending actions without sending it to the associated `stream`. + */ + rollback(): this; + } + + /** + * The `readlinePromises.createInterface()` method creates a new `readlinePromises.Interface` instance. + * + * ```js + * const readlinePromises = require('node:readline/promises'); + * const rl = readlinePromises.createInterface({ + * input: process.stdin, + * output: process.stdout + * }); + * ``` + * + * Once the `readlinePromises.Interface` instance is created, the most common case is to listen for the `'line'` event: + * + * ```js + * rl.on('line', (line) => { + * console.log(`Received: ${line}`); + * }); + * ``` + * + * If `terminal` is `true` for this instance then the `output` stream will get the best compatibility if it defines an `output.columns` property, + * and emits a `'resize'` event on the `output`, if or when the columns ever change (`process.stdout` does this automatically when it is a TTY). + * + * ## Use of the `completer` function + * + * The `completer` function takes the current line entered by the user as an argument, and returns an `Array` with 2 entries: + * + * - An Array with matching entries for the completion. 
+ * - The substring that was used for the matching. + * + * For instance: `[[substr1, substr2, ...], originalsubstring]`. + * + * ```js + * function completer(line) { + * const completions = '.help .error .exit .quit .q'.split(' '); + * const hits = completions.filter((c) => c.startsWith(line)); + * // Show all completions if none found + * return [hits.length ? hits : completions, line]; + * } + * ``` + * + * The `completer` function can also returns a `Promise`, or be asynchronous: + * + * ```js + * async function completer(linePartial) { + * await someAsyncWork(); + * return [['123'], linePartial]; + * } + * ``` + */ + function createInterface( + input: NodeJS.ReadableStream, + output?: NodeJS.WritableStream, + completer?: Completer | AsyncCompleter, + terminal?: boolean, + ): Interface; + function createInterface(options: ReadLineOptions): Interface; +} +declare module 'node:readline/promises' { + export * from 'readline/promises'; +} diff --git a/node_modules/@types/node/ts4.8/repl.d.ts b/node_modules/@types/node/ts4.8/repl.d.ts new file mode 100755 index 0000000..be42ccc --- /dev/null +++ b/node_modules/@types/node/ts4.8/repl.d.ts @@ -0,0 +1,424 @@ +/** + * The `repl` module provides a Read-Eval-Print-Loop (REPL) implementation that + * is available both as a standalone program or includible in other applications. + * It can be accessed using: + * + * ```js + * const repl = require('repl'); + * ``` + * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/repl.js) + */ +declare module 'repl' { + import { Interface, Completer, AsyncCompleter } from 'node:readline'; + import { Context } from 'node:vm'; + import { InspectOptions } from 'node:util'; + interface ReplOptions { + /** + * The input prompt to display. + * @default "> " + */ + prompt?: string | undefined; + /** + * The `Readable` stream from which REPL input will be read. + * @default process.stdin + */ + input?: NodeJS.ReadableStream | undefined; + /** + * The `Writable` stream to which REPL output will be written. + * @default process.stdout + */ + output?: NodeJS.WritableStream | undefined; + /** + * If `true`, specifies that the output should be treated as a TTY terminal, and have + * ANSI/VT100 escape codes written to it. + * Default: checking the value of the `isTTY` property on the output stream upon + * instantiation. + */ + terminal?: boolean | undefined; + /** + * The function to be used when evaluating each given line of input. + * Default: an async wrapper for the JavaScript `eval()` function. An `eval` function can + * error with `repl.Recoverable` to indicate the input was incomplete and prompt for + * additional lines. + * + * @see https://nodejs.org/dist/latest-v10.x/docs/api/repl.html#repl_default_evaluation + * @see https://nodejs.org/dist/latest-v10.x/docs/api/repl.html#repl_custom_evaluation_functions + */ + eval?: REPLEval | undefined; + /** + * Defines if the repl prints output previews or not. + * @default `true` Always `false` in case `terminal` is falsy. + */ + preview?: boolean | undefined; + /** + * If `true`, specifies that the default `writer` function should include ANSI color + * styling to REPL output. If a custom `writer` function is provided then this has no + * effect. + * Default: the REPL instance's `terminal` value. + */ + useColors?: boolean | undefined; + /** + * If `true`, specifies that the default evaluation function will use the JavaScript + * `global` as the context as opposed to creating a new separate context for the REPL + * instance. 
The node CLI REPL sets this value to `true`. + * Default: `false`. + */ + useGlobal?: boolean | undefined; + /** + * If `true`, specifies that the default writer will not output the return value of a + * command if it evaluates to `undefined`. + * Default: `false`. + */ + ignoreUndefined?: boolean | undefined; + /** + * The function to invoke to format the output of each command before writing to `output`. + * Default: a wrapper for `util.inspect`. + * + * @see https://nodejs.org/dist/latest-v10.x/docs/api/repl.html#repl_customizing_repl_output + */ + writer?: REPLWriter | undefined; + /** + * An optional function used for custom Tab auto completion. + * + * @see https://nodejs.org/dist/latest-v11.x/docs/api/readline.html#readline_use_of_the_completer_function + */ + completer?: Completer | AsyncCompleter | undefined; + /** + * A flag that specifies whether the default evaluator executes all JavaScript commands in + * strict mode or default (sloppy) mode. + * Accepted values are: + * - `repl.REPL_MODE_SLOPPY` - evaluates expressions in sloppy mode. + * - `repl.REPL_MODE_STRICT` - evaluates expressions in strict mode. This is equivalent to + * prefacing every repl statement with `'use strict'`. + */ + replMode?: typeof REPL_MODE_SLOPPY | typeof REPL_MODE_STRICT | undefined; + /** + * Stop evaluating the current piece of code when `SIGINT` is received, i.e. `Ctrl+C` is + * pressed. This cannot be used together with a custom `eval` function. + * Default: `false`. + */ + breakEvalOnSigint?: boolean | undefined; + } + type REPLEval = (this: REPLServer, evalCmd: string, context: Context, file: string, cb: (err: Error | null, result: any) => void) => void; + type REPLWriter = (this: REPLServer, obj: any) => string; + /** + * This is the default "writer" value, if none is passed in the REPL options, + * and it can be overridden by custom print functions. + */ + const writer: REPLWriter & { + options: InspectOptions; + }; + type REPLCommandAction = (this: REPLServer, text: string) => void; + interface REPLCommand { + /** + * Help text to be displayed when `.help` is entered. + */ + help?: string | undefined; + /** + * The function to execute, optionally accepting a single string argument. + */ + action: REPLCommandAction; + } + /** + * Instances of `repl.REPLServer` are created using the {@link start} method + * or directly using the JavaScript `new` keyword. + * + * ```js + * const repl = require('repl'); + * + * const options = { useColors: true }; + * + * const firstInstance = repl.start(options); + * const secondInstance = new repl.REPLServer(options); + * ``` + * @since v0.1.91 + */ + class REPLServer extends Interface { + /** + * The `vm.Context` provided to the `eval` function to be used for JavaScript + * evaluation. + */ + readonly context: Context; + /** + * @deprecated since v14.3.0 - Use `input` instead. + */ + readonly inputStream: NodeJS.ReadableStream; + /** + * @deprecated since v14.3.0 - Use `output` instead. + */ + readonly outputStream: NodeJS.WritableStream; + /** + * The `Readable` stream from which REPL input will be read. + */ + readonly input: NodeJS.ReadableStream; + /** + * The `Writable` stream to which REPL output will be written. + */ + readonly output: NodeJS.WritableStream; + /** + * The commands registered via `replServer.defineCommand()`. + */ + readonly commands: NodeJS.ReadOnlyDict; + /** + * A value indicating whether the REPL is currently in "editor mode". 
+ * + * @see https://nodejs.org/dist/latest-v10.x/docs/api/repl.html#repl_commands_and_special_keys + */ + readonly editorMode: boolean; + /** + * A value indicating whether the `_` variable has been assigned. + * + * @see https://nodejs.org/dist/latest-v10.x/docs/api/repl.html#repl_assignment_of_the_underscore_variable + */ + readonly underscoreAssigned: boolean; + /** + * The last evaluation result from the REPL (assigned to the `_` variable inside of the REPL). + * + * @see https://nodejs.org/dist/latest-v10.x/docs/api/repl.html#repl_assignment_of_the_underscore_variable + */ + readonly last: any; + /** + * A value indicating whether the `_error` variable has been assigned. + * + * @since v9.8.0 + * @see https://nodejs.org/dist/latest-v10.x/docs/api/repl.html#repl_assignment_of_the_underscore_variable + */ + readonly underscoreErrAssigned: boolean; + /** + * The last error raised inside the REPL (assigned to the `_error` variable inside of the REPL). + * + * @since v9.8.0 + * @see https://nodejs.org/dist/latest-v10.x/docs/api/repl.html#repl_assignment_of_the_underscore_variable + */ + readonly lastError: any; + /** + * Specified in the REPL options, this is the function to be used when evaluating each + * given line of input. If not specified in the REPL options, this is an async wrapper + * for the JavaScript `eval()` function. + */ + readonly eval: REPLEval; + /** + * Specified in the REPL options, this is a value indicating whether the default + * `writer` function should include ANSI color styling to REPL output. + */ + readonly useColors: boolean; + /** + * Specified in the REPL options, this is a value indicating whether the default `eval` + * function will use the JavaScript `global` as the context as opposed to creating a new + * separate context for the REPL instance. + */ + readonly useGlobal: boolean; + /** + * Specified in the REPL options, this is a value indicating whether the default `writer` + * function should output the result of a command if it evaluates to `undefined`. + */ + readonly ignoreUndefined: boolean; + /** + * Specified in the REPL options, this is the function to invoke to format the output of + * each command before writing to `outputStream`. If not specified in the REPL options, + * this will be a wrapper for `util.inspect`. + */ + readonly writer: REPLWriter; + /** + * Specified in the REPL options, this is the function to use for custom Tab auto-completion. + */ + readonly completer: Completer | AsyncCompleter; + /** + * Specified in the REPL options, this is a flag that specifies whether the default `eval` + * function should execute all JavaScript commands in strict mode or default (sloppy) mode. + * Possible values are: + * - `repl.REPL_MODE_SLOPPY` - evaluates expressions in sloppy mode. + * - `repl.REPL_MODE_STRICT` - evaluates expressions in strict mode. This is equivalent to + * prefacing every repl statement with `'use strict'`. + */ + readonly replMode: typeof REPL_MODE_SLOPPY | typeof REPL_MODE_STRICT; + /** + * NOTE: According to the documentation: + * + * > Instances of `repl.REPLServer` are created using the `repl.start()` method and + * > _should not_ be created directly using the JavaScript `new` keyword. + * + * `REPLServer` cannot be subclassed due to implementation specifics in NodeJS. + * + * @see https://nodejs.org/dist/latest-v10.x/docs/api/repl.html#repl_class_replserver + */ + private constructor(); + /** + * The `replServer.defineCommand()` method is used to add new `.`\-prefixed commands + * to the REPL instance. 
Such commands are invoked by typing a `.` followed by the `keyword`. The `cmd` is either a `Function` or an `Object` with the following + * properties: + * + * The following example shows two new commands added to the REPL instance: + * + * ```js + * const repl = require('repl'); + * + * const replServer = repl.start({ prompt: '> ' }); + * replServer.defineCommand('sayhello', { + * help: 'Say hello', + * action(name) { + * this.clearBufferedCommand(); + * console.log(`Hello, ${name}!`); + * this.displayPrompt(); + * } + * }); + * replServer.defineCommand('saybye', function saybye() { + * console.log('Goodbye!'); + * this.close(); + * }); + * ``` + * + * The new commands can then be used from within the REPL instance: + * + * ```console + * > .sayhello Node.js User + * Hello, Node.js User! + * > .saybye + * Goodbye! + * ``` + * @since v0.3.0 + * @param keyword The command keyword (_without_ a leading `.` character). + * @param cmd The function to invoke when the command is processed. + */ + defineCommand(keyword: string, cmd: REPLCommandAction | REPLCommand): void; + /** + * The `replServer.displayPrompt()` method readies the REPL instance for input + * from the user, printing the configured `prompt` to a new line in the `output` and resuming the `input` to accept new input. + * + * When multi-line input is being entered, an ellipsis is printed rather than the + * 'prompt'. + * + * When `preserveCursor` is `true`, the cursor placement will not be reset to `0`. + * + * The `replServer.displayPrompt` method is primarily intended to be called from + * within the action function for commands registered using the `replServer.defineCommand()` method. + * @since v0.1.91 + */ + displayPrompt(preserveCursor?: boolean): void; + /** + * The `replServer.clearBufferedCommand()` method clears any command that has been + * buffered but not yet executed. This method is primarily intended to be + * called from within the action function for commands registered using the `replServer.defineCommand()` method. + * @since v9.0.0 + */ + clearBufferedCommand(): void; + /** + * Initializes a history log file for the REPL instance. When executing the + * Node.js binary and using the command-line REPL, a history file is initialized + * by default. However, this is not the case when creating a REPL + * programmatically. Use this method to initialize a history log file when working + * with REPL instances programmatically. + * @since v11.10.0 + * @param historyPath the path to the history file + * @param callback called when history writes are ready or upon error + */ + setupHistory(path: string, callback: (err: Error | null, repl: this) => void): void; + /** + * events.EventEmitter + * 1. close - inherited from `readline.Interface` + * 2. line - inherited from `readline.Interface` + * 3. pause - inherited from `readline.Interface` + * 4. resume - inherited from `readline.Interface` + * 5. SIGCONT - inherited from `readline.Interface` + * 6. SIGINT - inherited from `readline.Interface` + * 7. SIGTSTP - inherited from `readline.Interface` + * 8. exit + * 9. 
reset + */ + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: 'close', listener: () => void): this; + addListener(event: 'line', listener: (input: string) => void): this; + addListener(event: 'pause', listener: () => void): this; + addListener(event: 'resume', listener: () => void): this; + addListener(event: 'SIGCONT', listener: () => void): this; + addListener(event: 'SIGINT', listener: () => void): this; + addListener(event: 'SIGTSTP', listener: () => void): this; + addListener(event: 'exit', listener: () => void): this; + addListener(event: 'reset', listener: (context: Context) => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: 'close'): boolean; + emit(event: 'line', input: string): boolean; + emit(event: 'pause'): boolean; + emit(event: 'resume'): boolean; + emit(event: 'SIGCONT'): boolean; + emit(event: 'SIGINT'): boolean; + emit(event: 'SIGTSTP'): boolean; + emit(event: 'exit'): boolean; + emit(event: 'reset', context: Context): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: 'close', listener: () => void): this; + on(event: 'line', listener: (input: string) => void): this; + on(event: 'pause', listener: () => void): this; + on(event: 'resume', listener: () => void): this; + on(event: 'SIGCONT', listener: () => void): this; + on(event: 'SIGINT', listener: () => void): this; + on(event: 'SIGTSTP', listener: () => void): this; + on(event: 'exit', listener: () => void): this; + on(event: 'reset', listener: (context: Context) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: 'close', listener: () => void): this; + once(event: 'line', listener: (input: string) => void): this; + once(event: 'pause', listener: () => void): this; + once(event: 'resume', listener: () => void): this; + once(event: 'SIGCONT', listener: () => void): this; + once(event: 'SIGINT', listener: () => void): this; + once(event: 'SIGTSTP', listener: () => void): this; + once(event: 'exit', listener: () => void): this; + once(event: 'reset', listener: (context: Context) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: 'close', listener: () => void): this; + prependListener(event: 'line', listener: (input: string) => void): this; + prependListener(event: 'pause', listener: () => void): this; + prependListener(event: 'resume', listener: () => void): this; + prependListener(event: 'SIGCONT', listener: () => void): this; + prependListener(event: 'SIGINT', listener: () => void): this; + prependListener(event: 'SIGTSTP', listener: () => void): this; + prependListener(event: 'exit', listener: () => void): this; + prependListener(event: 'reset', listener: (context: Context) => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: 'close', listener: () => void): this; + prependOnceListener(event: 'line', listener: (input: string) => void): this; + prependOnceListener(event: 'pause', listener: () => void): this; + prependOnceListener(event: 'resume', listener: () => void): this; + prependOnceListener(event: 'SIGCONT', listener: () => void): this; + prependOnceListener(event: 'SIGINT', listener: () => void): this; + prependOnceListener(event: 'SIGTSTP', listener: () => void): this; + prependOnceListener(event: 'exit', listener: () => void): this; + prependOnceListener(event: 'reset', listener: (context: Context) => void): this; + } + /** + 
* A flag passed in the REPL options. Evaluates expressions in sloppy mode. + */ + const REPL_MODE_SLOPPY: unique symbol; + /** + * A flag passed in the REPL options. Evaluates expressions in strict mode. + * This is equivalent to prefacing every repl statement with `'use strict'`. + */ + const REPL_MODE_STRICT: unique symbol; + /** + * The `repl.start()` method creates and starts a {@link REPLServer} instance. + * + * If `options` is a string, then it specifies the input prompt: + * + * ```js + * const repl = require('repl'); + * + * // a Unix style prompt + * repl.start('$ '); + * ``` + * @since v0.1.91 + */ + function start(options?: string | ReplOptions): REPLServer; + /** + * Indicates a recoverable error that a `REPLServer` can use to support multi-line input. + * + * @see https://nodejs.org/dist/latest-v10.x/docs/api/repl.html#repl_recoverable_errors + */ + class Recoverable extends SyntaxError { + err: Error; + constructor(err: Error); + } +} +declare module 'node:repl' { + export * from 'repl'; +} diff --git a/node_modules/@types/node/ts4.8/stream.d.ts b/node_modules/@types/node/ts4.8/stream.d.ts new file mode 100755 index 0000000..a0df689 --- /dev/null +++ b/node_modules/@types/node/ts4.8/stream.d.ts @@ -0,0 +1,1340 @@ +/** + * A stream is an abstract interface for working with streaming data in Node.js. + * The `stream` module provides an API for implementing the stream interface. + * + * There are many stream objects provided by Node.js. For instance, a `request to an HTTP server` and `process.stdout` are both stream instances. + * + * Streams can be readable, writable, or both. All streams are instances of `EventEmitter`. + * + * To access the `stream` module: + * + * ```js + * const stream = require('stream'); + * ``` + * + * The `stream` module is useful for creating new types of stream instances. It is + * usually not necessary to use the `stream` module to consume streams. + * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/stream.js) + */ +declare module 'stream' { + import { EventEmitter, Abortable } from 'node:events'; + import { Blob as NodeBlob } from "node:buffer"; + import * as streamPromises from 'node:stream/promises'; + import * as streamConsumers from 'node:stream/consumers'; + import * as streamWeb from 'node:stream/web'; + class internal extends EventEmitter { + pipe( + destination: T, + options?: { + end?: boolean | undefined; + } + ): T; + } + namespace internal { + class Stream extends internal { + constructor(opts?: ReadableOptions); + } + interface StreamOptions extends Abortable { + emitClose?: boolean | undefined; + highWaterMark?: number | undefined; + objectMode?: boolean | undefined; + construct?(this: T, callback: (error?: Error | null) => void): void; + destroy?(this: T, error: Error | null, callback: (error: Error | null) => void): void; + autoDestroy?: boolean | undefined; + } + interface ReadableOptions extends StreamOptions { + encoding?: BufferEncoding | undefined; + read?(this: Readable, size: number): void; + } + /** + * @since v0.9.4 + */ + class Readable extends Stream implements NodeJS.ReadableStream { + /** + * A utility method for creating Readable Streams out of iterators. + */ + static from(iterable: Iterable | AsyncIterable, options?: ReadableOptions): Readable; + /** + * A utility method for creating a `Readable` from a web `ReadableStream`. 
+ * @since v17.0.0 + * @experimental + */ + static fromWeb(readableStream: streamWeb.ReadableStream, options?: Pick): Readable; + /** + * Returns whether the stream has been read from or cancelled. + * @since v16.8.0 + */ + static isDisturbed(stream: Readable | NodeJS.ReadableStream): boolean; + /** + * A utility method for creating a web `ReadableStream` from a `Readable`. + * @since v17.0.0 + * @experimental + */ + static toWeb(streamReadable: Readable): streamWeb.ReadableStream; + /** + * Returns whether the stream was destroyed or errored before emitting `'end'`. + * @since v16.8.0 + * @experimental + */ + readonly readableAborted: boolean; + /** + * Is `true` if it is safe to call `readable.read()`, which means + * the stream has not been destroyed or emitted `'error'` or `'end'`. + * @since v11.4.0 + */ + readable: boolean; + /** + * Returns whether `'data'` has been emitted. + * @since v16.7.0, v14.18.0 + * @experimental + */ + readonly readableDidRead: boolean; + /** + * Getter for the property `encoding` of a given `Readable` stream. The `encoding`property can be set using the `readable.setEncoding()` method. + * @since v12.7.0 + */ + readonly readableEncoding: BufferEncoding | null; + /** + * Becomes `true` when `'end'` event is emitted. + * @since v12.9.0 + */ + readonly readableEnded: boolean; + /** + * This property reflects the current state of a `Readable` stream as described + * in the `Three states` section. + * @since v9.4.0 + */ + readonly readableFlowing: boolean | null; + /** + * Returns the value of `highWaterMark` passed when creating this `Readable`. + * @since v9.3.0 + */ + readonly readableHighWaterMark: number; + /** + * This property contains the number of bytes (or objects) in the queue + * ready to be read. The value provides introspection data regarding + * the status of the `highWaterMark`. + * @since v9.4.0 + */ + readonly readableLength: number; + /** + * Getter for the property `objectMode` of a given `Readable` stream. + * @since v12.3.0 + */ + readonly readableObjectMode: boolean; + /** + * Is `true` after `readable.destroy()` has been called. + * @since v18.0.0 + */ + destroyed: boolean; + /** + * Is true after 'close' has been emitted. + * @since v8.0.0 + */ + readonly closed: boolean; + /** + * Returns error if the stream has been destroyed with an error. + * @since v18.0.0 + */ + readonly errored: Error | null; + constructor(opts?: ReadableOptions); + _construct?(callback: (error?: Error | null) => void): void; + _read(size: number): void; + /** + * The `readable.read()` method reads data out of the internal buffer and + * returns it. If no data is available to be read, `null` is returned. By default, + * the data is returned as a `Buffer` object unless an encoding has been + * specified using the `readable.setEncoding()` method or the stream is operating + * in object mode. + * + * The optional `size` argument specifies a specific number of bytes to read. If`size` bytes are not available to be read, `null` will be returned _unless_the stream has ended, in which + * case all of the data remaining in the internal + * buffer will be returned. + * + * If the `size` argument is not specified, all of the data contained in the + * internal buffer will be returned. + * + * The `size` argument must be less than or equal to 1 GiB. + * + * The `readable.read()` method should only be called on `Readable` streams + * operating in paused mode. In flowing mode, `readable.read()` is called + * automatically until the internal buffer is fully drained. 
+ * + * ```js + * const readable = getReadableStreamSomehow(); + * + * // 'readable' may be triggered multiple times as data is buffered in + * readable.on('readable', () => { + * let chunk; + * console.log('Stream is readable (new data received in buffer)'); + * // Use a loop to make sure we read all currently available data + * while (null !== (chunk = readable.read())) { + * console.log(`Read ${chunk.length} bytes of data...`); + * } + * }); + * + * // 'end' will be triggered once when there is no more data available + * readable.on('end', () => { + * console.log('Reached end of stream.'); + * }); + * ``` + * + * Each call to `readable.read()` returns a chunk of data, or `null`. The chunks + * are not concatenated. A `while` loop is necessary to consume all data + * currently in the buffer. When reading a large file `.read()` may return `null`, + * having consumed all buffered content so far, but there is still more data to + * come not yet buffered. In this case a new `'readable'` event will be emitted + * when there is more data in the buffer. Finally the `'end'` event will be + * emitted when there is no more data to come. + * + * Therefore to read a file's whole contents from a `readable`, it is necessary + * to collect chunks across multiple `'readable'` events: + * + * ```js + * const chunks = []; + * + * readable.on('readable', () => { + * let chunk; + * while (null !== (chunk = readable.read())) { + * chunks.push(chunk); + * } + * }); + * + * readable.on('end', () => { + * const content = chunks.join(''); + * }); + * ``` + * + * A `Readable` stream in object mode will always return a single item from + * a call to `readable.read(size)`, regardless of the value of the`size` argument. + * + * If the `readable.read()` method returns a chunk of data, a `'data'` event will + * also be emitted. + * + * Calling {@link read} after the `'end'` event has + * been emitted will return `null`. No runtime error will be raised. + * @since v0.9.4 + * @param size Optional argument to specify how much data to read. + */ + read(size?: number): any; + /** + * The `readable.setEncoding()` method sets the character encoding for + * data read from the `Readable` stream. + * + * By default, no encoding is assigned and stream data will be returned as`Buffer` objects. Setting an encoding causes the stream data + * to be returned as strings of the specified encoding rather than as `Buffer`objects. For instance, calling `readable.setEncoding('utf8')` will cause the + * output data to be interpreted as UTF-8 data, and passed as strings. Calling`readable.setEncoding('hex')` will cause the data to be encoded in hexadecimal + * string format. + * + * The `Readable` stream will properly handle multi-byte characters delivered + * through the stream that would otherwise become improperly decoded if simply + * pulled from the stream as `Buffer` objects. + * + * ```js + * const readable = getReadableStreamSomehow(); + * readable.setEncoding('utf8'); + * readable.on('data', (chunk) => { + * assert.equal(typeof chunk, 'string'); + * console.log('Got %d characters of string data:', chunk.length); + * }); + * ``` + * @since v0.9.4 + * @param encoding The encoding to use. + */ + setEncoding(encoding: BufferEncoding): this; + /** + * The `readable.pause()` method will cause a stream in flowing mode to stop + * emitting `'data'` events, switching out of flowing mode. Any data that + * becomes available will remain in the internal buffer. 
+ * + * ```js + * const readable = getReadableStreamSomehow(); + * readable.on('data', (chunk) => { + * console.log(`Received ${chunk.length} bytes of data.`); + * readable.pause(); + * console.log('There will be no additional data for 1 second.'); + * setTimeout(() => { + * console.log('Now data will start flowing again.'); + * readable.resume(); + * }, 1000); + * }); + * ``` + * + * The `readable.pause()` method has no effect if there is a `'readable'`event listener. + * @since v0.9.4 + */ + pause(): this; + /** + * The `readable.resume()` method causes an explicitly paused `Readable` stream to + * resume emitting `'data'` events, switching the stream into flowing mode. + * + * The `readable.resume()` method can be used to fully consume the data from a + * stream without actually processing any of that data: + * + * ```js + * getReadableStreamSomehow() + * .resume() + * .on('end', () => { + * console.log('Reached the end, but did not read anything.'); + * }); + * ``` + * + * The `readable.resume()` method has no effect if there is a `'readable'`event listener. + * @since v0.9.4 + */ + resume(): this; + /** + * The `readable.isPaused()` method returns the current operating state of the`Readable`. This is used primarily by the mechanism that underlies the`readable.pipe()` method. In most + * typical cases, there will be no reason to + * use this method directly. + * + * ```js + * const readable = new stream.Readable(); + * + * readable.isPaused(); // === false + * readable.pause(); + * readable.isPaused(); // === true + * readable.resume(); + * readable.isPaused(); // === false + * ``` + * @since v0.11.14 + */ + isPaused(): boolean; + /** + * The `readable.unpipe()` method detaches a `Writable` stream previously attached + * using the {@link pipe} method. + * + * If the `destination` is not specified, then _all_ pipes are detached. + * + * If the `destination` is specified, but no pipe is set up for it, then + * the method does nothing. + * + * ```js + * const fs = require('fs'); + * const readable = getReadableStreamSomehow(); + * const writable = fs.createWriteStream('file.txt'); + * // All the data from readable goes into 'file.txt', + * // but only for the first second. + * readable.pipe(writable); + * setTimeout(() => { + * console.log('Stop writing to file.txt.'); + * readable.unpipe(writable); + * console.log('Manually close the file stream.'); + * writable.end(); + * }, 1000); + * ``` + * @since v0.9.4 + * @param destination Optional specific stream to unpipe + */ + unpipe(destination?: NodeJS.WritableStream): this; + /** + * Passing `chunk` as `null` signals the end of the stream (EOF) and behaves the + * same as `readable.push(null)`, after which no more data can be written. The EOF + * signal is put at the end of the buffer and any buffered data will still be + * flushed. + * + * The `readable.unshift()` method pushes a chunk of data back into the internal + * buffer. This is useful in certain situations where a stream is being consumed by + * code that needs to "un-consume" some amount of data that it has optimistically + * pulled out of the source, so that the data can be passed on to some other party. + * + * The `stream.unshift(chunk)` method cannot be called after the `'end'` event + * has been emitted or a runtime error will be thrown. + * + * Developers using `stream.unshift()` often should consider switching to + * use of a `Transform` stream instead. See the `API for stream implementers` section for more information. 
+ * + * ```js + * // Pull off a header delimited by \n\n. + * // Use unshift() if we get too much. + * // Call the callback with (error, header, stream). + * const { StringDecoder } = require('string_decoder'); + * function parseHeader(stream, callback) { + * stream.on('error', callback); + * stream.on('readable', onReadable); + * const decoder = new StringDecoder('utf8'); + * let header = ''; + * function onReadable() { + * let chunk; + * while (null !== (chunk = stream.read())) { + * const str = decoder.write(chunk); + * if (str.includes('\n\n')) { + * // Found the header boundary. + * const split = str.split(/\n\n/); + * header += split.shift(); + * const remaining = split.join('\n\n'); + * const buf = Buffer.from(remaining, 'utf8'); + * stream.removeListener('error', callback); + * // Remove the 'readable' listener before unshifting. + * stream.removeListener('readable', onReadable); + * if (buf.length) + * stream.unshift(buf); + * // Now the body of the message can be read from the stream. + * callback(null, header, stream); + * return; + * } + * // Still reading the header. + * header += str; + * } + * } + * } + * ``` + * + * Unlike {@link push}, `stream.unshift(chunk)` will not + * end the reading process by resetting the internal reading state of the stream. + * This can cause unexpected results if `readable.unshift()` is called during a + * read (i.e. from within a {@link _read} implementation on a + * custom stream). Following the call to `readable.unshift()` with an immediate {@link push} will reset the reading state appropriately, + * however it is best to simply avoid calling `readable.unshift()` while in the + * process of performing a read. + * @since v0.9.11 + * @param chunk Chunk of data to unshift onto the read queue. For streams not operating in object mode, `chunk` must be a string, `Buffer`, `Uint8Array` or `null`. For object mode + * streams, `chunk` may be any JavaScript value. + * @param encoding Encoding of string chunks. Must be a valid `Buffer` encoding, such as `'utf8'` or `'ascii'`. + */ + unshift(chunk: any, encoding?: BufferEncoding): void; + /** + * Prior to Node.js 0.10, streams did not implement the entire `stream` module API + * as it is currently defined. (See `Compatibility` for more information.) + * + * When using an older Node.js library that emits `'data'` events and has a {@link pause} method that is advisory only, the`readable.wrap()` method can be used to create a `Readable` + * stream that uses + * the old stream as its data source. + * + * It will rarely be necessary to use `readable.wrap()` but the method has been + * provided as a convenience for interacting with older Node.js applications and + * libraries. + * + * ```js + * const { OldReader } = require('./old-api-module.js'); + * const { Readable } = require('stream'); + * const oreader = new OldReader(); + * const myReader = new Readable().wrap(oreader); + * + * myReader.on('readable', () => { + * myReader.read(); // etc. + * }); + * ``` + * @since v0.9.4 + * @param stream An "old style" readable stream + */ + wrap(stream: NodeJS.ReadableStream): this; + push(chunk: any, encoding?: BufferEncoding): boolean; + _destroy(error: Error | null, callback: (error?: Error | null) => void): void; + /** + * Destroy the stream. Optionally emit an `'error'` event, and emit a `'close'`event (unless `emitClose` is set to `false`). After this call, the readable + * stream will release any internal resources and subsequent calls to `push()`will be ignored. 
+ * + * Once `destroy()` has been called any further calls will be a no-op and no + * further errors except from `_destroy()` may be emitted as `'error'`. + * + * Implementors should not override this method, but instead implement `readable._destroy()`. + * @since v8.0.0 + * @param error Error which will be passed as payload in `'error'` event + */ + destroy(error?: Error): this; + /** + * Event emitter + * The defined events on documents including: + * 1. close + * 2. data + * 3. end + * 4. error + * 5. pause + * 6. readable + * 7. resume + */ + addListener(event: 'close', listener: () => void): this; + addListener(event: 'data', listener: (chunk: any) => void): this; + addListener(event: 'end', listener: () => void): this; + addListener(event: 'error', listener: (err: Error) => void): this; + addListener(event: 'pause', listener: () => void): this; + addListener(event: 'readable', listener: () => void): this; + addListener(event: 'resume', listener: () => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: 'close'): boolean; + emit(event: 'data', chunk: any): boolean; + emit(event: 'end'): boolean; + emit(event: 'error', err: Error): boolean; + emit(event: 'pause'): boolean; + emit(event: 'readable'): boolean; + emit(event: 'resume'): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: 'close', listener: () => void): this; + on(event: 'data', listener: (chunk: any) => void): this; + on(event: 'end', listener: () => void): this; + on(event: 'error', listener: (err: Error) => void): this; + on(event: 'pause', listener: () => void): this; + on(event: 'readable', listener: () => void): this; + on(event: 'resume', listener: () => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: 'close', listener: () => void): this; + once(event: 'data', listener: (chunk: any) => void): this; + once(event: 'end', listener: () => void): this; + once(event: 'error', listener: (err: Error) => void): this; + once(event: 'pause', listener: () => void): this; + once(event: 'readable', listener: () => void): this; + once(event: 'resume', listener: () => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: 'close', listener: () => void): this; + prependListener(event: 'data', listener: (chunk: any) => void): this; + prependListener(event: 'end', listener: () => void): this; + prependListener(event: 'error', listener: (err: Error) => void): this; + prependListener(event: 'pause', listener: () => void): this; + prependListener(event: 'readable', listener: () => void): this; + prependListener(event: 'resume', listener: () => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + prependOnceListener(event: 'close', listener: () => void): this; + prependOnceListener(event: 'data', listener: (chunk: any) => void): this; + prependOnceListener(event: 'end', listener: () => void): this; + prependOnceListener(event: 'error', listener: (err: Error) => void): this; + prependOnceListener(event: 'pause', listener: () => void): this; + prependOnceListener(event: 'readable', listener: () => void): this; + prependOnceListener(event: 'resume', listener: () => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + removeListener(event: 'close', listener: () => void): this; + removeListener(event: 'data', listener: (chunk: any) => void): this; + 
removeListener(event: 'end', listener: () => void): this; + removeListener(event: 'error', listener: (err: Error) => void): this; + removeListener(event: 'pause', listener: () => void): this; + removeListener(event: 'readable', listener: () => void): this; + removeListener(event: 'resume', listener: () => void): this; + removeListener(event: string | symbol, listener: (...args: any[]) => void): this; + [Symbol.asyncIterator](): AsyncIterableIterator; + } + interface WritableOptions extends StreamOptions { + decodeStrings?: boolean | undefined; + defaultEncoding?: BufferEncoding | undefined; + write?(this: Writable, chunk: any, encoding: BufferEncoding, callback: (error?: Error | null) => void): void; + writev?( + this: Writable, + chunks: Array<{ + chunk: any; + encoding: BufferEncoding; + }>, + callback: (error?: Error | null) => void + ): void; + final?(this: Writable, callback: (error?: Error | null) => void): void; + } + /** + * @since v0.9.4 + */ + class Writable extends Stream implements NodeJS.WritableStream { + /** + * A utility method for creating a `Writable` from a web `WritableStream`. + * @since v17.0.0 + * @experimental + */ + static fromWeb(writableStream: streamWeb.WritableStream, options?: Pick): Writable; + /** + * A utility method for creating a web `WritableStream` from a `Writable`. + * @since v17.0.0 + * @experimental + */ + static toWeb(streamWritable: Writable): streamWeb.WritableStream; + /** + * Is `true` if it is safe to call `writable.write()`, which means + * the stream has not been destroyed, errored or ended. + * @since v11.4.0 + */ + readonly writable: boolean; + /** + * Is `true` after `writable.end()` has been called. This property + * does not indicate whether the data has been flushed, for this use `writable.writableFinished` instead. + * @since v12.9.0 + */ + readonly writableEnded: boolean; + /** + * Is set to `true` immediately before the `'finish'` event is emitted. + * @since v12.6.0 + */ + readonly writableFinished: boolean; + /** + * Return the value of `highWaterMark` passed when creating this `Writable`. + * @since v9.3.0 + */ + readonly writableHighWaterMark: number; + /** + * This property contains the number of bytes (or objects) in the queue + * ready to be written. The value provides introspection data regarding + * the status of the `highWaterMark`. + * @since v9.4.0 + */ + readonly writableLength: number; + /** + * Getter for the property `objectMode` of a given `Writable` stream. + * @since v12.3.0 + */ + readonly writableObjectMode: boolean; + /** + * Number of times `writable.uncork()` needs to be + * called in order to fully uncork the stream. + * @since v13.2.0, v12.16.0 + */ + readonly writableCorked: number; + /** + * Is `true` after `writable.destroy()` has been called. + * @since v8.0.0 + */ + destroyed: boolean; + /** + * Is true after 'close' has been emitted. + * @since v8.0.0 + */ + readonly closed: boolean; + /** + * Returns error if the stream has been destroyed with an error. + * @since v18.0.0 + */ + readonly errored: Error | null; + /** + * Is `true` if the stream's buffer has been full and stream will emit 'drain'. 
+ * @since v15.2.0, v14.17.0 + */ + readonly writableNeedDrain: boolean; + constructor(opts?: WritableOptions); + _write(chunk: any, encoding: BufferEncoding, callback: (error?: Error | null) => void): void; + _writev?( + chunks: Array<{ + chunk: any; + encoding: BufferEncoding; + }>, + callback: (error?: Error | null) => void + ): void; + _construct?(callback: (error?: Error | null) => void): void; + _destroy(error: Error | null, callback: (error?: Error | null) => void): void; + _final(callback: (error?: Error | null) => void): void; + /** + * The `writable.write()` method writes some data to the stream, and calls the + * supplied `callback` once the data has been fully handled. If an error + * occurs, the `callback` will be called with the error as its + * first argument. The `callback` is called asynchronously and before `'error'` is + * emitted. + * + * The return value is `true` if the internal buffer is less than the`highWaterMark` configured when the stream was created after admitting `chunk`. + * If `false` is returned, further attempts to write data to the stream should + * stop until the `'drain'` event is emitted. + * + * While a stream is not draining, calls to `write()` will buffer `chunk`, and + * return false. Once all currently buffered chunks are drained (accepted for + * delivery by the operating system), the `'drain'` event will be emitted. + * Once `write()` returns false, do not write more chunks + * until the `'drain'` event is emitted. While calling `write()` on a stream that + * is not draining is allowed, Node.js will buffer all written chunks until + * maximum memory usage occurs, at which point it will abort unconditionally. + * Even before it aborts, high memory usage will cause poor garbage collector + * performance and high RSS (which is not typically released back to the system, + * even after the memory is no longer required). Since TCP sockets may never + * drain if the remote peer does not read the data, writing a socket that is + * not draining may lead to a remotely exploitable vulnerability. + * + * Writing data while the stream is not draining is particularly + * problematic for a `Transform`, because the `Transform` streams are paused + * by default until they are piped or a `'data'` or `'readable'` event handler + * is added. + * + * If the data to be written can be generated or fetched on demand, it is + * recommended to encapsulate the logic into a `Readable` and use {@link pipe}. However, if calling `write()` is preferred, it is + * possible to respect backpressure and avoid memory issues using the `'drain'` event: + * + * ```js + * function write(data, cb) { + * if (!stream.write(data)) { + * stream.once('drain', cb); + * } else { + * process.nextTick(cb); + * } + * } + * + * // Wait for cb to be called before doing any other write. + * write('hello', () => { + * console.log('Write completed, do more writes now.'); + * }); + * ``` + * + * A `Writable` stream in object mode will always ignore the `encoding` argument. + * @since v0.9.4 + * @param chunk Optional data to write. For streams not operating in object mode, `chunk` must be a string, `Buffer` or `Uint8Array`. For object mode streams, `chunk` may be any + * JavaScript value other than `null`. + * @param [encoding='utf8'] The encoding, if `chunk` is a string. + * @param callback Callback for when this chunk of data is flushed. 
+ * @return `false` if the stream wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`. + */ + write(chunk: any, callback?: (error: Error | null | undefined) => void): boolean; + write(chunk: any, encoding: BufferEncoding, callback?: (error: Error | null | undefined) => void): boolean; + /** + * The `writable.setDefaultEncoding()` method sets the default `encoding` for a `Writable` stream. + * @since v0.11.15 + * @param encoding The new default encoding + */ + setDefaultEncoding(encoding: BufferEncoding): this; + /** + * Calling the `writable.end()` method signals that no more data will be written + * to the `Writable`. The optional `chunk` and `encoding` arguments allow one + * final additional chunk of data to be written immediately before closing the + * stream. + * + * Calling the {@link write} method after calling {@link end} will raise an error. + * + * ```js + * // Write 'hello, ' and then end with 'world!'. + * const fs = require('fs'); + * const file = fs.createWriteStream('example.txt'); + * file.write('hello, '); + * file.end('world!'); + * // Writing more now is not allowed! + * ``` + * @since v0.9.4 + * @param chunk Optional data to write. For streams not operating in object mode, `chunk` must be a string, `Buffer` or `Uint8Array`. For object mode streams, `chunk` may be any + * JavaScript value other than `null`. + * @param encoding The encoding if `chunk` is a string + * @param callback Callback for when the stream is finished. + */ + end(cb?: () => void): this; + end(chunk: any, cb?: () => void): this; + end(chunk: any, encoding: BufferEncoding, cb?: () => void): this; + /** + * The `writable.cork()` method forces all written data to be buffered in memory. + * The buffered data will be flushed when either the {@link uncork} or {@link end} methods are called. + * + * The primary intent of `writable.cork()` is to accommodate a situation in which + * several small chunks are written to the stream in rapid succession. Instead of + * immediately forwarding them to the underlying destination, `writable.cork()`buffers all the chunks until `writable.uncork()` is called, which will pass them + * all to `writable._writev()`, if present. This prevents a head-of-line blocking + * situation where data is being buffered while waiting for the first small chunk + * to be processed. However, use of `writable.cork()` without implementing`writable._writev()` may have an adverse effect on throughput. + * + * See also: `writable.uncork()`, `writable._writev()`. + * @since v0.11.2 + */ + cork(): void; + /** + * The `writable.uncork()` method flushes all data buffered since {@link cork} was called. + * + * When using `writable.cork()` and `writable.uncork()` to manage the buffering + * of writes to a stream, defer calls to `writable.uncork()` using`process.nextTick()`. Doing so allows batching of all`writable.write()` calls that occur within a given Node.js event + * loop phase. + * + * ```js + * stream.cork(); + * stream.write('some '); + * stream.write('data '); + * process.nextTick(() => stream.uncork()); + * ``` + * + * If the `writable.cork()` method is called multiple times on a stream, the + * same number of calls to `writable.uncork()` must be called to flush the buffered + * data. 
+ * + * ```js + * stream.cork(); + * stream.write('some '); + * stream.cork(); + * stream.write('data '); + * process.nextTick(() => { + * stream.uncork(); + * // The data will not be flushed until uncork() is called a second time. + * stream.uncork(); + * }); + * ``` + * + * See also: `writable.cork()`. + * @since v0.11.2 + */ + uncork(): void; + /** + * Destroy the stream. Optionally emit an `'error'` event, and emit a `'close'`event (unless `emitClose` is set to `false`). After this call, the writable + * stream has ended and subsequent calls to `write()` or `end()` will result in + * an `ERR_STREAM_DESTROYED` error. + * This is a destructive and immediate way to destroy a stream. Previous calls to`write()` may not have drained, and may trigger an `ERR_STREAM_DESTROYED` error. + * Use `end()` instead of destroy if data should flush before close, or wait for + * the `'drain'` event before destroying the stream. + * + * Once `destroy()` has been called any further calls will be a no-op and no + * further errors except from `_destroy()` may be emitted as `'error'`. + * + * Implementors should not override this method, + * but instead implement `writable._destroy()`. + * @since v8.0.0 + * @param error Optional, an error to emit with `'error'` event. + */ + destroy(error?: Error): this; + /** + * Event emitter + * The defined events on documents including: + * 1. close + * 2. drain + * 3. error + * 4. finish + * 5. pipe + * 6. unpipe + */ + addListener(event: 'close', listener: () => void): this; + addListener(event: 'drain', listener: () => void): this; + addListener(event: 'error', listener: (err: Error) => void): this; + addListener(event: 'finish', listener: () => void): this; + addListener(event: 'pipe', listener: (src: Readable) => void): this; + addListener(event: 'unpipe', listener: (src: Readable) => void): this; + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + emit(event: 'close'): boolean; + emit(event: 'drain'): boolean; + emit(event: 'error', err: Error): boolean; + emit(event: 'finish'): boolean; + emit(event: 'pipe', src: Readable): boolean; + emit(event: 'unpipe', src: Readable): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: 'close', listener: () => void): this; + on(event: 'drain', listener: () => void): this; + on(event: 'error', listener: (err: Error) => void): this; + on(event: 'finish', listener: () => void): this; + on(event: 'pipe', listener: (src: Readable) => void): this; + on(event: 'unpipe', listener: (src: Readable) => void): this; + on(event: string | symbol, listener: (...args: any[]) => void): this; + once(event: 'close', listener: () => void): this; + once(event: 'drain', listener: () => void): this; + once(event: 'error', listener: (err: Error) => void): this; + once(event: 'finish', listener: () => void): this; + once(event: 'pipe', listener: (src: Readable) => void): this; + once(event: 'unpipe', listener: (src: Readable) => void): this; + once(event: string | symbol, listener: (...args: any[]) => void): this; + prependListener(event: 'close', listener: () => void): this; + prependListener(event: 'drain', listener: () => void): this; + prependListener(event: 'error', listener: (err: Error) => void): this; + prependListener(event: 'finish', listener: () => void): this; + prependListener(event: 'pipe', listener: (src: Readable) => void): this; + prependListener(event: 'unpipe', listener: (src: Readable) => void): this; + prependListener(event: string | symbol, listener: (...args: any[]) => 
void): this; + prependOnceListener(event: 'close', listener: () => void): this; + prependOnceListener(event: 'drain', listener: () => void): this; + prependOnceListener(event: 'error', listener: (err: Error) => void): this; + prependOnceListener(event: 'finish', listener: () => void): this; + prependOnceListener(event: 'pipe', listener: (src: Readable) => void): this; + prependOnceListener(event: 'unpipe', listener: (src: Readable) => void): this; + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; + removeListener(event: 'close', listener: () => void): this; + removeListener(event: 'drain', listener: () => void): this; + removeListener(event: 'error', listener: (err: Error) => void): this; + removeListener(event: 'finish', listener: () => void): this; + removeListener(event: 'pipe', listener: (src: Readable) => void): this; + removeListener(event: 'unpipe', listener: (src: Readable) => void): this; + removeListener(event: string | symbol, listener: (...args: any[]) => void): this; + } + interface DuplexOptions extends ReadableOptions, WritableOptions { + allowHalfOpen?: boolean | undefined; + readableObjectMode?: boolean | undefined; + writableObjectMode?: boolean | undefined; + readableHighWaterMark?: number | undefined; + writableHighWaterMark?: number | undefined; + writableCorked?: number | undefined; + construct?(this: Duplex, callback: (error?: Error | null) => void): void; + read?(this: Duplex, size: number): void; + write?(this: Duplex, chunk: any, encoding: BufferEncoding, callback: (error?: Error | null) => void): void; + writev?( + this: Duplex, + chunks: Array<{ + chunk: any; + encoding: BufferEncoding; + }>, + callback: (error?: Error | null) => void + ): void; + final?(this: Duplex, callback: (error?: Error | null) => void): void; + destroy?(this: Duplex, error: Error | null, callback: (error: Error | null) => void): void; + } + /** + * Duplex streams are streams that implement both the `Readable` and `Writable` interfaces. + * + * Examples of `Duplex` streams include: + * + * * `TCP sockets` + * * `zlib streams` + * * `crypto streams` + * @since v0.9.4 + */ + class Duplex extends Readable implements Writable { + readonly writable: boolean; + readonly writableEnded: boolean; + readonly writableFinished: boolean; + readonly writableHighWaterMark: number; + readonly writableLength: number; + readonly writableObjectMode: boolean; + readonly writableCorked: number; + readonly writableNeedDrain: boolean; + readonly closed: boolean; + readonly errored: Error | null; + /** + * If `false` then the stream will automatically end the writable side when the + * readable side ends. Set initially by the `allowHalfOpen` constructor option, + * which defaults to `false`. + * + * This can be changed manually to change the half-open behavior of an existing`Duplex` stream instance, but must be changed before the `'end'` event is + * emitted. + * @since v0.9.4 + */ + allowHalfOpen: boolean; + constructor(opts?: DuplexOptions); + /** + * A utility method for creating duplex streams. + * + * - `Stream` converts writable stream into writable `Duplex` and readable stream + * to `Duplex`. + * - `Blob` converts into readable `Duplex`. + * - `string` converts into readable `Duplex`. + * - `ArrayBuffer` converts into readable `Duplex`. + * - `AsyncIterable` converts into a readable `Duplex`. Cannot yield `null`. + * - `AsyncGeneratorFunction` converts into a readable/writable transform + * `Duplex`. Must take a source `AsyncIterable` as first parameter. 
Cannot yield + * `null`. + * - `AsyncFunction` converts into a writable `Duplex`. Must return + * either `null` or `undefined` + * - `Object ({ writable, readable })` converts `readable` and + * `writable` into `Stream` and then combines them into `Duplex` where the + * `Duplex` will write to the `writable` and read from the `readable`. + * - `Promise` converts into readable `Duplex`. Value `null` is ignored. + * + * @since v16.8.0 + */ + static from(src: Stream | NodeBlob | ArrayBuffer | string | Iterable | AsyncIterable | AsyncGeneratorFunction | Promise | Object): Duplex; + _write(chunk: any, encoding: BufferEncoding, callback: (error?: Error | null) => void): void; + _writev?( + chunks: Array<{ + chunk: any; + encoding: BufferEncoding; + }>, + callback: (error?: Error | null) => void + ): void; + _destroy(error: Error | null, callback: (error: Error | null) => void): void; + _final(callback: (error?: Error | null) => void): void; + write(chunk: any, encoding?: BufferEncoding, cb?: (error: Error | null | undefined) => void): boolean; + write(chunk: any, cb?: (error: Error | null | undefined) => void): boolean; + setDefaultEncoding(encoding: BufferEncoding): this; + end(cb?: () => void): this; + end(chunk: any, cb?: () => void): this; + end(chunk: any, encoding?: BufferEncoding, cb?: () => void): this; + cork(): void; + uncork(): void; + } + type TransformCallback = (error?: Error | null, data?: any) => void; + interface TransformOptions extends DuplexOptions { + construct?(this: Transform, callback: (error?: Error | null) => void): void; + read?(this: Transform, size: number): void; + write?(this: Transform, chunk: any, encoding: BufferEncoding, callback: (error?: Error | null) => void): void; + writev?( + this: Transform, + chunks: Array<{ + chunk: any; + encoding: BufferEncoding; + }>, + callback: (error?: Error | null) => void + ): void; + final?(this: Transform, callback: (error?: Error | null) => void): void; + destroy?(this: Transform, error: Error | null, callback: (error: Error | null) => void): void; + transform?(this: Transform, chunk: any, encoding: BufferEncoding, callback: TransformCallback): void; + flush?(this: Transform, callback: TransformCallback): void; + } + /** + * Transform streams are `Duplex` streams where the output is in some way + * related to the input. Like all `Duplex` streams, `Transform` streams + * implement both the `Readable` and `Writable` interfaces. + * + * Examples of `Transform` streams include: + * + * * `zlib streams` + * * `crypto streams` + * @since v0.9.4 + */ + class Transform extends Duplex { + constructor(opts?: TransformOptions); + _transform(chunk: any, encoding: BufferEncoding, callback: TransformCallback): void; + _flush(callback: TransformCallback): void; + } + /** + * The `stream.PassThrough` class is a trivial implementation of a `Transform` stream that simply passes the input bytes across to the output. Its purpose is + * primarily for examples and testing, but there are some use cases where`stream.PassThrough` is useful as a building block for novel sorts of streams. + */ + class PassThrough extends Transform {} + /** + * Attaches an AbortSignal to a readable or writeable stream. This lets code + * control stream destruction using an `AbortController`. + * + * Calling `abort` on the `AbortController` corresponding to the passed`AbortSignal` will behave the same way as calling `.destroy(new AbortError())`on the stream. 
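Editor's sketch (not part of the vendored typings): a minimal `Transform` subclass implementing `_transform()` as declared above; the uppercase behaviour is just an example.

```js
const { Transform } = require('stream');

class Upper extends Transform {
  _transform(chunk, encoding, callback) {
    // Pass the transformed data downstream and signal that this chunk is done.
    callback(null, chunk.toString().toUpperCase());
  }
}

process.stdin.pipe(new Upper()).pipe(process.stdout);
```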
+ * + * ```js + * const fs = require('fs'); + * + * const controller = new AbortController(); + * const read = addAbortSignal( + * controller.signal, + * fs.createReadStream(('object.json')) + * ); + * // Later, abort the operation closing the stream + * controller.abort(); + * ``` + * + * Or using an `AbortSignal` with a readable stream as an async iterable: + * + * ```js + * const controller = new AbortController(); + * setTimeout(() => controller.abort(), 10_000); // set a timeout + * const stream = addAbortSignal( + * controller.signal, + * fs.createReadStream(('object.json')) + * ); + * (async () => { + * try { + * for await (const chunk of stream) { + * await process(chunk); + * } + * } catch (e) { + * if (e.name === 'AbortError') { + * // The operation was cancelled + * } else { + * throw e; + * } + * } + * })(); + * ``` + * @since v15.4.0 + * @param signal A signal representing possible cancellation + * @param stream a stream to attach a signal to + */ + function addAbortSignal(signal: AbortSignal, stream: T): T; + interface FinishedOptions extends Abortable { + error?: boolean | undefined; + readable?: boolean | undefined; + writable?: boolean | undefined; + } + /** + * A function to get notified when a stream is no longer readable, writable + * or has experienced an error or a premature close event. + * + * ```js + * const { finished } = require('stream'); + * + * const rs = fs.createReadStream('archive.tar'); + * + * finished(rs, (err) => { + * if (err) { + * console.error('Stream failed.', err); + * } else { + * console.log('Stream is done reading.'); + * } + * }); + * + * rs.resume(); // Drain the stream. + * ``` + * + * Especially useful in error handling scenarios where a stream is destroyed + * prematurely (like an aborted HTTP request), and will not emit `'end'`or `'finish'`. + * + * The `finished` API provides promise version: + * + * ```js + * const { finished } = require('stream/promises'); + * + * const rs = fs.createReadStream('archive.tar'); + * + * async function run() { + * await finished(rs); + * console.log('Stream is done reading.'); + * } + * + * run().catch(console.error); + * rs.resume(); // Drain the stream. + * ``` + * + * `stream.finished()` leaves dangling event listeners (in particular`'error'`, `'end'`, `'finish'` and `'close'`) after `callback` has been + * invoked. The reason for this is so that unexpected `'error'` events (due to + * incorrect stream implementations) do not cause unexpected crashes. + * If this is unwanted behavior then the returned cleanup function needs to be + * invoked in the callback: + * + * ```js + * const cleanup = finished(rs, (err) => { + * cleanup(); + * // ... + * }); + * ``` + * @since v10.0.0 + * @param stream A readable and/or writable stream. + * @param callback A callback function that takes an optional error argument. + * @return A cleanup function which removes all registered listeners. 
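Editor's sketch (not part of the vendored typings): using the promise form of `finished()` together with the `FinishedOptions` declared above, waiting only for the readable side and aborting via a signal. The file name and timeout are hypothetical.

```js
const { finished } = require('stream/promises');
const fs = require('fs');

async function run() {
  const rs = fs.createReadStream('archive.tar'); // hypothetical input file
  rs.resume(); // drain the stream so it can actually finish

  const ac = new AbortController();
  setTimeout(() => ac.abort(), 30_000).unref(); // give up after 30 seconds

  // Wait for the readable side only; rejects early if the signal aborts.
  await finished(rs, { readable: true, writable: false, signal: ac.signal });
  console.log('Stream is done reading.');
}

run().catch(console.error);
```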
+ */ + function finished(stream: NodeJS.ReadableStream | NodeJS.WritableStream | NodeJS.ReadWriteStream, options: FinishedOptions, callback: (err?: NodeJS.ErrnoException | null) => void): () => void; + function finished(stream: NodeJS.ReadableStream | NodeJS.WritableStream | NodeJS.ReadWriteStream, callback: (err?: NodeJS.ErrnoException | null) => void): () => void; + namespace finished { + function __promisify__(stream: NodeJS.ReadableStream | NodeJS.WritableStream | NodeJS.ReadWriteStream, options?: FinishedOptions): Promise; + } + type PipelineSourceFunction = () => Iterable | AsyncIterable; + type PipelineSource = Iterable | AsyncIterable | NodeJS.ReadableStream | PipelineSourceFunction; + type PipelineTransform, U> = + | NodeJS.ReadWriteStream + | ((source: S extends (...args: any[]) => Iterable | AsyncIterable ? AsyncIterable : S) => AsyncIterable); + type PipelineTransformSource = PipelineSource | PipelineTransform; + type PipelineDestinationIterableFunction = (source: AsyncIterable) => AsyncIterable; + type PipelineDestinationPromiseFunction = (source: AsyncIterable) => Promise
<P>;
+    type PipelineDestination<S extends PipelineTransformSource<any>, P> = S extends PipelineTransformSource<infer ST>
+        ? NodeJS.WritableStream | PipelineDestinationIterableFunction<ST> | PipelineDestinationPromiseFunction<ST, P>
+        : never;
+    type PipelineCallback<S extends PipelineDestination<any, any>> = S extends PipelineDestinationPromiseFunction<any, infer P>
+        ? (err: NodeJS.ErrnoException | null, value: P) => void
+        : (err: NodeJS.ErrnoException | null) => void;
+    type PipelinePromise<S extends PipelineDestination<any, any>> = S extends PipelineDestinationPromiseFunction<any, infer P> ? Promise<P>
: Promise; + interface PipelineOptions { + signal: AbortSignal; + } + /** + * A module method to pipe between streams and generators forwarding errors and + * properly cleaning up and provide a callback when the pipeline is complete. + * + * ```js + * const { pipeline } = require('stream'); + * const fs = require('fs'); + * const zlib = require('zlib'); + * + * // Use the pipeline API to easily pipe a series of streams + * // together and get notified when the pipeline is fully done. + * + * // A pipeline to gzip a potentially huge tar file efficiently: + * + * pipeline( + * fs.createReadStream('archive.tar'), + * zlib.createGzip(), + * fs.createWriteStream('archive.tar.gz'), + * (err) => { + * if (err) { + * console.error('Pipeline failed.', err); + * } else { + * console.log('Pipeline succeeded.'); + * } + * } + * ); + * ``` + * + * The `pipeline` API provides a promise version, which can also + * receive an options argument as the last parameter with a`signal` `AbortSignal` property. When the signal is aborted,`destroy` will be called on the underlying pipeline, with + * an`AbortError`. + * + * ```js + * const { pipeline } = require('stream/promises'); + * + * async function run() { + * await pipeline( + * fs.createReadStream('archive.tar'), + * zlib.createGzip(), + * fs.createWriteStream('archive.tar.gz') + * ); + * console.log('Pipeline succeeded.'); + * } + * + * run().catch(console.error); + * ``` + * + * To use an `AbortSignal`, pass it inside an options object, + * as the last argument: + * + * ```js + * const { pipeline } = require('stream/promises'); + * + * async function run() { + * const ac = new AbortController(); + * const signal = ac.signal; + * + * setTimeout(() => ac.abort(), 1); + * await pipeline( + * fs.createReadStream('archive.tar'), + * zlib.createGzip(), + * fs.createWriteStream('archive.tar.gz'), + * { signal }, + * ); + * } + * + * run().catch(console.error); // AbortError + * ``` + * + * The `pipeline` API also supports async generators: + * + * ```js + * const { pipeline } = require('stream/promises'); + * const fs = require('fs'); + * + * async function run() { + * await pipeline( + * fs.createReadStream('lowercase.txt'), + * async function* (source, { signal }) { + * source.setEncoding('utf8'); // Work with strings rather than `Buffer`s. + * for await (const chunk of source) { + * yield await processChunk(chunk, { signal }); + * } + * }, + * fs.createWriteStream('uppercase.txt') + * ); + * console.log('Pipeline succeeded.'); + * } + * + * run().catch(console.error); + * ``` + * + * Remember to handle the `signal` argument passed into the async generator. + * Especially in the case where the async generator is the source for the + * pipeline (i.e. first argument) or the pipeline will never complete. + * + * ```js + * const { pipeline } = require('stream/promises'); + * const fs = require('fs'); + * + * async function run() { + * await pipeline( + * async function* ({ signal }) { + * await someLongRunningfn({ signal }); + * yield 'asd'; + * }, + * fs.createWriteStream('uppercase.txt') + * ); + * console.log('Pipeline succeeded.'); + * } + * + * run().catch(console.error); + * ``` + * + * `stream.pipeline()` will call `stream.destroy(err)` on all streams except: + * + * * `Readable` streams which have emitted `'end'` or `'close'`. + * * `Writable` streams which have emitted `'finish'` or `'close'`. + * + * `stream.pipeline()` leaves dangling event listeners on the streams + * after the `callback` has been invoked. 
In the case of reuse of streams after + * failure, this can cause event listener leaks and swallowed errors. If the last + * stream is readable, dangling event listeners will be removed so that the last + * stream can be consumed later. + * + * `stream.pipeline()` closes all the streams when an error is raised. + * The `IncomingRequest` usage with `pipeline` could lead to an unexpected behavior + * once it would destroy the socket without sending the expected response. + * See the example below: + * + * ```js + * const fs = require('fs'); + * const http = require('http'); + * const { pipeline } = require('stream'); + * + * const server = http.createServer((req, res) => { + * const fileStream = fs.createReadStream('./fileNotExist.txt'); + * pipeline(fileStream, res, (err) => { + * if (err) { + * console.log(err); // No such file + * // this message can't be sent once `pipeline` already destroyed the socket + * return res.end('error!!!'); + * } + * }); + * }); + * ``` + * @since v10.0.0 + * @param callback Called when the pipeline is fully done. + */ + function pipeline, B extends PipelineDestination>( + source: A, + destination: B, + callback?: PipelineCallback + ): B extends NodeJS.WritableStream ? B : NodeJS.WritableStream; + function pipeline, T1 extends PipelineTransform, B extends PipelineDestination>( + source: A, + transform1: T1, + destination: B, + callback?: PipelineCallback + ): B extends NodeJS.WritableStream ? B : NodeJS.WritableStream; + function pipeline, T1 extends PipelineTransform, T2 extends PipelineTransform, B extends PipelineDestination>( + source: A, + transform1: T1, + transform2: T2, + destination: B, + callback?: PipelineCallback + ): B extends NodeJS.WritableStream ? B : NodeJS.WritableStream; + function pipeline< + A extends PipelineSource, + T1 extends PipelineTransform, + T2 extends PipelineTransform, + T3 extends PipelineTransform, + B extends PipelineDestination + >(source: A, transform1: T1, transform2: T2, transform3: T3, destination: B, callback?: PipelineCallback): B extends NodeJS.WritableStream ? B : NodeJS.WritableStream; + function pipeline< + A extends PipelineSource, + T1 extends PipelineTransform, + T2 extends PipelineTransform, + T3 extends PipelineTransform, + T4 extends PipelineTransform, + B extends PipelineDestination + >(source: A, transform1: T1, transform2: T2, transform3: T3, transform4: T4, destination: B, callback?: PipelineCallback): B extends NodeJS.WritableStream ? 
B : NodeJS.WritableStream; + function pipeline( + streams: ReadonlyArray, + callback?: (err: NodeJS.ErrnoException | null) => void + ): NodeJS.WritableStream; + function pipeline( + stream1: NodeJS.ReadableStream, + stream2: NodeJS.ReadWriteStream | NodeJS.WritableStream, + ...streams: Array void)> + ): NodeJS.WritableStream; + namespace pipeline { + function __promisify__, B extends PipelineDestination>(source: A, destination: B, options?: PipelineOptions): PipelinePromise; + function __promisify__, T1 extends PipelineTransform, B extends PipelineDestination>( + source: A, + transform1: T1, + destination: B, + options?: PipelineOptions + ): PipelinePromise; + function __promisify__, T1 extends PipelineTransform, T2 extends PipelineTransform, B extends PipelineDestination>( + source: A, + transform1: T1, + transform2: T2, + destination: B, + options?: PipelineOptions + ): PipelinePromise; + function __promisify__< + A extends PipelineSource, + T1 extends PipelineTransform, + T2 extends PipelineTransform, + T3 extends PipelineTransform, + B extends PipelineDestination + >(source: A, transform1: T1, transform2: T2, transform3: T3, destination: B, options?: PipelineOptions): PipelinePromise; + function __promisify__< + A extends PipelineSource, + T1 extends PipelineTransform, + T2 extends PipelineTransform, + T3 extends PipelineTransform, + T4 extends PipelineTransform, + B extends PipelineDestination + >(source: A, transform1: T1, transform2: T2, transform3: T3, transform4: T4, destination: B, options?: PipelineOptions): PipelinePromise; + function __promisify__(streams: ReadonlyArray, options?: PipelineOptions): Promise; + function __promisify__( + stream1: NodeJS.ReadableStream, + stream2: NodeJS.ReadWriteStream | NodeJS.WritableStream, + ...streams: Array + ): Promise; + } + interface Pipe { + close(): void; + hasRef(): boolean; + ref(): void; + unref(): void; + } + + /** + * Returns whether the stream has encountered an error. + * @since v17.3.0 + */ + function isErrored(stream: Readable | Writable | NodeJS.ReadableStream | NodeJS.WritableStream): boolean; + + /** + * Returns whether the stream is readable. 
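Editor's sketch (not part of the vendored typings): guarding consumption with the `isReadable()` and `isErrored()` helpers declared here.

```js
const { Readable, isReadable, isErrored } = require('stream');

const source = Readable.from(['a', 'b', 'c']);

if (isReadable(source) && !isErrored(source)) {
  source.on('data', (chunk) => console.log('chunk:', chunk));
}
```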
+ * @since v17.4.0 + */ + function isReadable(stream: Readable | NodeJS.ReadableStream): boolean; + + const promises: typeof streamPromises; + const consumers: typeof streamConsumers; + } + export = internal; +} +declare module 'node:stream' { + import stream = require('stream'); + export = stream; +} diff --git a/node_modules/@types/node/ts4.8/stream/consumers.d.ts b/node_modules/@types/node/ts4.8/stream/consumers.d.ts new file mode 100755 index 0000000..1ebf12e --- /dev/null +++ b/node_modules/@types/node/ts4.8/stream/consumers.d.ts @@ -0,0 +1,12 @@ +declare module 'stream/consumers' { + import { Blob as NodeBlob } from "node:buffer"; + import { Readable } from 'node:stream'; + function buffer(stream: NodeJS.ReadableStream | Readable | AsyncIterator): Promise; + function text(stream: NodeJS.ReadableStream | Readable | AsyncIterator): Promise; + function arrayBuffer(stream: NodeJS.ReadableStream | Readable | AsyncIterator): Promise; + function blob(stream: NodeJS.ReadableStream | Readable | AsyncIterator): Promise; + function json(stream: NodeJS.ReadableStream | Readable | AsyncIterator): Promise; +} +declare module 'node:stream/consumers' { + export * from 'stream/consumers'; +} diff --git a/node_modules/@types/node/ts4.8/stream/promises.d.ts b/node_modules/@types/node/ts4.8/stream/promises.d.ts new file mode 100755 index 0000000..b427073 --- /dev/null +++ b/node_modules/@types/node/ts4.8/stream/promises.d.ts @@ -0,0 +1,42 @@ +declare module 'stream/promises' { + import { FinishedOptions, PipelineSource, PipelineTransform, PipelineDestination, PipelinePromise, PipelineOptions } from 'node:stream'; + function finished(stream: NodeJS.ReadableStream | NodeJS.WritableStream | NodeJS.ReadWriteStream, options?: FinishedOptions): Promise; + function pipeline, B extends PipelineDestination>(source: A, destination: B, options?: PipelineOptions): PipelinePromise; + function pipeline, T1 extends PipelineTransform, B extends PipelineDestination>( + source: A, + transform1: T1, + destination: B, + options?: PipelineOptions + ): PipelinePromise; + function pipeline, T1 extends PipelineTransform, T2 extends PipelineTransform, B extends PipelineDestination>( + source: A, + transform1: T1, + transform2: T2, + destination: B, + options?: PipelineOptions + ): PipelinePromise; + function pipeline< + A extends PipelineSource, + T1 extends PipelineTransform, + T2 extends PipelineTransform, + T3 extends PipelineTransform, + B extends PipelineDestination + >(source: A, transform1: T1, transform2: T2, transform3: T3, destination: B, options?: PipelineOptions): PipelinePromise; + function pipeline< + A extends PipelineSource, + T1 extends PipelineTransform, + T2 extends PipelineTransform, + T3 extends PipelineTransform, + T4 extends PipelineTransform, + B extends PipelineDestination + >(source: A, transform1: T1, transform2: T2, transform3: T3, transform4: T4, destination: B, options?: PipelineOptions): PipelinePromise; + function pipeline(streams: ReadonlyArray, options?: PipelineOptions): Promise; + function pipeline( + stream1: NodeJS.ReadableStream, + stream2: NodeJS.ReadWriteStream | NodeJS.WritableStream, + ...streams: Array + ): Promise; +} +declare module 'node:stream/promises' { + export * from 'stream/promises'; +} diff --git a/node_modules/@types/node/ts4.8/stream/web.d.ts b/node_modules/@types/node/ts4.8/stream/web.d.ts new file mode 100755 index 0000000..f9ef057 --- /dev/null +++ b/node_modules/@types/node/ts4.8/stream/web.d.ts @@ -0,0 +1,330 @@ +declare module 'stream/web' { + // stub module, 
pending copy&paste from .d.ts or manual impl + // copy from lib.dom.d.ts + interface ReadableWritablePair { + readable: ReadableStream; + /** + * Provides a convenient, chainable way of piping this readable stream + * through a transform stream (or any other { writable, readable } + * pair). It simply pipes the stream into the writable side of the + * supplied pair, and returns the readable side for further use. + * + * Piping a stream will lock it for the duration of the pipe, preventing + * any other consumer from acquiring a reader. + */ + writable: WritableStream; + } + interface StreamPipeOptions { + preventAbort?: boolean; + preventCancel?: boolean; + /** + * Pipes this readable stream to a given writable stream destination. + * The way in which the piping process behaves under various error + * conditions can be customized with a number of passed options. It + * returns a promise that fulfills when the piping process completes + * successfully, or rejects if any errors were encountered. + * + * Piping a stream will lock it for the duration of the pipe, preventing + * any other consumer from acquiring a reader. + * + * Errors and closures of the source and destination streams propagate + * as follows: + * + * An error in this source readable stream will abort destination, + * unless preventAbort is truthy. The returned promise will be rejected + * with the source's error, or with any error that occurs during + * aborting the destination. + * + * An error in destination will cancel this source readable stream, + * unless preventCancel is truthy. The returned promise will be rejected + * with the destination's error, or with any error that occurs during + * canceling the source. + * + * When this source readable stream closes, destination will be closed, + * unless preventClose is truthy. The returned promise will be fulfilled + * once this process completes, unless an error is encountered while + * closing the destination, in which case it will be rejected with that + * error. + * + * If destination starts out closed or closing, this source readable + * stream will be canceled, unless preventCancel is true. The returned + * promise will be rejected with an error indicating piping to a closed + * stream failed, or with any error that occurs during canceling the + * source. + * + * The signal option can be set to an AbortSignal to allow aborting an + * ongoing pipe operation via the corresponding AbortController. In this + * case, this source readable stream will be canceled, and destination + * aborted, unless the respective options preventCancel or preventAbort + * are set. 
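Editor's sketch (not part of the vendored typings): piping a web `ReadableStream` through a `TransformStream` and into a `WritableStream`, matching the `pipeThrough()`/`pipeTo()` semantics described above.

```js
const { ReadableStream, TransformStream, WritableStream } = require('stream/web');

const source = new ReadableStream({
  start(controller) {
    controller.enqueue('hello');
    controller.enqueue('world');
    controller.close();
  },
});

const upper = new TransformStream({
  transform(chunk, controller) {
    controller.enqueue(chunk.toUpperCase());
  },
});

const sink = new WritableStream({
  write(chunk) {
    console.log('received:', chunk);
  },
});

source.pipeThrough(upper).pipeTo(sink).then(() => console.log('done'));
```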
+ */ + preventClose?: boolean; + signal?: AbortSignal; + } + interface ReadableStreamGenericReader { + readonly closed: Promise; + cancel(reason?: any): Promise; + } + interface ReadableStreamDefaultReadValueResult { + done: false; + value: T; + } + interface ReadableStreamDefaultReadDoneResult { + done: true; + value?: undefined; + } + type ReadableStreamController = ReadableStreamDefaultController; + type ReadableStreamDefaultReadResult = ReadableStreamDefaultReadValueResult | ReadableStreamDefaultReadDoneResult; + interface ReadableByteStreamControllerCallback { + (controller: ReadableByteStreamController): void | PromiseLike; + } + interface UnderlyingSinkAbortCallback { + (reason?: any): void | PromiseLike; + } + interface UnderlyingSinkCloseCallback { + (): void | PromiseLike; + } + interface UnderlyingSinkStartCallback { + (controller: WritableStreamDefaultController): any; + } + interface UnderlyingSinkWriteCallback { + (chunk: W, controller: WritableStreamDefaultController): void | PromiseLike; + } + interface UnderlyingSourceCancelCallback { + (reason?: any): void | PromiseLike; + } + interface UnderlyingSourcePullCallback { + (controller: ReadableStreamController): void | PromiseLike; + } + interface UnderlyingSourceStartCallback { + (controller: ReadableStreamController): any; + } + interface TransformerFlushCallback { + (controller: TransformStreamDefaultController): void | PromiseLike; + } + interface TransformerStartCallback { + (controller: TransformStreamDefaultController): any; + } + interface TransformerTransformCallback { + (chunk: I, controller: TransformStreamDefaultController): void | PromiseLike; + } + interface UnderlyingByteSource { + autoAllocateChunkSize?: number; + cancel?: ReadableStreamErrorCallback; + pull?: ReadableByteStreamControllerCallback; + start?: ReadableByteStreamControllerCallback; + type: 'bytes'; + } + interface UnderlyingSource { + cancel?: UnderlyingSourceCancelCallback; + pull?: UnderlyingSourcePullCallback; + start?: UnderlyingSourceStartCallback; + type?: undefined; + } + interface UnderlyingSink { + abort?: UnderlyingSinkAbortCallback; + close?: UnderlyingSinkCloseCallback; + start?: UnderlyingSinkStartCallback; + type?: undefined; + write?: UnderlyingSinkWriteCallback; + } + interface ReadableStreamErrorCallback { + (reason: any): void | PromiseLike; + } + /** This Streams API interface represents a readable stream of byte data. 
*/ + interface ReadableStream { + readonly locked: boolean; + cancel(reason?: any): Promise; + getReader(): ReadableStreamDefaultReader; + pipeThrough(transform: ReadableWritablePair, options?: StreamPipeOptions): ReadableStream; + pipeTo(destination: WritableStream, options?: StreamPipeOptions): Promise; + tee(): [ReadableStream, ReadableStream]; + values(options?: { preventCancel?: boolean }): AsyncIterableIterator; + [Symbol.asyncIterator](): AsyncIterableIterator; + } + const ReadableStream: { + prototype: ReadableStream; + new (underlyingSource: UnderlyingByteSource, strategy?: QueuingStrategy): ReadableStream; + new (underlyingSource?: UnderlyingSource, strategy?: QueuingStrategy): ReadableStream; + }; + interface ReadableStreamDefaultReader extends ReadableStreamGenericReader { + read(): Promise>; + releaseLock(): void; + } + const ReadableStreamDefaultReader: { + prototype: ReadableStreamDefaultReader; + new (stream: ReadableStream): ReadableStreamDefaultReader; + }; + const ReadableStreamBYOBReader: any; + const ReadableStreamBYOBRequest: any; + interface ReadableByteStreamController { + readonly byobRequest: undefined; + readonly desiredSize: number | null; + close(): void; + enqueue(chunk: ArrayBufferView): void; + error(error?: any): void; + } + const ReadableByteStreamController: { + prototype: ReadableByteStreamController; + new (): ReadableByteStreamController; + }; + interface ReadableStreamDefaultController { + readonly desiredSize: number | null; + close(): void; + enqueue(chunk?: R): void; + error(e?: any): void; + } + const ReadableStreamDefaultController: { + prototype: ReadableStreamDefaultController; + new (): ReadableStreamDefaultController; + }; + interface Transformer { + flush?: TransformerFlushCallback; + readableType?: undefined; + start?: TransformerStartCallback; + transform?: TransformerTransformCallback; + writableType?: undefined; + } + interface TransformStream { + readonly readable: ReadableStream; + readonly writable: WritableStream; + } + const TransformStream: { + prototype: TransformStream; + new (transformer?: Transformer, writableStrategy?: QueuingStrategy, readableStrategy?: QueuingStrategy): TransformStream; + }; + interface TransformStreamDefaultController { + readonly desiredSize: number | null; + enqueue(chunk?: O): void; + error(reason?: any): void; + terminate(): void; + } + const TransformStreamDefaultController: { + prototype: TransformStreamDefaultController; + new (): TransformStreamDefaultController; + }; + /** + * This Streams API interface provides a standard abstraction for writing + * streaming data to a destination, known as a sink. This object comes with + * built-in back pressure and queuing. + */ + interface WritableStream { + readonly locked: boolean; + abort(reason?: any): Promise; + close(): Promise; + getWriter(): WritableStreamDefaultWriter; + } + const WritableStream: { + prototype: WritableStream; + new (underlyingSink?: UnderlyingSink, strategy?: QueuingStrategy): WritableStream; + }; + /** + * This Streams API interface is the object returned by + * WritableStream.getWriter() and once created locks the < writer to the + * WritableStream ensuring that no other streams can write to the underlying + * sink. 
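Editor's sketch (not part of the vendored typings): acquiring a writer with `getWriter()` and awaiting its `ready` promise before each `write()`, so backpressure from the sink is respected.

```js
const { WritableStream } = require('stream/web');

const stream = new WritableStream({
  write(chunk) {
    console.log('wrote:', chunk);
  },
});

async function send(chunks) {
  const writer = stream.getWriter(); // locks the stream to this writer
  for (const chunk of chunks) {
    await writer.ready;        // wait for backpressure to clear
    await writer.write(chunk);
  }
  await writer.close();
}

send(['a', 'b', 'c']).catch(console.error);
```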
+ */ + interface WritableStreamDefaultWriter { + readonly closed: Promise; + readonly desiredSize: number | null; + readonly ready: Promise; + abort(reason?: any): Promise; + close(): Promise; + releaseLock(): void; + write(chunk?: W): Promise; + } + const WritableStreamDefaultWriter: { + prototype: WritableStreamDefaultWriter; + new (stream: WritableStream): WritableStreamDefaultWriter; + }; + /** + * This Streams API interface represents a controller allowing control of a + * WritableStream's state. When constructing a WritableStream, the + * underlying sink is given a corresponding WritableStreamDefaultController + * instance to manipulate. + */ + interface WritableStreamDefaultController { + error(e?: any): void; + } + const WritableStreamDefaultController: { + prototype: WritableStreamDefaultController; + new (): WritableStreamDefaultController; + }; + interface QueuingStrategy { + highWaterMark?: number; + size?: QueuingStrategySize; + } + interface QueuingStrategySize { + (chunk?: T): number; + } + interface QueuingStrategyInit { + /** + * Creates a new ByteLengthQueuingStrategy with the provided high water + * mark. + * + * Note that the provided high water mark will not be validated ahead of + * time. Instead, if it is negative, NaN, or not a number, the resulting + * ByteLengthQueuingStrategy will cause the corresponding stream + * constructor to throw. + */ + highWaterMark: number; + } + /** + * This Streams API interface provides a built-in byte length queuing + * strategy that can be used when constructing streams. + */ + interface ByteLengthQueuingStrategy extends QueuingStrategy { + readonly highWaterMark: number; + readonly size: QueuingStrategySize; + } + const ByteLengthQueuingStrategy: { + prototype: ByteLengthQueuingStrategy; + new (init: QueuingStrategyInit): ByteLengthQueuingStrategy; + }; + /** + * This Streams API interface provides a built-in byte length queuing + * strategy that can be used when constructing streams. + */ + interface CountQueuingStrategy extends QueuingStrategy { + readonly highWaterMark: number; + readonly size: QueuingStrategySize; + } + const CountQueuingStrategy: { + prototype: CountQueuingStrategy; + new (init: QueuingStrategyInit): CountQueuingStrategy; + }; + interface TextEncoderStream { + /** Returns "utf-8". */ + readonly encoding: 'utf-8'; + readonly readable: ReadableStream; + readonly writable: WritableStream; + readonly [Symbol.toStringTag]: string; + } + const TextEncoderStream: { + prototype: TextEncoderStream; + new (): TextEncoderStream; + }; + interface TextDecoderOptions { + fatal?: boolean; + ignoreBOM?: boolean; + } + type BufferSource = ArrayBufferView | ArrayBuffer; + interface TextDecoderStream { + /** Returns encoding's name, lower cased. */ + readonly encoding: string; + /** Returns `true` if error mode is "fatal", and `false` otherwise. */ + readonly fatal: boolean; + /** Returns `true` if ignore BOM flag is set, and `false` otherwise. 
*/ + readonly ignoreBOM: boolean; + readonly readable: ReadableStream; + readonly writable: WritableStream; + readonly [Symbol.toStringTag]: string; + } + const TextDecoderStream: { + prototype: TextDecoderStream; + new (label?: string, options?: TextDecoderOptions): TextDecoderStream; + }; +} +declare module 'node:stream/web' { + export * from 'stream/web'; +} diff --git a/node_modules/@types/node/ts4.8/string_decoder.d.ts b/node_modules/@types/node/ts4.8/string_decoder.d.ts new file mode 100755 index 0000000..a585804 --- /dev/null +++ b/node_modules/@types/node/ts4.8/string_decoder.d.ts @@ -0,0 +1,67 @@ +/** + * The `string_decoder` module provides an API for decoding `Buffer` objects into + * strings in a manner that preserves encoded multi-byte UTF-8 and UTF-16 + * characters. It can be accessed using: + * + * ```js + * const { StringDecoder } = require('string_decoder'); + * ``` + * + * The following example shows the basic use of the `StringDecoder` class. + * + * ```js + * const { StringDecoder } = require('string_decoder'); + * const decoder = new StringDecoder('utf8'); + * + * const cent = Buffer.from([0xC2, 0xA2]); + * console.log(decoder.write(cent)); + * + * const euro = Buffer.from([0xE2, 0x82, 0xAC]); + * console.log(decoder.write(euro)); + * ``` + * + * When a `Buffer` instance is written to the `StringDecoder` instance, an + * internal buffer is used to ensure that the decoded string does not contain + * any incomplete multibyte characters. These are held in the buffer until the + * next call to `stringDecoder.write()` or until `stringDecoder.end()` is called. + * + * In the following example, the three UTF-8 encoded bytes of the European Euro + * symbol (`€`) are written over three separate operations: + * + * ```js + * const { StringDecoder } = require('string_decoder'); + * const decoder = new StringDecoder('utf8'); + * + * decoder.write(Buffer.from([0xE2])); + * decoder.write(Buffer.from([0x82])); + * console.log(decoder.end(Buffer.from([0xAC]))); + * ``` + * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/string_decoder.js) + */ +declare module 'string_decoder' { + class StringDecoder { + constructor(encoding?: BufferEncoding); + /** + * Returns a decoded string, ensuring that any incomplete multibyte characters at + * the end of the `Buffer`, or `TypedArray`, or `DataView` are omitted from the + * returned string and stored in an internal buffer for the next call to`stringDecoder.write()` or `stringDecoder.end()`. + * @since v0.1.99 + * @param buffer A `Buffer`, or `TypedArray`, or `DataView` containing the bytes to decode. + */ + write(buffer: Buffer): string; + /** + * Returns any remaining input stored in the internal buffer as a string. Bytes + * representing incomplete UTF-8 and UTF-16 characters will be replaced with + * substitution characters appropriate for the character encoding. + * + * If the `buffer` argument is provided, one final call to `stringDecoder.write()`is performed before returning the remaining input. + * After `end()` is called, the `stringDecoder` object can be reused for new input. + * @since v0.9.3 + * @param buffer A `Buffer`, or `TypedArray`, or `DataView` containing the bytes to decode. 
+ */ + end(buffer?: Buffer): string; + } +} +declare module 'node:string_decoder' { + export * from 'string_decoder'; +} diff --git a/node_modules/@types/node/ts4.8/test.d.ts b/node_modules/@types/node/ts4.8/test.d.ts new file mode 100755 index 0000000..a9b4eeb --- /dev/null +++ b/node_modules/@types/node/ts4.8/test.d.ts @@ -0,0 +1,314 @@ +/** + * The `node:test` module provides a standalone testing module. + * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/test.js) + */ +declare module 'node:test' { + /** + * Programmatically start the test runner. + * @since v18.9.0 + * @param options Configuration options for running tests. + * @returns A {@link TapStream} that emits events about the test execution. + */ + function run(options?: RunOptions): TapStream; + + /** + * The `test()` function is the value imported from the test module. Each invocation of this + * function results in the creation of a test point in the TAP output. + * + * The {@link TestContext} object passed to the fn argument can be used to perform actions + * related to the current test. Examples include skipping the test, adding additional TAP + * diagnostic information, or creating subtests. + * + * `test()` returns a {@link Promise} that resolves once the test completes. The return value + * can usually be discarded for top level tests. However, the return value from subtests should + * be used to prevent the parent test from finishing first and cancelling the subtest as shown + * in the following example. + * + * ```js + * test('top level test', async (t) => { + * // The setTimeout() in the following subtest would cause it to outlive its + * // parent test if 'await' is removed on the next line. Once the parent test + * // completes, it will cancel any outstanding subtests. + * await t.test('longer running subtest', async (t) => { + * return new Promise((resolve, reject) => { + * setTimeout(resolve, 1000); + * }); + * }); + * }); + * ``` + * @since v18.0.0 + * @param name The name of the test, which is displayed when reporting test results. + * Default: The `name` property of fn, or `''` if `fn` does not have a name. + * @param options Configuration options for the test + * @param fn The function under test. The first argument to this function is a + * {@link TestContext} object. If the test uses callbacks, the callback function is + * passed as the second argument. Default: A no-op function. + * @returns A {@link Promise} resolved with `undefined` once the test completes. + */ + function test(name?: string, fn?: TestFn): Promise; + function test(name?: string, options?: TestOptions, fn?: TestFn): Promise; + function test(options?: TestOptions, fn?: TestFn): Promise; + function test(fn?: TestFn): Promise; + + /** + * @since v18.6.0 + * @param name The name of the suite, which is displayed when reporting suite results. + * Default: The `name` property of fn, or `''` if `fn` does not have a name. + * @param options Configuration options for the suite + * @param fn The function under suite. Default: A no-op function. + */ + function describe(name?: string, options?: TestOptions, fn?: SuiteFn): void; + function describe(name?: string, fn?: SuiteFn): void; + function describe(options?: TestOptions, fn?: SuiteFn): void; + function describe(fn?: SuiteFn): void; + + /** + * @since v18.6.0 + * @param name The name of the test, which is displayed when reporting test results. + * Default: The `name` property of fn, or `''` if `fn` does not have a name. 
+ * @param options Configuration options for the test + * @param fn The function under test. If the test uses callbacks, the callback function is + * passed as the second argument. Default: A no-op function. + */ + function it(name?: string, options?: TestOptions, fn?: ItFn): void; + function it(name?: string, fn?: ItFn): void; + function it(options?: TestOptions, fn?: ItFn): void; + function it(fn?: ItFn): void; + + /** + * The type of a function under test. The first argument to this function is a + * {@link TestContext} object. If the test uses callbacks, the callback function is passed as + * the second argument. + */ + type TestFn = (t: TestContext, done: (result?: any) => void) => any; + + /** + * The type of a function under Suite. + * If the test uses callbacks, the callback function is passed as an argument + */ + type SuiteFn = (done: (result?: any) => void) => void; + + /** + * The type of a function under test. + * If the test uses callbacks, the callback function is passed as an argument + */ + type ItFn = (done: (result?: any) => void) => any; + + interface RunOptions { + /** + * @default false + */ + concurrency?: number | boolean; + + /** + * An array containing the list of files to run. If unspecified, the test runner execution model will be used. + */ + files?: readonly string[]; + + /** + * Allows aborting an in-progress test. + * @default undefined + */ + signal?: AbortSignal; + + /** + * A number of milliseconds the test will fail after. If unspecified, subtests inherit this + * value from their parent. + * @default Infinity + */ + timeout?: number; + } + + /** + * A successful call of the run() method will return a new TapStream object, streaming a TAP output. + * TapStream will emit events in the order of the tests' definitions. 
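Editor's sketch (not part of the vendored typings): starting the runner programmatically with `run()` and subscribing to the `TapStream` events declared below; the test file path is hypothetical.

```js
const { run } = require('node:test');

const stream = run({ files: ['./test/example.test.js'], timeout: 60_000 }); // hypothetical file

stream.on('test:pass', (data) => console.log(`ok - ${data.name} (${data.duration}ms)`));
stream.on('test:fail', (data) => console.error(`not ok - ${data.name}`, data.error));
stream.on('test:diagnostic', (message) => console.log(`# ${message}`));

stream.pipe(process.stdout); // TapStream is also a readable TAP stream
```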
+ * @since v18.9.0 + */ + interface TapStream extends NodeJS.ReadableStream { + addListener(event: 'test:diagnostic', listener: (message: string) => void): this; + addListener(event: 'test:fail', listener: (data: TestFail) => void): this; + addListener(event: 'test:pass', listener: (data: TestPass) => void): this; + addListener(event: string, listener: (...args: any[]) => void): this; + emit(event: 'test:diagnostic', message: string): boolean; + emit(event: 'test:fail', data: TestFail): boolean; + emit(event: 'test:pass', data: TestPass): boolean; + emit(event: string | symbol, ...args: any[]): boolean; + on(event: 'test:diagnostic', listener: (message: string) => void): this; + on(event: 'test:fail', listener: (data: TestFail) => void): this; + on(event: 'test:pass', listener: (data: TestPass) => void): this; + on(event: string, listener: (...args: any[]) => void): this; + once(event: 'test:diagnostic', listener: (message: string) => void): this; + once(event: 'test:fail', listener: (data: TestFail) => void): this; + once(event: 'test:pass', listener: (data: TestPass) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + prependListener(event: 'test:diagnostic', listener: (message: string) => void): this; + prependListener(event: 'test:fail', listener: (data: TestFail) => void): this; + prependListener(event: 'test:pass', listener: (data: TestPass) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: 'test:diagnostic', listener: (message: string) => void): this; + prependOnceListener(event: 'test:fail', listener: (data: TestFail) => void): this; + prependOnceListener(event: 'test:pass', listener: (data: TestPass) => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + } + + interface TestFail { + /** + * The test duration. + */ + duration: number; + + /** + * The failure casing test to fail. + */ + error: Error; + + /** + * The test name. + */ + name: string; + + /** + * The ordinal number of the test. + */ + testNumber: number; + + /** + * Present if `context.todo` is called. + */ + todo?: string; + + /** + * Present if `context.skip` is called. + */ + skip?: string; + } + + interface TestPass { + /** + * The test duration. + */ + duration: number; + + /** + * The test name. + */ + name: string; + + /** + * The ordinal number of the test. + */ + testNumber: number; + + /** + * Present if `context.todo` is called. + */ + todo?: string; + + /** + * Present if `context.skip` is called. + */ + skip?: string; + } + + /** + * An instance of `TestContext` is passed to each test function in order to interact with the + * test runner. However, the `TestContext` constructor is not exposed as part of the API. + * @since v18.0.0 + */ + interface TestContext { + /** + * This function is used to write TAP diagnostics to the output. Any diagnostic information is + * included at the end of the test's results. This function does not return a value. + * @param message Message to be displayed as a TAP diagnostic. + * @since v18.0.0 + */ + diagnostic(message: string): void; + + /** + * If `shouldRunOnlyTests` is truthy, the test context will only run tests that have the `only` + * option set. Otherwise, all tests are run. If Node.js was not started with the `--test-only` + * command-line option, this function is a no-op. + * @param shouldRunOnlyTests Whether or not to run `only` tests. 
+ * @since v18.0.0 + */ + runOnly(shouldRunOnlyTests: boolean): void; + + /** + * This function causes the test's output to indicate the test as skipped. If `message` is + * provided, it is included in the TAP output. Calling `skip()` does not terminate execution of + * the test function. This function does not return a value. + * @param message Optional skip message to be displayed in TAP output. + * @since v18.0.0 + */ + skip(message?: string): void; + + /** + * This function adds a `TODO` directive to the test's output. If `message` is provided, it is + * included in the TAP output. Calling `todo()` does not terminate execution of the test + * function. This function does not return a value. + * @param message Optional `TODO` message to be displayed in TAP output. + * @since v18.0.0 + */ + todo(message?: string): void; + + /** + * This function is used to create subtests under the current test. This function behaves in + * the same fashion as the top level {@link test} function. + * @since v18.0.0 + * @param name The name of the test, which is displayed when reporting test results. + * Default: The `name` property of fn, or `''` if `fn` does not have a name. + * @param options Configuration options for the test + * @param fn The function under test. This first argument to this function is a + * {@link TestContext} object. If the test uses callbacks, the callback function is + * passed as the second argument. Default: A no-op function. + * @returns A {@link Promise} resolved with `undefined` once the test completes. + */ + test: typeof test; + } + + interface TestOptions { + /** + * The number of tests that can be run at the same time. If unspecified, subtests inherit this + * value from their parent. + * @default 1 + */ + concurrency?: number; + + /** + * If truthy, and the test context is configured to run `only` tests, then this test will be + * run. Otherwise, the test is skipped. + * @default false + */ + only?: boolean; + + /** + * Allows aborting an in-progress test. + * @since v18.8.0 + */ + signal?: AbortSignal; + + /** + * If truthy, the test is skipped. If a string is provided, that string is displayed in the + * test results as the reason for skipping the test. + * @default false + */ + skip?: boolean | string; + + /** + * A number of milliseconds the test will fail after. If unspecified, subtests inherit this + * value from their parent. + * @default Infinity + * @since v18.7.0 + */ + timeout?: number; + + /** + * If truthy, the test marked as `TODO`. If a string is provided, that string is displayed in + * the test results as the reason why the test is `TODO`. + * @default false + */ + todo?: boolean | string; + } + + export { test as default, run, test, describe, it }; +} diff --git a/node_modules/@types/node/ts4.8/timers.d.ts b/node_modules/@types/node/ts4.8/timers.d.ts new file mode 100755 index 0000000..b26f3ce --- /dev/null +++ b/node_modules/@types/node/ts4.8/timers.d.ts @@ -0,0 +1,94 @@ +/** + * The `timer` module exposes a global API for scheduling functions to + * be called at some future period of time. Because the timer functions are + * globals, there is no need to call `require('timers')` to use the API. + * + * The timer functions within Node.js implement a similar API as the timers API + * provided by Web Browsers but use a different internal implementation that is + * built around the Node.js [Event Loop](https://nodejs.org/en/docs/guides/event-loop-timers-and-nexttick/#setimmediate-vs-settimeout). 
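Editor's sketch (not part of the vendored typings): the global timer functions typed by this module, including `refresh()` on the returned `Timeout` and cancelling an interval.

```js
// No require needed: the timer functions are globals typed by this module.
const timeout = setTimeout(() => console.log('fired'), 1000);

// Reschedule the same Timeout object instead of allocating a new one.
setTimeout(() => timeout.refresh(), 500);

// Cancel an interval once it is no longer needed.
const interval = setInterval(() => console.log('tick'), 100);
setTimeout(() => clearInterval(interval), 1000);
```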
+ * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/timers.js) + */ +declare module 'timers' { + import { Abortable } from 'node:events'; + import { setTimeout as setTimeoutPromise, setImmediate as setImmediatePromise, setInterval as setIntervalPromise } from 'node:timers/promises'; + interface TimerOptions extends Abortable { + /** + * Set to `false` to indicate that the scheduled `Timeout` + * should not require the Node.js event loop to remain active. + * @default true + */ + ref?: boolean | undefined; + } + let setTimeout: typeof global.setTimeout; + let clearTimeout: typeof global.clearTimeout; + let setInterval: typeof global.setInterval; + let clearInterval: typeof global.clearInterval; + let setImmediate: typeof global.setImmediate; + let clearImmediate: typeof global.clearImmediate; + global { + namespace NodeJS { + // compatibility with older typings + interface Timer extends RefCounted { + hasRef(): boolean; + refresh(): this; + [Symbol.toPrimitive](): number; + } + interface Immediate extends RefCounted { + /** + * If true, the `Immediate` object will keep the Node.js event loop active. + * @since v11.0.0 + */ + hasRef(): boolean; + _onImmediate: Function; // to distinguish it from the Timeout class + } + interface Timeout extends Timer { + /** + * If true, the `Timeout` object will keep the Node.js event loop active. + * @since v11.0.0 + */ + hasRef(): boolean; + /** + * Sets the timer's start time to the current time, and reschedules the timer to + * call its callback at the previously specified duration adjusted to the current + * time. This is useful for refreshing a timer without allocating a new + * JavaScript object. + * + * Using this on a timer that has already called its callback will reactivate the + * timer. + * @since v10.2.0 + * @return a reference to `timeout` + */ + refresh(): this; + [Symbol.toPrimitive](): number; + } + } + function setTimeout(callback: (...args: TArgs) => void, ms?: number, ...args: TArgs): NodeJS.Timeout; + // util.promisify no rest args compability + // tslint:disable-next-line void-return + function setTimeout(callback: (args: void) => void, ms?: number): NodeJS.Timeout; + namespace setTimeout { + const __promisify__: typeof setTimeoutPromise; + } + function clearTimeout(timeoutId: NodeJS.Timeout | string | number | undefined): void; + function setInterval(callback: (...args: TArgs) => void, ms?: number, ...args: TArgs): NodeJS.Timer; + // util.promisify no rest args compability + // tslint:disable-next-line void-return + function setInterval(callback: (args: void) => void, ms?: number): NodeJS.Timer; + namespace setInterval { + const __promisify__: typeof setIntervalPromise; + } + function clearInterval(intervalId: NodeJS.Timeout | string | number | undefined): void; + function setImmediate(callback: (...args: TArgs) => void, ...args: TArgs): NodeJS.Immediate; + // util.promisify no rest args compability + // tslint:disable-next-line void-return + function setImmediate(callback: (args: void) => void): NodeJS.Immediate; + namespace setImmediate { + const __promisify__: typeof setImmediatePromise; + } + function clearImmediate(immediateId: NodeJS.Immediate | undefined): void; + function queueMicrotask(callback: () => void): void; + } +} +declare module 'node:timers' { + export * from 'timers'; +} diff --git a/node_modules/@types/node/ts4.8/timers/promises.d.ts b/node_modules/@types/node/ts4.8/timers/promises.d.ts new file mode 100755 index 0000000..fd77888 --- /dev/null +++ b/node_modules/@types/node/ts4.8/timers/promises.d.ts 
@@ -0,0 +1,68 @@ +/** + * The `timers/promises` API provides an alternative set of timer functions + * that return `Promise` objects. The API is accessible via`require('timers/promises')`. + * + * ```js + * import { + * setTimeout, + * setImmediate, + * setInterval, + * } from 'timers/promises'; + * ``` + * @since v15.0.0 + */ +declare module 'timers/promises' { + import { TimerOptions } from 'node:timers'; + /** + * ```js + * import { + * setTimeout, + * } from 'timers/promises'; + * + * const res = await setTimeout(100, 'result'); + * + * console.log(res); // Prints 'result' + * ``` + * @since v15.0.0 + * @param [delay=1] The number of milliseconds to wait before fulfilling the promise. + * @param value A value with which the promise is fulfilled. + */ + function setTimeout(delay?: number, value?: T, options?: TimerOptions): Promise; + /** + * ```js + * import { + * setImmediate, + * } from 'timers/promises'; + * + * const res = await setImmediate('result'); + * + * console.log(res); // Prints 'result' + * ``` + * @since v15.0.0 + * @param value A value with which the promise is fulfilled. + */ + function setImmediate(value?: T, options?: TimerOptions): Promise; + /** + * Returns an async iterator that generates values in an interval of `delay` ms. + * + * ```js + * import { + * setInterval, + * } from 'timers/promises'; + * + * const interval = 100; + * for await (const startTime of setInterval(interval, Date.now())) { + * const now = Date.now(); + * console.log(now); + * if ((now - startTime) > 1000) + * break; + * } + * console.log(Date.now()); + * ``` + * @since v15.9.0 + */ + function setInterval(delay?: number, value?: T, options?: TimerOptions): AsyncIterable; +} +declare module 'node:timers/promises' { + export * from 'timers/promises'; +} diff --git a/node_modules/@types/node/ts4.8/tls.d.ts b/node_modules/@types/node/ts4.8/tls.d.ts new file mode 100755 index 0000000..2cbc716 --- /dev/null +++ b/node_modules/@types/node/ts4.8/tls.d.ts @@ -0,0 +1,1028 @@ +/** + * The `tls` module provides an implementation of the Transport Layer Security + * (TLS) and Secure Socket Layer (SSL) protocols that is built on top of OpenSSL. + * The module can be accessed using: + * + * ```js + * const tls = require('tls'); + * ``` + * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/tls.js) + */ +declare module 'tls' { + import { X509Certificate } from 'node:crypto'; + import * as net from 'node:net'; + import * as stream from 'stream'; + const CLIENT_RENEG_LIMIT: number; + const CLIENT_RENEG_WINDOW: number; + interface Certificate { + /** + * Country code. + */ + C: string; + /** + * Street. + */ + ST: string; + /** + * Locality. + */ + L: string; + /** + * Organization. + */ + O: string; + /** + * Organizational unit. + */ + OU: string; + /** + * Common name. + */ + CN: string; + } + interface PeerCertificate { + subject: Certificate; + issuer: Certificate; + subjectaltname: string; + infoAccess: NodeJS.Dict; + modulus: string; + exponent: string; + valid_from: string; + valid_to: string; + fingerprint: string; + fingerprint256: string; + ext_key_usage: string[]; + serialNumber: string; + raw: Buffer; + } + interface DetailedPeerCertificate extends PeerCertificate { + issuerCertificate: DetailedPeerCertificate; + } + interface CipherNameAndProtocol { + /** + * The cipher name. + */ + name: string; + /** + * SSL/TLS protocol version. + */ + version: string; + /** + * IETF name for the cipher suite. 
+ */ + standardName: string; + } + interface EphemeralKeyInfo { + /** + * The supported types are 'DH' and 'ECDH'. + */ + type: string; + /** + * The name property is available only when type is 'ECDH'. + */ + name?: string | undefined; + /** + * The size of parameter of an ephemeral key exchange. + */ + size: number; + } + interface KeyObject { + /** + * Private keys in PEM format. + */ + pem: string | Buffer; + /** + * Optional passphrase. + */ + passphrase?: string | undefined; + } + interface PxfObject { + /** + * PFX or PKCS12 encoded private key and certificate chain. + */ + buf: string | Buffer; + /** + * Optional passphrase. + */ + passphrase?: string | undefined; + } + interface TLSSocketOptions extends SecureContextOptions, CommonConnectionOptions { + /** + * If true the TLS socket will be instantiated in server-mode. + * Defaults to false. + */ + isServer?: boolean | undefined; + /** + * An optional net.Server instance. + */ + server?: net.Server | undefined; + /** + * An optional Buffer instance containing a TLS session. + */ + session?: Buffer | undefined; + /** + * If true, specifies that the OCSP status request extension will be + * added to the client hello and an 'OCSPResponse' event will be + * emitted on the socket before establishing a secure communication + */ + requestOCSP?: boolean | undefined; + } + /** + * Performs transparent encryption of written data and all required TLS + * negotiation. + * + * Instances of `tls.TLSSocket` implement the duplex `Stream` interface. + * + * Methods that return TLS connection metadata (e.g.{@link TLSSocket.getPeerCertificate} will only return data while the + * connection is open. + * @since v0.11.4 + */ + class TLSSocket extends net.Socket { + /** + * Construct a new tls.TLSSocket object from an existing TCP socket. + */ + constructor(socket: net.Socket, options?: TLSSocketOptions); + /** + * This property is `true` if the peer certificate was signed by one of the CAs + * specified when creating the `tls.TLSSocket` instance, otherwise `false`. + * @since v0.11.4 + */ + authorized: boolean; + /** + * Returns the reason why the peer's certificate was not been verified. This + * property is set only when `tlsSocket.authorized === false`. + * @since v0.11.4 + */ + authorizationError: Error; + /** + * Always returns `true`. This may be used to distinguish TLS sockets from regular`net.Socket` instances. + * @since v0.11.4 + */ + encrypted: true; + /** + * String containing the selected ALPN protocol. + * Before a handshake has completed, this value is always null. + * When a handshake is completed but not ALPN protocol was selected, tlsSocket.alpnProtocol equals false. + */ + alpnProtocol: string | false | null; + /** + * Returns an object representing the local certificate. The returned object has + * some properties corresponding to the fields of the certificate. + * + * See {@link TLSSocket.getPeerCertificate} for an example of the certificate + * structure. + * + * If there is no local certificate, an empty object will be returned. If the + * socket has been destroyed, `null` will be returned. + * @since v11.2.0 + */ + getCertificate(): PeerCertificate | object | null; + /** + * Returns an object containing information on the negotiated cipher suite. 
+ * + * For example: + * + * ```json + * { + * "name": "AES128-SHA256", + * "standardName": "TLS_RSA_WITH_AES_128_CBC_SHA256", + * "version": "TLSv1.2" + * } + * ``` + * + * See [SSL\_CIPHER\_get\_name](https://www.openssl.org/docs/man1.1.1/man3/SSL_CIPHER_get_name.html) for more information. + * @since v0.11.4 + */ + getCipher(): CipherNameAndProtocol; + /** + * Returns an object representing the type, name, and size of parameter of + * an ephemeral key exchange in `perfect forward secrecy` on a client + * connection. It returns an empty object when the key exchange is not + * ephemeral. As this is only supported on a client socket; `null` is returned + * if called on a server socket. The supported types are `'DH'` and `'ECDH'`. The`name` property is available only when type is `'ECDH'`. + * + * For example: `{ type: 'ECDH', name: 'prime256v1', size: 256 }`. + * @since v5.0.0 + */ + getEphemeralKeyInfo(): EphemeralKeyInfo | object | null; + /** + * As the `Finished` messages are message digests of the complete handshake + * (with a total of 192 bits for TLS 1.0 and more for SSL 3.0), they can + * be used for external authentication procedures when the authentication + * provided by SSL/TLS is not desired or is not enough. + * + * Corresponds to the `SSL_get_finished` routine in OpenSSL and may be used + * to implement the `tls-unique` channel binding from [RFC 5929](https://tools.ietf.org/html/rfc5929). + * @since v9.9.0 + * @return The latest `Finished` message that has been sent to the socket as part of a SSL/TLS handshake, or `undefined` if no `Finished` message has been sent yet. + */ + getFinished(): Buffer | undefined; + /** + * Returns an object representing the peer's certificate. If the peer does not + * provide a certificate, an empty object will be returned. If the socket has been + * destroyed, `null` will be returned. + * + * If the full certificate chain was requested, each certificate will include an`issuerCertificate` property containing an object representing its issuer's + * certificate. + * @since v0.11.4 + * @param detailed Include the full certificate chain if `true`, otherwise include just the peer's certificate. + * @return A certificate object. + */ + getPeerCertificate(detailed: true): DetailedPeerCertificate; + getPeerCertificate(detailed?: false): PeerCertificate; + getPeerCertificate(detailed?: boolean): PeerCertificate | DetailedPeerCertificate; + /** + * As the `Finished` messages are message digests of the complete handshake + * (with a total of 192 bits for TLS 1.0 and more for SSL 3.0), they can + * be used for external authentication procedures when the authentication + * provided by SSL/TLS is not desired or is not enough. + * + * Corresponds to the `SSL_get_peer_finished` routine in OpenSSL and may be used + * to implement the `tls-unique` channel binding from [RFC 5929](https://tools.ietf.org/html/rfc5929). + * @since v9.9.0 + * @return The latest `Finished` message that is expected or has actually been received from the socket as part of a SSL/TLS handshake, or `undefined` if there is no `Finished` message so + * far. + */ + getPeerFinished(): Buffer | undefined; + /** + * Returns a string containing the negotiated SSL/TLS protocol version of the + * current connection. The value `'unknown'` will be returned for connected + * sockets that have not completed the handshaking process. The value `null` will + * be returned for server sockets or disconnected client sockets. 
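+     *
+     * For example (a minimal sketch; `tlsSocket` is assumed to be a client socket
+     * returned by {@link connect}):
+     *
+     * ```js
+     * tlsSocket.on('secureConnect', () => {
+     *   console.log(tlsSocket.getProtocol()); // e.g. 'TLSv1.3'
+     * });
+     * ```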
+ * + * Protocol versions are: + * + * * `'SSLv3'` + * * `'TLSv1'` + * * `'TLSv1.1'` + * * `'TLSv1.2'` + * * `'TLSv1.3'` + * + * See the OpenSSL [`SSL_get_version`](https://www.openssl.org/docs/man1.1.1/man3/SSL_get_version.html) documentation for more information. + * @since v5.7.0 + */ + getProtocol(): string | null; + /** + * Returns the TLS session data or `undefined` if no session was + * negotiated. On the client, the data can be provided to the `session` option of {@link connect} to resume the connection. On the server, it may be useful + * for debugging. + * + * See `Session Resumption` for more information. + * + * Note: `getSession()` works only for TLSv1.2 and below. For TLSv1.3, applications + * must use the `'session'` event (it also works for TLSv1.2 and below). + * @since v0.11.4 + */ + getSession(): Buffer | undefined; + /** + * See [SSL\_get\_shared\_sigalgs](https://www.openssl.org/docs/man1.1.1/man3/SSL_get_shared_sigalgs.html) for more information. + * @since v12.11.0 + * @return List of signature algorithms shared between the server and the client in the order of decreasing preference. + */ + getSharedSigalgs(): string[]; + /** + * For a client, returns the TLS session ticket if one is available, or`undefined`. For a server, always returns `undefined`. + * + * It may be useful for debugging. + * + * See `Session Resumption` for more information. + * @since v0.11.4 + */ + getTLSTicket(): Buffer | undefined; + /** + * See `Session Resumption` for more information. + * @since v0.5.6 + * @return `true` if the session was reused, `false` otherwise. + */ + isSessionReused(): boolean; + /** + * The `tlsSocket.renegotiate()` method initiates a TLS renegotiation process. + * Upon completion, the `callback` function will be passed a single argument + * that is either an `Error` (if the request failed) or `null`. + * + * This method can be used to request a peer's certificate after the secure + * connection has been established. + * + * When running as the server, the socket will be destroyed with an error after`handshakeTimeout` timeout. + * + * For TLSv1.3, renegotiation cannot be initiated, it is not supported by the + * protocol. + * @since v0.11.8 + * @param callback If `renegotiate()` returned `true`, callback is attached once to the `'secure'` event. If `renegotiate()` returned `false`, `callback` will be called in the next tick with + * an error, unless the `tlsSocket` has been destroyed, in which case `callback` will not be called at all. + * @return `true` if renegotiation was initiated, `false` otherwise. + */ + renegotiate( + options: { + rejectUnauthorized?: boolean | undefined; + requestCert?: boolean | undefined; + }, + callback: (err: Error | null) => void + ): undefined | boolean; + /** + * The `tlsSocket.setMaxSendFragment()` method sets the maximum TLS fragment size. + * Returns `true` if setting the limit succeeded; `false` otherwise. + * + * Smaller fragment sizes decrease the buffering latency on the client: larger + * fragments are buffered by the TLS layer until the entire fragment is received + * and its integrity is verified; large fragments can span multiple roundtrips + * and their processing can be delayed due to packet loss or reordering. However, + * smaller fragments add extra TLS framing bytes and CPU overhead, which may + * decrease overall server throughput. + * @since v0.11.11 + * @param [size=16384] The maximum TLS fragment size. The maximum value is `16384`. 
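+     *
+     * For example (a minimal sketch; `tlsSocket` is assumed to be an established
+     * `TLSSocket`):
+     *
+     * ```js
+     * // Trade some throughput for lower buffering latency on small writes.
+     * const ok = tlsSocket.setMaxSendFragment(1024);
+     * console.log(ok); // true if the new limit was applied
+     * ```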
+ */ + setMaxSendFragment(size: number): boolean; + /** + * Disables TLS renegotiation for this `TLSSocket` instance. Once called, attempts + * to renegotiate will trigger an `'error'` event on the `TLSSocket`. + * @since v8.4.0 + */ + disableRenegotiation(): void; + /** + * When enabled, TLS packet trace information is written to `stderr`. This can be + * used to debug TLS connection problems. + * + * The format of the output is identical to the output of`openssl s_client -trace` or `openssl s_server -trace`. While it is produced by + * OpenSSL's `SSL_trace()` function, the format is undocumented, can change + * without notice, and should not be relied on. + * @since v12.2.0 + */ + enableTrace(): void; + /** + * Returns the peer certificate as an `X509Certificate` object. + * + * If there is no peer certificate, or the socket has been destroyed,`undefined` will be returned. + * @since v15.9.0 + */ + getPeerX509Certificate(): X509Certificate | undefined; + /** + * Returns the local certificate as an `X509Certificate` object. + * + * If there is no local certificate, or the socket has been destroyed,`undefined` will be returned. + * @since v15.9.0 + */ + getX509Certificate(): X509Certificate | undefined; + /** + * Keying material is used for validations to prevent different kind of attacks in + * network protocols, for example in the specifications of IEEE 802.1X. + * + * Example + * + * ```js + * const keyingMaterial = tlsSocket.exportKeyingMaterial( + * 128, + * 'client finished'); + * + * /* + * Example return value of keyingMaterial: + * + * + * ``` + * + * See the OpenSSL [`SSL_export_keying_material`](https://www.openssl.org/docs/man1.1.1/man3/SSL_export_keying_material.html) documentation for more + * information. + * @since v13.10.0, v12.17.0 + * @param length number of bytes to retrieve from keying material + * @param label an application specific label, typically this will be a value from the [IANA Exporter Label + * Registry](https://www.iana.org/assignments/tls-parameters/tls-parameters.xhtml#exporter-labels). + * @param context Optionally provide a context. 
+ * @return requested bytes of the keying material + */ + exportKeyingMaterial(length: number, label: string, context: Buffer): Buffer; + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: 'OCSPResponse', listener: (response: Buffer) => void): this; + addListener(event: 'secureConnect', listener: () => void): this; + addListener(event: 'session', listener: (session: Buffer) => void): this; + addListener(event: 'keylog', listener: (line: Buffer) => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: 'OCSPResponse', response: Buffer): boolean; + emit(event: 'secureConnect'): boolean; + emit(event: 'session', session: Buffer): boolean; + emit(event: 'keylog', line: Buffer): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: 'OCSPResponse', listener: (response: Buffer) => void): this; + on(event: 'secureConnect', listener: () => void): this; + on(event: 'session', listener: (session: Buffer) => void): this; + on(event: 'keylog', listener: (line: Buffer) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: 'OCSPResponse', listener: (response: Buffer) => void): this; + once(event: 'secureConnect', listener: () => void): this; + once(event: 'session', listener: (session: Buffer) => void): this; + once(event: 'keylog', listener: (line: Buffer) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: 'OCSPResponse', listener: (response: Buffer) => void): this; + prependListener(event: 'secureConnect', listener: () => void): this; + prependListener(event: 'session', listener: (session: Buffer) => void): this; + prependListener(event: 'keylog', listener: (line: Buffer) => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: 'OCSPResponse', listener: (response: Buffer) => void): this; + prependOnceListener(event: 'secureConnect', listener: () => void): this; + prependOnceListener(event: 'session', listener: (session: Buffer) => void): this; + prependOnceListener(event: 'keylog', listener: (line: Buffer) => void): this; + } + interface CommonConnectionOptions { + /** + * An optional TLS context object from tls.createSecureContext() + */ + secureContext?: SecureContext | undefined; + /** + * When enabled, TLS packet trace information is written to `stderr`. This can be + * used to debug TLS connection problems. + * @default false + */ + enableTrace?: boolean | undefined; + /** + * If true the server will request a certificate from clients that + * connect and attempt to verify that certificate. Defaults to + * false. + */ + requestCert?: boolean | undefined; + /** + * An array of strings or a Buffer naming possible ALPN protocols. + * (Protocols should be ordered by their priority.) + */ + ALPNProtocols?: string[] | Uint8Array[] | Uint8Array | undefined; + /** + * SNICallback(servername, cb) A function that will be + * called if the client supports SNI TLS extension. Two arguments + * will be passed when called: servername and cb. SNICallback should + * invoke cb(null, ctx), where ctx is a SecureContext instance. + * (tls.createSecureContext(...) can be used to get a proper + * SecureContext.) If SNICallback wasn't provided the default callback + * with high-level API will be used (see below). 
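+         *
+         * For example (a minimal sketch; `contexts` is a hypothetical `Map` of server
+         * names to `SecureContext` objects created with `tls.createSecureContext()`):
+         *
+         * ```js
+         * const tls = require('tls');
+         * const server = tls.createServer({
+         *   SNICallback: (servername, cb) => cb(null, contexts.get(servername)),
+         * });
+         * ```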
+ */ + SNICallback?: ((servername: string, cb: (err: Error | null, ctx?: SecureContext) => void) => void) | undefined; + /** + * If true the server will reject any connection which is not + * authorized with the list of supplied CAs. This option only has an + * effect if requestCert is true. + * @default true + */ + rejectUnauthorized?: boolean | undefined; + } + interface TlsOptions extends SecureContextOptions, CommonConnectionOptions, net.ServerOpts { + /** + * Abort the connection if the SSL/TLS handshake does not finish in the + * specified number of milliseconds. A 'tlsClientError' is emitted on + * the tls.Server object whenever a handshake times out. Default: + * 120000 (120 seconds). + */ + handshakeTimeout?: number | undefined; + /** + * The number of seconds after which a TLS session created by the + * server will no longer be resumable. See Session Resumption for more + * information. Default: 300. + */ + sessionTimeout?: number | undefined; + /** + * 48-bytes of cryptographically strong pseudo-random data. + */ + ticketKeys?: Buffer | undefined; + /** + * + * @param socket + * @param identity identity parameter sent from the client. + * @return pre-shared key that must either be + * a buffer or `null` to stop the negotiation process. Returned PSK must be + * compatible with the selected cipher's digest. + * + * When negotiating TLS-PSK (pre-shared keys), this function is called + * with the identity provided by the client. + * If the return value is `null` the negotiation process will stop and an + * "unknown_psk_identity" alert message will be sent to the other party. + * If the server wishes to hide the fact that the PSK identity was not known, + * the callback must provide some random data as `psk` to make the connection + * fail with "decrypt_error" before negotiation is finished. + * PSK ciphers are disabled by default, and using TLS-PSK thus + * requires explicitly specifying a cipher suite with the `ciphers` option. + * More information can be found in the RFC 4279. + */ + pskCallback?(socket: TLSSocket, identity: string): DataView | NodeJS.TypedArray | null; + /** + * hint to send to a client to help + * with selecting the identity during TLS-PSK negotiation. Will be ignored + * in TLS 1.3. Upon failing to set pskIdentityHint `tlsClientError` will be + * emitted with `ERR_TLS_PSK_SET_IDENTIY_HINT_FAILED` code. + */ + pskIdentityHint?: string | undefined; + } + interface PSKCallbackNegotation { + psk: DataView | NodeJS.TypedArray; + identity: string; + } + interface ConnectionOptions extends SecureContextOptions, CommonConnectionOptions { + host?: string | undefined; + port?: number | undefined; + path?: string | undefined; // Creates unix socket connection to path. If this option is specified, `host` and `port` are ignored. + socket?: stream.Duplex | undefined; // Establish secure connection on a given socket rather than creating a new socket + checkServerIdentity?: typeof checkServerIdentity | undefined; + servername?: string | undefined; // SNI TLS Extension + session?: Buffer | undefined; + minDHSize?: number | undefined; + lookup?: net.LookupFunction | undefined; + timeout?: number | undefined; + /** + * When negotiating TLS-PSK (pre-shared keys), this function is called + * with optional identity `hint` provided by the server or `null` + * in case of TLS 1.3 where `hint` was removed. 
+ * It will be necessary to provide a custom `tls.checkServerIdentity()` + * for the connection as the default one will try to check hostname/IP + * of the server against the certificate but that's not applicable for PSK + * because there won't be a certificate present. + * More information can be found in the RFC 4279. + * + * @param hint message sent from the server to help client + * decide which identity to use during negotiation. + * Always `null` if TLS 1.3 is used. + * @returns Return `null` to stop the negotiation process. `psk` must be + * compatible with the selected cipher's digest. + * `identity` must use UTF-8 encoding. + */ + pskCallback?(hint: string | null): PSKCallbackNegotation | null; + } + /** + * Accepts encrypted connections using TLS or SSL. + * @since v0.3.2 + */ + class Server extends net.Server { + constructor(secureConnectionListener?: (socket: TLSSocket) => void); + constructor(options: TlsOptions, secureConnectionListener?: (socket: TLSSocket) => void); + /** + * The `server.addContext()` method adds a secure context that will be used if + * the client request's SNI name matches the supplied `hostname` (or wildcard). + * + * When there are multiple matching contexts, the most recently added one is + * used. + * @since v0.5.3 + * @param hostname A SNI host name or wildcard (e.g. `'*'`) + * @param context An object containing any of the possible properties from the {@link createSecureContext} `options` arguments (e.g. `key`, `cert`, `ca`, etc). + */ + addContext(hostname: string, context: SecureContextOptions): void; + /** + * Returns the session ticket keys. + * + * See `Session Resumption` for more information. + * @since v3.0.0 + * @return A 48-byte buffer containing the session ticket keys. + */ + getTicketKeys(): Buffer; + /** + * The `server.setSecureContext()` method replaces the secure context of an + * existing server. Existing connections to the server are not interrupted. + * @since v11.0.0 + * @param options An object containing any of the possible properties from the {@link createSecureContext} `options` arguments (e.g. `key`, `cert`, `ca`, etc). + */ + setSecureContext(options: SecureContextOptions): void; + /** + * Sets the session ticket keys. + * + * Changes to the ticket keys are effective only for future server connections. + * Existing or currently pending server connections will use the previous keys. + * + * See `Session Resumption` for more information. + * @since v3.0.0 + * @param keys A 48-byte buffer containing the session ticket keys. + */ + setTicketKeys(keys: Buffer): void; + /** + * events.EventEmitter + * 1. tlsClientError + * 2. newSession + * 3. OCSPRequest + * 4. resumeSession + * 5. secureConnection + * 6. 
keylog + */ + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: 'tlsClientError', listener: (err: Error, tlsSocket: TLSSocket) => void): this; + addListener(event: 'newSession', listener: (sessionId: Buffer, sessionData: Buffer, callback: () => void) => void): this; + addListener(event: 'OCSPRequest', listener: (certificate: Buffer, issuer: Buffer, callback: (err: Error | null, resp: Buffer) => void) => void): this; + addListener(event: 'resumeSession', listener: (sessionId: Buffer, callback: (err: Error | null, sessionData: Buffer | null) => void) => void): this; + addListener(event: 'secureConnection', listener: (tlsSocket: TLSSocket) => void): this; + addListener(event: 'keylog', listener: (line: Buffer, tlsSocket: TLSSocket) => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: 'tlsClientError', err: Error, tlsSocket: TLSSocket): boolean; + emit(event: 'newSession', sessionId: Buffer, sessionData: Buffer, callback: () => void): boolean; + emit(event: 'OCSPRequest', certificate: Buffer, issuer: Buffer, callback: (err: Error | null, resp: Buffer) => void): boolean; + emit(event: 'resumeSession', sessionId: Buffer, callback: (err: Error | null, sessionData: Buffer | null) => void): boolean; + emit(event: 'secureConnection', tlsSocket: TLSSocket): boolean; + emit(event: 'keylog', line: Buffer, tlsSocket: TLSSocket): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: 'tlsClientError', listener: (err: Error, tlsSocket: TLSSocket) => void): this; + on(event: 'newSession', listener: (sessionId: Buffer, sessionData: Buffer, callback: () => void) => void): this; + on(event: 'OCSPRequest', listener: (certificate: Buffer, issuer: Buffer, callback: (err: Error | null, resp: Buffer) => void) => void): this; + on(event: 'resumeSession', listener: (sessionId: Buffer, callback: (err: Error | null, sessionData: Buffer | null) => void) => void): this; + on(event: 'secureConnection', listener: (tlsSocket: TLSSocket) => void): this; + on(event: 'keylog', listener: (line: Buffer, tlsSocket: TLSSocket) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: 'tlsClientError', listener: (err: Error, tlsSocket: TLSSocket) => void): this; + once(event: 'newSession', listener: (sessionId: Buffer, sessionData: Buffer, callback: () => void) => void): this; + once(event: 'OCSPRequest', listener: (certificate: Buffer, issuer: Buffer, callback: (err: Error | null, resp: Buffer) => void) => void): this; + once(event: 'resumeSession', listener: (sessionId: Buffer, callback: (err: Error | null, sessionData: Buffer | null) => void) => void): this; + once(event: 'secureConnection', listener: (tlsSocket: TLSSocket) => void): this; + once(event: 'keylog', listener: (line: Buffer, tlsSocket: TLSSocket) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: 'tlsClientError', listener: (err: Error, tlsSocket: TLSSocket) => void): this; + prependListener(event: 'newSession', listener: (sessionId: Buffer, sessionData: Buffer, callback: () => void) => void): this; + prependListener(event: 'OCSPRequest', listener: (certificate: Buffer, issuer: Buffer, callback: (err: Error | null, resp: Buffer) => void) => void): this; + prependListener(event: 'resumeSession', listener: (sessionId: Buffer, callback: (err: Error | null, sessionData: Buffer | null) => void) => void): this; + prependListener(event: 'secureConnection', listener: 
(tlsSocket: TLSSocket) => void): this; + prependListener(event: 'keylog', listener: (line: Buffer, tlsSocket: TLSSocket) => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: 'tlsClientError', listener: (err: Error, tlsSocket: TLSSocket) => void): this; + prependOnceListener(event: 'newSession', listener: (sessionId: Buffer, sessionData: Buffer, callback: () => void) => void): this; + prependOnceListener(event: 'OCSPRequest', listener: (certificate: Buffer, issuer: Buffer, callback: (err: Error | null, resp: Buffer) => void) => void): this; + prependOnceListener(event: 'resumeSession', listener: (sessionId: Buffer, callback: (err: Error | null, sessionData: Buffer | null) => void) => void): this; + prependOnceListener(event: 'secureConnection', listener: (tlsSocket: TLSSocket) => void): this; + prependOnceListener(event: 'keylog', listener: (line: Buffer, tlsSocket: TLSSocket) => void): this; + } + /** + * @deprecated since v0.11.3 Use `tls.TLSSocket` instead. + */ + interface SecurePair { + encrypted: TLSSocket; + cleartext: TLSSocket; + } + type SecureVersion = 'TLSv1.3' | 'TLSv1.2' | 'TLSv1.1' | 'TLSv1'; + interface SecureContextOptions { + /** + * Optionally override the trusted CA certificates. Default is to trust + * the well-known CAs curated by Mozilla. Mozilla's CAs are completely + * replaced when CAs are explicitly specified using this option. + */ + ca?: string | Buffer | Array | undefined; + /** + * Cert chains in PEM format. One cert chain should be provided per + * private key. Each cert chain should consist of the PEM formatted + * certificate for a provided private key, followed by the PEM + * formatted intermediate certificates (if any), in order, and not + * including the root CA (the root CA must be pre-known to the peer, + * see ca). When providing multiple cert chains, they do not have to + * be in the same order as their private keys in key. If the + * intermediate certificates are not provided, the peer will not be + * able to validate the certificate, and the handshake will fail. + */ + cert?: string | Buffer | Array | undefined; + /** + * Colon-separated list of supported signature algorithms. The list + * can contain digest algorithms (SHA256, MD5 etc.), public key + * algorithms (RSA-PSS, ECDSA etc.), combination of both (e.g + * 'RSA+SHA384') or TLS v1.3 scheme names (e.g. rsa_pss_pss_sha512). + */ + sigalgs?: string | undefined; + /** + * Cipher suite specification, replacing the default. For more + * information, see modifying the default cipher suite. Permitted + * ciphers can be obtained via tls.getCiphers(). Cipher names must be + * uppercased in order for OpenSSL to accept them. + */ + ciphers?: string | undefined; + /** + * Name of an OpenSSL engine which can provide the client certificate. + */ + clientCertEngine?: string | undefined; + /** + * PEM formatted CRLs (Certificate Revocation Lists). + */ + crl?: string | Buffer | Array | undefined; + /** + * Diffie Hellman parameters, required for Perfect Forward Secrecy. Use + * openssl dhparam to create the parameters. The key length must be + * greater than or equal to 1024 bits or else an error will be thrown. + * Although 1024 bits is permissible, use 2048 bits or larger for + * stronger security. If omitted or invalid, the parameters are + * silently discarded and DHE ciphers will not be available. 
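+         *
+         * For example (a minimal sketch; `dhparam.pem` is a hypothetical file created
+         * with `openssl dhparam -out dhparam.pem 2048`):
+         *
+         * ```js
+         * const fs = require('fs');
+         * const options = { dhparam: fs.readFileSync('dhparam.pem') };
+         * ```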
+ */ + dhparam?: string | Buffer | undefined; + /** + * A string describing a named curve or a colon separated list of curve + * NIDs or names, for example P-521:P-384:P-256, to use for ECDH key + * agreement. Set to auto to select the curve automatically. Use + * crypto.getCurves() to obtain a list of available curve names. On + * recent releases, openssl ecparam -list_curves will also display the + * name and description of each available elliptic curve. Default: + * tls.DEFAULT_ECDH_CURVE. + */ + ecdhCurve?: string | undefined; + /** + * Attempt to use the server's cipher suite preferences instead of the + * client's. When true, causes SSL_OP_CIPHER_SERVER_PREFERENCE to be + * set in secureOptions + */ + honorCipherOrder?: boolean | undefined; + /** + * Private keys in PEM format. PEM allows the option of private keys + * being encrypted. Encrypted keys will be decrypted with + * options.passphrase. Multiple keys using different algorithms can be + * provided either as an array of unencrypted key strings or buffers, + * or an array of objects in the form {pem: [, + * passphrase: ]}. The object form can only occur in an array. + * object.passphrase is optional. Encrypted keys will be decrypted with + * object.passphrase if provided, or options.passphrase if it is not. + */ + key?: string | Buffer | Array | undefined; + /** + * Name of an OpenSSL engine to get private key from. Should be used + * together with privateKeyIdentifier. + */ + privateKeyEngine?: string | undefined; + /** + * Identifier of a private key managed by an OpenSSL engine. Should be + * used together with privateKeyEngine. Should not be set together with + * key, because both options define a private key in different ways. + */ + privateKeyIdentifier?: string | undefined; + /** + * Optionally set the maximum TLS version to allow. One + * of `'TLSv1.3'`, `'TLSv1.2'`, `'TLSv1.1'`, or `'TLSv1'`. Cannot be specified along with the + * `secureProtocol` option, use one or the other. + * **Default:** `'TLSv1.3'`, unless changed using CLI options. Using + * `--tls-max-v1.2` sets the default to `'TLSv1.2'`. Using `--tls-max-v1.3` sets the default to + * `'TLSv1.3'`. If multiple of the options are provided, the highest maximum is used. + */ + maxVersion?: SecureVersion | undefined; + /** + * Optionally set the minimum TLS version to allow. One + * of `'TLSv1.3'`, `'TLSv1.2'`, `'TLSv1.1'`, or `'TLSv1'`. Cannot be specified along with the + * `secureProtocol` option, use one or the other. It is not recommended to use + * less than TLSv1.2, but it may be required for interoperability. + * **Default:** `'TLSv1.2'`, unless changed using CLI options. Using + * `--tls-v1.0` sets the default to `'TLSv1'`. Using `--tls-v1.1` sets the default to + * `'TLSv1.1'`. Using `--tls-min-v1.3` sets the default to + * 'TLSv1.3'. If multiple of the options are provided, the lowest minimum is used. + */ + minVersion?: SecureVersion | undefined; + /** + * Shared passphrase used for a single private key and/or a PFX. + */ + passphrase?: string | undefined; + /** + * PFX or PKCS12 encoded private key and certificate chain. pfx is an + * alternative to providing key and cert individually. PFX is usually + * encrypted, if it is, passphrase will be used to decrypt it. Multiple + * PFX can be provided either as an array of unencrypted PFX buffers, + * or an array of objects in the form {buf: [, + * passphrase: ]}. The object form can only occur in an array. + * object.passphrase is optional. 
Encrypted PFX will be decrypted with + * object.passphrase if provided, or options.passphrase if it is not. + */ + pfx?: string | Buffer | Array | undefined; + /** + * Optionally affect the OpenSSL protocol behavior, which is not + * usually necessary. This should be used carefully if at all! Value is + * a numeric bitmask of the SSL_OP_* options from OpenSSL Options + */ + secureOptions?: number | undefined; // Value is a numeric bitmask of the `SSL_OP_*` options + /** + * Legacy mechanism to select the TLS protocol version to use, it does + * not support independent control of the minimum and maximum version, + * and does not support limiting the protocol to TLSv1.3. Use + * minVersion and maxVersion instead. The possible values are listed as + * SSL_METHODS, use the function names as strings. For example, use + * 'TLSv1_1_method' to force TLS version 1.1, or 'TLS_method' to allow + * any TLS protocol version up to TLSv1.3. It is not recommended to use + * TLS versions less than 1.2, but it may be required for + * interoperability. Default: none, see minVersion. + */ + secureProtocol?: string | undefined; + /** + * Opaque identifier used by servers to ensure session state is not + * shared between applications. Unused by clients. + */ + sessionIdContext?: string | undefined; + /** + * 48-bytes of cryptographically strong pseudo-random data. + * See Session Resumption for more information. + */ + ticketKeys?: Buffer | undefined; + /** + * The number of seconds after which a TLS session created by the + * server will no longer be resumable. See Session Resumption for more + * information. Default: 300. + */ + sessionTimeout?: number | undefined; + } + interface SecureContext { + context: any; + } + /** + * Verifies the certificate `cert` is issued to `hostname`. + * + * Returns [Error](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Error) object, populating it with `reason`, `host`, and `cert` on + * failure. On success, returns [undefined](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#Undefined_type). + * + * This function is intended to be used in combination with the`checkServerIdentity` option that can be passed to {@link connect} and as + * such operates on a `certificate object`. For other purposes, consider using `x509.checkHost()` instead. + * + * This function can be overwritten by providing an alternative function as the`options.checkServerIdentity` option that is passed to `tls.connect()`. The + * overwriting function can call `tls.checkServerIdentity()` of course, to augment + * the checks done with additional verification. + * + * This function is only called if the certificate passed all other checks, such as + * being issued by trusted CA (`options.ca`). + * + * Earlier versions of Node.js incorrectly accepted certificates for a given`hostname` if a matching `uniformResourceIdentifier` subject alternative name + * was present (see [CVE-2021-44531](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-44531)). Applications that wish to accept`uniformResourceIdentifier` subject alternative names can use + * a custom`options.checkServerIdentity` function that implements the desired behavior. + * @since v0.8.4 + * @param hostname The host name or IP address to verify the certificate against. + * @param cert A `certificate object` representing the peer's certificate. + */ + function checkServerIdentity(hostname: string, cert: PeerCertificate): Error | undefined; + /** + * Creates a new {@link Server}. 
The `secureConnectionListener`, if provided, is + * automatically set as a listener for the `'secureConnection'` event. + * + * The `ticketKeys` options is automatically shared between `cluster` module + * workers. + * + * The following illustrates a simple echo server: + * + * ```js + * const tls = require('tls'); + * const fs = require('fs'); + * + * const options = { + * key: fs.readFileSync('server-key.pem'), + * cert: fs.readFileSync('server-cert.pem'), + * + * // This is necessary only if using client certificate authentication. + * requestCert: true, + * + * // This is necessary only if the client uses a self-signed certificate. + * ca: [ fs.readFileSync('client-cert.pem') ] + * }; + * + * const server = tls.createServer(options, (socket) => { + * console.log('server connected', + * socket.authorized ? 'authorized' : 'unauthorized'); + * socket.write('welcome!\n'); + * socket.setEncoding('utf8'); + * socket.pipe(socket); + * }); + * server.listen(8000, () => { + * console.log('server bound'); + * }); + * ``` + * + * The server can be tested by connecting to it using the example client from {@link connect}. + * @since v0.3.2 + */ + function createServer(secureConnectionListener?: (socket: TLSSocket) => void): Server; + function createServer(options: TlsOptions, secureConnectionListener?: (socket: TLSSocket) => void): Server; + /** + * The `callback` function, if specified, will be added as a listener for the `'secureConnect'` event. + * + * `tls.connect()` returns a {@link TLSSocket} object. + * + * Unlike the `https` API, `tls.connect()` does not enable the + * SNI (Server Name Indication) extension by default, which may cause some + * servers to return an incorrect certificate or reject the connection + * altogether. To enable SNI, set the `servername` option in addition + * to `host`. + * + * The following illustrates a client for the echo server example from {@link createServer}: + * + * ```js + * // Assumes an echo server that is listening on port 8000. + * const tls = require('tls'); + * const fs = require('fs'); + * + * const options = { + * // Necessary only if the server requires client certificate authentication. + * key: fs.readFileSync('client-key.pem'), + * cert: fs.readFileSync('client-cert.pem'), + * + * // Necessary only if the server uses a self-signed certificate. + * ca: [ fs.readFileSync('server-cert.pem') ], + * + * // Necessary only if the server's cert isn't for "localhost". + * checkServerIdentity: () => { return null; }, + * }; + * + * const socket = tls.connect(8000, options, () => { + * console.log('client connected', + * socket.authorized ? 'authorized' : 'unauthorized'); + * process.stdin.pipe(socket); + * process.stdin.resume(); + * }); + * socket.setEncoding('utf8'); + * socket.on('data', (data) => { + * console.log(data); + * }); + * socket.on('end', () => { + * console.log('server ends connection'); + * }); + * ``` + * @since v0.11.3 + */ + function connect(options: ConnectionOptions, secureConnectListener?: () => void): TLSSocket; + function connect(port: number, host?: string, options?: ConnectionOptions, secureConnectListener?: () => void): TLSSocket; + function connect(port: number, options?: ConnectionOptions, secureConnectListener?: () => void): TLSSocket; + /** + * Creates a new secure pair object with two streams, one of which reads and writes + * the encrypted data and the other of which reads and writes the cleartext data. 
+ * Generally, the encrypted stream is piped to/from an incoming encrypted data + * stream and the cleartext one is used as a replacement for the initial encrypted + * stream. + * + * `tls.createSecurePair()` returns a `tls.SecurePair` object with `cleartext` and`encrypted` stream properties. + * + * Using `cleartext` has the same API as {@link TLSSocket}. + * + * The `tls.createSecurePair()` method is now deprecated in favor of`tls.TLSSocket()`. For example, the code: + * + * ```js + * pair = tls.createSecurePair(// ... ); + * pair.encrypted.pipe(socket); + * socket.pipe(pair.encrypted); + * ``` + * + * can be replaced by: + * + * ```js + * secureSocket = tls.TLSSocket(socket, options); + * ``` + * + * where `secureSocket` has the same API as `pair.cleartext`. + * @since v0.3.2 + * @deprecated Since v0.11.3 - Use {@link TLSSocket} instead. + * @param context A secure context object as returned by `tls.createSecureContext()` + * @param isServer `true` to specify that this TLS connection should be opened as a server. + * @param requestCert `true` to specify whether a server should request a certificate from a connecting client. Only applies when `isServer` is `true`. + * @param rejectUnauthorized If not `false` a server automatically reject clients with invalid certificates. Only applies when `isServer` is `true`. + */ + function createSecurePair(context?: SecureContext, isServer?: boolean, requestCert?: boolean, rejectUnauthorized?: boolean): SecurePair; + /** + * {@link createServer} sets the default value of the `honorCipherOrder` option + * to `true`, other APIs that create secure contexts leave it unset. + * + * {@link createServer} uses a 128 bit truncated SHA1 hash value generated + * from `process.argv` as the default value of the `sessionIdContext` option, other + * APIs that create secure contexts have no default value. + * + * The `tls.createSecureContext()` method creates a `SecureContext` object. It is + * usable as an argument to several `tls` APIs, such as {@link createServer} and `server.addContext()`, but has no public methods. + * + * A key is _required_ for ciphers that use certificates. Either `key` or`pfx` can be used to provide it. + * + * If the `ca` option is not given, then Node.js will default to using [Mozilla's publicly trusted list of + * CAs](https://hg.mozilla.org/mozilla-central/raw-file/tip/security/nss/lib/ckfw/builtins/certdata.txt). + * @since v0.11.13 + */ + function createSecureContext(options?: SecureContextOptions): SecureContext; + /** + * Returns an array with the names of the supported TLS ciphers. The names are + * lower-case for historical reasons, but must be uppercased to be used in + * the `ciphers` option of {@link createSecureContext}. + * + * Not all supported ciphers are enabled by default. See `Modifying the default TLS cipher suite`. + * + * Cipher names that start with `'tls_'` are for TLSv1.3, all the others are for + * TLSv1.2 and below. + * + * ```js + * console.log(tls.getCiphers()); // ['aes128-gcm-sha256', 'aes128-sha', ...] + * ``` + * @since v0.10.2 + */ + function getCiphers(): string[]; + /** + * The default curve name to use for ECDH key agreement in a tls server. + * The default value is 'auto'. See tls.createSecureContext() for further + * information. + */ + let DEFAULT_ECDH_CURVE: string; + /** + * The default value of the maxVersion option of + * tls.createSecureContext(). It can be assigned any of the supported TLS + * protocol versions, 'TLSv1.3', 'TLSv1.2', 'TLSv1.1', or 'TLSv1'. 
Default: + * 'TLSv1.3', unless changed using CLI options. Using --tls-max-v1.2 sets + * the default to 'TLSv1.2'. Using --tls-max-v1.3 sets the default to + * 'TLSv1.3'. If multiple of the options are provided, the highest maximum + * is used. + */ + let DEFAULT_MAX_VERSION: SecureVersion; + /** + * The default value of the minVersion option of tls.createSecureContext(). + * It can be assigned any of the supported TLS protocol versions, + * 'TLSv1.3', 'TLSv1.2', 'TLSv1.1', or 'TLSv1'. Default: 'TLSv1.2', unless + * changed using CLI options. Using --tls-min-v1.0 sets the default to + * 'TLSv1'. Using --tls-min-v1.1 sets the default to 'TLSv1.1'. Using + * --tls-min-v1.3 sets the default to 'TLSv1.3'. If multiple of the options + * are provided, the lowest minimum is used. + */ + let DEFAULT_MIN_VERSION: SecureVersion; + /** + * An immutable array of strings representing the root certificates (in PEM + * format) used for verifying peer certificates. This is the default value + * of the ca option to tls.createSecureContext(). + */ + const rootCertificates: ReadonlyArray; +} +declare module 'node:tls' { + export * from 'tls'; +} diff --git a/node_modules/@types/node/ts4.8/trace_events.d.ts b/node_modules/@types/node/ts4.8/trace_events.d.ts new file mode 100755 index 0000000..d47aa93 --- /dev/null +++ b/node_modules/@types/node/ts4.8/trace_events.d.ts @@ -0,0 +1,171 @@ +/** + * The `trace_events` module provides a mechanism to centralize tracing information + * generated by V8, Node.js core, and userspace code. + * + * Tracing can be enabled with the `--trace-event-categories` command-line flag + * or by using the `trace_events` module. The `--trace-event-categories` flag + * accepts a list of comma-separated category names. + * + * The available categories are: + * + * * `node`: An empty placeholder. + * * `node.async_hooks`: Enables capture of detailed `async_hooks` trace data. + * The `async_hooks` events have a unique `asyncId` and a special `triggerId` `triggerAsyncId` property. + * * `node.bootstrap`: Enables capture of Node.js bootstrap milestones. + * * `node.console`: Enables capture of `console.time()` and `console.count()`output. + * * `node.dns.native`: Enables capture of trace data for DNS queries. + * * `node.environment`: Enables capture of Node.js Environment milestones. + * * `node.fs.sync`: Enables capture of trace data for file system sync methods. + * * `node.perf`: Enables capture of `Performance API` measurements. + * * `node.perf.usertiming`: Enables capture of only Performance API User Timing + * measures and marks. + * * `node.perf.timerify`: Enables capture of only Performance API timerify + * measurements. + * * `node.promises.rejections`: Enables capture of trace data tracking the number + * of unhandled Promise rejections and handled-after-rejections. + * * `node.vm.script`: Enables capture of trace data for the `vm` module's`runInNewContext()`, `runInContext()`, and `runInThisContext()` methods. + * * `v8`: The `V8` events are GC, compiling, and execution related. + * + * By default the `node`, `node.async_hooks`, and `v8` categories are enabled. + * + * ```bash + * node --trace-event-categories v8,node,node.async_hooks server.js + * ``` + * + * Prior versions of Node.js required the use of the `--trace-events-enabled`flag to enable trace events. This requirement has been removed. However, the`--trace-events-enabled` flag _may_ still be + * used and will enable the`node`, `node.async_hooks`, and `v8` trace event categories by default. 
+ * + * ```bash + * node --trace-events-enabled + * + * # is equivalent to + * + * node --trace-event-categories v8,node,node.async_hooks + * ``` + * + * Alternatively, trace events may be enabled using the `trace_events` module: + * + * ```js + * const trace_events = require('trace_events'); + * const tracing = trace_events.createTracing({ categories: ['node.perf'] }); + * tracing.enable(); // Enable trace event capture for the 'node.perf' category + * + * // do work + * + * tracing.disable(); // Disable trace event capture for the 'node.perf' category + * ``` + * + * Running Node.js with tracing enabled will produce log files that can be opened + * in the [`chrome://tracing`](https://www.chromium.org/developers/how-tos/trace-event-profiling-tool) tab of Chrome. + * + * The logging file is by default called `node_trace.${rotation}.log`, where`${rotation}` is an incrementing log-rotation id. The filepath pattern can + * be specified with `--trace-event-file-pattern` that accepts a template + * string that supports `${rotation}` and `${pid}`: + * + * ```bash + * node --trace-event-categories v8 --trace-event-file-pattern '${pid}-${rotation}.log' server.js + * ``` + * + * To guarantee that the log file is properly generated after signal events like`SIGINT`, `SIGTERM`, or `SIGBREAK`, make sure to have the appropriate handlers + * in your code, such as: + * + * ```js + * process.on('SIGINT', function onSigint() { + * console.info('Received SIGINT.'); + * process.exit(130); // Or applicable exit code depending on OS and signal + * }); + * ``` + * + * The tracing system uses the same time source + * as the one used by `process.hrtime()`. + * However the trace-event timestamps are expressed in microseconds, + * unlike `process.hrtime()` which returns nanoseconds. + * + * The features from this module are not available in `Worker` threads. + * @experimental + * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/trace_events.js) + */ +declare module 'trace_events' { + /** + * The `Tracing` object is used to enable or disable tracing for sets of + * categories. Instances are created using the + * `trace_events.createTracing()` method. + * + * When created, the `Tracing` object is disabled. Calling the + * `tracing.enable()` method adds the categories to the set of enabled trace + * event categories. Calling `tracing.disable()` will remove the categories + * from the set of enabled trace event categories. + */ + interface Tracing { + /** + * A comma-separated list of the trace event categories covered by this + * `Tracing` object. + */ + readonly categories: string; + /** + * Disables this `Tracing` object. + * + * Only trace event categories _not_ covered by other enabled `Tracing` + * objects and _not_ specified by the `--trace-event-categories` flag + * will be disabled. + */ + disable(): void; + /** + * Enables this `Tracing` object for the set of categories covered by + * the `Tracing` object. + */ + enable(): void; + /** + * `true` only if the `Tracing` object has been enabled. + */ + readonly enabled: boolean; + } + interface CreateTracingOptions { + /** + * An array of trace category names. Values included in the array are + * coerced to a string when possible. An error will be thrown if the + * value cannot be coerced. + */ + categories: string[]; + } + /** + * Creates and returns a `Tracing` object for the given set of `categories`. 
+ * + * ```js + * const trace_events = require('trace_events'); + * const categories = ['node.perf', 'node.async_hooks']; + * const tracing = trace_events.createTracing({ categories }); + * tracing.enable(); + * // do stuff + * tracing.disable(); + * ``` + * @since v10.0.0 + * @return . + */ + function createTracing(options: CreateTracingOptions): Tracing; + /** + * Returns a comma-separated list of all currently-enabled trace event + * categories. The current set of enabled trace event categories is determined + * by the _union_ of all currently-enabled `Tracing` objects and any categories + * enabled using the `--trace-event-categories` flag. + * + * Given the file `test.js` below, the command`node --trace-event-categories node.perf test.js` will print`'node.async_hooks,node.perf'` to the console. + * + * ```js + * const trace_events = require('trace_events'); + * const t1 = trace_events.createTracing({ categories: ['node.async_hooks'] }); + * const t2 = trace_events.createTracing({ categories: ['node.perf'] }); + * const t3 = trace_events.createTracing({ categories: ['v8'] }); + * + * t1.enable(); + * t2.enable(); + * + * console.log(trace_events.getEnabledCategories()); + * ``` + * @since v10.0.0 + */ + function getEnabledCategories(): string | undefined; +} +declare module 'node:trace_events' { + export * from 'trace_events'; +} diff --git a/node_modules/@types/node/ts4.8/tty.d.ts b/node_modules/@types/node/ts4.8/tty.d.ts new file mode 100755 index 0000000..6473f8d --- /dev/null +++ b/node_modules/@types/node/ts4.8/tty.d.ts @@ -0,0 +1,206 @@ +/** + * The `tty` module provides the `tty.ReadStream` and `tty.WriteStream` classes. + * In most cases, it will not be necessary or possible to use this module directly. + * However, it can be accessed using: + * + * ```js + * const tty = require('tty'); + * ``` + * + * When Node.js detects that it is being run with a text terminal ("TTY") + * attached, `process.stdin` will, by default, be initialized as an instance of`tty.ReadStream` and both `process.stdout` and `process.stderr` will, by + * default, be instances of `tty.WriteStream`. The preferred method of determining + * whether Node.js is being run within a TTY context is to check that the value of + * the `process.stdout.isTTY` property is `true`: + * + * ```console + * $ node -p -e "Boolean(process.stdout.isTTY)" + * true + * $ node -p -e "Boolean(process.stdout.isTTY)" | cat + * false + * ``` + * + * In most cases, there should be little to no reason for an application to + * manually create instances of the `tty.ReadStream` and `tty.WriteStream`classes. + * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/tty.js) + */ +declare module 'tty' { + import * as net from 'node:net'; + /** + * The `tty.isatty()` method returns `true` if the given `fd` is associated with + * a TTY and `false` if it is not, including whenever `fd` is not a non-negative + * integer. + * @since v0.5.8 + * @param fd A numeric file descriptor + */ + function isatty(fd: number): boolean; + /** + * Represents the readable side of a TTY. In normal circumstances `process.stdin` will be the only `tty.ReadStream` instance in a Node.js + * process and there should be no reason to create additional instances. + * @since v0.5.8 + */ + class ReadStream extends net.Socket { + constructor(fd: number, options?: net.SocketConstructorOpts); + /** + * A `boolean` that is `true` if the TTY is currently configured to operate as a + * raw device. Defaults to `false`. 
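+         *
+         * For example (a minimal sketch; only meaningful when `process.stdin` is
+         * attached to a TTY):
+         *
+         * ```js
+         * if (process.stdin.isTTY) {
+         *   process.stdin.setRawMode(true);
+         *   console.log(process.stdin.isRaw); // true
+         * }
+         * ```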
+ * @since v0.7.7 + */ + isRaw: boolean; + /** + * Allows configuration of `tty.ReadStream` so that it operates as a raw device. + * + * When in raw mode, input is always available character-by-character, not + * including modifiers. Additionally, all special processing of characters by the + * terminal is disabled, including echoing input + * characters. Ctrl+C will no longer cause a `SIGINT` when + * in this mode. + * @since v0.7.7 + * @param mode If `true`, configures the `tty.ReadStream` to operate as a raw device. If `false`, configures the `tty.ReadStream` to operate in its default mode. The `readStream.isRaw` + * property will be set to the resulting mode. + * @return The read stream instance. + */ + setRawMode(mode: boolean): this; + /** + * A `boolean` that is always `true` for `tty.ReadStream` instances. + * @since v0.5.8 + */ + isTTY: boolean; + } + /** + * -1 - to the left from cursor + * 0 - the entire line + * 1 - to the right from cursor + */ + type Direction = -1 | 0 | 1; + /** + * Represents the writable side of a TTY. In normal circumstances,`process.stdout` and `process.stderr` will be the only`tty.WriteStream` instances created for a Node.js process and there + * should be no reason to create additional instances. + * @since v0.5.8 + */ + class WriteStream extends net.Socket { + constructor(fd: number); + addListener(event: string, listener: (...args: any[]) => void): this; + addListener(event: 'resize', listener: () => void): this; + emit(event: string | symbol, ...args: any[]): boolean; + emit(event: 'resize'): boolean; + on(event: string, listener: (...args: any[]) => void): this; + on(event: 'resize', listener: () => void): this; + once(event: string, listener: (...args: any[]) => void): this; + once(event: 'resize', listener: () => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependListener(event: 'resize', listener: () => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: 'resize', listener: () => void): this; + /** + * `writeStream.clearLine()` clears the current line of this `WriteStream` in a + * direction identified by `dir`. + * @since v0.7.7 + * @param callback Invoked once the operation completes. + * @return `false` if the stream wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`. + */ + clearLine(dir: Direction, callback?: () => void): boolean; + /** + * `writeStream.clearScreenDown()` clears this `WriteStream` from the current + * cursor down. + * @since v0.7.7 + * @param callback Invoked once the operation completes. + * @return `false` if the stream wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`. + */ + clearScreenDown(callback?: () => void): boolean; + /** + * `writeStream.cursorTo()` moves this `WriteStream`'s cursor to the specified + * position. + * @since v0.7.7 + * @param callback Invoked once the operation completes. + * @return `false` if the stream wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`. + */ + cursorTo(x: number, y?: number, callback?: () => void): boolean; + cursorTo(x: number, callback: () => void): boolean; + /** + * `writeStream.moveCursor()` moves this `WriteStream`'s cursor _relative_ to its + * current position. 
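+         *
+         * For example (a minimal sketch; assumes `process.stdout` is attached to a TTY):
+         *
+         * ```js
+         * process.stdout.write('working...');
+         * process.stdout.moveCursor(-10, 0); // back to the start of the text
+         * process.stdout.write('done      \n'); // overwrite it
+         * ```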
+ * @since v0.7.7 + * @param callback Invoked once the operation completes. + * @return `false` if the stream wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`. + */ + moveCursor(dx: number, dy: number, callback?: () => void): boolean; + /** + * Returns: + * + * * `1` for 2, + * * `4` for 16, + * * `8` for 256, + * * `24` for 16,777,216 colors supported. + * + * Use this to determine what colors the terminal supports. Due to the nature of + * colors in terminals it is possible to either have false positives or false + * negatives. It depends on process information and the environment variables that + * may lie about what terminal is used. + * It is possible to pass in an `env` object to simulate the usage of a specific + * terminal. This can be useful to check how specific environment settings behave. + * + * To enforce a specific color support, use one of the below environment settings. + * + * * 2 colors: `FORCE_COLOR = 0` (Disables colors) + * * 16 colors: `FORCE_COLOR = 1` + * * 256 colors: `FORCE_COLOR = 2` + * * 16,777,216 colors: `FORCE_COLOR = 3` + * + * Disabling color support is also possible by using the `NO_COLOR` and`NODE_DISABLE_COLORS` environment variables. + * @since v9.9.0 + * @param [env=process.env] An object containing the environment variables to check. This enables simulating the usage of a specific terminal. + */ + getColorDepth(env?: object): number; + /** + * Returns `true` if the `writeStream` supports at least as many colors as provided + * in `count`. Minimum support is 2 (black and white). + * + * This has the same false positives and negatives as described in `writeStream.getColorDepth()`. + * + * ```js + * process.stdout.hasColors(); + * // Returns true or false depending on if `stdout` supports at least 16 colors. + * process.stdout.hasColors(256); + * // Returns true or false depending on if `stdout` supports at least 256 colors. + * process.stdout.hasColors({ TMUX: '1' }); + * // Returns true. + * process.stdout.hasColors(2 ** 24, { TMUX: '1' }); + * // Returns false (the environment setting pretends to support 2 ** 8 colors). + * ``` + * @since v11.13.0, v10.16.0 + * @param [count=16] The number of colors that are requested (minimum 2). + * @param [env=process.env] An object containing the environment variables to check. This enables simulating the usage of a specific terminal. + */ + hasColors(count?: number): boolean; + hasColors(env?: object): boolean; + hasColors(count: number, env?: object): boolean; + /** + * `writeStream.getWindowSize()` returns the size of the TTY + * corresponding to this `WriteStream`. The array is of the type`[numColumns, numRows]` where `numColumns` and `numRows` represent the number + * of columns and rows in the corresponding TTY. + * @since v0.7.7 + */ + getWindowSize(): [number, number]; + /** + * A `number` specifying the number of columns the TTY currently has. This property + * is updated whenever the `'resize'` event is emitted. + * @since v0.7.7 + */ + columns: number; + /** + * A `number` specifying the number of rows the TTY currently has. This property + * is updated whenever the `'resize'` event is emitted. + * @since v0.7.7 + */ + rows: number; + /** + * A `boolean` that is always `true`. 
+ * @since v0.5.8 + */ + isTTY: boolean; + } +} +declare module 'node:tty' { + export * from 'tty'; +} diff --git a/node_modules/@types/node/ts4.8/url.d.ts b/node_modules/@types/node/ts4.8/url.d.ts new file mode 100755 index 0000000..e172acb --- /dev/null +++ b/node_modules/@types/node/ts4.8/url.d.ts @@ -0,0 +1,897 @@ +/** + * The `url` module provides utilities for URL resolution and parsing. It can be + * accessed using: + * + * ```js + * import url from 'url'; + * ``` + * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/url.js) + */ +declare module 'url' { + import { Blob as NodeBlob } from 'node:buffer'; + import { ClientRequestArgs } from 'node:http'; + import { ParsedUrlQuery, ParsedUrlQueryInput } from 'node:querystring'; + // Input to `url.format` + interface UrlObject { + auth?: string | null | undefined; + hash?: string | null | undefined; + host?: string | null | undefined; + hostname?: string | null | undefined; + href?: string | null | undefined; + pathname?: string | null | undefined; + protocol?: string | null | undefined; + search?: string | null | undefined; + slashes?: boolean | null | undefined; + port?: string | number | null | undefined; + query?: string | null | ParsedUrlQueryInput | undefined; + } + // Output of `url.parse` + interface Url { + auth: string | null; + hash: string | null; + host: string | null; + hostname: string | null; + href: string; + path: string | null; + pathname: string | null; + protocol: string | null; + search: string | null; + slashes: boolean | null; + port: string | null; + query: string | null | ParsedUrlQuery; + } + interface UrlWithParsedQuery extends Url { + query: ParsedUrlQuery; + } + interface UrlWithStringQuery extends Url { + query: string | null; + } + /** + * The `url.parse()` method takes a URL string, parses it, and returns a URL + * object. + * + * A `TypeError` is thrown if `urlString` is not a string. + * + * A `URIError` is thrown if the `auth` property is present but cannot be decoded. + * + * Use of the legacy `url.parse()` method is discouraged. Users should + * use the WHATWG `URL` API. Because the `url.parse()` method uses a + * lenient, non-standard algorithm for parsing URL strings, security + * issues can be introduced. Specifically, issues with [host name spoofing](https://hackerone.com/reports/678487) and + * incorrect handling of usernames and passwords have been identified. + * + * Deprecation of this API has been shelved for now primarily due to the the + * inability of the [WHATWG API to parse relative URLs](https://github.com/nodejs/node/issues/12682#issuecomment-1154492373). + * [Discussions are ongoing](https://github.com/whatwg/url/issues/531) for the best way to resolve this. + * + * @since v0.1.25 + * @param urlString The URL string to parse. + * @param [parseQueryString=false] If `true`, the `query` property will always be set to an object returned by the {@link querystring} module's `parse()` method. If `false`, the `query` property + * on the returned URL object will be an unparsed, undecoded string. + * @param [slashesDenoteHost=false] If `true`, the first token after the literal string `//` and preceding the next `/` will be interpreted as the `host`. For instance, given `//foo/bar`, the + * result would be `{host: 'foo', pathname: '/bar'}` rather than `{pathname: '//foo/bar'}`. 
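+     *
+     * For example (a minimal sketch of the legacy parser; the WHATWG `URL` API is
+     * preferred for new code):
+     *
+     * ```js
+     * const url = require('url');
+     * const parsed = url.parse('https://example.com/some/path?page=1', true);
+     * console.log(parsed.hostname);   // 'example.com'
+     * console.log(parsed.query.page); // '1'
+     * ```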
+ */ + function parse(urlString: string): UrlWithStringQuery; + function parse(urlString: string, parseQueryString: false | undefined, slashesDenoteHost?: boolean): UrlWithStringQuery; + function parse(urlString: string, parseQueryString: true, slashesDenoteHost?: boolean): UrlWithParsedQuery; + function parse(urlString: string, parseQueryString: boolean, slashesDenoteHost?: boolean): Url; + /** + * The `url.format()` method returns a formatted URL string derived from`urlObject`. + * + * ```js + * const url = require('url'); + * url.format({ + * protocol: 'https', + * hostname: 'example.com', + * pathname: '/some/path', + * query: { + * page: 1, + * format: 'json' + * } + * }); + * + * // => 'https://example.com/some/path?page=1&format=json' + * ``` + * + * If `urlObject` is not an object or a string, `url.format()` will throw a `TypeError`. + * + * The formatting process operates as follows: + * + * * A new empty string `result` is created. + * * If `urlObject.protocol` is a string, it is appended as-is to `result`. + * * Otherwise, if `urlObject.protocol` is not `undefined` and is not a string, an `Error` is thrown. + * * For all string values of `urlObject.protocol` that _do not end_ with an ASCII + * colon (`:`) character, the literal string `:` will be appended to `result`. + * * If either of the following conditions is true, then the literal string `//`will be appended to `result`: + * * `urlObject.slashes` property is true; + * * `urlObject.protocol` begins with `http`, `https`, `ftp`, `gopher`, or`file`; + * * If the value of the `urlObject.auth` property is truthy, and either`urlObject.host` or `urlObject.hostname` are not `undefined`, the value of`urlObject.auth` will be coerced into a string + * and appended to `result`followed by the literal string `@`. + * * If the `urlObject.host` property is `undefined` then: + * * If the `urlObject.hostname` is a string, it is appended to `result`. + * * Otherwise, if `urlObject.hostname` is not `undefined` and is not a string, + * an `Error` is thrown. + * * If the `urlObject.port` property value is truthy, and `urlObject.hostname`is not `undefined`: + * * The literal string `:` is appended to `result`, and + * * The value of `urlObject.port` is coerced to a string and appended to`result`. + * * Otherwise, if the `urlObject.host` property value is truthy, the value of`urlObject.host` is coerced to a string and appended to `result`. + * * If the `urlObject.pathname` property is a string that is not an empty string: + * * If the `urlObject.pathname`_does not start_ with an ASCII forward slash + * (`/`), then the literal string `'/'` is appended to `result`. + * * The value of `urlObject.pathname` is appended to `result`. + * * Otherwise, if `urlObject.pathname` is not `undefined` and is not a string, an `Error` is thrown. + * * If the `urlObject.search` property is `undefined` and if the `urlObject.query`property is an `Object`, the literal string `?` is appended to `result`followed by the output of calling the + * `querystring` module's `stringify()`method passing the value of `urlObject.query`. + * * Otherwise, if `urlObject.search` is a string: + * * If the value of `urlObject.search`_does not start_ with the ASCII question + * mark (`?`) character, the literal string `?` is appended to `result`. + * * The value of `urlObject.search` is appended to `result`. + * * Otherwise, if `urlObject.search` is not `undefined` and is not a string, an `Error` is thrown. 
+ * * If the `urlObject.hash` property is a string: + * * If the value of `urlObject.hash`_does not start_ with the ASCII hash (`#`) + * character, the literal string `#` is appended to `result`. + * * The value of `urlObject.hash` is appended to `result`. + * * Otherwise, if the `urlObject.hash` property is not `undefined` and is not a + * string, an `Error` is thrown. + * * `result` is returned. + * @since v0.1.25 + * @deprecated Legacy: Use the WHATWG URL API instead. + * @param urlObject A URL object (as returned by `url.parse()` or constructed otherwise). If a string, it is converted to an object by passing it to `url.parse()`. + */ + function format(urlObject: URL, options?: URLFormatOptions): string; + /** + * The `url.format()` method returns a formatted URL string derived from`urlObject`. + * + * ```js + * const url = require('url'); + * url.format({ + * protocol: 'https', + * hostname: 'example.com', + * pathname: '/some/path', + * query: { + * page: 1, + * format: 'json' + * } + * }); + * + * // => 'https://example.com/some/path?page=1&format=json' + * ``` + * + * If `urlObject` is not an object or a string, `url.format()` will throw a `TypeError`. + * + * The formatting process operates as follows: + * + * * A new empty string `result` is created. + * * If `urlObject.protocol` is a string, it is appended as-is to `result`. + * * Otherwise, if `urlObject.protocol` is not `undefined` and is not a string, an `Error` is thrown. + * * For all string values of `urlObject.protocol` that _do not end_ with an ASCII + * colon (`:`) character, the literal string `:` will be appended to `result`. + * * If either of the following conditions is true, then the literal string `//`will be appended to `result`: + * * `urlObject.slashes` property is true; + * * `urlObject.protocol` begins with `http`, `https`, `ftp`, `gopher`, or`file`; + * * If the value of the `urlObject.auth` property is truthy, and either`urlObject.host` or `urlObject.hostname` are not `undefined`, the value of`urlObject.auth` will be coerced into a string + * and appended to `result`followed by the literal string `@`. + * * If the `urlObject.host` property is `undefined` then: + * * If the `urlObject.hostname` is a string, it is appended to `result`. + * * Otherwise, if `urlObject.hostname` is not `undefined` and is not a string, + * an `Error` is thrown. + * * If the `urlObject.port` property value is truthy, and `urlObject.hostname`is not `undefined`: + * * The literal string `:` is appended to `result`, and + * * The value of `urlObject.port` is coerced to a string and appended to`result`. + * * Otherwise, if the `urlObject.host` property value is truthy, the value of`urlObject.host` is coerced to a string and appended to `result`. + * * If the `urlObject.pathname` property is a string that is not an empty string: + * * If the `urlObject.pathname`_does not start_ with an ASCII forward slash + * (`/`), then the literal string `'/'` is appended to `result`. + * * The value of `urlObject.pathname` is appended to `result`. + * * Otherwise, if `urlObject.pathname` is not `undefined` and is not a string, an `Error` is thrown. + * * If the `urlObject.search` property is `undefined` and if the `urlObject.query`property is an `Object`, the literal string `?` is appended to `result`followed by the output of calling the + * `querystring` module's `stringify()`method passing the value of `urlObject.query`. 
+ * * Otherwise, if `urlObject.search` is a string: + * * If the value of `urlObject.search`_does not start_ with the ASCII question + * mark (`?`) character, the literal string `?` is appended to `result`. + * * The value of `urlObject.search` is appended to `result`. + * * Otherwise, if `urlObject.search` is not `undefined` and is not a string, an `Error` is thrown. + * * If the `urlObject.hash` property is a string: + * * If the value of `urlObject.hash`_does not start_ with the ASCII hash (`#`) + * character, the literal string `#` is appended to `result`. + * * The value of `urlObject.hash` is appended to `result`. + * * Otherwise, if the `urlObject.hash` property is not `undefined` and is not a + * string, an `Error` is thrown. + * * `result` is returned. + * @since v0.1.25 + * @deprecated Legacy: Use the WHATWG URL API instead. + * @param urlObject A URL object (as returned by `url.parse()` or constructed otherwise). If a string, it is converted to an object by passing it to `url.parse()`. + */ + function format(urlObject: UrlObject | string): string; + /** + * The `url.resolve()` method resolves a target URL relative to a base URL in a + * manner similar to that of a web browser resolving an anchor tag. + * + * ```js + * const url = require('url'); + * url.resolve('/one/two/three', 'four'); // '/one/two/four' + * url.resolve('http://example.com/', '/one'); // 'http://example.com/one' + * url.resolve('http://example.com/one', '/two'); // 'http://example.com/two' + * ``` + * + * To achieve the same result using the WHATWG URL API: + * + * ```js + * function resolve(from, to) { + * const resolvedUrl = new URL(to, new URL(from, 'resolve://')); + * if (resolvedUrl.protocol === 'resolve:') { + * // `from` is a relative URL. + * const { pathname, search, hash } = resolvedUrl; + * return pathname + search + hash; + * } + * return resolvedUrl.toString(); + * } + * + * resolve('/one/two/three', 'four'); // '/one/two/four' + * resolve('http://example.com/', '/one'); // 'http://example.com/one' + * resolve('http://example.com/one', '/two'); // 'http://example.com/two' + * ``` + * @since v0.1.25 + * @deprecated Legacy: Use the WHATWG URL API instead. + * @param from The base URL to use if `to` is a relative URL. + * @param to The target URL to resolve. + */ + function resolve(from: string, to: string): string; + /** + * Returns the [Punycode](https://tools.ietf.org/html/rfc5891#section-4.4) ASCII serialization of the `domain`. If `domain` is an + * invalid domain, the empty string is returned. + * + * It performs the inverse operation to {@link domainToUnicode}. + * + * This feature is only available if the `node` executable was compiled with `ICU` enabled. If not, the domain names are passed through unchanged. + * + * ```js + * import url from 'url'; + * + * console.log(url.domainToASCII('español.com')); + * // Prints xn--espaol-zwa.com + * console.log(url.domainToASCII('中文.com')); + * // Prints xn--fiq228c.com + * console.log(url.domainToASCII('xn--iñvalid.com')); + * // Prints an empty string + * ``` + * @since v7.4.0, v6.13.0 + */ + function domainToASCII(domain: string): string; + /** + * Returns the Unicode serialization of the `domain`. If `domain` is an invalid + * domain, the empty string is returned. + * + * It performs the inverse operation to {@link domainToASCII}. + * + * This feature is only available if the `node` executable was compiled with `ICU` enabled. If not, the domain names are passed through unchanged. 
+ * + * ```js + * import url from 'url'; + * + * console.log(url.domainToUnicode('xn--espaol-zwa.com')); + * // Prints español.com + * console.log(url.domainToUnicode('xn--fiq228c.com')); + * // Prints 中文.com + * console.log(url.domainToUnicode('xn--iñvalid.com')); + * // Prints an empty string + * ``` + * @since v7.4.0, v6.13.0 + */ + function domainToUnicode(domain: string): string; + /** + * This function ensures the correct decodings of percent-encoded characters as + * well as ensuring a cross-platform valid absolute path string. + * + * ```js + * import { fileURLToPath } from 'url'; + * + * const __filename = fileURLToPath(import.meta.url); + * + * new URL('file:///C:/path/').pathname; // Incorrect: /C:/path/ + * fileURLToPath('file:///C:/path/'); // Correct: C:\path\ (Windows) + * + * new URL('file://nas/foo.txt').pathname; // Incorrect: /foo.txt + * fileURLToPath('file://nas/foo.txt'); // Correct: \\nas\foo.txt (Windows) + * + * new URL('file:///你好.txt').pathname; // Incorrect: /%E4%BD%A0%E5%A5%BD.txt + * fileURLToPath('file:///你好.txt'); // Correct: /你好.txt (POSIX) + * + * new URL('file:///hello world').pathname; // Incorrect: /hello%20world + * fileURLToPath('file:///hello world'); // Correct: /hello world (POSIX) + * ``` + * @since v10.12.0 + * @param url The file URL string or URL object to convert to a path. + * @return The fully-resolved platform-specific Node.js file path. + */ + function fileURLToPath(url: string | URL): string; + /** + * This function ensures that `path` is resolved absolutely, and that the URL + * control characters are correctly encoded when converting into a File URL. + * + * ```js + * import { pathToFileURL } from 'url'; + * + * new URL('/foo#1', 'file:'); // Incorrect: file:///foo#1 + * pathToFileURL('/foo#1'); // Correct: file:///foo%231 (POSIX) + * + * new URL('/some/path%.c', 'file:'); // Incorrect: file:///some/path%.c + * pathToFileURL('/some/path%.c'); // Correct: file:///some/path%25.c (POSIX) + * ``` + * @since v10.12.0 + * @param path The path to convert to a File URL. + * @return The file URL object. + */ + function pathToFileURL(path: string): URL; + /** + * This utility function converts a URL object into an ordinary options object as + * expected by the `http.request()` and `https.request()` APIs. + * + * ```js + * import { urlToHttpOptions } from 'url'; + * const myURL = new URL('https://a:b@測試?abc#foo'); + * + * console.log(urlToHttpOptions(myURL)); + * /* + * { + * protocol: 'https:', + * hostname: 'xn--g6w251d', + * hash: '#foo', + * search: '?abc', + * pathname: '/', + * path: '/?abc', + * href: 'https://a:b@xn--g6w251d/?abc#foo', + * auth: 'a:b' + * } + * + * ``` + * @since v15.7.0, v14.18.0 + * @param url The `WHATWG URL` object to convert to an options object. + * @return Options object + */ + function urlToHttpOptions(url: URL): ClientRequestArgs; + interface URLFormatOptions { + auth?: boolean | undefined; + fragment?: boolean | undefined; + search?: boolean | undefined; + unicode?: boolean | undefined; + } + /** + * Browser-compatible `URL` class, implemented by following the WHATWG URL + * Standard. [Examples of parsed URLs](https://url.spec.whatwg.org/#example-url-parsing) may be found in the Standard itself. + * The `URL` class is also available on the global object. + * + * In accordance with browser conventions, all properties of `URL` objects + * are implemented as getters and setters on the class prototype, rather than as + * data properties on the object itself. 
Thus, unlike `legacy urlObject` s, + * using the `delete` keyword on any properties of `URL` objects (e.g. `delete myURL.protocol`, `delete myURL.pathname`, etc) has no effect but will still + * return `true`. + * @since v7.0.0, v6.13.0 + */ + class URL { + /** + * Creates a `'blob:nodedata:...'` URL string that represents the given `Blob` object and can be used to retrieve the `Blob` later. + * + * ```js + * const { + * Blob, + * resolveObjectURL, + * } = require('buffer'); + * + * const blob = new Blob(['hello']); + * const id = URL.createObjectURL(blob); + * + * // later... + * + * const otherBlob = resolveObjectURL(id); + * console.log(otherBlob.size); + * ``` + * + * The data stored by the registered `Blob` will be retained in memory until`URL.revokeObjectURL()` is called to remove it. + * + * `Blob` objects are registered within the current thread. If using Worker + * Threads, `Blob` objects registered within one Worker will not be available + * to other workers or the main thread. + * @since v16.7.0 + * @experimental + */ + static createObjectURL(blob: NodeBlob): string; + /** + * Removes the stored `Blob` identified by the given ID. Attempting to revoke a + * ID that isn’t registered will silently fail. + * @since v16.7.0 + * @experimental + * @param id A `'blob:nodedata:...` URL string returned by a prior call to `URL.createObjectURL()`. + */ + static revokeObjectURL(objectUrl: string): void; + constructor(input: string, base?: string | URL); + /** + * Gets and sets the fragment portion of the URL. + * + * ```js + * const myURL = new URL('https://example.org/foo#bar'); + * console.log(myURL.hash); + * // Prints #bar + * + * myURL.hash = 'baz'; + * console.log(myURL.href); + * // Prints https://example.org/foo#baz + * ``` + * + * Invalid URL characters included in the value assigned to the `hash` property + * are `percent-encoded`. The selection of which characters to + * percent-encode may vary somewhat from what the {@link parse} and {@link format} methods would produce. + */ + hash: string; + /** + * Gets and sets the host portion of the URL. + * + * ```js + * const myURL = new URL('https://example.org:81/foo'); + * console.log(myURL.host); + * // Prints example.org:81 + * + * myURL.host = 'example.com:82'; + * console.log(myURL.href); + * // Prints https://example.com:82/foo + * ``` + * + * Invalid host values assigned to the `host` property are ignored. + */ + host: string; + /** + * Gets and sets the host name portion of the URL. The key difference between`url.host` and `url.hostname` is that `url.hostname` does _not_ include the + * port. + * + * ```js + * const myURL = new URL('https://example.org:81/foo'); + * console.log(myURL.hostname); + * // Prints example.org + * + * // Setting the hostname does not change the port + * myURL.hostname = 'example.com:82'; + * console.log(myURL.href); + * // Prints https://example.com:81/foo + * + * // Use myURL.host to change the hostname and port + * myURL.host = 'example.org:82'; + * console.log(myURL.href); + * // Prints https://example.org:82/foo + * ``` + * + * Invalid host name values assigned to the `hostname` property are ignored. + */ + hostname: string; + /** + * Gets and sets the serialized URL. 
+ * + * ```js + * const myURL = new URL('https://example.org/foo'); + * console.log(myURL.href); + * // Prints https://example.org/foo + * + * myURL.href = 'https://example.com/bar'; + * console.log(myURL.href); + * // Prints https://example.com/bar + * ``` + * + * Getting the value of the `href` property is equivalent to calling {@link toString}. + * + * Setting the value of this property to a new value is equivalent to creating a + * new `URL` object using `new URL(value)`. Each of the `URL`object's properties will be modified. + * + * If the value assigned to the `href` property is not a valid URL, a `TypeError`will be thrown. + */ + href: string; + /** + * Gets the read-only serialization of the URL's origin. + * + * ```js + * const myURL = new URL('https://example.org/foo/bar?baz'); + * console.log(myURL.origin); + * // Prints https://example.org + * ``` + * + * ```js + * const idnURL = new URL('https://測試'); + * console.log(idnURL.origin); + * // Prints https://xn--g6w251d + * + * console.log(idnURL.hostname); + * // Prints xn--g6w251d + * ``` + */ + readonly origin: string; + /** + * Gets and sets the password portion of the URL. + * + * ```js + * const myURL = new URL('https://abc:xyz@example.com'); + * console.log(myURL.password); + * // Prints xyz + * + * myURL.password = '123'; + * console.log(myURL.href); + * // Prints https://abc:123@example.com + * ``` + * + * Invalid URL characters included in the value assigned to the `password` property + * are `percent-encoded`. The selection of which characters to + * percent-encode may vary somewhat from what the {@link parse} and {@link format} methods would produce. + */ + password: string; + /** + * Gets and sets the path portion of the URL. + * + * ```js + * const myURL = new URL('https://example.org/abc/xyz?123'); + * console.log(myURL.pathname); + * // Prints /abc/xyz + * + * myURL.pathname = '/abcdef'; + * console.log(myURL.href); + * // Prints https://example.org/abcdef?123 + * ``` + * + * Invalid URL characters included in the value assigned to the `pathname`property are `percent-encoded`. The selection of which characters + * to percent-encode may vary somewhat from what the {@link parse} and {@link format} methods would produce. + */ + pathname: string; + /** + * Gets and sets the port portion of the URL. + * + * The port value may be a number or a string containing a number in the range`0` to `65535` (inclusive). Setting the value to the default port of the`URL` objects given `protocol` will + * result in the `port` value becoming + * the empty string (`''`). + * + * The port value can be an empty string in which case the port depends on + * the protocol/scheme: + * + * + * + * Upon assigning a value to the port, the value will first be converted to a + * string using `.toString()`. + * + * If that string is invalid but it begins with a number, the leading number is + * assigned to `port`. + * If the number lies outside the range denoted above, it is ignored. 
+ * + * ```js + * const myURL = new URL('https://example.org:8888'); + * console.log(myURL.port); + * // Prints 8888 + * + * // Default ports are automatically transformed to the empty string + * // (HTTPS protocol's default port is 443) + * myURL.port = '443'; + * console.log(myURL.port); + * // Prints the empty string + * console.log(myURL.href); + * // Prints https://example.org/ + * + * myURL.port = 1234; + * console.log(myURL.port); + * // Prints 1234 + * console.log(myURL.href); + * // Prints https://example.org:1234/ + * + * // Completely invalid port strings are ignored + * myURL.port = 'abcd'; + * console.log(myURL.port); + * // Prints 1234 + * + * // Leading numbers are treated as a port number + * myURL.port = '5678abcd'; + * console.log(myURL.port); + * // Prints 5678 + * + * // Non-integers are truncated + * myURL.port = 1234.5678; + * console.log(myURL.port); + * // Prints 1234 + * + * // Out-of-range numbers which are not represented in scientific notation + * // will be ignored. + * myURL.port = 1e10; // 10000000000, will be range-checked as described below + * console.log(myURL.port); + * // Prints 1234 + * ``` + * + * Numbers which contain a decimal point, + * such as floating-point numbers or numbers in scientific notation, + * are not an exception to this rule. + * Leading numbers up to the decimal point will be set as the URL's port, + * assuming they are valid: + * + * ```js + * myURL.port = 4.567e21; + * console.log(myURL.port); + * // Prints 4 (because it is the leading number in the string '4.567e21') + * ``` + */ + port: string; + /** + * Gets and sets the protocol portion of the URL. + * + * ```js + * const myURL = new URL('https://example.org'); + * console.log(myURL.protocol); + * // Prints https: + * + * myURL.protocol = 'ftp'; + * console.log(myURL.href); + * // Prints ftp://example.org/ + * ``` + * + * Invalid URL protocol values assigned to the `protocol` property are ignored. + */ + protocol: string; + /** + * Gets and sets the serialized query portion of the URL. + * + * ```js + * const myURL = new URL('https://example.org/abc?123'); + * console.log(myURL.search); + * // Prints ?123 + * + * myURL.search = 'abc=xyz'; + * console.log(myURL.href); + * // Prints https://example.org/abc?abc=xyz + * ``` + * + * Any invalid URL characters appearing in the value assigned the `search`property will be `percent-encoded`. The selection of which + * characters to percent-encode may vary somewhat from what the {@link parse} and {@link format} methods would produce. + */ + search: string; + /** + * Gets the `URLSearchParams` object representing the query parameters of the + * URL. This property is read-only but the `URLSearchParams` object it provides + * can be used to mutate the URL instance; to replace the entirety of query + * parameters of the URL, use the {@link search} setter. See `URLSearchParams` documentation for details. + * + * Use care when using `.searchParams` to modify the `URL` because, + * per the WHATWG specification, the `URLSearchParams` object uses + * different rules to determine which characters to percent-encode. For + * instance, the `URL` object will not percent encode the ASCII tilde (`~`) + * character, while `URLSearchParams` will always encode it: + * + * ```js + * const myUrl = new URL('https://example.org/abc?foo=~bar'); + * + * console.log(myUrl.search); // prints ?foo=~bar + * + * // Modify the URL via searchParams... 
+ * myUrl.searchParams.sort(); + * + * console.log(myUrl.search); // prints ?foo=%7Ebar + * ``` + */ + readonly searchParams: URLSearchParams; + /** + * Gets and sets the username portion of the URL. + * + * ```js + * const myURL = new URL('https://abc:xyz@example.com'); + * console.log(myURL.username); + * // Prints abc + * + * myURL.username = '123'; + * console.log(myURL.href); + * // Prints https://123:xyz@example.com/ + * ``` + * + * Any invalid URL characters appearing in the value assigned the `username`property will be `percent-encoded`. The selection of which + * characters to percent-encode may vary somewhat from what the {@link parse} and {@link format} methods would produce. + */ + username: string; + /** + * The `toString()` method on the `URL` object returns the serialized URL. The + * value returned is equivalent to that of {@link href} and {@link toJSON}. + */ + toString(): string; + /** + * The `toJSON()` method on the `URL` object returns the serialized URL. The + * value returned is equivalent to that of {@link href} and {@link toString}. + * + * This method is automatically called when an `URL` object is serialized + * with [`JSON.stringify()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify). + * + * ```js + * const myURLs = [ + * new URL('https://www.example.com'), + * new URL('https://test.example.org'), + * ]; + * console.log(JSON.stringify(myURLs)); + * // Prints ["https://www.example.com/","https://test.example.org/"] + * ``` + */ + toJSON(): string; + } + /** + * The `URLSearchParams` API provides read and write access to the query of a`URL`. The `URLSearchParams` class can also be used standalone with one of the + * four following constructors. + * The `URLSearchParams` class is also available on the global object. + * + * The WHATWG `URLSearchParams` interface and the `querystring` module have + * similar purpose, but the purpose of the `querystring` module is more + * general, as it allows the customization of delimiter characters (`&` and `=`). + * On the other hand, this API is designed purely for URL query strings. + * + * ```js + * const myURL = new URL('https://example.org/?abc=123'); + * console.log(myURL.searchParams.get('abc')); + * // Prints 123 + * + * myURL.searchParams.append('abc', 'xyz'); + * console.log(myURL.href); + * // Prints https://example.org/?abc=123&abc=xyz + * + * myURL.searchParams.delete('abc'); + * myURL.searchParams.set('a', 'b'); + * console.log(myURL.href); + * // Prints https://example.org/?a=b + * + * const newSearchParams = new URLSearchParams(myURL.searchParams); + * // The above is equivalent to + * // const newSearchParams = new URLSearchParams(myURL.search); + * + * newSearchParams.append('a', 'c'); + * console.log(myURL.href); + * // Prints https://example.org/?a=b + * console.log(newSearchParams.toString()); + * // Prints a=b&a=c + * + * // newSearchParams.toString() is implicitly called + * myURL.search = newSearchParams; + * console.log(myURL.href); + * // Prints https://example.org/?a=b&a=c + * newSearchParams.delete('a'); + * console.log(myURL.href); + * // Prints https://example.org/?a=b&a=c + * ``` + * @since v7.5.0, v6.13.0 + */ + class URLSearchParams implements Iterable<[string, string]> { + constructor(init?: URLSearchParams | string | Record> | Iterable<[string, string]> | ReadonlyArray<[string, string]>); + /** + * Append a new name-value pair to the query string. 
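The constructor forms mentioned above can be exercised as follows. A sketch; note that an array value in the record form is stringified rather than expanded into duplicate keys:

```js
const fromString = new URLSearchParams('user=abc&query=xyz');
const fromRecord = new URLSearchParams({ user: 'abc', query: ['first', 'second'] });
const fromPairs  = new URLSearchParams([['user', 'abc'], ['query', 'first']]);
const copy       = new URLSearchParams(fromString); // copies another URLSearchParams

console.log(fromRecord.getAll('query')); // [ 'first,second' ] (array value was stringified)
console.log(fromPairs.toString());       // 'user=abc&query=first'
console.log(copy.get('user'));           // 'abc'
```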
+ */ + append(name: string, value: string): void; + /** + * Remove all name-value pairs whose name is `name`. + */ + delete(name: string): void; + /** + * Returns an ES6 `Iterator` over each of the name-value pairs in the query. + * Each item of the iterator is a JavaScript `Array`. The first item of the `Array`is the `name`, the second item of the `Array` is the `value`. + * + * Alias for `urlSearchParams[@@iterator]()`. + */ + entries(): IterableIterator<[string, string]>; + /** + * Iterates over each name-value pair in the query and invokes the given function. + * + * ```js + * const myURL = new URL('https://example.org/?a=b&c=d'); + * myURL.searchParams.forEach((value, name, searchParams) => { + * console.log(name, value, myURL.searchParams === searchParams); + * }); + * // Prints: + * // a b true + * // c d true + * ``` + * @param fn Invoked for each name-value pair in the query + * @param thisArg To be used as `this` value for when `fn` is called + */ + forEach(callback: (this: TThis, value: string, name: string, searchParams: URLSearchParams) => void, thisArg?: TThis): void; + /** + * Returns the value of the first name-value pair whose name is `name`. If there + * are no such pairs, `null` is returned. + * @return or `null` if there is no name-value pair with the given `name`. + */ + get(name: string): string | null; + /** + * Returns the values of all name-value pairs whose name is `name`. If there are + * no such pairs, an empty array is returned. + */ + getAll(name: string): string[]; + /** + * Returns `true` if there is at least one name-value pair whose name is `name`. + */ + has(name: string): boolean; + /** + * Returns an ES6 `Iterator` over the names of each name-value pair. + * + * ```js + * const params = new URLSearchParams('foo=bar&foo=baz'); + * for (const name of params.keys()) { + * console.log(name); + * } + * // Prints: + * // foo + * // foo + * ``` + */ + keys(): IterableIterator; + /** + * Sets the value in the `URLSearchParams` object associated with `name` to`value`. If there are any pre-existing name-value pairs whose names are `name`, + * set the first such pair's value to `value` and remove all others. If not, + * append the name-value pair to the query string. + * + * ```js + * const params = new URLSearchParams(); + * params.append('foo', 'bar'); + * params.append('foo', 'baz'); + * params.append('abc', 'def'); + * console.log(params.toString()); + * // Prints foo=bar&foo=baz&abc=def + * + * params.set('foo', 'def'); + * params.set('xyz', 'opq'); + * console.log(params.toString()); + * // Prints foo=def&abc=def&xyz=opq + * ``` + */ + set(name: string, value: string): void; + /** + * Sort all existing name-value pairs in-place by their names. Sorting is done + * with a [stable sorting algorithm](https://en.wikipedia.org/wiki/Sorting_algorithm#Stability), so relative order between name-value pairs + * with the same name is preserved. + * + * This method can be used, in particular, to increase cache hits. + * + * ```js + * const params = new URLSearchParams('query[]=abc&type=search&query[]=123'); + * params.sort(); + * console.log(params.toString()); + * // Prints query%5B%5D=abc&query%5B%5D=123&type=search + * ``` + * @since v7.7.0, v6.13.0 + */ + sort(): void; + /** + * Returns the search parameters serialized as a string, with characters + * percent-encoded where necessary. + */ + toString(): string; + /** + * Returns an ES6 `Iterator` over the values of each name-value pair. 
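Because `entries()` backs the default iterator, the name-value pairs can also be consumed directly with `for...of`; a short sketch:

```js
const params = new URLSearchParams('a=1&b=2&a=3');
for (const [name, value] of params) {
  console.log(name, value);
}
// a 1
// b 2
// a 3
console.log([...params.values()]); // [ '1', '2', '3' ]
```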
+ */ + values(): IterableIterator; + [Symbol.iterator](): IterableIterator<[string, string]>; + } + import { URL as _URL, URLSearchParams as _URLSearchParams } from 'url'; + global { + interface URLSearchParams extends _URLSearchParams {} + interface URL extends _URL {} + interface Global { + URL: typeof _URL; + URLSearchParams: typeof _URLSearchParams; + } + /** + * `URL` class is a global reference for `require('url').URL` + * https://nodejs.org/api/url.html#the-whatwg-url-api + * @since v10.0.0 + */ + var URL: typeof globalThis extends { + onmessage: any; + URL: infer T; + } + ? T + : typeof _URL; + /** + * `URLSearchParams` class is a global reference for `require('url').URLSearchParams` + * https://nodejs.org/api/url.html#class-urlsearchparams + * @since v10.0.0 + */ + var URLSearchParams: typeof globalThis extends { + onmessage: any; + URLSearchParams: infer T; + } + ? T + : typeof _URLSearchParams; + } +} +declare module 'node:url' { + export * from 'url'; +} diff --git a/node_modules/@types/node/ts4.8/util.d.ts b/node_modules/@types/node/ts4.8/util.d.ts new file mode 100755 index 0000000..6976c2b --- /dev/null +++ b/node_modules/@types/node/ts4.8/util.d.ts @@ -0,0 +1,1850 @@ +/** + * The `util` module supports the needs of Node.js internal APIs. Many of the + * utilities are useful for application and module developers as well. To access + * it: + * + * ```js + * const util = require('util'); + * ``` + * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/util.js) + */ +declare module 'util' { + import * as types from 'node:util/types'; + export interface InspectOptions { + /** + * If set to `true`, getters are going to be + * inspected as well. If set to `'get'` only getters without setter are going + * to be inspected. If set to `'set'` only getters having a corresponding + * setter are going to be inspected. This might cause side effects depending on + * the getter function. + * @default `false` + */ + getters?: 'get' | 'set' | boolean | undefined; + showHidden?: boolean | undefined; + /** + * @default 2 + */ + depth?: number | null | undefined; + colors?: boolean | undefined; + customInspect?: boolean | undefined; + showProxy?: boolean | undefined; + maxArrayLength?: number | null | undefined; + /** + * Specifies the maximum number of characters to + * include when formatting. Set to `null` or `Infinity` to show all elements. + * Set to `0` or negative to show no characters. + * @default 10000 + */ + maxStringLength?: number | null | undefined; + breakLength?: number | undefined; + /** + * Setting this to `false` causes each object key + * to be displayed on a new line. It will also add new lines to text that is + * longer than `breakLength`. If set to a number, the most `n` inner elements + * are united on a single line as long as all properties fit into + * `breakLength`. Short array elements are also grouped together. Note that no + * text will be reduced below 16 characters, no matter the `breakLength` size. + * For more information, see the example below. 
+ * @default `true` + */ + compact?: boolean | number | undefined; + sorted?: boolean | ((a: string, b: string) => number) | undefined; + } + export type Style = 'special' | 'number' | 'bigint' | 'boolean' | 'undefined' | 'null' | 'string' | 'symbol' | 'date' | 'regexp' | 'module'; + export type CustomInspectFunction = (depth: number, options: InspectOptionsStylized) => string; + export interface InspectOptionsStylized extends InspectOptions { + stylize(text: string, styleType: Style): string; + } + /** + * The `util.format()` method returns a formatted string using the first argument + * as a `printf`\-like format string which can contain zero or more format + * specifiers. Each specifier is replaced with the converted value from the + * corresponding argument. Supported specifiers are: + * + * If a specifier does not have a corresponding argument, it is not replaced: + * + * ```js + * util.format('%s:%s', 'foo'); + * // Returns: 'foo:%s' + * ``` + * + * Values that are not part of the format string are formatted using`util.inspect()` if their type is not `string`. + * + * If there are more arguments passed to the `util.format()` method than the + * number of specifiers, the extra arguments are concatenated to the returned + * string, separated by spaces: + * + * ```js + * util.format('%s:%s', 'foo', 'bar', 'baz'); + * // Returns: 'foo:bar baz' + * ``` + * + * If the first argument does not contain a valid format specifier, `util.format()`returns a string that is the concatenation of all arguments separated by spaces: + * + * ```js + * util.format(1, 2, 3); + * // Returns: '1 2 3' + * ``` + * + * If only one argument is passed to `util.format()`, it is returned as it is + * without any formatting: + * + * ```js + * util.format('%% %s'); + * // Returns: '%% %s' + * ``` + * + * `util.format()` is a synchronous method that is intended as a debugging tool. + * Some input values can have a significant performance overhead that can block the + * event loop. Use this function with care and never in a hot code path. + * @since v0.5.3 + * @param format A `printf`-like format string. + */ + export function format(format?: any, ...param: any[]): string; + /** + * This function is identical to {@link format}, except in that it takes + * an `inspectOptions` argument which specifies options that are passed along to {@link inspect}. + * + * ```js + * util.formatWithOptions({ colors: true }, 'See object %O', { foo: 42 }); + * // Returns 'See object { foo: 42 }', where `42` is colored as a number + * // when printed to a terminal. + * ``` + * @since v10.0.0 + */ + export function formatWithOptions(inspectOptions: InspectOptions, format?: any, ...param: any[]): string; + /** + * Returns the string name for a numeric error code that comes from a Node.js API. + * The mapping between error codes and error names is platform-dependent. + * See `Common System Errors` for the names of common errors. + * + * ```js + * fs.access('file/that/does/not/exist', (err) => { + * const name = util.getSystemErrorName(err.errno); + * console.error(name); // ENOENT + * }); + * ``` + * @since v9.7.0 + */ + export function getSystemErrorName(err: number): string; + /** + * Returns a Map of all system error codes available from the Node.js API. + * The mapping between error codes and error names is platform-dependent. + * See `Common System Errors` for the names of common errors. 
+ * + * ```js + * fs.access('file/that/does/not/exist', (err) => { + * const errorMap = util.getSystemErrorMap(); + * const name = errorMap.get(err.errno); + * console.error(name); // ENOENT + * }); + * ``` + * @since v16.0.0, v14.17.0 + */ + export function getSystemErrorMap(): Map; + /** + * The `util.log()` method prints the given `string` to `stdout` with an included + * timestamp. + * + * ```js + * const util = require('util'); + * + * util.log('Timestamped message.'); + * ``` + * @since v0.3.0 + * @deprecated Since v6.0.0 - Use a third party module instead. + */ + export function log(string: string): void; + /** + * Returns the `string` after replacing any surrogate code points + * (or equivalently, any unpaired surrogate code units) with the + * Unicode "replacement character" U+FFFD. + * @since v16.8.0, v14.18.0 + */ + export function toUSVString(string: string): string; + /** + * Creates and returns an `AbortController` instance whose `AbortSignal` is marked + * as transferable and can be used with `structuredClone()` or `postMessage()`. + * @since v18.11.0 + * @returns A transferable AbortController + */ + export function transferableAbortController(): AbortController; + /** + * Marks the given {AbortSignal} as transferable so that it can be used with + * `structuredClone()` and `postMessage()`. + * + * ```js + * const signal = transferableAbortSignal(AbortSignal.timeout(100)); + * const channel = new MessageChannel(); + * channel.port2.postMessage(signal, [signal]); + * ``` + * @since v18.11.0 + * @param signal The AbortSignal + * @returns The same AbortSignal + */ + export function transferableAbortSignal(signal: AbortSignal): AbortSignal; + /** + * The `util.inspect()` method returns a string representation of `object` that is + * intended for debugging. The output of `util.inspect` may change at any time + * and should not be depended upon programmatically. Additional `options` may be + * passed that alter the result.`util.inspect()` will use the constructor's name and/or `@@toStringTag` to make + * an identifiable tag for an inspected value. 
+ * + * ```js + * class Foo { + * get [Symbol.toStringTag]() { + * return 'bar'; + * } + * } + * + * class Bar {} + * + * const baz = Object.create(null, { [Symbol.toStringTag]: { value: 'foo' } }); + * + * util.inspect(new Foo()); // 'Foo [bar] {}' + * util.inspect(new Bar()); // 'Bar {}' + * util.inspect(baz); // '[foo] {}' + * ``` + * + * Circular references point to their anchor by using a reference index: + * + * ```js + * const { inspect } = require('util'); + * + * const obj = {}; + * obj.a = [obj]; + * obj.b = {}; + * obj.b.inner = obj.b; + * obj.b.obj = obj; + * + * console.log(inspect(obj)); + * // { + * // a: [ [Circular *1] ], + * // b: { inner: [Circular *2], obj: [Circular *1] } + * // } + * ``` + * + * The following example inspects all properties of the `util` object: + * + * ```js + * const util = require('util'); + * + * console.log(util.inspect(util, { showHidden: true, depth: null })); + * ``` + * + * The following example highlights the effect of the `compact` option: + * + * ```js + * const util = require('util'); + * + * const o = { + * a: [1, 2, [[ + * 'Lorem ipsum dolor sit amet,\nconsectetur adipiscing elit, sed do ' + + * 'eiusmod \ntempor incididunt ut labore et dolore magna aliqua.', + * 'test', + * 'foo']], 4], + * b: new Map([['za', 1], ['zb', 'test']]) + * }; + * console.log(util.inspect(o, { compact: true, depth: 5, breakLength: 80 })); + * + * // { a: + * // [ 1, + * // 2, + * // [ [ 'Lorem ipsum dolor sit amet,\nconsectetur [...]', // A long line + * // 'test', + * // 'foo' ] ], + * // 4 ], + * // b: Map(2) { 'za' => 1, 'zb' => 'test' } } + * + * // Setting `compact` to false or an integer creates more reader friendly output. + * console.log(util.inspect(o, { compact: false, depth: 5, breakLength: 80 })); + * + * // { + * // a: [ + * // 1, + * // 2, + * // [ + * // [ + * // 'Lorem ipsum dolor sit amet,\n' + + * // 'consectetur adipiscing elit, sed do eiusmod \n' + + * // 'tempor incididunt ut labore et dolore magna aliqua.', + * // 'test', + * // 'foo' + * // ] + * // ], + * // 4 + * // ], + * // b: Map(2) { + * // 'za' => 1, + * // 'zb' => 'test' + * // } + * // } + * + * // Setting `breakLength` to e.g. 150 will print the "Lorem ipsum" text in a + * // single line. + * ``` + * + * The `showHidden` option allows [`WeakMap`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/WeakMap) and + * [`WeakSet`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/WeakSet) entries to be + * inspected. If there are more entries than `maxArrayLength`, there is no + * guarantee which entries are displayed. That means retrieving the same [`WeakSet`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/WeakSet) entries twice may + * result in different output. Furthermore, entries + * with no remaining strong references may be garbage collected at any time. + * + * ```js + * const { inspect } = require('util'); + * + * const obj = { a: 1 }; + * const obj2 = { b: 2 }; + * const weakSet = new WeakSet([obj, obj2]); + * + * console.log(inspect(weakSet, { showHidden: true })); + * // WeakSet { { a: 1 }, { b: 2 } } + * ``` + * + * The `sorted` option ensures that an object's property insertion order does not + * impact the result of `util.inspect()`. 
+ * + * ```js + * const { inspect } = require('util'); + * const assert = require('assert'); + * + * const o1 = { + * b: [2, 3, 1], + * a: '`a` comes before `b`', + * c: new Set([2, 3, 1]) + * }; + * console.log(inspect(o1, { sorted: true })); + * // { a: '`a` comes before `b`', b: [ 2, 3, 1 ], c: Set(3) { 1, 2, 3 } } + * console.log(inspect(o1, { sorted: (a, b) => b.localeCompare(a) })); + * // { c: Set(3) { 3, 2, 1 }, b: [ 2, 3, 1 ], a: '`a` comes before `b`' } + * + * const o2 = { + * c: new Set([2, 1, 3]), + * a: '`a` comes before `b`', + * b: [2, 3, 1] + * }; + * assert.strict.equal( + * inspect(o1, { sorted: true }), + * inspect(o2, { sorted: true }) + * ); + * ``` + * + * The `numericSeparator` option adds an underscore every three digits to all + * numbers. + * + * ```js + * const { inspect } = require('util'); + * + * const thousand = 1_000; + * const million = 1_000_000; + * const bigNumber = 123_456_789n; + * const bigDecimal = 1_234.123_45; + * + * console.log(thousand, million, bigNumber, bigDecimal); + * // 1_000 1_000_000 123_456_789n 1_234.123_45 + * ``` + * + * `util.inspect()` is a synchronous method intended for debugging. Its maximum + * output length is approximately 128 MB. Inputs that result in longer output will + * be truncated. + * @since v0.3.0 + * @param object Any JavaScript primitive or `Object`. + * @return The representation of `object`. + */ + export function inspect(object: any, showHidden?: boolean, depth?: number | null, color?: boolean): string; + export function inspect(object: any, options?: InspectOptions): string; + export namespace inspect { + let colors: NodeJS.Dict<[number, number]>; + let styles: { + [K in Style]: string; + }; + let defaultOptions: InspectOptions; + /** + * Allows changing inspect settings from the repl. + */ + let replDefaults: InspectOptions; + /** + * That can be used to declare custom inspect functions. + */ + const custom: unique symbol; + } + /** + * Alias for [`Array.isArray()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/isArray). + * + * Returns `true` if the given `object` is an `Array`. Otherwise, returns `false`. + * + * ```js + * const util = require('util'); + * + * util.isArray([]); + * // Returns: true + * util.isArray(new Array()); + * // Returns: true + * util.isArray({}); + * // Returns: false + * ``` + * @since v0.6.0 + * @deprecated Since v4.0.0 - Use `isArray` instead. + */ + export function isArray(object: unknown): object is unknown[]; + /** + * Returns `true` if the given `object` is a `RegExp`. Otherwise, returns `false`. + * + * ```js + * const util = require('util'); + * + * util.isRegExp(/some regexp/); + * // Returns: true + * util.isRegExp(new RegExp('another regexp')); + * // Returns: true + * util.isRegExp({}); + * // Returns: false + * ``` + * @since v0.6.0 + * @deprecated Since v4.0.0 - Deprecated + */ + export function isRegExp(object: unknown): object is RegExp; + /** + * Returns `true` if the given `object` is a `Date`. Otherwise, returns `false`. + * + * ```js + * const util = require('util'); + * + * util.isDate(new Date()); + * // Returns: true + * util.isDate(Date()); + * // false (without 'new' returns a String) + * util.isDate({}); + * // Returns: false + * ``` + * @since v0.6.0 + * @deprecated Since v4.0.0 - Use {@link types.isDate} instead. + */ + export function isDate(object: unknown): object is Date; + /** + * Returns `true` if the given `object` is an `Error`. Otherwise, returns`false`. 
+ * + * ```js + * const util = require('util'); + * + * util.isError(new Error()); + * // Returns: true + * util.isError(new TypeError()); + * // Returns: true + * util.isError({ name: 'Error', message: 'an error occurred' }); + * // Returns: false + * ``` + * + * This method relies on `Object.prototype.toString()` behavior. It is + * possible to obtain an incorrect result when the `object` argument manipulates`@@toStringTag`. + * + * ```js + * const util = require('util'); + * const obj = { name: 'Error', message: 'an error occurred' }; + * + * util.isError(obj); + * // Returns: false + * obj[Symbol.toStringTag] = 'Error'; + * util.isError(obj); + * // Returns: true + * ``` + * @since v0.6.0 + * @deprecated Since v4.0.0 - Use {@link types.isNativeError} instead. + */ + export function isError(object: unknown): object is Error; + /** + * Usage of `util.inherits()` is discouraged. Please use the ES6 `class` and`extends` keywords to get language level inheritance support. Also note + * that the two styles are [semantically incompatible](https://github.com/nodejs/node/issues/4179). + * + * Inherit the prototype methods from one [constructor](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/constructor) into another. The + * prototype of `constructor` will be set to a new object created from`superConstructor`. + * + * This mainly adds some input validation on top of`Object.setPrototypeOf(constructor.prototype, superConstructor.prototype)`. + * As an additional convenience, `superConstructor` will be accessible + * through the `constructor.super_` property. + * + * ```js + * const util = require('util'); + * const EventEmitter = require('events'); + * + * function MyStream() { + * EventEmitter.call(this); + * } + * + * util.inherits(MyStream, EventEmitter); + * + * MyStream.prototype.write = function(data) { + * this.emit('data', data); + * }; + * + * const stream = new MyStream(); + * + * console.log(stream instanceof EventEmitter); // true + * console.log(MyStream.super_ === EventEmitter); // true + * + * stream.on('data', (data) => { + * console.log(`Received data: "${data}"`); + * }); + * stream.write('It works!'); // Received data: "It works!" + * ``` + * + * ES6 example using `class` and `extends`: + * + * ```js + * const EventEmitter = require('events'); + * + * class MyStream extends EventEmitter { + * write(data) { + * this.emit('data', data); + * } + * } + * + * const stream = new MyStream(); + * + * stream.on('data', (data) => { + * console.log(`Received data: "${data}"`); + * }); + * stream.write('With ES6'); + * ``` + * @since v0.3.0 + * @deprecated Legacy: Use ES2015 class syntax and `extends` keyword instead. + */ + export function inherits(constructor: unknown, superConstructor: unknown): void; + export type DebugLoggerFunction = (msg: string, ...param: unknown[]) => void; + export interface DebugLogger extends DebugLoggerFunction { + enabled: boolean; + } + /** + * The `util.debuglog()` method is used to create a function that conditionally + * writes debug messages to `stderr` based on the existence of the `NODE_DEBUG`environment variable. If the `section` name appears within the value of that + * environment variable, then the returned function operates similar to `console.error()`. If not, then the returned function is a no-op. 
+ * + * ```js + * const util = require('util'); + * const debuglog = util.debuglog('foo'); + * + * debuglog('hello from foo [%d]', 123); + * ``` + * + * If this program is run with `NODE_DEBUG=foo` in the environment, then + * it will output something like: + * + * ```console + * FOO 3245: hello from foo [123] + * ``` + * + * where `3245` is the process id. If it is not run with that + * environment variable set, then it will not print anything. + * + * The `section` supports wildcard also: + * + * ```js + * const util = require('util'); + * const debuglog = util.debuglog('foo-bar'); + * + * debuglog('hi there, it\'s foo-bar [%d]', 2333); + * ``` + * + * if it is run with `NODE_DEBUG=foo*` in the environment, then it will output + * something like: + * + * ```console + * FOO-BAR 3257: hi there, it's foo-bar [2333] + * ``` + * + * Multiple comma-separated `section` names may be specified in the `NODE_DEBUG`environment variable: `NODE_DEBUG=fs,net,tls`. + * + * The optional `callback` argument can be used to replace the logging function + * with a different function that doesn't have any initialization or + * unnecessary wrapping. + * + * ```js + * const util = require('util'); + * let debuglog = util.debuglog('internals', (debug) => { + * // Replace with a logging function that optimizes out + * // testing if the section is enabled + * debuglog = debug; + * }); + * ``` + * @since v0.11.3 + * @param section A string identifying the portion of the application for which the `debuglog` function is being created. + * @param callback A callback invoked the first time the logging function is called with a function argument that is a more optimized logging function. + * @return The logging function + */ + export function debuglog(section: string, callback?: (fn: DebugLoggerFunction) => void): DebugLogger; + export const debug: typeof debuglog; + /** + * Returns `true` if the given `object` is a `Boolean`. Otherwise, returns `false`. + * + * ```js + * const util = require('util'); + * + * util.isBoolean(1); + * // Returns: false + * util.isBoolean(0); + * // Returns: false + * util.isBoolean(false); + * // Returns: true + * ``` + * @since v0.11.5 + * @deprecated Since v4.0.0 - Use `typeof value === 'boolean'` instead. + */ + export function isBoolean(object: unknown): object is boolean; + /** + * Returns `true` if the given `object` is a `Buffer`. Otherwise, returns `false`. + * + * ```js + * const util = require('util'); + * + * util.isBuffer({ length: 0 }); + * // Returns: false + * util.isBuffer([]); + * // Returns: false + * util.isBuffer(Buffer.from('hello world')); + * // Returns: true + * ``` + * @since v0.11.5 + * @deprecated Since v4.0.0 - Use `isBuffer` instead. + */ + export function isBuffer(object: unknown): object is Buffer; + /** + * Returns `true` if the given `object` is a `Function`. Otherwise, returns`false`. + * + * ```js + * const util = require('util'); + * + * function Foo() {} + * const Bar = () => {}; + * + * util.isFunction({}); + * // Returns: false + * util.isFunction(Foo); + * // Returns: true + * util.isFunction(Bar); + * // Returns: true + * ``` + * @since v0.11.5 + * @deprecated Since v4.0.0 - Use `typeof value === 'function'` instead. + */ + export function isFunction(object: unknown): boolean; + /** + * Returns `true` if the given `object` is strictly `null`. Otherwise, returns`false`. 
+ * + * ```js + * const util = require('util'); + * + * util.isNull(0); + * // Returns: false + * util.isNull(undefined); + * // Returns: false + * util.isNull(null); + * // Returns: true + * ``` + * @since v0.11.5 + * @deprecated Since v4.0.0 - Use `value === null` instead. + */ + export function isNull(object: unknown): object is null; + /** + * Returns `true` if the given `object` is `null` or `undefined`. Otherwise, + * returns `false`. + * + * ```js + * const util = require('util'); + * + * util.isNullOrUndefined(0); + * // Returns: false + * util.isNullOrUndefined(undefined); + * // Returns: true + * util.isNullOrUndefined(null); + * // Returns: true + * ``` + * @since v0.11.5 + * @deprecated Since v4.0.0 - Use `value === undefined || value === null` instead. + */ + export function isNullOrUndefined(object: unknown): object is null | undefined; + /** + * Returns `true` if the given `object` is a `Number`. Otherwise, returns `false`. + * + * ```js + * const util = require('util'); + * + * util.isNumber(false); + * // Returns: false + * util.isNumber(Infinity); + * // Returns: true + * util.isNumber(0); + * // Returns: true + * util.isNumber(NaN); + * // Returns: true + * ``` + * @since v0.11.5 + * @deprecated Since v4.0.0 - Use `typeof value === 'number'` instead. + */ + export function isNumber(object: unknown): object is number; + /** + * Returns `true` if the given `object` is strictly an `Object`**and** not a`Function` (even though functions are objects in JavaScript). + * Otherwise, returns `false`. + * + * ```js + * const util = require('util'); + * + * util.isObject(5); + * // Returns: false + * util.isObject(null); + * // Returns: false + * util.isObject({}); + * // Returns: true + * util.isObject(() => {}); + * // Returns: false + * ``` + * @since v0.11.5 + * @deprecated Since v4.0.0 - Deprecated: Use `value !== null && typeof value === 'object'` instead. + */ + export function isObject(object: unknown): boolean; + /** + * Returns `true` if the given `object` is a primitive type. Otherwise, returns`false`. + * + * ```js + * const util = require('util'); + * + * util.isPrimitive(5); + * // Returns: true + * util.isPrimitive('foo'); + * // Returns: true + * util.isPrimitive(false); + * // Returns: true + * util.isPrimitive(null); + * // Returns: true + * util.isPrimitive(undefined); + * // Returns: true + * util.isPrimitive({}); + * // Returns: false + * util.isPrimitive(() => {}); + * // Returns: false + * util.isPrimitive(/^$/); + * // Returns: false + * util.isPrimitive(new Date()); + * // Returns: false + * ``` + * @since v0.11.5 + * @deprecated Since v4.0.0 - Use `(typeof value !== 'object' && typeof value !== 'function') || value === null` instead. + */ + export function isPrimitive(object: unknown): boolean; + /** + * Returns `true` if the given `object` is a `string`. Otherwise, returns `false`. + * + * ```js + * const util = require('util'); + * + * util.isString(''); + * // Returns: true + * util.isString('foo'); + * // Returns: true + * util.isString(String('foo')); + * // Returns: true + * util.isString(5); + * // Returns: false + * ``` + * @since v0.11.5 + * @deprecated Since v4.0.0 - Use `typeof value === 'string'` instead. + */ + export function isString(object: unknown): object is string; + /** + * Returns `true` if the given `object` is a `Symbol`. Otherwise, returns `false`. 
+ * + * ```js + * const util = require('util'); + * + * util.isSymbol(5); + * // Returns: false + * util.isSymbol('foo'); + * // Returns: false + * util.isSymbol(Symbol('foo')); + * // Returns: true + * ``` + * @since v0.11.5 + * @deprecated Since v4.0.0 - Use `typeof value === 'symbol'` instead. + */ + export function isSymbol(object: unknown): object is symbol; + /** + * Returns `true` if the given `object` is `undefined`. Otherwise, returns `false`. + * + * ```js + * const util = require('util'); + * + * const foo = undefined; + * util.isUndefined(5); + * // Returns: false + * util.isUndefined(foo); + * // Returns: true + * util.isUndefined(null); + * // Returns: false + * ``` + * @since v0.11.5 + * @deprecated Since v4.0.0 - Use `value === undefined` instead. + */ + export function isUndefined(object: unknown): object is undefined; + /** + * The `util.deprecate()` method wraps `fn` (which may be a function or class) in + * such a way that it is marked as deprecated. + * + * ```js + * const util = require('util'); + * + * exports.obsoleteFunction = util.deprecate(() => { + * // Do something here. + * }, 'obsoleteFunction() is deprecated. Use newShinyFunction() instead.'); + * ``` + * + * When called, `util.deprecate()` will return a function that will emit a`DeprecationWarning` using the `'warning'` event. The warning will + * be emitted and printed to `stderr` the first time the returned function is + * called. After the warning is emitted, the wrapped function is called without + * emitting a warning. + * + * If the same optional `code` is supplied in multiple calls to `util.deprecate()`, + * the warning will be emitted only once for that `code`. + * + * ```js + * const util = require('util'); + * + * const fn1 = util.deprecate(someFunction, someMessage, 'DEP0001'); + * const fn2 = util.deprecate(someOtherFunction, someOtherMessage, 'DEP0001'); + * fn1(); // Emits a deprecation warning with code DEP0001 + * fn2(); // Does not emit a deprecation warning because it has the same code + * ``` + * + * If either the `--no-deprecation` or `--no-warnings` command-line flags are + * used, or if the `process.noDeprecation` property is set to `true`_prior_ to + * the first deprecation warning, the `util.deprecate()` method does nothing. + * + * If the `--trace-deprecation` or `--trace-warnings` command-line flags are set, + * or the `process.traceDeprecation` property is set to `true`, a warning and a + * stack trace are printed to `stderr` the first time the deprecated function is + * called. + * + * If the `--throw-deprecation` command-line flag is set, or the`process.throwDeprecation` property is set to `true`, then an exception will be + * thrown when the deprecated function is called. + * + * The `--throw-deprecation` command-line flag and `process.throwDeprecation`property take precedence over `--trace-deprecation` and`process.traceDeprecation`. + * @since v0.8.0 + * @param fn The function that is being deprecated. + * @param msg A warning message to display when the deprecated function is invoked. + * @param code A deprecation code. See the `list of deprecated APIs` for a list of codes. + * @return The deprecated function wrapped to emit a warning. + */ + export function deprecate(fn: T, msg: string, code?: string): T; + /** + * Returns `true` if there is deep strict equality between `val1` and `val2`. + * Otherwise, returns `false`. + * + * See `assert.deepStrictEqual()` for more information about deep strict + * equality. 
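`util.isDeepStrictEqual()` has no inline example above, so a short sketch of its behaviour:

```js
const util = require('util');

console.log(util.isDeepStrictEqual({ a: 1 }, { a: 1 }));        // true
console.log(util.isDeepStrictEqual({ a: 1 }, { a: '1' }));      // false (strict, no type coercion)
console.log(util.isDeepStrictEqual([1, 2, 3], [1, 2, 3]));      // true
console.log(util.isDeepStrictEqual(new Set([1]), new Set([2]))); // false
```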
+ * @since v9.0.0 + */ + export function isDeepStrictEqual(val1: unknown, val2: unknown): boolean; + /** + * Returns `str` with any ANSI escape codes removed. + * + * ```js + * console.log(util.stripVTControlCharacters('\u001B[4mvalue\u001B[0m')); + * // Prints "value" + * ``` + * @since v16.11.0 + */ + export function stripVTControlCharacters(str: string): string; + /** + * Takes an `async` function (or a function that returns a `Promise`) and returns a + * function following the error-first callback style, i.e. taking + * an `(err, value) => ...` callback as the last argument. In the callback, the + * first argument will be the rejection reason (or `null` if the `Promise`resolved), and the second argument will be the resolved value. + * + * ```js + * const util = require('util'); + * + * async function fn() { + * return 'hello world'; + * } + * const callbackFunction = util.callbackify(fn); + * + * callbackFunction((err, ret) => { + * if (err) throw err; + * console.log(ret); + * }); + * ``` + * + * Will print: + * + * ```text + * hello world + * ``` + * + * The callback is executed asynchronously, and will have a limited stack trace. + * If the callback throws, the process will emit an `'uncaughtException'` event, and if not handled will exit. + * + * Since `null` has a special meaning as the first argument to a callback, if a + * wrapped function rejects a `Promise` with a falsy value as a reason, the value + * is wrapped in an `Error` with the original value stored in a field named`reason`. + * + * ```js + * function fn() { + * return Promise.reject(null); + * } + * const callbackFunction = util.callbackify(fn); + * + * callbackFunction((err, ret) => { + * // When the Promise was rejected with `null` it is wrapped with an Error and + * // the original value is stored in `reason`. 
+ * err && Object.hasOwn(err, 'reason') && err.reason === null; // true + * }); + * ``` + * @since v8.2.0 + * @param original An `async` function + * @return a callback style function + */ + export function callbackify(fn: () => Promise): (callback: (err: NodeJS.ErrnoException) => void) => void; + export function callbackify(fn: () => Promise): (callback: (err: NodeJS.ErrnoException, result: TResult) => void) => void; + export function callbackify(fn: (arg1: T1) => Promise): (arg1: T1, callback: (err: NodeJS.ErrnoException) => void) => void; + export function callbackify(fn: (arg1: T1) => Promise): (arg1: T1, callback: (err: NodeJS.ErrnoException, result: TResult) => void) => void; + export function callbackify(fn: (arg1: T1, arg2: T2) => Promise): (arg1: T1, arg2: T2, callback: (err: NodeJS.ErrnoException) => void) => void; + export function callbackify(fn: (arg1: T1, arg2: T2) => Promise): (arg1: T1, arg2: T2, callback: (err: NodeJS.ErrnoException | null, result: TResult) => void) => void; + export function callbackify(fn: (arg1: T1, arg2: T2, arg3: T3) => Promise): (arg1: T1, arg2: T2, arg3: T3, callback: (err: NodeJS.ErrnoException) => void) => void; + export function callbackify( + fn: (arg1: T1, arg2: T2, arg3: T3) => Promise + ): (arg1: T1, arg2: T2, arg3: T3, callback: (err: NodeJS.ErrnoException | null, result: TResult) => void) => void; + export function callbackify( + fn: (arg1: T1, arg2: T2, arg3: T3, arg4: T4) => Promise + ): (arg1: T1, arg2: T2, arg3: T3, arg4: T4, callback: (err: NodeJS.ErrnoException) => void) => void; + export function callbackify( + fn: (arg1: T1, arg2: T2, arg3: T3, arg4: T4) => Promise + ): (arg1: T1, arg2: T2, arg3: T3, arg4: T4, callback: (err: NodeJS.ErrnoException | null, result: TResult) => void) => void; + export function callbackify( + fn: (arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5) => Promise + ): (arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, callback: (err: NodeJS.ErrnoException) => void) => void; + export function callbackify( + fn: (arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5) => Promise + ): (arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, callback: (err: NodeJS.ErrnoException | null, result: TResult) => void) => void; + export function callbackify( + fn: (arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, arg6: T6) => Promise + ): (arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, arg6: T6, callback: (err: NodeJS.ErrnoException) => void) => void; + export function callbackify( + fn: (arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, arg6: T6) => Promise + ): (arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, arg6: T6, callback: (err: NodeJS.ErrnoException | null, result: TResult) => void) => void; + export interface CustomPromisifyLegacy extends Function { + __promisify__: TCustom; + } + export interface CustomPromisifySymbol extends Function { + [promisify.custom]: TCustom; + } + export type CustomPromisify = CustomPromisifySymbol | CustomPromisifyLegacy; + /** + * Takes a function following the common error-first callback style, i.e. taking + * an `(err, value) => ...` callback as the last argument, and returns a version + * that returns promises. + * + * ```js + * const util = require('util'); + * const fs = require('fs'); + * + * const stat = util.promisify(fs.stat); + * stat('.').then((stats) => { + * // Do something with `stats` + * }).catch((error) => { + * // Handle the error. 
+ * }); + * ``` + * + * Or, equivalently using `async function`s: + * + * ```js + * const util = require('util'); + * const fs = require('fs'); + * + * const stat = util.promisify(fs.stat); + * + * async function callStat() { + * const stats = await stat('.'); + * console.log(`This directory is owned by ${stats.uid}`); + * } + * ``` + * + * If there is an `original[util.promisify.custom]` property present, `promisify`will return its value, see `Custom promisified functions`. + * + * `promisify()` assumes that `original` is a function taking a callback as its + * final argument in all cases. If `original` is not a function, `promisify()`will throw an error. If `original` is a function but its last argument is not + * an error-first callback, it will still be passed an error-first + * callback as its last argument. + * + * Using `promisify()` on class methods or other methods that use `this` may not + * work as expected unless handled specially: + * + * ```js + * const util = require('util'); + * + * class Foo { + * constructor() { + * this.a = 42; + * } + * + * bar(callback) { + * callback(null, this.a); + * } + * } + * + * const foo = new Foo(); + * + * const naiveBar = util.promisify(foo.bar); + * // TypeError: Cannot read property 'a' of undefined + * // naiveBar().then(a => console.log(a)); + * + * naiveBar.call(foo).then((a) => console.log(a)); // '42' + * + * const bindBar = naiveBar.bind(foo); + * bindBar().then((a) => console.log(a)); // '42' + * ``` + * @since v8.0.0 + */ + export function promisify(fn: CustomPromisify): TCustom; + export function promisify(fn: (callback: (err: any, result: TResult) => void) => void): () => Promise; + export function promisify(fn: (callback: (err?: any) => void) => void): () => Promise; + export function promisify(fn: (arg1: T1, callback: (err: any, result: TResult) => void) => void): (arg1: T1) => Promise; + export function promisify(fn: (arg1: T1, callback: (err?: any) => void) => void): (arg1: T1) => Promise; + export function promisify(fn: (arg1: T1, arg2: T2, callback: (err: any, result: TResult) => void) => void): (arg1: T1, arg2: T2) => Promise; + export function promisify(fn: (arg1: T1, arg2: T2, callback: (err?: any) => void) => void): (arg1: T1, arg2: T2) => Promise; + export function promisify(fn: (arg1: T1, arg2: T2, arg3: T3, callback: (err: any, result: TResult) => void) => void): (arg1: T1, arg2: T2, arg3: T3) => Promise; + export function promisify(fn: (arg1: T1, arg2: T2, arg3: T3, callback: (err?: any) => void) => void): (arg1: T1, arg2: T2, arg3: T3) => Promise; + export function promisify( + fn: (arg1: T1, arg2: T2, arg3: T3, arg4: T4, callback: (err: any, result: TResult) => void) => void + ): (arg1: T1, arg2: T2, arg3: T3, arg4: T4) => Promise; + export function promisify(fn: (arg1: T1, arg2: T2, arg3: T3, arg4: T4, callback: (err?: any) => void) => void): (arg1: T1, arg2: T2, arg3: T3, arg4: T4) => Promise; + export function promisify( + fn: (arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, callback: (err: any, result: TResult) => void) => void + ): (arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5) => Promise; + export function promisify( + fn: (arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, callback: (err?: any) => void) => void + ): (arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5) => Promise; + export function promisify(fn: Function): Function; + export namespace promisify { + /** + * That can be used to declare custom promisified variants of functions. 
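+         *
+         * A minimal sketch of declaring one (the names `doSomething` and `value` are illustrative,
+         * not taken from the upstream docs):
+         *
+         * ```js
+         * const util = require('util');
+         *
+         * function doSomething(value, callback) {
+         *   // Ordinary error-first callback API.
+         *   process.nextTick(callback, null, value * 2);
+         * }
+         *
+         * // Hand-written Promise-returning variant that util.promisify() will pick up.
+         * doSomething[util.promisify.custom] = (value) => Promise.resolve(value * 2);
+         *
+         * util.promisify(doSomething)(21).then(console.log); // 42
+         * ```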
+ */ + const custom: unique symbol; + } + /** + * An implementation of the [WHATWG Encoding Standard](https://encoding.spec.whatwg.org/) `TextDecoder` API. + * + * ```js + * const decoder = new TextDecoder(); + * const u8arr = new Uint8Array([72, 101, 108, 108, 111]); + * console.log(decoder.decode(u8arr)); // Hello + * ``` + * @since v8.3.0 + */ + export class TextDecoder { + /** + * The encoding supported by the `TextDecoder` instance. + */ + readonly encoding: string; + /** + * The value will be `true` if decoding errors result in a `TypeError` being + * thrown. + */ + readonly fatal: boolean; + /** + * The value will be `true` if the decoding result will include the byte order + * mark. + */ + readonly ignoreBOM: boolean; + constructor( + encoding?: string, + options?: { + fatal?: boolean | undefined; + ignoreBOM?: boolean | undefined; + } + ); + /** + * Decodes the `input` and returns a string. If `options.stream` is `true`, any + * incomplete byte sequences occurring at the end of the `input` are buffered + * internally and emitted after the next call to `textDecoder.decode()`. + * + * If `textDecoder.fatal` is `true`, decoding errors that occur will result in a`TypeError` being thrown. + * @param input An `ArrayBuffer`, `DataView` or `TypedArray` instance containing the encoded data. + */ + decode( + input?: NodeJS.ArrayBufferView | ArrayBuffer | null, + options?: { + stream?: boolean | undefined; + } + ): string; + } + export interface EncodeIntoResult { + /** + * The read Unicode code units of input. + */ + read: number; + /** + * The written UTF-8 bytes of output. + */ + written: number; + } + export { types }; + + //// TextEncoder/Decoder + /** + * An implementation of the [WHATWG Encoding Standard](https://encoding.spec.whatwg.org/) `TextEncoder` API. All + * instances of `TextEncoder` only support UTF-8 encoding. + * + * ```js + * const encoder = new TextEncoder(); + * const uint8array = encoder.encode('this is some data'); + * ``` + * + * The `TextEncoder` class is also available on the global object. + * @since v8.3.0 + */ + export class TextEncoder { + /** + * The encoding supported by the `TextEncoder` instance. Always set to `'utf-8'`. + */ + readonly encoding: string; + /** + * UTF-8 encodes the `input` string and returns a `Uint8Array` containing the + * encoded bytes. + * @param [input='an empty string'] The text to encode. + */ + encode(input?: string): Uint8Array; + /** + * UTF-8 encodes the `src` string to the `dest` Uint8Array and returns an object + * containing the read Unicode code units and written UTF-8 bytes. + * + * ```js + * const encoder = new TextEncoder(); + * const src = 'this is some data'; + * const dest = new Uint8Array(10); + * const { read, written } = encoder.encodeInto(src, dest); + * ``` + * @param src The text to encode. + * @param dest The array to hold the encode result. + */ + encodeInto(src: string, dest: Uint8Array): EncodeIntoResult; + } + + import { TextDecoder as _TextDecoder, TextEncoder as _TextEncoder } from 'util'; + global { + /** + * `TextDecoder` class is a global reference for `require('util').TextDecoder` + * https://nodejs.org/api/globals.html#textdecoder + * @since v11.0.0 + */ + var TextDecoder: typeof globalThis extends { + onmessage: any; + TextDecoder: infer TextDecoder; + } + ? 
TextDecoder + : typeof _TextDecoder; + + /** + * `TextEncoder` class is a global reference for `require('util').TextEncoder` + * https://nodejs.org/api/globals.html#textencoder + * @since v11.0.0 + */ + var TextEncoder: typeof globalThis extends { + onmessage: any; + TextEncoder: infer TextEncoder; + } + ? TextEncoder + : typeof _TextEncoder; + } + + //// parseArgs + /** + * Provides a high-level API for command-line argument parsing. Takes a + * specification for the expected arguments and returns a structured object + * with the parsed values and positionals. + * + * `config` provides arguments for parsing and configures the parser. It + * supports the following properties: + * + * - `args` The array of argument strings. **Default:** `process.argv` with + * `execPath` and `filename` removed. + * - `options` Arguments known to the parser. Keys of `options` are the long + * names of options and values are objects accepting the following properties: + * + * - `type` Type of argument, which must be either `boolean` (for options + * which do not take values) or `string` (for options which do). + * - `multiple` Whether this option can be provided multiple + * times. If `true`, all values will be collected in an array. If + * `false`, values for the option are last-wins. **Default:** `false`. + * - `short` A single character alias for the option. + * - `default` The default option value when it is not set by args. It + * must be of the same type as the `type` property. When `multiple` + * is `true`, it must be an array. + * + * - `strict`: Whether an error should be thrown when unknown arguments + * are encountered, or when arguments are passed that do not match the + * `type` configured in `options`. **Default:** `true`. + * - `allowPositionals`: Whether this command accepts positional arguments. + * **Default:** `false` if `strict` is `true`, otherwise `true`. + * - `tokens`: Whether tokens {boolean} Return the parsed tokens. This is useful + * for extending the built-in behavior, from adding additional checks through + * to reprocessing the tokens in different ways. + * **Default:** `false`. + * + * @returns The parsed command line arguments: + * + * - `values` A mapping of parsed option names with their string + * or boolean values. + * - `positionals` Positional arguments. + * - `tokens` Detailed parse information (only if `tokens` was specified). + * + */ + export function parseArgs(config: T): ParsedResults; + + interface ParseArgsOptionConfig { + type: 'string' | 'boolean'; + short?: string; + multiple?: boolean; + /** + * @since v18.11.0 + */ + default?: string | boolean | string[] | boolean[]; + } + + interface ParseArgsOptionsConfig { + [longOption: string]: ParseArgsOptionConfig; + } + + export interface ParseArgsConfig { + strict?: boolean; + allowPositionals?: boolean; + tokens?: boolean; + options?: ParseArgsOptionsConfig; + args?: string[]; + } + + /* + IfDefaultsTrue and IfDefaultsFalse are helpers to handle default values for missing boolean properties. + TypeScript does not have exact types for objects: https://github.com/microsoft/TypeScript/issues/12936 + This means it is impossible to distinguish between "field X is definitely not present" and "field X may or may not be present". + But we expect users to generally provide their config inline or `as const`, which means TS will always know whether a given field is present. + So this helper treats "not definitely present" (i.e., not `extends boolean`) as being "definitely not present", i.e. it should have its default value. 
+ This is technically incorrect but is a much nicer UX for the common case. + The IfDefaultsTrue version is for things which default to true; the IfDefaultsFalse version is for things which default to false. + */ + type IfDefaultsTrue = T extends true + ? IfTrue + : T extends false + ? IfFalse + : IfTrue; + + // we put the `extends false` condition first here because `undefined` compares like `any` when `strictNullChecks: false` + type IfDefaultsFalse = T extends false + ? IfFalse + : T extends true + ? IfTrue + : IfFalse; + + type ExtractOptionValue = IfDefaultsTrue< + T['strict'], + O['type'] extends 'string' ? string : O['type'] extends 'boolean' ? boolean : string | boolean, + string | boolean + >; + + type ParsedValues = + & IfDefaultsTrue + & (T['options'] extends ParseArgsOptionsConfig + ? { + -readonly [LongOption in keyof T['options']]: IfDefaultsFalse< + T['options'][LongOption]['multiple'], + undefined | Array>, + undefined | ExtractOptionValue + >; + } + : {}); + + type ParsedPositionals = IfDefaultsTrue< + T['strict'], + IfDefaultsFalse, + IfDefaultsTrue + >; + + type PreciseTokenForOptions< + K extends string, + O extends ParseArgsOptionConfig, + > = O['type'] extends 'string' + ? { + kind: 'option'; + index: number; + name: K; + rawName: string; + value: string; + inlineValue: boolean; + } + : O['type'] extends 'boolean' + ? { + kind: 'option'; + index: number; + name: K; + rawName: string; + value: undefined; + inlineValue: undefined; + } + : OptionToken & { name: K }; + + type TokenForOptions< + T extends ParseArgsConfig, + K extends keyof T['options'] = keyof T['options'], + > = K extends unknown + ? T['options'] extends ParseArgsOptionsConfig + ? PreciseTokenForOptions + : OptionToken + : never; + + type ParsedOptionToken = IfDefaultsTrue, OptionToken>; + + type ParsedPositionalToken = IfDefaultsTrue< + T['strict'], + IfDefaultsFalse, + IfDefaultsTrue + >; + + type ParsedTokens = Array< + ParsedOptionToken | ParsedPositionalToken | { kind: 'option-terminator'; index: number } + >; + + type PreciseParsedResults = IfDefaultsFalse< + T['tokens'], + { + values: ParsedValues; + positionals: ParsedPositionals; + tokens: ParsedTokens; + }, + { + values: ParsedValues; + positionals: ParsedPositionals; + } + >; + + type OptionToken = + | { kind: 'option'; index: number; name: string; rawName: string; value: string; inlineValue: boolean } + | { + kind: 'option'; + index: number; + name: string; + rawName: string; + value: undefined; + inlineValue: undefined; + }; + + type Token = + | OptionToken + | { kind: 'positional'; index: number; value: string } + | { kind: 'option-terminator'; index: number }; + + // If ParseArgsConfig extends T, then the user passed config constructed elsewhere. + // So we can't rely on the `"not definitely present" implies "definitely not present"` assumption mentioned above. + type ParsedResults = ParseArgsConfig extends T + ? { + values: { [longOption: string]: undefined | string | boolean | Array }; + positionals: string[]; + tokens?: Token[]; + } + : PreciseParsedResults; +} +declare module 'util/types' { + export * from 'util/types'; +} +declare module 'util/types' { + import { KeyObject, webcrypto } from 'node:crypto'; + /** + * Returns `true` if the value is a built-in [`ArrayBuffer`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer) or + * [`SharedArrayBuffer`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/SharedArrayBuffer) instance. 
+ * + * See also `util.types.isArrayBuffer()` and `util.types.isSharedArrayBuffer()`. + * + * ```js + * util.types.isAnyArrayBuffer(new ArrayBuffer()); // Returns true + * util.types.isAnyArrayBuffer(new SharedArrayBuffer()); // Returns true + * ``` + * @since v10.0.0 + */ + function isAnyArrayBuffer(object: unknown): object is ArrayBufferLike; + /** + * Returns `true` if the value is an `arguments` object. + * + * ```js + * function foo() { + * util.types.isArgumentsObject(arguments); // Returns true + * } + * ``` + * @since v10.0.0 + */ + function isArgumentsObject(object: unknown): object is IArguments; + /** + * Returns `true` if the value is a built-in [`ArrayBuffer`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer) instance. + * This does _not_ include [`SharedArrayBuffer`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/SharedArrayBuffer) instances. Usually, it is + * desirable to test for both; See `util.types.isAnyArrayBuffer()` for that. + * + * ```js + * util.types.isArrayBuffer(new ArrayBuffer()); // Returns true + * util.types.isArrayBuffer(new SharedArrayBuffer()); // Returns false + * ``` + * @since v10.0.0 + */ + function isArrayBuffer(object: unknown): object is ArrayBuffer; + /** + * Returns `true` if the value is an instance of one of the [`ArrayBuffer`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer) views, such as typed + * array objects or [`DataView`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/DataView). Equivalent to + * [`ArrayBuffer.isView()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer/isView). + * + * ```js + * util.types.isArrayBufferView(new Int8Array()); // true + * util.types.isArrayBufferView(Buffer.from('hello world')); // true + * util.types.isArrayBufferView(new DataView(new ArrayBuffer(16))); // true + * util.types.isArrayBufferView(new ArrayBuffer()); // false + * ``` + * @since v10.0.0 + */ + function isArrayBufferView(object: unknown): object is NodeJS.ArrayBufferView; + /** + * Returns `true` if the value is an [async function](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/async_function). + * This only reports back what the JavaScript engine is seeing; + * in particular, the return value may not match the original source code if + * a transpilation tool was used. + * + * ```js + * util.types.isAsyncFunction(function foo() {}); // Returns false + * util.types.isAsyncFunction(async function foo() {}); // Returns true + * ``` + * @since v10.0.0 + */ + function isAsyncFunction(object: unknown): boolean; + /** + * Returns `true` if the value is a `BigInt64Array` instance. + * + * ```js + * util.types.isBigInt64Array(new BigInt64Array()); // Returns true + * util.types.isBigInt64Array(new BigUint64Array()); // Returns false + * ``` + * @since v10.0.0 + */ + function isBigInt64Array(value: unknown): value is BigInt64Array; + /** + * Returns `true` if the value is a `BigUint64Array` instance. + * + * ```js + * util.types.isBigUint64Array(new BigInt64Array()); // Returns false + * util.types.isBigUint64Array(new BigUint64Array()); // Returns true + * ``` + * @since v10.0.0 + */ + function isBigUint64Array(value: unknown): value is BigUint64Array; + /** + * Returns `true` if the value is a boolean object, e.g. created + * by `new Boolean()`. 
+ * + * ```js + * util.types.isBooleanObject(false); // Returns false + * util.types.isBooleanObject(true); // Returns false + * util.types.isBooleanObject(new Boolean(false)); // Returns true + * util.types.isBooleanObject(new Boolean(true)); // Returns true + * util.types.isBooleanObject(Boolean(false)); // Returns false + * util.types.isBooleanObject(Boolean(true)); // Returns false + * ``` + * @since v10.0.0 + */ + function isBooleanObject(object: unknown): object is Boolean; + /** + * Returns `true` if the value is any boxed primitive object, e.g. created + * by `new Boolean()`, `new String()` or `Object(Symbol())`. + * + * For example: + * + * ```js + * util.types.isBoxedPrimitive(false); // Returns false + * util.types.isBoxedPrimitive(new Boolean(false)); // Returns true + * util.types.isBoxedPrimitive(Symbol('foo')); // Returns false + * util.types.isBoxedPrimitive(Object(Symbol('foo'))); // Returns true + * util.types.isBoxedPrimitive(Object(BigInt(5))); // Returns true + * ``` + * @since v10.11.0 + */ + function isBoxedPrimitive(object: unknown): object is String | Number | BigInt | Boolean | Symbol; + /** + * Returns `true` if the value is a built-in [`DataView`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/DataView) instance. + * + * ```js + * const ab = new ArrayBuffer(20); + * util.types.isDataView(new DataView(ab)); // Returns true + * util.types.isDataView(new Float64Array()); // Returns false + * ``` + * + * See also [`ArrayBuffer.isView()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer/isView). + * @since v10.0.0 + */ + function isDataView(object: unknown): object is DataView; + /** + * Returns `true` if the value is a built-in [`Date`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date) instance. + * + * ```js + * util.types.isDate(new Date()); // Returns true + * ``` + * @since v10.0.0 + */ + function isDate(object: unknown): object is Date; + /** + * Returns `true` if the value is a native `External` value. + * + * A native `External` value is a special type of object that contains a + * raw C++ pointer (`void*`) for access from native code, and has no other + * properties. Such objects are created either by Node.js internals or native + * addons. In JavaScript, they are [frozen](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/freeze) objects with a`null` prototype. + * + * ```c + * #include + * #include + * napi_value result; + * static napi_value MyNapi(napi_env env, napi_callback_info info) { + * int* raw = (int*) malloc(1024); + * napi_status status = napi_create_external(env, (void*) raw, NULL, NULL, &result); + * if (status != napi_ok) { + * napi_throw_error(env, NULL, "napi_create_external failed"); + * return NULL; + * } + * return result; + * } + * ... + * DECLARE_NAPI_PROPERTY("myNapi", MyNapi) + * ... + * ``` + * + * ```js + * const native = require('napi_addon.node'); + * const data = native.myNapi(); + * util.types.isExternal(data); // returns true + * util.types.isExternal(0); // returns false + * util.types.isExternal(new String('foo')); // returns false + * ``` + * + * For further information on `napi_create_external`, refer to `napi_create_external()`. + * @since v10.0.0 + */ + function isExternal(object: unknown): boolean; + /** + * Returns `true` if the value is a built-in [`Float32Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Float32Array) instance. 
+ * + * ```js + * util.types.isFloat32Array(new ArrayBuffer()); // Returns false + * util.types.isFloat32Array(new Float32Array()); // Returns true + * util.types.isFloat32Array(new Float64Array()); // Returns false + * ``` + * @since v10.0.0 + */ + function isFloat32Array(object: unknown): object is Float32Array; + /** + * Returns `true` if the value is a built-in [`Float64Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Float64Array) instance. + * + * ```js + * util.types.isFloat64Array(new ArrayBuffer()); // Returns false + * util.types.isFloat64Array(new Uint8Array()); // Returns false + * util.types.isFloat64Array(new Float64Array()); // Returns true + * ``` + * @since v10.0.0 + */ + function isFloat64Array(object: unknown): object is Float64Array; + /** + * Returns `true` if the value is a generator function. + * This only reports back what the JavaScript engine is seeing; + * in particular, the return value may not match the original source code if + * a transpilation tool was used. + * + * ```js + * util.types.isGeneratorFunction(function foo() {}); // Returns false + * util.types.isGeneratorFunction(function* foo() {}); // Returns true + * ``` + * @since v10.0.0 + */ + function isGeneratorFunction(object: unknown): object is GeneratorFunction; + /** + * Returns `true` if the value is a generator object as returned from a + * built-in generator function. + * This only reports back what the JavaScript engine is seeing; + * in particular, the return value may not match the original source code if + * a transpilation tool was used. + * + * ```js + * function* foo() {} + * const generator = foo(); + * util.types.isGeneratorObject(generator); // Returns true + * ``` + * @since v10.0.0 + */ + function isGeneratorObject(object: unknown): object is Generator; + /** + * Returns `true` if the value is a built-in [`Int8Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Int8Array) instance. + * + * ```js + * util.types.isInt8Array(new ArrayBuffer()); // Returns false + * util.types.isInt8Array(new Int8Array()); // Returns true + * util.types.isInt8Array(new Float64Array()); // Returns false + * ``` + * @since v10.0.0 + */ + function isInt8Array(object: unknown): object is Int8Array; + /** + * Returns `true` if the value is a built-in [`Int16Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Int16Array) instance. + * + * ```js + * util.types.isInt16Array(new ArrayBuffer()); // Returns false + * util.types.isInt16Array(new Int16Array()); // Returns true + * util.types.isInt16Array(new Float64Array()); // Returns false + * ``` + * @since v10.0.0 + */ + function isInt16Array(object: unknown): object is Int16Array; + /** + * Returns `true` if the value is a built-in [`Int32Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Int32Array) instance. + * + * ```js + * util.types.isInt32Array(new ArrayBuffer()); // Returns false + * util.types.isInt32Array(new Int32Array()); // Returns true + * util.types.isInt32Array(new Float64Array()); // Returns false + * ``` + * @since v10.0.0 + */ + function isInt32Array(object: unknown): object is Int32Array; + /** + * Returns `true` if the value is a built-in [`Map`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Map) instance. 
+ * + * ```js + * util.types.isMap(new Map()); // Returns true + * ``` + * @since v10.0.0 + */ + function isMap(object: T | {}): object is T extends ReadonlyMap ? (unknown extends T ? never : ReadonlyMap) : Map; + /** + * Returns `true` if the value is an iterator returned for a built-in [`Map`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Map) instance. + * + * ```js + * const map = new Map(); + * util.types.isMapIterator(map.keys()); // Returns true + * util.types.isMapIterator(map.values()); // Returns true + * util.types.isMapIterator(map.entries()); // Returns true + * util.types.isMapIterator(map[Symbol.iterator]()); // Returns true + * ``` + * @since v10.0.0 + */ + function isMapIterator(object: unknown): boolean; + /** + * Returns `true` if the value is an instance of a [Module Namespace Object](https://tc39.github.io/ecma262/#sec-module-namespace-exotic-objects). + * + * ```js + * import * as ns from './a.js'; + * + * util.types.isModuleNamespaceObject(ns); // Returns true + * ``` + * @since v10.0.0 + */ + function isModuleNamespaceObject(value: unknown): boolean; + /** + * Returns `true` if the value is an instance of a built-in `Error` type. + * + * ```js + * util.types.isNativeError(new Error()); // Returns true + * util.types.isNativeError(new TypeError()); // Returns true + * util.types.isNativeError(new RangeError()); // Returns true + * ``` + * @since v10.0.0 + */ + function isNativeError(object: unknown): object is Error; + /** + * Returns `true` if the value is a number object, e.g. created + * by `new Number()`. + * + * ```js + * util.types.isNumberObject(0); // Returns false + * util.types.isNumberObject(new Number(0)); // Returns true + * ``` + * @since v10.0.0 + */ + function isNumberObject(object: unknown): object is Number; + /** + * Returns `true` if the value is a built-in [`Promise`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise). + * + * ```js + * util.types.isPromise(Promise.resolve(42)); // Returns true + * ``` + * @since v10.0.0 + */ + function isPromise(object: unknown): object is Promise; + /** + * Returns `true` if the value is a [`Proxy`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Proxy) instance. + * + * ```js + * const target = {}; + * const proxy = new Proxy(target, {}); + * util.types.isProxy(target); // Returns false + * util.types.isProxy(proxy); // Returns true + * ``` + * @since v10.0.0 + */ + function isProxy(object: unknown): boolean; + /** + * Returns `true` if the value is a regular expression object. + * + * ```js + * util.types.isRegExp(/abc/); // Returns true + * util.types.isRegExp(new RegExp('abc')); // Returns true + * ``` + * @since v10.0.0 + */ + function isRegExp(object: unknown): object is RegExp; + /** + * Returns `true` if the value is a built-in [`Set`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Set) instance. + * + * ```js + * util.types.isSet(new Set()); // Returns true + * ``` + * @since v10.0.0 + */ + function isSet(object: T | {}): object is T extends ReadonlySet ? (unknown extends T ? never : ReadonlySet) : Set; + /** + * Returns `true` if the value is an iterator returned for a built-in [`Set`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Set) instance. 
+ * + * ```js + * const set = new Set(); + * util.types.isSetIterator(set.keys()); // Returns true + * util.types.isSetIterator(set.values()); // Returns true + * util.types.isSetIterator(set.entries()); // Returns true + * util.types.isSetIterator(set[Symbol.iterator]()); // Returns true + * ``` + * @since v10.0.0 + */ + function isSetIterator(object: unknown): boolean; + /** + * Returns `true` if the value is a built-in [`SharedArrayBuffer`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/SharedArrayBuffer) instance. + * This does _not_ include [`ArrayBuffer`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer) instances. Usually, it is + * desirable to test for both; See `util.types.isAnyArrayBuffer()` for that. + * + * ```js + * util.types.isSharedArrayBuffer(new ArrayBuffer()); // Returns false + * util.types.isSharedArrayBuffer(new SharedArrayBuffer()); // Returns true + * ``` + * @since v10.0.0 + */ + function isSharedArrayBuffer(object: unknown): object is SharedArrayBuffer; + /** + * Returns `true` if the value is a string object, e.g. created + * by `new String()`. + * + * ```js + * util.types.isStringObject('foo'); // Returns false + * util.types.isStringObject(new String('foo')); // Returns true + * ``` + * @since v10.0.0 + */ + function isStringObject(object: unknown): object is String; + /** + * Returns `true` if the value is a symbol object, created + * by calling `Object()` on a `Symbol` primitive. + * + * ```js + * const symbol = Symbol('foo'); + * util.types.isSymbolObject(symbol); // Returns false + * util.types.isSymbolObject(Object(symbol)); // Returns true + * ``` + * @since v10.0.0 + */ + function isSymbolObject(object: unknown): object is Symbol; + /** + * Returns `true` if the value is a built-in [`TypedArray`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/TypedArray) instance. + * + * ```js + * util.types.isTypedArray(new ArrayBuffer()); // Returns false + * util.types.isTypedArray(new Uint8Array()); // Returns true + * util.types.isTypedArray(new Float64Array()); // Returns true + * ``` + * + * See also [`ArrayBuffer.isView()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer/isView). + * @since v10.0.0 + */ + function isTypedArray(object: unknown): object is NodeJS.TypedArray; + /** + * Returns `true` if the value is a built-in [`Uint8Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint8Array) instance. + * + * ```js + * util.types.isUint8Array(new ArrayBuffer()); // Returns false + * util.types.isUint8Array(new Uint8Array()); // Returns true + * util.types.isUint8Array(new Float64Array()); // Returns false + * ``` + * @since v10.0.0 + */ + function isUint8Array(object: unknown): object is Uint8Array; + /** + * Returns `true` if the value is a built-in [`Uint8ClampedArray`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint8ClampedArray) instance. 
+ * + * ```js + * util.types.isUint8ClampedArray(new ArrayBuffer()); // Returns false + * util.types.isUint8ClampedArray(new Uint8ClampedArray()); // Returns true + * util.types.isUint8ClampedArray(new Float64Array()); // Returns false + * ``` + * @since v10.0.0 + */ + function isUint8ClampedArray(object: unknown): object is Uint8ClampedArray; + /** + * Returns `true` if the value is a built-in [`Uint16Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint16Array) instance. + * + * ```js + * util.types.isUint16Array(new ArrayBuffer()); // Returns false + * util.types.isUint16Array(new Uint16Array()); // Returns true + * util.types.isUint16Array(new Float64Array()); // Returns false + * ``` + * @since v10.0.0 + */ + function isUint16Array(object: unknown): object is Uint16Array; + /** + * Returns `true` if the value is a built-in [`Uint32Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint32Array) instance. + * + * ```js + * util.types.isUint32Array(new ArrayBuffer()); // Returns false + * util.types.isUint32Array(new Uint32Array()); // Returns true + * util.types.isUint32Array(new Float64Array()); // Returns false + * ``` + * @since v10.0.0 + */ + function isUint32Array(object: unknown): object is Uint32Array; + /** + * Returns `true` if the value is a built-in [`WeakMap`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/WeakMap) instance. + * + * ```js + * util.types.isWeakMap(new WeakMap()); // Returns true + * ``` + * @since v10.0.0 + */ + function isWeakMap(object: unknown): object is WeakMap; + /** + * Returns `true` if the value is a built-in [`WeakSet`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/WeakSet) instance. + * + * ```js + * util.types.isWeakSet(new WeakSet()); // Returns true + * ``` + * @since v10.0.0 + */ + function isWeakSet(object: unknown): object is WeakSet; + /** + * Returns `true` if `value` is a `KeyObject`, `false` otherwise. + * @since v16.2.0 + */ + function isKeyObject(object: unknown): object is KeyObject; + /** + * Returns `true` if `value` is a `CryptoKey`, `false` otherwise. + * @since v16.2.0 + */ + function isCryptoKey(object: unknown): object is webcrypto.CryptoKey; +} +declare module 'node:util' { + export * from 'util'; +} +declare module 'node:util/types' { + export * from 'util/types'; +} diff --git a/node_modules/@types/node/ts4.8/v8.d.ts b/node_modules/@types/node/ts4.8/v8.d.ts new file mode 100755 index 0000000..6685dc2 --- /dev/null +++ b/node_modules/@types/node/ts4.8/v8.d.ts @@ -0,0 +1,396 @@ +/** + * The `v8` module exposes APIs that are specific to the version of [V8](https://developers.google.com/v8/) built into the Node.js binary. It can be accessed using: + * + * ```js + * const v8 = require('v8'); + * ``` + * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/v8.js) + */ +declare module 'v8' { + import { Readable } from 'node:stream'; + interface HeapSpaceInfo { + space_name: string; + space_size: number; + space_used_size: number; + space_available_size: number; + physical_space_size: number; + } + // ** Signifies if the --zap_code_space option is enabled or not. 1 == enabled, 0 == disabled. 
*/ + type DoesZapCodeSpaceFlag = 0 | 1; + interface HeapInfo { + total_heap_size: number; + total_heap_size_executable: number; + total_physical_size: number; + total_available_size: number; + used_heap_size: number; + heap_size_limit: number; + malloced_memory: number; + peak_malloced_memory: number; + does_zap_garbage: DoesZapCodeSpaceFlag; + number_of_native_contexts: number; + number_of_detached_contexts: number; + } + interface HeapCodeStatistics { + code_and_metadata_size: number; + bytecode_and_metadata_size: number; + external_script_source_size: number; + } + /** + * Returns an integer representing a version tag derived from the V8 version, + * command-line flags, and detected CPU features. This is useful for determining + * whether a `vm.Script` `cachedData` buffer is compatible with this instance + * of V8. + * + * ```js + * console.log(v8.cachedDataVersionTag()); // 3947234607 + * // The value returned by v8.cachedDataVersionTag() is derived from the V8 + * // version, command-line flags, and detected CPU features. Test that the value + * // does indeed update when flags are toggled. + * v8.setFlagsFromString('--allow_natives_syntax'); + * console.log(v8.cachedDataVersionTag()); // 183726201 + * ``` + * @since v8.0.0 + */ + function cachedDataVersionTag(): number; + /** + * Returns an object with the following properties: + * + * `does_zap_garbage` is a 0/1 boolean, which signifies whether the`--zap_code_space` option is enabled or not. This makes V8 overwrite heap + * garbage with a bit pattern. The RSS footprint (resident set size) gets bigger + * because it continuously touches all heap pages and that makes them less likely + * to get swapped out by the operating system. + * + * `number_of_native_contexts` The value of native\_context is the number of the + * top-level contexts currently active. Increase of this number over time indicates + * a memory leak. + * + * `number_of_detached_contexts` The value of detached\_context is the number + * of contexts that were detached and not yet garbage collected. This number + * being non-zero indicates a potential memory leak. + * + * ```js + * { + * total_heap_size: 7326976, + * total_heap_size_executable: 4194304, + * total_physical_size: 7326976, + * total_available_size: 1152656, + * used_heap_size: 3476208, + * heap_size_limit: 1535115264, + * malloced_memory: 16384, + * peak_malloced_memory: 1127496, + * does_zap_garbage: 0, + * number_of_native_contexts: 1, + * number_of_detached_contexts: 0 + * } + * ``` + * @since v1.0.0 + */ + function getHeapStatistics(): HeapInfo; + /** + * Returns statistics about the V8 heap spaces, i.e. the segments which make up + * the V8 heap. Neither the ordering of heap spaces, nor the availability of a + * heap space can be guaranteed as the statistics are provided via the + * V8[`GetHeapSpaceStatistics`](https://v8docs.nodesource.com/node-13.2/d5/dda/classv8_1_1_isolate.html#ac673576f24fdc7a33378f8f57e1d13a4) function and may change from one V8 version to the + * next. 
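+     *
+     * A minimal usage sketch (not from the upstream docs; the numbers printed vary from process
+     * to process and from one V8 version to the next):
+     *
+     * ```js
+     * const v8 = require('v8');
+     *
+     * for (const space of v8.getHeapSpaceStatistics()) {
+     *   console.log(`${space.space_name}: ${space.space_used_size} bytes used`);
+     * }
+     * ```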
+ * + * The value returned is an array of objects containing the following properties: + * + * ```json + * [ + * { + * "space_name": "new_space", + * "space_size": 2063872, + * "space_used_size": 951112, + * "space_available_size": 80824, + * "physical_space_size": 2063872 + * }, + * { + * "space_name": "old_space", + * "space_size": 3090560, + * "space_used_size": 2493792, + * "space_available_size": 0, + * "physical_space_size": 3090560 + * }, + * { + * "space_name": "code_space", + * "space_size": 1260160, + * "space_used_size": 644256, + * "space_available_size": 960, + * "physical_space_size": 1260160 + * }, + * { + * "space_name": "map_space", + * "space_size": 1094160, + * "space_used_size": 201608, + * "space_available_size": 0, + * "physical_space_size": 1094160 + * }, + * { + * "space_name": "large_object_space", + * "space_size": 0, + * "space_used_size": 0, + * "space_available_size": 1490980608, + * "physical_space_size": 0 + * } + * ] + * ``` + * @since v6.0.0 + */ + function getHeapSpaceStatistics(): HeapSpaceInfo[]; + /** + * The `v8.setFlagsFromString()` method can be used to programmatically set + * V8 command-line flags. This method should be used with care. Changing settings + * after the VM has started may result in unpredictable behavior, including + * crashes and data loss; or it may simply do nothing. + * + * The V8 options available for a version of Node.js may be determined by running`node --v8-options`. + * + * Usage: + * + * ```js + * // Print GC events to stdout for one minute. + * const v8 = require('v8'); + * v8.setFlagsFromString('--trace_gc'); + * setTimeout(() => { v8.setFlagsFromString('--notrace_gc'); }, 60e3); + * ``` + * @since v1.0.0 + */ + function setFlagsFromString(flags: string): void; + /** + * Generates a snapshot of the current V8 heap and returns a Readable + * Stream that may be used to read the JSON serialized representation. + * This JSON stream format is intended to be used with tools such as + * Chrome DevTools. The JSON schema is undocumented and specific to the + * V8 engine. Therefore, the schema may change from one version of V8 to the next. + * + * Creating a heap snapshot requires memory about twice the size of the heap at + * the time the snapshot is created. This results in the risk of OOM killers + * terminating the process. + * + * Generating a snapshot is a synchronous operation which blocks the event loop + * for a duration depending on the heap size. + * + * ```js + * // Print heap snapshot to the console + * const v8 = require('v8'); + * const stream = v8.getHeapSnapshot(); + * stream.pipe(process.stdout); + * ``` + * @since v11.13.0 + * @return A Readable Stream containing the V8 heap snapshot + */ + function getHeapSnapshot(): Readable; + /** + * Generates a snapshot of the current V8 heap and writes it to a JSON + * file. This file is intended to be used with tools such as Chrome + * DevTools. The JSON schema is undocumented and specific to the V8 + * engine, and may change from one version of V8 to the next. + * + * A heap snapshot is specific to a single V8 isolate. When using `worker threads`, a heap snapshot generated from the main thread will + * not contain any information about the workers, and vice versa. + * + * Creating a heap snapshot requires memory about twice the size of the heap at + * the time the snapshot is created. This results in the risk of OOM killers + * terminating the process. 
+ * + * Generating a snapshot is a synchronous operation which blocks the event loop + * for a duration depending on the heap size. + * + * ```js + * const { writeHeapSnapshot } = require('v8'); + * const { + * Worker, + * isMainThread, + * parentPort + * } = require('worker_threads'); + * + * if (isMainThread) { + * const worker = new Worker(__filename); + * + * worker.once('message', (filename) => { + * console.log(`worker heapdump: ${filename}`); + * // Now get a heapdump for the main thread. + * console.log(`main thread heapdump: ${writeHeapSnapshot()}`); + * }); + * + * // Tell the worker to create a heapdump. + * worker.postMessage('heapdump'); + * } else { + * parentPort.once('message', (message) => { + * if (message === 'heapdump') { + * // Generate a heapdump for the worker + * // and return the filename to the parent. + * parentPort.postMessage(writeHeapSnapshot()); + * } + * }); + * } + * ``` + * @since v11.13.0 + * @param filename The file path where the V8 heap snapshot is to be saved. If not specified, a file name with the pattern `'Heap-${yyyymmdd}-${hhmmss}-${pid}-${thread_id}.heapsnapshot'` will be + * generated, where `{pid}` will be the PID of the Node.js process, `{thread_id}` will be `0` when `writeHeapSnapshot()` is called from the main Node.js thread or the id of a + * worker thread. + * @return The filename where the snapshot was saved. + */ + function writeHeapSnapshot(filename?: string): string; + /** + * Returns an object with the following properties: + * + * ```js + * { + * code_and_metadata_size: 212208, + * bytecode_and_metadata_size: 161368, + * external_script_source_size: 1410794 + * } + * ``` + * @since v12.8.0 + */ + function getHeapCodeStatistics(): HeapCodeStatistics; + /** + * @since v8.0.0 + */ + class Serializer { + /** + * Writes out a header, which includes the serialization format version. + */ + writeHeader(): void; + /** + * Serializes a JavaScript value and adds the serialized representation to the + * internal buffer. + * + * This throws an error if `value` cannot be serialized. + */ + writeValue(val: any): boolean; + /** + * Returns the stored internal buffer. This serializer should not be used once + * the buffer is released. Calling this method results in undefined behavior + * if a previous write has failed. + */ + releaseBuffer(): Buffer; + /** + * Marks an `ArrayBuffer` as having its contents transferred out of band. + * Pass the corresponding `ArrayBuffer` in the deserializing context to `deserializer.transferArrayBuffer()`. + * @param id A 32-bit unsigned integer. + * @param arrayBuffer An `ArrayBuffer` instance. + */ + transferArrayBuffer(id: number, arrayBuffer: ArrayBuffer): void; + /** + * Write a raw 32-bit unsigned integer. + * For use inside of a custom `serializer._writeHostObject()`. + */ + writeUint32(value: number): void; + /** + * Write a raw 64-bit unsigned integer, split into high and low 32-bit parts. + * For use inside of a custom `serializer._writeHostObject()`. + */ + writeUint64(hi: number, lo: number): void; + /** + * Write a JS `number` value. + * For use inside of a custom `serializer._writeHostObject()`. + */ + writeDouble(value: number): void; + /** + * Write raw bytes into the serializer’s internal buffer. The deserializer + * will require a way to compute the length of the buffer. + * For use inside of a custom `serializer._writeHostObject()`. 
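+         *
+         * Most callers never use these low-level writers directly; as a hedged illustration
+         * (not from the upstream docs), the usual high-level round trip through this module is:
+         *
+         * ```js
+         * const v8 = require('v8');
+         *
+         * const buf = v8.serialize({ answer: 42 }); // uses a DefaultSerializer internally
+         * const obj = v8.deserialize(buf);          // { answer: 42 }
+         * ```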
+ */ + writeRawBytes(buffer: NodeJS.TypedArray): void; + } + /** + * A subclass of `Serializer` that serializes `TypedArray`(in particular `Buffer`) and `DataView` objects as host objects, and only + * stores the part of their underlying `ArrayBuffer`s that they are referring to. + * @since v8.0.0 + */ + class DefaultSerializer extends Serializer {} + /** + * @since v8.0.0 + */ + class Deserializer { + constructor(data: NodeJS.TypedArray); + /** + * Reads and validates a header (including the format version). + * May, for example, reject an invalid or unsupported wire format. In that case, + * an `Error` is thrown. + */ + readHeader(): boolean; + /** + * Deserializes a JavaScript value from the buffer and returns it. + */ + readValue(): any; + /** + * Marks an `ArrayBuffer` as having its contents transferred out of band. + * Pass the corresponding `ArrayBuffer` in the serializing context to `serializer.transferArrayBuffer()` (or return the `id` from `serializer._getSharedArrayBufferId()` in the case of + * `SharedArrayBuffer`s). + * @param id A 32-bit unsigned integer. + * @param arrayBuffer An `ArrayBuffer` instance. + */ + transferArrayBuffer(id: number, arrayBuffer: ArrayBuffer): void; + /** + * Reads the underlying wire format version. Likely mostly to be useful to + * legacy code reading old wire format versions. May not be called before`.readHeader()`. + */ + getWireFormatVersion(): number; + /** + * Read a raw 32-bit unsigned integer and return it. + * For use inside of a custom `deserializer._readHostObject()`. + */ + readUint32(): number; + /** + * Read a raw 64-bit unsigned integer and return it as an array `[hi, lo]`with two 32-bit unsigned integer entries. + * For use inside of a custom `deserializer._readHostObject()`. + */ + readUint64(): [number, number]; + /** + * Read a JS `number` value. + * For use inside of a custom `deserializer._readHostObject()`. + */ + readDouble(): number; + /** + * Read raw bytes from the deserializer’s internal buffer. The `length` parameter + * must correspond to the length of the buffer that was passed to `serializer.writeRawBytes()`. + * For use inside of a custom `deserializer._readHostObject()`. + */ + readRawBytes(length: number): Buffer; + } + /** + * A subclass of `Deserializer` corresponding to the format written by `DefaultSerializer`. + * @since v8.0.0 + */ + class DefaultDeserializer extends Deserializer {} + /** + * Uses a `DefaultSerializer` to serialize `value` into a buffer. + * + * `ERR_BUFFER_TOO_LARGE` will be thrown when trying to + * serialize a huge object which requires buffer + * larger than `buffer.constants.MAX_LENGTH`. + * @since v8.0.0 + */ + function serialize(value: any): Buffer; + /** + * Uses a `DefaultDeserializer` with default options to read a JS value + * from a buffer. + * @since v8.0.0 + * @param buffer A buffer returned by {@link serialize}. + */ + function deserialize(buffer: NodeJS.TypedArray): any; + /** + * The `v8.takeCoverage()` method allows the user to write the coverage started by `NODE_V8_COVERAGE` to disk on demand. This method can be invoked multiple + * times during the lifetime of the process. Each time the execution counter will + * be reset and a new coverage report will be written to the directory specified + * by `NODE_V8_COVERAGE`. + * + * When the process is about to exit, one last coverage will still be written to + * disk unless {@link stopCoverage} is invoked before the process exits. 
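+     *
+     * A minimal sketch (assuming the process was started with `NODE_V8_COVERAGE` pointing at a
+     * writable directory; otherwise there is nothing to flush):
+     *
+     * ```js
+     * const v8 = require('v8');
+     *
+     * // Flush the execution counts collected so far to the NODE_V8_COVERAGE directory
+     * // and reset the counters for the next report.
+     * v8.takeCoverage();
+     * ```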
+ * @since v15.1.0, v14.18.0, v12.22.0 + */ + function takeCoverage(): void; + /** + * The `v8.stopCoverage()` method allows the user to stop the coverage collection + * started by `NODE_V8_COVERAGE`, so that V8 can release the execution count + * records and optimize code. This can be used in conjunction with {@link takeCoverage} if the user wants to collect the coverage on demand. + * @since v15.1.0, v14.18.0, v12.22.0 + */ + function stopCoverage(): void; +} +declare module 'node:v8' { + export * from 'v8'; +} diff --git a/node_modules/@types/node/ts4.8/vm.d.ts b/node_modules/@types/node/ts4.8/vm.d.ts new file mode 100755 index 0000000..c96513a --- /dev/null +++ b/node_modules/@types/node/ts4.8/vm.d.ts @@ -0,0 +1,509 @@ +/** + * The `vm` module enables compiling and running code within V8 Virtual + * Machine contexts. + * + * **The `vm` module is not a security** + * **mechanism. Do not use it to run untrusted code.** + * + * JavaScript code can be compiled and run immediately or + * compiled, saved, and run later. + * + * A common use case is to run the code in a different V8 Context. This means + * invoked code has a different global object than the invoking code. + * + * One can provide the context by `contextifying` an + * object. The invoked code treats any property in the context like a + * global variable. Any changes to global variables caused by the invoked + * code are reflected in the context object. + * + * ```js + * const vm = require('vm'); + * + * const x = 1; + * + * const context = { x: 2 }; + * vm.createContext(context); // Contextify the object. + * + * const code = 'x += 40; var y = 17;'; + * // `x` and `y` are global variables in the context. + * // Initially, x has the value 2 because that is the value of context.x. + * vm.runInContext(code, context); + * + * console.log(context.x); // 42 + * console.log(context.y); // 17 + * + * console.log(x); // 1; y is not defined. + * ``` + * @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/vm.js) + */ +declare module 'vm' { + interface Context extends NodeJS.Dict {} + interface BaseOptions { + /** + * Specifies the filename used in stack traces produced by this script. + * Default: `''`. + */ + filename?: string | undefined; + /** + * Specifies the line number offset that is displayed in stack traces produced by this script. + * Default: `0`. + */ + lineOffset?: number | undefined; + /** + * Specifies the column number offset that is displayed in stack traces produced by this script. + * @default 0 + */ + columnOffset?: number | undefined; + } + interface ScriptOptions extends BaseOptions { + displayErrors?: boolean | undefined; + timeout?: number | undefined; + cachedData?: Buffer | undefined; + /** @deprecated in favor of `script.createCachedData()` */ + produceCachedData?: boolean | undefined; + } + interface RunningScriptOptions extends BaseOptions { + /** + * When `true`, if an `Error` occurs while compiling the `code`, the line of code causing the error is attached to the stack trace. + * Default: `true`. + */ + displayErrors?: boolean | undefined; + /** + * Specifies the number of milliseconds to execute code before terminating execution. + * If execution is terminated, an `Error` will be thrown. This value must be a strictly positive integer. + */ + timeout?: number | undefined; + /** + * If `true`, the execution will be terminated when `SIGINT` (Ctrl+C) is received. 
+ * Existing handlers for the event that have been attached via `process.on('SIGINT')` will be disabled during script execution, but will continue to work after that. + * If execution is terminated, an `Error` will be thrown. + * Default: `false`. + */ + breakOnSigint?: boolean | undefined; + /** + * If set to `afterEvaluate`, microtasks will be run immediately after the script has run. + */ + microtaskMode?: 'afterEvaluate' | undefined; + } + interface CompileFunctionOptions extends BaseOptions { + /** + * Provides an optional data with V8's code cache data for the supplied source. + */ + cachedData?: Buffer | undefined; + /** + * Specifies whether to produce new cache data. + * Default: `false`, + */ + produceCachedData?: boolean | undefined; + /** + * The sandbox/context in which the said function should be compiled in. + */ + parsingContext?: Context | undefined; + /** + * An array containing a collection of context extensions (objects wrapping the current scope) to be applied while compiling + */ + contextExtensions?: Object[] | undefined; + } + interface CreateContextOptions { + /** + * Human-readable name of the newly created context. + * @default 'VM Context i' Where i is an ascending numerical index of the created context. + */ + name?: string | undefined; + /** + * Corresponds to the newly created context for display purposes. + * The origin should be formatted like a `URL`, but with only the scheme, host, and port (if necessary), + * like the value of the `url.origin` property of a URL object. + * Most notably, this string should omit the trailing slash, as that denotes a path. + * @default '' + */ + origin?: string | undefined; + codeGeneration?: + | { + /** + * If set to false any calls to eval or function constructors (Function, GeneratorFunction, etc) + * will throw an EvalError. + * @default true + */ + strings?: boolean | undefined; + /** + * If set to false any attempt to compile a WebAssembly module will throw a WebAssembly.CompileError. + * @default true + */ + wasm?: boolean | undefined; + } + | undefined; + /** + * If set to `afterEvaluate`, microtasks will be run immediately after the script has run. + */ + microtaskMode?: 'afterEvaluate' | undefined; + } + type MeasureMemoryMode = 'summary' | 'detailed'; + interface MeasureMemoryOptions { + /** + * @default 'summary' + */ + mode?: MeasureMemoryMode | undefined; + context?: Context | undefined; + } + interface MemoryMeasurement { + total: { + jsMemoryEstimate: number; + jsMemoryRange: [number, number]; + }; + } + /** + * Instances of the `vm.Script` class contain precompiled scripts that can be + * executed in specific contexts. + * @since v0.3.1 + */ + class Script { + constructor(code: string, options?: ScriptOptions); + /** + * Runs the compiled code contained by the `vm.Script` object within the given`contextifiedObject` and returns the result. Running code does not have access + * to local scope. + * + * The following example compiles code that increments a global variable, sets + * the value of another global variable, then execute the code multiple times. + * The globals are contained in the `context` object. 
+ * + * ```js + * const vm = require('vm'); + * + * const context = { + * animal: 'cat', + * count: 2 + * }; + * + * const script = new vm.Script('count += 1; name = "kitty";'); + * + * vm.createContext(context); + * for (let i = 0; i < 10; ++i) { + * script.runInContext(context); + * } + * + * console.log(context); + * // Prints: { animal: 'cat', count: 12, name: 'kitty' } + * ``` + * + * Using the `timeout` or `breakOnSigint` options will result in new event loops + * and corresponding threads being started, which have a non-zero performance + * overhead. + * @since v0.3.1 + * @param contextifiedObject A `contextified` object as returned by the `vm.createContext()` method. + * @return the result of the very last statement executed in the script. + */ + runInContext(contextifiedObject: Context, options?: RunningScriptOptions): any; + /** + * First contextifies the given `contextObject`, runs the compiled code contained + * by the `vm.Script` object within the created context, and returns the result. + * Running code does not have access to local scope. + * + * The following example compiles code that sets a global variable, then executes + * the code multiple times in different contexts. The globals are set on and + * contained within each individual `context`. + * + * ```js + * const vm = require('vm'); + * + * const script = new vm.Script('globalVar = "set"'); + * + * const contexts = [{}, {}, {}]; + * contexts.forEach((context) => { + * script.runInNewContext(context); + * }); + * + * console.log(contexts); + * // Prints: [{ globalVar: 'set' }, { globalVar: 'set' }, { globalVar: 'set' }] + * ``` + * @since v0.3.1 + * @param contextObject An object that will be `contextified`. If `undefined`, a new object will be created. + * @return the result of the very last statement executed in the script. + */ + runInNewContext(contextObject?: Context, options?: RunningScriptOptions): any; + /** + * Runs the compiled code contained by the `vm.Script` within the context of the + * current `global` object. Running code does not have access to local scope, but _does_ have access to the current `global` object. + * + * The following example compiles code that increments a `global` variable then + * executes that code multiple times: + * + * ```js + * const vm = require('vm'); + * + * global.globalVar = 0; + * + * const script = new vm.Script('globalVar += 1', { filename: 'myfile.vm' }); + * + * for (let i = 0; i < 1000; ++i) { + * script.runInThisContext(); + * } + * + * console.log(globalVar); + * + * // 1000 + * ``` + * @since v0.3.1 + * @return the result of the very last statement executed in the script. + */ + runInThisContext(options?: RunningScriptOptions): any; + /** + * Creates a code cache that can be used with the `Script` constructor's`cachedData` option. Returns a `Buffer`. This method may be called at any + * time and any number of times. 
+ * + * ```js + * const script = new vm.Script(` + * function add(a, b) { + * return a + b; + * } + * + * const x = add(1, 2); + * `); + * + * const cacheWithoutX = script.createCachedData(); + * + * script.runInThisContext(); + * + * const cacheWithX = script.createCachedData(); + * ``` + * @since v10.6.0 + */ + createCachedData(): Buffer; + /** @deprecated in favor of `script.createCachedData()` */ + cachedDataProduced?: boolean | undefined; + cachedDataRejected?: boolean | undefined; + cachedData?: Buffer | undefined; + } + /** + * If given a `contextObject`, the `vm.createContext()` method will `prepare + * that object` so that it can be used in calls to {@link runInContext} or `script.runInContext()`. Inside such scripts, + * the `contextObject` will be the global object, retaining all of its existing + * properties but also having the built-in objects and functions any standard [global object](https://es5.github.io/#x15.1) has. Outside of scripts run by the vm module, global variables + * will remain unchanged. + * + * ```js + * const vm = require('vm'); + * + * global.globalVar = 3; + * + * const context = { globalVar: 1 }; + * vm.createContext(context); + * + * vm.runInContext('globalVar *= 2;', context); + * + * console.log(context); + * // Prints: { globalVar: 2 } + * + * console.log(global.globalVar); + * // Prints: 3 + * ``` + * + * If `contextObject` is omitted (or passed explicitly as `undefined`), a new, + * empty `contextified` object will be returned. + * + * The `vm.createContext()` method is primarily useful for creating a single + * context that can be used to run multiple scripts. For instance, if emulating a + * web browser, the method can be used to create a single context representing a + * window's global object, then run all ` +``` + + + +Node + + +Install with `npm install deprecation` + +```js +const { Deprecation } = require("deprecation"); +// or: import { Deprecation } from "deprecation"; +``` + + + + + +```js +function foo() { + bar(); +} + +function bar() { + baz(); +} + +function baz() { + console.warn(new Deprecation("[my-lib] foo() is deprecated, use bar()")); +} + +foo(); +// { Deprecation: [my-lib] foo() is deprecated, use bar() +// at baz (/path/to/file.js:12:15) +// at bar (/path/to/file.js:8:3) +// at foo (/path/to/file.js:4:3) +``` + +To log a deprecation message only once, you can use the [once](https://www.npmjs.com/package/once) module. 
+ +```js +const Deprecation = require("deprecation"); +const once = require("once"); + +const deprecateFoo = once(console.warn); + +function foo() { + deprecateFoo(new Deprecation("[my-lib] foo() is deprecated, use bar()")); +} + +foo(); +foo(); // logs nothing +``` + +## License + +[ISC](LICENSE) diff --git a/node_modules/deprecation/dist-node/index.js b/node_modules/deprecation/dist-node/index.js new file mode 100644 index 0000000..9da1775 --- /dev/null +++ b/node_modules/deprecation/dist-node/index.js @@ -0,0 +1,20 @@ +'use strict'; + +Object.defineProperty(exports, '__esModule', { value: true }); + +class Deprecation extends Error { + constructor(message) { + super(message); // Maintains proper stack trace (only available on V8) + + /* istanbul ignore next */ + + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + + this.name = 'Deprecation'; + } + +} + +exports.Deprecation = Deprecation; diff --git a/node_modules/deprecation/dist-src/index.js b/node_modules/deprecation/dist-src/index.js new file mode 100644 index 0000000..7950fdc --- /dev/null +++ b/node_modules/deprecation/dist-src/index.js @@ -0,0 +1,14 @@ +export class Deprecation extends Error { + constructor(message) { + super(message); // Maintains proper stack trace (only available on V8) + + /* istanbul ignore next */ + + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + + this.name = 'Deprecation'; + } + +} \ No newline at end of file diff --git a/node_modules/deprecation/dist-types/index.d.ts b/node_modules/deprecation/dist-types/index.d.ts new file mode 100644 index 0000000..e3ae7ad --- /dev/null +++ b/node_modules/deprecation/dist-types/index.d.ts @@ -0,0 +1,3 @@ +export class Deprecation extends Error { + name: "Deprecation"; +} diff --git a/node_modules/deprecation/dist-web/index.js b/node_modules/deprecation/dist-web/index.js new file mode 100644 index 0000000..c6bbda7 --- /dev/null +++ b/node_modules/deprecation/dist-web/index.js @@ -0,0 +1,16 @@ +class Deprecation extends Error { + constructor(message) { + super(message); // Maintains proper stack trace (only available on V8) + + /* istanbul ignore next */ + + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + + this.name = 'Deprecation'; + } + +} + +export { Deprecation }; diff --git a/node_modules/deprecation/package.json b/node_modules/deprecation/package.json new file mode 100644 index 0000000..a45fd51 --- /dev/null +++ b/node_modules/deprecation/package.json @@ -0,0 +1,34 @@ +{ + "name": "deprecation", + "description": "Log a deprecation message with stack", + "version": "2.3.1", + "license": "ISC", + "files": [ + "dist-*/", + "bin/" + ], + "esnext": "dist-src/index.js", + "main": "dist-node/index.js", + "module": "dist-web/index.js", + "types": "dist-types/index.d.ts", + "pika": true, + "sideEffects": false, + "keywords": [ + "deprecate", + "deprecated", + "deprecation" + ], + "repository": { + "type": "git", + "url": "https://github.com/gr2m/deprecation.git" + }, + "dependencies": {}, + "devDependencies": { + "@pika/pack": "^0.3.7", + "@pika/plugin-build-node": "^0.4.0", + "@pika/plugin-build-types": "^0.4.0", + "@pika/plugin-build-web": "^0.4.0", + "@pika/plugin-standard-pkg": "^0.4.0", + "semantic-release": "^15.13.3" + } +} diff --git a/node_modules/dezalgo/.travis.yml b/node_modules/dezalgo/.travis.yml new file mode 100644 index 0000000..e1bcee1 --- /dev/null +++ b/node_modules/dezalgo/.travis.yml @@ -0,0 +1,7 @@ +language: node_js +before_script: 
npm install -g npm@latest +node_js: + - '0.8' + - '0.10' + - '0.12' + - 'iojs' diff --git a/node_modules/dezalgo/LICENSE b/node_modules/dezalgo/LICENSE new file mode 100644 index 0000000..19129e3 --- /dev/null +++ b/node_modules/dezalgo/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/dezalgo/README.md b/node_modules/dezalgo/README.md new file mode 100644 index 0000000..bdfc8ba --- /dev/null +++ b/node_modules/dezalgo/README.md @@ -0,0 +1,29 @@ +# dezalgo + +Contain async insanity so that the dark pony lord doesn't eat souls + +See [this blog +post](http://blog.izs.me/post/59142742143/designing-apis-for-asynchrony). + +## USAGE + +Pass a callback to `dezalgo` and it will ensure that it is *always* +called in a future tick, and never in this tick. + +```javascript +var dz = require('dezalgo') + +var cache = {} +function maybeSync(arg, cb) { + cb = dz(cb) + + // this will actually defer to nextTick + if (cache[arg]) cb(null, cache[arg]) + + fs.readFile(arg, function (er, data) { + // since this is *already* defered, it will call immediately + if (er) cb(er) + cb(null, cache[arg] = data) + }) +} +``` diff --git a/node_modules/dezalgo/dezalgo.js b/node_modules/dezalgo/dezalgo.js new file mode 100644 index 0000000..04fd3ba --- /dev/null +++ b/node_modules/dezalgo/dezalgo.js @@ -0,0 +1,22 @@ +var wrappy = require('wrappy') +module.exports = wrappy(dezalgo) + +var asap = require('asap') + +function dezalgo (cb) { + var sync = true + asap(function () { + sync = false + }) + + return function zalgoSafe() { + var args = arguments + var me = this + if (sync) + asap(function() { + cb.apply(me, args) + }) + else + cb.apply(me, args) + } +} diff --git a/node_modules/dezalgo/package.json b/node_modules/dezalgo/package.json new file mode 100644 index 0000000..634dd0d --- /dev/null +++ b/node_modules/dezalgo/package.json @@ -0,0 +1,42 @@ +{ + "name": "dezalgo", + "version": "1.0.3", + "description": "Contain async insanity so that the dark pony lord doesn't eat souls", + "main": "dezalgo.js", + "directories": { + "test": "test" + }, + "dependencies": { + "asap": "^2.0.0", + "wrappy": "1" + }, + "devDependencies": { + "tap": "^1.2.0" + }, + "scripts": { + "test": "tap test/*.js" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/dezalgo" + }, + "keywords": [ + "async", + "zalgo", + "the dark pony", + "he comes", + "asynchrony of all holy and good", + "T̯̪ͅo̯͖̹ ̻̮̖̲͢i̥̖n̢͈͇̝͍v͏͉ok̭̬̝ͅe̞͍̩̫͍̩͝ ̩̮̖̟͇͉́t͔͔͎̗h͏̗̟e̘͉̰̦̠̞͓ ͕h͉̟͎̪̠̱͠ḭ̮̩v̺͉͇̩e̵͖-̺̪m͍i̜n̪̲̲̲̮d̷ ̢r̠̼̯̹̦̦͘ͅe͓̳͓̙p̺̗̫͙͘ͅr͔̰͜e̴͓̞s͉̩̩͟ͅe͏̣n͚͇̗̭̺͍tì͙̣n͏̖̥̗͎̰̪g̞͓̭̱̯̫̕ ̣̱͜ͅc̦̰̰̠̮͎͙̀hao̺̜̻͍͙ͅs͉͓̘.͎̼̺̼͕̹͘", + "̠̞̱̰I͖͇̝̻n̦̰͍̰̟v̤̺̫̳̭̼̗͘ò̹̟̩̩͚k̢̥̠͍͉̦̬i̖͓͔̮̱̻͘n̶̳͙̫͎g̖̯̣̲̪͉ ̞͎̗͕͚ͅt̲͕̘̺̯̗̦h̘̦̲̜̻e̳͎͉̬͙ ̴̞̪̲̥f̜̯͓͓̭̭͢e̱̘͔̮e̜̤l̺̱͖̯͓͙͈͢i̵̦̬͉͔̫͚͕n͉g̨͖̙̙̹̹̟̤ ͉̪o̞̠͍̪̰͙ͅf̬̲̺ 
͔͕̲͕͕̲̕c̙͉h̝͔̩̙̕ͅa̲͖̻̗̹o̥̼̫s̝̖̜̝͚̫̟.̺͚ ̸̱̲W̶̥̣͖̦i͏̤̬̱̳̣ͅt͉h̗̪̪ ̷̱͚̹̪ǫ͕̗̣̳̦͎u̼̦͔̥̮̕ţ͖͎̻͔͉ ̴͎̩òr̹̰̖͉͈͝d̷̲̦̖͓e̲͓̠r", + "̧͚̜͓̰̭̭Ṯ̫̹̜̮̟̮͝h͚̘̩̘̖̰́e ̥̘͓͉͔͙̼N̟̜̣̘͔̪e̞̞̤͢z̰̖̘͇p̠͟e̺̱̣͍͙̝ṛ̘̬͔̙͇̠d͝ḭ̯̱̥̗̩a̛ͅn͏̦ ̷̥hi̥v̖̳̹͉̮̱͝e̹̪̘̖̰̟-̴͙͓͚̜̻mi̗̺̻͙̺ͅn̪̯͈d ͏̘͓̫̳ͅơ̹͔̳̖̣͓f͈̹̘ ͕ͅc̗̤̠̜̮̥̥h̡͍̩̭̫͚̱a̤͉̤͔͜os͕̤̼͍̲̀ͅ.̡̱ ̦Za̯̱̗̭͍̣͚l̗͉̰̤g͏̣̭̬̗̲͖ͅo̶̭̩̳̟͈.̪̦̰̳", + "H̴̱̦̗̬̣͓̺e̮ ͉̠̰̞͎̖͟ẁh̛̺̯ͅo̖̫͡ ̢Ẁa̡̗i̸t͖̣͉̀ş͔̯̩ ̤̦̮͇̞̦̲B͎̭͇̦̼e̢hin͏͙̟̪d̴̰͓̻̣̮͕ͅ T͖̮̕h͖e̘̺̰̙͘ ̥Ẁ̦͔̻͚a̞͖̪͉l̪̠̻̰̣̠l̲͎͞", + "Z̘͍̼͎̣͔͝Ą̲̜̱̱̹̤͇L̶̝̰̭͔G͍̖͍O̫͜ͅ!̼̤ͅ", + "H̝̪̜͓̀̌̂̒E̢̙̠̣ ̴̳͇̥̟̠͍̐C̹̓̑̐̆͝Ó̶̭͓̚M̬̼Ĕ̖̤͔͔̟̹̽̿̊ͥ̍ͫS̻̰̦̻̖̘̱̒ͪ͌̅͟" + ], + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "license": "ISC", + "bugs": { + "url": "https://github.com/npm/dezalgo/issues" + }, + "homepage": "https://github.com/npm/dezalgo" +} diff --git a/node_modules/dezalgo/test/basic.js b/node_modules/dezalgo/test/basic.js new file mode 100644 index 0000000..da09e72 --- /dev/null +++ b/node_modules/dezalgo/test/basic.js @@ -0,0 +1,29 @@ +var test = require('tap').test +var dz = require('../dezalgo.js') + +test('the dark pony', function(t) { + + var n = 0 + function foo(i, cb) { + cb = dz(cb) + if (++n % 2) cb(true, i) + else process.nextTick(cb.bind(null, false, i)) + } + + var called = 0 + var order = [0, 2, 4, 6, 8, 1, 3, 5, 7, 9] + var o = 0 + for (var i = 0; i < 10; i++) { + foo(i, function(cached, i) { + t.equal(i, order[o++]) + t.equal(i % 2, cached ? 0 : 1) + called++ + }) + t.equal(called, 0) + } + + setTimeout(function() { + t.equal(called, 10) + t.end() + }) +}) diff --git a/node_modules/ee-first/LICENSE b/node_modules/ee-first/LICENSE new file mode 100644 index 0000000..a7ae8ee --- /dev/null +++ b/node_modules/ee-first/LICENSE @@ -0,0 +1,22 @@ + +The MIT License (MIT) + +Copyright (c) 2014 Jonathan Ong me@jongleberry.com + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/ee-first/README.md b/node_modules/ee-first/README.md new file mode 100644 index 0000000..cbd2478 --- /dev/null +++ b/node_modules/ee-first/README.md @@ -0,0 +1,80 @@ +# EE First + +[![NPM version][npm-image]][npm-url] +[![Build status][travis-image]][travis-url] +[![Test coverage][coveralls-image]][coveralls-url] +[![License][license-image]][license-url] +[![Downloads][downloads-image]][downloads-url] +[![Gittip][gittip-image]][gittip-url] + +Get the first event in a set of event emitters and event pairs, +then clean up after itself. 
+ +## Install + +```sh +$ npm install ee-first +``` + +## API + +```js +var first = require('ee-first') +``` + +### first(arr, listener) + +Invoke `listener` on the first event from the list specified in `arr`. `arr` is +an array of arrays, with each array in the format `[ee, ...event]`. `listener` +will be called only once, the first time any of the given events are emitted. If +`error` is one of the listened events, then if that fires first, the `listener` +will be given the `err` argument. + +The `listener` is invoked as `listener(err, ee, event, args)`, where `err` is the +first argument emitted from an `error` event, if applicable; `ee` is the event +emitter that fired; `event` is the string event name that fired; and `args` is an +array of the arguments that were emitted on the event. + +```js +var ee1 = new EventEmitter() +var ee2 = new EventEmitter() + +first([ + [ee1, 'close', 'end', 'error'], + [ee2, 'error'] +], function (err, ee, event, args) { + // listener invoked +}) +``` + +#### .cancel() + +The group of listeners can be cancelled before being invoked and have all the event +listeners removed from the underlying event emitters. + +```js +var thunk = first([ + [ee1, 'close', 'end', 'error'], + [ee2, 'error'] +], function (err, ee, event, args) { + // listener invoked +}) + +// cancel and clean up +thunk.cancel() +``` + +[npm-image]: https://img.shields.io/npm/v/ee-first.svg?style=flat-square +[npm-url]: https://npmjs.org/package/ee-first +[github-tag]: http://img.shields.io/github/tag/jonathanong/ee-first.svg?style=flat-square +[github-url]: https://github.com/jonathanong/ee-first/tags +[travis-image]: https://img.shields.io/travis/jonathanong/ee-first.svg?style=flat-square +[travis-url]: https://travis-ci.org/jonathanong/ee-first +[coveralls-image]: https://img.shields.io/coveralls/jonathanong/ee-first.svg?style=flat-square +[coveralls-url]: https://coveralls.io/r/jonathanong/ee-first?branch=master +[license-image]: http://img.shields.io/npm/l/ee-first.svg?style=flat-square +[license-url]: LICENSE.md +[downloads-image]: http://img.shields.io/npm/dm/ee-first.svg?style=flat-square +[downloads-url]: https://npmjs.org/package/ee-first +[gittip-image]: https://img.shields.io/gittip/jonathanong.svg?style=flat-square +[gittip-url]: https://www.gittip.com/jonathanong/ diff --git a/node_modules/ee-first/index.js b/node_modules/ee-first/index.js new file mode 100644 index 0000000..501287c --- /dev/null +++ b/node_modules/ee-first/index.js @@ -0,0 +1,95 @@ +/*! + * ee-first + * Copyright(c) 2014 Jonathan Ong + * MIT Licensed + */ + +'use strict' + +/** + * Module exports. + * @public + */ + +module.exports = first + +/** + * Get the first event in a set of event emitters and event pairs. + * + * @param {array} stuff + * @param {function} done + * @public + */ + +function first(stuff, done) { + if (!Array.isArray(stuff)) + throw new TypeError('arg must be an array of [ee, events...] 
arrays') + + var cleanups = [] + + for (var i = 0; i < stuff.length; i++) { + var arr = stuff[i] + + if (!Array.isArray(arr) || arr.length < 2) + throw new TypeError('each array member must be [ee, events...]') + + var ee = arr[0] + + for (var j = 1; j < arr.length; j++) { + var event = arr[j] + var fn = listener(event, callback) + + // listen to the event + ee.on(event, fn) + // push this listener to the list of cleanups + cleanups.push({ + ee: ee, + event: event, + fn: fn, + }) + } + } + + function callback() { + cleanup() + done.apply(null, arguments) + } + + function cleanup() { + var x + for (var i = 0; i < cleanups.length; i++) { + x = cleanups[i] + x.ee.removeListener(x.event, x.fn) + } + } + + function thunk(fn) { + done = fn + } + + thunk.cancel = cleanup + + return thunk +} + +/** + * Create the event listener. + * @private + */ + +function listener(event, done) { + return function onevent(arg1) { + var args = new Array(arguments.length) + var ee = this + var err = event === 'error' + ? arg1 + : null + + // copy args to prevent arguments escaping scope + for (var i = 0; i < args.length; i++) { + args[i] = arguments[i] + } + + done(err, ee, event, args) + } +} diff --git a/node_modules/ee-first/package.json b/node_modules/ee-first/package.json new file mode 100644 index 0000000..b6d0b7d --- /dev/null +++ b/node_modules/ee-first/package.json @@ -0,0 +1,29 @@ +{ + "name": "ee-first", + "description": "return the first event in a set of ee/event pairs", + "version": "1.1.1", + "author": { + "name": "Jonathan Ong", + "email": "me@jongleberry.com", + "url": "http://jongleberry.com", + "twitter": "https://twitter.com/jongleberry" + }, + "contributors": [ + "Douglas Christopher Wilson " + ], + "license": "MIT", + "repository": "jonathanong/ee-first", + "devDependencies": { + "istanbul": "0.3.9", + "mocha": "2.2.5" + }, + "files": [ + "index.js", + "LICENSE" + ], + "scripts": { + "test": "mocha --reporter spec --bail --check-leaks test/", + "test-cov": "istanbul cover node_modules/mocha/bin/_mocha -- --reporter dot --check-leaks test/", + "test-travis": "istanbul cover node_modules/mocha/bin/_mocha --report lcovonly -- --reporter spec --check-leaks test/" + } +} diff --git a/node_modules/eventsource/.editorconfig b/node_modules/eventsource/.editorconfig new file mode 100644 index 0000000..88f10ae --- /dev/null +++ b/node_modules/eventsource/.editorconfig @@ -0,0 +1,27 @@ +# http://editorconfig.org +root = true + +[*] +# Use hard or soft tabs +indent_style = space + +# Size of a single indent +indent_size = tab + +# Number of columns representing a tab character +tab_width = 2 + +# Use line-feed as EOL indicator +end_of_line = lf + +# Use UTF-8 character encoding for all files +charset = utf-8 + +# Remove any whitespace characters preceding newline characters +trim_trailing_whitespace = true + +# Ensure file ends with a newline when saving +insert_final_newline = true + +[*.md] +trim_trailing_whitespace = false diff --git a/node_modules/eventsource/CONTRIBUTING.md b/node_modules/eventsource/CONTRIBUTING.md new file mode 100644 index 0000000..f66db3b --- /dev/null +++ b/node_modules/eventsource/CONTRIBUTING.md @@ -0,0 +1,13 @@ +# Contributing to EventSource + +If you add or fix something, add tests. + +## Release process + +Update `HISTORY.md`, Then: + + npm outdated --depth 0 # See if you can upgrade something + npm run polyfill + git commit ... 
+ npm version [major|minor|patch] + npm publish diff --git a/node_modules/eventsource/HISTORY.md b/node_modules/eventsource/HISTORY.md new file mode 100644 index 0000000..d5e662a --- /dev/null +++ b/node_modules/eventsource/HISTORY.md @@ -0,0 +1,161 @@ +# [2.0.2](https://github.com/EventSource/eventsource/compare/v2.0.1...v2.0.2) + +* Do not include authorization and cookie headers on redirect to different origin ([#273](https://github.com/EventSource/eventsource/pull/273) Espen Hovlandsdal) + +# [2.0.1](https://github.com/EventSource/eventsource/compare/v2.0.0...v2.0.1) + +* Fix `URL is not a constructor` error for browser ([#268](https://github.com/EventSource/eventsource/pull/268) Ajinkya Rajput) + +# [2.0.0](https://github.com/EventSource/eventsource/compare/v1.1.0...v2.0.0) + +* BREAKING: Node >= 12 now required ([#152](https://github.com/EventSource/eventsource/pull/152) @HonkingGoose) +* Preallocate buffer size when reading data for increased performance with large messages ([#239](https://github.com/EventSource/eventsource/pull/239) Pau Freixes) +* Removed dependency on url-parser. Fixes [CVE-2022-0512](https://www.whitesourcesoftware.com/vulnerability-database/CVE-2022-0512) & [CVE-2022-0691](https://nvd.nist.gov/vuln/detail/CVE-2022-0691) ([#249](https://github.com/EventSource/eventsource/pull/249) Alex Hladin) + +# [1.1.1](https://github.com/EventSource/eventsource/compare/v1.1.0...v1.1.1) + +* Do not include authorization and cookie headers on redirect to different origin ([#273](https://github.com/EventSource/eventsource/pull/273) Espen Hovlandsdal) + +# [1.1.0](https://github.com/EventSource/eventsource/compare/v1.0.7...v1.1.0) + +* Improve performance for large messages across many chunks ([#130](https://github.com/EventSource/eventsource/pull/130) Trent Willis) +* Add `createConnection` option for http or https requests ([#120](https://github.com/EventSource/eventsource/pull/120) Vasily Lavrov) +* Support HTTP 302 redirects ([#116](https://github.com/EventSource/eventsource/pull/116) Ryan Bonte) +* Prevent sequential errors from attempting multiple reconnections ([#125](https://github.com/EventSource/eventsource/pull/125) David Patty) +* Add `new` to correct test ([#111](https://github.com/EventSource/eventsource/pull/101) Stéphane Alnet) +* Fix reconnections attempts now happen more than once ([#136](https://github.com/EventSource/eventsource/pull/136) Icy Fish) + +# [1.0.7](https://github.com/EventSource/eventsource/compare/v1.0.6...v1.0.7) + +* Add dispatchEvent to EventSource ([#101](https://github.com/EventSource/eventsource/pull/101) Ali Afroozeh) +* Added `checkServerIdentity` option ([#104](https://github.com/EventSource/eventsource/pull/104) cintolas) +* Surface request error message ([#107](https://github.com/EventSource/eventsource/pull/107) RasPhilCo) + +# [1.0.6](https://github.com/EventSource/eventsource/compare/v1.0.5...v1.0.6) + +* Fix issue where a unicode sequence split in two chunks would lead to invalid messages ([#108](https://github.com/EventSource/eventsource/pull/108) Espen Hovlandsdal) +* Change example to use `eventsource/ssestream` (Aslak Hellesøy) + +# [1.0.5](https://github.com/EventSource/eventsource/compare/v1.0.4...v1.0.5) + +* Check for `window` existing before polyfilling. ([#80](https://github.com/EventSource/eventsource/pull/80) Neftaly Hernandez) + +# [1.0.4](https://github.com/EventSource/eventsource/compare/v1.0.2...v1.0.4) + +* Pass withCredentials on to the XHR. 
([#79](https://github.com/EventSource/eventsource/pull/79) Ken Mayer) + +# [1.0.2](https://github.com/EventSource/eventsource/compare/v1.0.1...v1.0.2) + +* Fix proxy not working when proxy and target URL uses different protocols. ([#76](https://github.com/EventSource/eventsource/pull/76) Espen Hovlandsdal) +* Make `close()` a prototype method instead of an instance method. ([#77](https://github.com/EventSource/eventsource/pull/77) Espen Hovlandsdal) + +# [1.0.1](https://github.com/EventSource/eventsource/compare/v1.0.0...v1.0.1) + +* Reconnect if server responds with HTTP 500, 502, 503 or 504. ([#74](https://github.com/EventSource/eventsource/pull/74) Vykintas Narmontas) + +# [1.0.0](https://github.com/EventSource/eventsource/compare/v0.2.3...v1.0.0) + +* Add missing `removeEventListener`-method. ([#51](https://github.com/EventSource/eventsource/pull/51) Yucheng Tu / Espen Hovlandsdal) +* Fix EventSource reconnecting on non-200 responses. ([af84476](https://github.com/EventSource/eventsource/commit/af84476b519a01e61b8c80727261df52ae40022c) Espen Hovlandsdal) +* Add ability to customize https options. ([#53](https://github.com/EventSource/eventsource/pull/53) Rafael Alfaro) +* Add readyState constants to EventSource instances. ([#66](https://github.com/EventSource/eventsource/pull/66) Espen Hovlandsdal) + +# [0.2.3](https://github.com/EventSource/eventsource/compare/v0.2.2...v0.2.3) + +* Fix `onConnectionClosed` firing multiple times resulting in multiple connections. ([#61](https://github.com/EventSource/eventsource/pull/61) Phil Strong / Duncan Wong) +* Remove unneeded isPlainObject check for headers. ([#64](https://github.com/EventSource/eventsource/pull/64) David Mark) + +# [0.2.2](https://github.com/EventSource/eventsource/compare/v0.2.1...v0.2.2) + +* Don't include test files in npm package. ([#56](https://github.com/EventSource/eventsource/pull/56) eanplatter) + +# [0.2.1](https://github.com/EventSource/eventsource/compare/v0.2.0...v0.2.1) + +* Fix `close()` for polyfill. ([#52](https://github.com/EventSource/eventsource/pull/52) brian-medendorp) +* Add http/https proxy function. ([#46](https://github.com/EventSource/eventsource/pull/46) Eric Lu) +* Fix reconnect for polyfill. Only disable reconnect when server status is 204. (Aslak Hellesøy). +* Drop support for Node 0.10.x and older (Aslak Hellesøy). + +# [0.2.0](https://github.com/EventSource/eventsource/compare/v0.1.6...v0.2.0) + +* Renamed repository to `eventsource` (since it's not just Node, but also browser polyfill). (Aslak Hellesøy). +* Compatibility with webpack/browserify. ([#44](https://github.com/EventSource/eventsource/pull/44) Adriano Raiano). + +# [0.1.6](https://github.com/EventSource/eventsource/compare/v0.1.5...v0.1.6) + +* Ignore headers without a value. ([#41](https://github.com/EventSource/eventsource/issues/41), [#43](https://github.com/EventSource/eventsource/pull/43) Adriano Raiano) + +# [0.1.5](https://github.com/EventSource/eventsource/compare/v0.1.4...v0.1.5) + +* Refactor tests to support Node.js 0.12.0 and Io.js 1.1.0. (Aslak Hellesøy) + +# [0.1.4](https://github.com/EventSource/eventsource/compare/v0.1.3...master) + +* Bugfix: Added missing origin property. ([#39](https://github.com/EventSource/eventsource/pull/39), [#38](https://github.com/EventSource/eventsource/issues/38) Arnout Kazemier) +* Expose `status` property on `error` events. 
([#40](https://github.com/EventSource/eventsource/pull/40) Adriano Raiano) + +# [0.1.3](https://github.com/EventSource/eventsource/compare/v0.1.2...v0.1.3) + +* Bugfix: Made message properties enumerable. ([#37](https://github.com/EventSource/eventsource/pull/37) Golo Roden) + +# [0.1.2](https://github.com/EventSource/eventsource/compare/v0.1.1...v0.1.2) + +* Bugfix: Blank lines not read. ([#35](https://github.com/EventSource/eventsource/issues/35), [#36](https://github.com/EventSource/eventsource/pull/36) Lesterpig) + +# [0.1.1](https://github.com/EventSource/eventsource/compare/v0.1.0...v0.1.1) + +* Bugfix: Fix message type. ([#33](https://github.com/EventSource/eventsource/pull/33) Romain Gauthier) + +# [0.1.0](https://github.com/EventSource/eventsource/compare/v0.0.10...v0.1.0) + +* Bugfix: High CPU usage by replacing Jison with port of WebKit's parser. ([#25](https://github.com/EventSource/eventsource/issues/25), [#32](https://github.com/EventSource/eventsource/pull/32), [#18](https://github.com/EventSource/eventsource/issues/18) qqueue) +* Reformatted all code to 2 spaces. + +# [0.0.10](https://github.com/EventSource/eventsource/compare/v0.0.9...v0.0.10) + +* Provide `Event` argument on `open` and `error` event ([#30](https://github.com/EventSource/eventsource/issues/30), [#31](https://github.com/EventSource/eventsource/pull/31) Donghwan Kim) +* Expose `lastEventId` on messages. ([#28](https://github.com/EventSource/eventsource/pull/28) mbieser) + +# [0.0.9](https://github.com/EventSource/eventsource/compare/v0.0.8...v0.0.9) + +* Bugfix: old "last-event-id" used on reconnect ([#27](https://github.com/EventSource/eventsource/pull/27) Aslak Hellesøy) + +# [0.0.8](https://github.com/EventSource/eventsource/compare/v0.0.7...v0.0.8) + +* Bugfix: EventSource still reconnected when closed ([#24](https://github.com/EventSource/eventsource/pull/24) FrozenCow) +* Allow unauthorized HTTPS connections by setting `rejectUnauthorized` to false. 
(Aslak Hellesøy) + +# [0.0.7](https://github.com/EventSource/eventsource/compare/v0.0.6...v0.0.7) + +* Explicitly raise an error when server returns http 403 and don't continue ([#20](https://github.com/EventSource/eventsource/pull/20) Scott Moak) +* Added ability to send custom http headers to server ([#21](https://github.com/EventSource/eventsource/pull/21), [#9](https://github.com/EventSource/eventsource/issues/9) Scott Moak) +* Fix Unicode support to cope with Javascript Unicode size limitations ([#23](https://github.com/EventSource/eventsource/pull/23), [#22](https://github.com/EventSource/eventsource/issues/22) Devon Adkisson) +* Graceful handling of parse errors ([#19](https://github.com/EventSource/eventsource/issues/19) Aslak Hellesøy) +* Switched from testing with Nodeunit to Mocha (Aslak Hellesøy) + +# [0.0.6](https://github.com/EventSource/eventsource/compare/v0.0.5...v0.0.6) + +* Add Accept: text/event-stream header ([#17](https://github.com/EventSource/eventsource/pull/17) William Wicks) + +# [0.0.5](https://github.com/EventSource/eventsource/compare/v0.0.4...v0.0.5) + +* Add no-cache and https support ([#10](https://github.com/EventSource/eventsource/pull/10) Einar Otto Stangvik) +* Ensure that Last-Event-ID is sent to the server for reconnects, as defined in the spec ([#8](https://github.com/EventSource/eventsource/pull/8) Einar Otto Stangvik) +* Verify that CR and CRLF are accepted alongside LF ([#7](https://github.com/EventSource/eventsource/pull/7) Einar Otto Stangvik) +* Emit 'open' event ([#4](https://github.com/EventSource/eventsource/issues/4) Einar Otto Stangvik) + +# [0.0.4](https://github.com/EventSource/eventsource/compare/v0.0.3...v0.0.4) + +* Automatic reconnect every second if the server is down. Reconnect interval can be set with `reconnectInterval` (not in W3C spec). (Aslak Hellesøy) + +# [0.0.3](https://github.com/EventSource/eventsource/compare/v0.0.2...v0.0.3) + +* Jison based eventstream parser ([#2](https://github.com/EventSource/eventsource/pull/2) Einar Otto Stangvik) + +# [0.0.2](https://github.com/EventSource/eventsource/compare/v0.0.1...v0.0.2) + +* Use native EventListener (Aslak Hellesøy) + +# 0.0.1 + +* First release diff --git a/node_modules/eventsource/LICENSE b/node_modules/eventsource/LICENSE new file mode 100644 index 0000000..505b0d9 --- /dev/null +++ b/node_modules/eventsource/LICENSE @@ -0,0 +1,22 @@ +The MIT License + +Copyright (c) EventSource GitHub organisation + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
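The patch next vendors the `eventsource` package's README, which documents a W3C-compatible EventSource client for Node. As a quick orientation, a minimal client sketch against that API could look like the following; the endpoint URL and handler bodies are illustrative assumptions, not taken from the patch.

```javascript
// Minimal sketch of the eventsource client API (assumed endpoint URL).
const EventSource = require('eventsource');

const es = new EventSource('http://localhost:8080/sse'); // hypothetical SSE endpoint

es.onopen = () => console.log('connected');
es.onmessage = (event) => console.log('message:', event.data);
es.onerror = (err) => {
  // Error events may carry an HTTP `status` for unauthorized/redirect responses (see README below).
  if (err && (err.status === 401 || err.status === 403)) {
    console.log('not authorized');
  }
};

// Close the stream when finished to stop reconnect attempts.
// es.close();
```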
diff --git a/node_modules/eventsource/README.md b/node_modules/eventsource/README.md new file mode 100644 index 0000000..a6b6d2f --- /dev/null +++ b/node_modules/eventsource/README.md @@ -0,0 +1,91 @@ +# EventSource [![npm version](http://img.shields.io/npm/v/eventsource.svg?style=flat-square)](https://www.npmjs.com/package/eventsource)[![NPM Downloads](https://img.shields.io/npm/dm/eventsource.svg?style=flat-square)](http://npm-stat.com/charts.html?package=eventsource&from=2015-09-01)[![Dependencies](https://img.shields.io/david/EventSource/eventsource.svg?style=flat-square)](https://david-dm.org/EventSource/eventsource) + +![Build](https://github.com/EventSource/eventsource/actions/workflows/build.yml/badge.svg) + +This library is a pure JavaScript implementation of the [EventSource](https://html.spec.whatwg.org/multipage/server-sent-events.html#server-sent-events) client. The API aims to be W3C compatible. + +You can use it with Node.js or as a browser polyfill for +[browsers that don't have native `EventSource` support](http://caniuse.com/#feat=eventsource). + +## Install + + npm install eventsource + +## Example + + npm install + node ./example/sse-server.js + node ./example/sse-client.js # Node.js client + open http://localhost:8080 # Browser client - both native and polyfill + curl http://localhost:8080/sse # Enjoy the simplicity of SSE + +## Browser Polyfill + +Just add `example/eventsource-polyfill.js` file to your web page: + +```html + +``` + +Now you will have two global constructors: + +```javascript +window.EventSourcePolyfill +window.EventSource // Unchanged if browser has defined it. Otherwise, same as window.EventSourcePolyfill +``` + +If you're using [webpack](https://webpack.github.io/) or [browserify](http://browserify.org/) +you can of course build your own. (The `example/eventsource-polyfill.js` is built with webpack). + +## Extensions to the W3C API + +### Setting HTTP request headers + +You can define custom HTTP headers for the initial HTTP request. This can be useful for e.g. sending cookies +or to specify an initial `Last-Event-ID` value. + +HTTP headers are defined by assigning a `headers` attribute to the optional `eventSourceInitDict` argument: + +```javascript +var eventSourceInitDict = {headers: {'Cookie': 'test=test'}}; +var es = new EventSource(url, eventSourceInitDict); +``` + +### Allow unauthorized HTTPS requests + +By default, https requests that cannot be authorized will cause the connection to fail and an exception +to be emitted. You can override this behaviour, along with other https options: + +```javascript +var eventSourceInitDict = {https: {rejectUnauthorized: false}}; +var es = new EventSource(url, eventSourceInitDict); +``` + +Note that for Node.js < v0.10.x this option has no effect - unauthorized HTTPS requests are *always* allowed. + +### HTTP status code on error events + +Unauthorized and redirect error status codes (for example 401, 403, 301, 307) are available in the `status` property in the error event. + +```javascript +es.onerror = function (err) { + if (err) { + if (err.status === 401 || err.status === 403) { + console.log('not authorized'); + } + } +}; +``` + +### HTTP/HTTPS proxy + +You can define a `proxy` option for the HTTP request to be used. This is typically useful if you are behind a corporate firewall. + +```javascript +var es = new EventSource(url, {proxy: 'http://your.proxy.com'}); +``` + + +## License + +MIT-licensed. 
See LICENSE diff --git a/node_modules/eventsource/example/eventsource-polyfill.js b/node_modules/eventsource/example/eventsource-polyfill.js new file mode 100644 index 0000000..50fda2c --- /dev/null +++ b/node_modules/eventsource/example/eventsource-polyfill.js @@ -0,0 +1,9736 @@ +/******/ (function(modules) { // webpackBootstrap +/******/ // The module cache +/******/ var installedModules = {}; +/******/ +/******/ // The require function +/******/ function __webpack_require__(moduleId) { +/******/ +/******/ // Check if module is in cache +/******/ if(installedModules[moduleId]) { +/******/ return installedModules[moduleId].exports; +/******/ } +/******/ // Create a new module (and put it into the cache) +/******/ var module = installedModules[moduleId] = { +/******/ i: moduleId, +/******/ l: false, +/******/ exports: {} +/******/ }; +/******/ +/******/ // Execute the module function +/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__); +/******/ +/******/ // Flag the module as loaded +/******/ module.l = true; +/******/ +/******/ // Return the exports of the module +/******/ return module.exports; +/******/ } +/******/ +/******/ +/******/ // expose the modules object (__webpack_modules__) +/******/ __webpack_require__.m = modules; +/******/ +/******/ // expose the module cache +/******/ __webpack_require__.c = installedModules; +/******/ +/******/ // define getter function for harmony exports +/******/ __webpack_require__.d = function(exports, name, getter) { +/******/ if(!__webpack_require__.o(exports, name)) { +/******/ Object.defineProperty(exports, name, { +/******/ configurable: false, +/******/ enumerable: true, +/******/ get: getter +/******/ }); +/******/ } +/******/ }; +/******/ +/******/ // getDefaultExport function for compatibility with non-harmony modules +/******/ __webpack_require__.n = function(module) { +/******/ var getter = module && module.__esModule ? +/******/ function getDefault() { return module['default']; } : +/******/ function getModuleExports() { return module; }; +/******/ __webpack_require__.d(getter, 'a', getter); +/******/ return getter; +/******/ }; +/******/ +/******/ // Object.prototype.hasOwnProperty.call +/******/ __webpack_require__.o = function(object, property) { return Object.prototype.hasOwnProperty.call(object, property); }; +/******/ +/******/ // __webpack_public_path__ +/******/ __webpack_require__.p = ""; +/******/ +/******/ // Load entry module and return exports +/******/ return __webpack_require__(__webpack_require__.s = 21); +/******/ }) +/************************************************************************/ +/******/ ([ +/* 0 */ +/***/ (function(module, exports) { + +var g; + +// This works in non-strict mode +g = (function() { + return this; +})(); + +try { + // This works if eval is allowed (see CSP) + g = g || Function("return this")() || (1,eval)("this"); +} catch(e) { + // This works if the window reference is available + if(typeof window === "object") + g = window; +} + +// g can still be undefined, but nothing to do about it... +// We return undefined, instead of nothing here, so it's +// easier to handle this case. if(!global) { ...} + +module.exports = g; + + +/***/ }), +/* 1 */ +/***/ (function(module, exports) { + +// shim for using process in browser +var process = module.exports = {}; + +// cached from whatever global is present so that test runners that stub it +// don't break things. 
But we need to wrap it in a try catch in case it is +// wrapped in strict mode code which doesn't define any globals. It's inside a +// function because try/catches deoptimize in certain engines. + +var cachedSetTimeout; +var cachedClearTimeout; + +function defaultSetTimout() { + throw new Error('setTimeout has not been defined'); +} +function defaultClearTimeout () { + throw new Error('clearTimeout has not been defined'); +} +(function () { + try { + if (typeof setTimeout === 'function') { + cachedSetTimeout = setTimeout; + } else { + cachedSetTimeout = defaultSetTimout; + } + } catch (e) { + cachedSetTimeout = defaultSetTimout; + } + try { + if (typeof clearTimeout === 'function') { + cachedClearTimeout = clearTimeout; + } else { + cachedClearTimeout = defaultClearTimeout; + } + } catch (e) { + cachedClearTimeout = defaultClearTimeout; + } +} ()) +function runTimeout(fun) { + if (cachedSetTimeout === setTimeout) { + //normal enviroments in sane situations + return setTimeout(fun, 0); + } + // if setTimeout wasn't available but was latter defined + if ((cachedSetTimeout === defaultSetTimout || !cachedSetTimeout) && setTimeout) { + cachedSetTimeout = setTimeout; + return setTimeout(fun, 0); + } + try { + // when when somebody has screwed with setTimeout but no I.E. maddness + return cachedSetTimeout(fun, 0); + } catch(e){ + try { + // When we are in I.E. but the script has been evaled so I.E. doesn't trust the global object when called normally + return cachedSetTimeout.call(null, fun, 0); + } catch(e){ + // same as above but when it's a version of I.E. that must have the global object for 'this', hopfully our context correct otherwise it will throw a global error + return cachedSetTimeout.call(this, fun, 0); + } + } + + +} +function runClearTimeout(marker) { + if (cachedClearTimeout === clearTimeout) { + //normal enviroments in sane situations + return clearTimeout(marker); + } + // if clearTimeout wasn't available but was latter defined + if ((cachedClearTimeout === defaultClearTimeout || !cachedClearTimeout) && clearTimeout) { + cachedClearTimeout = clearTimeout; + return clearTimeout(marker); + } + try { + // when when somebody has screwed with setTimeout but no I.E. maddness + return cachedClearTimeout(marker); + } catch (e){ + try { + // When we are in I.E. but the script has been evaled so I.E. doesn't trust the global object when called normally + return cachedClearTimeout.call(null, marker); + } catch (e){ + // same as above but when it's a version of I.E. that must have the global object for 'this', hopfully our context correct otherwise it will throw a global error. + // Some versions of I.E. 
have different rules for clearTimeout vs setTimeout + return cachedClearTimeout.call(this, marker); + } + } + + + +} +var queue = []; +var draining = false; +var currentQueue; +var queueIndex = -1; + +function cleanUpNextTick() { + if (!draining || !currentQueue) { + return; + } + draining = false; + if (currentQueue.length) { + queue = currentQueue.concat(queue); + } else { + queueIndex = -1; + } + if (queue.length) { + drainQueue(); + } +} + +function drainQueue() { + if (draining) { + return; + } + var timeout = runTimeout(cleanUpNextTick); + draining = true; + + var len = queue.length; + while(len) { + currentQueue = queue; + queue = []; + while (++queueIndex < len) { + if (currentQueue) { + currentQueue[queueIndex].run(); + } + } + queueIndex = -1; + len = queue.length; + } + currentQueue = null; + draining = false; + runClearTimeout(timeout); +} + +process.nextTick = function (fun) { + var args = new Array(arguments.length - 1); + if (arguments.length > 1) { + for (var i = 1; i < arguments.length; i++) { + args[i - 1] = arguments[i]; + } + } + queue.push(new Item(fun, args)); + if (queue.length === 1 && !draining) { + runTimeout(drainQueue); + } +}; + +// v8 likes predictible objects +function Item(fun, array) { + this.fun = fun; + this.array = array; +} +Item.prototype.run = function () { + this.fun.apply(null, this.array); +}; +process.title = 'browser'; +process.browser = true; +process.env = {}; +process.argv = []; +process.version = ''; // empty string to avoid regexp issues +process.versions = {}; + +function noop() {} + +process.on = noop; +process.addListener = noop; +process.once = noop; +process.off = noop; +process.removeListener = noop; +process.removeAllListeners = noop; +process.emit = noop; +process.prependListener = noop; +process.prependOnceListener = noop; + +process.listeners = function (name) { return [] } + +process.binding = function (name) { + throw new Error('process.binding is not supported'); +}; + +process.cwd = function () { return '/' }; +process.chdir = function (dir) { + throw new Error('process.chdir is not supported'); +}; +process.umask = function() { return 0; }; + + +/***/ }), +/* 2 */ +/***/ (function(module, exports) { + +if (typeof Object.create === 'function') { + // implementation from standard node.js 'util' module + module.exports = function inherits(ctor, superCtor) { + ctor.super_ = superCtor + ctor.prototype = Object.create(superCtor.prototype, { + constructor: { + value: ctor, + enumerable: false, + writable: true, + configurable: true + } + }); + }; +} else { + // old school shim for old browsers + module.exports = function inherits(ctor, superCtor) { + ctor.super_ = superCtor + var TempCtor = function () {} + TempCtor.prototype = superCtor.prototype + ctor.prototype = new TempCtor() + ctor.prototype.constructor = ctor + } +} + + +/***/ }), +/* 3 */ +/***/ (function(module, exports, __webpack_require__) { + +"use strict"; +/* WEBPACK VAR INJECTION */(function(global) {/*! + * The buffer module from node.js, for the browser. 
+ * + * @author Feross Aboukhadijeh + * @license MIT + */ +/* eslint-disable no-proto */ + + + +var base64 = __webpack_require__(23) +var ieee754 = __webpack_require__(24) +var isArray = __webpack_require__(10) + +exports.Buffer = Buffer +exports.SlowBuffer = SlowBuffer +exports.INSPECT_MAX_BYTES = 50 + +/** + * If `Buffer.TYPED_ARRAY_SUPPORT`: + * === true Use Uint8Array implementation (fastest) + * === false Use Object implementation (most compatible, even IE6) + * + * Browsers that support typed arrays are IE 10+, Firefox 4+, Chrome 7+, Safari 5.1+, + * Opera 11.6+, iOS 4.2+. + * + * Due to various browser bugs, sometimes the Object implementation will be used even + * when the browser supports typed arrays. + * + * Note: + * + * - Firefox 4-29 lacks support for adding new properties to `Uint8Array` instances, + * See: https://bugzilla.mozilla.org/show_bug.cgi?id=695438. + * + * - Chrome 9-10 is missing the `TypedArray.prototype.subarray` function. + * + * - IE10 has a broken `TypedArray.prototype.subarray` function which returns arrays of + * incorrect length in some situations. + + * We detect these buggy browsers and set `Buffer.TYPED_ARRAY_SUPPORT` to `false` so they + * get the Object implementation, which is slower but behaves correctly. + */ +Buffer.TYPED_ARRAY_SUPPORT = global.TYPED_ARRAY_SUPPORT !== undefined + ? global.TYPED_ARRAY_SUPPORT + : typedArraySupport() + +/* + * Export kMaxLength after typed array support is determined. + */ +exports.kMaxLength = kMaxLength() + +function typedArraySupport () { + try { + var arr = new Uint8Array(1) + arr.__proto__ = {__proto__: Uint8Array.prototype, foo: function () { return 42 }} + return arr.foo() === 42 && // typed array instances can be augmented + typeof arr.subarray === 'function' && // chrome 9-10 lack `subarray` + arr.subarray(1, 1).byteLength === 0 // ie10 has broken `subarray` + } catch (e) { + return false + } +} + +function kMaxLength () { + return Buffer.TYPED_ARRAY_SUPPORT + ? 0x7fffffff + : 0x3fffffff +} + +function createBuffer (that, length) { + if (kMaxLength() < length) { + throw new RangeError('Invalid typed array length') + } + if (Buffer.TYPED_ARRAY_SUPPORT) { + // Return an augmented `Uint8Array` instance, for best performance + that = new Uint8Array(length) + that.__proto__ = Buffer.prototype + } else { + // Fallback: Return an object instance of the Buffer class + if (that === null) { + that = new Buffer(length) + } + that.length = length + } + + return that +} + +/** + * The Buffer constructor returns instances of `Uint8Array` that have their + * prototype changed to `Buffer.prototype`. Furthermore, `Buffer` is a subclass of + * `Uint8Array`, so the returned instances will have all the node `Buffer` methods + * and the `Uint8Array` methods. Square bracket notation works as expected -- it + * returns a single octet. + * + * The `Uint8Array` prototype remains unmodified. + */ + +function Buffer (arg, encodingOrOffset, length) { + if (!Buffer.TYPED_ARRAY_SUPPORT && !(this instanceof Buffer)) { + return new Buffer(arg, encodingOrOffset, length) + } + + // Common case. + if (typeof arg === 'number') { + if (typeof encodingOrOffset === 'string') { + throw new Error( + 'If encoding is specified then the first argument must be a string' + ) + } + return allocUnsafe(this, arg) + } + return from(this, arg, encodingOrOffset, length) +} + +Buffer.poolSize = 8192 // not used by this implementation + +// TODO: Legacy, not needed anymore. Remove in next major version. 
+Buffer._augment = function (arr) { + arr.__proto__ = Buffer.prototype + return arr +} + +function from (that, value, encodingOrOffset, length) { + if (typeof value === 'number') { + throw new TypeError('"value" argument must not be a number') + } + + if (typeof ArrayBuffer !== 'undefined' && value instanceof ArrayBuffer) { + return fromArrayBuffer(that, value, encodingOrOffset, length) + } + + if (typeof value === 'string') { + return fromString(that, value, encodingOrOffset) + } + + return fromObject(that, value) +} + +/** + * Functionally equivalent to Buffer(arg, encoding) but throws a TypeError + * if value is a number. + * Buffer.from(str[, encoding]) + * Buffer.from(array) + * Buffer.from(buffer) + * Buffer.from(arrayBuffer[, byteOffset[, length]]) + **/ +Buffer.from = function (value, encodingOrOffset, length) { + return from(null, value, encodingOrOffset, length) +} + +if (Buffer.TYPED_ARRAY_SUPPORT) { + Buffer.prototype.__proto__ = Uint8Array.prototype + Buffer.__proto__ = Uint8Array + if (typeof Symbol !== 'undefined' && Symbol.species && + Buffer[Symbol.species] === Buffer) { + // Fix subarray() in ES2016. See: https://github.com/feross/buffer/pull/97 + Object.defineProperty(Buffer, Symbol.species, { + value: null, + configurable: true + }) + } +} + +function assertSize (size) { + if (typeof size !== 'number') { + throw new TypeError('"size" argument must be a number') + } else if (size < 0) { + throw new RangeError('"size" argument must not be negative') + } +} + +function alloc (that, size, fill, encoding) { + assertSize(size) + if (size <= 0) { + return createBuffer(that, size) + } + if (fill !== undefined) { + // Only pay attention to encoding if it's a string. This + // prevents accidentally sending in a number that would + // be interpretted as a start offset. + return typeof encoding === 'string' + ? createBuffer(that, size).fill(fill, encoding) + : createBuffer(that, size).fill(fill) + } + return createBuffer(that, size) +} + +/** + * Creates a new filled Buffer instance. + * alloc(size[, fill[, encoding]]) + **/ +Buffer.alloc = function (size, fill, encoding) { + return alloc(null, size, fill, encoding) +} + +function allocUnsafe (that, size) { + assertSize(size) + that = createBuffer(that, size < 0 ? 0 : checked(size) | 0) + if (!Buffer.TYPED_ARRAY_SUPPORT) { + for (var i = 0; i < size; ++i) { + that[i] = 0 + } + } + return that +} + +/** + * Equivalent to Buffer(num), by default creates a non-zero-filled Buffer instance. + * */ +Buffer.allocUnsafe = function (size) { + return allocUnsafe(null, size) +} +/** + * Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance. + */ +Buffer.allocUnsafeSlow = function (size) { + return allocUnsafe(null, size) +} + +function fromString (that, string, encoding) { + if (typeof encoding !== 'string' || encoding === '') { + encoding = 'utf8' + } + + if (!Buffer.isEncoding(encoding)) { + throw new TypeError('"encoding" must be a valid string encoding') + } + + var length = byteLength(string, encoding) | 0 + that = createBuffer(that, length) + + var actual = that.write(string, encoding) + + if (actual !== length) { + // Writing a hex string, for example, that contains invalid characters will + // cause everything after the first invalid character to be ignored. (e.g. + // 'abxxcd' will be treated as 'ab') + that = that.slice(0, actual) + } + + return that +} + +function fromArrayLike (that, array) { + var length = array.length < 0 ? 
0 : checked(array.length) | 0 + that = createBuffer(that, length) + for (var i = 0; i < length; i += 1) { + that[i] = array[i] & 255 + } + return that +} + +function fromArrayBuffer (that, array, byteOffset, length) { + array.byteLength // this throws if `array` is not a valid ArrayBuffer + + if (byteOffset < 0 || array.byteLength < byteOffset) { + throw new RangeError('\'offset\' is out of bounds') + } + + if (array.byteLength < byteOffset + (length || 0)) { + throw new RangeError('\'length\' is out of bounds') + } + + if (byteOffset === undefined && length === undefined) { + array = new Uint8Array(array) + } else if (length === undefined) { + array = new Uint8Array(array, byteOffset) + } else { + array = new Uint8Array(array, byteOffset, length) + } + + if (Buffer.TYPED_ARRAY_SUPPORT) { + // Return an augmented `Uint8Array` instance, for best performance + that = array + that.__proto__ = Buffer.prototype + } else { + // Fallback: Return an object instance of the Buffer class + that = fromArrayLike(that, array) + } + return that +} + +function fromObject (that, obj) { + if (Buffer.isBuffer(obj)) { + var len = checked(obj.length) | 0 + that = createBuffer(that, len) + + if (that.length === 0) { + return that + } + + obj.copy(that, 0, 0, len) + return that + } + + if (obj) { + if ((typeof ArrayBuffer !== 'undefined' && + obj.buffer instanceof ArrayBuffer) || 'length' in obj) { + if (typeof obj.length !== 'number' || isnan(obj.length)) { + return createBuffer(that, 0) + } + return fromArrayLike(that, obj) + } + + if (obj.type === 'Buffer' && isArray(obj.data)) { + return fromArrayLike(that, obj.data) + } + } + + throw new TypeError('First argument must be a string, Buffer, ArrayBuffer, Array, or array-like object.') +} + +function checked (length) { + // Note: cannot use `length < kMaxLength()` here because that fails when + // length is NaN (which is otherwise coerced to zero.) 
+ if (length >= kMaxLength()) { + throw new RangeError('Attempt to allocate Buffer larger than maximum ' + + 'size: 0x' + kMaxLength().toString(16) + ' bytes') + } + return length | 0 +} + +function SlowBuffer (length) { + if (+length != length) { // eslint-disable-line eqeqeq + length = 0 + } + return Buffer.alloc(+length) +} + +Buffer.isBuffer = function isBuffer (b) { + return !!(b != null && b._isBuffer) +} + +Buffer.compare = function compare (a, b) { + if (!Buffer.isBuffer(a) || !Buffer.isBuffer(b)) { + throw new TypeError('Arguments must be Buffers') + } + + if (a === b) return 0 + + var x = a.length + var y = b.length + + for (var i = 0, len = Math.min(x, y); i < len; ++i) { + if (a[i] !== b[i]) { + x = a[i] + y = b[i] + break + } + } + + if (x < y) return -1 + if (y < x) return 1 + return 0 +} + +Buffer.isEncoding = function isEncoding (encoding) { + switch (String(encoding).toLowerCase()) { + case 'hex': + case 'utf8': + case 'utf-8': + case 'ascii': + case 'latin1': + case 'binary': + case 'base64': + case 'ucs2': + case 'ucs-2': + case 'utf16le': + case 'utf-16le': + return true + default: + return false + } +} + +Buffer.concat = function concat (list, length) { + if (!isArray(list)) { + throw new TypeError('"list" argument must be an Array of Buffers') + } + + if (list.length === 0) { + return Buffer.alloc(0) + } + + var i + if (length === undefined) { + length = 0 + for (i = 0; i < list.length; ++i) { + length += list[i].length + } + } + + var buffer = Buffer.allocUnsafe(length) + var pos = 0 + for (i = 0; i < list.length; ++i) { + var buf = list[i] + if (!Buffer.isBuffer(buf)) { + throw new TypeError('"list" argument must be an Array of Buffers') + } + buf.copy(buffer, pos) + pos += buf.length + } + return buffer +} + +function byteLength (string, encoding) { + if (Buffer.isBuffer(string)) { + return string.length + } + if (typeof ArrayBuffer !== 'undefined' && typeof ArrayBuffer.isView === 'function' && + (ArrayBuffer.isView(string) || string instanceof ArrayBuffer)) { + return string.byteLength + } + if (typeof string !== 'string') { + string = '' + string + } + + var len = string.length + if (len === 0) return 0 + + // Use a for loop to avoid recursion + var loweredCase = false + for (;;) { + switch (encoding) { + case 'ascii': + case 'latin1': + case 'binary': + return len + case 'utf8': + case 'utf-8': + case undefined: + return utf8ToBytes(string).length + case 'ucs2': + case 'ucs-2': + case 'utf16le': + case 'utf-16le': + return len * 2 + case 'hex': + return len >>> 1 + case 'base64': + return base64ToBytes(string).length + default: + if (loweredCase) return utf8ToBytes(string).length // assume utf8 + encoding = ('' + encoding).toLowerCase() + loweredCase = true + } + } +} +Buffer.byteLength = byteLength + +function slowToString (encoding, start, end) { + var loweredCase = false + + // No need to verify that "this.length <= MAX_UINT32" since it's a read-only + // property of a typed array. + + // This behaves neither like String nor Uint8Array in that we set start/end + // to their upper/lower bounds if the value passed is out of range. + // undefined is handled specially as per ECMA-262 6th Edition, + // Section 13.3.3.7 Runtime Semantics: KeyedBindingInitialization. + if (start === undefined || start < 0) { + start = 0 + } + // Return early if start > this.length. Done here to prevent potential uint32 + // coercion fail below. 
+ if (start > this.length) { + return '' + } + + if (end === undefined || end > this.length) { + end = this.length + } + + if (end <= 0) { + return '' + } + + // Force coersion to uint32. This will also coerce falsey/NaN values to 0. + end >>>= 0 + start >>>= 0 + + if (end <= start) { + return '' + } + + if (!encoding) encoding = 'utf8' + + while (true) { + switch (encoding) { + case 'hex': + return hexSlice(this, start, end) + + case 'utf8': + case 'utf-8': + return utf8Slice(this, start, end) + + case 'ascii': + return asciiSlice(this, start, end) + + case 'latin1': + case 'binary': + return latin1Slice(this, start, end) + + case 'base64': + return base64Slice(this, start, end) + + case 'ucs2': + case 'ucs-2': + case 'utf16le': + case 'utf-16le': + return utf16leSlice(this, start, end) + + default: + if (loweredCase) throw new TypeError('Unknown encoding: ' + encoding) + encoding = (encoding + '').toLowerCase() + loweredCase = true + } + } +} + +// The property is used by `Buffer.isBuffer` and `is-buffer` (in Safari 5-7) to detect +// Buffer instances. +Buffer.prototype._isBuffer = true + +function swap (b, n, m) { + var i = b[n] + b[n] = b[m] + b[m] = i +} + +Buffer.prototype.swap16 = function swap16 () { + var len = this.length + if (len % 2 !== 0) { + throw new RangeError('Buffer size must be a multiple of 16-bits') + } + for (var i = 0; i < len; i += 2) { + swap(this, i, i + 1) + } + return this +} + +Buffer.prototype.swap32 = function swap32 () { + var len = this.length + if (len % 4 !== 0) { + throw new RangeError('Buffer size must be a multiple of 32-bits') + } + for (var i = 0; i < len; i += 4) { + swap(this, i, i + 3) + swap(this, i + 1, i + 2) + } + return this +} + +Buffer.prototype.swap64 = function swap64 () { + var len = this.length + if (len % 8 !== 0) { + throw new RangeError('Buffer size must be a multiple of 64-bits') + } + for (var i = 0; i < len; i += 8) { + swap(this, i, i + 7) + swap(this, i + 1, i + 6) + swap(this, i + 2, i + 5) + swap(this, i + 3, i + 4) + } + return this +} + +Buffer.prototype.toString = function toString () { + var length = this.length | 0 + if (length === 0) return '' + if (arguments.length === 0) return utf8Slice(this, 0, length) + return slowToString.apply(this, arguments) +} + +Buffer.prototype.equals = function equals (b) { + if (!Buffer.isBuffer(b)) throw new TypeError('Argument must be a Buffer') + if (this === b) return true + return Buffer.compare(this, b) === 0 +} + +Buffer.prototype.inspect = function inspect () { + var str = '' + var max = exports.INSPECT_MAX_BYTES + if (this.length > 0) { + str = this.toString('hex', 0, max).match(/.{2}/g).join(' ') + if (this.length > max) str += ' ... ' + } + return '' +} + +Buffer.prototype.compare = function compare (target, start, end, thisStart, thisEnd) { + if (!Buffer.isBuffer(target)) { + throw new TypeError('Argument must be a Buffer') + } + + if (start === undefined) { + start = 0 + } + if (end === undefined) { + end = target ? 
target.length : 0 + } + if (thisStart === undefined) { + thisStart = 0 + } + if (thisEnd === undefined) { + thisEnd = this.length + } + + if (start < 0 || end > target.length || thisStart < 0 || thisEnd > this.length) { + throw new RangeError('out of range index') + } + + if (thisStart >= thisEnd && start >= end) { + return 0 + } + if (thisStart >= thisEnd) { + return -1 + } + if (start >= end) { + return 1 + } + + start >>>= 0 + end >>>= 0 + thisStart >>>= 0 + thisEnd >>>= 0 + + if (this === target) return 0 + + var x = thisEnd - thisStart + var y = end - start + var len = Math.min(x, y) + + var thisCopy = this.slice(thisStart, thisEnd) + var targetCopy = target.slice(start, end) + + for (var i = 0; i < len; ++i) { + if (thisCopy[i] !== targetCopy[i]) { + x = thisCopy[i] + y = targetCopy[i] + break + } + } + + if (x < y) return -1 + if (y < x) return 1 + return 0 +} + +// Finds either the first index of `val` in `buffer` at offset >= `byteOffset`, +// OR the last index of `val` in `buffer` at offset <= `byteOffset`. +// +// Arguments: +// - buffer - a Buffer to search +// - val - a string, Buffer, or number +// - byteOffset - an index into `buffer`; will be clamped to an int32 +// - encoding - an optional encoding, relevant is val is a string +// - dir - true for indexOf, false for lastIndexOf +function bidirectionalIndexOf (buffer, val, byteOffset, encoding, dir) { + // Empty buffer means no match + if (buffer.length === 0) return -1 + + // Normalize byteOffset + if (typeof byteOffset === 'string') { + encoding = byteOffset + byteOffset = 0 + } else if (byteOffset > 0x7fffffff) { + byteOffset = 0x7fffffff + } else if (byteOffset < -0x80000000) { + byteOffset = -0x80000000 + } + byteOffset = +byteOffset // Coerce to Number. + if (isNaN(byteOffset)) { + // byteOffset: it it's undefined, null, NaN, "foo", etc, search whole buffer + byteOffset = dir ? 
0 : (buffer.length - 1) + } + + // Normalize byteOffset: negative offsets start from the end of the buffer + if (byteOffset < 0) byteOffset = buffer.length + byteOffset + if (byteOffset >= buffer.length) { + if (dir) return -1 + else byteOffset = buffer.length - 1 + } else if (byteOffset < 0) { + if (dir) byteOffset = 0 + else return -1 + } + + // Normalize val + if (typeof val === 'string') { + val = Buffer.from(val, encoding) + } + + // Finally, search either indexOf (if dir is true) or lastIndexOf + if (Buffer.isBuffer(val)) { + // Special case: looking for empty string/buffer always fails + if (val.length === 0) { + return -1 + } + return arrayIndexOf(buffer, val, byteOffset, encoding, dir) + } else if (typeof val === 'number') { + val = val & 0xFF // Search for a byte value [0-255] + if (Buffer.TYPED_ARRAY_SUPPORT && + typeof Uint8Array.prototype.indexOf === 'function') { + if (dir) { + return Uint8Array.prototype.indexOf.call(buffer, val, byteOffset) + } else { + return Uint8Array.prototype.lastIndexOf.call(buffer, val, byteOffset) + } + } + return arrayIndexOf(buffer, [ val ], byteOffset, encoding, dir) + } + + throw new TypeError('val must be string, number or Buffer') +} + +function arrayIndexOf (arr, val, byteOffset, encoding, dir) { + var indexSize = 1 + var arrLength = arr.length + var valLength = val.length + + if (encoding !== undefined) { + encoding = String(encoding).toLowerCase() + if (encoding === 'ucs2' || encoding === 'ucs-2' || + encoding === 'utf16le' || encoding === 'utf-16le') { + if (arr.length < 2 || val.length < 2) { + return -1 + } + indexSize = 2 + arrLength /= 2 + valLength /= 2 + byteOffset /= 2 + } + } + + function read (buf, i) { + if (indexSize === 1) { + return buf[i] + } else { + return buf.readUInt16BE(i * indexSize) + } + } + + var i + if (dir) { + var foundIndex = -1 + for (i = byteOffset; i < arrLength; i++) { + if (read(arr, i) === read(val, foundIndex === -1 ? 
0 : i - foundIndex)) { + if (foundIndex === -1) foundIndex = i + if (i - foundIndex + 1 === valLength) return foundIndex * indexSize + } else { + if (foundIndex !== -1) i -= i - foundIndex + foundIndex = -1 + } + } + } else { + if (byteOffset + valLength > arrLength) byteOffset = arrLength - valLength + for (i = byteOffset; i >= 0; i--) { + var found = true + for (var j = 0; j < valLength; j++) { + if (read(arr, i + j) !== read(val, j)) { + found = false + break + } + } + if (found) return i + } + } + + return -1 +} + +Buffer.prototype.includes = function includes (val, byteOffset, encoding) { + return this.indexOf(val, byteOffset, encoding) !== -1 +} + +Buffer.prototype.indexOf = function indexOf (val, byteOffset, encoding) { + return bidirectionalIndexOf(this, val, byteOffset, encoding, true) +} + +Buffer.prototype.lastIndexOf = function lastIndexOf (val, byteOffset, encoding) { + return bidirectionalIndexOf(this, val, byteOffset, encoding, false) +} + +function hexWrite (buf, string, offset, length) { + offset = Number(offset) || 0 + var remaining = buf.length - offset + if (!length) { + length = remaining + } else { + length = Number(length) + if (length > remaining) { + length = remaining + } + } + + // must be an even number of digits + var strLen = string.length + if (strLen % 2 !== 0) throw new TypeError('Invalid hex string') + + if (length > strLen / 2) { + length = strLen / 2 + } + for (var i = 0; i < length; ++i) { + var parsed = parseInt(string.substr(i * 2, 2), 16) + if (isNaN(parsed)) return i + buf[offset + i] = parsed + } + return i +} + +function utf8Write (buf, string, offset, length) { + return blitBuffer(utf8ToBytes(string, buf.length - offset), buf, offset, length) +} + +function asciiWrite (buf, string, offset, length) { + return blitBuffer(asciiToBytes(string), buf, offset, length) +} + +function latin1Write (buf, string, offset, length) { + return asciiWrite(buf, string, offset, length) +} + +function base64Write (buf, string, offset, length) { + return blitBuffer(base64ToBytes(string), buf, offset, length) +} + +function ucs2Write (buf, string, offset, length) { + return blitBuffer(utf16leToBytes(string, buf.length - offset), buf, offset, length) +} + +Buffer.prototype.write = function write (string, offset, length, encoding) { + // Buffer#write(string) + if (offset === undefined) { + encoding = 'utf8' + length = this.length + offset = 0 + // Buffer#write(string, encoding) + } else if (length === undefined && typeof offset === 'string') { + encoding = offset + length = this.length + offset = 0 + // Buffer#write(string, offset[, length][, encoding]) + } else if (isFinite(offset)) { + offset = offset | 0 + if (isFinite(length)) { + length = length | 0 + if (encoding === undefined) encoding = 'utf8' + } else { + encoding = length + length = undefined + } + // legacy write(string, encoding, offset, length) - remove in v0.13 + } else { + throw new Error( + 'Buffer.write(string, encoding, offset[, length]) is no longer supported' + ) + } + + var remaining = this.length - offset + if (length === undefined || length > remaining) length = remaining + + if ((string.length > 0 && (length < 0 || offset < 0)) || offset > this.length) { + throw new RangeError('Attempt to write outside buffer bounds') + } + + if (!encoding) encoding = 'utf8' + + var loweredCase = false + for (;;) { + switch (encoding) { + case 'hex': + return hexWrite(this, string, offset, length) + + case 'utf8': + case 'utf-8': + return utf8Write(this, string, offset, length) + + case 'ascii': + return 
asciiWrite(this, string, offset, length) + + case 'latin1': + case 'binary': + return latin1Write(this, string, offset, length) + + case 'base64': + // Warning: maxLength not taken into account in base64Write + return base64Write(this, string, offset, length) + + case 'ucs2': + case 'ucs-2': + case 'utf16le': + case 'utf-16le': + return ucs2Write(this, string, offset, length) + + default: + if (loweredCase) throw new TypeError('Unknown encoding: ' + encoding) + encoding = ('' + encoding).toLowerCase() + loweredCase = true + } + } +} + +Buffer.prototype.toJSON = function toJSON () { + return { + type: 'Buffer', + data: Array.prototype.slice.call(this._arr || this, 0) + } +} + +function base64Slice (buf, start, end) { + if (start === 0 && end === buf.length) { + return base64.fromByteArray(buf) + } else { + return base64.fromByteArray(buf.slice(start, end)) + } +} + +function utf8Slice (buf, start, end) { + end = Math.min(buf.length, end) + var res = [] + + var i = start + while (i < end) { + var firstByte = buf[i] + var codePoint = null + var bytesPerSequence = (firstByte > 0xEF) ? 4 + : (firstByte > 0xDF) ? 3 + : (firstByte > 0xBF) ? 2 + : 1 + + if (i + bytesPerSequence <= end) { + var secondByte, thirdByte, fourthByte, tempCodePoint + + switch (bytesPerSequence) { + case 1: + if (firstByte < 0x80) { + codePoint = firstByte + } + break + case 2: + secondByte = buf[i + 1] + if ((secondByte & 0xC0) === 0x80) { + tempCodePoint = (firstByte & 0x1F) << 0x6 | (secondByte & 0x3F) + if (tempCodePoint > 0x7F) { + codePoint = tempCodePoint + } + } + break + case 3: + secondByte = buf[i + 1] + thirdByte = buf[i + 2] + if ((secondByte & 0xC0) === 0x80 && (thirdByte & 0xC0) === 0x80) { + tempCodePoint = (firstByte & 0xF) << 0xC | (secondByte & 0x3F) << 0x6 | (thirdByte & 0x3F) + if (tempCodePoint > 0x7FF && (tempCodePoint < 0xD800 || tempCodePoint > 0xDFFF)) { + codePoint = tempCodePoint + } + } + break + case 4: + secondByte = buf[i + 1] + thirdByte = buf[i + 2] + fourthByte = buf[i + 3] + if ((secondByte & 0xC0) === 0x80 && (thirdByte & 0xC0) === 0x80 && (fourthByte & 0xC0) === 0x80) { + tempCodePoint = (firstByte & 0xF) << 0x12 | (secondByte & 0x3F) << 0xC | (thirdByte & 0x3F) << 0x6 | (fourthByte & 0x3F) + if (tempCodePoint > 0xFFFF && tempCodePoint < 0x110000) { + codePoint = tempCodePoint + } + } + } + } + + if (codePoint === null) { + // we did not generate a valid codePoint so insert a + // replacement char (U+FFFD) and advance only 1 byte + codePoint = 0xFFFD + bytesPerSequence = 1 + } else if (codePoint > 0xFFFF) { + // encode to utf16 (surrogate pair dance) + codePoint -= 0x10000 + res.push(codePoint >>> 10 & 0x3FF | 0xD800) + codePoint = 0xDC00 | codePoint & 0x3FF + } + + res.push(codePoint) + i += bytesPerSequence + } + + return decodeCodePointsArray(res) +} + +// Based on http://stackoverflow.com/a/22747272/680742, the browser with +// the lowest limit is Chrome, with 0x10000 args. +// We go 1 magnitude less, for safety +var MAX_ARGUMENTS_LENGTH = 0x1000 + +function decodeCodePointsArray (codePoints) { + var len = codePoints.length + if (len <= MAX_ARGUMENTS_LENGTH) { + return String.fromCharCode.apply(String, codePoints) // avoid extra slice() + } + + // Decode in chunks to avoid "call stack size exceeded". 
+ var res = '' + var i = 0 + while (i < len) { + res += String.fromCharCode.apply( + String, + codePoints.slice(i, i += MAX_ARGUMENTS_LENGTH) + ) + } + return res +} + +function asciiSlice (buf, start, end) { + var ret = '' + end = Math.min(buf.length, end) + + for (var i = start; i < end; ++i) { + ret += String.fromCharCode(buf[i] & 0x7F) + } + return ret +} + +function latin1Slice (buf, start, end) { + var ret = '' + end = Math.min(buf.length, end) + + for (var i = start; i < end; ++i) { + ret += String.fromCharCode(buf[i]) + } + return ret +} + +function hexSlice (buf, start, end) { + var len = buf.length + + if (!start || start < 0) start = 0 + if (!end || end < 0 || end > len) end = len + + var out = '' + for (var i = start; i < end; ++i) { + out += toHex(buf[i]) + } + return out +} + +function utf16leSlice (buf, start, end) { + var bytes = buf.slice(start, end) + var res = '' + for (var i = 0; i < bytes.length; i += 2) { + res += String.fromCharCode(bytes[i] + bytes[i + 1] * 256) + } + return res +} + +Buffer.prototype.slice = function slice (start, end) { + var len = this.length + start = ~~start + end = end === undefined ? len : ~~end + + if (start < 0) { + start += len + if (start < 0) start = 0 + } else if (start > len) { + start = len + } + + if (end < 0) { + end += len + if (end < 0) end = 0 + } else if (end > len) { + end = len + } + + if (end < start) end = start + + var newBuf + if (Buffer.TYPED_ARRAY_SUPPORT) { + newBuf = this.subarray(start, end) + newBuf.__proto__ = Buffer.prototype + } else { + var sliceLen = end - start + newBuf = new Buffer(sliceLen, undefined) + for (var i = 0; i < sliceLen; ++i) { + newBuf[i] = this[i + start] + } + } + + return newBuf +} + +/* + * Need to make sure that buffer isn't trying to write out of bounds. 
+ */ +function checkOffset (offset, ext, length) { + if ((offset % 1) !== 0 || offset < 0) throw new RangeError('offset is not uint') + if (offset + ext > length) throw new RangeError('Trying to access beyond buffer length') +} + +Buffer.prototype.readUIntLE = function readUIntLE (offset, byteLength, noAssert) { + offset = offset | 0 + byteLength = byteLength | 0 + if (!noAssert) checkOffset(offset, byteLength, this.length) + + var val = this[offset] + var mul = 1 + var i = 0 + while (++i < byteLength && (mul *= 0x100)) { + val += this[offset + i] * mul + } + + return val +} + +Buffer.prototype.readUIntBE = function readUIntBE (offset, byteLength, noAssert) { + offset = offset | 0 + byteLength = byteLength | 0 + if (!noAssert) { + checkOffset(offset, byteLength, this.length) + } + + var val = this[offset + --byteLength] + var mul = 1 + while (byteLength > 0 && (mul *= 0x100)) { + val += this[offset + --byteLength] * mul + } + + return val +} + +Buffer.prototype.readUInt8 = function readUInt8 (offset, noAssert) { + if (!noAssert) checkOffset(offset, 1, this.length) + return this[offset] +} + +Buffer.prototype.readUInt16LE = function readUInt16LE (offset, noAssert) { + if (!noAssert) checkOffset(offset, 2, this.length) + return this[offset] | (this[offset + 1] << 8) +} + +Buffer.prototype.readUInt16BE = function readUInt16BE (offset, noAssert) { + if (!noAssert) checkOffset(offset, 2, this.length) + return (this[offset] << 8) | this[offset + 1] +} + +Buffer.prototype.readUInt32LE = function readUInt32LE (offset, noAssert) { + if (!noAssert) checkOffset(offset, 4, this.length) + + return ((this[offset]) | + (this[offset + 1] << 8) | + (this[offset + 2] << 16)) + + (this[offset + 3] * 0x1000000) +} + +Buffer.prototype.readUInt32BE = function readUInt32BE (offset, noAssert) { + if (!noAssert) checkOffset(offset, 4, this.length) + + return (this[offset] * 0x1000000) + + ((this[offset + 1] << 16) | + (this[offset + 2] << 8) | + this[offset + 3]) +} + +Buffer.prototype.readIntLE = function readIntLE (offset, byteLength, noAssert) { + offset = offset | 0 + byteLength = byteLength | 0 + if (!noAssert) checkOffset(offset, byteLength, this.length) + + var val = this[offset] + var mul = 1 + var i = 0 + while (++i < byteLength && (mul *= 0x100)) { + val += this[offset + i] * mul + } + mul *= 0x80 + + if (val >= mul) val -= Math.pow(2, 8 * byteLength) + + return val +} + +Buffer.prototype.readIntBE = function readIntBE (offset, byteLength, noAssert) { + offset = offset | 0 + byteLength = byteLength | 0 + if (!noAssert) checkOffset(offset, byteLength, this.length) + + var i = byteLength + var mul = 1 + var val = this[offset + --i] + while (i > 0 && (mul *= 0x100)) { + val += this[offset + --i] * mul + } + mul *= 0x80 + + if (val >= mul) val -= Math.pow(2, 8 * byteLength) + + return val +} + +Buffer.prototype.readInt8 = function readInt8 (offset, noAssert) { + if (!noAssert) checkOffset(offset, 1, this.length) + if (!(this[offset] & 0x80)) return (this[offset]) + return ((0xff - this[offset] + 1) * -1) +} + +Buffer.prototype.readInt16LE = function readInt16LE (offset, noAssert) { + if (!noAssert) checkOffset(offset, 2, this.length) + var val = this[offset] | (this[offset + 1] << 8) + return (val & 0x8000) ? val | 0xFFFF0000 : val +} + +Buffer.prototype.readInt16BE = function readInt16BE (offset, noAssert) { + if (!noAssert) checkOffset(offset, 2, this.length) + var val = this[offset + 1] | (this[offset] << 8) + return (val & 0x8000) ? 
val | 0xFFFF0000 : val +} + +Buffer.prototype.readInt32LE = function readInt32LE (offset, noAssert) { + if (!noAssert) checkOffset(offset, 4, this.length) + + return (this[offset]) | + (this[offset + 1] << 8) | + (this[offset + 2] << 16) | + (this[offset + 3] << 24) +} + +Buffer.prototype.readInt32BE = function readInt32BE (offset, noAssert) { + if (!noAssert) checkOffset(offset, 4, this.length) + + return (this[offset] << 24) | + (this[offset + 1] << 16) | + (this[offset + 2] << 8) | + (this[offset + 3]) +} + +Buffer.prototype.readFloatLE = function readFloatLE (offset, noAssert) { + if (!noAssert) checkOffset(offset, 4, this.length) + return ieee754.read(this, offset, true, 23, 4) +} + +Buffer.prototype.readFloatBE = function readFloatBE (offset, noAssert) { + if (!noAssert) checkOffset(offset, 4, this.length) + return ieee754.read(this, offset, false, 23, 4) +} + +Buffer.prototype.readDoubleLE = function readDoubleLE (offset, noAssert) { + if (!noAssert) checkOffset(offset, 8, this.length) + return ieee754.read(this, offset, true, 52, 8) +} + +Buffer.prototype.readDoubleBE = function readDoubleBE (offset, noAssert) { + if (!noAssert) checkOffset(offset, 8, this.length) + return ieee754.read(this, offset, false, 52, 8) +} + +function checkInt (buf, value, offset, ext, max, min) { + if (!Buffer.isBuffer(buf)) throw new TypeError('"buffer" argument must be a Buffer instance') + if (value > max || value < min) throw new RangeError('"value" argument is out of bounds') + if (offset + ext > buf.length) throw new RangeError('Index out of range') +} + +Buffer.prototype.writeUIntLE = function writeUIntLE (value, offset, byteLength, noAssert) { + value = +value + offset = offset | 0 + byteLength = byteLength | 0 + if (!noAssert) { + var maxBytes = Math.pow(2, 8 * byteLength) - 1 + checkInt(this, value, offset, byteLength, maxBytes, 0) + } + + var mul = 1 + var i = 0 + this[offset] = value & 0xFF + while (++i < byteLength && (mul *= 0x100)) { + this[offset + i] = (value / mul) & 0xFF + } + + return offset + byteLength +} + +Buffer.prototype.writeUIntBE = function writeUIntBE (value, offset, byteLength, noAssert) { + value = +value + offset = offset | 0 + byteLength = byteLength | 0 + if (!noAssert) { + var maxBytes = Math.pow(2, 8 * byteLength) - 1 + checkInt(this, value, offset, byteLength, maxBytes, 0) + } + + var i = byteLength - 1 + var mul = 1 + this[offset + i] = value & 0xFF + while (--i >= 0 && (mul *= 0x100)) { + this[offset + i] = (value / mul) & 0xFF + } + + return offset + byteLength +} + +Buffer.prototype.writeUInt8 = function writeUInt8 (value, offset, noAssert) { + value = +value + offset = offset | 0 + if (!noAssert) checkInt(this, value, offset, 1, 0xff, 0) + if (!Buffer.TYPED_ARRAY_SUPPORT) value = Math.floor(value) + this[offset] = (value & 0xff) + return offset + 1 +} + +function objectWriteUInt16 (buf, value, offset, littleEndian) { + if (value < 0) value = 0xffff + value + 1 + for (var i = 0, j = Math.min(buf.length - offset, 2); i < j; ++i) { + buf[offset + i] = (value & (0xff << (8 * (littleEndian ? i : 1 - i)))) >>> + (littleEndian ? 
i : 1 - i) * 8 + } +} + +Buffer.prototype.writeUInt16LE = function writeUInt16LE (value, offset, noAssert) { + value = +value + offset = offset | 0 + if (!noAssert) checkInt(this, value, offset, 2, 0xffff, 0) + if (Buffer.TYPED_ARRAY_SUPPORT) { + this[offset] = (value & 0xff) + this[offset + 1] = (value >>> 8) + } else { + objectWriteUInt16(this, value, offset, true) + } + return offset + 2 +} + +Buffer.prototype.writeUInt16BE = function writeUInt16BE (value, offset, noAssert) { + value = +value + offset = offset | 0 + if (!noAssert) checkInt(this, value, offset, 2, 0xffff, 0) + if (Buffer.TYPED_ARRAY_SUPPORT) { + this[offset] = (value >>> 8) + this[offset + 1] = (value & 0xff) + } else { + objectWriteUInt16(this, value, offset, false) + } + return offset + 2 +} + +function objectWriteUInt32 (buf, value, offset, littleEndian) { + if (value < 0) value = 0xffffffff + value + 1 + for (var i = 0, j = Math.min(buf.length - offset, 4); i < j; ++i) { + buf[offset + i] = (value >>> (littleEndian ? i : 3 - i) * 8) & 0xff + } +} + +Buffer.prototype.writeUInt32LE = function writeUInt32LE (value, offset, noAssert) { + value = +value + offset = offset | 0 + if (!noAssert) checkInt(this, value, offset, 4, 0xffffffff, 0) + if (Buffer.TYPED_ARRAY_SUPPORT) { + this[offset + 3] = (value >>> 24) + this[offset + 2] = (value >>> 16) + this[offset + 1] = (value >>> 8) + this[offset] = (value & 0xff) + } else { + objectWriteUInt32(this, value, offset, true) + } + return offset + 4 +} + +Buffer.prototype.writeUInt32BE = function writeUInt32BE (value, offset, noAssert) { + value = +value + offset = offset | 0 + if (!noAssert) checkInt(this, value, offset, 4, 0xffffffff, 0) + if (Buffer.TYPED_ARRAY_SUPPORT) { + this[offset] = (value >>> 24) + this[offset + 1] = (value >>> 16) + this[offset + 2] = (value >>> 8) + this[offset + 3] = (value & 0xff) + } else { + objectWriteUInt32(this, value, offset, false) + } + return offset + 4 +} + +Buffer.prototype.writeIntLE = function writeIntLE (value, offset, byteLength, noAssert) { + value = +value + offset = offset | 0 + if (!noAssert) { + var limit = Math.pow(2, 8 * byteLength - 1) + + checkInt(this, value, offset, byteLength, limit - 1, -limit) + } + + var i = 0 + var mul = 1 + var sub = 0 + this[offset] = value & 0xFF + while (++i < byteLength && (mul *= 0x100)) { + if (value < 0 && sub === 0 && this[offset + i - 1] !== 0) { + sub = 1 + } + this[offset + i] = ((value / mul) >> 0) - sub & 0xFF + } + + return offset + byteLength +} + +Buffer.prototype.writeIntBE = function writeIntBE (value, offset, byteLength, noAssert) { + value = +value + offset = offset | 0 + if (!noAssert) { + var limit = Math.pow(2, 8 * byteLength - 1) + + checkInt(this, value, offset, byteLength, limit - 1, -limit) + } + + var i = byteLength - 1 + var mul = 1 + var sub = 0 + this[offset + i] = value & 0xFF + while (--i >= 0 && (mul *= 0x100)) { + if (value < 0 && sub === 0 && this[offset + i + 1] !== 0) { + sub = 1 + } + this[offset + i] = ((value / mul) >> 0) - sub & 0xFF + } + + return offset + byteLength +} + +Buffer.prototype.writeInt8 = function writeInt8 (value, offset, noAssert) { + value = +value + offset = offset | 0 + if (!noAssert) checkInt(this, value, offset, 1, 0x7f, -0x80) + if (!Buffer.TYPED_ARRAY_SUPPORT) value = Math.floor(value) + if (value < 0) value = 0xff + value + 1 + this[offset] = (value & 0xff) + return offset + 1 +} + +Buffer.prototype.writeInt16LE = function writeInt16LE (value, offset, noAssert) { + value = +value + offset = offset | 0 + if (!noAssert) checkInt(this, 
value, offset, 2, 0x7fff, -0x8000) + if (Buffer.TYPED_ARRAY_SUPPORT) { + this[offset] = (value & 0xff) + this[offset + 1] = (value >>> 8) + } else { + objectWriteUInt16(this, value, offset, true) + } + return offset + 2 +} + +Buffer.prototype.writeInt16BE = function writeInt16BE (value, offset, noAssert) { + value = +value + offset = offset | 0 + if (!noAssert) checkInt(this, value, offset, 2, 0x7fff, -0x8000) + if (Buffer.TYPED_ARRAY_SUPPORT) { + this[offset] = (value >>> 8) + this[offset + 1] = (value & 0xff) + } else { + objectWriteUInt16(this, value, offset, false) + } + return offset + 2 +} + +Buffer.prototype.writeInt32LE = function writeInt32LE (value, offset, noAssert) { + value = +value + offset = offset | 0 + if (!noAssert) checkInt(this, value, offset, 4, 0x7fffffff, -0x80000000) + if (Buffer.TYPED_ARRAY_SUPPORT) { + this[offset] = (value & 0xff) + this[offset + 1] = (value >>> 8) + this[offset + 2] = (value >>> 16) + this[offset + 3] = (value >>> 24) + } else { + objectWriteUInt32(this, value, offset, true) + } + return offset + 4 +} + +Buffer.prototype.writeInt32BE = function writeInt32BE (value, offset, noAssert) { + value = +value + offset = offset | 0 + if (!noAssert) checkInt(this, value, offset, 4, 0x7fffffff, -0x80000000) + if (value < 0) value = 0xffffffff + value + 1 + if (Buffer.TYPED_ARRAY_SUPPORT) { + this[offset] = (value >>> 24) + this[offset + 1] = (value >>> 16) + this[offset + 2] = (value >>> 8) + this[offset + 3] = (value & 0xff) + } else { + objectWriteUInt32(this, value, offset, false) + } + return offset + 4 +} + +function checkIEEE754 (buf, value, offset, ext, max, min) { + if (offset + ext > buf.length) throw new RangeError('Index out of range') + if (offset < 0) throw new RangeError('Index out of range') +} + +function writeFloat (buf, value, offset, littleEndian, noAssert) { + if (!noAssert) { + checkIEEE754(buf, value, offset, 4, 3.4028234663852886e+38, -3.4028234663852886e+38) + } + ieee754.write(buf, value, offset, littleEndian, 23, 4) + return offset + 4 +} + +Buffer.prototype.writeFloatLE = function writeFloatLE (value, offset, noAssert) { + return writeFloat(this, value, offset, true, noAssert) +} + +Buffer.prototype.writeFloatBE = function writeFloatBE (value, offset, noAssert) { + return writeFloat(this, value, offset, false, noAssert) +} + +function writeDouble (buf, value, offset, littleEndian, noAssert) { + if (!noAssert) { + checkIEEE754(buf, value, offset, 8, 1.7976931348623157E+308, -1.7976931348623157E+308) + } + ieee754.write(buf, value, offset, littleEndian, 52, 8) + return offset + 8 +} + +Buffer.prototype.writeDoubleLE = function writeDoubleLE (value, offset, noAssert) { + return writeDouble(this, value, offset, true, noAssert) +} + +Buffer.prototype.writeDoubleBE = function writeDoubleBE (value, offset, noAssert) { + return writeDouble(this, value, offset, false, noAssert) +} + +// copy(targetBuffer, targetStart=0, sourceStart=0, sourceEnd=buffer.length) +Buffer.prototype.copy = function copy (target, targetStart, start, end) { + if (!start) start = 0 + if (!end && end !== 0) end = this.length + if (targetStart >= target.length) targetStart = target.length + if (!targetStart) targetStart = 0 + if (end > 0 && end < start) end = start + + // Copy 0 bytes; we're done + if (end === start) return 0 + if (target.length === 0 || this.length === 0) return 0 + + // Fatal error conditions + if (targetStart < 0) { + throw new RangeError('targetStart out of bounds') + } + if (start < 0 || start >= this.length) throw new RangeError('sourceStart 
out of bounds') + if (end < 0) throw new RangeError('sourceEnd out of bounds') + + // Are we oob? + if (end > this.length) end = this.length + if (target.length - targetStart < end - start) { + end = target.length - targetStart + start + } + + var len = end - start + var i + + if (this === target && start < targetStart && targetStart < end) { + // descending copy from end + for (i = len - 1; i >= 0; --i) { + target[i + targetStart] = this[i + start] + } + } else if (len < 1000 || !Buffer.TYPED_ARRAY_SUPPORT) { + // ascending copy from start + for (i = 0; i < len; ++i) { + target[i + targetStart] = this[i + start] + } + } else { + Uint8Array.prototype.set.call( + target, + this.subarray(start, start + len), + targetStart + ) + } + + return len +} + +// Usage: +// buffer.fill(number[, offset[, end]]) +// buffer.fill(buffer[, offset[, end]]) +// buffer.fill(string[, offset[, end]][, encoding]) +Buffer.prototype.fill = function fill (val, start, end, encoding) { + // Handle string cases: + if (typeof val === 'string') { + if (typeof start === 'string') { + encoding = start + start = 0 + end = this.length + } else if (typeof end === 'string') { + encoding = end + end = this.length + } + if (val.length === 1) { + var code = val.charCodeAt(0) + if (code < 256) { + val = code + } + } + if (encoding !== undefined && typeof encoding !== 'string') { + throw new TypeError('encoding must be a string') + } + if (typeof encoding === 'string' && !Buffer.isEncoding(encoding)) { + throw new TypeError('Unknown encoding: ' + encoding) + } + } else if (typeof val === 'number') { + val = val & 255 + } + + // Invalid ranges are not set to a default, so can range check early. + if (start < 0 || this.length < start || this.length < end) { + throw new RangeError('Out of range index') + } + + if (end <= start) { + return this + } + + start = start >>> 0 + end = end === undefined ? this.length : end >>> 0 + + if (!val) val = 0 + + var i + if (typeof val === 'number') { + for (i = start; i < end; ++i) { + this[i] = val + } + } else { + var bytes = Buffer.isBuffer(val) + ? 
val + : utf8ToBytes(new Buffer(val, encoding).toString()) + var len = bytes.length + for (i = 0; i < end - start; ++i) { + this[i + start] = bytes[i % len] + } + } + + return this +} + +// HELPER FUNCTIONS +// ================ + +var INVALID_BASE64_RE = /[^+\/0-9A-Za-z-_]/g + +function base64clean (str) { + // Node strips out invalid characters like \n and \t from the string, base64-js does not + str = stringtrim(str).replace(INVALID_BASE64_RE, '') + // Node converts strings with length < 2 to '' + if (str.length < 2) return '' + // Node allows for non-padded base64 strings (missing trailing ===), base64-js does not + while (str.length % 4 !== 0) { + str = str + '=' + } + return str +} + +function stringtrim (str) { + if (str.trim) return str.trim() + return str.replace(/^\s+|\s+$/g, '') +} + +function toHex (n) { + if (n < 16) return '0' + n.toString(16) + return n.toString(16) +} + +function utf8ToBytes (string, units) { + units = units || Infinity + var codePoint + var length = string.length + var leadSurrogate = null + var bytes = [] + + for (var i = 0; i < length; ++i) { + codePoint = string.charCodeAt(i) + + // is surrogate component + if (codePoint > 0xD7FF && codePoint < 0xE000) { + // last char was a lead + if (!leadSurrogate) { + // no lead yet + if (codePoint > 0xDBFF) { + // unexpected trail + if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD) + continue + } else if (i + 1 === length) { + // unpaired lead + if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD) + continue + } + + // valid lead + leadSurrogate = codePoint + + continue + } + + // 2 leads in a row + if (codePoint < 0xDC00) { + if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD) + leadSurrogate = codePoint + continue + } + + // valid surrogate pair + codePoint = (leadSurrogate - 0xD800 << 10 | codePoint - 0xDC00) + 0x10000 + } else if (leadSurrogate) { + // valid bmp char, but last char was a lead + if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD) + } + + leadSurrogate = null + + // encode utf8 + if (codePoint < 0x80) { + if ((units -= 1) < 0) break + bytes.push(codePoint) + } else if (codePoint < 0x800) { + if ((units -= 2) < 0) break + bytes.push( + codePoint >> 0x6 | 0xC0, + codePoint & 0x3F | 0x80 + ) + } else if (codePoint < 0x10000) { + if ((units -= 3) < 0) break + bytes.push( + codePoint >> 0xC | 0xE0, + codePoint >> 0x6 & 0x3F | 0x80, + codePoint & 0x3F | 0x80 + ) + } else if (codePoint < 0x110000) { + if ((units -= 4) < 0) break + bytes.push( + codePoint >> 0x12 | 0xF0, + codePoint >> 0xC & 0x3F | 0x80, + codePoint >> 0x6 & 0x3F | 0x80, + codePoint & 0x3F | 0x80 + ) + } else { + throw new Error('Invalid code point') + } + } + + return bytes +} + +function asciiToBytes (str) { + var byteArray = [] + for (var i = 0; i < str.length; ++i) { + // Node's code seems to be doing this and not & 0x7F.. 
+ byteArray.push(str.charCodeAt(i) & 0xFF) + } + return byteArray +} + +function utf16leToBytes (str, units) { + var c, hi, lo + var byteArray = [] + for (var i = 0; i < str.length; ++i) { + if ((units -= 2) < 0) break + + c = str.charCodeAt(i) + hi = c >> 8 + lo = c % 256 + byteArray.push(lo) + byteArray.push(hi) + } + + return byteArray +} + +function base64ToBytes (str) { + return base64.toByteArray(base64clean(str)) +} + +function blitBuffer (src, dst, offset, length) { + for (var i = 0; i < length; ++i) { + if ((i + offset >= dst.length) || (i >= src.length)) break + dst[i + offset] = src[i] + } + return i +} + +function isnan (val) { + return val !== val // eslint-disable-line no-self-compare +} + +/* WEBPACK VAR INJECTION */}.call(exports, __webpack_require__(0))) + +/***/ }), +/* 4 */ +/***/ (function(module, exports, __webpack_require__) { + +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a duplex stream is just a stream that is both readable and writable. +// Since JS doesn't have multiple prototypal inheritance, this class +// prototypally inherits from Readable, and then parasitically from +// Writable. 
+ + + +/**/ + +var pna = __webpack_require__(6); +/**/ + +/**/ +var objectKeys = Object.keys || function (obj) { + var keys = []; + for (var key in obj) { + keys.push(key); + }return keys; +}; +/**/ + +module.exports = Duplex; + +/**/ +var util = Object.create(__webpack_require__(5)); +util.inherits = __webpack_require__(2); +/**/ + +var Readable = __webpack_require__(15); +var Writable = __webpack_require__(18); + +util.inherits(Duplex, Readable); + +{ + // avoid scope creep, the keys array can then be collected + var keys = objectKeys(Writable.prototype); + for (var v = 0; v < keys.length; v++) { + var method = keys[v]; + if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; + } +} + +function Duplex(options) { + if (!(this instanceof Duplex)) return new Duplex(options); + + Readable.call(this, options); + Writable.call(this, options); + + if (options && options.readable === false) this.readable = false; + + if (options && options.writable === false) this.writable = false; + + this.allowHalfOpen = true; + if (options && options.allowHalfOpen === false) this.allowHalfOpen = false; + + this.once('end', onend); +} + +Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function () { + return this._writableState.highWaterMark; + } +}); + +// the no-half-open enforcer +function onend() { + // if we allow half-open state, or if the writable side ended, + // then we're ok. + if (this.allowHalfOpen || this._writableState.ended) return; + + // no more data can be written. + // But allow more writes to happen in this tick. + pna.nextTick(onEndNT, this); +} + +function onEndNT(self) { + self.end(); +} + +Object.defineProperty(Duplex.prototype, 'destroyed', { + get: function () { + if (this._readableState === undefined || this._writableState === undefined) { + return false; + } + return this._readableState.destroyed && this._writableState.destroyed; + }, + set: function (value) { + // we ignore the value if the stream + // has not been initialized yet + if (this._readableState === undefined || this._writableState === undefined) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + this._writableState.destroyed = value; + } +}); + +Duplex.prototype._destroy = function (err, cb) { + this.push(null); + this.end(); + + pna.nextTick(cb, err); +}; + +/***/ }), +/* 5 */ +/***/ (function(module, exports, __webpack_require__) { + +/* WEBPACK VAR INJECTION */(function(Buffer) {// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. 
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// NOTE: These type checking functions intentionally don't use `instanceof` +// because it is fragile and can be easily faked with `Object.create()`. + +function isArray(arg) { + if (Array.isArray) { + return Array.isArray(arg); + } + return objectToString(arg) === '[object Array]'; +} +exports.isArray = isArray; + +function isBoolean(arg) { + return typeof arg === 'boolean'; +} +exports.isBoolean = isBoolean; + +function isNull(arg) { + return arg === null; +} +exports.isNull = isNull; + +function isNullOrUndefined(arg) { + return arg == null; +} +exports.isNullOrUndefined = isNullOrUndefined; + +function isNumber(arg) { + return typeof arg === 'number'; +} +exports.isNumber = isNumber; + +function isString(arg) { + return typeof arg === 'string'; +} +exports.isString = isString; + +function isSymbol(arg) { + return typeof arg === 'symbol'; +} +exports.isSymbol = isSymbol; + +function isUndefined(arg) { + return arg === void 0; +} +exports.isUndefined = isUndefined; + +function isRegExp(re) { + return objectToString(re) === '[object RegExp]'; +} +exports.isRegExp = isRegExp; + +function isObject(arg) { + return typeof arg === 'object' && arg !== null; +} +exports.isObject = isObject; + +function isDate(d) { + return objectToString(d) === '[object Date]'; +} +exports.isDate = isDate; + +function isError(e) { + return (objectToString(e) === '[object Error]' || e instanceof Error); +} +exports.isError = isError; + +function isFunction(arg) { + return typeof arg === 'function'; +} +exports.isFunction = isFunction; + +function isPrimitive(arg) { + return arg === null || + typeof arg === 'boolean' || + typeof arg === 'number' || + typeof arg === 'string' || + typeof arg === 'symbol' || // ES6 symbol + typeof arg === 'undefined'; +} +exports.isPrimitive = isPrimitive; + +exports.isBuffer = Buffer.isBuffer; + +function objectToString(o) { + return Object.prototype.toString.call(o); +} + +/* WEBPACK VAR INJECTION */}.call(exports, __webpack_require__(3).Buffer)) + +/***/ }), +/* 6 */ +/***/ (function(module, exports, __webpack_require__) { + +"use strict"; +/* WEBPACK VAR INJECTION */(function(process) { + +if (typeof process === 'undefined' || + !process.version || + process.version.indexOf('v0.') === 0 || + process.version.indexOf('v1.') === 0 && process.version.indexOf('v1.8.') !== 0) { + module.exports = { nextTick: nextTick }; +} else { + module.exports = process +} + +function nextTick(fn, arg1, arg2, arg3) { + if (typeof fn !== 'function') { + throw new TypeError('"callback" argument must be a function'); + } + var len = arguments.length; + var args, i; + switch (len) { + case 0: + case 1: + return process.nextTick(fn); + case 2: + return process.nextTick(function afterTickOne() { + fn.call(null, arg1); + }); + case 3: + return process.nextTick(function afterTickTwo() { + fn.call(null, arg1, arg2); + }); + case 4: + return process.nextTick(function afterTickThree() { + fn.call(null, arg1, arg2, arg3); + }); + default: + args = new Array(len - 1); + i = 0; + while (i < args.length) { + args[i++] = 
arguments[i]; + } + return process.nextTick(function afterTick() { + fn.apply(null, args); + }); + } +} + + +/* WEBPACK VAR INJECTION */}.call(exports, __webpack_require__(1))) + +/***/ }), +/* 7 */ +/***/ (function(module, exports, __webpack_require__) { + +/* eslint-disable node/no-deprecated-api */ +var buffer = __webpack_require__(3) +var Buffer = buffer.Buffer + +// alternative to using Object.keys for old browsers +function copyProps (src, dst) { + for (var key in src) { + dst[key] = src[key] + } +} +if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) { + module.exports = buffer +} else { + // Copy properties from require('buffer') + copyProps(buffer, exports) + exports.Buffer = SafeBuffer +} + +function SafeBuffer (arg, encodingOrOffset, length) { + return Buffer(arg, encodingOrOffset, length) +} + +// Copy static methods from Buffer +copyProps(Buffer, SafeBuffer) + +SafeBuffer.from = function (arg, encodingOrOffset, length) { + if (typeof arg === 'number') { + throw new TypeError('Argument must not be a number') + } + return Buffer(arg, encodingOrOffset, length) +} + +SafeBuffer.alloc = function (size, fill, encoding) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + var buf = Buffer(size) + if (fill !== undefined) { + if (typeof encoding === 'string') { + buf.fill(fill, encoding) + } else { + buf.fill(fill) + } + } else { + buf.fill(0) + } + return buf +} + +SafeBuffer.allocUnsafe = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return Buffer(size) +} + +SafeBuffer.allocUnsafeSlow = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return buffer.SlowBuffer(size) +} + + +/***/ }), +/* 8 */ +/***/ (function(module, exports, __webpack_require__) { + +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ + + +var punycode = __webpack_require__(25); +var util = __webpack_require__(27); + +exports.parse = urlParse; +exports.resolve = urlResolve; +exports.resolveObject = urlResolveObject; +exports.format = urlFormat; + +exports.Url = Url; + +function Url() { + this.protocol = null; + this.slashes = null; + this.auth = null; + this.host = null; + this.port = null; + this.hostname = null; + this.hash = null; + this.search = null; + this.query = null; + this.pathname = null; + this.path = null; + this.href = null; +} + +// Reference: RFC 3986, RFC 1808, RFC 2396 + +// define these here so at least they only have to be +// compiled once on the first module load. +var protocolPattern = /^([a-z0-9.+-]+:)/i, + portPattern = /:[0-9]*$/, + + // Special case for a simple path URL + simplePathPattern = /^(\/\/?(?!\/)[^\?\s]*)(\?[^\s]*)?$/, + + // RFC 2396: characters reserved for delimiting URLs. + // We actually just auto-escape these. + delims = ['<', '>', '"', '`', ' ', '\r', '\n', '\t'], + + // RFC 2396: characters not allowed for various reasons. + unwise = ['{', '}', '|', '\\', '^', '`'].concat(delims), + + // Allowed by RFCs, but cause of XSS attacks. Always escape these. + autoEscape = ['\''].concat(unwise), + // Characters that are never ever allowed in a hostname. + // Note that any invalid chars are also handled, but these + // are the ones that are *expected* to be seen, so we fast-path + // them. + nonHostChars = ['%', '/', '?', ';', '#'].concat(autoEscape), + hostEndingChars = ['/', '?', '#'], + hostnameMaxLen = 255, + hostnamePartPattern = /^[+a-z0-9A-Z_-]{0,63}$/, + hostnamePartStart = /^([+a-z0-9A-Z_-]{0,63})(.*)$/, + // protocols that can allow "unsafe" and "unwise" chars. + unsafeProtocol = { + 'javascript': true, + 'javascript:': true + }, + // protocols that never have a hostname. + hostlessProtocol = { + 'javascript': true, + 'javascript:': true + }, + // protocols that always contain a // bit. + slashedProtocol = { + 'http': true, + 'https': true, + 'ftp': true, + 'gopher': true, + 'file': true, + 'http:': true, + 'https:': true, + 'ftp:': true, + 'gopher:': true, + 'file:': true + }, + querystring = __webpack_require__(28); + +function urlParse(url, parseQueryString, slashesDenoteHost) { + if (url && util.isObject(url) && url instanceof Url) return url; + + var u = new Url; + u.parse(url, parseQueryString, slashesDenoteHost); + return u; +} + +Url.prototype.parse = function(url, parseQueryString, slashesDenoteHost) { + if (!util.isString(url)) { + throw new TypeError("Parameter 'url' must be a string, not " + typeof url); + } + + // Copy chrome, IE, opera backslash-handling behavior. + // Back slashes before the query string get converted to forward slashes + // See: https://code.google.com/p/chromium/issues/detail?id=25916 + var queryIndex = url.indexOf('?'), + splitter = + (queryIndex !== -1 && queryIndex < url.indexOf('#')) ? '?' : '#', + uSplit = url.split(splitter), + slashRegex = /\\/g; + uSplit[0] = uSplit[0].replace(slashRegex, '/'); + url = uSplit.join(splitter); + + var rest = url; + + // trim before proceeding. 
+ // This is to support parse stuff like " http://foo.com \n" + rest = rest.trim(); + + if (!slashesDenoteHost && url.split('#').length === 1) { + // Try fast path regexp + var simplePath = simplePathPattern.exec(rest); + if (simplePath) { + this.path = rest; + this.href = rest; + this.pathname = simplePath[1]; + if (simplePath[2]) { + this.search = simplePath[2]; + if (parseQueryString) { + this.query = querystring.parse(this.search.substr(1)); + } else { + this.query = this.search.substr(1); + } + } else if (parseQueryString) { + this.search = ''; + this.query = {}; + } + return this; + } + } + + var proto = protocolPattern.exec(rest); + if (proto) { + proto = proto[0]; + var lowerProto = proto.toLowerCase(); + this.protocol = lowerProto; + rest = rest.substr(proto.length); + } + + // figure out if it's got a host + // user@server is *always* interpreted as a hostname, and url + // resolution will treat //foo/bar as host=foo,path=bar because that's + // how the browser resolves relative URLs. + if (slashesDenoteHost || proto || rest.match(/^\/\/[^@\/]+@[^@\/]+/)) { + var slashes = rest.substr(0, 2) === '//'; + if (slashes && !(proto && hostlessProtocol[proto])) { + rest = rest.substr(2); + this.slashes = true; + } + } + + if (!hostlessProtocol[proto] && + (slashes || (proto && !slashedProtocol[proto]))) { + + // there's a hostname. + // the first instance of /, ?, ;, or # ends the host. + // + // If there is an @ in the hostname, then non-host chars *are* allowed + // to the left of the last @ sign, unless some host-ending character + // comes *before* the @-sign. + // URLs are obnoxious. + // + // ex: + // http://a@b@c/ => user:a@b host:c + // http://a@b?@c => user:a host:c path:/?@c + + // v0.12 TODO(isaacs): This is not quite how Chrome does things. + // Review our test case against browsers more comprehensively. + + // find the first instance of any hostEndingChars + var hostEnd = -1; + for (var i = 0; i < hostEndingChars.length; i++) { + var hec = rest.indexOf(hostEndingChars[i]); + if (hec !== -1 && (hostEnd === -1 || hec < hostEnd)) + hostEnd = hec; + } + + // at this point, either we have an explicit point where the + // auth portion cannot go past, or the last @ char is the decider. + var auth, atSign; + if (hostEnd === -1) { + // atSign can be anywhere. + atSign = rest.lastIndexOf('@'); + } else { + // atSign must be in auth portion. + // http://a@b/c@d => host:b auth:a path:/c@d + atSign = rest.lastIndexOf('@', hostEnd); + } + + // Now we have a portion which is definitely the auth. + // Pull that off. + if (atSign !== -1) { + auth = rest.slice(0, atSign); + rest = rest.slice(atSign + 1); + this.auth = decodeURIComponent(auth); + } + + // the host is the remaining to the left of the first non-host char + hostEnd = -1; + for (var i = 0; i < nonHostChars.length; i++) { + var hec = rest.indexOf(nonHostChars[i]); + if (hec !== -1 && (hostEnd === -1 || hec < hostEnd)) + hostEnd = hec; + } + // if we still have not hit it, then the entire thing is a host. + if (hostEnd === -1) + hostEnd = rest.length; + + this.host = rest.slice(0, hostEnd); + rest = rest.slice(hostEnd); + + // pull out port. + this.parseHost(); + + // we've indicated that there is a hostname, + // so even if it's empty, it has to be present. + this.hostname = this.hostname || ''; + + // if hostname begins with [ and ends with ] + // assume that it's an IPv6 address. + var ipv6Hostname = this.hostname[0] === '[' && + this.hostname[this.hostname.length - 1] === ']'; + + // validate a little. 
+ if (!ipv6Hostname) { + var hostparts = this.hostname.split(/\./); + for (var i = 0, l = hostparts.length; i < l; i++) { + var part = hostparts[i]; + if (!part) continue; + if (!part.match(hostnamePartPattern)) { + var newpart = ''; + for (var j = 0, k = part.length; j < k; j++) { + if (part.charCodeAt(j) > 127) { + // we replace non-ASCII char with a temporary placeholder + // we need this to make sure size of hostname is not + // broken by replacing non-ASCII by nothing + newpart += 'x'; + } else { + newpart += part[j]; + } + } + // we test again with ASCII char only + if (!newpart.match(hostnamePartPattern)) { + var validParts = hostparts.slice(0, i); + var notHost = hostparts.slice(i + 1); + var bit = part.match(hostnamePartStart); + if (bit) { + validParts.push(bit[1]); + notHost.unshift(bit[2]); + } + if (notHost.length) { + rest = '/' + notHost.join('.') + rest; + } + this.hostname = validParts.join('.'); + break; + } + } + } + } + + if (this.hostname.length > hostnameMaxLen) { + this.hostname = ''; + } else { + // hostnames are always lower case. + this.hostname = this.hostname.toLowerCase(); + } + + if (!ipv6Hostname) { + // IDNA Support: Returns a punycoded representation of "domain". + // It only converts parts of the domain name that + // have non-ASCII characters, i.e. it doesn't matter if + // you call it with a domain that already is ASCII-only. + this.hostname = punycode.toASCII(this.hostname); + } + + var p = this.port ? ':' + this.port : ''; + var h = this.hostname || ''; + this.host = h + p; + this.href += this.host; + + // strip [ and ] from the hostname + // the host field still retains them, though + if (ipv6Hostname) { + this.hostname = this.hostname.substr(1, this.hostname.length - 2); + if (rest[0] !== '/') { + rest = '/' + rest; + } + } + } + + // now rest is set to the post-host stuff. + // chop off any delim chars. + if (!unsafeProtocol[lowerProto]) { + + // First, make 100% sure that any "autoEscape" chars get + // escaped, even if encodeURIComponent doesn't think they + // need to be. + for (var i = 0, l = autoEscape.length; i < l; i++) { + var ae = autoEscape[i]; + if (rest.indexOf(ae) === -1) + continue; + var esc = encodeURIComponent(ae); + if (esc === ae) { + esc = escape(ae); + } + rest = rest.split(ae).join(esc); + } + } + + + // chop off from the tail first. + var hash = rest.indexOf('#'); + if (hash !== -1) { + // got a fragment string. + this.hash = rest.substr(hash); + rest = rest.slice(0, hash); + } + var qm = rest.indexOf('?'); + if (qm !== -1) { + this.search = rest.substr(qm); + this.query = rest.substr(qm + 1); + if (parseQueryString) { + this.query = querystring.parse(this.query); + } + rest = rest.slice(0, qm); + } else if (parseQueryString) { + // no query string, but parseQueryString still requested + this.search = ''; + this.query = {}; + } + if (rest) this.pathname = rest; + if (slashedProtocol[lowerProto] && + this.hostname && !this.pathname) { + this.pathname = '/'; + } + + //to support http.request + if (this.pathname || this.search) { + var p = this.pathname || ''; + var s = this.search || ''; + this.path = p + s; + } + + // finally, reconstruct the href based on what has been validated. + this.href = this.format(); + return this; +}; + +// format a parsed object into a url string +function urlFormat(obj) { + // ensure it's an object, and not a string url. + // If it's an obj, this is a no-op. + // this way, you can call url_format() on strings + // to clean up potentially wonky urls. 
+ if (util.isString(obj)) obj = urlParse(obj); + if (!(obj instanceof Url)) return Url.prototype.format.call(obj); + return obj.format(); +} + +Url.prototype.format = function() { + var auth = this.auth || ''; + if (auth) { + auth = encodeURIComponent(auth); + auth = auth.replace(/%3A/i, ':'); + auth += '@'; + } + + var protocol = this.protocol || '', + pathname = this.pathname || '', + hash = this.hash || '', + host = false, + query = ''; + + if (this.host) { + host = auth + this.host; + } else if (this.hostname) { + host = auth + (this.hostname.indexOf(':') === -1 ? + this.hostname : + '[' + this.hostname + ']'); + if (this.port) { + host += ':' + this.port; + } + } + + if (this.query && + util.isObject(this.query) && + Object.keys(this.query).length) { + query = querystring.stringify(this.query); + } + + var search = this.search || (query && ('?' + query)) || ''; + + if (protocol && protocol.substr(-1) !== ':') protocol += ':'; + + // only the slashedProtocols get the //. Not mailto:, xmpp:, etc. + // unless they had them to begin with. + if (this.slashes || + (!protocol || slashedProtocol[protocol]) && host !== false) { + host = '//' + (host || ''); + if (pathname && pathname.charAt(0) !== '/') pathname = '/' + pathname; + } else if (!host) { + host = ''; + } + + if (hash && hash.charAt(0) !== '#') hash = '#' + hash; + if (search && search.charAt(0) !== '?') search = '?' + search; + + pathname = pathname.replace(/[?#]/g, function(match) { + return encodeURIComponent(match); + }); + search = search.replace('#', '%23'); + + return protocol + host + pathname + search + hash; +}; + +function urlResolve(source, relative) { + return urlParse(source, false, true).resolve(relative); +} + +Url.prototype.resolve = function(relative) { + return this.resolveObject(urlParse(relative, false, true)).format(); +}; + +function urlResolveObject(source, relative) { + if (!source) return relative; + return urlParse(source, false, true).resolveObject(relative); +} + +Url.prototype.resolveObject = function(relative) { + if (util.isString(relative)) { + var rel = new Url(); + rel.parse(relative, false, true); + relative = rel; + } + + var result = new Url(); + var tkeys = Object.keys(this); + for (var tk = 0; tk < tkeys.length; tk++) { + var tkey = tkeys[tk]; + result[tkey] = this[tkey]; + } + + // hash is always overridden, no matter what. + // even href="" will remove it. + result.hash = relative.hash; + + // if the relative url is empty, then there's nothing left to do here. + if (relative.href === '') { + result.href = result.format(); + return result; + } + + // hrefs like //foo/bar always cut to the protocol. + if (relative.slashes && !relative.protocol) { + // take everything except the protocol from relative + var rkeys = Object.keys(relative); + for (var rk = 0; rk < rkeys.length; rk++) { + var rkey = rkeys[rk]; + if (rkey !== 'protocol') + result[rkey] = relative[rkey]; + } + + //urlParse appends trailing / to urls like http://www.example.com + if (slashedProtocol[result.protocol] && + result.hostname && !result.pathname) { + result.path = result.pathname = '/'; + } + + result.href = result.format(); + return result; + } + + if (relative.protocol && relative.protocol !== result.protocol) { + // if it's a known url protocol, then changing + // the protocol does weird things + // first, if it's not file:, then we MUST have a host, + // and if there was a path + // to begin with, then we MUST have a path. + // if it is file:, then the host is dropped, + // because that's known to be hostless. 
+ // anything else is assumed to be absolute. + if (!slashedProtocol[relative.protocol]) { + var keys = Object.keys(relative); + for (var v = 0; v < keys.length; v++) { + var k = keys[v]; + result[k] = relative[k]; + } + result.href = result.format(); + return result; + } + + result.protocol = relative.protocol; + if (!relative.host && !hostlessProtocol[relative.protocol]) { + var relPath = (relative.pathname || '').split('/'); + while (relPath.length && !(relative.host = relPath.shift())); + if (!relative.host) relative.host = ''; + if (!relative.hostname) relative.hostname = ''; + if (relPath[0] !== '') relPath.unshift(''); + if (relPath.length < 2) relPath.unshift(''); + result.pathname = relPath.join('/'); + } else { + result.pathname = relative.pathname; + } + result.search = relative.search; + result.query = relative.query; + result.host = relative.host || ''; + result.auth = relative.auth; + result.hostname = relative.hostname || relative.host; + result.port = relative.port; + // to support http.request + if (result.pathname || result.search) { + var p = result.pathname || ''; + var s = result.search || ''; + result.path = p + s; + } + result.slashes = result.slashes || relative.slashes; + result.href = result.format(); + return result; + } + + var isSourceAbs = (result.pathname && result.pathname.charAt(0) === '/'), + isRelAbs = ( + relative.host || + relative.pathname && relative.pathname.charAt(0) === '/' + ), + mustEndAbs = (isRelAbs || isSourceAbs || + (result.host && relative.pathname)), + removeAllDots = mustEndAbs, + srcPath = result.pathname && result.pathname.split('/') || [], + relPath = relative.pathname && relative.pathname.split('/') || [], + psychotic = result.protocol && !slashedProtocol[result.protocol]; + + // if the url is a non-slashed url, then relative + // links like ../.. should be able + // to crawl up to the hostname, as well. This is strange. + // result.protocol has already been set by now. + // Later on, put the first path part into the host field. + if (psychotic) { + result.hostname = ''; + result.port = null; + if (result.host) { + if (srcPath[0] === '') srcPath[0] = result.host; + else srcPath.unshift(result.host); + } + result.host = ''; + if (relative.protocol) { + relative.hostname = null; + relative.port = null; + if (relative.host) { + if (relPath[0] === '') relPath[0] = relative.host; + else relPath.unshift(relative.host); + } + relative.host = null; + } + mustEndAbs = mustEndAbs && (relPath[0] === '' || srcPath[0] === ''); + } + + if (isRelAbs) { + // it's absolute. + result.host = (relative.host || relative.host === '') ? + relative.host : result.host; + result.hostname = (relative.hostname || relative.hostname === '') ? + relative.hostname : result.hostname; + result.search = relative.search; + result.query = relative.query; + srcPath = relPath; + // fall through to the dot-handling below. + } else if (relPath.length) { + // it's relative + // throw away the existing file, and take the new path instead. + if (!srcPath) srcPath = []; + srcPath.pop(); + srcPath = srcPath.concat(relPath); + result.search = relative.search; + result.query = relative.query; + } else if (!util.isNullOrUndefined(relative.search)) { + // just pull out the search. + // like href='?foo'. 
+ // Put this after the other two cases because it simplifies the booleans + if (psychotic) { + result.hostname = result.host = srcPath.shift(); + //occationaly the auth can get stuck only in host + //this especially happens in cases like + //url.resolveObject('mailto:local1@domain1', 'local2@domain2') + var authInHost = result.host && result.host.indexOf('@') > 0 ? + result.host.split('@') : false; + if (authInHost) { + result.auth = authInHost.shift(); + result.host = result.hostname = authInHost.shift(); + } + } + result.search = relative.search; + result.query = relative.query; + //to support http.request + if (!util.isNull(result.pathname) || !util.isNull(result.search)) { + result.path = (result.pathname ? result.pathname : '') + + (result.search ? result.search : ''); + } + result.href = result.format(); + return result; + } + + if (!srcPath.length) { + // no path at all. easy. + // we've already handled the other stuff above. + result.pathname = null; + //to support http.request + if (result.search) { + result.path = '/' + result.search; + } else { + result.path = null; + } + result.href = result.format(); + return result; + } + + // if a url ENDs in . or .., then it must get a trailing slash. + // however, if it ends in anything else non-slashy, + // then it must NOT get a trailing slash. + var last = srcPath.slice(-1)[0]; + var hasTrailingSlash = ( + (result.host || relative.host || srcPath.length > 1) && + (last === '.' || last === '..') || last === ''); + + // strip single dots, resolve double dots to parent dir + // if the path tries to go above the root, `up` ends up > 0 + var up = 0; + for (var i = srcPath.length; i >= 0; i--) { + last = srcPath[i]; + if (last === '.') { + srcPath.splice(i, 1); + } else if (last === '..') { + srcPath.splice(i, 1); + up++; + } else if (up) { + srcPath.splice(i, 1); + up--; + } + } + + // if the path is allowed to go above the root, restore leading ..s + if (!mustEndAbs && !removeAllDots) { + for (; up--; up) { + srcPath.unshift('..'); + } + } + + if (mustEndAbs && srcPath[0] !== '' && + (!srcPath[0] || srcPath[0].charAt(0) !== '/')) { + srcPath.unshift(''); + } + + if (hasTrailingSlash && (srcPath.join('/').substr(-1) !== '/')) { + srcPath.push(''); + } + + var isAbsolute = srcPath[0] === '' || + (srcPath[0] && srcPath[0].charAt(0) === '/'); + + // put the host back + if (psychotic) { + result.hostname = result.host = isAbsolute ? '' : + srcPath.length ? srcPath.shift() : ''; + //occationaly the auth can get stuck only in host + //this especially happens in cases like + //url.resolveObject('mailto:local1@domain1', 'local2@domain2') + var authInHost = result.host && result.host.indexOf('@') > 0 ? + result.host.split('@') : false; + if (authInHost) { + result.auth = authInHost.shift(); + result.host = result.hostname = authInHost.shift(); + } + } + + mustEndAbs = mustEndAbs || (result.host && srcPath.length); + + if (mustEndAbs && !isAbsolute) { + srcPath.unshift(''); + } + + if (!srcPath.length) { + result.pathname = null; + result.path = null; + } else { + result.pathname = srcPath.join('/'); + } + + //to support request.http + if (!util.isNull(result.pathname) || !util.isNull(result.search)) { + result.path = (result.pathname ? result.pathname : '') + + (result.search ? 
result.search : ''); + } + result.auth = relative.auth || result.auth; + result.slashes = result.slashes || relative.slashes; + result.href = result.format(); + return result; +}; + +Url.prototype.parseHost = function() { + var host = this.host; + var port = portPattern.exec(host); + if (port) { + port = port[0]; + if (port !== ':') { + this.port = port.substr(1); + } + host = host.substr(0, host.length - port.length); + } + if (host) this.hostname = host; +}; + + +/***/ }), +/* 9 */ +/***/ (function(module, exports, __webpack_require__) { + +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + + + +var R = typeof Reflect === 'object' ? Reflect : null +var ReflectApply = R && typeof R.apply === 'function' + ? R.apply + : function ReflectApply(target, receiver, args) { + return Function.prototype.apply.call(target, receiver, args); + } + +var ReflectOwnKeys +if (R && typeof R.ownKeys === 'function') { + ReflectOwnKeys = R.ownKeys +} else if (Object.getOwnPropertySymbols) { + ReflectOwnKeys = function ReflectOwnKeys(target) { + return Object.getOwnPropertyNames(target) + .concat(Object.getOwnPropertySymbols(target)); + }; +} else { + ReflectOwnKeys = function ReflectOwnKeys(target) { + return Object.getOwnPropertyNames(target); + }; +} + +function ProcessEmitWarning(warning) { + if (console && console.warn) console.warn(warning); +} + +var NumberIsNaN = Number.isNaN || function NumberIsNaN(value) { + return value !== value; +} + +function EventEmitter() { + EventEmitter.init.call(this); +} +module.exports = EventEmitter; +module.exports.once = once; + +// Backwards-compat with node 0.10.x +EventEmitter.EventEmitter = EventEmitter; + +EventEmitter.prototype._events = undefined; +EventEmitter.prototype._eventsCount = 0; +EventEmitter.prototype._maxListeners = undefined; + +// By default EventEmitters will print a warning if more than 10 listeners are +// added to it. This is a useful default which helps finding memory leaks. +var defaultMaxListeners = 10; + +function checkListener(listener) { + if (typeof listener !== 'function') { + throw new TypeError('The "listener" argument must be of type Function. 
Received type ' + typeof listener); + } +} + +Object.defineProperty(EventEmitter, 'defaultMaxListeners', { + enumerable: true, + get: function() { + return defaultMaxListeners; + }, + set: function(arg) { + if (typeof arg !== 'number' || arg < 0 || NumberIsNaN(arg)) { + throw new RangeError('The value of "defaultMaxListeners" is out of range. It must be a non-negative number. Received ' + arg + '.'); + } + defaultMaxListeners = arg; + } +}); + +EventEmitter.init = function() { + + if (this._events === undefined || + this._events === Object.getPrototypeOf(this)._events) { + this._events = Object.create(null); + this._eventsCount = 0; + } + + this._maxListeners = this._maxListeners || undefined; +}; + +// Obviously not all Emitters should be limited to 10. This function allows +// that to be increased. Set to zero for unlimited. +EventEmitter.prototype.setMaxListeners = function setMaxListeners(n) { + if (typeof n !== 'number' || n < 0 || NumberIsNaN(n)) { + throw new RangeError('The value of "n" is out of range. It must be a non-negative number. Received ' + n + '.'); + } + this._maxListeners = n; + return this; +}; + +function _getMaxListeners(that) { + if (that._maxListeners === undefined) + return EventEmitter.defaultMaxListeners; + return that._maxListeners; +} + +EventEmitter.prototype.getMaxListeners = function getMaxListeners() { + return _getMaxListeners(this); +}; + +EventEmitter.prototype.emit = function emit(type) { + var args = []; + for (var i = 1; i < arguments.length; i++) args.push(arguments[i]); + var doError = (type === 'error'); + + var events = this._events; + if (events !== undefined) + doError = (doError && events.error === undefined); + else if (!doError) + return false; + + // If there is no 'error' event listener then throw. + if (doError) { + var er; + if (args.length > 0) + er = args[0]; + if (er instanceof Error) { + // Note: The comments on the `throw` lines are intentional, they show + // up in Node's output if this results in an unhandled exception. + throw er; // Unhandled 'error' event + } + // At least give some kind of context to the user + var err = new Error('Unhandled error.' + (er ? ' (' + er.message + ')' : '')); + err.context = er; + throw err; // Unhandled 'error' event + } + + var handler = events[type]; + + if (handler === undefined) + return false; + + if (typeof handler === 'function') { + ReflectApply(handler, this, args); + } else { + var len = handler.length; + var listeners = arrayClone(handler, len); + for (var i = 0; i < len; ++i) + ReflectApply(listeners[i], this, args); + } + + return true; +}; + +function _addListener(target, type, listener, prepend) { + var m; + var events; + var existing; + + checkListener(listener); + + events = target._events; + if (events === undefined) { + events = target._events = Object.create(null); + target._eventsCount = 0; + } else { + // To avoid recursion in the case that type === "newListener"! Before + // adding it to the listeners, first emit "newListener". + if (events.newListener !== undefined) { + target.emit('newListener', type, + listener.listener ? listener.listener : listener); + + // Re-assign `events` because a newListener handler could have caused the + // this._events to be assigned to a new object + events = target._events; + } + existing = events[type]; + } + + if (existing === undefined) { + // Optimize the case of one listener. Don't need the extra array object. 
+ existing = events[type] = listener; + ++target._eventsCount; + } else { + if (typeof existing === 'function') { + // Adding the second element, need to change to array. + existing = events[type] = + prepend ? [listener, existing] : [existing, listener]; + // If we've already got an array, just append. + } else if (prepend) { + existing.unshift(listener); + } else { + existing.push(listener); + } + + // Check for listener leak + m = _getMaxListeners(target); + if (m > 0 && existing.length > m && !existing.warned) { + existing.warned = true; + // No error code for this since it is a Warning + // eslint-disable-next-line no-restricted-syntax + var w = new Error('Possible EventEmitter memory leak detected. ' + + existing.length + ' ' + String(type) + ' listeners ' + + 'added. Use emitter.setMaxListeners() to ' + + 'increase limit'); + w.name = 'MaxListenersExceededWarning'; + w.emitter = target; + w.type = type; + w.count = existing.length; + ProcessEmitWarning(w); + } + } + + return target; +} + +EventEmitter.prototype.addListener = function addListener(type, listener) { + return _addListener(this, type, listener, false); +}; + +EventEmitter.prototype.on = EventEmitter.prototype.addListener; + +EventEmitter.prototype.prependListener = + function prependListener(type, listener) { + return _addListener(this, type, listener, true); + }; + +function onceWrapper() { + if (!this.fired) { + this.target.removeListener(this.type, this.wrapFn); + this.fired = true; + if (arguments.length === 0) + return this.listener.call(this.target); + return this.listener.apply(this.target, arguments); + } +} + +function _onceWrap(target, type, listener) { + var state = { fired: false, wrapFn: undefined, target: target, type: type, listener: listener }; + var wrapped = onceWrapper.bind(state); + wrapped.listener = listener; + state.wrapFn = wrapped; + return wrapped; +} + +EventEmitter.prototype.once = function once(type, listener) { + checkListener(listener); + this.on(type, _onceWrap(this, type, listener)); + return this; +}; + +EventEmitter.prototype.prependOnceListener = + function prependOnceListener(type, listener) { + checkListener(listener); + this.prependListener(type, _onceWrap(this, type, listener)); + return this; + }; + +// Emits a 'removeListener' event if and only if the listener was removed. 
+EventEmitter.prototype.removeListener = + function removeListener(type, listener) { + var list, events, position, i, originalListener; + + checkListener(listener); + + events = this._events; + if (events === undefined) + return this; + + list = events[type]; + if (list === undefined) + return this; + + if (list === listener || list.listener === listener) { + if (--this._eventsCount === 0) + this._events = Object.create(null); + else { + delete events[type]; + if (events.removeListener) + this.emit('removeListener', type, list.listener || listener); + } + } else if (typeof list !== 'function') { + position = -1; + + for (i = list.length - 1; i >= 0; i--) { + if (list[i] === listener || list[i].listener === listener) { + originalListener = list[i].listener; + position = i; + break; + } + } + + if (position < 0) + return this; + + if (position === 0) + list.shift(); + else { + spliceOne(list, position); + } + + if (list.length === 1) + events[type] = list[0]; + + if (events.removeListener !== undefined) + this.emit('removeListener', type, originalListener || listener); + } + + return this; + }; + +EventEmitter.prototype.off = EventEmitter.prototype.removeListener; + +EventEmitter.prototype.removeAllListeners = + function removeAllListeners(type) { + var listeners, events, i; + + events = this._events; + if (events === undefined) + return this; + + // not listening for removeListener, no need to emit + if (events.removeListener === undefined) { + if (arguments.length === 0) { + this._events = Object.create(null); + this._eventsCount = 0; + } else if (events[type] !== undefined) { + if (--this._eventsCount === 0) + this._events = Object.create(null); + else + delete events[type]; + } + return this; + } + + // emit removeListener for all listeners on all events + if (arguments.length === 0) { + var keys = Object.keys(events); + var key; + for (i = 0; i < keys.length; ++i) { + key = keys[i]; + if (key === 'removeListener') continue; + this.removeAllListeners(key); + } + this.removeAllListeners('removeListener'); + this._events = Object.create(null); + this._eventsCount = 0; + return this; + } + + listeners = events[type]; + + if (typeof listeners === 'function') { + this.removeListener(type, listeners); + } else if (listeners !== undefined) { + // LIFO order + for (i = listeners.length - 1; i >= 0; i--) { + this.removeListener(type, listeners[i]); + } + } + + return this; + }; + +function _listeners(target, type, unwrap) { + var events = target._events; + + if (events === undefined) + return []; + + var evlistener = events[type]; + if (evlistener === undefined) + return []; + + if (typeof evlistener === 'function') + return unwrap ? [evlistener.listener || evlistener] : [evlistener]; + + return unwrap ? 
+ unwrapListeners(evlistener) : arrayClone(evlistener, evlistener.length); +} + +EventEmitter.prototype.listeners = function listeners(type) { + return _listeners(this, type, true); +}; + +EventEmitter.prototype.rawListeners = function rawListeners(type) { + return _listeners(this, type, false); +}; + +EventEmitter.listenerCount = function(emitter, type) { + if (typeof emitter.listenerCount === 'function') { + return emitter.listenerCount(type); + } else { + return listenerCount.call(emitter, type); + } +}; + +EventEmitter.prototype.listenerCount = listenerCount; +function listenerCount(type) { + var events = this._events; + + if (events !== undefined) { + var evlistener = events[type]; + + if (typeof evlistener === 'function') { + return 1; + } else if (evlistener !== undefined) { + return evlistener.length; + } + } + + return 0; +} + +EventEmitter.prototype.eventNames = function eventNames() { + return this._eventsCount > 0 ? ReflectOwnKeys(this._events) : []; +}; + +function arrayClone(arr, n) { + var copy = new Array(n); + for (var i = 0; i < n; ++i) + copy[i] = arr[i]; + return copy; +} + +function spliceOne(list, index) { + for (; index + 1 < list.length; index++) + list[index] = list[index + 1]; + list.pop(); +} + +function unwrapListeners(arr) { + var ret = new Array(arr.length); + for (var i = 0; i < ret.length; ++i) { + ret[i] = arr[i].listener || arr[i]; + } + return ret; +} + +function once(emitter, name) { + return new Promise(function (resolve, reject) { + function errorListener(err) { + emitter.removeListener(name, resolver); + reject(err); + } + + function resolver() { + if (typeof emitter.removeListener === 'function') { + emitter.removeListener('error', errorListener); + } + resolve([].slice.call(arguments)); + }; + + eventTargetAgnosticAddListener(emitter, name, resolver, { once: true }); + if (name !== 'error') { + addErrorHandlerIfEventEmitter(emitter, errorListener, { once: true }); + } + }); +} + +function addErrorHandlerIfEventEmitter(emitter, handler, flags) { + if (typeof emitter.on === 'function') { + eventTargetAgnosticAddListener(emitter, 'error', handler, flags); + } +} + +function eventTargetAgnosticAddListener(emitter, name, listener, flags) { + if (typeof emitter.on === 'function') { + if (flags.once) { + emitter.once(name, listener); + } else { + emitter.on(name, listener); + } + } else if (typeof emitter.addEventListener === 'function') { + // EventTarget does not have `error` event semantics like Node + // EventEmitters, we do not listen for `error` events here. + emitter.addEventListener(name, function wrapListener(arg) { + // IE does not have builtin `{ once: true }` support so we + // have to do it manually. + if (flags.once) { + emitter.removeEventListener(name, wrapListener); + } + listener(arg); + }); + } else { + throw new TypeError('The "emitter" argument must be of type EventEmitter. 
Received type ' + typeof emitter); + } +} + + +/***/ }), +/* 10 */ +/***/ (function(module, exports) { + +var toString = {}.toString; + +module.exports = Array.isArray || function (arr) { + return toString.call(arr) == '[object Array]'; +}; + + +/***/ }), +/* 11 */ +/***/ (function(module, exports, __webpack_require__) { + +/* WEBPACK VAR INJECTION */(function(global) {var ClientRequest = __webpack_require__(32) +var response = __webpack_require__(13) +var extend = __webpack_require__(41) +var statusCodes = __webpack_require__(42) +var url = __webpack_require__(8) + +var http = exports + +http.request = function (opts, cb) { + if (typeof opts === 'string') + opts = url.parse(opts) + else + opts = extend(opts) + + // Normally, the page is loaded from http or https, so not specifying a protocol + // will result in a (valid) protocol-relative url. However, this won't work if + // the protocol is something else, like 'file:' + var defaultProtocol = global.location.protocol.search(/^https?:$/) === -1 ? 'http:' : '' + + var protocol = opts.protocol || defaultProtocol + var host = opts.hostname || opts.host + var port = opts.port + var path = opts.path || '/' + + // Necessary for IPv6 addresses + if (host && host.indexOf(':') !== -1) + host = '[' + host + ']' + + // This may be a relative url. The browser should always be able to interpret it correctly. + opts.url = (host ? (protocol + '//' + host) : '') + (port ? ':' + port : '') + path + opts.method = (opts.method || 'GET').toUpperCase() + opts.headers = opts.headers || {} + + // Also valid opts.auth, opts.mode + + var req = new ClientRequest(opts) + if (cb) + req.on('response', cb) + return req +} + +http.get = function get (opts, cb) { + var req = http.request(opts, cb) + req.end() + return req +} + +http.ClientRequest = ClientRequest +http.IncomingMessage = response.IncomingMessage + +http.Agent = function () {} +http.Agent.defaultMaxSockets = 4 + +http.globalAgent = new http.Agent() + +http.STATUS_CODES = statusCodes + +http.METHODS = [ + 'CHECKOUT', + 'CONNECT', + 'COPY', + 'DELETE', + 'GET', + 'HEAD', + 'LOCK', + 'M-SEARCH', + 'MERGE', + 'MKACTIVITY', + 'MKCOL', + 'MOVE', + 'NOTIFY', + 'OPTIONS', + 'PATCH', + 'POST', + 'PROPFIND', + 'PROPPATCH', + 'PURGE', + 'PUT', + 'REPORT', + 'SEARCH', + 'SUBSCRIBE', + 'TRACE', + 'UNLOCK', + 'UNSUBSCRIBE' +] +/* WEBPACK VAR INJECTION */}.call(exports, __webpack_require__(0))) + +/***/ }), +/* 12 */ +/***/ (function(module, exports, __webpack_require__) { + +/* WEBPACK VAR INJECTION */(function(global) {exports.fetch = isFunction(global.fetch) && isFunction(global.ReadableStream) + +exports.writableStream = isFunction(global.WritableStream) + +exports.abortController = isFunction(global.AbortController) + +exports.blobConstructor = false +try { + new Blob([new ArrayBuffer(1)]) + exports.blobConstructor = true +} catch (e) {} + +// The xhr request to example.com may violate some restrictive CSP configurations, +// so if we're running in a browser that supports `fetch`, avoid calling getXHR() +// and assume support for certain features below. +var xhr +function getXHR () { + // Cache the xhr value + if (xhr !== undefined) return xhr + + if (global.XMLHttpRequest) { + xhr = new global.XMLHttpRequest() + // If XDomainRequest is available (ie only, where xhr might not work + // cross domain), use the page location. Otherwise use example.com + // Note: this doesn't actually make an http request. + try { + xhr.open('GET', global.XDomainRequest ? 
'/' : 'https://example.com') + } catch(e) { + xhr = null + } + } else { + // Service workers don't have XHR + xhr = null + } + return xhr +} + +function checkTypeSupport (type) { + var xhr = getXHR() + if (!xhr) return false + try { + xhr.responseType = type + return xhr.responseType === type + } catch (e) {} + return false +} + +// For some strange reason, Safari 7.0 reports typeof global.ArrayBuffer === 'object'. +// Safari 7.1 appears to have fixed this bug. +var haveArrayBuffer = typeof global.ArrayBuffer !== 'undefined' +var haveSlice = haveArrayBuffer && isFunction(global.ArrayBuffer.prototype.slice) + +// If fetch is supported, then arraybuffer will be supported too. Skip calling +// checkTypeSupport(), since that calls getXHR(). +exports.arraybuffer = exports.fetch || (haveArrayBuffer && checkTypeSupport('arraybuffer')) + +// These next two tests unavoidably show warnings in Chrome. Since fetch will always +// be used if it's available, just return false for these to avoid the warnings. +exports.msstream = !exports.fetch && haveSlice && checkTypeSupport('ms-stream') +exports.mozchunkedarraybuffer = !exports.fetch && haveArrayBuffer && + checkTypeSupport('moz-chunked-arraybuffer') + +// If fetch is supported, then overrideMimeType will be supported too. Skip calling +// getXHR(). +exports.overrideMimeType = exports.fetch || (getXHR() ? isFunction(getXHR().overrideMimeType) : false) + +exports.vbArray = isFunction(global.VBArray) + +function isFunction (value) { + return typeof value === 'function' +} + +xhr = null // Help gc + +/* WEBPACK VAR INJECTION */}.call(exports, __webpack_require__(0))) + +/***/ }), +/* 13 */ +/***/ (function(module, exports, __webpack_require__) { + +/* WEBPACK VAR INJECTION */(function(process, Buffer, global) {var capability = __webpack_require__(12) +var inherits = __webpack_require__(2) +var stream = __webpack_require__(14) + +var rStates = exports.readyStates = { + UNSENT: 0, + OPENED: 1, + HEADERS_RECEIVED: 2, + LOADING: 3, + DONE: 4 +} + +var IncomingMessage = exports.IncomingMessage = function (xhr, response, mode, fetchTimer) { + var self = this + stream.Readable.call(self) + + self._mode = mode + self.headers = {} + self.rawHeaders = [] + self.trailers = {} + self.rawTrailers = [] + + // Fake the 'close' event, but only once 'end' fires + self.on('end', function () { + // The nextTick is necessary to prevent the 'request' module from causing an infinite loop + process.nextTick(function () { + self.emit('close') + }) + }) + + if (mode === 'fetch') { + self._fetchResponse = response + + self.url = response.url + self.statusCode = response.status + self.statusMessage = response.statusText + + response.headers.forEach(function (header, key){ + self.headers[key.toLowerCase()] = header + self.rawHeaders.push(key, header) + }) + + if (capability.writableStream) { + var writable = new WritableStream({ + write: function (chunk) { + return new Promise(function (resolve, reject) { + if (self._destroyed) { + reject() + } else if(self.push(new Buffer(chunk))) { + resolve() + } else { + self._resumeFetch = resolve + } + }) + }, + close: function () { + global.clearTimeout(fetchTimer) + if (!self._destroyed) + self.push(null) + }, + abort: function (err) { + if (!self._destroyed) + self.emit('error', err) + } + }) + + try { + response.body.pipeTo(writable).catch(function (err) { + global.clearTimeout(fetchTimer) + if (!self._destroyed) + self.emit('error', err) + }) + return + } catch (e) {} // pipeTo method isn't defined. 
Can't find a better way to feature test this + } + // fallback for when writableStream or pipeTo aren't available + var reader = response.body.getReader() + function read () { + reader.read().then(function (result) { + if (self._destroyed) + return + if (result.done) { + global.clearTimeout(fetchTimer) + self.push(null) + return + } + self.push(new Buffer(result.value)) + read() + }).catch(function (err) { + global.clearTimeout(fetchTimer) + if (!self._destroyed) + self.emit('error', err) + }) + } + read() + } else { + self._xhr = xhr + self._pos = 0 + + self.url = xhr.responseURL + self.statusCode = xhr.status + self.statusMessage = xhr.statusText + var headers = xhr.getAllResponseHeaders().split(/\r?\n/) + headers.forEach(function (header) { + var matches = header.match(/^([^:]+):\s*(.*)/) + if (matches) { + var key = matches[1].toLowerCase() + if (key === 'set-cookie') { + if (self.headers[key] === undefined) { + self.headers[key] = [] + } + self.headers[key].push(matches[2]) + } else if (self.headers[key] !== undefined) { + self.headers[key] += ', ' + matches[2] + } else { + self.headers[key] = matches[2] + } + self.rawHeaders.push(matches[1], matches[2]) + } + }) + + self._charset = 'x-user-defined' + if (!capability.overrideMimeType) { + var mimeType = self.rawHeaders['mime-type'] + if (mimeType) { + var charsetMatch = mimeType.match(/;\s*charset=([^;])(;|$)/) + if (charsetMatch) { + self._charset = charsetMatch[1].toLowerCase() + } + } + if (!self._charset) + self._charset = 'utf-8' // best guess + } + } +} + +inherits(IncomingMessage, stream.Readable) + +IncomingMessage.prototype._read = function () { + var self = this + + var resolve = self._resumeFetch + if (resolve) { + self._resumeFetch = null + resolve() + } +} + +IncomingMessage.prototype._onXHRProgress = function () { + var self = this + + var xhr = self._xhr + + var response = null + switch (self._mode) { + case 'text:vbarray': // For IE9 + if (xhr.readyState !== rStates.DONE) + break + try { + // This fails in IE8 + response = new global.VBArray(xhr.responseBody).toArray() + } catch (e) {} + if (response !== null) { + self.push(new Buffer(response)) + break + } + // Falls through in IE8 + case 'text': + try { // This will fail when readyState = 3 in IE9. Switch mode and wait for readyState = 4 + response = xhr.responseText + } catch (e) { + self._mode = 'text:vbarray' + break + } + if (response.length > self._pos) { + var newData = response.substr(self._pos) + if (self._charset === 'x-user-defined') { + var buffer = new Buffer(newData.length) + for (var i = 0; i < newData.length; i++) + buffer[i] = newData.charCodeAt(i) & 0xff + + self.push(buffer) + } else { + self.push(newData, self._charset) + } + self._pos = response.length + } + break + case 'arraybuffer': + if (xhr.readyState !== rStates.DONE || !xhr.response) + break + response = xhr.response + self.push(new Buffer(new Uint8Array(response))) + break + case 'moz-chunked-arraybuffer': // take whole + response = xhr.response + if (xhr.readyState !== rStates.LOADING || !response) + break + self.push(new Buffer(new Uint8Array(response))) + break + case 'ms-stream': + response = xhr.response + if (xhr.readyState !== rStates.LOADING) + break + var reader = new global.MSStreamReader() + reader.onprogress = function () { + if (reader.result.byteLength > self._pos) { + self.push(new Buffer(new Uint8Array(reader.result.slice(self._pos)))) + self._pos = reader.result.byteLength + } + } + reader.onload = function () { + self.push(null) + } + // reader.onerror = ??? 
// TODO: this + reader.readAsArrayBuffer(response) + break + } + + // The ms-stream case handles end separately in reader.onload() + if (self._xhr.readyState === rStates.DONE && self._mode !== 'ms-stream') { + self.push(null) + } +} + +/* WEBPACK VAR INJECTION */}.call(exports, __webpack_require__(1), __webpack_require__(3).Buffer, __webpack_require__(0))) + +/***/ }), +/* 14 */ +/***/ (function(module, exports, __webpack_require__) { + +exports = module.exports = __webpack_require__(15); +exports.Stream = exports; +exports.Readable = exports; +exports.Writable = __webpack_require__(18); +exports.Duplex = __webpack_require__(4); +exports.Transform = __webpack_require__(20); +exports.PassThrough = __webpack_require__(39); + + +/***/ }), +/* 15 */ +/***/ (function(module, exports, __webpack_require__) { + +"use strict"; +/* WEBPACK VAR INJECTION */(function(global, process) {// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + + + +/**/ + +var pna = __webpack_require__(6); +/**/ + +module.exports = Readable; + +/**/ +var isArray = __webpack_require__(10); +/**/ + +/**/ +var Duplex; +/**/ + +Readable.ReadableState = ReadableState; + +/**/ +var EE = __webpack_require__(9).EventEmitter; + +var EElistenerCount = function (emitter, type) { + return emitter.listeners(type).length; +}; +/**/ + +/**/ +var Stream = __webpack_require__(16); +/**/ + +/**/ + +var Buffer = __webpack_require__(7).Buffer; +var OurUint8Array = global.Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} + +/**/ + +/**/ +var util = Object.create(__webpack_require__(5)); +util.inherits = __webpack_require__(2); +/**/ + +/**/ +var debugUtil = __webpack_require__(33); +var debug = void 0; +if (debugUtil && debugUtil.debuglog) { + debug = debugUtil.debuglog('stream'); +} else { + debug = function () {}; +} +/**/ + +var BufferList = __webpack_require__(34); +var destroyImpl = __webpack_require__(17); +var StringDecoder; + +util.inherits(Readable, Stream); + +var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; + +function prependListener(emitter, event, fn) { + // Sadly this is not cacheable as some libraries bundle their own + // event emitter implementation with them. 
+ if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); + + // This is a hack to make sure that our error handler is attached before any + // userland ones. NEVER DO THIS. This is here only because this code needs + // to continue to work with older versions of Node.js that do not include + // the prependListener() method. The goal is to eventually remove this hack. + if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; +} + +function ReadableState(options, stream) { + Duplex = Duplex || __webpack_require__(4); + + options = options || {}; + + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream. + // These options can be provided separately as readableXXX and writableXXX. + var isDuplex = stream instanceof Duplex; + + // object stream flag. Used to make read(n) ignore n and to + // make all the buffer merging and length checks go away + this.objectMode = !!options.objectMode; + + if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; + + // the point at which it stops calling _read() to fill the buffer + // Note: 0 is a valid value, means "don't call _read preemptively ever" + var hwm = options.highWaterMark; + var readableHwm = options.readableHighWaterMark; + var defaultHwm = this.objectMode ? 16 : 16 * 1024; + + if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (readableHwm || readableHwm === 0)) this.highWaterMark = readableHwm;else this.highWaterMark = defaultHwm; + + // cast to ints. + this.highWaterMark = Math.floor(this.highWaterMark); + + // A linked list is used to store data chunks instead of an array because the + // linked list can remove elements from the beginning faster than + // array.shift() + this.buffer = new BufferList(); + this.length = 0; + this.pipes = null; + this.pipesCount = 0; + this.flowing = null; + this.ended = false; + this.endEmitted = false; + this.reading = false; + + // a flag to be able to tell if the event 'readable'/'data' is emitted + // immediately, or on a later tick. We set this to true at first, because + // any actions that shouldn't happen until "later" should generally also + // not happen before the first read call. + this.sync = true; + + // whenever we return null, then we set a flag to say + // that we're awaiting a 'readable' event emission. + this.needReadable = false; + this.emittedReadable = false; + this.readableListening = false; + this.resumeScheduled = false; + + // has it been destroyed + this.destroyed = false; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. 
+ this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // the number of writers that are awaiting a drain event in .pipe()s + this.awaitDrain = 0; + + // if true, a maybeReadMore has been scheduled + this.readingMore = false; + + this.decoder = null; + this.encoding = null; + if (options.encoding) { + if (!StringDecoder) StringDecoder = __webpack_require__(19).StringDecoder; + this.decoder = new StringDecoder(options.encoding); + this.encoding = options.encoding; + } +} + +function Readable(options) { + Duplex = Duplex || __webpack_require__(4); + + if (!(this instanceof Readable)) return new Readable(options); + + this._readableState = new ReadableState(options, this); + + // legacy + this.readable = true; + + if (options) { + if (typeof options.read === 'function') this._read = options.read; + + if (typeof options.destroy === 'function') this._destroy = options.destroy; + } + + Stream.call(this); +} + +Object.defineProperty(Readable.prototype, 'destroyed', { + get: function () { + if (this._readableState === undefined) { + return false; + } + return this._readableState.destroyed; + }, + set: function (value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._readableState) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + } +}); + +Readable.prototype.destroy = destroyImpl.destroy; +Readable.prototype._undestroy = destroyImpl.undestroy; +Readable.prototype._destroy = function (err, cb) { + this.push(null); + cb(err); +}; + +// Manually shove something into the read() buffer. +// This returns true if the highWaterMark has not been hit yet, +// similar to how Writable.write() returns true if you should +// write() some more. 
+Readable.prototype.push = function (chunk, encoding) { + var state = this._readableState; + var skipChunkCheck; + + if (!state.objectMode) { + if (typeof chunk === 'string') { + encoding = encoding || state.defaultEncoding; + if (encoding !== state.encoding) { + chunk = Buffer.from(chunk, encoding); + encoding = ''; + } + skipChunkCheck = true; + } + } else { + skipChunkCheck = true; + } + + return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); +}; + +// Unshift should *always* be something directly out of read() +Readable.prototype.unshift = function (chunk) { + return readableAddChunk(this, chunk, null, true, false); +}; + +function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { + var state = stream._readableState; + if (chunk === null) { + state.reading = false; + onEofChunk(stream, state); + } else { + var er; + if (!skipChunkCheck) er = chunkInvalid(state, chunk); + if (er) { + stream.emit('error', er); + } else if (state.objectMode || chunk && chunk.length > 0) { + if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) { + chunk = _uint8ArrayToBuffer(chunk); + } + + if (addToFront) { + if (state.endEmitted) stream.emit('error', new Error('stream.unshift() after end event'));else addChunk(stream, state, chunk, true); + } else if (state.ended) { + stream.emit('error', new Error('stream.push() after EOF')); + } else { + state.reading = false; + if (state.decoder && !encoding) { + chunk = state.decoder.write(chunk); + if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state); + } else { + addChunk(stream, state, chunk, false); + } + } + } else if (!addToFront) { + state.reading = false; + } + } + + return needMoreData(state); +} + +function addChunk(stream, state, chunk, addToFront) { + if (state.flowing && state.length === 0 && !state.sync) { + stream.emit('data', chunk); + stream.read(0); + } else { + // update the buffer info. + state.length += state.objectMode ? 1 : chunk.length; + if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); + + if (state.needReadable) emitReadable(stream); + } + maybeReadMore(stream, state); +} + +function chunkInvalid(state, chunk) { + var er; + if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new TypeError('Invalid non-string/buffer chunk'); + } + return er; +} + +// if it's past the high water mark, we can push in some more. +// Also, if we have no data yet, we can stand some +// more bytes. This is to work around cases where hwm=0, +// such as the repl. Also, if the push() triggered a +// readable event, and the user called read(largeNumber) such that +// needReadable was set, then we ought to push more, so that another +// 'readable' event will be triggered. +function needMoreData(state) { + return !state.ended && (state.needReadable || state.length < state.highWaterMark || state.length === 0); +} + +Readable.prototype.isPaused = function () { + return this._readableState.flowing === false; +}; + +// backwards compatibility. 
+Readable.prototype.setEncoding = function (enc) { + if (!StringDecoder) StringDecoder = __webpack_require__(19).StringDecoder; + this._readableState.decoder = new StringDecoder(enc); + this._readableState.encoding = enc; + return this; +}; + +// Don't raise the hwm > 8MB +var MAX_HWM = 0x800000; +function computeNewHighWaterMark(n) { + if (n >= MAX_HWM) { + n = MAX_HWM; + } else { + // Get the next highest power of 2 to prevent increasing hwm excessively in + // tiny amounts + n--; + n |= n >>> 1; + n |= n >>> 2; + n |= n >>> 4; + n |= n >>> 8; + n |= n >>> 16; + n++; + } + return n; +} + +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function howMuchToRead(n, state) { + if (n <= 0 || state.length === 0 && state.ended) return 0; + if (state.objectMode) return 1; + if (n !== n) { + // Only flow one buffer at a time + if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; + } + // If we're asking for more than the current hwm, then raise the hwm. + if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); + if (n <= state.length) return n; + // Don't have enough + if (!state.ended) { + state.needReadable = true; + return 0; + } + return state.length; +} + +// you can override either this method, or the async _read(n) below. +Readable.prototype.read = function (n) { + debug('read', n); + n = parseInt(n, 10); + var state = this._readableState; + var nOrig = n; + + if (n !== 0) state.emittedReadable = false; + + // if we're doing read(0) to trigger a readable event, but we + // already have a bunch of data in the buffer, then just trigger + // the 'readable' event and move on. + if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) { + debug('read: emitReadable', state.length, state.ended); + if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); + return null; + } + + n = howMuchToRead(n, state); + + // if we've ended, and we're now clear, then finish it up. + if (n === 0 && state.ended) { + if (state.length === 0) endReadable(this); + return null; + } + + // All the actual chunk generation logic needs to be + // *below* the call to _read. The reason is that in certain + // synthetic stream cases, such as passthrough streams, _read + // may be a completely synchronous operation which may change + // the state of the read buffer, providing enough data when + // before there was *not* enough. + // + // So, the steps are: + // 1. Figure out what the state of things will be after we do + // a read from the buffer. + // + // 2. If that resulting state will trigger a _read, then call _read. + // Note that this may be asynchronous, or synchronous. Yes, it is + // deeply ugly to write APIs this way, but that still doesn't mean + // that the Readable class should behave improperly, as streams are + // designed to be sync/async agnostic. + // Take note if the _read call is sync or async (ie, if the read call + // has returned yet), so that we know whether or not it's safe to emit + // 'readable' etc. + // + // 3. Actually pull the requested chunks out of the buffer and return. + + // if we need a readable event, then we need to do some reading. 
+ var doRead = state.needReadable; + debug('need readable', doRead); + + // if we currently have less than the highWaterMark, then also read some + if (state.length === 0 || state.length - n < state.highWaterMark) { + doRead = true; + debug('length less than watermark', doRead); + } + + // however, if we've ended, then there's no point, and if we're already + // reading, then it's unnecessary. + if (state.ended || state.reading) { + doRead = false; + debug('reading or ended', doRead); + } else if (doRead) { + debug('do read'); + state.reading = true; + state.sync = true; + // if the length is currently zero, then we *need* a readable event. + if (state.length === 0) state.needReadable = true; + // call internal read method + this._read(state.highWaterMark); + state.sync = false; + // If _read pushed data synchronously, then `reading` will be false, + // and we need to re-evaluate how much data we can return to the user. + if (!state.reading) n = howMuchToRead(nOrig, state); + } + + var ret; + if (n > 0) ret = fromList(n, state);else ret = null; + + if (ret === null) { + state.needReadable = true; + n = 0; + } else { + state.length -= n; + } + + if (state.length === 0) { + // If we have nothing in the buffer, then we want to know + // as soon as we *do* get something into the buffer. + if (!state.ended) state.needReadable = true; + + // If we tried to read() past the EOF, then emit end on the next tick. + if (nOrig !== n && state.ended) endReadable(this); + } + + if (ret !== null) this.emit('data', ret); + + return ret; +}; + +function onEofChunk(stream, state) { + if (state.ended) return; + if (state.decoder) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) { + state.buffer.push(chunk); + state.length += state.objectMode ? 1 : chunk.length; + } + } + state.ended = true; + + // emit 'readable' now to make sure it gets picked up. + emitReadable(stream); +} + +// Don't emit readable right away in sync mode, because this can trigger +// another read() call => stack overflow. This way, it might trigger +// a nextTick recursion warning, but that's not so bad. +function emitReadable(stream) { + var state = stream._readableState; + state.needReadable = false; + if (!state.emittedReadable) { + debug('emitReadable', state.flowing); + state.emittedReadable = true; + if (state.sync) pna.nextTick(emitReadable_, stream);else emitReadable_(stream); + } +} + +function emitReadable_(stream) { + debug('emit readable'); + stream.emit('readable'); + flow(stream); +} + +// at this point, the user has presumably seen the 'readable' event, +// and called read() to consume some data. that may have triggered +// in turn another _read(n) call, in which case reading = true if +// it's in progress. +// However, if we're not ended, or reading, and the length < hwm, +// then go ahead and try to read some more preemptively. +function maybeReadMore(stream, state) { + if (!state.readingMore) { + state.readingMore = true; + pna.nextTick(maybeReadMore_, stream, state); + } +} + +function maybeReadMore_(stream, state) { + var len = state.length; + while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) { + debug('maybeReadMore read 0'); + stream.read(0); + if (len === state.length) + // didn't get any data, stop spinning. + break;else len = state.length; + } + state.readingMore = false; +} + +// abstract method. to be overridden in specific implementation classes. +// call cb(er, data) where data is <= n in length. 
+// for virtual (non-string, non-buffer) streams, "length" is somewhat +// arbitrary, and perhaps not very meaningful. +Readable.prototype._read = function (n) { + this.emit('error', new Error('_read() is not implemented')); +}; + +Readable.prototype.pipe = function (dest, pipeOpts) { + var src = this; + var state = this._readableState; + + switch (state.pipesCount) { + case 0: + state.pipes = dest; + break; + case 1: + state.pipes = [state.pipes, dest]; + break; + default: + state.pipes.push(dest); + break; + } + state.pipesCount += 1; + debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); + + var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; + + var endFn = doEnd ? onend : unpipe; + if (state.endEmitted) pna.nextTick(endFn);else src.once('end', endFn); + + dest.on('unpipe', onunpipe); + function onunpipe(readable, unpipeInfo) { + debug('onunpipe'); + if (readable === src) { + if (unpipeInfo && unpipeInfo.hasUnpiped === false) { + unpipeInfo.hasUnpiped = true; + cleanup(); + } + } + } + + function onend() { + debug('onend'); + dest.end(); + } + + // when the dest drains, it reduces the awaitDrain counter + // on the source. This would be more elegant with a .once() + // handler in flow(), but adding and removing repeatedly is + // too slow. + var ondrain = pipeOnDrain(src); + dest.on('drain', ondrain); + + var cleanedUp = false; + function cleanup() { + debug('cleanup'); + // cleanup event handlers once the pipe is broken + dest.removeListener('close', onclose); + dest.removeListener('finish', onfinish); + dest.removeListener('drain', ondrain); + dest.removeListener('error', onerror); + dest.removeListener('unpipe', onunpipe); + src.removeListener('end', onend); + src.removeListener('end', unpipe); + src.removeListener('data', ondata); + + cleanedUp = true; + + // if the reader is waiting for a drain event from this + // specific writer, then it would cause it to never start + // flowing again. + // So, if this is awaiting a drain, then we just call it now. + // If we don't know, then assume that we are waiting for one. + if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); + } + + // If the user pushes more data while we're writing to dest then we'll end up + // in ondata again. However, we only want to increase awaitDrain once because + // dest will only emit one 'drain' event for the multiple writes. + // => Introduce a guard on increasing awaitDrain. + var increasedAwaitDrain = false; + src.on('data', ondata); + function ondata(chunk) { + debug('ondata'); + increasedAwaitDrain = false; + var ret = dest.write(chunk); + if (false === ret && !increasedAwaitDrain) { + // If the user unpiped during `dest.write()`, it is possible + // to get stuck in a permanently paused state if that write + // also returned false. + // => Check whether `dest` is still a piping destination. + if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { + debug('false write response, pause', src._readableState.awaitDrain); + src._readableState.awaitDrain++; + increasedAwaitDrain = true; + } + src.pause(); + } + } + + // if the dest has an error, then stop piping into it. + // however, don't suppress the throwing behavior for this. 
+ function onerror(er) { + debug('onerror', er); + unpipe(); + dest.removeListener('error', onerror); + if (EElistenerCount(dest, 'error') === 0) dest.emit('error', er); + } + + // Make sure our error handler is attached before userland ones. + prependListener(dest, 'error', onerror); + + // Both close and finish should trigger unpipe, but only once. + function onclose() { + dest.removeListener('finish', onfinish); + unpipe(); + } + dest.once('close', onclose); + function onfinish() { + debug('onfinish'); + dest.removeListener('close', onclose); + unpipe(); + } + dest.once('finish', onfinish); + + function unpipe() { + debug('unpipe'); + src.unpipe(dest); + } + + // tell the dest that it's being piped to + dest.emit('pipe', src); + + // start the flow if it hasn't been started already. + if (!state.flowing) { + debug('pipe resume'); + src.resume(); + } + + return dest; +}; + +function pipeOnDrain(src) { + return function () { + var state = src._readableState; + debug('pipeOnDrain', state.awaitDrain); + if (state.awaitDrain) state.awaitDrain--; + if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { + state.flowing = true; + flow(src); + } + }; +} + +Readable.prototype.unpipe = function (dest) { + var state = this._readableState; + var unpipeInfo = { hasUnpiped: false }; + + // if we're not piping anywhere, then do nothing. + if (state.pipesCount === 0) return this; + + // just one destination. most common case. + if (state.pipesCount === 1) { + // passed in one, but it's not the right one. + if (dest && dest !== state.pipes) return this; + + if (!dest) dest = state.pipes; + + // got a match. + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + if (dest) dest.emit('unpipe', this, unpipeInfo); + return this; + } + + // slow case. multiple pipe destinations. + + if (!dest) { + // remove all. + var dests = state.pipes; + var len = state.pipesCount; + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + + for (var i = 0; i < len; i++) { + dests[i].emit('unpipe', this, unpipeInfo); + }return this; + } + + // try to find the right one. + var index = indexOf(state.pipes, dest); + if (index === -1) return this; + + state.pipes.splice(index, 1); + state.pipesCount -= 1; + if (state.pipesCount === 1) state.pipes = state.pipes[0]; + + dest.emit('unpipe', this, unpipeInfo); + + return this; +}; + +// set up data events if they are asked for +// Ensure readable listeners eventually get something +Readable.prototype.on = function (ev, fn) { + var res = Stream.prototype.on.call(this, ev, fn); + + if (ev === 'data') { + // Start flowing on next tick if stream isn't explicitly paused + if (this._readableState.flowing !== false) this.resume(); + } else if (ev === 'readable') { + var state = this._readableState; + if (!state.endEmitted && !state.readableListening) { + state.readableListening = state.needReadable = true; + state.emittedReadable = false; + if (!state.reading) { + pna.nextTick(nReadingNextTick, this); + } else if (state.length) { + emitReadable(this); + } + } + } + + return res; +}; +Readable.prototype.addListener = Readable.prototype.on; + +function nReadingNextTick(self) { + debug('readable nexttick read 0'); + self.read(0); +} + +// pause() and resume() are remnants of the legacy readable stream API +// If the user uses them, then switch into old mode. 
+Readable.prototype.resume = function () { + var state = this._readableState; + if (!state.flowing) { + debug('resume'); + state.flowing = true; + resume(this, state); + } + return this; +}; + +function resume(stream, state) { + if (!state.resumeScheduled) { + state.resumeScheduled = true; + pna.nextTick(resume_, stream, state); + } +} + +function resume_(stream, state) { + if (!state.reading) { + debug('resume read 0'); + stream.read(0); + } + + state.resumeScheduled = false; + state.awaitDrain = 0; + stream.emit('resume'); + flow(stream); + if (state.flowing && !state.reading) stream.read(0); +} + +Readable.prototype.pause = function () { + debug('call pause flowing=%j', this._readableState.flowing); + if (false !== this._readableState.flowing) { + debug('pause'); + this._readableState.flowing = false; + this.emit('pause'); + } + return this; +}; + +function flow(stream) { + var state = stream._readableState; + debug('flow', state.flowing); + while (state.flowing && stream.read() !== null) {} +} + +// wrap an old-style stream as the async data source. +// This is *not* part of the readable stream interface. +// It is an ugly unfortunate mess of history. +Readable.prototype.wrap = function (stream) { + var _this = this; + + var state = this._readableState; + var paused = false; + + stream.on('end', function () { + debug('wrapped end'); + if (state.decoder && !state.ended) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) _this.push(chunk); + } + + _this.push(null); + }); + + stream.on('data', function (chunk) { + debug('wrapped data'); + if (state.decoder) chunk = state.decoder.write(chunk); + + // don't skip over falsy values in objectMode + if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; + + var ret = _this.push(chunk); + if (!ret) { + paused = true; + stream.pause(); + } + }); + + // proxy all the other methods. + // important when wrapping filters and duplexes. + for (var i in stream) { + if (this[i] === undefined && typeof stream[i] === 'function') { + this[i] = function (method) { + return function () { + return stream[method].apply(stream, arguments); + }; + }(i); + } + } + + // proxy certain important events. + for (var n = 0; n < kProxyEvents.length; n++) { + stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n])); + } + + // when we try to consume some more bytes, simply unpause the + // underlying stream. + this._read = function (n) { + debug('wrapped _read', n); + if (paused) { + paused = false; + stream.resume(); + } + }; + + return this; +}; + +Object.defineProperty(Readable.prototype, 'readableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function () { + return this._readableState.highWaterMark; + } +}); + +// exposed for testing purposes only. +Readable._fromList = fromList; + +// Pluck off n bytes from an array of buffers. +// Length is the combined lengths of all the buffers in the list. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. 
+function fromList(n, state) { + // nothing buffered + if (state.length === 0) return null; + + var ret; + if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) { + // read it all, truncate the list + if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.head.data;else ret = state.buffer.concat(state.length); + state.buffer.clear(); + } else { + // read part of list + ret = fromListPartial(n, state.buffer, state.decoder); + } + + return ret; +} + +// Extracts only enough buffered data to satisfy the amount requested. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function fromListPartial(n, list, hasStrings) { + var ret; + if (n < list.head.data.length) { + // slice is the same for buffers and strings + ret = list.head.data.slice(0, n); + list.head.data = list.head.data.slice(n); + } else if (n === list.head.data.length) { + // first chunk is a perfect match + ret = list.shift(); + } else { + // result spans more than one buffer + ret = hasStrings ? copyFromBufferString(n, list) : copyFromBuffer(n, list); + } + return ret; +} + +// Copies a specified amount of characters from the list of buffered data +// chunks. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function copyFromBufferString(n, list) { + var p = list.head; + var c = 1; + var ret = p.data; + n -= ret.length; + while (p = p.next) { + var str = p.data; + var nb = n > str.length ? str.length : n; + if (nb === str.length) ret += str;else ret += str.slice(0, n); + n -= nb; + if (n === 0) { + if (nb === str.length) { + ++c; + if (p.next) list.head = p.next;else list.head = list.tail = null; + } else { + list.head = p; + p.data = str.slice(nb); + } + break; + } + ++c; + } + list.length -= c; + return ret; +} + +// Copies a specified amount of bytes from the list of buffered data chunks. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function copyFromBuffer(n, list) { + var ret = Buffer.allocUnsafe(n); + var p = list.head; + var c = 1; + p.data.copy(ret); + n -= p.data.length; + while (p = p.next) { + var buf = p.data; + var nb = n > buf.length ? buf.length : n; + buf.copy(ret, ret.length - n, 0, nb); + n -= nb; + if (n === 0) { + if (nb === buf.length) { + ++c; + if (p.next) list.head = p.next;else list.head = list.tail = null; + } else { + list.head = p; + p.data = buf.slice(nb); + } + break; + } + ++c; + } + list.length -= c; + return ret; +} + +function endReadable(stream) { + var state = stream._readableState; + + // If we get here before consuming all the bytes, then that is a + // bug in node. Should never happen. + if (state.length > 0) throw new Error('"endReadable()" called on non-empty stream'); + + if (!state.endEmitted) { + state.ended = true; + pna.nextTick(endReadableNT, state, stream); + } +} + +function endReadableNT(state, stream) { + // Check that we didn't get one last unshift. 
+ if (!state.endEmitted && state.length === 0) { + state.endEmitted = true; + stream.readable = false; + stream.emit('end'); + } +} + +function indexOf(xs, x) { + for (var i = 0, l = xs.length; i < l; i++) { + if (xs[i] === x) return i; + } + return -1; +} +/* WEBPACK VAR INJECTION */}.call(exports, __webpack_require__(0), __webpack_require__(1))) + +/***/ }), +/* 16 */ +/***/ (function(module, exports, __webpack_require__) { + +module.exports = __webpack_require__(9).EventEmitter; + + +/***/ }), +/* 17 */ +/***/ (function(module, exports, __webpack_require__) { + +"use strict"; + + +/**/ + +var pna = __webpack_require__(6); +/**/ + +// undocumented cb() API, needed for core, not for public API +function destroy(err, cb) { + var _this = this; + + var readableDestroyed = this._readableState && this._readableState.destroyed; + var writableDestroyed = this._writableState && this._writableState.destroyed; + + if (readableDestroyed || writableDestroyed) { + if (cb) { + cb(err); + } else if (err && (!this._writableState || !this._writableState.errorEmitted)) { + pna.nextTick(emitErrorNT, this, err); + } + return this; + } + + // we set destroyed to true before firing error callbacks in order + // to make it re-entrance safe in case destroy() is called within callbacks + + if (this._readableState) { + this._readableState.destroyed = true; + } + + // if this is a duplex stream mark the writable part as destroyed as well + if (this._writableState) { + this._writableState.destroyed = true; + } + + this._destroy(err || null, function (err) { + if (!cb && err) { + pna.nextTick(emitErrorNT, _this, err); + if (_this._writableState) { + _this._writableState.errorEmitted = true; + } + } else if (cb) { + cb(err); + } + }); + + return this; +} + +function undestroy() { + if (this._readableState) { + this._readableState.destroyed = false; + this._readableState.reading = false; + this._readableState.ended = false; + this._readableState.endEmitted = false; + } + + if (this._writableState) { + this._writableState.destroyed = false; + this._writableState.ended = false; + this._writableState.ending = false; + this._writableState.finished = false; + this._writableState.errorEmitted = false; + } +} + +function emitErrorNT(self, err) { + self.emit('error', err); +} + +module.exports = { + destroy: destroy, + undestroy: undestroy +}; + +/***/ }), +/* 18 */ +/***/ (function(module, exports, __webpack_require__) { + +"use strict"; +/* WEBPACK VAR INJECTION */(function(process, setImmediate, global) {// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// A bit simpler than readable streams. +// Implement an async ._write(chunk, encoding, cb), and it'll handle all +// the drain event emission and buffering. + + + +/**/ + +var pna = __webpack_require__(6); +/**/ + +module.exports = Writable; + +/* */ +function WriteReq(chunk, encoding, cb) { + this.chunk = chunk; + this.encoding = encoding; + this.callback = cb; + this.next = null; +} + +// It seems a linked list but it is not +// there will be only 2 of these for each stream +function CorkedRequest(state) { + var _this = this; + + this.next = null; + this.entry = null; + this.finish = function () { + onCorkedFinish(_this, state); + }; +} +/* */ + +/**/ +var asyncWrite = !process.browser && ['v0.10', 'v0.9.'].indexOf(process.version.slice(0, 5)) > -1 ? setImmediate : pna.nextTick; +/**/ + +/**/ +var Duplex; +/**/ + +Writable.WritableState = WritableState; + +/**/ +var util = Object.create(__webpack_require__(5)); +util.inherits = __webpack_require__(2); +/**/ + +/**/ +var internalUtil = { + deprecate: __webpack_require__(38) +}; +/**/ + +/**/ +var Stream = __webpack_require__(16); +/**/ + +/**/ + +var Buffer = __webpack_require__(7).Buffer; +var OurUint8Array = global.Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} + +/**/ + +var destroyImpl = __webpack_require__(17); + +util.inherits(Writable, Stream); + +function nop() {} + +function WritableState(options, stream) { + Duplex = Duplex || __webpack_require__(4); + + options = options || {}; + + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream. + // These options can be provided separately as readableXXX and writableXXX. + var isDuplex = stream instanceof Duplex; + + // object stream flag to indicate whether or not this stream + // contains buffers or objects. + this.objectMode = !!options.objectMode; + + if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; + + // the point at which write() starts returning false + // Note: 0 is a valid value, means that we always return false if + // the entire buffer is not flushed immediately on write() + var hwm = options.highWaterMark; + var writableHwm = options.writableHighWaterMark; + var defaultHwm = this.objectMode ? 16 : 16 * 1024; + + if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (writableHwm || writableHwm === 0)) this.highWaterMark = writableHwm;else this.highWaterMark = defaultHwm; + + // cast to ints. + this.highWaterMark = Math.floor(this.highWaterMark); + + // if _final has been called + this.finalCalled = false; + + // drain event flag. + this.needDrain = false; + // at the start of calling end() + this.ending = false; + // when end() has been called, and returned + this.ended = false; + // when 'finish' is emitted + this.finished = false; + + // has it been destroyed + this.destroyed = false; + + // should we decode strings into buffers before passing to _write? 
+ // this is here so that some node-core streams can optimize string + // handling at a lower level. + var noDecode = options.decodeStrings === false; + this.decodeStrings = !noDecode; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // not an actual buffer we keep track of, but a measurement + // of how much we're waiting to get pushed to some underlying + // socket or file. + this.length = 0; + + // a flag to see when we're in the middle of a write. + this.writing = false; + + // when true all writes will be buffered until .uncork() call + this.corked = 0; + + // a flag to be able to tell if the onwrite cb is called immediately, + // or on a later tick. We set this to true at first, because any + // actions that shouldn't happen until "later" should generally also + // not happen before the first write call. + this.sync = true; + + // a flag to know if we're processing previously buffered items, which + // may call the _write() callback in the same tick, so that we don't + // end up in an overlapped onwrite situation. + this.bufferProcessing = false; + + // the callback that's passed to _write(chunk,cb) + this.onwrite = function (er) { + onwrite(stream, er); + }; + + // the callback that the user supplies to write(chunk,encoding,cb) + this.writecb = null; + + // the amount that is being written when _write is called. + this.writelen = 0; + + this.bufferedRequest = null; + this.lastBufferedRequest = null; + + // number of pending user-supplied write callbacks + // this must be 0 before 'finish' can be emitted + this.pendingcb = 0; + + // emit prefinish if the only thing we're waiting for is _write cbs + // This is relevant for synchronous Transform streams + this.prefinished = false; + + // True if the error was already emitted and should not be thrown again + this.errorEmitted = false; + + // count buffered requests + this.bufferedRequestCount = 0; + + // allocate the first CorkedRequest, there is always + // one allocated and free to use, and we maintain at most two + this.corkedRequestsFree = new CorkedRequest(this); +} + +WritableState.prototype.getBuffer = function getBuffer() { + var current = this.bufferedRequest; + var out = []; + while (current) { + out.push(current); + current = current.next; + } + return out; +}; + +(function () { + try { + Object.defineProperty(WritableState.prototype, 'buffer', { + get: internalUtil.deprecate(function () { + return this.getBuffer(); + }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003') + }); + } catch (_) {} +})(); + +// Test _writableState for inheritance to account for Duplex streams, +// whose prototype chain only points to Readable. 
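+// The observable effect of the Symbol.hasInstance override below, sketched with
+// the public stream classes as an illustrative comment only:
+//
+//   const { Duplex, Writable } = require('stream');
+//   const d = new Duplex({ read() {}, write(chunk, enc, cb) { cb(); } });
+//   d instanceof Writable;   // true: the check looks for a _writableState,
+//                            // even though Duplex's prototype chain extends Readable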
+var realHasInstance; +if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') { + realHasInstance = Function.prototype[Symbol.hasInstance]; + Object.defineProperty(Writable, Symbol.hasInstance, { + value: function (object) { + if (realHasInstance.call(this, object)) return true; + if (this !== Writable) return false; + + return object && object._writableState instanceof WritableState; + } + }); +} else { + realHasInstance = function (object) { + return object instanceof this; + }; +} + +function Writable(options) { + Duplex = Duplex || __webpack_require__(4); + + // Writable ctor is applied to Duplexes, too. + // `realHasInstance` is necessary because using plain `instanceof` + // would return false, as no `_writableState` property is attached. + + // Trying to use the custom `instanceof` for Writable here will also break the + // Node.js LazyTransform implementation, which has a non-trivial getter for + // `_writableState` that would lead to infinite recursion. + if (!realHasInstance.call(Writable, this) && !(this instanceof Duplex)) { + return new Writable(options); + } + + this._writableState = new WritableState(options, this); + + // legacy. + this.writable = true; + + if (options) { + if (typeof options.write === 'function') this._write = options.write; + + if (typeof options.writev === 'function') this._writev = options.writev; + + if (typeof options.destroy === 'function') this._destroy = options.destroy; + + if (typeof options.final === 'function') this._final = options.final; + } + + Stream.call(this); +} + +// Otherwise people can pipe Writable streams, which is just wrong. +Writable.prototype.pipe = function () { + this.emit('error', new Error('Cannot pipe, not readable')); +}; + +function writeAfterEnd(stream, cb) { + var er = new Error('write after end'); + // TODO: defer error events consistently everywhere, not just the cb + stream.emit('error', er); + pna.nextTick(cb, er); +} + +// Checks that a user-supplied chunk is valid, especially for the particular +// mode the stream is in. Currently this means that `null` is never accepted +// and undefined/non-string values are only allowed in object mode. 
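+// What validChunk() means for callers, sketched as an illustrative comment only:
+//
+//   const { Writable } = require('stream');
+//   const ws = new Writable({ write(chunk, enc, cb) { cb(); } });
+//   ws.write('text');             // ok: strings and Buffers are always accepted
+//   ws.write(null);               // 'error' event with TypeError('May not write null values to stream')
+//   const ows = new Writable({ objectMode: true, write(chunk, enc, cb) { cb(); } });
+//   ows.write({ any: 'object' }); // ok: arbitrary values are allowed in object mode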
+function validChunk(stream, state, chunk, cb) { + var valid = true; + var er = false; + + if (chunk === null) { + er = new TypeError('May not write null values to stream'); + } else if (typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new TypeError('Invalid non-string/buffer chunk'); + } + if (er) { + stream.emit('error', er); + pna.nextTick(cb, er); + valid = false; + } + return valid; +} + +Writable.prototype.write = function (chunk, encoding, cb) { + var state = this._writableState; + var ret = false; + var isBuf = !state.objectMode && _isUint8Array(chunk); + + if (isBuf && !Buffer.isBuffer(chunk)) { + chunk = _uint8ArrayToBuffer(chunk); + } + + if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + + if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; + + if (typeof cb !== 'function') cb = nop; + + if (state.ended) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) { + state.pendingcb++; + ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb); + } + + return ret; +}; + +Writable.prototype.cork = function () { + var state = this._writableState; + + state.corked++; +}; + +Writable.prototype.uncork = function () { + var state = this._writableState; + + if (state.corked) { + state.corked--; + + if (!state.writing && !state.corked && !state.finished && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); + } +}; + +Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { + // node::ParseEncoding() requires lower case. + if (typeof encoding === 'string') encoding = encoding.toLowerCase(); + if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new TypeError('Unknown encoding: ' + encoding); + this._writableState.defaultEncoding = encoding; + return this; +}; + +function decodeChunk(state, chunk, encoding) { + if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { + chunk = Buffer.from(chunk, encoding); + } + return chunk; +} + +Object.defineProperty(Writable.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function () { + return this._writableState.highWaterMark; + } +}); + +// if we're already writing something, then just put this +// in the queue, and wait our turn. Otherwise, call _write +// If we return false, then we need a drain event, so set that flag. +function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { + if (!isBuf) { + var newChunk = decodeChunk(state, chunk, encoding); + if (chunk !== newChunk) { + isBuf = true; + encoding = 'buffer'; + chunk = newChunk; + } + } + var len = state.objectMode ? 1 : chunk.length; + + state.length += len; + + var ret = state.length < state.highWaterMark; + // we must ensure that previous needDrain will not be reset to false. 
+ if (!ret) state.needDrain = true; + + if (state.writing || state.corked) { + var last = state.lastBufferedRequest; + state.lastBufferedRequest = { + chunk: chunk, + encoding: encoding, + isBuf: isBuf, + callback: cb, + next: null + }; + if (last) { + last.next = state.lastBufferedRequest; + } else { + state.bufferedRequest = state.lastBufferedRequest; + } + state.bufferedRequestCount += 1; + } else { + doWrite(stream, state, false, len, chunk, encoding, cb); + } + + return ret; +} + +function doWrite(stream, state, writev, len, chunk, encoding, cb) { + state.writelen = len; + state.writecb = cb; + state.writing = true; + state.sync = true; + if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); + state.sync = false; +} + +function onwriteError(stream, state, sync, er, cb) { + --state.pendingcb; + + if (sync) { + // defer the callback if we are being called synchronously + // to avoid piling up things on the stack + pna.nextTick(cb, er); + // this can emit finish, and it will always happen + // after error + pna.nextTick(finishMaybe, stream, state); + stream._writableState.errorEmitted = true; + stream.emit('error', er); + } else { + // the caller expect this to happen before if + // it is async + cb(er); + stream._writableState.errorEmitted = true; + stream.emit('error', er); + // this can emit finish, but finish must + // always follow error + finishMaybe(stream, state); + } +} + +function onwriteStateUpdate(state) { + state.writing = false; + state.writecb = null; + state.length -= state.writelen; + state.writelen = 0; +} + +function onwrite(stream, er) { + var state = stream._writableState; + var sync = state.sync; + var cb = state.writecb; + + onwriteStateUpdate(state); + + if (er) onwriteError(stream, state, sync, er, cb);else { + // Check if we're actually ready to finish, but don't emit yet + var finished = needFinish(state); + + if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { + clearBuffer(stream, state); + } + + if (sync) { + /**/ + asyncWrite(afterWrite, stream, state, finished, cb); + /**/ + } else { + afterWrite(stream, state, finished, cb); + } + } +} + +function afterWrite(stream, state, finished, cb) { + if (!finished) onwriteDrain(stream, state); + state.pendingcb--; + cb(); + finishMaybe(stream, state); +} + +// Must force callback to be called on nextTick, so that we don't +// emit 'drain' before the write() consumer gets the 'false' return +// value, and has a chance to attach a 'drain' listener. 
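+// The consumer-side pattern this ordering guarantee exists for; a minimal sketch
+// as an illustrative comment only (writeMany is a hypothetical helper):
+//
+//   function writeMany(ws, chunks) {
+//     let i = 0;
+//     (function next() {
+//       while (i < chunks.length) {
+//         if (!ws.write(chunks[i++])) {   // false: buffered length reached the highWaterMark
+//           ws.once('drain', next);       // safe: 'drain' is never emitted before write() has returned false
+//           return;
+//         }
+//       }
+//       ws.end();
+//     })();
+//   }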
+function onwriteDrain(stream, state) { + if (state.length === 0 && state.needDrain) { + state.needDrain = false; + stream.emit('drain'); + } +} + +// if there's something in the buffer waiting, then process it +function clearBuffer(stream, state) { + state.bufferProcessing = true; + var entry = state.bufferedRequest; + + if (stream._writev && entry && entry.next) { + // Fast case, write everything using _writev() + var l = state.bufferedRequestCount; + var buffer = new Array(l); + var holder = state.corkedRequestsFree; + holder.entry = entry; + + var count = 0; + var allBuffers = true; + while (entry) { + buffer[count] = entry; + if (!entry.isBuf) allBuffers = false; + entry = entry.next; + count += 1; + } + buffer.allBuffers = allBuffers; + + doWrite(stream, state, true, state.length, buffer, '', holder.finish); + + // doWrite is almost always async, defer these to save a bit of time + // as the hot path ends with doWrite + state.pendingcb++; + state.lastBufferedRequest = null; + if (holder.next) { + state.corkedRequestsFree = holder.next; + holder.next = null; + } else { + state.corkedRequestsFree = new CorkedRequest(state); + } + state.bufferedRequestCount = 0; + } else { + // Slow case, write chunks one-by-one + while (entry) { + var chunk = entry.chunk; + var encoding = entry.encoding; + var cb = entry.callback; + var len = state.objectMode ? 1 : chunk.length; + + doWrite(stream, state, false, len, chunk, encoding, cb); + entry = entry.next; + state.bufferedRequestCount--; + // if we didn't call the onwrite immediately, then + // it means that we need to wait until it does. + // also, that means that the chunk and cb are currently + // being processed, so move the buffer counter past them. + if (state.writing) { + break; + } + } + + if (entry === null) state.lastBufferedRequest = null; + } + + state.bufferedRequest = entry; + state.bufferProcessing = false; +} + +Writable.prototype._write = function (chunk, encoding, cb) { + cb(new Error('_write() is not implemented')); +}; + +Writable.prototype._writev = null; + +Writable.prototype.end = function (chunk, encoding, cb) { + var state = this._writableState; + + if (typeof chunk === 'function') { + cb = chunk; + chunk = null; + encoding = null; + } else if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + + if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); + + // .end() fully uncorks + if (state.corked) { + state.corked = 1; + this.uncork(); + } + + // ignore unnecessary end() calls. 
+ if (!state.ending && !state.finished) endWritable(this, state, cb); +}; + +function needFinish(state) { + return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; +} +function callFinal(stream, state) { + stream._final(function (err) { + state.pendingcb--; + if (err) { + stream.emit('error', err); + } + state.prefinished = true; + stream.emit('prefinish'); + finishMaybe(stream, state); + }); +} +function prefinish(stream, state) { + if (!state.prefinished && !state.finalCalled) { + if (typeof stream._final === 'function') { + state.pendingcb++; + state.finalCalled = true; + pna.nextTick(callFinal, stream, state); + } else { + state.prefinished = true; + stream.emit('prefinish'); + } + } +} + +function finishMaybe(stream, state) { + var need = needFinish(state); + if (need) { + prefinish(stream, state); + if (state.pendingcb === 0) { + state.finished = true; + stream.emit('finish'); + } + } + return need; +} + +function endWritable(stream, state, cb) { + state.ending = true; + finishMaybe(stream, state); + if (cb) { + if (state.finished) pna.nextTick(cb);else stream.once('finish', cb); + } + state.ended = true; + stream.writable = false; +} + +function onCorkedFinish(corkReq, state, err) { + var entry = corkReq.entry; + corkReq.entry = null; + while (entry) { + var cb = entry.callback; + state.pendingcb--; + cb(err); + entry = entry.next; + } + if (state.corkedRequestsFree) { + state.corkedRequestsFree.next = corkReq; + } else { + state.corkedRequestsFree = corkReq; + } +} + +Object.defineProperty(Writable.prototype, 'destroyed', { + get: function () { + if (this._writableState === undefined) { + return false; + } + return this._writableState.destroyed; + }, + set: function (value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._writableState) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._writableState.destroyed = value; + } +}); + +Writable.prototype.destroy = destroyImpl.destroy; +Writable.prototype._undestroy = destroyImpl.undestroy; +Writable.prototype._destroy = function (err, cb) { + this.end(); + cb(err); +}; +/* WEBPACK VAR INJECTION */}.call(exports, __webpack_require__(1), __webpack_require__(36).setImmediate, __webpack_require__(0))) + +/***/ }), +/* 19 */ +/***/ (function(module, exports, __webpack_require__) { + +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + + + +/**/ + +var Buffer = __webpack_require__(7).Buffer; +/**/ + +var isEncoding = Buffer.isEncoding || function (encoding) { + encoding = '' + encoding; + switch (encoding && encoding.toLowerCase()) { + case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw': + return true; + default: + return false; + } +}; + +function _normalizeEncoding(enc) { + if (!enc) return 'utf8'; + var retried; + while (true) { + switch (enc) { + case 'utf8': + case 'utf-8': + return 'utf8'; + case 'ucs2': + case 'ucs-2': + case 'utf16le': + case 'utf-16le': + return 'utf16le'; + case 'latin1': + case 'binary': + return 'latin1'; + case 'base64': + case 'ascii': + case 'hex': + return enc; + default: + if (retried) return; // undefined + enc = ('' + enc).toLowerCase(); + retried = true; + } + } +}; + +// Do not cache `Buffer.isEncoding` when checking encoding names as some +// modules monkey-patch it to support additional encodings +function normalizeEncoding(enc) { + var nenc = _normalizeEncoding(enc); + if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc); + return nenc || enc; +} + +// StringDecoder provides an interface for efficiently splitting a series of +// buffers into a series of JS strings without breaking apart multi-byte +// characters. +exports.StringDecoder = StringDecoder; +function StringDecoder(encoding) { + this.encoding = normalizeEncoding(encoding); + var nb; + switch (this.encoding) { + case 'utf16le': + this.text = utf16Text; + this.end = utf16End; + nb = 4; + break; + case 'utf8': + this.fillLast = utf8FillLast; + nb = 4; + break; + case 'base64': + this.text = base64Text; + this.end = base64End; + nb = 3; + break; + default: + this.write = simpleWrite; + this.end = simpleEnd; + return; + } + this.lastNeed = 0; + this.lastTotal = 0; + this.lastChar = Buffer.allocUnsafe(nb); +} + +StringDecoder.prototype.write = function (buf) { + if (buf.length === 0) return ''; + var r; + var i; + if (this.lastNeed) { + r = this.fillLast(buf); + if (r === undefined) return ''; + i = this.lastNeed; + this.lastNeed = 0; + } else { + i = 0; + } + if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i); + return r || ''; +}; + +StringDecoder.prototype.end = utf8End; + +// Returns only complete characters in a Buffer +StringDecoder.prototype.text = utf8Text; + +// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer +StringDecoder.prototype.fillLast = function (buf) { + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); + } + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length); + this.lastNeed -= buf.length; +}; + +// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a +// continuation byte. If an invalid byte is detected, -2 is returned. +function utf8CheckByte(byte) { + if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4; + return byte >> 6 === 0x02 ? 
-1 : -2; +} + +// Checks at most 3 bytes at the end of a Buffer in order to detect an +// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4) +// needed to complete the UTF-8 character (if applicable) are returned. +function utf8CheckIncomplete(self, buf, i) { + var j = buf.length - 1; + if (j < i) return 0; + var nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 1; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 2; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) { + if (nb === 2) nb = 0;else self.lastNeed = nb - 3; + } + return nb; + } + return 0; +} + +// Validates as many continuation bytes for a multi-byte UTF-8 character as +// needed or are available. If we see a non-continuation byte where we expect +// one, we "replace" the validated continuation bytes we've seen so far with +// a single UTF-8 replacement character ('\ufffd'), to match v8's UTF-8 decoding +// behavior. The continuation byte check is included three times in the case +// where all of the continuation bytes for a character exist in the same buffer. +// It is also done this way as a slight performance increase instead of using a +// loop. +function utf8CheckExtraBytes(self, buf, p) { + if ((buf[0] & 0xC0) !== 0x80) { + self.lastNeed = 0; + return '\ufffd'; + } + if (self.lastNeed > 1 && buf.length > 1) { + if ((buf[1] & 0xC0) !== 0x80) { + self.lastNeed = 1; + return '\ufffd'; + } + if (self.lastNeed > 2 && buf.length > 2) { + if ((buf[2] & 0xC0) !== 0x80) { + self.lastNeed = 2; + return '\ufffd'; + } + } + } +} + +// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer. +function utf8FillLast(buf) { + var p = this.lastTotal - this.lastNeed; + var r = utf8CheckExtraBytes(this, buf, p); + if (r !== undefined) return r; + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, p, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); + } + buf.copy(this.lastChar, p, 0, buf.length); + this.lastNeed -= buf.length; +} + +// Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a +// partial character, the character's bytes are buffered until the required +// number of bytes are available. +function utf8Text(buf, i) { + var total = utf8CheckIncomplete(this, buf, i); + if (!this.lastNeed) return buf.toString('utf8', i); + this.lastTotal = total; + var end = buf.length - (total - this.lastNeed); + buf.copy(this.lastChar, 0, end); + return buf.toString('utf8', i, end); +} + +// For UTF-8, a replacement character is added when ending on a partial +// character. +function utf8End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) return r + '\ufffd'; + return r; +} + +// UTF-16LE typically needs two bytes per character, but even if we have an even +// number of bytes available, we need to check if we end on a leading/high +// surrogate. In that case, we need to wait for the next two bytes in order to +// decode the last character properly. 
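+// What the surrogate handling buys the caller, sketched with the public
+// string_decoder API as an illustrative comment only:
+//
+//   const { StringDecoder } = require('string_decoder');
+//   const d = new StringDecoder('utf16le');
+//   const pair = Buffer.from('\uD83D\uDE00', 'utf16le');  // one astral character, 4 bytes
+//   d.write(pair.slice(0, 2));   // '' -- chunk ends on a lead surrogate, so it is held back
+//   d.write(pair.slice(2));      // the complete character once the trail surrogate arrives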
+function utf16Text(buf, i) { + if ((buf.length - i) % 2 === 0) { + var r = buf.toString('utf16le', i); + if (r) { + var c = r.charCodeAt(r.length - 1); + if (c >= 0xD800 && c <= 0xDBFF) { + this.lastNeed = 2; + this.lastTotal = 4; + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; + return r.slice(0, -1); + } + } + return r; + } + this.lastNeed = 1; + this.lastTotal = 2; + this.lastChar[0] = buf[buf.length - 1]; + return buf.toString('utf16le', i, buf.length - 1); +} + +// For UTF-16LE we do not explicitly append special replacement characters if we +// end on a partial character, we simply let v8 handle that. +function utf16End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) { + var end = this.lastTotal - this.lastNeed; + return r + this.lastChar.toString('utf16le', 0, end); + } + return r; +} + +function base64Text(buf, i) { + var n = (buf.length - i) % 3; + if (n === 0) return buf.toString('base64', i); + this.lastNeed = 3 - n; + this.lastTotal = 3; + if (n === 1) { + this.lastChar[0] = buf[buf.length - 1]; + } else { + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; + } + return buf.toString('base64', i, buf.length - n); +} + +function base64End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed); + return r; +} + +// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex) +function simpleWrite(buf) { + return buf.toString(this.encoding); +} + +function simpleEnd(buf) { + return buf && buf.length ? this.write(buf) : ''; +} + +/***/ }), +/* 20 */ +/***/ (function(module, exports, __webpack_require__) { + +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a transform stream is a readable/writable stream where you do +// something with the data. Sometimes it's called a "filter", +// but that's not a great name for it, since that implies a thing where +// some bits pass through, and others are simply ignored. (That would +// be a valid example of a transform, of course.) +// +// While the output is causally related to the input, it's not a +// necessarily symmetric or synchronous transformation. For example, +// a zlib stream might take multiple plain-text writes(), and then +// emit a single compressed chunk some time in the future. 
+// +// Here's how this works: +// +// The Transform stream has all the aspects of the readable and writable +// stream classes. When you write(chunk), that calls _write(chunk,cb) +// internally, and returns false if there's a lot of pending writes +// buffered up. When you call read(), that calls _read(n) until +// there's enough pending readable data buffered up. +// +// In a transform stream, the written data is placed in a buffer. When +// _read(n) is called, it transforms the queued up data, calling the +// buffered _write cb's as it consumes chunks. If consuming a single +// written chunk would result in multiple output chunks, then the first +// outputted bit calls the readcb, and subsequent chunks just go into +// the read buffer, and will cause it to emit 'readable' if necessary. +// +// This way, back-pressure is actually determined by the reading side, +// since _read has to be called to start processing a new chunk. However, +// a pathological inflate type of transform can cause excessive buffering +// here. For example, imagine a stream where every byte of input is +// interpreted as an integer from 0-255, and then results in that many +// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in +// 1kb of data being output. In this case, you could write a very small +// amount of input, and end up with a very large amount of output. In +// such a pathological inflating mechanism, there'd be no way to tell +// the system to stop doing the transform. A single 4MB write could +// cause the system to run out of memory. +// +// However, even in such a pathological case, only a single written chunk +// would be consumed, and then the rest would wait (un-transformed) until +// the results of the previous transformed chunk were consumed. + + + +module.exports = Transform; + +var Duplex = __webpack_require__(4); + +/**/ +var util = Object.create(__webpack_require__(5)); +util.inherits = __webpack_require__(2); +/**/ + +util.inherits(Transform, Duplex); + +function afterTransform(er, data) { + var ts = this._transformState; + ts.transforming = false; + + var cb = ts.writecb; + + if (!cb) { + return this.emit('error', new Error('write callback called multiple times')); + } + + ts.writechunk = null; + ts.writecb = null; + + if (data != null) // single equals check for both `null` and `undefined` + this.push(data); + + cb(er); + + var rs = this._readableState; + rs.reading = false; + if (rs.needReadable || rs.length < rs.highWaterMark) { + this._read(rs.highWaterMark); + } +} + +function Transform(options) { + if (!(this instanceof Transform)) return new Transform(options); + + Duplex.call(this, options); + + this._transformState = { + afterTransform: afterTransform.bind(this), + needTransform: false, + transforming: false, + writecb: null, + writechunk: null, + writeencoding: null + }; + + // start out asking for a readable event once data is transformed. + this._readableState.needReadable = true; + + // we have implemented the _read method, and done the other things + // that Readable wants before the first _read call, so unset the + // sync guard flag. + this._readableState.sync = false; + + if (options) { + if (typeof options.transform === 'function') this._transform = options.transform; + + if (typeof options.flush === 'function') this._flush = options.flush; + } + + // When the writable side finishes, then flush out anything remaining. 
+ this.on('prefinish', prefinish); +} + +function prefinish() { + var _this = this; + + if (typeof this._flush === 'function') { + this._flush(function (er, data) { + done(_this, er, data); + }); + } else { + done(this, null, null); + } +} + +Transform.prototype.push = function (chunk, encoding) { + this._transformState.needTransform = false; + return Duplex.prototype.push.call(this, chunk, encoding); +}; + +// This is the part where you do stuff! +// override this function in implementation classes. +// 'chunk' is an input chunk. +// +// Call `push(newChunk)` to pass along transformed output +// to the readable side. You may call 'push' zero or more times. +// +// Call `cb(err)` when you are done with this chunk. If you pass +// an error, then that'll put the hurt on the whole operation. If you +// never call cb(), then you'll never get another chunk. +Transform.prototype._transform = function (chunk, encoding, cb) { + throw new Error('_transform() is not implemented'); +}; + +Transform.prototype._write = function (chunk, encoding, cb) { + var ts = this._transformState; + ts.writecb = cb; + ts.writechunk = chunk; + ts.writeencoding = encoding; + if (!ts.transforming) { + var rs = this._readableState; + if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); + } +}; + +// Doesn't matter what the args are here. +// _transform does all the work. +// That we got here means that the readable side wants more data. +Transform.prototype._read = function (n) { + var ts = this._transformState; + + if (ts.writechunk !== null && ts.writecb && !ts.transforming) { + ts.transforming = true; + this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); + } else { + // mark that we need a transform, so that any data that comes in + // will get processed, now that we've asked for it. 
+ ts.needTransform = true; + } +}; + +Transform.prototype._destroy = function (err, cb) { + var _this2 = this; + + Duplex.prototype._destroy.call(this, err, function (err2) { + cb(err2); + _this2.emit('close'); + }); +}; + +function done(stream, er, data) { + if (er) return stream.emit('error', er); + + if (data != null) // single equals check for both `null` and `undefined` + stream.push(data); + + // if there's nothing in the write buffer, then that means + // that nothing more will ever be provided + if (stream._writableState.length) throw new Error('Calling transform done when ws.length != 0'); + + if (stream._transformState.transforming) throw new Error('Calling transform done when still transforming'); + + return stream.push(null); +} + +/***/ }), +/* 21 */ +/***/ (function(module, exports, __webpack_require__) { + +var EventSource = __webpack_require__(22) + +if (typeof window === 'object') { + window.EventSourcePolyfill = EventSource + if (!window.EventSource) window.EventSource = EventSource + module.exports = window.EventSource +} else { + module.exports = EventSource +} + + +/***/ }), +/* 22 */ +/***/ (function(module, exports, __webpack_require__) { + +/* WEBPACK VAR INJECTION */(function(process, Buffer) {var parse = __webpack_require__(8).parse +var events = __webpack_require__(9) +var https = __webpack_require__(31) +var http = __webpack_require__(11) +var util = __webpack_require__(43) + +var httpsOptions = [ + 'pfx', 'key', 'passphrase', 'cert', 'ca', 'ciphers', + 'rejectUnauthorized', 'secureProtocol', 'servername', 'checkServerIdentity' +] + +var bom = [239, 187, 191] +var colon = 58 +var space = 32 +var lineFeed = 10 +var carriageReturn = 13 +// Beyond 256KB we could not observe any gain in performance +var maxBufferAheadAllocation = 1024 * 256 +// Headers matching the pattern should be removed when redirecting to different origin +var reUnsafeHeader = /^(cookie|authorization)$/i + +function hasBom (buf) { + return bom.every(function (charCode, index) { + return buf[index] === charCode + }) +} + +/** + * Creates a new EventSource object + * + * @param {String} url the URL to which to connect + * @param {Object} [eventSourceInitDict] extra init params. See README for details. + * @api public + **/ +function EventSource (url, eventSourceInitDict) { + var readyState = EventSource.CONNECTING + var headers = eventSourceInitDict && eventSourceInitDict.headers + var hasNewOrigin = false + Object.defineProperty(this, 'readyState', { + get: function () { + return readyState + } + }) + + Object.defineProperty(this, 'url', { + get: function () { + return url + } + }) + + var self = this + self.reconnectInterval = 1000 + self.connectionInProgress = false + + function onConnectionClosed (message) { + if (readyState === EventSource.CLOSED) return + readyState = EventSource.CONNECTING + _emit('error', new Event('error', {message: message})) + + // The url may have been changed by a temporary redirect. 
If that's the case, + // revert it now, and flag that we are no longer pointing to a new origin + if (reconnectUrl) { + url = reconnectUrl + reconnectUrl = null + hasNewOrigin = false + } + setTimeout(function () { + if (readyState !== EventSource.CONNECTING || self.connectionInProgress) { + return + } + self.connectionInProgress = true + connect() + }, self.reconnectInterval) + } + + var req + var lastEventId = '' + if (headers && headers['Last-Event-ID']) { + lastEventId = headers['Last-Event-ID'] + delete headers['Last-Event-ID'] + } + + var discardTrailingNewline = false + var data = '' + var eventName = '' + + var reconnectUrl = null + + function connect () { + var options = parse(url) + var isSecure = options.protocol === 'https:' + options.headers = { 'Cache-Control': 'no-cache', 'Accept': 'text/event-stream' } + if (lastEventId) options.headers['Last-Event-ID'] = lastEventId + if (headers) { + var reqHeaders = hasNewOrigin ? removeUnsafeHeaders(headers) : headers + for (var i in reqHeaders) { + var header = reqHeaders[i] + if (header) { + options.headers[i] = header + } + } + } + + // Legacy: this should be specified as `eventSourceInitDict.https.rejectUnauthorized`, + // but for now exists as a backwards-compatibility layer + options.rejectUnauthorized = !(eventSourceInitDict && !eventSourceInitDict.rejectUnauthorized) + + if (eventSourceInitDict && eventSourceInitDict.createConnection !== undefined) { + options.createConnection = eventSourceInitDict.createConnection + } + + // If specify http proxy, make the request to sent to the proxy server, + // and include the original url in path and Host headers + var useProxy = eventSourceInitDict && eventSourceInitDict.proxy + if (useProxy) { + var proxy = parse(eventSourceInitDict.proxy) + isSecure = proxy.protocol === 'https:' + + options.protocol = isSecure ? 'https:' : 'http:' + options.path = url + options.headers.Host = options.host + options.hostname = proxy.hostname + options.host = proxy.host + options.port = proxy.port + } + + // If https options are specified, merge them into the request options + if (eventSourceInitDict && eventSourceInitDict.https) { + for (var optName in eventSourceInitDict.https) { + if (httpsOptions.indexOf(optName) === -1) { + continue + } + + var option = eventSourceInitDict.https[optName] + if (option !== undefined) { + options[optName] = option + } + } + } + + // Pass this on to the XHR + if (eventSourceInitDict && eventSourceInitDict.withCredentials !== undefined) { + options.withCredentials = eventSourceInitDict.withCredentials + } + + req = (isSecure ? https : http).request(options, function (res) { + self.connectionInProgress = false + // Handle HTTP errors + if (res.statusCode === 500 || res.statusCode === 502 || res.statusCode === 503 || res.statusCode === 504) { + _emit('error', new Event('error', {status: res.statusCode, message: res.statusMessage})) + onConnectionClosed() + return + } + + // Handle HTTP redirects + if (res.statusCode === 301 || res.statusCode === 302 || res.statusCode === 307) { + var location = res.headers.location + if (!location) { + // Server sent redirect response without Location header. 
+ _emit('error', new Event('error', {status: res.statusCode, message: res.statusMessage})) + return + } + var prevOrigin = new URL(url).origin + var nextOrigin = new URL(location).origin + hasNewOrigin = prevOrigin !== nextOrigin + if (res.statusCode === 307) reconnectUrl = url + url = location + process.nextTick(connect) + return + } + + if (res.statusCode !== 200) { + _emit('error', new Event('error', {status: res.statusCode, message: res.statusMessage})) + return self.close() + } + + readyState = EventSource.OPEN + res.on('close', function () { + res.removeAllListeners('close') + res.removeAllListeners('end') + onConnectionClosed() + }) + + res.on('end', function () { + res.removeAllListeners('close') + res.removeAllListeners('end') + onConnectionClosed() + }) + _emit('open', new Event('open')) + + // text/event-stream parser adapted from webkit's + // Source/WebCore/page/EventSource.cpp + var buf + var newBuffer + var startingPos = 0 + var startingFieldLength = -1 + var newBufferSize = 0 + var bytesUsed = 0 + + res.on('data', function (chunk) { + if (!buf) { + buf = chunk + if (hasBom(buf)) { + buf = buf.slice(bom.length) + } + bytesUsed = buf.length + } else { + if (chunk.length > buf.length - bytesUsed) { + newBufferSize = (buf.length * 2) + chunk.length + if (newBufferSize > maxBufferAheadAllocation) { + newBufferSize = buf.length + chunk.length + maxBufferAheadAllocation + } + newBuffer = Buffer.alloc(newBufferSize) + buf.copy(newBuffer, 0, 0, bytesUsed) + buf = newBuffer + } + chunk.copy(buf, bytesUsed) + bytesUsed += chunk.length + } + + var pos = 0 + var length = bytesUsed + + while (pos < length) { + if (discardTrailingNewline) { + if (buf[pos] === lineFeed) { + ++pos + } + discardTrailingNewline = false + } + + var lineLength = -1 + var fieldLength = startingFieldLength + var c + + for (var i = startingPos; lineLength < 0 && i < length; ++i) { + c = buf[i] + if (c === colon) { + if (fieldLength < 0) { + fieldLength = i - pos + } + } else if (c === carriageReturn) { + discardTrailingNewline = true + lineLength = i - pos + } else if (c === lineFeed) { + lineLength = i - pos + } + } + + if (lineLength < 0) { + startingPos = length - pos + startingFieldLength = fieldLength + break + } else { + startingPos = 0 + startingFieldLength = -1 + } + + parseEventStreamLine(buf, pos, fieldLength, lineLength) + + pos += lineLength + 1 + } + + if (pos === length) { + buf = void 0 + bytesUsed = 0 + } else if (pos > 0) { + buf = buf.slice(pos, bytesUsed) + bytesUsed = buf.length + } + }) + }) + + req.on('error', function (err) { + self.connectionInProgress = false + onConnectionClosed(err.message) + }) + + if (req.setNoDelay) req.setNoDelay(true) + req.end() + } + + connect() + + function _emit () { + if (self.listeners(arguments[0]).length > 0) { + self.emit.apply(self, arguments) + } + } + + this._close = function () { + if (readyState === EventSource.CLOSED) return + readyState = EventSource.CLOSED + if (req.abort) req.abort() + if (req.xhr && req.xhr.abort) req.xhr.abort() + } + + function parseEventStreamLine (buf, pos, fieldLength, lineLength) { + if (lineLength === 0) { + if (data.length > 0) { + var type = eventName || 'message' + _emit(type, new MessageEvent(type, { + data: data.slice(0, -1), // remove trailing newline + lastEventId: lastEventId, + origin: new URL(url).origin + })) + data = '' + } + eventName = void 0 + } else if (fieldLength > 0) { + var noValue = fieldLength < 0 + var step = 0 + var field = buf.slice(pos, pos + (noValue ? 
lineLength : fieldLength)).toString() + + if (noValue) { + step = lineLength + } else if (buf[pos + fieldLength + 1] !== space) { + step = fieldLength + 1 + } else { + step = fieldLength + 2 + } + pos += step + + var valueLength = lineLength - step + var value = buf.slice(pos, pos + valueLength).toString() + + if (field === 'data') { + data += value + '\n' + } else if (field === 'event') { + eventName = value + } else if (field === 'id') { + lastEventId = value + } else if (field === 'retry') { + var retry = parseInt(value, 10) + if (!Number.isNaN(retry)) { + self.reconnectInterval = retry + } + } + } + } +} + +module.exports = EventSource + +util.inherits(EventSource, events.EventEmitter) +EventSource.prototype.constructor = EventSource; // make stacktraces readable + +['open', 'error', 'message'].forEach(function (method) { + Object.defineProperty(EventSource.prototype, 'on' + method, { + /** + * Returns the current listener + * + * @return {Mixed} the set function or undefined + * @api private + */ + get: function get () { + var listener = this.listeners(method)[0] + return listener ? (listener._listener ? listener._listener : listener) : undefined + }, + + /** + * Start listening for events + * + * @param {Function} listener the listener + * @return {Mixed} the set function or undefined + * @api private + */ + set: function set (listener) { + this.removeAllListeners(method) + this.addEventListener(method, listener) + } + }) +}) + +/** + * Ready states + */ +Object.defineProperty(EventSource, 'CONNECTING', {enumerable: true, value: 0}) +Object.defineProperty(EventSource, 'OPEN', {enumerable: true, value: 1}) +Object.defineProperty(EventSource, 'CLOSED', {enumerable: true, value: 2}) + +EventSource.prototype.CONNECTING = 0 +EventSource.prototype.OPEN = 1 +EventSource.prototype.CLOSED = 2 + +/** + * Closes the connection, if one is made, and sets the readyState attribute to 2 (closed) + * + * @see https://developer.mozilla.org/en-US/docs/Web/API/EventSource/close + * @api public + */ +EventSource.prototype.close = function () { + this._close() +} + +/** + * Emulates the W3C Browser based WebSocket interface using addEventListener. + * + * @param {String} type A string representing the event type to listen out for + * @param {Function} listener callback + * @see https://developer.mozilla.org/en/DOM/element.addEventListener + * @see http://dev.w3.org/html5/websockets/#the-websocket-interface + * @api public + */ +EventSource.prototype.addEventListener = function addEventListener (type, listener) { + if (typeof listener === 'function') { + // store a reference so we can return the original function again + listener._listener = listener + this.on(type, listener) + } +} + +/** + * Emulates the W3C Browser based WebSocket interface using dispatchEvent. + * + * @param {Event} event An event to be dispatched + * @see https://developer.mozilla.org/en-US/docs/Web/API/EventTarget/dispatchEvent + * @api public + */ +EventSource.prototype.dispatchEvent = function dispatchEvent (event) { + if (!event.type) { + throw new Error('UNSPECIFIED_EVENT_TYPE_ERR') + } + // if event is instance of an CustomEvent (or has 'details' property), + // send the detail object as the payload for the event + this.emit(event.type, event.detail) +} + +/** + * Emulates the W3C Browser based WebSocket interface using removeEventListener. 
+ * + * @param {String} type A string representing the event type to remove + * @param {Function} listener callback + * @see https://developer.mozilla.org/en/DOM/element.removeEventListener + * @see http://dev.w3.org/html5/websockets/#the-websocket-interface + * @api public + */ +EventSource.prototype.removeEventListener = function removeEventListener (type, listener) { + if (typeof listener === 'function') { + listener._listener = undefined + this.removeListener(type, listener) + } +} + +/** + * W3C Event + * + * @see http://www.w3.org/TR/DOM-Level-3-Events/#interface-Event + * @api private + */ +function Event (type, optionalProperties) { + Object.defineProperty(this, 'type', { writable: false, value: type, enumerable: true }) + if (optionalProperties) { + for (var f in optionalProperties) { + if (optionalProperties.hasOwnProperty(f)) { + Object.defineProperty(this, f, { writable: false, value: optionalProperties[f], enumerable: true }) + } + } + } +} + +/** + * W3C MessageEvent + * + * @see http://www.w3.org/TR/webmessaging/#event-definitions + * @api private + */ +function MessageEvent (type, eventInitDict) { + Object.defineProperty(this, 'type', { writable: false, value: type, enumerable: true }) + for (var f in eventInitDict) { + if (eventInitDict.hasOwnProperty(f)) { + Object.defineProperty(this, f, { writable: false, value: eventInitDict[f], enumerable: true }) + } + } +} + +/** + * Returns a new object of headers that does not include any authorization and cookie headers + * + * @param {Object} headers An object of headers ({[headerName]: headerValue}) + * @return {Object} a new object of headers + * @api private + */ +function removeUnsafeHeaders (headers) { + var safe = {} + for (var key in headers) { + if (reUnsafeHeader.test(key)) { + continue + } + + safe[key] = headers[key] + } + + return safe +} + +/* WEBPACK VAR INJECTION */}.call(exports, __webpack_require__(1), __webpack_require__(3).Buffer)) + +/***/ }), +/* 23 */ +/***/ (function(module, exports, __webpack_require__) { + +"use strict"; + + +exports.byteLength = byteLength +exports.toByteArray = toByteArray +exports.fromByteArray = fromByteArray + +var lookup = [] +var revLookup = [] +var Arr = typeof Uint8Array !== 'undefined' ? Uint8Array : Array + +var code = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/' +for (var i = 0, len = code.length; i < len; ++i) { + lookup[i] = code[i] + revLookup[code.charCodeAt(i)] = i +} + +// Support decoding URL-safe base64 strings, as Node.js does. +// See: https://en.wikipedia.org/wiki/Base64#URL_applications +revLookup['-'.charCodeAt(0)] = 62 +revLookup['_'.charCodeAt(0)] = 63 + +function getLens (b64) { + var len = b64.length + + if (len % 4 > 0) { + throw new Error('Invalid string. Length must be a multiple of 4') + } + + // Trim off extra bytes after placeholder bytes are found + // See: https://github.com/beatgammit/base64-js/issues/42 + var validLen = b64.indexOf('=') + if (validLen === -1) validLen = len + + var placeHoldersLen = validLen === len + ? 
0 + : 4 - (validLen % 4) + + return [validLen, placeHoldersLen] +} + +// base64 is 4/3 + up to two characters of the original data +function byteLength (b64) { + var lens = getLens(b64) + var validLen = lens[0] + var placeHoldersLen = lens[1] + return ((validLen + placeHoldersLen) * 3 / 4) - placeHoldersLen +} + +function _byteLength (b64, validLen, placeHoldersLen) { + return ((validLen + placeHoldersLen) * 3 / 4) - placeHoldersLen +} + +function toByteArray (b64) { + var tmp + var lens = getLens(b64) + var validLen = lens[0] + var placeHoldersLen = lens[1] + + var arr = new Arr(_byteLength(b64, validLen, placeHoldersLen)) + + var curByte = 0 + + // if there are placeholders, only get up to the last complete 4 chars + var len = placeHoldersLen > 0 + ? validLen - 4 + : validLen + + var i + for (i = 0; i < len; i += 4) { + tmp = + (revLookup[b64.charCodeAt(i)] << 18) | + (revLookup[b64.charCodeAt(i + 1)] << 12) | + (revLookup[b64.charCodeAt(i + 2)] << 6) | + revLookup[b64.charCodeAt(i + 3)] + arr[curByte++] = (tmp >> 16) & 0xFF + arr[curByte++] = (tmp >> 8) & 0xFF + arr[curByte++] = tmp & 0xFF + } + + if (placeHoldersLen === 2) { + tmp = + (revLookup[b64.charCodeAt(i)] << 2) | + (revLookup[b64.charCodeAt(i + 1)] >> 4) + arr[curByte++] = tmp & 0xFF + } + + if (placeHoldersLen === 1) { + tmp = + (revLookup[b64.charCodeAt(i)] << 10) | + (revLookup[b64.charCodeAt(i + 1)] << 4) | + (revLookup[b64.charCodeAt(i + 2)] >> 2) + arr[curByte++] = (tmp >> 8) & 0xFF + arr[curByte++] = tmp & 0xFF + } + + return arr +} + +function tripletToBase64 (num) { + return lookup[num >> 18 & 0x3F] + + lookup[num >> 12 & 0x3F] + + lookup[num >> 6 & 0x3F] + + lookup[num & 0x3F] +} + +function encodeChunk (uint8, start, end) { + var tmp + var output = [] + for (var i = start; i < end; i += 3) { + tmp = + ((uint8[i] << 16) & 0xFF0000) + + ((uint8[i + 1] << 8) & 0xFF00) + + (uint8[i + 2] & 0xFF) + output.push(tripletToBase64(tmp)) + } + return output.join('') +} + +function fromByteArray (uint8) { + var tmp + var len = uint8.length + var extraBytes = len % 3 // if we have 1 byte left, pad 2 bytes + var parts = [] + var maxChunkLength = 16383 // must be multiple of 3 + + // go through the array every three bytes, we'll deal with trailing stuff later + for (var i = 0, len2 = len - extraBytes; i < len2; i += maxChunkLength) { + parts.push(encodeChunk(uint8, i, (i + maxChunkLength) > len2 ? len2 : (i + maxChunkLength))) + } + + // pad the end with zeros, but make sure to not forget the extra bytes + if (extraBytes === 1) { + tmp = uint8[len - 1] + parts.push( + lookup[tmp >> 2] + + lookup[(tmp << 4) & 0x3F] + + '==' + ) + } else if (extraBytes === 2) { + tmp = (uint8[len - 2] << 8) + uint8[len - 1] + parts.push( + lookup[tmp >> 10] + + lookup[(tmp >> 4) & 0x3F] + + lookup[(tmp << 2) & 0x3F] + + '=' + ) + } + + return parts.join('') +} + + +/***/ }), +/* 24 */ +/***/ (function(module, exports) { + +/*! ieee754. BSD-3-Clause License. Feross Aboukhadijeh */ +exports.read = function (buffer, offset, isLE, mLen, nBytes) { + var e, m + var eLen = (nBytes * 8) - mLen - 1 + var eMax = (1 << eLen) - 1 + var eBias = eMax >> 1 + var nBits = -7 + var i = isLE ? (nBytes - 1) : 0 + var d = isLE ? 
-1 : 1 + var s = buffer[offset + i] + + i += d + + e = s & ((1 << (-nBits)) - 1) + s >>= (-nBits) + nBits += eLen + for (; nBits > 0; e = (e * 256) + buffer[offset + i], i += d, nBits -= 8) {} + + m = e & ((1 << (-nBits)) - 1) + e >>= (-nBits) + nBits += mLen + for (; nBits > 0; m = (m * 256) + buffer[offset + i], i += d, nBits -= 8) {} + + if (e === 0) { + e = 1 - eBias + } else if (e === eMax) { + return m ? NaN : ((s ? -1 : 1) * Infinity) + } else { + m = m + Math.pow(2, mLen) + e = e - eBias + } + return (s ? -1 : 1) * m * Math.pow(2, e - mLen) +} + +exports.write = function (buffer, value, offset, isLE, mLen, nBytes) { + var e, m, c + var eLen = (nBytes * 8) - mLen - 1 + var eMax = (1 << eLen) - 1 + var eBias = eMax >> 1 + var rt = (mLen === 23 ? Math.pow(2, -24) - Math.pow(2, -77) : 0) + var i = isLE ? 0 : (nBytes - 1) + var d = isLE ? 1 : -1 + var s = value < 0 || (value === 0 && 1 / value < 0) ? 1 : 0 + + value = Math.abs(value) + + if (isNaN(value) || value === Infinity) { + m = isNaN(value) ? 1 : 0 + e = eMax + } else { + e = Math.floor(Math.log(value) / Math.LN2) + if (value * (c = Math.pow(2, -e)) < 1) { + e-- + c *= 2 + } + if (e + eBias >= 1) { + value += rt / c + } else { + value += rt * Math.pow(2, 1 - eBias) + } + if (value * c >= 2) { + e++ + c /= 2 + } + + if (e + eBias >= eMax) { + m = 0 + e = eMax + } else if (e + eBias >= 1) { + m = ((value * c) - 1) * Math.pow(2, mLen) + e = e + eBias + } else { + m = value * Math.pow(2, eBias - 1) * Math.pow(2, mLen) + e = 0 + } + } + + for (; mLen >= 8; buffer[offset + i] = m & 0xff, i += d, m /= 256, mLen -= 8) {} + + e = (e << mLen) | m + eLen += mLen + for (; eLen > 0; buffer[offset + i] = e & 0xff, i += d, e /= 256, eLen -= 8) {} + + buffer[offset + i - d] |= s * 128 +} + + +/***/ }), +/* 25 */ +/***/ (function(module, exports, __webpack_require__) { + +/* WEBPACK VAR INJECTION */(function(module, global) {var __WEBPACK_AMD_DEFINE_RESULT__;/*! https://mths.be/punycode v1.4.1 by @mathias */ +;(function(root) { + + /** Detect free variables */ + var freeExports = typeof exports == 'object' && exports && + !exports.nodeType && exports; + var freeModule = typeof module == 'object' && module && + !module.nodeType && module; + var freeGlobal = typeof global == 'object' && global; + if ( + freeGlobal.global === freeGlobal || + freeGlobal.window === freeGlobal || + freeGlobal.self === freeGlobal + ) { + root = freeGlobal; + } + + /** + * The `punycode` object. + * @name punycode + * @type Object + */ + var punycode, + + /** Highest positive signed 32-bit float value */ + maxInt = 2147483647, // aka. 0x7FFFFFFF or 2^31-1 + + /** Bootstring parameters */ + base = 36, + tMin = 1, + tMax = 26, + skew = 38, + damp = 700, + initialBias = 72, + initialN = 128, // 0x80 + delimiter = '-', // '\x2D' + + /** Regular expressions */ + regexPunycode = /^xn--/, + regexNonASCII = /[^\x20-\x7E]/, // unprintable ASCII chars + non-ASCII chars + regexSeparators = /[\x2E\u3002\uFF0E\uFF61]/g, // RFC 3490 separators + + /** Error messages */ + errors = { + 'overflow': 'Overflow: input needs wider integers to process', + 'not-basic': 'Illegal input >= 0x80 (not a basic code point)', + 'invalid-input': 'Invalid input' + }, + + /** Convenience shortcuts */ + baseMinusTMin = base - tMin, + floor = Math.floor, + stringFromCharCode = String.fromCharCode, + + /** Temporary variable */ + key; + + /*--------------------------------------------------------------------------*/ + + /** + * A generic error utility function. 
+ * @private + * @param {String} type The error type. + * @returns {Error} Throws a `RangeError` with the applicable error message. + */ + function error(type) { + throw new RangeError(errors[type]); + } + + /** + * A generic `Array#map` utility function. + * @private + * @param {Array} array The array to iterate over. + * @param {Function} callback The function that gets called for every array + * item. + * @returns {Array} A new array of values returned by the callback function. + */ + function map(array, fn) { + var length = array.length; + var result = []; + while (length--) { + result[length] = fn(array[length]); + } + return result; + } + + /** + * A simple `Array#map`-like wrapper to work with domain name strings or email + * addresses. + * @private + * @param {String} domain The domain name or email address. + * @param {Function} callback The function that gets called for every + * character. + * @returns {Array} A new string of characters returned by the callback + * function. + */ + function mapDomain(string, fn) { + var parts = string.split('@'); + var result = ''; + if (parts.length > 1) { + // In email addresses, only the domain name should be punycoded. Leave + // the local part (i.e. everything up to `@`) intact. + result = parts[0] + '@'; + string = parts[1]; + } + // Avoid `split(regex)` for IE8 compatibility. See #17. + string = string.replace(regexSeparators, '\x2E'); + var labels = string.split('.'); + var encoded = map(labels, fn).join('.'); + return result + encoded; + } + + /** + * Creates an array containing the numeric code points of each Unicode + * character in the string. While JavaScript uses UCS-2 internally, + * this function will convert a pair of surrogate halves (each of which + * UCS-2 exposes as separate characters) into a single code point, + * matching UTF-16. + * @see `punycode.ucs2.encode` + * @see + * @memberOf punycode.ucs2 + * @name decode + * @param {String} string The Unicode input string (UCS-2). + * @returns {Array} The new array of code points. + */ + function ucs2decode(string) { + var output = [], + counter = 0, + length = string.length, + value, + extra; + while (counter < length) { + value = string.charCodeAt(counter++); + if (value >= 0xD800 && value <= 0xDBFF && counter < length) { + // high surrogate, and there is a next character + extra = string.charCodeAt(counter++); + if ((extra & 0xFC00) == 0xDC00) { // low surrogate + output.push(((value & 0x3FF) << 10) + (extra & 0x3FF) + 0x10000); + } else { + // unmatched surrogate; only append this code unit, in case the next + // code unit is the high surrogate of a surrogate pair + output.push(value); + counter--; + } + } else { + output.push(value); + } + } + return output; + } + + /** + * Creates a string based on an array of numeric code points. + * @see `punycode.ucs2.decode` + * @memberOf punycode.ucs2 + * @name encode + * @param {Array} codePoints The array of numeric code points. + * @returns {String} The new Unicode string (UCS-2). + */ + function ucs2encode(array) { + return map(array, function(value) { + var output = ''; + if (value > 0xFFFF) { + value -= 0x10000; + output += stringFromCharCode(value >>> 10 & 0x3FF | 0xD800); + value = 0xDC00 | value & 0x3FF; + } + output += stringFromCharCode(value); + return output; + }).join(''); + } + + /** + * Converts a basic code point into a digit/integer. + * @see `digitToBasic()` + * @private + * @param {Number} codePoint The basic numeric code point value. 
+ * @returns {Number} The numeric value of a basic code point (for use in + * representing integers) in the range `0` to `base - 1`, or `base` if + * the code point does not represent a value. + */ + function basicToDigit(codePoint) { + if (codePoint - 48 < 10) { + return codePoint - 22; + } + if (codePoint - 65 < 26) { + return codePoint - 65; + } + if (codePoint - 97 < 26) { + return codePoint - 97; + } + return base; + } + + /** + * Converts a digit/integer into a basic code point. + * @see `basicToDigit()` + * @private + * @param {Number} digit The numeric value of a basic code point. + * @returns {Number} The basic code point whose value (when used for + * representing integers) is `digit`, which needs to be in the range + * `0` to `base - 1`. If `flag` is non-zero, the uppercase form is + * used; else, the lowercase form is used. The behavior is undefined + * if `flag` is non-zero and `digit` has no uppercase form. + */ + function digitToBasic(digit, flag) { + // 0..25 map to ASCII a..z or A..Z + // 26..35 map to ASCII 0..9 + return digit + 22 + 75 * (digit < 26) - ((flag != 0) << 5); + } + + /** + * Bias adaptation function as per section 3.4 of RFC 3492. + * https://tools.ietf.org/html/rfc3492#section-3.4 + * @private + */ + function adapt(delta, numPoints, firstTime) { + var k = 0; + delta = firstTime ? floor(delta / damp) : delta >> 1; + delta += floor(delta / numPoints); + for (/* no initialization */; delta > baseMinusTMin * tMax >> 1; k += base) { + delta = floor(delta / baseMinusTMin); + } + return floor(k + (baseMinusTMin + 1) * delta / (delta + skew)); + } + + /** + * Converts a Punycode string of ASCII-only symbols to a string of Unicode + * symbols. + * @memberOf punycode + * @param {String} input The Punycode string of ASCII-only symbols. + * @returns {String} The resulting string of Unicode symbols. + */ + function decode(input) { + // Don't use UCS-2 + var output = [], + inputLength = input.length, + out, + i = 0, + n = initialN, + bias = initialBias, + basic, + j, + index, + oldi, + w, + k, + digit, + t, + /** Cached calculation results */ + baseMinusT; + + // Handle the basic code points: let `basic` be the number of input code + // points before the last delimiter, or `0` if there is none, then copy + // the first basic code points to the output. + + basic = input.lastIndexOf(delimiter); + if (basic < 0) { + basic = 0; + } + + for (j = 0; j < basic; ++j) { + // if it's not a basic code point + if (input.charCodeAt(j) >= 0x80) { + error('not-basic'); + } + output.push(input.charCodeAt(j)); + } + + // Main decoding loop: start just after the last delimiter if any basic code + // points were copied; start at the beginning otherwise. + + for (index = basic > 0 ? basic + 1 : 0; index < inputLength; /* no final expression */) { + + // `index` is the index of the next character to be consumed. + // Decode a generalized variable-length integer into `delta`, + // which gets added to `i`. The overflow checking is easier + // if we increase `i` as we go, then subtract off its starting + // value at the end to obtain `delta`. + for (oldi = i, w = 1, k = base; /* no condition */; k += base) { + + if (index >= inputLength) { + error('invalid-input'); + } + + digit = basicToDigit(input.charCodeAt(index++)); + + if (digit >= base || digit > floor((maxInt - i) / w)) { + error('overflow'); + } + + i += digit * w; + t = k <= bias ? tMin : (k >= bias + tMax ? 
tMax : k - bias); + + if (digit < t) { + break; + } + + baseMinusT = base - t; + if (w > floor(maxInt / baseMinusT)) { + error('overflow'); + } + + w *= baseMinusT; + + } + + out = output.length + 1; + bias = adapt(i - oldi, out, oldi == 0); + + // `i` was supposed to wrap around from `out` to `0`, + // incrementing `n` each time, so we'll fix that now: + if (floor(i / out) > maxInt - n) { + error('overflow'); + } + + n += floor(i / out); + i %= out; + + // Insert `n` at position `i` of the output + output.splice(i++, 0, n); + + } + + return ucs2encode(output); + } + + /** + * Converts a string of Unicode symbols (e.g. a domain name label) to a + * Punycode string of ASCII-only symbols. + * @memberOf punycode + * @param {String} input The string of Unicode symbols. + * @returns {String} The resulting Punycode string of ASCII-only symbols. + */ + function encode(input) { + var n, + delta, + handledCPCount, + basicLength, + bias, + j, + m, + q, + k, + t, + currentValue, + output = [], + /** `inputLength` will hold the number of code points in `input`. */ + inputLength, + /** Cached calculation results */ + handledCPCountPlusOne, + baseMinusT, + qMinusT; + + // Convert the input in UCS-2 to Unicode + input = ucs2decode(input); + + // Cache the length + inputLength = input.length; + + // Initialize the state + n = initialN; + delta = 0; + bias = initialBias; + + // Handle the basic code points + for (j = 0; j < inputLength; ++j) { + currentValue = input[j]; + if (currentValue < 0x80) { + output.push(stringFromCharCode(currentValue)); + } + } + + handledCPCount = basicLength = output.length; + + // `handledCPCount` is the number of code points that have been handled; + // `basicLength` is the number of basic code points. + + // Finish the basic string - if it is not empty - with a delimiter + if (basicLength) { + output.push(delimiter); + } + + // Main encoding loop: + while (handledCPCount < inputLength) { + + // All non-basic code points < n have been handled already. Find the next + // larger one: + for (m = maxInt, j = 0; j < inputLength; ++j) { + currentValue = input[j]; + if (currentValue >= n && currentValue < m) { + m = currentValue; + } + } + + // Increase `delta` enough to advance the decoder's state to , + // but guard against overflow + handledCPCountPlusOne = handledCPCount + 1; + if (m - n > floor((maxInt - delta) / handledCPCountPlusOne)) { + error('overflow'); + } + + delta += (m - n) * handledCPCountPlusOne; + n = m; + + for (j = 0; j < inputLength; ++j) { + currentValue = input[j]; + + if (currentValue < n && ++delta > maxInt) { + error('overflow'); + } + + if (currentValue == n) { + // Represent delta as a generalized variable-length integer + for (q = delta, k = base; /* no condition */; k += base) { + t = k <= bias ? tMin : (k >= bias + tMax ? tMax : k - bias); + if (q < t) { + break; + } + qMinusT = q - t; + baseMinusT = base - t; + output.push( + stringFromCharCode(digitToBasic(t + qMinusT % baseMinusT, 0)) + ); + q = floor(qMinusT / baseMinusT); + } + + output.push(stringFromCharCode(digitToBasic(q, 0))); + bias = adapt(delta, handledCPCountPlusOne, handledCPCount == basicLength); + delta = 0; + ++handledCPCount; + } + } + + ++delta; + ++n; + + } + return output.join(''); + } + + /** + * Converts a Punycode string representing a domain name or an email address + * to Unicode. Only the Punycoded parts of the input will be converted, i.e. + * it doesn't matter if you call it on a string that has already been + * converted to Unicode. 
+ * @memberOf punycode + * @param {String} input The Punycoded domain name or email address to + * convert to Unicode. + * @returns {String} The Unicode representation of the given Punycode + * string. + */ + function toUnicode(input) { + return mapDomain(input, function(string) { + return regexPunycode.test(string) + ? decode(string.slice(4).toLowerCase()) + : string; + }); + } + + /** + * Converts a Unicode string representing a domain name or an email address to + * Punycode. Only the non-ASCII parts of the domain name will be converted, + * i.e. it doesn't matter if you call it with a domain that's already in + * ASCII. + * @memberOf punycode + * @param {String} input The domain name or email address to convert, as a + * Unicode string. + * @returns {String} The Punycode representation of the given domain name or + * email address. + */ + function toASCII(input) { + return mapDomain(input, function(string) { + return regexNonASCII.test(string) + ? 'xn--' + encode(string) + : string; + }); + } + + /*--------------------------------------------------------------------------*/ + + /** Define the public API */ + punycode = { + /** + * A string representing the current Punycode.js version number. + * @memberOf punycode + * @type String + */ + 'version': '1.4.1', + /** + * An object of methods to convert from JavaScript's internal character + * representation (UCS-2) to Unicode code points, and back. + * @see + * @memberOf punycode + * @type Object + */ + 'ucs2': { + 'decode': ucs2decode, + 'encode': ucs2encode + }, + 'decode': decode, + 'encode': encode, + 'toASCII': toASCII, + 'toUnicode': toUnicode + }; + + /** Expose `punycode` */ + // Some AMD build optimizers, like r.js, check for specific condition patterns + // like the following: + if ( + true + ) { + !(__WEBPACK_AMD_DEFINE_RESULT__ = (function() { + return punycode; + }).call(exports, __webpack_require__, exports, module), + __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__)); + } else if (freeExports && freeModule) { + if (module.exports == freeExports) { + // in Node.js, io.js, or RingoJS v0.8.0+ + freeModule.exports = punycode; + } else { + // in Narwhal or RingoJS v0.7.0- + for (key in punycode) { + punycode.hasOwnProperty(key) && (freeExports[key] = punycode[key]); + } + } + } else { + // in Rhino or a web browser + root.punycode = punycode; + } + +}(this)); + +/* WEBPACK VAR INJECTION */}.call(exports, __webpack_require__(26)(module), __webpack_require__(0))) + +/***/ }), +/* 26 */ +/***/ (function(module, exports) { + +module.exports = function(module) { + if(!module.webpackPolyfill) { + module.deprecate = function() {}; + module.paths = []; + // module.parent = undefined by default + if(!module.children) module.children = []; + Object.defineProperty(module, "loaded", { + enumerable: true, + get: function() { + return module.l; + } + }); + Object.defineProperty(module, "id", { + enumerable: true, + get: function() { + return module.i; + } + }); + module.webpackPolyfill = 1; + } + return module; +}; + + +/***/ }), +/* 27 */ +/***/ (function(module, exports, __webpack_require__) { + +"use strict"; + + +module.exports = { + isString: function(arg) { + return typeof(arg) === 'string'; + }, + isObject: function(arg) { + return typeof(arg) === 'object' && arg !== null; + }, + isNull: function(arg) { + return arg === null; + }, + isNullOrUndefined: function(arg) { + return arg == null; + } +}; + + +/***/ }), +/* 28 */ +/***/ (function(module, exports, __webpack_require__) { + +"use 
strict"; + + +exports.decode = exports.parse = __webpack_require__(29); +exports.encode = exports.stringify = __webpack_require__(30); + + +/***/ }), +/* 29 */ +/***/ (function(module, exports, __webpack_require__) { + +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + + + +// If obj.hasOwnProperty has been overridden, then calling +// obj.hasOwnProperty(prop) will break. +// See: https://github.com/joyent/node/issues/1707 +function hasOwnProperty(obj, prop) { + return Object.prototype.hasOwnProperty.call(obj, prop); +} + +module.exports = function(qs, sep, eq, options) { + sep = sep || '&'; + eq = eq || '='; + var obj = {}; + + if (typeof qs !== 'string' || qs.length === 0) { + return obj; + } + + var regexp = /\+/g; + qs = qs.split(sep); + + var maxKeys = 1000; + if (options && typeof options.maxKeys === 'number') { + maxKeys = options.maxKeys; + } + + var len = qs.length; + // maxKeys <= 0 means that we should not limit keys count + if (maxKeys > 0 && len > maxKeys) { + len = maxKeys; + } + + for (var i = 0; i < len; ++i) { + var x = qs[i].replace(regexp, '%20'), + idx = x.indexOf(eq), + kstr, vstr, k, v; + + if (idx >= 0) { + kstr = x.substr(0, idx); + vstr = x.substr(idx + 1); + } else { + kstr = x; + vstr = ''; + } + + k = decodeURIComponent(kstr); + v = decodeURIComponent(vstr); + + if (!hasOwnProperty(obj, k)) { + obj[k] = v; + } else if (isArray(obj[k])) { + obj[k].push(v); + } else { + obj[k] = [obj[k], v]; + } + } + + return obj; +}; + +var isArray = Array.isArray || function (xs) { + return Object.prototype.toString.call(xs) === '[object Array]'; +}; + + +/***/ }), +/* 30 */ +/***/ (function(module, exports, __webpack_require__) { + +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. 
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + + + +var stringifyPrimitive = function(v) { + switch (typeof v) { + case 'string': + return v; + + case 'boolean': + return v ? 'true' : 'false'; + + case 'number': + return isFinite(v) ? v : ''; + + default: + return ''; + } +}; + +module.exports = function(obj, sep, eq, name) { + sep = sep || '&'; + eq = eq || '='; + if (obj === null) { + obj = undefined; + } + + if (typeof obj === 'object') { + return map(objectKeys(obj), function(k) { + var ks = encodeURIComponent(stringifyPrimitive(k)) + eq; + if (isArray(obj[k])) { + return map(obj[k], function(v) { + return ks + encodeURIComponent(stringifyPrimitive(v)); + }).join(sep); + } else { + return ks + encodeURIComponent(stringifyPrimitive(obj[k])); + } + }).join(sep); + + } + + if (!name) return ''; + return encodeURIComponent(stringifyPrimitive(name)) + eq + + encodeURIComponent(stringifyPrimitive(obj)); +}; + +var isArray = Array.isArray || function (xs) { + return Object.prototype.toString.call(xs) === '[object Array]'; +}; + +function map (xs, f) { + if (xs.map) return xs.map(f); + var res = []; + for (var i = 0; i < xs.length; i++) { + res.push(f(xs[i], i)); + } + return res; +} + +var objectKeys = Object.keys || function (obj) { + var res = []; + for (var key in obj) { + if (Object.prototype.hasOwnProperty.call(obj, key)) res.push(key); + } + return res; +}; + + +/***/ }), +/* 31 */ +/***/ (function(module, exports, __webpack_require__) { + +var http = __webpack_require__(11) +var url = __webpack_require__(8) + +var https = module.exports + +for (var key in http) { + if (http.hasOwnProperty(key)) https[key] = http[key] +} + +https.request = function (params, cb) { + params = validateParams(params) + return http.request.call(this, params, cb) +} + +https.get = function (params, cb) { + params = validateParams(params) + return http.get.call(this, params, cb) +} + +function validateParams (params) { + if (typeof params === 'string') { + params = url.parse(params) + } + if (!params.protocol) { + params.protocol = 'https:' + } + if (params.protocol !== 'https:') { + throw new Error('Protocol "' + params.protocol + '" not supported. 
Expected "https:"') + } + return params +} + + +/***/ }), +/* 32 */ +/***/ (function(module, exports, __webpack_require__) { + +/* WEBPACK VAR INJECTION */(function(Buffer, global, process) {var capability = __webpack_require__(12) +var inherits = __webpack_require__(2) +var response = __webpack_require__(13) +var stream = __webpack_require__(14) +var toArrayBuffer = __webpack_require__(40) + +var IncomingMessage = response.IncomingMessage +var rStates = response.readyStates + +function decideMode (preferBinary, useFetch) { + if (capability.fetch && useFetch) { + return 'fetch' + } else if (capability.mozchunkedarraybuffer) { + return 'moz-chunked-arraybuffer' + } else if (capability.msstream) { + return 'ms-stream' + } else if (capability.arraybuffer && preferBinary) { + return 'arraybuffer' + } else if (capability.vbArray && preferBinary) { + return 'text:vbarray' + } else { + return 'text' + } +} + +var ClientRequest = module.exports = function (opts) { + var self = this + stream.Writable.call(self) + + self._opts = opts + self._body = [] + self._headers = {} + if (opts.auth) + self.setHeader('Authorization', 'Basic ' + new Buffer(opts.auth).toString('base64')) + Object.keys(opts.headers).forEach(function (name) { + self.setHeader(name, opts.headers[name]) + }) + + var preferBinary + var useFetch = true + if (opts.mode === 'disable-fetch' || ('requestTimeout' in opts && !capability.abortController)) { + // If the use of XHR should be preferred. Not typically needed. + useFetch = false + preferBinary = true + } else if (opts.mode === 'prefer-streaming') { + // If streaming is a high priority but binary compatibility and + // the accuracy of the 'content-type' header aren't + preferBinary = false + } else if (opts.mode === 'allow-wrong-content-type') { + // If streaming is more important than preserving the 'content-type' header + preferBinary = !capability.overrideMimeType + } else if (!opts.mode || opts.mode === 'default' || opts.mode === 'prefer-fast') { + // Use binary if text streaming may corrupt data or the content-type header, or for speed + preferBinary = true + } else { + throw new Error('Invalid value for opts.mode') + } + self._mode = decideMode(preferBinary, useFetch) + self._fetchTimer = null + + self.on('finish', function () { + self._onFinish() + }) +} + +inherits(ClientRequest, stream.Writable) + +ClientRequest.prototype.setHeader = function (name, value) { + var self = this + var lowerName = name.toLowerCase() + // This check is not necessary, but it prevents warnings from browsers about setting unsafe + // headers. To be honest I'm not entirely sure hiding these warnings is a good thing, but + // http-browserify did it, so I will too. 
+ if (unsafeHeaders.indexOf(lowerName) !== -1) + return + + self._headers[lowerName] = { + name: name, + value: value + } +} + +ClientRequest.prototype.getHeader = function (name) { + var header = this._headers[name.toLowerCase()] + if (header) + return header.value + return null +} + +ClientRequest.prototype.removeHeader = function (name) { + var self = this + delete self._headers[name.toLowerCase()] +} + +ClientRequest.prototype._onFinish = function () { + var self = this + + if (self._destroyed) + return + var opts = self._opts + + var headersObj = self._headers + var body = null + if (opts.method !== 'GET' && opts.method !== 'HEAD') { + if (capability.arraybuffer) { + body = toArrayBuffer(Buffer.concat(self._body)) + } else if (capability.blobConstructor) { + body = new global.Blob(self._body.map(function (buffer) { + return toArrayBuffer(buffer) + }), { + type: (headersObj['content-type'] || {}).value || '' + }) + } else { + // get utf8 string + body = Buffer.concat(self._body).toString() + } + } + + // create flattened list of headers + var headersList = [] + Object.keys(headersObj).forEach(function (keyName) { + var name = headersObj[keyName].name + var value = headersObj[keyName].value + if (Array.isArray(value)) { + value.forEach(function (v) { + headersList.push([name, v]) + }) + } else { + headersList.push([name, value]) + } + }) + + if (self._mode === 'fetch') { + var signal = null + var fetchTimer = null + if (capability.abortController) { + var controller = new AbortController() + signal = controller.signal + self._fetchAbortController = controller + + if ('requestTimeout' in opts && opts.requestTimeout !== 0) { + self._fetchTimer = global.setTimeout(function () { + self.emit('requestTimeout') + if (self._fetchAbortController) + self._fetchAbortController.abort() + }, opts.requestTimeout) + } + } + + global.fetch(self._opts.url, { + method: self._opts.method, + headers: headersList, + body: body || undefined, + mode: 'cors', + credentials: opts.withCredentials ? 
'include' : 'same-origin', + signal: signal + }).then(function (response) { + self._fetchResponse = response + self._connect() + }, function (reason) { + global.clearTimeout(self._fetchTimer) + if (!self._destroyed) + self.emit('error', reason) + }) + } else { + var xhr = self._xhr = new global.XMLHttpRequest() + try { + xhr.open(self._opts.method, self._opts.url, true) + } catch (err) { + process.nextTick(function () { + self.emit('error', err) + }) + return + } + + // Can't set responseType on really old browsers + if ('responseType' in xhr) + xhr.responseType = self._mode.split(':')[0] + + if ('withCredentials' in xhr) + xhr.withCredentials = !!opts.withCredentials + + if (self._mode === 'text' && 'overrideMimeType' in xhr) + xhr.overrideMimeType('text/plain; charset=x-user-defined') + + if ('requestTimeout' in opts) { + xhr.timeout = opts.requestTimeout + xhr.ontimeout = function () { + self.emit('requestTimeout') + } + } + + headersList.forEach(function (header) { + xhr.setRequestHeader(header[0], header[1]) + }) + + self._response = null + xhr.onreadystatechange = function () { + switch (xhr.readyState) { + case rStates.LOADING: + case rStates.DONE: + self._onXHRProgress() + break + } + } + // Necessary for streaming in Firefox, since xhr.response is ONLY defined + // in onprogress, not in onreadystatechange with xhr.readyState = 3 + if (self._mode === 'moz-chunked-arraybuffer') { + xhr.onprogress = function () { + self._onXHRProgress() + } + } + + xhr.onerror = function () { + if (self._destroyed) + return + self.emit('error', new Error('XHR error')) + } + + try { + xhr.send(body) + } catch (err) { + process.nextTick(function () { + self.emit('error', err) + }) + return + } + } +} + +/** + * Checks if xhr.status is readable and non-zero, indicating no error. 
+ * Even though the spec says it should be available in readyState 3, + * accessing it throws an exception in IE8 + */ +function statusValid (xhr) { + try { + var status = xhr.status + return (status !== null && status !== 0) + } catch (e) { + return false + } +} + +ClientRequest.prototype._onXHRProgress = function () { + var self = this + + if (!statusValid(self._xhr) || self._destroyed) + return + + if (!self._response) + self._connect() + + self._response._onXHRProgress() +} + +ClientRequest.prototype._connect = function () { + var self = this + + if (self._destroyed) + return + + self._response = new IncomingMessage(self._xhr, self._fetchResponse, self._mode, self._fetchTimer) + self._response.on('error', function(err) { + self.emit('error', err) + }) + + self.emit('response', self._response) +} + +ClientRequest.prototype._write = function (chunk, encoding, cb) { + var self = this + + self._body.push(chunk) + cb() +} + +ClientRequest.prototype.abort = ClientRequest.prototype.destroy = function () { + var self = this + self._destroyed = true + global.clearTimeout(self._fetchTimer) + if (self._response) + self._response._destroyed = true + if (self._xhr) + self._xhr.abort() + else if (self._fetchAbortController) + self._fetchAbortController.abort() +} + +ClientRequest.prototype.end = function (data, encoding, cb) { + var self = this + if (typeof data === 'function') { + cb = data + data = undefined + } + + stream.Writable.prototype.end.call(self, data, encoding, cb) +} + +ClientRequest.prototype.flushHeaders = function () {} +ClientRequest.prototype.setTimeout = function () {} +ClientRequest.prototype.setNoDelay = function () {} +ClientRequest.prototype.setSocketKeepAlive = function () {} + +// Taken from http://www.w3.org/TR/XMLHttpRequest/#the-setrequestheader%28%29-method +var unsafeHeaders = [ + 'accept-charset', + 'accept-encoding', + 'access-control-request-headers', + 'access-control-request-method', + 'connection', + 'content-length', + 'cookie', + 'cookie2', + 'date', + 'dnt', + 'expect', + 'host', + 'keep-alive', + 'origin', + 'referer', + 'te', + 'trailer', + 'transfer-encoding', + 'upgrade', + 'via' +] + +/* WEBPACK VAR INJECTION */}.call(exports, __webpack_require__(3).Buffer, __webpack_require__(0), __webpack_require__(1))) + +/***/ }), +/* 33 */ +/***/ (function(module, exports) { + +/* (ignored) */ + +/***/ }), +/* 34 */ +/***/ (function(module, exports, __webpack_require__) { + +"use strict"; + + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + +var Buffer = __webpack_require__(7).Buffer; +var util = __webpack_require__(35); + +function copyBuffer(src, target, offset) { + src.copy(target, offset); +} + +module.exports = function () { + function BufferList() { + _classCallCheck(this, BufferList); + + this.head = null; + this.tail = null; + this.length = 0; + } + + BufferList.prototype.push = function push(v) { + var entry = { data: v, next: null }; + if (this.length > 0) this.tail.next = entry;else this.head = entry; + this.tail = entry; + ++this.length; + }; + + BufferList.prototype.unshift = function unshift(v) { + var entry = { data: v, next: this.head }; + if (this.length === 0) this.tail = entry; + this.head = entry; + ++this.length; + }; + + BufferList.prototype.shift = function shift() { + if (this.length === 0) return; + var ret = this.head.data; + if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next; + --this.length; + return 
ret; + }; + + BufferList.prototype.clear = function clear() { + this.head = this.tail = null; + this.length = 0; + }; + + BufferList.prototype.join = function join(s) { + if (this.length === 0) return ''; + var p = this.head; + var ret = '' + p.data; + while (p = p.next) { + ret += s + p.data; + }return ret; + }; + + BufferList.prototype.concat = function concat(n) { + if (this.length === 0) return Buffer.alloc(0); + if (this.length === 1) return this.head.data; + var ret = Buffer.allocUnsafe(n >>> 0); + var p = this.head; + var i = 0; + while (p) { + copyBuffer(p.data, ret, i); + i += p.data.length; + p = p.next; + } + return ret; + }; + + return BufferList; +}(); + +if (util && util.inspect && util.inspect.custom) { + module.exports.prototype[util.inspect.custom] = function () { + var obj = util.inspect({ length: this.length }); + return this.constructor.name + ' ' + obj; + }; +} + +/***/ }), +/* 35 */ +/***/ (function(module, exports) { + +/* (ignored) */ + +/***/ }), +/* 36 */ +/***/ (function(module, exports, __webpack_require__) { + +/* WEBPACK VAR INJECTION */(function(global) {var scope = (typeof global !== "undefined" && global) || + (typeof self !== "undefined" && self) || + window; +var apply = Function.prototype.apply; + +// DOM APIs, for completeness + +exports.setTimeout = function() { + return new Timeout(apply.call(setTimeout, scope, arguments), clearTimeout); +}; +exports.setInterval = function() { + return new Timeout(apply.call(setInterval, scope, arguments), clearInterval); +}; +exports.clearTimeout = +exports.clearInterval = function(timeout) { + if (timeout) { + timeout.close(); + } +}; + +function Timeout(id, clearFn) { + this._id = id; + this._clearFn = clearFn; +} +Timeout.prototype.unref = Timeout.prototype.ref = function() {}; +Timeout.prototype.close = function() { + this._clearFn.call(scope, this._id); +}; + +// Does not start the time, just sets up the members needed. +exports.enroll = function(item, msecs) { + clearTimeout(item._idleTimeoutId); + item._idleTimeout = msecs; +}; + +exports.unenroll = function(item) { + clearTimeout(item._idleTimeoutId); + item._idleTimeout = -1; +}; + +exports._unrefActive = exports.active = function(item) { + clearTimeout(item._idleTimeoutId); + + var msecs = item._idleTimeout; + if (msecs >= 0) { + item._idleTimeoutId = setTimeout(function onTimeout() { + if (item._onTimeout) + item._onTimeout(); + }, msecs); + } +}; + +// setimmediate attaches itself to the global object +__webpack_require__(37); +// On some exotic environments, it's not clear which object `setimmediate` was +// able to install onto. Search each possibility in the same order as the +// `setimmediate` library. 
+exports.setImmediate = (typeof self !== "undefined" && self.setImmediate) || + (typeof global !== "undefined" && global.setImmediate) || + (this && this.setImmediate); +exports.clearImmediate = (typeof self !== "undefined" && self.clearImmediate) || + (typeof global !== "undefined" && global.clearImmediate) || + (this && this.clearImmediate); + +/* WEBPACK VAR INJECTION */}.call(exports, __webpack_require__(0))) + +/***/ }), +/* 37 */ +/***/ (function(module, exports, __webpack_require__) { + +/* WEBPACK VAR INJECTION */(function(global, process) {(function (global, undefined) { + "use strict"; + + if (global.setImmediate) { + return; + } + + var nextHandle = 1; // Spec says greater than zero + var tasksByHandle = {}; + var currentlyRunningATask = false; + var doc = global.document; + var registerImmediate; + + function setImmediate(callback) { + // Callback can either be a function or a string + if (typeof callback !== "function") { + callback = new Function("" + callback); + } + // Copy function arguments + var args = new Array(arguments.length - 1); + for (var i = 0; i < args.length; i++) { + args[i] = arguments[i + 1]; + } + // Store and register the task + var task = { callback: callback, args: args }; + tasksByHandle[nextHandle] = task; + registerImmediate(nextHandle); + return nextHandle++; + } + + function clearImmediate(handle) { + delete tasksByHandle[handle]; + } + + function run(task) { + var callback = task.callback; + var args = task.args; + switch (args.length) { + case 0: + callback(); + break; + case 1: + callback(args[0]); + break; + case 2: + callback(args[0], args[1]); + break; + case 3: + callback(args[0], args[1], args[2]); + break; + default: + callback.apply(undefined, args); + break; + } + } + + function runIfPresent(handle) { + // From the spec: "Wait until any invocations of this algorithm started before this one have completed." + // So if we're currently running a task, we'll need to delay this invocation. + if (currentlyRunningATask) { + // Delay by doing a setTimeout. setImmediate was tried instead, but in Firefox 7 it generated a + // "too much recursion" error. + setTimeout(runIfPresent, 0, handle); + } else { + var task = tasksByHandle[handle]; + if (task) { + currentlyRunningATask = true; + try { + run(task); + } finally { + clearImmediate(handle); + currentlyRunningATask = false; + } + } + } + } + + function installNextTickImplementation() { + registerImmediate = function(handle) { + process.nextTick(function () { runIfPresent(handle); }); + }; + } + + function canUsePostMessage() { + // The test against `importScripts` prevents this implementation from being installed inside a web worker, + // where `global.postMessage` means something completely different and can't be used for this purpose. 
+ if (global.postMessage && !global.importScripts) { + var postMessageIsAsynchronous = true; + var oldOnMessage = global.onmessage; + global.onmessage = function() { + postMessageIsAsynchronous = false; + }; + global.postMessage("", "*"); + global.onmessage = oldOnMessage; + return postMessageIsAsynchronous; + } + } + + function installPostMessageImplementation() { + // Installs an event handler on `global` for the `message` event: see + // * https://developer.mozilla.org/en/DOM/window.postMessage + // * http://www.whatwg.org/specs/web-apps/current-work/multipage/comms.html#crossDocumentMessages + + var messagePrefix = "setImmediate$" + Math.random() + "$"; + var onGlobalMessage = function(event) { + if (event.source === global && + typeof event.data === "string" && + event.data.indexOf(messagePrefix) === 0) { + runIfPresent(+event.data.slice(messagePrefix.length)); + } + }; + + if (global.addEventListener) { + global.addEventListener("message", onGlobalMessage, false); + } else { + global.attachEvent("onmessage", onGlobalMessage); + } + + registerImmediate = function(handle) { + global.postMessage(messagePrefix + handle, "*"); + }; + } + + function installMessageChannelImplementation() { + var channel = new MessageChannel(); + channel.port1.onmessage = function(event) { + var handle = event.data; + runIfPresent(handle); + }; + + registerImmediate = function(handle) { + channel.port2.postMessage(handle); + }; + } + + function installReadyStateChangeImplementation() { + var html = doc.documentElement; + registerImmediate = function(handle) { + // Create a + + + diff --git a/node_modules/eventsource/example/sse-client.js b/node_modules/eventsource/example/sse-client.js new file mode 100644 index 0000000..72c49a9 --- /dev/null +++ b/node_modules/eventsource/example/sse-client.js @@ -0,0 +1,5 @@ +var EventSource = require('..') +var es = new EventSource('http://localhost:8080/sse') +es.addEventListener('server-time', function (e) { + console.log(e.data) +}) diff --git a/node_modules/eventsource/example/sse-server.js b/node_modules/eventsource/example/sse-server.js new file mode 100644 index 0000000..034e3b4 --- /dev/null +++ b/node_modules/eventsource/example/sse-server.js @@ -0,0 +1,29 @@ +const express = require('express') +const serveStatic = require('serve-static') +const SseStream = require('ssestream') + +const app = express() +app.use(serveStatic(__dirname)) +app.get('/sse', (req, res) => { + console.log('new connection') + + const sseStream = new SseStream(req) + sseStream.pipe(res) + const pusher = setInterval(() => { + sseStream.write({ + event: 'server-time', + data: new Date().toTimeString() + }) + }, 1000) + + res.on('close', () => { + console.log('lost connection') + clearInterval(pusher) + sseStream.unpipe(res) + }) +}) + +app.listen(8080, (err) => { + if (err) throw err + console.log('server ready on http://localhost:8080') +}) diff --git a/node_modules/eventsource/lib/eventsource-polyfill.js b/node_modules/eventsource/lib/eventsource-polyfill.js new file mode 100644 index 0000000..6ed4396 --- /dev/null +++ b/node_modules/eventsource/lib/eventsource-polyfill.js @@ -0,0 +1,9 @@ +var EventSource = require('./eventsource') + +if (typeof window === 'object') { + window.EventSourcePolyfill = EventSource + if (!window.EventSource) window.EventSource = EventSource + module.exports = window.EventSource +} else { + module.exports = EventSource +} diff --git a/node_modules/eventsource/lib/eventsource.js b/node_modules/eventsource/lib/eventsource.js new file mode 100644 index 
0000000..bd401a1 --- /dev/null +++ b/node_modules/eventsource/lib/eventsource.js @@ -0,0 +1,495 @@ +var parse = require('url').parse +var events = require('events') +var https = require('https') +var http = require('http') +var util = require('util') + +var httpsOptions = [ + 'pfx', 'key', 'passphrase', 'cert', 'ca', 'ciphers', + 'rejectUnauthorized', 'secureProtocol', 'servername', 'checkServerIdentity' +] + +var bom = [239, 187, 191] +var colon = 58 +var space = 32 +var lineFeed = 10 +var carriageReturn = 13 +// Beyond 256KB we could not observe any gain in performance +var maxBufferAheadAllocation = 1024 * 256 +// Headers matching the pattern should be removed when redirecting to different origin +var reUnsafeHeader = /^(cookie|authorization)$/i + +function hasBom (buf) { + return bom.every(function (charCode, index) { + return buf[index] === charCode + }) +} + +/** + * Creates a new EventSource object + * + * @param {String} url the URL to which to connect + * @param {Object} [eventSourceInitDict] extra init params. See README for details. + * @api public + **/ +function EventSource (url, eventSourceInitDict) { + var readyState = EventSource.CONNECTING + var headers = eventSourceInitDict && eventSourceInitDict.headers + var hasNewOrigin = false + Object.defineProperty(this, 'readyState', { + get: function () { + return readyState + } + }) + + Object.defineProperty(this, 'url', { + get: function () { + return url + } + }) + + var self = this + self.reconnectInterval = 1000 + self.connectionInProgress = false + + function onConnectionClosed (message) { + if (readyState === EventSource.CLOSED) return + readyState = EventSource.CONNECTING + _emit('error', new Event('error', {message: message})) + + // The url may have been changed by a temporary redirect. If that's the case, + // revert it now, and flag that we are no longer pointing to a new origin + if (reconnectUrl) { + url = reconnectUrl + reconnectUrl = null + hasNewOrigin = false + } + setTimeout(function () { + if (readyState !== EventSource.CONNECTING || self.connectionInProgress) { + return + } + self.connectionInProgress = true + connect() + }, self.reconnectInterval) + } + + var req + var lastEventId = '' + if (headers && headers['Last-Event-ID']) { + lastEventId = headers['Last-Event-ID'] + delete headers['Last-Event-ID'] + } + + var discardTrailingNewline = false + var data = '' + var eventName = '' + + var reconnectUrl = null + + function connect () { + var options = parse(url) + var isSecure = options.protocol === 'https:' + options.headers = { 'Cache-Control': 'no-cache', 'Accept': 'text/event-stream' } + if (lastEventId) options.headers['Last-Event-ID'] = lastEventId + if (headers) { + var reqHeaders = hasNewOrigin ? 
removeUnsafeHeaders(headers) : headers + for (var i in reqHeaders) { + var header = reqHeaders[i] + if (header) { + options.headers[i] = header + } + } + } + + // Legacy: this should be specified as `eventSourceInitDict.https.rejectUnauthorized`, + // but for now exists as a backwards-compatibility layer + options.rejectUnauthorized = !(eventSourceInitDict && !eventSourceInitDict.rejectUnauthorized) + + if (eventSourceInitDict && eventSourceInitDict.createConnection !== undefined) { + options.createConnection = eventSourceInitDict.createConnection + } + + // If specify http proxy, make the request to sent to the proxy server, + // and include the original url in path and Host headers + var useProxy = eventSourceInitDict && eventSourceInitDict.proxy + if (useProxy) { + var proxy = parse(eventSourceInitDict.proxy) + isSecure = proxy.protocol === 'https:' + + options.protocol = isSecure ? 'https:' : 'http:' + options.path = url + options.headers.Host = options.host + options.hostname = proxy.hostname + options.host = proxy.host + options.port = proxy.port + } + + // If https options are specified, merge them into the request options + if (eventSourceInitDict && eventSourceInitDict.https) { + for (var optName in eventSourceInitDict.https) { + if (httpsOptions.indexOf(optName) === -1) { + continue + } + + var option = eventSourceInitDict.https[optName] + if (option !== undefined) { + options[optName] = option + } + } + } + + // Pass this on to the XHR + if (eventSourceInitDict && eventSourceInitDict.withCredentials !== undefined) { + options.withCredentials = eventSourceInitDict.withCredentials + } + + req = (isSecure ? https : http).request(options, function (res) { + self.connectionInProgress = false + // Handle HTTP errors + if (res.statusCode === 500 || res.statusCode === 502 || res.statusCode === 503 || res.statusCode === 504) { + _emit('error', new Event('error', {status: res.statusCode, message: res.statusMessage})) + onConnectionClosed() + return + } + + // Handle HTTP redirects + if (res.statusCode === 301 || res.statusCode === 302 || res.statusCode === 307) { + var location = res.headers.location + if (!location) { + // Server sent redirect response without Location header. 
+ _emit('error', new Event('error', {status: res.statusCode, message: res.statusMessage})) + return + } + var prevOrigin = new URL(url).origin + var nextOrigin = new URL(location).origin + hasNewOrigin = prevOrigin !== nextOrigin + if (res.statusCode === 307) reconnectUrl = url + url = location + process.nextTick(connect) + return + } + + if (res.statusCode !== 200) { + _emit('error', new Event('error', {status: res.statusCode, message: res.statusMessage})) + return self.close() + } + + readyState = EventSource.OPEN + res.on('close', function () { + res.removeAllListeners('close') + res.removeAllListeners('end') + onConnectionClosed() + }) + + res.on('end', function () { + res.removeAllListeners('close') + res.removeAllListeners('end') + onConnectionClosed() + }) + _emit('open', new Event('open')) + + // text/event-stream parser adapted from webkit's + // Source/WebCore/page/EventSource.cpp + var buf + var newBuffer + var startingPos = 0 + var startingFieldLength = -1 + var newBufferSize = 0 + var bytesUsed = 0 + + res.on('data', function (chunk) { + if (!buf) { + buf = chunk + if (hasBom(buf)) { + buf = buf.slice(bom.length) + } + bytesUsed = buf.length + } else { + if (chunk.length > buf.length - bytesUsed) { + newBufferSize = (buf.length * 2) + chunk.length + if (newBufferSize > maxBufferAheadAllocation) { + newBufferSize = buf.length + chunk.length + maxBufferAheadAllocation + } + newBuffer = Buffer.alloc(newBufferSize) + buf.copy(newBuffer, 0, 0, bytesUsed) + buf = newBuffer + } + chunk.copy(buf, bytesUsed) + bytesUsed += chunk.length + } + + var pos = 0 + var length = bytesUsed + + while (pos < length) { + if (discardTrailingNewline) { + if (buf[pos] === lineFeed) { + ++pos + } + discardTrailingNewline = false + } + + var lineLength = -1 + var fieldLength = startingFieldLength + var c + + for (var i = startingPos; lineLength < 0 && i < length; ++i) { + c = buf[i] + if (c === colon) { + if (fieldLength < 0) { + fieldLength = i - pos + } + } else if (c === carriageReturn) { + discardTrailingNewline = true + lineLength = i - pos + } else if (c === lineFeed) { + lineLength = i - pos + } + } + + if (lineLength < 0) { + startingPos = length - pos + startingFieldLength = fieldLength + break + } else { + startingPos = 0 + startingFieldLength = -1 + } + + parseEventStreamLine(buf, pos, fieldLength, lineLength) + + pos += lineLength + 1 + } + + if (pos === length) { + buf = void 0 + bytesUsed = 0 + } else if (pos > 0) { + buf = buf.slice(pos, bytesUsed) + bytesUsed = buf.length + } + }) + }) + + req.on('error', function (err) { + self.connectionInProgress = false + onConnectionClosed(err.message) + }) + + if (req.setNoDelay) req.setNoDelay(true) + req.end() + } + + connect() + + function _emit () { + if (self.listeners(arguments[0]).length > 0) { + self.emit.apply(self, arguments) + } + } + + this._close = function () { + if (readyState === EventSource.CLOSED) return + readyState = EventSource.CLOSED + if (req.abort) req.abort() + if (req.xhr && req.xhr.abort) req.xhr.abort() + } + + function parseEventStreamLine (buf, pos, fieldLength, lineLength) { + if (lineLength === 0) { + if (data.length > 0) { + var type = eventName || 'message' + _emit(type, new MessageEvent(type, { + data: data.slice(0, -1), // remove trailing newline + lastEventId: lastEventId, + origin: new URL(url).origin + })) + data = '' + } + eventName = void 0 + } else if (fieldLength > 0) { + var noValue = fieldLength < 0 + var step = 0 + var field = buf.slice(pos, pos + (noValue ? 
lineLength : fieldLength)).toString() + + if (noValue) { + step = lineLength + } else if (buf[pos + fieldLength + 1] !== space) { + step = fieldLength + 1 + } else { + step = fieldLength + 2 + } + pos += step + + var valueLength = lineLength - step + var value = buf.slice(pos, pos + valueLength).toString() + + if (field === 'data') { + data += value + '\n' + } else if (field === 'event') { + eventName = value + } else if (field === 'id') { + lastEventId = value + } else if (field === 'retry') { + var retry = parseInt(value, 10) + if (!Number.isNaN(retry)) { + self.reconnectInterval = retry + } + } + } + } +} + +module.exports = EventSource + +util.inherits(EventSource, events.EventEmitter) +EventSource.prototype.constructor = EventSource; // make stacktraces readable + +['open', 'error', 'message'].forEach(function (method) { + Object.defineProperty(EventSource.prototype, 'on' + method, { + /** + * Returns the current listener + * + * @return {Mixed} the set function or undefined + * @api private + */ + get: function get () { + var listener = this.listeners(method)[0] + return listener ? (listener._listener ? listener._listener : listener) : undefined + }, + + /** + * Start listening for events + * + * @param {Function} listener the listener + * @return {Mixed} the set function or undefined + * @api private + */ + set: function set (listener) { + this.removeAllListeners(method) + this.addEventListener(method, listener) + } + }) +}) + +/** + * Ready states + */ +Object.defineProperty(EventSource, 'CONNECTING', {enumerable: true, value: 0}) +Object.defineProperty(EventSource, 'OPEN', {enumerable: true, value: 1}) +Object.defineProperty(EventSource, 'CLOSED', {enumerable: true, value: 2}) + +EventSource.prototype.CONNECTING = 0 +EventSource.prototype.OPEN = 1 +EventSource.prototype.CLOSED = 2 + +/** + * Closes the connection, if one is made, and sets the readyState attribute to 2 (closed) + * + * @see https://developer.mozilla.org/en-US/docs/Web/API/EventSource/close + * @api public + */ +EventSource.prototype.close = function () { + this._close() +} + +/** + * Emulates the W3C Browser based WebSocket interface using addEventListener. + * + * @param {String} type A string representing the event type to listen out for + * @param {Function} listener callback + * @see https://developer.mozilla.org/en/DOM/element.addEventListener + * @see http://dev.w3.org/html5/websockets/#the-websocket-interface + * @api public + */ +EventSource.prototype.addEventListener = function addEventListener (type, listener) { + if (typeof listener === 'function') { + // store a reference so we can return the original function again + listener._listener = listener + this.on(type, listener) + } +} + +/** + * Emulates the W3C Browser based WebSocket interface using dispatchEvent. + * + * @param {Event} event An event to be dispatched + * @see https://developer.mozilla.org/en-US/docs/Web/API/EventTarget/dispatchEvent + * @api public + */ +EventSource.prototype.dispatchEvent = function dispatchEvent (event) { + if (!event.type) { + throw new Error('UNSPECIFIED_EVENT_TYPE_ERR') + } + // if event is instance of an CustomEvent (or has 'details' property), + // send the detail object as the payload for the event + this.emit(event.type, event.detail) +} + +/** + * Emulates the W3C Browser based WebSocket interface using removeEventListener. 
+ * + * @param {String} type A string representing the event type to remove + * @param {Function} listener callback + * @see https://developer.mozilla.org/en/DOM/element.removeEventListener + * @see http://dev.w3.org/html5/websockets/#the-websocket-interface + * @api public + */ +EventSource.prototype.removeEventListener = function removeEventListener (type, listener) { + if (typeof listener === 'function') { + listener._listener = undefined + this.removeListener(type, listener) + } +} + +/** + * W3C Event + * + * @see http://www.w3.org/TR/DOM-Level-3-Events/#interface-Event + * @api private + */ +function Event (type, optionalProperties) { + Object.defineProperty(this, 'type', { writable: false, value: type, enumerable: true }) + if (optionalProperties) { + for (var f in optionalProperties) { + if (optionalProperties.hasOwnProperty(f)) { + Object.defineProperty(this, f, { writable: false, value: optionalProperties[f], enumerable: true }) + } + } + } +} + +/** + * W3C MessageEvent + * + * @see http://www.w3.org/TR/webmessaging/#event-definitions + * @api private + */ +function MessageEvent (type, eventInitDict) { + Object.defineProperty(this, 'type', { writable: false, value: type, enumerable: true }) + for (var f in eventInitDict) { + if (eventInitDict.hasOwnProperty(f)) { + Object.defineProperty(this, f, { writable: false, value: eventInitDict[f], enumerable: true }) + } + } +} + +/** + * Returns a new object of headers that does not include any authorization and cookie headers + * + * @param {Object} headers An object of headers ({[headerName]: headerValue}) + * @return {Object} a new object of headers + * @api private + */ +function removeUnsafeHeaders (headers) { + var safe = {} + for (var key in headers) { + if (reUnsafeHeader.test(key)) { + continue + } + + safe[key] = headers[key] + } + + return safe +} diff --git a/node_modules/eventsource/package.json b/node_modules/eventsource/package.json new file mode 100644 index 0000000..ad90321 --- /dev/null +++ b/node_modules/eventsource/package.json @@ -0,0 +1,60 @@ +{ + "name": "eventsource", + "version": "2.0.2", + "description": "W3C compliant EventSource client for Node.js and browser (polyfill)", + "keywords": [ + "eventsource", + "http", + "streaming", + "sse", + "polyfill" + ], + "homepage": "http://github.com/EventSource/eventsource", + "author": "Aslak Hellesøy ", + "repository": { + "type": "git", + "url": "git://github.com/EventSource/eventsource.git" + }, + "bugs": { + "url": "http://github.com/EventSource/eventsource/issues" + }, + "directories": { + "lib": "./lib" + }, + "main": "./lib/eventsource", + "license": "MIT", + "licenses": [ + { + "type": "MIT", + "url": "http://github.com/EventSource/eventsource/raw/master/LICENSE" + } + ], + "devDependencies": { + "buffer-from": "^1.1.1", + "express": "^4.15.3", + "mocha": "^3.5.3", + "nyc": "^11.2.1", + "serve-static": "^1.12.3", + "ssestream": "^1.0.0", + "standard": "^10.0.2", + "webpack": "^3.5.6" + }, + "scripts": { + "test": "mocha --reporter spec && standard", + "polyfill": "webpack lib/eventsource-polyfill.js example/eventsource-polyfill.js", + "postpublish": "git push && git push --tags", + "coverage": "nyc --reporter=html --reporter=text _mocha --reporter spec" + }, + "engines": { + "node": ">=12.0.0" + }, + "dependencies": {}, + "standard": { + "ignore": [ + "example/eventsource-polyfill.js" + ], + "globals": [ + "URL" + ] + } +} diff --git a/node_modules/fast-safe-stringify/.travis.yml b/node_modules/fast-safe-stringify/.travis.yml new file mode 100644 index 
0000000..2b06d25 --- /dev/null +++ b/node_modules/fast-safe-stringify/.travis.yml @@ -0,0 +1,8 @@ +language: node_js +sudo: false +node_js: +- '4' +- '6' +- '8' +- '9' +- '10' diff --git a/node_modules/fast-safe-stringify/CHANGELOG.md b/node_modules/fast-safe-stringify/CHANGELOG.md new file mode 100644 index 0000000..55f2d08 --- /dev/null +++ b/node_modules/fast-safe-stringify/CHANGELOG.md @@ -0,0 +1,17 @@ +# Changelog + +## v.2.0.0 + +Features + +- Added stable-stringify (see documentation) +- Support replacer +- Support spacer +- toJSON support without forceDecirc property +- Improved performance + +Breaking changes + +- Manipulating the input value in a `toJSON` function is not possible anymore in + all cases (see documentation) +- Dropped support for e.g. IE8 and Node.js < 4 diff --git a/node_modules/fast-safe-stringify/LICENSE b/node_modules/fast-safe-stringify/LICENSE new file mode 100644 index 0000000..d310c2d --- /dev/null +++ b/node_modules/fast-safe-stringify/LICENSE @@ -0,0 +1,23 @@ +The MIT License (MIT) + +Copyright (c) 2016 David Mark Clements +Copyright (c) 2017 David Mark Clements & Matteo Collina +Copyright (c) 2018 David Mark Clements, Matteo Collina & Ruben Bridgewater + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
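The CHANGELOG above lists replacer, spacer, and stable-stringify support; the snippet below is a minimal illustrative sketch of how those options combine (it is not part of the vendored files, and the object and values are made up):

```js
const safeStringify = require('fast-safe-stringify')

const user = { name: 'Tyrion', house: 'Lannister' }
user.self = user // circular reference

// Stable (key-sorted) output, pretty-printed with a 2-space spacer.
// The replacer receives the literal string '[Circular]' for cycles
// and drops that edge by returning undefined.
const json = safeStringify.stableStringify(
  user,
  (key, value) => (value === '[Circular]' ? undefined : value),
  2,
  { depthLimit: 10, edgesLimit: 100 }
)
console.log(json) // keys sorted, the circular "self" edge removed by the replacer
```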
diff --git a/node_modules/fast-safe-stringify/benchmark.js b/node_modules/fast-safe-stringify/benchmark.js new file mode 100644 index 0000000..7ba5e9f --- /dev/null +++ b/node_modules/fast-safe-stringify/benchmark.js @@ -0,0 +1,137 @@ +const Benchmark = require('benchmark') +const suite = new Benchmark.Suite() +const { inspect } = require('util') +const jsonStringifySafe = require('json-stringify-safe') +const fastSafeStringify = require('./') + +const array = new Array(10).fill(0).map((_, i) => i) +const obj = { foo: array } +const circ = JSON.parse(JSON.stringify(obj)) +circ.o = { obj: circ, array } +const circGetters = JSON.parse(JSON.stringify(obj)) +Object.assign(circGetters, { get o () { return { obj: circGetters, array } } }) + +const deep = require('./package.json') +deep.deep = JSON.parse(JSON.stringify(deep)) +deep.deep.deep = JSON.parse(JSON.stringify(deep)) +deep.deep.deep.deep = JSON.parse(JSON.stringify(deep)) +deep.array = array + +const deepCirc = JSON.parse(JSON.stringify(deep)) +deepCirc.deep.deep.deep.circ = deepCirc +deepCirc.deep.deep.circ = deepCirc +deepCirc.deep.circ = deepCirc +deepCirc.array = array + +const deepCircGetters = JSON.parse(JSON.stringify(deep)) +for (let i = 0; i < 10; i++) { + deepCircGetters[i.toString()] = { + deep: { + deep: { + get circ () { return deep.deep }, + deep: { get circ () { return deep.deep.deep } } + }, + get circ () { return deep } + }, + get array () { return array } + } +} + +const deepCircNonCongifurableGetters = JSON.parse(JSON.stringify(deep)) +Object.defineProperty(deepCircNonCongifurableGetters.deep.deep.deep, 'circ', { + get: () => deepCircNonCongifurableGetters, + enumerable: true, + configurable: false +}) +Object.defineProperty(deepCircNonCongifurableGetters.deep.deep, 'circ', { + get: () => deepCircNonCongifurableGetters, + enumerable: true, + configurable: false +}) +Object.defineProperty(deepCircNonCongifurableGetters.deep, 'circ', { + get: () => deepCircNonCongifurableGetters, + enumerable: true, + configurable: false +}) +Object.defineProperty(deepCircNonCongifurableGetters, 'array', { + get: () => array, + enumerable: true, + configurable: false +}) + +suite.add('util.inspect: simple object ', function () { + inspect(obj, { showHidden: false, depth: null }) +}) +suite.add('util.inspect: circular ', function () { + inspect(circ, { showHidden: false, depth: null }) +}) +suite.add('util.inspect: circular getters ', function () { + inspect(circGetters, { showHidden: false, depth: null }) +}) +suite.add('util.inspect: deep ', function () { + inspect(deep, { showHidden: false, depth: null }) +}) +suite.add('util.inspect: deep circular ', function () { + inspect(deepCirc, { showHidden: false, depth: null }) +}) +suite.add('util.inspect: large deep circular getters ', function () { + inspect(deepCircGetters, { showHidden: false, depth: null }) +}) +suite.add('util.inspect: deep non-conf circular getters', function () { + inspect(deepCircNonCongifurableGetters, { showHidden: false, depth: null }) +}) + +suite.add('\njson-stringify-safe: simple object ', function () { + jsonStringifySafe(obj) +}) +suite.add('json-stringify-safe: circular ', function () { + jsonStringifySafe(circ) +}) +suite.add('json-stringify-safe: circular getters ', function () { + jsonStringifySafe(circGetters) +}) +suite.add('json-stringify-safe: deep ', function () { + jsonStringifySafe(deep) +}) +suite.add('json-stringify-safe: deep circular ', function () { + jsonStringifySafe(deepCirc) +}) +suite.add('json-stringify-safe: large deep circular getters 
', function () { + jsonStringifySafe(deepCircGetters) +}) +suite.add('json-stringify-safe: deep non-conf circular getters', function () { + jsonStringifySafe(deepCircNonCongifurableGetters) +}) + +suite.add('\nfast-safe-stringify: simple object ', function () { + fastSafeStringify(obj) +}) +suite.add('fast-safe-stringify: circular ', function () { + fastSafeStringify(circ) +}) +suite.add('fast-safe-stringify: circular getters ', function () { + fastSafeStringify(circGetters) +}) +suite.add('fast-safe-stringify: deep ', function () { + fastSafeStringify(deep) +}) +suite.add('fast-safe-stringify: deep circular ', function () { + fastSafeStringify(deepCirc) +}) +suite.add('fast-safe-stringify: large deep circular getters ', function () { + fastSafeStringify(deepCircGetters) +}) +suite.add('fast-safe-stringify: deep non-conf circular getters', function () { + fastSafeStringify(deepCircNonCongifurableGetters) +}) + +// add listeners +suite.on('cycle', function (event) { + console.log(String(event.target)) +}) + +suite.on('complete', function () { + console.log('\nFastest is ' + this.filter('fastest').map('name')) +}) + +suite.run({ delay: 1, minSamples: 150 }) diff --git a/node_modules/fast-safe-stringify/index.d.ts b/node_modules/fast-safe-stringify/index.d.ts new file mode 100644 index 0000000..9a9b1f0 --- /dev/null +++ b/node_modules/fast-safe-stringify/index.d.ts @@ -0,0 +1,23 @@ +declare function stringify( + value: any, + replacer?: (key: string, value: any) => any, + space?: string | number, + options?: { depthLimit: number | undefined; edgesLimit: number | undefined } +): string; + +declare namespace stringify { + export function stable( + value: any, + replacer?: (key: string, value: any) => any, + space?: string | number, + options?: { depthLimit: number | undefined; edgesLimit: number | undefined } + ): string; + export function stableStringify( + value: any, + replacer?: (key: string, value: any) => any, + space?: string | number, + options?: { depthLimit: number | undefined; edgesLimit: number | undefined } + ): string; +} + +export default stringify; diff --git a/node_modules/fast-safe-stringify/index.js b/node_modules/fast-safe-stringify/index.js new file mode 100644 index 0000000..ecf7e51 --- /dev/null +++ b/node_modules/fast-safe-stringify/index.js @@ -0,0 +1,229 @@ +module.exports = stringify +stringify.default = stringify +stringify.stable = deterministicStringify +stringify.stableStringify = deterministicStringify + +var LIMIT_REPLACE_NODE = '[...]' +var CIRCULAR_REPLACE_NODE = '[Circular]' + +var arr = [] +var replacerStack = [] + +function defaultOptions () { + return { + depthLimit: Number.MAX_SAFE_INTEGER, + edgesLimit: Number.MAX_SAFE_INTEGER + } +} + +// Regular stringify +function stringify (obj, replacer, spacer, options) { + if (typeof options === 'undefined') { + options = defaultOptions() + } + + decirc(obj, '', 0, [], undefined, 0, options) + var res + try { + if (replacerStack.length === 0) { + res = JSON.stringify(obj, replacer, spacer) + } else { + res = JSON.stringify(obj, replaceGetterValues(replacer), spacer) + } + } catch (_) { + return JSON.stringify('[unable to serialize, circular reference is too complex to analyze]') + } finally { + while (arr.length !== 0) { + var part = arr.pop() + if (part.length === 4) { + Object.defineProperty(part[0], part[1], part[3]) + } else { + part[0][part[1]] = part[2] + } + } + } + return res +} + +function setReplace (replace, val, k, parent) { + var propertyDescriptor = Object.getOwnPropertyDescriptor(parent, k) + if 
(propertyDescriptor.get !== undefined) { + if (propertyDescriptor.configurable) { + Object.defineProperty(parent, k, { value: replace }) + arr.push([parent, k, val, propertyDescriptor]) + } else { + replacerStack.push([val, k, replace]) + } + } else { + parent[k] = replace + arr.push([parent, k, val]) + } +} + +function decirc (val, k, edgeIndex, stack, parent, depth, options) { + depth += 1 + var i + if (typeof val === 'object' && val !== null) { + for (i = 0; i < stack.length; i++) { + if (stack[i] === val) { + setReplace(CIRCULAR_REPLACE_NODE, val, k, parent) + return + } + } + + if ( + typeof options.depthLimit !== 'undefined' && + depth > options.depthLimit + ) { + setReplace(LIMIT_REPLACE_NODE, val, k, parent) + return + } + + if ( + typeof options.edgesLimit !== 'undefined' && + edgeIndex + 1 > options.edgesLimit + ) { + setReplace(LIMIT_REPLACE_NODE, val, k, parent) + return + } + + stack.push(val) + // Optimize for Arrays. Big arrays could kill the performance otherwise! + if (Array.isArray(val)) { + for (i = 0; i < val.length; i++) { + decirc(val[i], i, i, stack, val, depth, options) + } + } else { + var keys = Object.keys(val) + for (i = 0; i < keys.length; i++) { + var key = keys[i] + decirc(val[key], key, i, stack, val, depth, options) + } + } + stack.pop() + } +} + +// Stable-stringify +function compareFunction (a, b) { + if (a < b) { + return -1 + } + if (a > b) { + return 1 + } + return 0 +} + +function deterministicStringify (obj, replacer, spacer, options) { + if (typeof options === 'undefined') { + options = defaultOptions() + } + + var tmp = deterministicDecirc(obj, '', 0, [], undefined, 0, options) || obj + var res + try { + if (replacerStack.length === 0) { + res = JSON.stringify(tmp, replacer, spacer) + } else { + res = JSON.stringify(tmp, replaceGetterValues(replacer), spacer) + } + } catch (_) { + return JSON.stringify('[unable to serialize, circular reference is too complex to analyze]') + } finally { + // Ensure that we restore the object as it was. + while (arr.length !== 0) { + var part = arr.pop() + if (part.length === 4) { + Object.defineProperty(part[0], part[1], part[3]) + } else { + part[0][part[1]] = part[2] + } + } + } + return res +} + +function deterministicDecirc (val, k, edgeIndex, stack, parent, depth, options) { + depth += 1 + var i + if (typeof val === 'object' && val !== null) { + for (i = 0; i < stack.length; i++) { + if (stack[i] === val) { + setReplace(CIRCULAR_REPLACE_NODE, val, k, parent) + return + } + } + try { + if (typeof val.toJSON === 'function') { + return + } + } catch (_) { + return + } + + if ( + typeof options.depthLimit !== 'undefined' && + depth > options.depthLimit + ) { + setReplace(LIMIT_REPLACE_NODE, val, k, parent) + return + } + + if ( + typeof options.edgesLimit !== 'undefined' && + edgeIndex + 1 > options.edgesLimit + ) { + setReplace(LIMIT_REPLACE_NODE, val, k, parent) + return + } + + stack.push(val) + // Optimize for Arrays. Big arrays could kill the performance otherwise! 
+ if (Array.isArray(val)) { + for (i = 0; i < val.length; i++) { + deterministicDecirc(val[i], i, i, stack, val, depth, options) + } + } else { + // Create a temporary object in the required way + var tmp = {} + var keys = Object.keys(val).sort(compareFunction) + for (i = 0; i < keys.length; i++) { + var key = keys[i] + deterministicDecirc(val[key], key, i, stack, val, depth, options) + tmp[key] = val[key] + } + if (typeof parent !== 'undefined') { + arr.push([parent, k, val]) + parent[k] = tmp + } else { + return tmp + } + } + stack.pop() + } +} + +// wraps replacer function to handle values we couldn't replace +// and mark them as replaced value +function replaceGetterValues (replacer) { + replacer = + typeof replacer !== 'undefined' + ? replacer + : function (k, v) { + return v + } + return function (key, val) { + if (replacerStack.length > 0) { + for (var i = 0; i < replacerStack.length; i++) { + var part = replacerStack[i] + if (part[1] === key && part[0] === val) { + val = part[2] + replacerStack.splice(i, 1) + break + } + } + } + return replacer.call(this, key, val) + } +} diff --git a/node_modules/fast-safe-stringify/package.json b/node_modules/fast-safe-stringify/package.json new file mode 100644 index 0000000..206a591 --- /dev/null +++ b/node_modules/fast-safe-stringify/package.json @@ -0,0 +1,46 @@ +{ + "name": "fast-safe-stringify", + "version": "2.1.1", + "description": "Safely and quickly serialize JavaScript objects", + "keywords": [ + "stable", + "stringify", + "JSON", + "JSON.stringify", + "safe", + "serialize" + ], + "main": "index.js", + "scripts": { + "test": "standard && tap --no-esm test.js test-stable.js", + "benchmark": "node benchmark.js" + }, + "author": "David Mark Clements", + "contributors": [ + "Ruben Bridgewater", + "Matteo Collina", + "Ben Gourley", + "Gabriel Lesperance", + "Alex Liu", + "Christoph Walcher", + "Nicholas Young" + ], + "license": "MIT", + "typings": "index", + "devDependencies": { + "benchmark": "^2.1.4", + "clone": "^2.1.0", + "json-stringify-safe": "^5.0.1", + "standard": "^11.0.0", + "tap": "^12.0.0" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/davidmarkclements/fast-safe-stringify.git" + }, + "bugs": { + "url": "https://github.com/davidmarkclements/fast-safe-stringify/issues" + }, + "homepage": "https://github.com/davidmarkclements/fast-safe-stringify#readme", + "dependencies": {} +} diff --git a/node_modules/fast-safe-stringify/readme.md b/node_modules/fast-safe-stringify/readme.md new file mode 100644 index 0000000..47179c9 --- /dev/null +++ b/node_modules/fast-safe-stringify/readme.md @@ -0,0 +1,170 @@ +# fast-safe-stringify + +Safe and fast serialization alternative to [JSON.stringify][]. + +Gracefully handles circular structures instead of throwing in most cases. +It could return an error string if the circular object is too complex to analyze, +e.g. in case there are proxies involved. + +Provides a deterministic ("stable") version as well that will also gracefully +handle circular structures. See the example below for further information. + +## Usage + +The same as [JSON.stringify][]. 
+ +`stringify(value[, replacer[, space[, options]]])` + +```js +const safeStringify = require('fast-safe-stringify') +const o = { a: 1 } +o.o = o + +console.log(safeStringify(o)) +// '{"a":1,"o":"[Circular]"}' +console.log(JSON.stringify(o)) +// TypeError: Converting circular structure to JSON + +function replacer(key, value) { + console.log('Key:', JSON.stringify(key), 'Value:', JSON.stringify(value)) + // Remove the circular structure + if (value === '[Circular]') { + return + } + return value +} + +// those are also defaults limits when no options object is passed into safeStringify +// configure it to lower the limit. +const options = { + depthLimit: Number.MAX_SAFE_INTEGER, + edgesLimit: Number.MAX_SAFE_INTEGER +}; + +const serialized = safeStringify(o, replacer, 2, options) +// Key: "" Value: {"a":1,"o":"[Circular]"} +// Key: "a" Value: 1 +// Key: "o" Value: "[Circular]" +console.log(serialized) +// { +// "a": 1 +// } +``` + + +Using the deterministic version also works the same: + +```js +const safeStringify = require('fast-safe-stringify') +const o = { b: 1, a: 0 } +o.o = o + +console.log(safeStringify(o)) +// '{"b":1,"a":0,"o":"[Circular]"}' +console.log(safeStringify.stableStringify(o)) +// '{"a":0,"b":1,"o":"[Circular]"}' +console.log(JSON.stringify(o)) +// TypeError: Converting circular structure to JSON +``` + +A faster and side-effect free implementation is available in the +[safe-stable-stringify][] module. However it is still considered experimental +due to a new and more complex implementation. + +### Replace strings constants + +- `[Circular]` - when same reference is found +- `[...]` - when some limit from options object is reached + +## Differences to JSON.stringify + +In general the behavior is identical to [JSON.stringify][]. The [`replacer`][] +and [`space`][] options are also available. + +A few exceptions exist to [JSON.stringify][] while using [`toJSON`][] or +[`replacer`][]: + +### Regular safe stringify + +- Manipulating a circular structure of the passed in value in a `toJSON` or the + `replacer` is not possible! It is possible for any other value and property. + +- In case a circular structure is detected and the [`replacer`][] is used it + will receive the string `[Circular]` as the argument instead of the circular + object itself. + +### Deterministic ("stable") safe stringify + +- Manipulating the input object either in a [`toJSON`][] or the [`replacer`][] + function will not have any effect on the output. The output entirely relies on + the shape the input value had at the point passed to the stringify function! + +- In case a circular structure is detected and the [`replacer`][] is used it + will receive the string `[Circular]` as the argument instead of the circular + object itself. + +A side effect free variation without these limitations can be found as well +([`safe-stable-stringify`][]). It is also faster than the current +implementation. It is still considered experimental due to a new and more +complex implementation. + +## Benchmarks + +Although not JSON, the Node.js `util.inspect` method can be used for similar +purposes (e.g. logging) and also handles circular references. 
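As a quick illustration of the claim above that `util.inspect` also copes with circular references (a sketch, not part of the upstream readme):

```js
const { inspect } = require('util')

const o = { a: 1 }
o.self = o // circular reference

// inspect() does not throw on cycles; it prints a circular marker instead.
// The exact formatting varies by Node.js version, e.g.
// <ref *1> { a: 1, self: [Circular *1] }
console.log(inspect(o, { depth: null }))
```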
+ +Here we compare `fast-safe-stringify` with some alternatives: +(Lenovo T450s with a i7-5600U CPU using Node.js 8.9.4) + +```md +fast-safe-stringify: simple object x 1,121,497 ops/sec ±0.75% (97 runs sampled) +fast-safe-stringify: circular x 560,126 ops/sec ±0.64% (96 runs sampled) +fast-safe-stringify: deep x 32,472 ops/sec ±0.57% (95 runs sampled) +fast-safe-stringify: deep circular x 32,513 ops/sec ±0.80% (92 runs sampled) + +util.inspect: simple object x 272,837 ops/sec ±1.48% (90 runs sampled) +util.inspect: circular x 116,896 ops/sec ±1.19% (95 runs sampled) +util.inspect: deep x 19,382 ops/sec ±0.66% (92 runs sampled) +util.inspect: deep circular x 18,717 ops/sec ±0.63% (96 runs sampled) + +json-stringify-safe: simple object x 233,621 ops/sec ±0.97% (94 runs sampled) +json-stringify-safe: circular x 110,409 ops/sec ±1.85% (95 runs sampled) +json-stringify-safe: deep x 8,705 ops/sec ±0.87% (96 runs sampled) +json-stringify-safe: deep circular x 8,336 ops/sec ±2.20% (93 runs sampled) +``` + +For stable stringify comparisons, see the performance benchmarks in the +[`safe-stable-stringify`][] readme. + +## Protip + +Whether `fast-safe-stringify` or alternatives are used: if the use case +consists of deeply nested objects without circular references the following +pattern will give best results. +Shallow or one level nested objects on the other hand will slow down with it. +It is entirely dependant on the use case. + +```js +const stringify = require('fast-safe-stringify') + +function tryJSONStringify (obj) { + try { return JSON.stringify(obj) } catch (_) {} +} + +const serializedString = tryJSONStringify(deep) || stringify(deep) +``` + +## Acknowledgements + +Sponsored by [nearForm](http://nearform.com) + +## License + +MIT + +[`replacer`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify#The%20replacer%20parameter +[`safe-stable-stringify`]: https://github.com/BridgeAR/safe-stable-stringify +[`space`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify#The%20space%20argument +[`toJSON`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify#toJSON()_behavior +[benchmark]: https://github.com/epoberezkin/fast-json-stable-stringify/blob/67f688f7441010cfef91a6147280cc501701e83b/benchmark +[JSON.stringify]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify diff --git a/node_modules/fast-safe-stringify/test-stable.js b/node_modules/fast-safe-stringify/test-stable.js new file mode 100644 index 0000000..c55b95c --- /dev/null +++ b/node_modules/fast-safe-stringify/test-stable.js @@ -0,0 +1,404 @@ +const test = require('tap').test +const fss = require('./').stable +const clone = require('clone') +const s = JSON.stringify +const stream = require('stream') + +test('circular reference to root', function (assert) { + const fixture = { name: 'Tywin Lannister' } + fixture.circle = fixture + const expected = s({ circle: '[Circular]', name: 'Tywin Lannister' }) + const actual = fss(fixture) + assert.equal(actual, expected) + assert.end() +}) + +test('circular getter reference to root', function (assert) { + const fixture = { + name: 'Tywin Lannister', + get circle () { + return fixture + } + } + + const expected = s({ circle: '[Circular]', name: 'Tywin Lannister' }) + const actual = fss(fixture) + assert.equal(actual, expected) + assert.end() +}) + +test('nested circular reference to root', function (assert) { + const 
fixture = { name: 'Tywin Lannister' } + fixture.id = { circle: fixture } + const expected = s({ id: { circle: '[Circular]' }, name: 'Tywin Lannister' }) + const actual = fss(fixture) + assert.equal(actual, expected) + assert.end() +}) + +test('child circular reference', function (assert) { + const fixture = { + name: 'Tywin Lannister', + child: { name: 'Tyrion Lannister' } + } + fixture.child.dinklage = fixture.child + const expected = s({ + child: { + dinklage: '[Circular]', + name: 'Tyrion Lannister' + }, + name: 'Tywin Lannister' + }) + const actual = fss(fixture) + assert.equal(actual, expected) + assert.end() +}) + +test('nested child circular reference', function (assert) { + const fixture = { + name: 'Tywin Lannister', + child: { name: 'Tyrion Lannister' } + } + fixture.child.actor = { dinklage: fixture.child } + const expected = s({ + child: { + actor: { dinklage: '[Circular]' }, + name: 'Tyrion Lannister' + }, + name: 'Tywin Lannister' + }) + const actual = fss(fixture) + assert.equal(actual, expected) + assert.end() +}) + +test('circular objects in an array', function (assert) { + const fixture = { name: 'Tywin Lannister' } + fixture.hand = [fixture, fixture] + const expected = s({ + hand: ['[Circular]', '[Circular]'], + name: 'Tywin Lannister' + }) + const actual = fss(fixture) + assert.equal(actual, expected) + assert.end() +}) + +test('nested circular references in an array', function (assert) { + const fixture = { + name: 'Tywin Lannister', + offspring: [{ name: 'Tyrion Lannister' }, { name: 'Cersei Lannister' }] + } + fixture.offspring[0].dinklage = fixture.offspring[0] + fixture.offspring[1].headey = fixture.offspring[1] + + const expected = s({ + name: 'Tywin Lannister', + offspring: [ + { dinklage: '[Circular]', name: 'Tyrion Lannister' }, + { headey: '[Circular]', name: 'Cersei Lannister' } + ] + }) + const actual = fss(fixture) + assert.equal(actual, expected) + assert.end() +}) + +test('circular arrays', function (assert) { + const fixture = [] + fixture.push(fixture, fixture) + const expected = s(['[Circular]', '[Circular]']) + const actual = fss(fixture) + assert.equal(actual, expected) + assert.end() +}) + +test('nested circular arrays', function (assert) { + const fixture = [] + fixture.push( + { name: 'Jon Snow', bastards: fixture }, + { name: 'Ramsay Bolton', bastards: fixture } + ) + const expected = s([ + { bastards: '[Circular]', name: 'Jon Snow' }, + { bastards: '[Circular]', name: 'Ramsay Bolton' } + ]) + const actual = fss(fixture) + assert.equal(actual, expected) + assert.end() +}) + +test('repeated non-circular references in objects', function (assert) { + const daenerys = { name: 'Daenerys Targaryen' } + const fixture = { + motherOfDragons: daenerys, + queenOfMeereen: daenerys + } + const expected = s(fixture) + const actual = fss(fixture) + assert.equal(actual, expected) + assert.end() +}) + +test('repeated non-circular references in arrays', function (assert) { + const daenerys = { name: 'Daenerys Targaryen' } + const fixture = [daenerys, daenerys] + const expected = s(fixture) + const actual = fss(fixture) + assert.equal(actual, expected) + assert.end() +}) + +test('double child circular reference', function (assert) { + // create circular reference + const child = { name: 'Tyrion Lannister' } + child.dinklage = child + + // include it twice in the fixture + const fixture = { name: 'Tywin Lannister', childA: child, childB: child } + const cloned = clone(fixture) + const expected = s({ + childA: { + dinklage: '[Circular]', + name: 'Tyrion Lannister' + 
}, + childB: { + dinklage: '[Circular]', + name: 'Tyrion Lannister' + }, + name: 'Tywin Lannister' + }) + const actual = fss(fixture) + assert.equal(actual, expected) + + // check if the fixture has not been modified + assert.same(fixture, cloned) + assert.end() +}) + +test('child circular reference with toJSON', function (assert) { + // Create a test object that has an overridden `toJSON` property + TestObject.prototype.toJSON = function () { + return { special: 'case' } + } + function TestObject (content) {} + + // Creating a simple circular object structure + const parentObject = {} + parentObject.childObject = new TestObject() + parentObject.childObject.parentObject = parentObject + + // Creating a simple circular object structure + const otherParentObject = new TestObject() + otherParentObject.otherChildObject = {} + otherParentObject.otherChildObject.otherParentObject = otherParentObject + + // Making sure our original tests work + assert.same(parentObject.childObject.parentObject, parentObject) + assert.same( + otherParentObject.otherChildObject.otherParentObject, + otherParentObject + ) + + // Should both be idempotent + assert.equal(fss(parentObject), '{"childObject":{"special":"case"}}') + assert.equal(fss(otherParentObject), '{"special":"case"}') + + // Therefore the following assertion should be `true` + assert.same(parentObject.childObject.parentObject, parentObject) + assert.same( + otherParentObject.otherChildObject.otherParentObject, + otherParentObject + ) + + assert.end() +}) + +test('null object', function (assert) { + const expected = s(null) + const actual = fss(null) + assert.equal(actual, expected) + assert.end() +}) + +test('null property', function (assert) { + const expected = s({ f: null }) + const actual = fss({ f: null }) + assert.equal(actual, expected) + assert.end() +}) + +test('nested child circular reference in toJSON', function (assert) { + var circle = { some: 'data' } + circle.circle = circle + var a = { + b: { + toJSON: function () { + a.b = 2 + return '[Redacted]' + } + }, + baz: { + circle, + toJSON: function () { + a.baz = circle + return '[Redacted]' + } + } + } + var o = { + a, + bar: a + } + + const expected = s({ + a: { + b: '[Redacted]', + baz: '[Redacted]' + }, + bar: { + // TODO: This is a known limitation of the current implementation. 
+ // The ideal result would be: + // + // b: 2, + // baz: { + // circle: '[Circular]', + // some: 'data' + // } + // + b: '[Redacted]', + baz: '[Redacted]' + } + }) + const actual = fss(o) + assert.equal(actual, expected) + assert.end() +}) + +test('circular getters are restored when stringified', function (assert) { + const fixture = { + name: 'Tywin Lannister', + get circle () { + return fixture + } + } + fss(fixture) + + assert.equal(fixture.circle, fixture) + assert.end() +}) + +test('non-configurable circular getters use a replacer instead of markers', function (assert) { + const fixture = { name: 'Tywin Lannister' } + Object.defineProperty(fixture, 'circle', { + configurable: false, + get: function () { + return fixture + }, + enumerable: true + }) + + fss(fixture) + + assert.equal(fixture.circle, fixture) + assert.end() +}) + +test('getter child circular reference', function (assert) { + const fixture = { + name: 'Tywin Lannister', + child: { + name: 'Tyrion Lannister', + get dinklage () { + return fixture.child + } + }, + get self () { + return fixture + } + } + + const expected = s({ + child: { + dinklage: '[Circular]', + name: 'Tyrion Lannister' + }, + name: 'Tywin Lannister', + self: '[Circular]' + }) + const actual = fss(fixture) + assert.equal(actual, expected) + assert.end() +}) + +test('Proxy throwing', function (assert) { + assert.plan(1) + const s = new stream.PassThrough() + s.resume() + s.write('', () => { + assert.end() + }) + const actual = fss({ s, p: new Proxy({}, { get () { throw new Error('kaboom') } }) }) + assert.equal(actual, '"[unable to serialize, circular reference is too complex to analyze]"') +}) + +test('depthLimit option - will replace deep objects', function (assert) { + const fixture = { + name: 'Tywin Lannister', + child: { + name: 'Tyrion Lannister' + }, + get self () { + return fixture + } + } + + const expected = s({ + child: '[...]', + name: 'Tywin Lannister', + self: '[Circular]' + }) + const actual = fss(fixture, undefined, undefined, { + depthLimit: 1, + edgesLimit: 1 + }) + assert.equal(actual, expected) + assert.end() +}) + +test('edgesLimit option - will replace deep objects', function (assert) { + const fixture = { + object: { + 1: { test: 'test' }, + 2: { test: 'test' }, + 3: { test: 'test' }, + 4: { test: 'test' } + }, + array: [ + { test: 'test' }, + { test: 'test' }, + { test: 'test' }, + { test: 'test' } + ], + get self () { + return fixture + } + } + + const expected = s({ + array: [{ test: 'test' }, { test: 'test' }, { test: 'test' }, '[...]'], + object: { + 1: { test: 'test' }, + 2: { test: 'test' }, + 3: { test: 'test' }, + 4: '[...]' + }, + self: '[Circular]' + }) + const actual = fss(fixture, undefined, undefined, { + depthLimit: 3, + edgesLimit: 3 + }) + assert.equal(actual, expected) + assert.end() +}) diff --git a/node_modules/fast-safe-stringify/test.js b/node_modules/fast-safe-stringify/test.js new file mode 100644 index 0000000..a4170e9 --- /dev/null +++ b/node_modules/fast-safe-stringify/test.js @@ -0,0 +1,397 @@ +const test = require('tap').test +const fss = require('./') +const clone = require('clone') +const s = JSON.stringify +const stream = require('stream') + +test('circular reference to root', function (assert) { + const fixture = { name: 'Tywin Lannister' } + fixture.circle = fixture + const expected = s({ name: 'Tywin Lannister', circle: '[Circular]' }) + const actual = fss(fixture) + assert.equal(actual, expected) + assert.end() +}) + +test('circular getter reference to root', function (assert) { + const fixture = 
{ + name: 'Tywin Lannister', + get circle () { + return fixture + } + } + const expected = s({ name: 'Tywin Lannister', circle: '[Circular]' }) + const actual = fss(fixture) + assert.equal(actual, expected) + assert.end() +}) + +test('nested circular reference to root', function (assert) { + const fixture = { name: 'Tywin Lannister' } + fixture.id = { circle: fixture } + const expected = s({ name: 'Tywin Lannister', id: { circle: '[Circular]' } }) + const actual = fss(fixture) + assert.equal(actual, expected) + assert.end() +}) + +test('child circular reference', function (assert) { + const fixture = { + name: 'Tywin Lannister', + child: { name: 'Tyrion Lannister' } + } + fixture.child.dinklage = fixture.child + const expected = s({ + name: 'Tywin Lannister', + child: { + name: 'Tyrion Lannister', + dinklage: '[Circular]' + } + }) + const actual = fss(fixture) + assert.equal(actual, expected) + assert.end() +}) + +test('nested child circular reference', function (assert) { + const fixture = { + name: 'Tywin Lannister', + child: { name: 'Tyrion Lannister' } + } + fixture.child.actor = { dinklage: fixture.child } + const expected = s({ + name: 'Tywin Lannister', + child: { + name: 'Tyrion Lannister', + actor: { dinklage: '[Circular]' } + } + }) + const actual = fss(fixture) + assert.equal(actual, expected) + assert.end() +}) + +test('circular objects in an array', function (assert) { + const fixture = { name: 'Tywin Lannister' } + fixture.hand = [fixture, fixture] + const expected = s({ + name: 'Tywin Lannister', + hand: ['[Circular]', '[Circular]'] + }) + const actual = fss(fixture) + assert.equal(actual, expected) + assert.end() +}) + +test('nested circular references in an array', function (assert) { + const fixture = { + name: 'Tywin Lannister', + offspring: [{ name: 'Tyrion Lannister' }, { name: 'Cersei Lannister' }] + } + fixture.offspring[0].dinklage = fixture.offspring[0] + fixture.offspring[1].headey = fixture.offspring[1] + + const expected = s({ + name: 'Tywin Lannister', + offspring: [ + { name: 'Tyrion Lannister', dinklage: '[Circular]' }, + { name: 'Cersei Lannister', headey: '[Circular]' } + ] + }) + const actual = fss(fixture) + assert.equal(actual, expected) + assert.end() +}) + +test('circular arrays', function (assert) { + const fixture = [] + fixture.push(fixture, fixture) + const expected = s(['[Circular]', '[Circular]']) + const actual = fss(fixture) + assert.equal(actual, expected) + assert.end() +}) + +test('nested circular arrays', function (assert) { + const fixture = [] + fixture.push( + { name: 'Jon Snow', bastards: fixture }, + { name: 'Ramsay Bolton', bastards: fixture } + ) + const expected = s([ + { name: 'Jon Snow', bastards: '[Circular]' }, + { name: 'Ramsay Bolton', bastards: '[Circular]' } + ]) + const actual = fss(fixture) + assert.equal(actual, expected) + assert.end() +}) + +test('repeated non-circular references in objects', function (assert) { + const daenerys = { name: 'Daenerys Targaryen' } + const fixture = { + motherOfDragons: daenerys, + queenOfMeereen: daenerys + } + const expected = s(fixture) + const actual = fss(fixture) + assert.equal(actual, expected) + assert.end() +}) + +test('repeated non-circular references in arrays', function (assert) { + const daenerys = { name: 'Daenerys Targaryen' } + const fixture = [daenerys, daenerys] + const expected = s(fixture) + const actual = fss(fixture) + assert.equal(actual, expected) + assert.end() +}) + +test('double child circular reference', function (assert) { + // create circular reference + const 
child = { name: 'Tyrion Lannister' } + child.dinklage = child + + // include it twice in the fixture + const fixture = { name: 'Tywin Lannister', childA: child, childB: child } + const cloned = clone(fixture) + const expected = s({ + name: 'Tywin Lannister', + childA: { + name: 'Tyrion Lannister', + dinklage: '[Circular]' + }, + childB: { + name: 'Tyrion Lannister', + dinklage: '[Circular]' + } + }) + const actual = fss(fixture) + assert.equal(actual, expected) + + // check if the fixture has not been modified + assert.same(fixture, cloned) + assert.end() +}) + +test('child circular reference with toJSON', function (assert) { + // Create a test object that has an overridden `toJSON` property + TestObject.prototype.toJSON = function () { + return { special: 'case' } + } + function TestObject (content) {} + + // Creating a simple circular object structure + const parentObject = {} + parentObject.childObject = new TestObject() + parentObject.childObject.parentObject = parentObject + + // Creating a simple circular object structure + const otherParentObject = new TestObject() + otherParentObject.otherChildObject = {} + otherParentObject.otherChildObject.otherParentObject = otherParentObject + + // Making sure our original tests work + assert.same(parentObject.childObject.parentObject, parentObject) + assert.same( + otherParentObject.otherChildObject.otherParentObject, + otherParentObject + ) + + // Should both be idempotent + assert.equal(fss(parentObject), '{"childObject":{"special":"case"}}') + assert.equal(fss(otherParentObject), '{"special":"case"}') + + // Therefore the following assertion should be `true` + assert.same(parentObject.childObject.parentObject, parentObject) + assert.same( + otherParentObject.otherChildObject.otherParentObject, + otherParentObject + ) + + assert.end() +}) + +test('null object', function (assert) { + const expected = s(null) + const actual = fss(null) + assert.equal(actual, expected) + assert.end() +}) + +test('null property', function (assert) { + const expected = s({ f: null }) + const actual = fss({ f: null }) + assert.equal(actual, expected) + assert.end() +}) + +test('nested child circular reference in toJSON', function (assert) { + const circle = { some: 'data' } + circle.circle = circle + const a = { + b: { + toJSON: function () { + a.b = 2 + return '[Redacted]' + } + }, + baz: { + circle, + toJSON: function () { + a.baz = circle + return '[Redacted]' + } + } + } + const o = { + a, + bar: a + } + + const expected = s({ + a: { + b: '[Redacted]', + baz: '[Redacted]' + }, + bar: { + b: 2, + baz: { + some: 'data', + circle: '[Circular]' + } + } + }) + const actual = fss(o) + assert.equal(actual, expected) + assert.end() +}) + +test('circular getters are restored when stringified', function (assert) { + const fixture = { + name: 'Tywin Lannister', + get circle () { + return fixture + } + } + fss(fixture) + + assert.equal(fixture.circle, fixture) + assert.end() +}) + +test('non-configurable circular getters use a replacer instead of markers', function (assert) { + const fixture = { name: 'Tywin Lannister' } + Object.defineProperty(fixture, 'circle', { + configurable: false, + get: function () { + return fixture + }, + enumerable: true + }) + + fss(fixture) + + assert.equal(fixture.circle, fixture) + assert.end() +}) + +test('getter child circular reference are replaced instead of marked', function (assert) { + const fixture = { + name: 'Tywin Lannister', + child: { + name: 'Tyrion Lannister', + get dinklage () { + return fixture.child + } + }, + get self () 
{ + return fixture + } + } + + const expected = s({ + name: 'Tywin Lannister', + child: { + name: 'Tyrion Lannister', + dinklage: '[Circular]' + }, + self: '[Circular]' + }) + const actual = fss(fixture) + assert.equal(actual, expected) + assert.end() +}) + +test('Proxy throwing', function (assert) { + assert.plan(1) + const s = new stream.PassThrough() + s.resume() + s.write('', () => { + assert.end() + }) + const actual = fss({ s, p: new Proxy({}, { get () { throw new Error('kaboom') } }) }) + assert.equal(actual, '"[unable to serialize, circular reference is too complex to analyze]"') +}) + +test('depthLimit option - will replace deep objects', function (assert) { + const fixture = { + name: 'Tywin Lannister', + child: { + name: 'Tyrion Lannister' + }, + get self () { + return fixture + } + } + + const expected = s({ + name: 'Tywin Lannister', + child: '[...]', + self: '[Circular]' + }) + const actual = fss(fixture, undefined, undefined, { + depthLimit: 1, + edgesLimit: 1 + }) + assert.equal(actual, expected) + assert.end() +}) + +test('edgesLimit option - will replace deep objects', function (assert) { + const fixture = { + object: { + 1: { test: 'test' }, + 2: { test: 'test' }, + 3: { test: 'test' }, + 4: { test: 'test' } + }, + array: [ + { test: 'test' }, + { test: 'test' }, + { test: 'test' }, + { test: 'test' } + ], + get self () { + return fixture + } + } + + const expected = s({ + object: { + 1: { test: 'test' }, + 2: { test: 'test' }, + 3: { test: 'test' }, + 4: '[...]' + }, + array: [{ test: 'test' }, { test: 'test' }, { test: 'test' }, '[...]'], + self: '[Circular]' + }) + const actual = fss(fixture, undefined, undefined, { + depthLimit: 3, + edgesLimit: 3 + }) + assert.equal(actual, expected) + assert.end() +}) diff --git a/node_modules/form-data/License b/node_modules/form-data/License new file mode 100644 index 0000000..c7ff12a --- /dev/null +++ b/node_modules/form-data/License @@ -0,0 +1,19 @@ +Copyright (c) 2012 Felix Geisendörfer (felix@debuggable.com) and contributors + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + THE SOFTWARE. 
diff --git a/node_modules/form-data/README.md.bak b/node_modules/form-data/README.md.bak new file mode 100644 index 0000000..298a1a2 --- /dev/null +++ b/node_modules/form-data/README.md.bak @@ -0,0 +1,358 @@ +# Form-Data [![NPM Module](https://img.shields.io/npm/v/form-data.svg)](https://www.npmjs.com/package/form-data) [![Join the chat at https://gitter.im/form-data/form-data](http://form-data.github.io/images/gitterbadge.svg)](https://gitter.im/form-data/form-data) + +A library to create readable ```"multipart/form-data"``` streams. Can be used to submit forms and file uploads to other web applications. + +The API of this library is inspired by the [XMLHttpRequest-2 FormData Interface][xhr2-fd]. + +[xhr2-fd]: http://dev.w3.org/2006/webapi/XMLHttpRequest-2/Overview.html#the-formdata-interface + +[![Linux Build](https://img.shields.io/travis/form-data/form-data/v4.0.0.svg?label=linux:6.x-12.x)](https://travis-ci.org/form-data/form-data) +[![MacOS Build](https://img.shields.io/travis/form-data/form-data/v4.0.0.svg?label=macos:6.x-12.x)](https://travis-ci.org/form-data/form-data) +[![Windows Build](https://img.shields.io/travis/form-data/form-data/v4.0.0.svg?label=windows:6.x-12.x)](https://travis-ci.org/form-data/form-data) + +[![Coverage Status](https://img.shields.io/coveralls/form-data/form-data/v4.0.0.svg?label=code+coverage)](https://coveralls.io/github/form-data/form-data?branch=master) +[![Dependency Status](https://img.shields.io/david/form-data/form-data.svg)](https://david-dm.org/form-data/form-data) + +## Install + +``` +npm install --save form-data +``` + +## Usage + +In this example we are constructing a form with 3 fields that contain a string, +a buffer and a file stream. + +``` javascript +var FormData = require('form-data'); +var fs = require('fs'); + +var form = new FormData(); +form.append('my_field', 'my value'); +form.append('my_buffer', new Buffer(10)); +form.append('my_file', fs.createReadStream('/foo/bar.jpg')); +``` + +Also you can use http-response stream: + +``` javascript +var FormData = require('form-data'); +var http = require('http'); + +var form = new FormData(); + +http.request('http://nodejs.org/images/logo.png', function(response) { + form.append('my_field', 'my value'); + form.append('my_buffer', new Buffer(10)); + form.append('my_logo', response); +}); +``` + +Or @mikeal's [request](https://github.com/request/request) stream: + +``` javascript +var FormData = require('form-data'); +var request = require('request'); + +var form = new FormData(); + +form.append('my_field', 'my value'); +form.append('my_buffer', new Buffer(10)); +form.append('my_logo', request('http://nodejs.org/images/logo.png')); +``` + +In order to submit this form to a web application, call ```submit(url, [callback])``` method: + +``` javascript +form.submit('http://example.org/', function(err, res) { + // res – response object (http.IncomingMessage) // + res.resume(); +}); + +``` + +For more advanced request manipulations ```submit()``` method returns ```http.ClientRequest``` object, or you can choose from one of the alternative submission methods. 
+ +### Custom options + +You can provide custom options, such as `maxDataSize`: + +``` javascript +var FormData = require('form-data'); + +var form = new FormData({ maxDataSize: 20971520 }); +form.append('my_field', 'my value'); +form.append('my_buffer', /* something big */); +``` + +List of available options could be found in [combined-stream](https://github.com/felixge/node-combined-stream/blob/master/lib/combined_stream.js#L7-L15) + +### Alternative submission methods + +You can use node's http client interface: + +``` javascript +var http = require('http'); + +var request = http.request({ + method: 'post', + host: 'example.org', + path: '/upload', + headers: form.getHeaders() +}); + +form.pipe(request); + +request.on('response', function(res) { + console.log(res.statusCode); +}); +``` + +Or if you would prefer the `'Content-Length'` header to be set for you: + +``` javascript +form.submit('example.org/upload', function(err, res) { + console.log(res.statusCode); +}); +``` + +To use custom headers and pre-known length in parts: + +``` javascript +var CRLF = '\r\n'; +var form = new FormData(); + +var options = { + header: CRLF + '--' + form.getBoundary() + CRLF + 'X-Custom-Header: 123' + CRLF + CRLF, + knownLength: 1 +}; + +form.append('my_buffer', buffer, options); + +form.submit('http://example.com/', function(err, res) { + if (err) throw err; + console.log('Done'); +}); +``` + +Form-Data can recognize and fetch all the required information from common types of streams (```fs.readStream```, ```http.response``` and ```mikeal's request```), for some other types of streams you'd need to provide "file"-related information manually: + +``` javascript +someModule.stream(function(err, stdout, stderr) { + if (err) throw err; + + var form = new FormData(); + + form.append('file', stdout, { + filename: 'unicycle.jpg', // ... or: + filepath: 'photos/toys/unicycle.jpg', + contentType: 'image/jpeg', + knownLength: 19806 + }); + + form.submit('http://example.com/', function(err, res) { + if (err) throw err; + console.log('Done'); + }); +}); +``` + +The `filepath` property overrides `filename` and may contain a relative path. This is typically used when uploading [multiple files from a directory](https://wicg.github.io/entries-api/#dom-htmlinputelement-webkitdirectory). + +For edge cases, like POST request to URL with query string or to pass HTTP auth credentials, object can be passed to `form.submit()` as first parameter: + +``` javascript +form.submit({ + host: 'example.com', + path: '/probably.php?extra=params', + auth: 'username:password' +}, function(err, res) { + console.log(res.statusCode); +}); +``` + +In case you need to also send custom HTTP headers with the POST request, you can use the `headers` key in first parameter of `form.submit()`: + +``` javascript +form.submit({ + host: 'example.com', + path: '/surelynot.php', + headers: {'x-test-header': 'test-header-value'} +}, function(err, res) { + console.log(res.statusCode); +}); +``` + +### Methods + +- [_Void_ append( **String** _field_, **Mixed** _value_ [, **Mixed** _options_] )](https://github.com/form-data/form-data#void-append-string-field-mixed-value--mixed-options-). 
+- [_Headers_ getHeaders( [**Headers** _userHeaders_] )](https://github.com/form-data/form-data#array-getheaders-array-userheaders-) +- [_String_ getBoundary()](https://github.com/form-data/form-data#string-getboundary) +- [_Void_ setBoundary()](https://github.com/form-data/form-data#void-setboundary) +- [_Buffer_ getBuffer()](https://github.com/form-data/form-data#buffer-getbuffer) +- [_Integer_ getLengthSync()](https://github.com/form-data/form-data#integer-getlengthsync) +- [_Integer_ getLength( **function** _callback_ )](https://github.com/form-data/form-data#integer-getlength-function-callback-) +- [_Boolean_ hasKnownLength()](https://github.com/form-data/form-data#boolean-hasknownlength) +- [_Request_ submit( _params_, **function** _callback_ )](https://github.com/form-data/form-data#request-submit-params-function-callback-) +- [_String_ toString()](https://github.com/form-data/form-data#string-tostring) + +#### _Void_ append( **String** _field_, **Mixed** _value_ [, **Mixed** _options_] ) +Append data to the form. You can submit about any format (string, integer, boolean, buffer, etc.). However, Arrays are not supported and need to be turned into strings by the user. +```javascript +var form = new FormData(); +form.append( 'my_string', 'my value' ); +form.append( 'my_integer', 1 ); +form.append( 'my_boolean', true ); +form.append( 'my_buffer', new Buffer(10) ); +form.append( 'my_array_as_json', JSON.stringify( ['bird','cute'] ) ) +``` + +You may provide a string for options, or an object. +```javascript +// Set filename by providing a string for options +form.append( 'my_file', fs.createReadStream('/foo/bar.jpg'), 'bar.jpg' ); + +// provide an object. +form.append( 'my_file', fs.createReadStream('/foo/bar.jpg'), {filename: 'bar.jpg', contentType: 'image/jpeg', knownLength: 19806} ); +``` + +#### _Headers_ getHeaders( [**Headers** _userHeaders_] ) +This method adds the correct `content-type` header to the provided array of `userHeaders`. + +#### _String_ getBoundary() +Return the boundary of the formData. By default, the boundary consists of 26 `-` followed by 24 numbers +for example: +```javascript +--------------------------515890814546601021194782 +``` + +#### _Void_ setBoundary(String _boundary_) +Set the boundary string, overriding the default behavior described above. + +_Note: The boundary must be unique and may not appear in the data._ + +#### _Buffer_ getBuffer() +Return the full formdata request package, as a Buffer. You can insert this Buffer in e.g. Axios to send multipart data. +```javascript +var form = new FormData(); +form.append( 'my_buffer', Buffer.from([0x4a,0x42,0x20,0x52,0x6f,0x63,0x6b,0x73]) ); +form.append( 'my_file', fs.readFileSync('/foo/bar.jpg') ); + +axios.post( 'https://example.com/path/to/api', + form.getBuffer(), + form.getHeaders() + ) +``` +**Note:** Because the output is of type Buffer, you can only append types that are accepted by Buffer: *string, Buffer, ArrayBuffer, Array, or Array-like Object*. A ReadStream for example will result in an error. + +#### _Integer_ getLengthSync() +Same as `getLength` but synchronous. + +_Note: getLengthSync __doesn't__ calculate streams length._ + +#### _Integer_ getLength( **function** _callback_ ) +Returns the `Content-Length` async. The callback is used to handle errors and continue once the length has been calculated +```javascript +this.getLength(function(err, length) { + if (err) { + this._error(err); + return; + } + + // add content length + request.setHeader('Content-Length', length); + + ... 
+}.bind(this)); +``` + +#### _Boolean_ hasKnownLength() +Checks if the length of added values is known. + +#### _Request_ submit( _params_, **function** _callback_ ) +Submit the form to a web application. +```javascript +var form = new FormData(); +form.append( 'my_string', 'Hello World' ); + +form.submit( 'http://example.com/', function(err, res) { + // res – response object (http.IncomingMessage) // + res.resume(); +} ); +``` + +#### _String_ toString() +Returns the form data as a string. Don't use this if you are sending files or buffers, use `getBuffer()` instead. + +### Integration with other libraries + +#### Request + +Form submission using [request](https://github.com/request/request): + +```javascript +var formData = { + my_field: 'my_value', + my_file: fs.createReadStream(__dirname + '/unicycle.jpg'), +}; + +request.post({url:'http://service.com/upload', formData: formData}, function(err, httpResponse, body) { + if (err) { + return console.error('upload failed:', err); + } + console.log('Upload successful! Server responded with:', body); +}); +``` + +For more details see [request readme](https://github.com/request/request#multipartform-data-multipart-form-uploads). + +#### node-fetch + +You can also submit a form using [node-fetch](https://github.com/bitinn/node-fetch): + +```javascript +var form = new FormData(); + +form.append('a', 1); + +fetch('http://example.com', { method: 'POST', body: form }) + .then(function(res) { + return res.json(); + }).then(function(json) { + console.log(json); + }); +``` + +#### axios + +In Node.js you can post a file using [axios](https://github.com/axios/axios): +```javascript +const form = new FormData(); +const stream = fs.createReadStream(PATH_TO_FILE); + +form.append('image', stream); + +// In Node.js environment you need to set boundary in the header field 'Content-Type' by calling method `getHeaders` +const formHeaders = form.getHeaders(); + +axios.post('http://example.com', form, { + headers: { + ...formHeaders, + }, +}) +.then(response => response) +.catch(error => error) +``` + +## Notes + +- ```getLengthSync()``` method DOESN'T calculate length for streams, use ```knownLength``` options as workaround. +- ```getLength(cb)``` will send an error as first parameter of callback if stream length cannot be calculated (e.g. send in custom streams w/o using ```knownLength```). +- ```submit``` will not add `content-length` if form length is unknown or not calculable. +- Starting version `2.x` FormData has dropped support for `node@0.10.x`. +- Starting version `3.x` FormData has dropped support for `node@4.x`. + +## License + +Form-Data is released under the [MIT](License) license. diff --git a/node_modules/form-data/Readme.md b/node_modules/form-data/Readme.md new file mode 100644 index 0000000..298a1a2 --- /dev/null +++ b/node_modules/form-data/Readme.md @@ -0,0 +1,358 @@ +# Form-Data [![NPM Module](https://img.shields.io/npm/v/form-data.svg)](https://www.npmjs.com/package/form-data) [![Join the chat at https://gitter.im/form-data/form-data](http://form-data.github.io/images/gitterbadge.svg)](https://gitter.im/form-data/form-data) + +A library to create readable ```"multipart/form-data"``` streams. Can be used to submit forms and file uploads to other web applications. + +The API of this library is inspired by the [XMLHttpRequest-2 FormData Interface][xhr2-fd]. 
+ +[xhr2-fd]: http://dev.w3.org/2006/webapi/XMLHttpRequest-2/Overview.html#the-formdata-interface + +[![Linux Build](https://img.shields.io/travis/form-data/form-data/v4.0.0.svg?label=linux:6.x-12.x)](https://travis-ci.org/form-data/form-data) +[![MacOS Build](https://img.shields.io/travis/form-data/form-data/v4.0.0.svg?label=macos:6.x-12.x)](https://travis-ci.org/form-data/form-data) +[![Windows Build](https://img.shields.io/travis/form-data/form-data/v4.0.0.svg?label=windows:6.x-12.x)](https://travis-ci.org/form-data/form-data) + +[![Coverage Status](https://img.shields.io/coveralls/form-data/form-data/v4.0.0.svg?label=code+coverage)](https://coveralls.io/github/form-data/form-data?branch=master) +[![Dependency Status](https://img.shields.io/david/form-data/form-data.svg)](https://david-dm.org/form-data/form-data) + +## Install + +``` +npm install --save form-data +``` + +## Usage + +In this example we are constructing a form with 3 fields that contain a string, +a buffer and a file stream. + +``` javascript +var FormData = require('form-data'); +var fs = require('fs'); + +var form = new FormData(); +form.append('my_field', 'my value'); +form.append('my_buffer', new Buffer(10)); +form.append('my_file', fs.createReadStream('/foo/bar.jpg')); +``` + +Also you can use http-response stream: + +``` javascript +var FormData = require('form-data'); +var http = require('http'); + +var form = new FormData(); + +http.request('http://nodejs.org/images/logo.png', function(response) { + form.append('my_field', 'my value'); + form.append('my_buffer', new Buffer(10)); + form.append('my_logo', response); +}); +``` + +Or @mikeal's [request](https://github.com/request/request) stream: + +``` javascript +var FormData = require('form-data'); +var request = require('request'); + +var form = new FormData(); + +form.append('my_field', 'my value'); +form.append('my_buffer', new Buffer(10)); +form.append('my_logo', request('http://nodejs.org/images/logo.png')); +``` + +In order to submit this form to a web application, call ```submit(url, [callback])``` method: + +``` javascript +form.submit('http://example.org/', function(err, res) { + // res – response object (http.IncomingMessage) // + res.resume(); +}); + +``` + +For more advanced request manipulations ```submit()``` method returns ```http.ClientRequest``` object, or you can choose from one of the alternative submission methods. 
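+
+For example (a minimal sketch), the returned request object can be used to watch for socket timeouts and abort a stalled upload:
+
+``` javascript
+var req = form.submit('http://example.org/', function(err, res) {
+  if (err) return console.error(err);
+  res.resume();
+});
+
+// sketch: `req` is a standard http.ClientRequest, so its usual API applies
+req.setTimeout(30000, function() {
+  req.abort(); // abort if the socket goes idle for more than 30 seconds
+});
+```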
+ +### Custom options + +You can provide custom options, such as `maxDataSize`: + +``` javascript +var FormData = require('form-data'); + +var form = new FormData({ maxDataSize: 20971520 }); +form.append('my_field', 'my value'); +form.append('my_buffer', /* something big */); +``` + +List of available options could be found in [combined-stream](https://github.com/felixge/node-combined-stream/blob/master/lib/combined_stream.js#L7-L15) + +### Alternative submission methods + +You can use node's http client interface: + +``` javascript +var http = require('http'); + +var request = http.request({ + method: 'post', + host: 'example.org', + path: '/upload', + headers: form.getHeaders() +}); + +form.pipe(request); + +request.on('response', function(res) { + console.log(res.statusCode); +}); +``` + +Or if you would prefer the `'Content-Length'` header to be set for you: + +``` javascript +form.submit('example.org/upload', function(err, res) { + console.log(res.statusCode); +}); +``` + +To use custom headers and pre-known length in parts: + +``` javascript +var CRLF = '\r\n'; +var form = new FormData(); + +var options = { + header: CRLF + '--' + form.getBoundary() + CRLF + 'X-Custom-Header: 123' + CRLF + CRLF, + knownLength: 1 +}; + +form.append('my_buffer', buffer, options); + +form.submit('http://example.com/', function(err, res) { + if (err) throw err; + console.log('Done'); +}); +``` + +Form-Data can recognize and fetch all the required information from common types of streams (```fs.readStream```, ```http.response``` and ```mikeal's request```), for some other types of streams you'd need to provide "file"-related information manually: + +``` javascript +someModule.stream(function(err, stdout, stderr) { + if (err) throw err; + + var form = new FormData(); + + form.append('file', stdout, { + filename: 'unicycle.jpg', // ... or: + filepath: 'photos/toys/unicycle.jpg', + contentType: 'image/jpeg', + knownLength: 19806 + }); + + form.submit('http://example.com/', function(err, res) { + if (err) throw err; + console.log('Done'); + }); +}); +``` + +The `filepath` property overrides `filename` and may contain a relative path. This is typically used when uploading [multiple files from a directory](https://wicg.github.io/entries-api/#dom-htmlinputelement-webkitdirectory). + +For edge cases, like POST request to URL with query string or to pass HTTP auth credentials, object can be passed to `form.submit()` as first parameter: + +``` javascript +form.submit({ + host: 'example.com', + path: '/probably.php?extra=params', + auth: 'username:password' +}, function(err, res) { + console.log(res.statusCode); +}); +``` + +In case you need to also send custom HTTP headers with the POST request, you can use the `headers` key in first parameter of `form.submit()`: + +``` javascript +form.submit({ + host: 'example.com', + path: '/surelynot.php', + headers: {'x-test-header': 'test-header-value'} +}, function(err, res) { + console.log(res.statusCode); +}); +``` + +### Methods + +- [_Void_ append( **String** _field_, **Mixed** _value_ [, **Mixed** _options_] )](https://github.com/form-data/form-data#void-append-string-field-mixed-value--mixed-options-). 
+- [_Headers_ getHeaders( [**Headers** _userHeaders_] )](https://github.com/form-data/form-data#array-getheaders-array-userheaders-) +- [_String_ getBoundary()](https://github.com/form-data/form-data#string-getboundary) +- [_Void_ setBoundary()](https://github.com/form-data/form-data#void-setboundary) +- [_Buffer_ getBuffer()](https://github.com/form-data/form-data#buffer-getbuffer) +- [_Integer_ getLengthSync()](https://github.com/form-data/form-data#integer-getlengthsync) +- [_Integer_ getLength( **function** _callback_ )](https://github.com/form-data/form-data#integer-getlength-function-callback-) +- [_Boolean_ hasKnownLength()](https://github.com/form-data/form-data#boolean-hasknownlength) +- [_Request_ submit( _params_, **function** _callback_ )](https://github.com/form-data/form-data#request-submit-params-function-callback-) +- [_String_ toString()](https://github.com/form-data/form-data#string-tostring) + +#### _Void_ append( **String** _field_, **Mixed** _value_ [, **Mixed** _options_] ) +Append data to the form. You can submit about any format (string, integer, boolean, buffer, etc.). However, Arrays are not supported and need to be turned into strings by the user. +```javascript +var form = new FormData(); +form.append( 'my_string', 'my value' ); +form.append( 'my_integer', 1 ); +form.append( 'my_boolean', true ); +form.append( 'my_buffer', new Buffer(10) ); +form.append( 'my_array_as_json', JSON.stringify( ['bird','cute'] ) ) +``` + +You may provide a string for options, or an object. +```javascript +// Set filename by providing a string for options +form.append( 'my_file', fs.createReadStream('/foo/bar.jpg'), 'bar.jpg' ); + +// provide an object. +form.append( 'my_file', fs.createReadStream('/foo/bar.jpg'), {filename: 'bar.jpg', contentType: 'image/jpeg', knownLength: 19806} ); +``` + +#### _Headers_ getHeaders( [**Headers** _userHeaders_] ) +This method adds the correct `content-type` header to the provided array of `userHeaders`. + +#### _String_ getBoundary() +Return the boundary of the formData. By default, the boundary consists of 26 `-` followed by 24 numbers +for example: +```javascript +--------------------------515890814546601021194782 +``` + +#### _Void_ setBoundary(String _boundary_) +Set the boundary string, overriding the default behavior described above. + +_Note: The boundary must be unique and may not appear in the data._ + +#### _Buffer_ getBuffer() +Return the full formdata request package, as a Buffer. You can insert this Buffer in e.g. Axios to send multipart data. +```javascript +var form = new FormData(); +form.append( 'my_buffer', Buffer.from([0x4a,0x42,0x20,0x52,0x6f,0x63,0x6b,0x73]) ); +form.append( 'my_file', fs.readFileSync('/foo/bar.jpg') ); + +axios.post( 'https://example.com/path/to/api', + form.getBuffer(), + form.getHeaders() + ) +``` +**Note:** Because the output is of type Buffer, you can only append types that are accepted by Buffer: *string, Buffer, ArrayBuffer, Array, or Array-like Object*. A ReadStream for example will result in an error. + +#### _Integer_ getLengthSync() +Same as `getLength` but synchronous. + +_Note: getLengthSync __doesn't__ calculate streams length._ + +#### _Integer_ getLength( **function** _callback_ ) +Returns the `Content-Length` async. The callback is used to handle errors and continue once the length has been calculated +```javascript +this.getLength(function(err, length) { + if (err) { + this._error(err); + return; + } + + // add content length + request.setHeader('Content-Length', length); + + ... 
+}.bind(this)); +``` + +#### _Boolean_ hasKnownLength() +Checks if the length of added values is known. + +#### _Request_ submit( _params_, **function** _callback_ ) +Submit the form to a web application. +```javascript +var form = new FormData(); +form.append( 'my_string', 'Hello World' ); + +form.submit( 'http://example.com/', function(err, res) { + // res – response object (http.IncomingMessage) // + res.resume(); +} ); +``` + +#### _String_ toString() +Returns the form data as a string. Don't use this if you are sending files or buffers, use `getBuffer()` instead. + +### Integration with other libraries + +#### Request + +Form submission using [request](https://github.com/request/request): + +```javascript +var formData = { + my_field: 'my_value', + my_file: fs.createReadStream(__dirname + '/unicycle.jpg'), +}; + +request.post({url:'http://service.com/upload', formData: formData}, function(err, httpResponse, body) { + if (err) { + return console.error('upload failed:', err); + } + console.log('Upload successful! Server responded with:', body); +}); +``` + +For more details see [request readme](https://github.com/request/request#multipartform-data-multipart-form-uploads). + +#### node-fetch + +You can also submit a form using [node-fetch](https://github.com/bitinn/node-fetch): + +```javascript +var form = new FormData(); + +form.append('a', 1); + +fetch('http://example.com', { method: 'POST', body: form }) + .then(function(res) { + return res.json(); + }).then(function(json) { + console.log(json); + }); +``` + +#### axios + +In Node.js you can post a file using [axios](https://github.com/axios/axios): +```javascript +const form = new FormData(); +const stream = fs.createReadStream(PATH_TO_FILE); + +form.append('image', stream); + +// In Node.js environment you need to set boundary in the header field 'Content-Type' by calling method `getHeaders` +const formHeaders = form.getHeaders(); + +axios.post('http://example.com', form, { + headers: { + ...formHeaders, + }, +}) +.then(response => response) +.catch(error => error) +``` + +## Notes + +- ```getLengthSync()``` method DOESN'T calculate length for streams, use ```knownLength``` options as workaround. +- ```getLength(cb)``` will send an error as first parameter of callback if stream length cannot be calculated (e.g. send in custom streams w/o using ```knownLength```). +- ```submit``` will not add `content-length` if form length is unknown or not calculable. +- Starting version `2.x` FormData has dropped support for `node@0.10.x`. +- Starting version `3.x` FormData has dropped support for `node@4.x`. + +## License + +Form-Data is released under the [MIT](License) license. diff --git a/node_modules/form-data/index.d.ts b/node_modules/form-data/index.d.ts new file mode 100644 index 0000000..295e9e9 --- /dev/null +++ b/node_modules/form-data/index.d.ts @@ -0,0 +1,62 @@ +// Definitions by: Carlos Ballesteros Velasco +// Leon Yu +// BendingBender +// Maple Miao + +/// +import * as stream from 'stream'; +import * as http from 'http'; + +export = FormData; + +// Extracted because @types/node doesn't export interfaces. 
+interface ReadableOptions { + highWaterMark?: number; + encoding?: string; + objectMode?: boolean; + read?(this: stream.Readable, size: number): void; + destroy?(this: stream.Readable, error: Error | null, callback: (error: Error | null) => void): void; + autoDestroy?: boolean; +} + +interface Options extends ReadableOptions { + writable?: boolean; + readable?: boolean; + dataSize?: number; + maxDataSize?: number; + pauseStreams?: boolean; +} + +declare class FormData extends stream.Readable { + constructor(options?: Options); + append(key: string, value: any, options?: FormData.AppendOptions | string): void; + getHeaders(userHeaders?: FormData.Headers): FormData.Headers; + submit( + params: string | FormData.SubmitOptions, + callback?: (error: Error | null, response: http.IncomingMessage) => void + ): http.ClientRequest; + getBuffer(): Buffer; + setBoundary(boundary: string): void; + getBoundary(): string; + getLength(callback: (err: Error | null, length: number) => void): void; + getLengthSync(): number; + hasKnownLength(): boolean; +} + +declare namespace FormData { + interface Headers { + [key: string]: any; + } + + interface AppendOptions { + header?: string | Headers; + knownLength?: number; + filename?: string; + filepath?: string; + contentType?: string; + } + + interface SubmitOptions extends http.RequestOptions { + protocol?: 'https:' | 'http:'; + } +} diff --git a/node_modules/form-data/lib/browser.js b/node_modules/form-data/lib/browser.js new file mode 100644 index 0000000..09e7c70 --- /dev/null +++ b/node_modules/form-data/lib/browser.js @@ -0,0 +1,2 @@ +/* eslint-env browser */ +module.exports = typeof self == 'object' ? self.FormData : window.FormData; diff --git a/node_modules/form-data/lib/form_data.js b/node_modules/form-data/lib/form_data.js new file mode 100644 index 0000000..18dc819 --- /dev/null +++ b/node_modules/form-data/lib/form_data.js @@ -0,0 +1,501 @@ +var CombinedStream = require('combined-stream'); +var util = require('util'); +var path = require('path'); +var http = require('http'); +var https = require('https'); +var parseUrl = require('url').parse; +var fs = require('fs'); +var Stream = require('stream').Stream; +var mime = require('mime-types'); +var asynckit = require('asynckit'); +var populate = require('./populate.js'); + +// Public API +module.exports = FormData; + +// make it a Stream +util.inherits(FormData, CombinedStream); + +/** + * Create readable "multipart/form-data" streams. + * Can be used to submit forms + * and file uploads to other web applications. 
+ * + * @constructor + * @param {Object} options - Properties to be added/overriden for FormData and CombinedStream + */ +function FormData(options) { + if (!(this instanceof FormData)) { + return new FormData(options); + } + + this._overheadLength = 0; + this._valueLength = 0; + this._valuesToMeasure = []; + + CombinedStream.call(this); + + options = options || {}; + for (var option in options) { + this[option] = options[option]; + } +} + +FormData.LINE_BREAK = '\r\n'; +FormData.DEFAULT_CONTENT_TYPE = 'application/octet-stream'; + +FormData.prototype.append = function(field, value, options) { + + options = options || {}; + + // allow filename as single option + if (typeof options == 'string') { + options = {filename: options}; + } + + var append = CombinedStream.prototype.append.bind(this); + + // all that streamy business can't handle numbers + if (typeof value == 'number') { + value = '' + value; + } + + // https://github.com/felixge/node-form-data/issues/38 + if (util.isArray(value)) { + // Please convert your array into string + // the way web server expects it + this._error(new Error('Arrays are not supported.')); + return; + } + + var header = this._multiPartHeader(field, value, options); + var footer = this._multiPartFooter(); + + append(header); + append(value); + append(footer); + + // pass along options.knownLength + this._trackLength(header, value, options); +}; + +FormData.prototype._trackLength = function(header, value, options) { + var valueLength = 0; + + // used w/ getLengthSync(), when length is known. + // e.g. for streaming directly from a remote server, + // w/ a known file a size, and not wanting to wait for + // incoming file to finish to get its size. + if (options.knownLength != null) { + valueLength += +options.knownLength; + } else if (Buffer.isBuffer(value)) { + valueLength = value.length; + } else if (typeof value === 'string') { + valueLength = Buffer.byteLength(value); + } + + this._valueLength += valueLength; + + // @check why add CRLF? does this account for custom/multiple CRLFs? + this._overheadLength += + Buffer.byteLength(header) + + FormData.LINE_BREAK.length; + + // empty or either doesn't have path or not an http response or not a stream + if (!value || ( !value.path && !(value.readable && value.hasOwnProperty('httpVersion')) && !(value instanceof Stream))) { + return; + } + + // no need to bother with the length + if (!options.knownLength) { + this._valuesToMeasure.push(value); + } +}; + +FormData.prototype._lengthRetriever = function(value, callback) { + + if (value.hasOwnProperty('fd')) { + + // take read range into a account + // `end` = Infinity –> read file till the end + // + // TODO: Looks like there is bug in Node fs.createReadStream + // it doesn't respect `end` options without `start` options + // Fix it when node fixes it. + // https://github.com/joyent/node/issues/7819 + if (value.end != undefined && value.end != Infinity && value.start != undefined) { + + // when end specified + // no need to calculate range + // inclusive, starts with 0 + callback(null, value.end + 1 - (value.start ? value.start : 0)); + + // not that fast snoopy + } else { + // still need to fetch file size from fs + fs.stat(value.path, function(err, stat) { + + var fileSize; + + if (err) { + callback(err); + return; + } + + // update final size based on the range options + fileSize = stat.size - (value.start ? 
value.start : 0); + callback(null, fileSize); + }); + } + + // or http response + } else if (value.hasOwnProperty('httpVersion')) { + callback(null, +value.headers['content-length']); + + // or request stream http://github.com/mikeal/request + } else if (value.hasOwnProperty('httpModule')) { + // wait till response come back + value.on('response', function(response) { + value.pause(); + callback(null, +response.headers['content-length']); + }); + value.resume(); + + // something else + } else { + callback('Unknown stream'); + } +}; + +FormData.prototype._multiPartHeader = function(field, value, options) { + // custom header specified (as string)? + // it becomes responsible for boundary + // (e.g. to handle extra CRLFs on .NET servers) + if (typeof options.header == 'string') { + return options.header; + } + + var contentDisposition = this._getContentDisposition(value, options); + var contentType = this._getContentType(value, options); + + var contents = ''; + var headers = { + // add custom disposition as third element or keep it two elements if not + 'Content-Disposition': ['form-data', 'name="' + field + '"'].concat(contentDisposition || []), + // if no content type. allow it to be empty array + 'Content-Type': [].concat(contentType || []) + }; + + // allow custom headers. + if (typeof options.header == 'object') { + populate(headers, options.header); + } + + var header; + for (var prop in headers) { + if (!headers.hasOwnProperty(prop)) continue; + header = headers[prop]; + + // skip nullish headers. + if (header == null) { + continue; + } + + // convert all headers to arrays. + if (!Array.isArray(header)) { + header = [header]; + } + + // add non-empty headers. + if (header.length) { + contents += prop + ': ' + header.join('; ') + FormData.LINE_BREAK; + } + } + + return '--' + this.getBoundary() + FormData.LINE_BREAK + contents + FormData.LINE_BREAK; +}; + +FormData.prototype._getContentDisposition = function(value, options) { + + var filename + , contentDisposition + ; + + if (typeof options.filepath === 'string') { + // custom filepath for relative paths + filename = path.normalize(options.filepath).replace(/\\/g, '/'); + } else if (options.filename || value.name || value.path) { + // custom filename take precedence + // formidable and the browser add a name property + // fs- and request- streams have path property + filename = path.basename(options.filename || value.name || value.path); + } else if (value.readable && value.hasOwnProperty('httpVersion')) { + // or try http response + filename = path.basename(value.client._httpMessage.path || ''); + } + + if (filename) { + contentDisposition = 'filename="' + filename + '"'; + } + + return contentDisposition; +}; + +FormData.prototype._getContentType = function(value, options) { + + // use custom content-type above all + var contentType = options.contentType; + + // or try `name` from formidable, browser + if (!contentType && value.name) { + contentType = mime.lookup(value.name); + } + + // or try `path` from fs-, request- streams + if (!contentType && value.path) { + contentType = mime.lookup(value.path); + } + + // or if it's http-reponse + if (!contentType && value.readable && value.hasOwnProperty('httpVersion')) { + contentType = value.headers['content-type']; + } + + // or guess it from the filepath or filename + if (!contentType && (options.filepath || options.filename)) { + contentType = mime.lookup(options.filepath || options.filename); + } + + // fallback to the default content type if `value` is not simple value + if 
(!contentType && typeof value == 'object') { + contentType = FormData.DEFAULT_CONTENT_TYPE; + } + + return contentType; +}; + +FormData.prototype._multiPartFooter = function() { + return function(next) { + var footer = FormData.LINE_BREAK; + + var lastPart = (this._streams.length === 0); + if (lastPart) { + footer += this._lastBoundary(); + } + + next(footer); + }.bind(this); +}; + +FormData.prototype._lastBoundary = function() { + return '--' + this.getBoundary() + '--' + FormData.LINE_BREAK; +}; + +FormData.prototype.getHeaders = function(userHeaders) { + var header; + var formHeaders = { + 'content-type': 'multipart/form-data; boundary=' + this.getBoundary() + }; + + for (header in userHeaders) { + if (userHeaders.hasOwnProperty(header)) { + formHeaders[header.toLowerCase()] = userHeaders[header]; + } + } + + return formHeaders; +}; + +FormData.prototype.setBoundary = function(boundary) { + this._boundary = boundary; +}; + +FormData.prototype.getBoundary = function() { + if (!this._boundary) { + this._generateBoundary(); + } + + return this._boundary; +}; + +FormData.prototype.getBuffer = function() { + var dataBuffer = new Buffer.alloc( 0 ); + var boundary = this.getBoundary(); + + // Create the form content. Add Line breaks to the end of data. + for (var i = 0, len = this._streams.length; i < len; i++) { + if (typeof this._streams[i] !== 'function') { + + // Add content to the buffer. + if(Buffer.isBuffer(this._streams[i])) { + dataBuffer = Buffer.concat( [dataBuffer, this._streams[i]]); + }else { + dataBuffer = Buffer.concat( [dataBuffer, Buffer.from(this._streams[i])]); + } + + // Add break after content. + if (typeof this._streams[i] !== 'string' || this._streams[i].substring( 2, boundary.length + 2 ) !== boundary) { + dataBuffer = Buffer.concat( [dataBuffer, Buffer.from(FormData.LINE_BREAK)] ); + } + } + } + + // Add the footer and return the Buffer object. + return Buffer.concat( [dataBuffer, Buffer.from(this._lastBoundary())] ); +}; + +FormData.prototype._generateBoundary = function() { + // This generates a 50 character boundary similar to those used by Firefox. + // They are optimized for boyer-moore parsing. + var boundary = '--------------------------'; + for (var i = 0; i < 24; i++) { + boundary += Math.floor(Math.random() * 10).toString(16); + } + + this._boundary = boundary; +}; + +// Note: getLengthSync DOESN'T calculate streams length +// As workaround one can calculate file size manually +// and add it as knownLength option +FormData.prototype.getLengthSync = function() { + var knownLength = this._overheadLength + this._valueLength; + + // Don't get confused, there are 3 "internal" streams for each keyval pair + // so it basically checks if there is any value added to the form + if (this._streams.length) { + knownLength += this._lastBoundary().length; + } + + // https://github.com/form-data/form-data/issues/40 + if (!this.hasKnownLength()) { + // Some async length retrievers are present + // therefore synchronous length calculation is false. 
+ // Please use getLength(callback) to get proper length + this._error(new Error('Cannot calculate proper length in synchronous way.')); + } + + return knownLength; +}; + +// Public API to check if length of added values is known +// https://github.com/form-data/form-data/issues/196 +// https://github.com/form-data/form-data/issues/262 +FormData.prototype.hasKnownLength = function() { + var hasKnownLength = true; + + if (this._valuesToMeasure.length) { + hasKnownLength = false; + } + + return hasKnownLength; +}; + +FormData.prototype.getLength = function(cb) { + var knownLength = this._overheadLength + this._valueLength; + + if (this._streams.length) { + knownLength += this._lastBoundary().length; + } + + if (!this._valuesToMeasure.length) { + process.nextTick(cb.bind(this, null, knownLength)); + return; + } + + asynckit.parallel(this._valuesToMeasure, this._lengthRetriever, function(err, values) { + if (err) { + cb(err); + return; + } + + values.forEach(function(length) { + knownLength += length; + }); + + cb(null, knownLength); + }); +}; + +FormData.prototype.submit = function(params, cb) { + var request + , options + , defaults = {method: 'post'} + ; + + // parse provided url if it's string + // or treat it as options object + if (typeof params == 'string') { + + params = parseUrl(params); + options = populate({ + port: params.port, + path: params.pathname, + host: params.hostname, + protocol: params.protocol + }, defaults); + + // use custom params + } else { + + options = populate(params, defaults); + // if no port provided use default one + if (!options.port) { + options.port = options.protocol == 'https:' ? 443 : 80; + } + } + + // put that good code in getHeaders to some use + options.headers = this.getHeaders(params.headers); + + // https if specified, fallback to http in any other case + if (options.protocol == 'https:') { + request = https.request(options); + } else { + request = http.request(options); + } + + // get content length and fire away + this.getLength(function(err, length) { + if (err && err !== 'Unknown stream') { + this._error(err); + return; + } + + // add content length + if (length) { + request.setHeader('Content-Length', length); + } + + this.pipe(request); + if (cb) { + var onResponse; + + var callback = function (error, responce) { + request.removeListener('error', callback); + request.removeListener('response', onResponse); + + return cb.call(this, error, responce); + }; + + onResponse = callback.bind(this, null); + + request.on('error', callback); + request.on('response', onResponse); + } + }.bind(this)); + + return request; +}; + +FormData.prototype._error = function(err) { + if (!this.error) { + this.error = err; + this.pause(); + this.emit('error', err); + } +}; + +FormData.prototype.toString = function () { + return '[object FormData]'; +}; diff --git a/node_modules/form-data/lib/populate.js b/node_modules/form-data/lib/populate.js new file mode 100644 index 0000000..4d35738 --- /dev/null +++ b/node_modules/form-data/lib/populate.js @@ -0,0 +1,10 @@ +// populates missing values +module.exports = function(dst, src) { + + Object.keys(src).forEach(function(prop) + { + dst[prop] = dst[prop] || src[prop]; + }); + + return dst; +}; diff --git a/node_modules/form-data/package.json b/node_modules/form-data/package.json new file mode 100644 index 0000000..0f20240 --- /dev/null +++ b/node_modules/form-data/package.json @@ -0,0 +1,68 @@ +{ + "author": "Felix Geisendörfer (http://debuggable.com/)", + "name": "form-data", + "description": "A library to create 
readable \"multipart/form-data\" streams. Can be used to submit forms and file uploads to other web applications.", + "version": "4.0.0", + "repository": { + "type": "git", + "url": "git://github.com/form-data/form-data.git" + }, + "main": "./lib/form_data", + "browser": "./lib/browser", + "typings": "./index.d.ts", + "scripts": { + "pretest": "rimraf coverage test/tmp", + "test": "istanbul cover test/run.js", + "posttest": "istanbul report lcov text", + "lint": "eslint lib/*.js test/*.js test/integration/*.js", + "report": "istanbul report lcov text", + "ci-lint": "is-node-modern 8 && npm run lint || is-node-not-modern 8", + "ci-test": "npm run test && npm run browser && npm run report", + "predebug": "rimraf coverage test/tmp", + "debug": "verbose=1 ./test/run.js", + "browser": "browserify -t browserify-istanbul test/run-browser.js | obake --coverage", + "check": "istanbul check-coverage coverage/coverage*.json", + "files": "pkgfiles --sort=name", + "get-version": "node -e \"console.log(require('./package.json').version)\"", + "update-readme": "sed -i.bak 's/\\/master\\.svg/\\/v'$(npm --silent run get-version)'.svg/g' README.md", + "restore-readme": "mv README.md.bak README.md", + "prepublish": "in-publish && npm run update-readme || not-in-publish", + "postpublish": "npm run restore-readme" + }, + "pre-commit": [ + "lint", + "ci-test", + "check" + ], + "engines": { + "node": ">= 6" + }, + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + }, + "devDependencies": { + "@types/node": "^12.0.10", + "browserify": "^13.1.1", + "browserify-istanbul": "^2.0.0", + "coveralls": "^3.0.4", + "cross-spawn": "^6.0.5", + "eslint": "^6.0.1", + "fake": "^0.2.2", + "far": "^0.0.7", + "formidable": "^1.0.17", + "in-publish": "^2.0.0", + "is-node-modern": "^1.0.0", + "istanbul": "^0.4.5", + "obake": "^0.1.2", + "puppeteer": "^1.19.0", + "pkgfiles": "^2.3.0", + "pre-commit": "^1.1.3", + "request": "^2.88.0", + "rimraf": "^2.7.1", + "tape": "^4.6.2", + "typescript": "^3.5.2" + }, + "license": "MIT" +} diff --git a/node_modules/formidable/LICENSE b/node_modules/formidable/LICENSE new file mode 100644 index 0000000..5e7ad11 --- /dev/null +++ b/node_modules/formidable/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2011-present Felix Geisendörfer, and contributors. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/node_modules/formidable/README.md b/node_modules/formidable/README.md new file mode 100644 index 0000000..de67598 --- /dev/null +++ b/node_modules/formidable/README.md @@ -0,0 +1,826 @@ +

+  <!-- npm formidable package logo -->

+ +# formidable [![npm version][npmv-img]][npmv-url] [![MIT license][license-img]][license-url] [![Libera Manifesto][libera-manifesto-img]][libera-manifesto-url] [![Twitter][twitter-img]][twitter-url] + +> A Node.js module for parsing form data, especially file uploads. + +[![Code style][codestyle-img]][codestyle-url] +[![codecoverage][codecov-img]][codecov-url] +[![linux build status][linux-build-img]][build-url] +[![windows build status][windows-build-img]][build-url] +[![macos build status][macos-build-img]][build-url] + +If you have any _how-to_ kind of questions, please read the [Contributing +Guide][contributing-url] and [Code of Conduct][code_of_conduct-url] +documents.
For bugs reports and feature requests, [please create an +issue][open-issue-url] or ping [@tunnckoCore / @3a1FcBx0](https://twitter.com/3a1FcBx0) +at Twitter. + +[![Conventional Commits][ccommits-img]][ccommits-url] +[![Minimum Required Nodejs][nodejs-img]][npmv-url] +[![Tidelift Subcsription][tidelift-img]][tidelift-url] +[![Buy me a Kofi][kofi-img]][kofi-url] +[![Renovate App Status][renovateapp-img]][renovateapp-url] +[![Make A Pull Request][prs-welcome-img]][prs-welcome-url] + +This project is [semantically versioned](https://semver.org) and available as +part of the [Tidelift Subscription][tidelift-url] for professional grade +assurances, enhanced support and security. +[Learn more.](https://tidelift.com/subscription/pkg/npm-formidable?utm_source=npm-formidable&utm_medium=referral&utm_campaign=enterprise) + +_The maintainers of `formidable` and thousands of other packages are working +with Tidelift to deliver commercial support and maintenance for the Open Source +dependencies you use to build your applications. Save time, reduce risk, and +improve code health, while paying the maintainers of the exact dependencies you +use._ + +[![][npm-weekly-img]][npmv-url] [![][npm-monthly-img]][npmv-url] +[![][npm-yearly-img]][npmv-url] [![][npm-alltime-img]][npmv-url] + +## Project Status: Maintained + +_Check [VERSION NOTES](https://github.com/node-formidable/formidable/blob/master/VERSION_NOTES.md) for more information on v1, v2, and v3 plans, NPM dist-tags and branches._ + +This module was initially developed by +[**@felixge**](https://github.com/felixge) for +[Transloadit](http://transloadit.com/), a service focused on uploading and +encoding images and videos. It has been battle-tested against hundreds of GBs of +file uploads from a large variety of clients and is considered production-ready +and is used in production for years. + +Currently, we are few maintainers trying to deal with it. :) More contributors +are always welcome! :heart: Jump on +[issue #412](https://github.com/felixge/node-formidable/issues/412) which is +closed, but if you are interested we can discuss it and add you after strict +rules, like enabling Two-Factor Auth in your npm and GitHub accounts. + +## Highlights + +- [Fast (~900-2500 mb/sec)](#benchmarks) & streaming multipart parser +- Automatically writing file uploads to disk (optional, see + [`options.fileWriteStreamHandler`](#options)) +- [Plugins API](#useplugin-plugin) - allowing custom parsers and plugins +- Low memory footprint +- Graceful error handling +- Very high test coverage + +## Install + +This project requires `Node.js >= 10.13`. Install it using +[yarn](https://yarnpkg.com) or [npm](https://npmjs.com).
_We highly +recommend to use Yarn when you think to contribute to this project._ + +This is a low-level package, and if you're using a high-level framework it _may_ +already be included. Check the examples below and the [examples/](https://github.com/node-formidable/formidable/tree/master/examples) folder. + +```sh +# v2 +npm install formidable +npm install formidable@latest +npm install formidable@v2 + +# or v3 +npm install formidable@v3 +``` + +_**Note:** In near future v3 will be published on the `latest` NPM dist-tag. Future not ready releases will continue to be published on `canary` dist-tag._ + + +## Examples + +For more examples look at the `examples/` directory. + +### with Node.js http module + +Parse an incoming file upload, with the +[Node.js's built-in `http` module](https://nodejs.org/api/http.html). + +```js +const http = require('http'); +const formidable = require('formidable'); + +const server = http.createServer((req, res) => { + if (req.url === '/api/upload' && req.method.toLowerCase() === 'post') { + // parse a file upload + const form = formidable({ multiples: true }); + + form.parse(req, (err, fields, files) => { + if (err) { + res.writeHead(err.httpCode || 400, { 'Content-Type': 'text/plain' }); + res.end(String(err)); + return; + } + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ fields, files }, null, 2)); + }); + + return; + } + + // show a file upload form + res.writeHead(200, { 'Content-Type': 'text/html' }); + res.end(` +

+      <h2>With Node.js <code>"http"</code> module</h2>
+      <form action="/api/upload" enctype="multipart/form-data" method="post">
+        <div>Text field title: <input type="text" name="title" /></div>
+        <div>File: <input type="file" name="multipleFiles" multiple="multiple" /></div>
+        <input type="submit" value="Upload" />
+      </form>
+ `); +}); + +server.listen(8080, () => { + console.log('Server listening on http://localhost:8080/ ...'); +}); +``` + +### with Express.js + +There are multiple variants to do this, but Formidable just need Node.js Request +stream, so something like the following example should work just fine, without +any third-party [Express.js](https://ghub.now.sh/express) middleware. + +Or try the +[examples/with-express.js](https://github.com/node-formidable/formidable/blob/master/examples/with-express.js) + +```js +const express = require('express'); +const formidable = require('formidable'); + +const app = express(); + +app.get('/', (req, res) => { + res.send(` +

+    <h2>With <code>"express"</code> npm package</h2>
+    <form action="/api/upload" enctype="multipart/form-data" method="post">
+      <div>Text field title: <input type="text" name="title" /></div>
+      <div>File: <input type="file" name="multipleFiles" multiple="multiple" /></div>
+      <input type="submit" value="Upload" />
+    </form>
+ `); +}); + +app.post('/api/upload', (req, res, next) => { + const form = formidable({ multiples: true }); + + form.parse(req, (err, fields, files) => { + if (err) { + next(err); + return; + } + res.json({ fields, files }); + }); +}); + +app.listen(3000, () => { + console.log('Server listening on http://localhost:3000 ...'); +}); +``` + +### with Koa and Formidable + +Of course, with [Koa v1, v2 or future v3](https://ghub.now.sh/koa) the things +are very similar. You can use `formidable` manually as shown below or through +the [koa-better-body](https://ghub.now.sh/koa-better-body) package which is +using `formidable` under the hood and support more features and different +request bodies, check its documentation for more info. + +_Note: this example is assuming Koa v2. Be aware that you should pass `ctx.req` +which is Node.js's Request, and **NOT** the `ctx.request` which is Koa's Request +object - there is a difference._ + +```js +const Koa = require('koa'); +const formidable = require('formidable'); + +const app = new Koa(); + +app.on('error', (err) => { + console.error('server error', err); +}); + +app.use(async (ctx, next) => { + if (ctx.url === '/api/upload' && ctx.method.toLowerCase() === 'post') { + const form = formidable({ multiples: true }); + + // not very elegant, but that's for now if you don't want to use `koa-better-body` + // or other middlewares. + await new Promise((resolve, reject) => { + form.parse(ctx.req, (err, fields, files) => { + if (err) { + reject(err); + return; + } + + ctx.set('Content-Type', 'application/json'); + ctx.status = 200; + ctx.state = { fields, files }; + ctx.body = JSON.stringify(ctx.state, null, 2); + resolve(); + }); + }); + await next(); + return; + } + + // show a file upload form + ctx.set('Content-Type', 'text/html'); + ctx.status = 200; + ctx.body = ` +

+    <h2>With <code>"koa"</code> npm package</h2>
+    <form action="/api/upload" enctype="multipart/form-data" method="post">
+      <div>Text field title: <input type="text" name="title" /></div>
+      <div>File: <input type="file" name="multipleFiles" multiple="multiple" /></div>
+      <input type="submit" value="Upload" />
+    </form>
+ `; +}); + +app.use((ctx) => { + console.log('The next middleware is called'); + console.log('Results:', ctx.state); +}); + +app.listen(3000, () => { + console.log('Server listening on http://localhost:3000 ...'); +}); +``` + +## Benchmarks + +The benchmark is quite old, from the old codebase. But maybe quite true though. +Previously the numbers was around ~500 mb/sec. Currently with moving to the new +Node.js Streams API it's faster. You can clearly see the differences between the +Node versions. + +_Note: a lot better benchmarking could and should be done in future._ + +Benchmarked on 8GB RAM, Xeon X3440 (2.53 GHz, 4 cores, 8 threads) + +``` +~/github/node-formidable master +❯ nve --parallel 8 10 12 13 node benchmark/bench-multipart-parser.js + + ⬢ Node 8 + +1261.08 mb/sec + + ⬢ Node 10 + +1113.04 mb/sec + + ⬢ Node 12 + +2107.00 mb/sec + + ⬢ Node 13 + +2566.42 mb/sec +``` + +![benchmark January 29th, 2020](./benchmark/2020-01-29_xeon-x3440.png) + +## API + +### Formidable / IncomingForm + +All shown are equivalent. + +_Please pass [`options`](#options) to the function/constructor, not by assigning +them to the instance `form`_ + +```js +const formidable = require('formidable'); +const form = formidable(options); + +// or +const { formidable } = require('formidable'); +const form = formidable(options); + +// or +const { IncomingForm } = require('formidable'); +const form = new IncomingForm(options); + +// or +const { Formidable } = require('formidable'); +const form = new Formidable(options); +``` + +### Options + +See it's defaults in [src/Formidable.js DEFAULT_OPTIONS](./src/Formidable.js) +(the `DEFAULT_OPTIONS` constant). + +- `options.encoding` **{string}** - default `'utf-8'`; sets encoding for + incoming form fields, +- `options.uploadDir` **{string}** - default `os.tmpdir()`; the directory for + placing file uploads in. You can move them later by using `fs.rename()`. +- `options.keepExtensions` **{boolean}** - default `false`; to include the + extensions of the original files or not +- `options.allowEmptyFiles` **{boolean}** - default `true`; allow upload empty + files +- `options.minFileSize` **{number}** - default `1` (1byte); the minium size of + uploaded file. +- `options.maxFileSize` **{number}** - default `200 * 1024 * 1024` (200mb); + limit the size of uploaded file. +- `options.maxFields` **{number}** - default `1000`; limit the number of fields, set 0 for unlimited +- `options.maxFieldsSize` **{number}** - default `20 * 1024 * 1024` (20mb); + limit the amount of memory all fields together (except files) can allocate in + bytes. +- `options.hashAlgorithm` **{string | false}** - default `false`; include checksums calculated + for incoming files, set this to some hash algorithm, see + [crypto.createHash](https://nodejs.org/api/crypto.html#crypto_crypto_createhash_algorithm_options) + for available algorithms +- `options.fileWriteStreamHandler` **{function}** - default `null`, which by + default writes to host machine file system every file parsed; The function + should return an instance of a + [Writable stream](https://nodejs.org/api/stream.html#stream_class_stream_writable) + that will receive the uploaded file data. With this option, you can have any + custom behavior regarding where the uploaded file data will be streamed for. + If you are looking to write the file uploaded in other types of cloud storages + (AWS S3, Azure blob storage, Google cloud storage) or private file storage, + this is the option you're looking for. 
When this option is defined the default + behavior of writing the file in the host machine file system is lost. +- `options.multiples` **{boolean}** - default `false`; when you call the + `.parse` method, the `files` argument (of the callback) will contain arrays of + files for inputs which submit multiple files using the HTML5 `multiple` + attribute. Also, the `fields` argument will contain arrays of values for + fields that have names ending with '[]'. +- `options.filename` **{function}** - default `undefined` Use it to control + newFilename. Must return a string. Will be joined with options.uploadDir. + +- `options.filter` **{function}** - default function that always returns true. + Use it to filter files before they are uploaded. Must return a boolean. + + +#### `options.filename` **{function}** function (name, ext, part, form) -> string + +_**Note:** If this size of combined fields, or size of some file is exceeded, an +`'error'` event is fired._ + +```js +// The amount of bytes received for this form so far. +form.bytesReceived; +``` + +```js +// The expected number of bytes in this form. +form.bytesExpected; +``` + +#### `options.filter` **{function}** function ({name, originalFilename, mimetype}) -> boolean + +**Note:** use an outside variable to cancel all uploads upon the first error + +```js +const options = { + filter: function ({name, originalFilename, mimetype}) { + // keep only images + return mimetype && mimetype.includes("image"); + } +}; +``` + + +### .parse(request, callback) + +Parses an incoming Node.js `request` containing form data. If `callback` is +provided, all fields and files are collected and passed to the callback. + +```js +const formidable = require('formidable'); + +const form = formidable({ multiples: true, uploadDir: __dirname }); + +form.parse(req, (err, fields, files) => { + console.log('fields:', fields); + console.log('files:', files); +}); +``` + +You may overwrite this method if you are interested in directly accessing the +multipart stream. Doing so will disable any `'field'` / `'file'` events +processing which would occur otherwise, making you fully responsible for +handling the processing. + +About `uploadDir`, given the following directory structure +``` +project-name +├── src +│ └── server.js +│ +└── uploads + └── image.jpg +``` + +`__dirname` would be the same directory as the source file itself (src) + + +```js + `${__dirname}/../uploads` +``` + +to put files in uploads. + +Omitting `__dirname` would make the path relative to the current working directory. This would be the same if server.js is launched from src but not project-name. + + +`null` will use default which is `os.tmpdir()` + +Note: If the directory does not exist, the uploaded files are __silently discarded__. To make sure it exists: + +```js +import {createNecessaryDirectoriesSync} from "filesac"; + + +const uploadPath = `${__dirname}/../uploads`; +createNecessaryDirectoriesSync(`${uploadPath}/x`); +``` + + +In the example below, we listen on couple of events and direct them to the +`data` listener, so you can do whatever you choose there, based on whether its +before the file been emitted, the header value, the header name, on field, on +file and etc. + +Or the other way could be to just override the `form.onPart` as it's shown a bit +later. 
+ +```js +form.once('error', console.error); + +form.on('fileBegin', (formname, file) => { + form.emit('data', { name: 'fileBegin', formname, value: file }); +}); + +form.on('file', (formname, file) => { + form.emit('data', { name: 'file', formname, value: file }); +}); + +form.on('field', (fieldName, fieldValue) => { + form.emit('data', { name: 'field', key: fieldName, value: fieldValue }); +}); + +form.once('end', () => { + console.log('Done!'); +}); + +// If you want to customize whatever you want... +form.on('data', ({ name, key, value, buffer, start, end, formname, ...more }) => { + if (name === 'partBegin') { + } + if (name === 'partData') { + } + if (name === 'headerField') { + } + if (name === 'headerValue') { + } + if (name === 'headerEnd') { + } + if (name === 'headersEnd') { + } + if (name === 'field') { + console.log('field name:', key); + console.log('field value:', value); + } + if (name === 'file') { + console.log('file:', formname, value); + } + if (name === 'fileBegin') { + console.log('fileBegin:', formname, value); + } +}); +``` + +### .use(plugin: Plugin) + +A method that allows you to extend the Formidable library. By default we include +4 plugins, which esentially are adapters to plug the different built-in parsers. + +**The plugins added by this method are always enabled.** + +_See [src/plugins/](./src/plugins/) for more detailed look on default plugins._ + +The `plugin` param has such signature: + +```typescript +function(formidable: Formidable, options: Options): void; +``` + +The architecture is simple. The `plugin` is a function that is passed with the +Formidable instance (the `form` across the README examples) and the options. + +**Note:** the plugin function's `this` context is also the same instance. + +```js +const formidable = require('formidable'); + +const form = formidable({ keepExtensions: true }); + +form.use((self, options) => { + // self === this === form + console.log('woohoo, custom plugin'); + // do your stuff; check `src/plugins` for inspiration +}); + +form.parse(req, (error, fields, files) => { + console.log('done!'); +}); +``` + +**Important to note**, is that inside plugin `this.options`, `self.options` and +`options` MAY or MAY NOT be the same. General best practice is to always use the +`this`, so you can later test your plugin independently and more easily. + +If you want to disable some parsing capabilities of Formidable, you can disable +the plugin which corresponds to the parser. For example, if you want to disable +multipart parsing (so the [src/parsers/Multipart.js](./src/parsers/Multipart.js) +which is used in [src/plugins/multipart.js](./src/plugins/multipart.js)), then +you can remove it from the `options.enabledPlugins`, like so + +```js +const { Formidable } = require('formidable'); + +const form = new Formidable({ + hashAlgorithm: 'sha1', + enabledPlugins: ['octetstream', 'querystring', 'json'], +}); +``` + +**Be aware** that the order _MAY_ be important too. The names corresponds 1:1 to +files in [src/plugins/](./src/plugins) folder. + +Pull requests for new built-in plugins MAY be accepted - for example, more +advanced querystring parser. Add your plugin as a new file in `src/plugins/` +folder (lowercased) and follow how the other plugins are made. + +### form.onPart + +If you want to use Formidable to only handle certain parts for you, you can do +something similar. Or see +[#387](https://github.com/node-formidable/node-formidable/issues/387) for +inspiration, you can for example validate the mime-type. 
+ +```js +const form = formidable(); + +form.onPart = (part) => { + part.on('data', (buffer) => { + // do whatever you want here + }); +}; +``` + +For example, force Formidable to be used only on non-file "parts" (i.e., html +fields) + +```js +const form = formidable(); + +form.onPart = function (part) { + // let formidable handle only non-file parts + if (part.originalFilename === '' || !part.mimetype) { + // used internally, please do not override! + form._handlePart(part); + } +}; +``` + +### File + +```ts +export interface File { + // The size of the uploaded file in bytes. + // If the file is still being uploaded (see `'fileBegin'` event), + // this property says how many bytes of the file have been written to disk yet. + file.size: number; + + // The path this file is being written to. You can modify this in the `'fileBegin'` event in + // case you are unhappy with the way formidable generates a temporary path for your files. + file.filepath: string; + + // The name this file had according to the uploading client. + file.originalFilename: string | null; + + // calculated based on options provided + file.newFilename: string | null; + + // The mime type of this file, according to the uploading client. + file.mimetype: string | null; + + // A Date object (or `null`) containing the time this file was last written to. + // Mostly here for compatibility with the [W3C File API Draft](http://dev.w3.org/2006/webapi/FileAPI/). + file.mtime: Date | null; + + file.hashAlgorithm: false | |'sha1' | 'md5' | 'sha256' + // If `options.hashAlgorithm` calculation was set, you can read the hex digest out of this var (at the end it will be a string) + file.hash: string | object | null; +} +``` + +#### file.toJSON() + +This method returns a JSON-representation of the file, allowing you to +`JSON.stringify()` the file which is useful for logging and responding to +requests. + +### Events + +#### `'progress'` + +Emitted after each incoming chunk of data that has been parsed. Can be used to +roll your own progress bar. + +```js +form.on('progress', (bytesReceived, bytesExpected) => {}); +``` + +#### `'field'` + +Emitted whenever a field / value pair has been received. + +```js +form.on('field', (name, value) => {}); +``` + +#### `'fileBegin'` + +Emitted whenever a new file is detected in the upload stream. Use this event if +you want to stream the file to somewhere else while buffering the upload on the +file system. + +```js +form.on('fileBegin', (formName, file) => { + // accessible here + // formName the name in the form () or http filename for octetstream + // file.originalFilename http filename or null if there was a parsing error + // file.newFilename generated hexoid or what options.filename returned + // file.filepath default pathnme as per options.uploadDir and options.filename + // file.filepath = CUSTOM_PATH // to change the final path +}); +``` + +#### `'file'` + +Emitted whenever a field / file pair has been received. `file` is an instance of +`File`. + +```js +form.on('file', (formname, file) => { + // same as fileBegin, except + // it is too late to change file.filepath + // file.hash is available if options.hash was used +}); +``` + +#### `'error'` + +Emitted when there is an error processing the incoming form. A request that +experiences an error is automatically paused, you will have to manually call +`request.resume()` if you want the request to continue firing `'data'` events. + +May have `error.httpCode` and `error.code` attached. 
+ +```js +form.on('error', (err) => {}); +``` + +#### `'aborted'` + +Emitted when the request was aborted by the user. Right now this can be due to a +'timeout' or 'close' event on the socket. After this event is emitted, an +`error` event will follow. In the future there will be a separate 'timeout' +event (needs a change in the node core). + +```js +form.on('aborted', () => {}); +``` + +#### `'end'` + +Emitted when the entire request has been received, and all contained files have +finished flushing to disk. This is a great place for you to send your response. + +```js +form.on('end', () => {}); +``` + +## Changelog + +[./CHANGELOG.md](./CHANGELOG.md) + +## Ports & Credits + +- [multipart-parser](http://github.com/FooBarWidget/multipart-parser): a C++ + parser based on formidable +- [Ryan Dahl](http://twitter.com/ryah) for his work on + [http-parser](http://github.com/ry/http-parser) which heavily inspired the + initial `multipart_parser.js`. + +## Contributing + +If the documentation is unclear or has a typo, please click on the page's `Edit` +button (pencil icon) and suggest a correction. If you would like to help us fix +a bug or add a new feature, please check our [Contributing +Guide][contributing-url]. Pull requests are welcome! + +Thanks goes to these wonderful people +([emoji key](https://allcontributors.org/docs/en/emoji-key)): + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

+- Felix Geisendörfer: 💻 🎨 🤔 📖
+- Charlike Mike Reagent: 🐛 🚇 🎨 💻 📖 💡 🤔 🚧 ⚠️
+- Kedar: 💻 ⚠️ 💬 🐛
+- Walle Cyril: 💬 🐛 💻 💵 🤔 🚧
+- Xargs: 💬 🐛 💻 🚧
+- Amit-A: 💬 🐛 💻
+- Charmander: 💬 🐛 💻 🤔 🚧
+- Dylan Piercey: 🤔
+- Adam Dobrawy: 🐛 📖
+- amitrohatgi: 🤔
+- Jesse Feng: 🐛
+- Nathanael Demacon: 💬 💻 👀
+- MunMunMiao: 🐛
+- Gabriel Petrovay: 🐛 💻
+- Philip Woods: 💻 🤔
+- Dmitry Ivonin: 📖
+- Claudio Poli: 💻
+ + + + + + +From a [Felix blog post](https://felixge.de/2013/03/11/the-pull-request-hack/): + +- [Sven Lito](https://github.com/svnlto) for fixing bugs and merging patches +- [egirshov](https://github.com/egirshov) for contributing many improvements to the node-formidable multipart parser +- [Andrew Kelley](https://github.com/superjoe30) for also helping with fixing bugs and making improvements +- [Mike Frey](https://github.com/mikefrey) for contributing JSON support + +## License + +Formidable is licensed under the [MIT License][license-url]. + + + + +[codestyle-url]: https://github.com/airbnb/javascript +[codestyle-img]: https://badgen.net/badge/code%20style/airbnb%20%2B%20prettier/ff5a5f?icon=airbnb&cache=300 +[codecov-url]: https://codecov.io/gh/node-formidable/formidable +[codecov-img]: https://badgen.net/codecov/c/github/node-formidable/formidable/master?icon=codecov +[npmv-canary-img]: https://badgen.net/npm/v/formidable/canary?icon=npm +[npmv-dev-img]: https://badgen.net/npm/v/formidable/dev?icon=npm +[npmv-img]: https://badgen.net/npm/v/formidable?icon=npm +[npmv-url]: https://npmjs.com/package/formidable +[license-img]: https://badgen.net/npm/license/formidable +[license-url]: https://github.com/node-formidable/formidable/blob/master/LICENSE +[chat-img]: https://badgen.net/badge/chat/on%20gitter/46BC99?icon=gitter +[chat-url]: https://gitter.im/node-formidable/Lobby +[libera-manifesto-url]: https://liberamanifesto.com +[libera-manifesto-img]: https://badgen.net/badge/libera/manifesto/grey +[renovateapp-url]: https://renovatebot.com +[renovateapp-img]: https://badgen.net/badge/renovate/enabled/green?cache=300 +[prs-welcome-img]: https://badgen.net/badge/PRs/welcome/green?cache=300 +[prs-welcome-url]: http://makeapullrequest.com +[twitter-url]: https://twitter.com/3a1fcBx0 +[twitter-img]: https://badgen.net/twitter/follow/3a1fcBx0?icon=twitter&color=1da1f2&cache=300 + +[npm-weekly-img]: https://badgen.net/npm/dw/formidable?icon=npm&cache=300 +[npm-monthly-img]: https://badgen.net/npm/dm/formidable?icon=npm&cache=300 +[npm-yearly-img]: https://badgen.net/npm/dy/formidable?icon=npm&cache=300 +[npm-alltime-img]: https://badgen.net/npm/dt/formidable?icon=npm&cache=300&label=total%20downloads + +[nodejs-img]: https://badgen.net/badge/node/>=%2010.13/green?cache=300 + +[ccommits-url]: https://conventionalcommits.org/ +[ccommits-img]: https://badgen.net/badge/conventional%20commits/v1.0.0/green?cache=300 + +[contributing-url]: https://github.com/node-formidable/.github/blob/master/CONTRIBUTING.md +[code_of_conduct-url]: https://github.com/node-formidable/.github/blob/master/CODE_OF_CONDUCT.md + +[open-issue-url]: https://github.com/node-formidable/formidable/issues/new + +[tidelift-url]: https://tidelift.com/subscription/pkg/npm-formidable?utm_source=npm-formidable&utm_medium=referral&utm_campaign=enterprise +[tidelift-img]: https://badgen.net/badge/tidelift/subscription/4B5168?labelColor=F6914D + +[kofi-url]: https://ko-fi.com/tunnckoCore/commissions +[kofi-img]: https://badgen.net/badge/ko-fi/support/29abe0c2?cache=300&icon=https://rawcdn.githack.com/tunnckoCore/badgen-icons/f8264c6414e0bec449dd86f2241d50a9b89a1203/icons/kofi.svg + +[linux-build-img]: https://badgen.net/github/checks/node-formidable/formidable/master/ubuntu?cache=300&label=linux%20build&icon=github +[macos-build-img]: https://badgen.net/github/checks/node-formidable/formidable/master/macos?cache=300&label=macos%20build&icon=github +[windows-build-img]: 
https://badgen.net/github/checks/node-formidable/formidable/master/windows?cache=300&label=windows%20build&icon=github +[build-url]: https://github.com/node-formidable/formidable/actions?query=workflow%3Anodejs + diff --git a/node_modules/formidable/node_modules/qs/.editorconfig b/node_modules/formidable/node_modules/qs/.editorconfig new file mode 100644 index 0000000..b442a9f --- /dev/null +++ b/node_modules/formidable/node_modules/qs/.editorconfig @@ -0,0 +1,33 @@ +root = true + +[*] +indent_style = space +indent_size = 4 +end_of_line = lf +charset = utf-8 +trim_trailing_whitespace = true +insert_final_newline = true +max_line_length = 160 + +[test/*] +max_line_length = off + +[LICENSE.md] +indent_size = off + +[*.md] +max_line_length = off + +[*.json] +max_line_length = off + +[Makefile] +max_line_length = off + +[CHANGELOG.md] +indent_style = space +indent_size = 2 + +[LICENSE] +indent_size = 2 +max_line_length = off diff --git a/node_modules/formidable/node_modules/qs/.eslintignore b/node_modules/formidable/node_modules/qs/.eslintignore new file mode 100644 index 0000000..1521c8b --- /dev/null +++ b/node_modules/formidable/node_modules/qs/.eslintignore @@ -0,0 +1 @@ +dist diff --git a/node_modules/formidable/node_modules/qs/.eslintrc b/node_modules/formidable/node_modules/qs/.eslintrc new file mode 100644 index 0000000..e3bde89 --- /dev/null +++ b/node_modules/formidable/node_modules/qs/.eslintrc @@ -0,0 +1,21 @@ +{ + "root": true, + + "extends": "@ljharb", + + "rules": { + "complexity": 0, + "consistent-return": 1, + "func-name-matching": 0, + "id-length": [2, { "min": 1, "max": 25, "properties": "never" }], + "indent": [2, 4], + "max-lines-per-function": [2, { "max": 150 }], + "max-params": [2, 14], + "max-statements": [2, 52], + "multiline-comment-style": 0, + "no-continue": 1, + "no-magic-numbers": 0, + "no-restricted-syntax": [2, "BreakStatement", "DebuggerStatement", "ForInStatement", "LabeledStatement", "WithStatement"], + "operator-linebreak": [2, "before"], + } +} diff --git a/node_modules/formidable/node_modules/qs/.github/FUNDING.yml b/node_modules/formidable/node_modules/qs/.github/FUNDING.yml new file mode 100644 index 0000000..0355f4f --- /dev/null +++ b/node_modules/formidable/node_modules/qs/.github/FUNDING.yml @@ -0,0 +1,12 @@ +# These are supported funding model platforms + +github: [ljharb] +patreon: # Replace with a single Patreon username +open_collective: # Replace with a single Open Collective username +ko_fi: # Replace with a single Ko-fi username +tidelift: npm/qs +community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry +liberapay: # Replace with a single Liberapay username +issuehunt: # Replace with a single IssueHunt username +otechie: # Replace with a single Otechie username +custom: # Replace with a single custom sponsorship URL diff --git a/node_modules/formidable/node_modules/qs/.github/workflows/rebase.yml b/node_modules/formidable/node_modules/qs/.github/workflows/rebase.yml new file mode 100644 index 0000000..436cb79 --- /dev/null +++ b/node_modules/formidable/node_modules/qs/.github/workflows/rebase.yml @@ -0,0 +1,15 @@ +name: Automatic Rebase + +on: [pull_request] + +jobs: + _: + name: "Automatic Rebase" + + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v1 + - uses: ljharb/rebase@master + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/node_modules/formidable/node_modules/qs/CHANGELOG.md b/node_modules/formidable/node_modules/qs/CHANGELOG.md new file mode 100644 index 0000000..f09f694 
--- /dev/null +++ b/node_modules/formidable/node_modules/qs/CHANGELOG.md @@ -0,0 +1,339 @@ +## **6.9.3** +- [Fix] proper comma parsing of URL-encoded commas (#361) +- [Fix] parses comma delimited array while having percent-encoded comma treated as normal text (#336) + +## **6.9.2** +- [Fix] `parse`: Fix parsing array from object with `comma` true (#359) +- [Fix] `parse`: throw a TypeError instead of an Error for bad charset (#349) +- [meta] ignore eclint transitive audit warning +- [meta] fix indentation in package.json +- [meta] add tidelift marketing copy +- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `object-inspect`, `has-symbols`, `tape`, `mkdirp`, `iconv-lite` +- [actions] add automatic rebasing / merge commit blocking + +## **6.9.1** +- [Fix] `parse`: with comma true, handle field that holds an array of arrays (#335) +- [Fix] `parse`: with comma true, do not split non-string values (#334) +- [meta] add `funding` field +- [Dev Deps] update `eslint`, `@ljharb/eslint-config` +- [Tests] use shared travis-ci config + +## **6.9.0** +- [New] `parse`/`stringify`: Pass extra key/value argument to `decoder` (#333) +- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `evalmd` +- [Tests] `parse`: add passing `arrayFormat` tests +- [Tests] add `posttest` using `npx aud` to run `npm audit` without a lockfile +- [Tests] up to `node` `v12.10`, `v11.15`, `v10.16`, `v8.16` +- [Tests] `Buffer.from` in node v5.0-v5.9 and v4.0-v4.4 requires a TypedArray + +## **6.8.2** +- [Fix] proper comma parsing of URL-encoded commas (#361) +- [Fix] parses comma delimited array while having percent-encoded comma treated as normal text (#336) + +## **6.8.1** +- [Fix] `parse`: Fix parsing array from object with `comma` true (#359) +- [Fix] `parse`: throw a TypeError instead of an Error for bad charset (#349) +- [Fix] `parse`: with comma true, handle field that holds an array of arrays (#335) +- [fix] `parse`: with comma true, do not split non-string values (#334) +- [meta] add tidelift marketing copy +- [meta] add `funding` field +- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `tape`, `safe-publish-latest`, `evalmd`, `has-symbols`, `iconv-lite`, `mkdirp`, `object-inspect` +- [Tests] `parse`: add passing `arrayFormat` tests +- [Tests] use shared travis-ci configs +- [Tests] `Buffer.from` in node v5.0-v5.9 and v4.0-v4.4 requires a TypedArray +- [actions] add automatic rebasing / merge commit blocking + +## **6.8.0** +- [New] add `depth=false` to preserve the original key; [Fix] `depth=0` should preserve the original key (#326) +- [New] [Fix] stringify symbols and bigints +- [Fix] ensure node 0.12 can stringify Symbols +- [Fix] fix for an impossible situation: when the formatter is called with a non-string value +- [Refactor] `formats`: tiny bit of cleanup. 
+- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `browserify`, `safe-publish-latest`, `iconv-lite`, `tape` +- [Tests] add tests for `depth=0` and `depth=false` behavior, both current and intuitive/intended (#326) +- [Tests] use `eclint` instead of `editorconfig-tools` +- [docs] readme: add security note +- [meta] add github sponsorship +- [meta] add FUNDING.yml +- [meta] Clean up license text so it’s properly detected as BSD-3-Clause + +## **6.7.2** +- [Fix] proper comma parsing of URL-encoded commas (#361) +- [Fix] parses comma delimited array while having percent-encoded comma treated as normal text (#336) + +## **6.7.1** +- [Fix] `parse`: Fix parsing array from object with `comma` true (#359) +- [Fix] `parse`: with comma true, handle field that holds an array of arrays (#335) +- [fix] `parse`: with comma true, do not split non-string values (#334) +- [Fix] `parse`: throw a TypeError instead of an Error for bad charset (#349) +- [Fix] fix for an impossible situation: when the formatter is called with a non-string value +- [Refactor] `formats`: tiny bit of cleanup. +- readme: add security note +- [meta] add tidelift marketing copy +- [meta] add `funding` field +- [meta] add FUNDING.yml +- [meta] Clean up license text so it’s properly detected as BSD-3-Clause +- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `tape`, `safe-publish-latest`, `evalmd`, `iconv-lite`, `mkdirp`, `object-inspect`, `browserify` +- [Tests] `parse`: add passing `arrayFormat` tests +- [Tests] use shared travis-ci configs +- [Tests] `Buffer.from` in node v5.0-v5.9 and v4.0-v4.4 requires a TypedArray +- [Tests] add tests for `depth=0` and `depth=false` behavior, both current and intuitive/intended +- [Tests] use `eclint` instead of `editorconfig-tools` +- [actions] add automatic rebasing / merge commit blocking + +## **6.7.0** +- [New] `stringify`/`parse`: add `comma` as an `arrayFormat` option (#276, #219) +- [Fix] correctly parse nested arrays (#212) +- [Fix] `utils.merge`: avoid a crash with a null target and a truthy non-array source, also with an array source +- [Robustness] `stringify`: cache `Object.prototype.hasOwnProperty` +- [Refactor] `utils`: `isBuffer`: small tweak; add tests +- [Refactor] use cached `Array.isArray` +- [Refactor] `parse`/`stringify`: make a function to normalize the options +- [Refactor] `utils`: reduce observable [[Get]]s +- [Refactor] `stringify`/`utils`: cache `Array.isArray` +- [Tests] always use `String(x)` over `x.toString()` +- [Tests] fix Buffer tests to work in node < 4.5 and node < 5.10 +- [Tests] temporarily allow coverage to fail + +## **6.6.0** +- [New] Add support for iso-8859-1, utf8 "sentinel" and numeric entities (#268) +- [New] move two-value combine to a `utils` function (#189) +- [Fix] `stringify`: fix a crash with `strictNullHandling` and a custom `filter`/`serializeDate` (#279) +- [Fix] when `parseArrays` is false, properly handle keys ending in `[]` (#260) +- [Fix] `stringify`: do not crash in an obscure combo of `interpretNumericEntities`, a bad custom `decoder`, & `iso-8859-1` +- [Fix] `utils`: `merge`: fix crash when `source` is a truthy primitive & no options are provided +- [refactor] `stringify`: Avoid arr = arr.concat(...), push to the existing instance (#269) +- [Refactor] `parse`: only need to reassign the var once +- [Refactor] `parse`/`stringify`: clean up `charset` options checking; fix defaults +- [Refactor] add missing defaults +- [Refactor] `parse`: one less `concat` call +- [Refactor] `utils`: `compactQueue`: make it explicitly 
side-effecting +- [Dev Deps] update `browserify`, `eslint`, `@ljharb/eslint-config`, `iconv-lite`, `safe-publish-latest`, `tape` +- [Tests] up to `node` `v10.10`, `v9.11`, `v8.12`, `v6.14`, `v4.9`; pin included builds to LTS + +## **6.5.2** +- [Fix] use `safer-buffer` instead of `Buffer` constructor +- [Refactor] utils: `module.exports` one thing, instead of mutating `exports` (#230) +- [Dev Deps] update `browserify`, `eslint`, `iconv-lite`, `safer-buffer`, `tape`, `browserify` + +## **6.5.1** +- [Fix] Fix parsing & compacting very deep objects (#224) +- [Refactor] name utils functions +- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `tape` +- [Tests] up to `node` `v8.4`; use `nvm install-latest-npm` so newer npm doesn’t break older node +- [Tests] Use precise dist for Node.js 0.6 runtime (#225) +- [Tests] make 0.6 required, now that it’s passing +- [Tests] on `node` `v8.2`; fix npm on node 0.6 + +## **6.5.0** +- [New] add `utils.assign` +- [New] pass default encoder/decoder to custom encoder/decoder functions (#206) +- [New] `parse`/`stringify`: add `ignoreQueryPrefix`/`addQueryPrefix` options, respectively (#213) +- [Fix] Handle stringifying empty objects with addQueryPrefix (#217) +- [Fix] do not mutate `options` argument (#207) +- [Refactor] `parse`: cache index to reuse in else statement (#182) +- [Docs] add various badges to readme (#208) +- [Dev Deps] update `eslint`, `browserify`, `iconv-lite`, `tape` +- [Tests] up to `node` `v8.1`, `v7.10`, `v6.11`; npm v4.6 breaks on node < v1; npm v5+ breaks on node < v4 +- [Tests] add `editorconfig-tools` + +## **6.4.0** +- [New] `qs.stringify`: add `encodeValuesOnly` option +- [Fix] follow `allowPrototypes` option during merge (#201, #201) +- [Fix] support keys starting with brackets (#202, #200) +- [Fix] chmod a-x +- [Dev Deps] update `eslint` +- [Tests] up to `node` `v7.7`, `v6.10`,` v4.8`; disable osx builds since they block linux builds +- [eslint] reduce warnings + +## **6.3.2** +- [Fix] follow `allowPrototypes` option during merge (#201, #200) +- [Dev Deps] update `eslint` +- [Fix] chmod a-x +- [Fix] support keys starting with brackets (#202, #200) +- [Tests] up to `node` `v7.7`, `v6.10`,` v4.8`; disable osx builds since they block linux builds + +## **6.3.1** +- [Fix] ensure that `allowPrototypes: false` does not ever shadow Object.prototype properties (thanks, @snyk!) 
+- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `browserify`, `iconv-lite`, `qs-iconv`, `tape` +- [Tests] on all node minors; improve test matrix +- [Docs] document stringify option `allowDots` (#195) +- [Docs] add empty object and array values example (#195) +- [Docs] Fix minor inconsistency/typo (#192) +- [Docs] document stringify option `sort` (#191) +- [Refactor] `stringify`: throw faster with an invalid encoder +- [Refactor] remove unnecessary escapes (#184) +- Remove contributing.md, since `qs` is no longer part of `hapi` (#183) + +## **6.3.0** +- [New] Add support for RFC 1738 (#174, #173) +- [New] `stringify`: Add `serializeDate` option to customize Date serialization (#159) +- [Fix] ensure `utils.merge` handles merging two arrays +- [Refactor] only constructors should be capitalized +- [Refactor] capitalized var names are for constructors only +- [Refactor] avoid using a sparse array +- [Robustness] `formats`: cache `String#replace` +- [Dev Deps] update `browserify`, `eslint`, `@ljharb/eslint-config`; add `safe-publish-latest` +- [Tests] up to `node` `v6.8`, `v4.6`; improve test matrix +- [Tests] flesh out arrayLimit/arrayFormat tests (#107) +- [Tests] skip Object.create tests when null objects are not available +- [Tests] Turn on eslint for test files (#175) + +## **6.2.3** +- [Fix] follow `allowPrototypes` option during merge (#201, #200) +- [Fix] chmod a-x +- [Fix] support keys starting with brackets (#202, #200) +- [Tests] up to `node` `v7.7`, `v6.10`,` v4.8`; disable osx builds since they block linux builds + +## **6.2.2** +- [Fix] ensure that `allowPrototypes: false` does not ever shadow Object.prototype properties + +## **6.2.1** +- [Fix] ensure `key[]=x&key[]&key[]=y` results in 3, not 2, values +- [Refactor] Be explicit and use `Object.prototype.hasOwnProperty.call` +- [Tests] remove `parallelshell` since it does not reliably report failures +- [Tests] up to `node` `v6.3`, `v5.12` +- [Dev Deps] update `tape`, `eslint`, `@ljharb/eslint-config`, `qs-iconv` + +## [**6.2.0**](https://github.com/ljharb/qs/issues?milestone=36&state=closed) +- [New] pass Buffers to the encoder/decoder directly (#161) +- [New] add "encoder" and "decoder" options, for custom param encoding/decoding (#160) +- [Fix] fix compacting of nested sparse arrays (#150) + +## **6.1.2 +- [Fix] follow `allowPrototypes` option during merge (#201, #200) +- [Fix] chmod a-x +- [Fix] support keys starting with brackets (#202, #200) +- [Tests] up to `node` `v7.7`, `v6.10`,` v4.8`; disable osx builds since they block linux builds + +## **6.1.1** +- [Fix] ensure that `allowPrototypes: false` does not ever shadow Object.prototype properties + +## [**6.1.0**](https://github.com/ljharb/qs/issues?milestone=35&state=closed) +- [New] allowDots option for `stringify` (#151) +- [Fix] "sort" option should work at a depth of 3 or more (#151) +- [Fix] Restore `dist` directory; will be removed in v7 (#148) + +## **6.0.4** +- [Fix] follow `allowPrototypes` option during merge (#201, #200) +- [Fix] chmod a-x +- [Fix] support keys starting with brackets (#202, #200) +- [Tests] up to `node` `v7.7`, `v6.10`,` v4.8`; disable osx builds since they block linux builds + +## **6.0.3** +- [Fix] ensure that `allowPrototypes: false` does not ever shadow Object.prototype properties +- [Fix] Restore `dist` directory; will be removed in v7 (#148) + +## [**6.0.2**](https://github.com/ljharb/qs/issues?milestone=33&state=closed) +- Revert ES6 requirement and restore support for node down to v0.8. 
+ +## [**6.0.1**](https://github.com/ljharb/qs/issues?milestone=32&state=closed) +- [**#127**](https://github.com/ljharb/qs/pull/127) Fix engines definition in package.json + +## [**6.0.0**](https://github.com/ljharb/qs/issues?milestone=31&state=closed) +- [**#124**](https://github.com/ljharb/qs/issues/124) Use ES6 and drop support for node < v4 + +## **5.2.1** +- [Fix] ensure `key[]=x&key[]&key[]=y` results in 3, not 2, values + +## [**5.2.0**](https://github.com/ljharb/qs/issues?milestone=30&state=closed) +- [**#64**](https://github.com/ljharb/qs/issues/64) Add option to sort object keys in the query string + +## [**5.1.0**](https://github.com/ljharb/qs/issues?milestone=29&state=closed) +- [**#117**](https://github.com/ljharb/qs/issues/117) make URI encoding stringified results optional +- [**#106**](https://github.com/ljharb/qs/issues/106) Add flag `skipNulls` to optionally skip null values in stringify + +## [**5.0.0**](https://github.com/ljharb/qs/issues?milestone=28&state=closed) +- [**#114**](https://github.com/ljharb/qs/issues/114) default allowDots to false +- [**#100**](https://github.com/ljharb/qs/issues/100) include dist to npm + +## [**4.0.0**](https://github.com/ljharb/qs/issues?milestone=26&state=closed) +- [**#98**](https://github.com/ljharb/qs/issues/98) make returning plain objects and allowing prototype overwriting properties optional + +## [**3.1.0**](https://github.com/ljharb/qs/issues?milestone=24&state=closed) +- [**#89**](https://github.com/ljharb/qs/issues/89) Add option to disable "Transform dot notation to bracket notation" + +## [**3.0.0**](https://github.com/ljharb/qs/issues?milestone=23&state=closed) +- [**#80**](https://github.com/ljharb/qs/issues/80) qs.parse silently drops properties +- [**#77**](https://github.com/ljharb/qs/issues/77) Perf boost +- [**#60**](https://github.com/ljharb/qs/issues/60) Add explicit option to disable array parsing +- [**#74**](https://github.com/ljharb/qs/issues/74) Bad parse when turning array into object +- [**#81**](https://github.com/ljharb/qs/issues/81) Add a `filter` option +- [**#68**](https://github.com/ljharb/qs/issues/68) Fixed issue with recursion and passing strings into objects. +- [**#66**](https://github.com/ljharb/qs/issues/66) Add mixed array and object dot notation support Closes: #47 +- [**#76**](https://github.com/ljharb/qs/issues/76) RFC 3986 +- [**#85**](https://github.com/ljharb/qs/issues/85) No equal sign +- [**#84**](https://github.com/ljharb/qs/issues/84) update license attribute + +## [**2.4.1**](https://github.com/ljharb/qs/issues?milestone=20&state=closed) +- [**#73**](https://github.com/ljharb/qs/issues/73) Property 'hasOwnProperty' of object # is not a function + +## [**2.4.0**](https://github.com/ljharb/qs/issues?milestone=19&state=closed) +- [**#70**](https://github.com/ljharb/qs/issues/70) Add arrayFormat option + +## [**2.3.3**](https://github.com/ljharb/qs/issues?milestone=18&state=closed) +- [**#59**](https://github.com/ljharb/qs/issues/59) make sure array indexes are >= 0, closes #57 +- [**#58**](https://github.com/ljharb/qs/issues/58) make qs usable for browser loader + +## [**2.3.2**](https://github.com/ljharb/qs/issues?milestone=17&state=closed) +- [**#55**](https://github.com/ljharb/qs/issues/55) allow merging a string into an object + +## [**2.3.1**](https://github.com/ljharb/qs/issues?milestone=16&state=closed) +- [**#52**](https://github.com/ljharb/qs/issues/52) Return "undefined" and "false" instead of throwing "TypeError". 
+ +## [**2.3.0**](https://github.com/ljharb/qs/issues?milestone=15&state=closed) +- [**#50**](https://github.com/ljharb/qs/issues/50) add option to omit array indices, closes #46 + +## [**2.2.5**](https://github.com/ljharb/qs/issues?milestone=14&state=closed) +- [**#39**](https://github.com/ljharb/qs/issues/39) Is there an alternative to Buffer.isBuffer? +- [**#49**](https://github.com/ljharb/qs/issues/49) refactor utils.merge, fixes #45 +- [**#41**](https://github.com/ljharb/qs/issues/41) avoid browserifying Buffer, for #39 + +## [**2.2.4**](https://github.com/ljharb/qs/issues?milestone=13&state=closed) +- [**#38**](https://github.com/ljharb/qs/issues/38) how to handle object keys beginning with a number + +## [**2.2.3**](https://github.com/ljharb/qs/issues?milestone=12&state=closed) +- [**#37**](https://github.com/ljharb/qs/issues/37) parser discards first empty value in array +- [**#36**](https://github.com/ljharb/qs/issues/36) Update to lab 4.x + +## [**2.2.2**](https://github.com/ljharb/qs/issues?milestone=11&state=closed) +- [**#33**](https://github.com/ljharb/qs/issues/33) Error when plain object in a value +- [**#34**](https://github.com/ljharb/qs/issues/34) use Object.prototype.hasOwnProperty.call instead of obj.hasOwnProperty +- [**#24**](https://github.com/ljharb/qs/issues/24) Changelog? Semver? + +## [**2.2.1**](https://github.com/ljharb/qs/issues?milestone=10&state=closed) +- [**#32**](https://github.com/ljharb/qs/issues/32) account for circular references properly, closes #31 +- [**#31**](https://github.com/ljharb/qs/issues/31) qs.parse stackoverflow on circular objects + +## [**2.2.0**](https://github.com/ljharb/qs/issues?milestone=9&state=closed) +- [**#26**](https://github.com/ljharb/qs/issues/26) Don't use Buffer global if it's not present +- [**#30**](https://github.com/ljharb/qs/issues/30) Bug when merging non-object values into arrays +- [**#29**](https://github.com/ljharb/qs/issues/29) Don't call Utils.clone at the top of Utils.merge +- [**#23**](https://github.com/ljharb/qs/issues/23) Ability to not limit parameters? + +## [**2.1.0**](https://github.com/ljharb/qs/issues?milestone=8&state=closed) +- [**#22**](https://github.com/ljharb/qs/issues/22) Enable using a RegExp as delimiter + +## [**2.0.0**](https://github.com/ljharb/qs/issues?milestone=7&state=closed) +- [**#18**](https://github.com/ljharb/qs/issues/18) Why is there arrayLimit? 
+- [**#20**](https://github.com/ljharb/qs/issues/20) Configurable parametersLimit +- [**#21**](https://github.com/ljharb/qs/issues/21) make all limits optional, for #18, for #20 + +## [**1.2.2**](https://github.com/ljharb/qs/issues?milestone=6&state=closed) +- [**#19**](https://github.com/ljharb/qs/issues/19) Don't overwrite null values + +## [**1.2.1**](https://github.com/ljharb/qs/issues?milestone=5&state=closed) +- [**#16**](https://github.com/ljharb/qs/issues/16) ignore non-string delimiters +- [**#15**](https://github.com/ljharb/qs/issues/15) Close code block + +## [**1.2.0**](https://github.com/ljharb/qs/issues?milestone=4&state=closed) +- [**#12**](https://github.com/ljharb/qs/issues/12) Add optional delim argument +- [**#13**](https://github.com/ljharb/qs/issues/13) fix #11: flattened keys in array are now correctly parsed + +## [**1.1.0**](https://github.com/ljharb/qs/issues?milestone=3&state=closed) +- [**#7**](https://github.com/ljharb/qs/issues/7) Empty values of a POST array disappear after being submitted +- [**#9**](https://github.com/ljharb/qs/issues/9) Should not omit equals signs (=) when value is null +- [**#6**](https://github.com/ljharb/qs/issues/6) Minor grammar fix in README + +## [**1.0.2**](https://github.com/ljharb/qs/issues?milestone=2&state=closed) +- [**#5**](https://github.com/ljharb/qs/issues/5) array holes incorrectly copied into object on large index diff --git a/node_modules/formidable/node_modules/qs/LICENSE.md b/node_modules/formidable/node_modules/qs/LICENSE.md new file mode 100644 index 0000000..fecf6b6 --- /dev/null +++ b/node_modules/formidable/node_modules/qs/LICENSE.md @@ -0,0 +1,29 @@ +BSD 3-Clause License + +Copyright (c) 2014, Nathan LaFreniere and other [contributors](https://github.com/ljharb/qs/graphs/contributors) +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/node_modules/formidable/node_modules/qs/README.md b/node_modules/formidable/node_modules/qs/README.md new file mode 100644 index 0000000..84f2534 --- /dev/null +++ b/node_modules/formidable/node_modules/qs/README.md @@ -0,0 +1,604 @@ +# qs [![Version Badge][2]][1] + +[![Build Status][3]][4] +[![dependency status][5]][6] +[![dev dependency status][7]][8] +[![License][license-image]][license-url] +[![Downloads][downloads-image]][downloads-url] + +[![npm badge][11]][1] + +A querystring parsing and stringifying library with some added security. + +Lead Maintainer: [Jordan Harband](https://github.com/ljharb) + +The **qs** module was originally created and maintained by [TJ Holowaychuk](https://github.com/visionmedia/node-querystring). + +## Usage + +```javascript +var qs = require('qs'); +var assert = require('assert'); + +var obj = qs.parse('a=c'); +assert.deepEqual(obj, { a: 'c' }); + +var str = qs.stringify(obj); +assert.equal(str, 'a=c'); +``` + +### Parsing Objects + +[](#preventEval) +```javascript +qs.parse(string, [options]); +``` + +**qs** allows you to create nested objects within your query strings, by surrounding the name of sub-keys with square brackets `[]`. +For example, the string `'foo[bar]=baz'` converts to: + +```javascript +assert.deepEqual(qs.parse('foo[bar]=baz'), { + foo: { + bar: 'baz' + } +}); +``` + +When using the `plainObjects` option the parsed value is returned as a null object, created via `Object.create(null)` and as such you should be aware that prototype methods will not exist on it and a user may set those names to whatever value they like: + +```javascript +var nullObject = qs.parse('a[hasOwnProperty]=b', { plainObjects: true }); +assert.deepEqual(nullObject, { a: { hasOwnProperty: 'b' } }); +``` + +By default parameters that would overwrite properties on the object prototype are ignored, if you wish to keep the data from those fields either use `plainObjects` as mentioned above, or set `allowPrototypes` to `true` which will allow user input to overwrite those properties. *WARNING* It is generally a bad idea to enable this option as it can cause problems when attempting to use the properties that have been overwritten. Always be careful with this option. + +```javascript +var protoObject = qs.parse('a[hasOwnProperty]=b', { allowPrototypes: true }); +assert.deepEqual(protoObject, { a: { hasOwnProperty: 'b' } }); +``` + +URI encoded strings work too: + +```javascript +assert.deepEqual(qs.parse('a%5Bb%5D=c'), { + a: { b: 'c' } +}); +``` + +You can also nest your objects, like `'foo[bar][baz]=foobarbaz'`: + +```javascript +assert.deepEqual(qs.parse('foo[bar][baz]=foobarbaz'), { + foo: { + bar: { + baz: 'foobarbaz' + } + } +}); +``` + +By default, when nesting objects **qs** will only parse up to 5 children deep. This means if you attempt to parse a string like +`'a[b][c][d][e][f][g][h][i]=j'` your resulting object will be: + +```javascript +var expected = { + a: { + b: { + c: { + d: { + e: { + f: { + '[g][h][i]': 'j' + } + } + } + } + } + } +}; +var string = 'a[b][c][d][e][f][g][h][i]=j'; +assert.deepEqual(qs.parse(string), expected); +``` + +This depth can be overridden by passing a `depth` option to `qs.parse(string, [options])`: + +```javascript +var deep = qs.parse('a[b][c][d][e][f][g][h][i]=j', { depth: 1 }); +assert.deepEqual(deep, { a: { b: { '[c][d][e][f][g][h][i]': 'j' } } }); +``` + +The depth limit helps mitigate abuse when **qs** is used to parse user input, and it is recommended to keep it a reasonably small number. 
+ +For similar reasons, by default **qs** will only parse up to 1000 parameters. This can be overridden by passing a `parameterLimit` option: + +```javascript +var limited = qs.parse('a=b&c=d', { parameterLimit: 1 }); +assert.deepEqual(limited, { a: 'b' }); +``` + +To bypass the leading question mark, use `ignoreQueryPrefix`: + +```javascript +var prefixed = qs.parse('?a=b&c=d', { ignoreQueryPrefix: true }); +assert.deepEqual(prefixed, { a: 'b', c: 'd' }); +``` + +An optional delimiter can also be passed: + +```javascript +var delimited = qs.parse('a=b;c=d', { delimiter: ';' }); +assert.deepEqual(delimited, { a: 'b', c: 'd' }); +``` + +Delimiters can be a regular expression too: + +```javascript +var regexed = qs.parse('a=b;c=d,e=f', { delimiter: /[;,]/ }); +assert.deepEqual(regexed, { a: 'b', c: 'd', e: 'f' }); +``` + +Option `allowDots` can be used to enable dot notation: + +```javascript +var withDots = qs.parse('a.b=c', { allowDots: true }); +assert.deepEqual(withDots, { a: { b: 'c' } }); +``` + +If you have to deal with legacy browsers or services, there's +also support for decoding percent-encoded octets as iso-8859-1: + +```javascript +var oldCharset = qs.parse('a=%A7', { charset: 'iso-8859-1' }); +assert.deepEqual(oldCharset, { a: '§' }); +``` + +Some services add an initial `utf8=✓` value to forms so that old +Internet Explorer versions are more likely to submit the form as +utf-8. Additionally, the server can check the value against wrong +encodings of the checkmark character and detect that a query string +or `application/x-www-form-urlencoded` body was *not* sent as +utf-8, eg. if the form had an `accept-charset` parameter or the +containing page had a different character set. + +**qs** supports this mechanism via the `charsetSentinel` option. +If specified, the `utf8` parameter will be omitted from the +returned object. It will be used to switch to `iso-8859-1`/`utf-8` +mode depending on how the checkmark is encoded. + +**Important**: When you specify both the `charset` option and the +`charsetSentinel` option, the `charset` will be overridden when +the request contains a `utf8` parameter from which the actual +charset can be deduced. In that sense the `charset` will behave +as the default charset rather than the authoritative charset. + +```javascript +var detectedAsUtf8 = qs.parse('utf8=%E2%9C%93&a=%C3%B8', { + charset: 'iso-8859-1', + charsetSentinel: true +}); +assert.deepEqual(detectedAsUtf8, { a: 'ø' }); + +// Browsers encode the checkmark as ✓ when submitting as iso-8859-1: +var detectedAsIso8859_1 = qs.parse('utf8=%26%2310003%3B&a=%F8', { + charset: 'utf-8', + charsetSentinel: true +}); +assert.deepEqual(detectedAsIso8859_1, { a: 'ø' }); +``` + +If you want to decode the `&#...;` syntax to the actual character, +you can specify the `interpretNumericEntities` option as well: + +```javascript +var detectedAsIso8859_1 = qs.parse('a=%26%239786%3B', { + charset: 'iso-8859-1', + interpretNumericEntities: true +}); +assert.deepEqual(detectedAsIso8859_1, { a: '☺' }); +``` + +It also works when the charset has been detected in `charsetSentinel` +mode. 
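
As a minimal sketch tying the two options above together (the input string is illustrative, not taken from this patch): when `charsetSentinel` detects an `iso-8859-1` submission and `interpretNumericEntities` is enabled, the numeric entity in the value is decoded back to the original character and the `utf8` parameter is dropped from the result.

```javascript
var qs = require('qs');
var assert = require('assert');

// Illustrative input: the iso-8859-1 sentinel (utf8=&#10003;) followed by a
// value containing the numeric entity for '☺', as an old browser would send it.
var detected = qs.parse('utf8=%26%2310003%3B&a=%26%239786%3B', {
    charsetSentinel: true,          // let the utf8= parameter select the charset
    interpretNumericEntities: true  // then decode &#...; back to the character
});
assert.deepEqual(detected, { a: '☺' });
```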
+ +### Parsing Arrays + +**qs** can also parse arrays using a similar `[]` notation: + +```javascript +var withArray = qs.parse('a[]=b&a[]=c'); +assert.deepEqual(withArray, { a: ['b', 'c'] }); +``` + +You may specify an index as well: + +```javascript +var withIndexes = qs.parse('a[1]=c&a[0]=b'); +assert.deepEqual(withIndexes, { a: ['b', 'c'] }); +``` + +Note that the only difference between an index in an array and a key in an object is that the value between the brackets must be a number +to create an array. When creating arrays with specific indices, **qs** will compact a sparse array to only the existing values preserving +their order: + +```javascript +var noSparse = qs.parse('a[1]=b&a[15]=c'); +assert.deepEqual(noSparse, { a: ['b', 'c'] }); +``` + +Note that an empty string is also a value, and will be preserved: + +```javascript +var withEmptyString = qs.parse('a[]=&a[]=b'); +assert.deepEqual(withEmptyString, { a: ['', 'b'] }); + +var withIndexedEmptyString = qs.parse('a[0]=b&a[1]=&a[2]=c'); +assert.deepEqual(withIndexedEmptyString, { a: ['b', '', 'c'] }); +``` + +**qs** will also limit specifying indices in an array to a maximum index of `20`. Any array members with an index of greater than `20` will +instead be converted to an object with the index as the key. This is needed to handle cases when someone sent, for example, `a[999999999]` and it will take significant time to iterate over this huge array. + +```javascript +var withMaxIndex = qs.parse('a[100]=b'); +assert.deepEqual(withMaxIndex, { a: { '100': 'b' } }); +``` + +This limit can be overridden by passing an `arrayLimit` option: + +```javascript +var withArrayLimit = qs.parse('a[1]=b', { arrayLimit: 0 }); +assert.deepEqual(withArrayLimit, { a: { '1': 'b' } }); +``` + +To disable array parsing entirely, set `parseArrays` to `false`. + +```javascript +var noParsingArrays = qs.parse('a[]=b', { parseArrays: false }); +assert.deepEqual(noParsingArrays, { a: { '0': 'b' } }); +``` + +If you mix notations, **qs** will merge the two items into an object: + +```javascript +var mixedNotation = qs.parse('a[0]=b&a[b]=c'); +assert.deepEqual(mixedNotation, { a: { '0': 'b', b: 'c' } }); +``` + +You can also create arrays of objects: + +```javascript +var arraysOfObjects = qs.parse('a[][b]=c'); +assert.deepEqual(arraysOfObjects, { a: [{ b: 'c' }] }); +``` + +Some people use comma to join array, **qs** can parse it: +```javascript +var arraysOfObjects = qs.parse('a=b,c', { comma: true }) +assert.deepEqual(arraysOfObjects, { a: ['b', 'c'] }) +``` +(_this cannot convert nested objects, such as `a={b:1},{c:d}`_) + +### Stringifying + +[](#preventEval) +```javascript +qs.stringify(object, [options]); +``` + +When stringifying, **qs** by default URI encodes output. 
Objects are stringified as you would expect: + +```javascript +assert.equal(qs.stringify({ a: 'b' }), 'a=b'); +assert.equal(qs.stringify({ a: { b: 'c' } }), 'a%5Bb%5D=c'); +``` + +This encoding can be disabled by setting the `encode` option to `false`: + +```javascript +var unencoded = qs.stringify({ a: { b: 'c' } }, { encode: false }); +assert.equal(unencoded, 'a[b]=c'); +``` + +Encoding can be disabled for keys by setting the `encodeValuesOnly` option to `true`: +```javascript +var encodedValues = qs.stringify( + { a: 'b', c: ['d', 'e=f'], f: [['g'], ['h']] }, + { encodeValuesOnly: true } +); +assert.equal(encodedValues,'a=b&c[0]=d&c[1]=e%3Df&f[0][0]=g&f[1][0]=h'); +``` + +This encoding can also be replaced by a custom encoding method set as `encoder` option: + +```javascript +var encoded = qs.stringify({ a: { b: 'c' } }, { encoder: function (str) { + // Passed in values `a`, `b`, `c` + return // Return encoded string +}}) +``` + +_(Note: the `encoder` option does not apply if `encode` is `false`)_ + +Analogue to the `encoder` there is a `decoder` option for `parse` to override decoding of properties and values: + +```javascript +var decoded = qs.parse('x=z', { decoder: function (str) { + // Passed in values `x`, `z` + return // Return decoded string +}}) +``` + +You can encode keys and values using different logic by using the type argument provided to the encoder: + +```javascript +var encoded = qs.stringify({ a: { b: 'c' } }, { encoder: function (str, defaultEncoder, charset, type) { + if (type === 'key') { + return // Encoded key + } else if (type === 'value') { + return // Encoded value + } +}}) +``` + +The type argument is also provided to the decoder: + +```javascript +var decoded = qs.parse('x=z', { decoder: function (str, defaultEncoder, charset, type) { + if (type === 'key') { + return // Decoded key + } else if (type === 'value') { + return // Decoded value + } +}}) +``` + +Examples beyond this point will be shown as though the output is not URI encoded for clarity. Please note that the return values in these cases *will* be URI encoded during real usage. 
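
As a concrete illustration of the type-aware `encoder` shown above, here is a hypothetical sketch that upper-cases encoded keys while leaving values to the default encoder; the output shown assumes the default RFC 3986 format and is not an example from the original README.

```javascript
var qs = require('qs');
var assert = require('assert');

// Hypothetical type-aware encoder: keys are default-encoded and then
// upper-cased, values are passed through the default encoder unchanged.
var shouty = qs.stringify({ a: { b: 'c d' } }, {
    encoder: function (str, defaultEncoder, charset, type) {
        if (type === 'key') {
            return defaultEncoder(str).toUpperCase(); // 'a[b]' -> 'A%5BB%5D'
        }
        return defaultEncoder(str); // 'c d' -> 'c%20d'
    }
});
assert.equal(shouty, 'A%5BB%5D=c%20d');
```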
+ +When arrays are stringified, by default they are given explicit indices: + +```javascript +qs.stringify({ a: ['b', 'c', 'd'] }); +// 'a[0]=b&a[1]=c&a[2]=d' +``` + +You may override this by setting the `indices` option to `false`: + +```javascript +qs.stringify({ a: ['b', 'c', 'd'] }, { indices: false }); +// 'a=b&a=c&a=d' +``` + +You may use the `arrayFormat` option to specify the format of the output array: + +```javascript +qs.stringify({ a: ['b', 'c'] }, { arrayFormat: 'indices' }) +// 'a[0]=b&a[1]=c' +qs.stringify({ a: ['b', 'c'] }, { arrayFormat: 'brackets' }) +// 'a[]=b&a[]=c' +qs.stringify({ a: ['b', 'c'] }, { arrayFormat: 'repeat' }) +// 'a=b&a=c' +qs.stringify({ a: ['b', 'c'] }, { arrayFormat: 'comma' }) +// 'a=b,c' +``` + +When objects are stringified, by default they use bracket notation: + +```javascript +qs.stringify({ a: { b: { c: 'd', e: 'f' } } }); +// 'a[b][c]=d&a[b][e]=f' +``` + +You may override this to use dot notation by setting the `allowDots` option to `true`: + +```javascript +qs.stringify({ a: { b: { c: 'd', e: 'f' } } }, { allowDots: true }); +// 'a.b.c=d&a.b.e=f' +``` + +Empty strings and null values will omit the value, but the equals sign (=) remains in place: + +```javascript +assert.equal(qs.stringify({ a: '' }), 'a='); +``` + +Key with no values (such as an empty object or array) will return nothing: + +```javascript +assert.equal(qs.stringify({ a: [] }), ''); +assert.equal(qs.stringify({ a: {} }), ''); +assert.equal(qs.stringify({ a: [{}] }), ''); +assert.equal(qs.stringify({ a: { b: []} }), ''); +assert.equal(qs.stringify({ a: { b: {}} }), ''); +``` + +Properties that are set to `undefined` will be omitted entirely: + +```javascript +assert.equal(qs.stringify({ a: null, b: undefined }), 'a='); +``` + +The query string may optionally be prepended with a question mark: + +```javascript +assert.equal(qs.stringify({ a: 'b', c: 'd' }, { addQueryPrefix: true }), '?a=b&c=d'); +``` + +The delimiter may be overridden with stringify as well: + +```javascript +assert.equal(qs.stringify({ a: 'b', c: 'd' }, { delimiter: ';' }), 'a=b;c=d'); +``` + +If you only want to override the serialization of `Date` objects, you can provide a `serializeDate` option: + +```javascript +var date = new Date(7); +assert.equal(qs.stringify({ a: date }), 'a=1970-01-01T00:00:00.007Z'.replace(/:/g, '%3A')); +assert.equal( + qs.stringify({ a: date }, { serializeDate: function (d) { return d.getTime(); } }), + 'a=7' +); +``` + +You may use the `sort` option to affect the order of parameter keys: + +```javascript +function alphabeticalSort(a, b) { + return a.localeCompare(b); +} +assert.equal(qs.stringify({ a: 'c', z: 'y', b : 'f' }, { sort: alphabeticalSort }), 'a=c&b=f&z=y'); +``` + +Finally, you can use the `filter` option to restrict which keys will be included in the stringified output. +If you pass a function, it will be called for each key to obtain the replacement value. Otherwise, if you +pass an array, it will be used to select properties and array indices for stringification: + +```javascript +function filterFunc(prefix, value) { + if (prefix == 'b') { + // Return an `undefined` value to omit a property. 
+ return; + } + if (prefix == 'e[f]') { + return value.getTime(); + } + if (prefix == 'e[g][0]') { + return value * 2; + } + return value; +} +qs.stringify({ a: 'b', c: 'd', e: { f: new Date(123), g: [2] } }, { filter: filterFunc }); +// 'a=b&c=d&e[f]=123&e[g][0]=4' +qs.stringify({ a: 'b', c: 'd', e: 'f' }, { filter: ['a', 'e'] }); +// 'a=b&e=f' +qs.stringify({ a: ['b', 'c', 'd'], e: 'f' }, { filter: ['a', 0, 2] }); +// 'a[0]=b&a[2]=d' +``` + +### Handling of `null` values + +By default, `null` values are treated like empty strings: + +```javascript +var withNull = qs.stringify({ a: null, b: '' }); +assert.equal(withNull, 'a=&b='); +``` + +Parsing does not distinguish between parameters with and without equal signs. Both are converted to empty strings. + +```javascript +var equalsInsensitive = qs.parse('a&b='); +assert.deepEqual(equalsInsensitive, { a: '', b: '' }); +``` + +To distinguish between `null` values and empty strings use the `strictNullHandling` flag. In the result string the `null` +values have no `=` sign: + +```javascript +var strictNull = qs.stringify({ a: null, b: '' }, { strictNullHandling: true }); +assert.equal(strictNull, 'a&b='); +``` + +To parse values without `=` back to `null` use the `strictNullHandling` flag: + +```javascript +var parsedStrictNull = qs.parse('a&b=', { strictNullHandling: true }); +assert.deepEqual(parsedStrictNull, { a: null, b: '' }); +``` + +To completely skip rendering keys with `null` values, use the `skipNulls` flag: + +```javascript +var nullsSkipped = qs.stringify({ a: 'b', c: null}, { skipNulls: true }); +assert.equal(nullsSkipped, 'a=b'); +``` + +If you're communicating with legacy systems, you can switch to `iso-8859-1` +using the `charset` option: + +```javascript +var iso = qs.stringify({ æ: 'æ' }, { charset: 'iso-8859-1' }); +assert.equal(iso, '%E6=%E6'); +``` + +Characters that don't exist in `iso-8859-1` will be converted to numeric +entities, similar to what browsers do: + +```javascript +var numeric = qs.stringify({ a: '☺' }, { charset: 'iso-8859-1' }); +assert.equal(numeric, 'a=%26%239786%3B'); +``` + +You can use the `charsetSentinel` option to announce the character by +including an `utf8=✓` parameter with the proper encoding if the checkmark, +similar to what Ruby on Rails and others do when submitting forms. + +```javascript +var sentinel = qs.stringify({ a: '☺' }, { charsetSentinel: true }); +assert.equal(sentinel, 'utf8=%E2%9C%93&a=%E2%98%BA'); + +var isoSentinel = qs.stringify({ a: 'æ' }, { charsetSentinel: true, charset: 'iso-8859-1' }); +assert.equal(isoSentinel, 'utf8=%26%2310003%3B&a=%E6'); +``` + +### Dealing with special character sets + +By default the encoding and decoding of characters is done in `utf-8`, +and `iso-8859-1` support is also built in via the `charset` parameter. + +If you wish to encode querystrings to a different character set (i.e. +[Shift JIS](https://en.wikipedia.org/wiki/Shift_JIS)) you can use the +[`qs-iconv`](https://github.com/martinheidegger/qs-iconv) library: + +```javascript +var encoder = require('qs-iconv/encoder')('shift_jis'); +var shiftJISEncoded = qs.stringify({ a: 'こんにちは!' }, { encoder: encoder }); +assert.equal(shiftJISEncoded, 'a=%82%B1%82%F1%82%C9%82%BF%82%CD%81I'); +``` + +This also works for decoding of query strings: + +```javascript +var decoder = require('qs-iconv/decoder')('shift_jis'); +var obj = qs.parse('a=%82%B1%82%F1%82%C9%82%BF%82%CD%81I', { decoder: decoder }); +assert.deepEqual(obj, { a: 'こんにちは!' 
}); +``` + +### RFC 3986 and RFC 1738 space encoding + +RFC3986 used as default option and encodes ' ' to *%20* which is backward compatible. +In the same time, output can be stringified as per RFC1738 with ' ' equal to '+'. + +``` +assert.equal(qs.stringify({ a: 'b c' }), 'a=b%20c'); +assert.equal(qs.stringify({ a: 'b c' }, { format : 'RFC3986' }), 'a=b%20c'); +assert.equal(qs.stringify({ a: 'b c' }, { format : 'RFC1738' }), 'a=b+c'); +``` + +## Security + +Please email [@ljharb](https://github.com/ljharb) or see https://tidelift.com/security if you have a potential security vulnerability to report. + +## qs for enterprise + +Available as part of the Tidelift Subscription + +The maintainers of qs and thousands of other packages are working with Tidelift to deliver commercial support and maintenance for the open source dependencies you use to build your applications. Save time, reduce risk, and improve code health, while paying the maintainers of the exact dependencies you use. [Learn more.](https://tidelift.com/subscription/pkg/npm-qs?utm_source=npm-qs&utm_medium=referral&utm_campaign=enterprise&utm_term=repo) + +[1]: https://npmjs.org/package/qs +[2]: http://versionbadg.es/ljharb/qs.svg +[3]: https://api.travis-ci.org/ljharb/qs.svg +[4]: https://travis-ci.org/ljharb/qs +[5]: https://david-dm.org/ljharb/qs.svg +[6]: https://david-dm.org/ljharb/qs +[7]: https://david-dm.org/ljharb/qs/dev-status.svg +[8]: https://david-dm.org/ljharb/qs?type=dev +[9]: https://ci.testling.com/ljharb/qs.png +[10]: https://ci.testling.com/ljharb/qs +[11]: https://nodei.co/npm/qs.png?downloads=true&stars=true +[license-image]: http://img.shields.io/npm/l/qs.svg +[license-url]: LICENSE +[downloads-image]: http://img.shields.io/npm/dm/qs.svg +[downloads-url]: http://npm-stat.com/charts.html?package=qs diff --git a/node_modules/formidable/node_modules/qs/dist/qs.js b/node_modules/formidable/node_modules/qs/dist/qs.js new file mode 100644 index 0000000..752629d --- /dev/null +++ b/node_modules/formidable/node_modules/qs/dist/qs.js @@ -0,0 +1,832 @@ +(function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}g.Qs = f()}})(function(){var define,module,exports;return (function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c="function"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error("Cannot find module '"+i+"'");throw a.code="MODULE_NOT_FOUND",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u="function"==typeof require&&require,i=0;i -1) { + return val.split(','); + } + + return val; +}; + +var maybeMap = function maybeMap(val, fn) { + if (isArray(val)) { + var mapped = []; + for (var i = 0; i < val.length; i += 1) { + mapped.push(fn(val[i])); + } + return mapped; + } + return fn(val); +}; + +// This is what browsers will submit when the ✓ character occurs in an +// application/x-www-form-urlencoded body and the encoding of the page containing +// the form is iso-8859-1, or when the submitted form has an accept-charset +// attribute of iso-8859-1. Presumably also with other charsets that do not contain +// the ✓ character, such as us-ascii. 
+var isoSentinel = 'utf8=%26%2310003%3B'; // encodeURIComponent('✓') + +// These are the percent-encoded utf-8 octets representing a checkmark, indicating that the request actually is utf-8 encoded. +var charsetSentinel = 'utf8=%E2%9C%93'; // encodeURIComponent('✓') + +var parseValues = function parseQueryStringValues(str, options) { + var obj = {}; + var cleanStr = options.ignoreQueryPrefix ? str.replace(/^\?/, '') : str; + var limit = options.parameterLimit === Infinity ? undefined : options.parameterLimit; + var parts = cleanStr.split(options.delimiter, limit); + var skipIndex = -1; // Keep track of where the utf8 sentinel was found + var i; + + var charset = options.charset; + if (options.charsetSentinel) { + for (i = 0; i < parts.length; ++i) { + if (parts[i].indexOf('utf8=') === 0) { + if (parts[i] === charsetSentinel) { + charset = 'utf-8'; + } else if (parts[i] === isoSentinel) { + charset = 'iso-8859-1'; + } + skipIndex = i; + i = parts.length; // The eslint settings do not allow break; + } + } + } + + for (i = 0; i < parts.length; ++i) { + if (i === skipIndex) { + continue; + } + var part = parts[i]; + + var bracketEqualsPos = part.indexOf(']='); + var pos = bracketEqualsPos === -1 ? part.indexOf('=') : bracketEqualsPos + 1; + + var key, val; + if (pos === -1) { + key = options.decoder(part, defaults.decoder, charset, 'key'); + val = options.strictNullHandling ? null : ''; + } else { + key = options.decoder(part.slice(0, pos), defaults.decoder, charset, 'key'); + val = maybeMap( + parseArrayValue(part.slice(pos + 1), options), + function (encodedVal) { + return options.decoder(encodedVal, defaults.decoder, charset, 'value'); + } + ); + } + + if (val && options.interpretNumericEntities && charset === 'iso-8859-1') { + val = interpretNumericEntities(val); + } + + if (part.indexOf('[]=') > -1) { + val = isArray(val) ? [val] : val; + } + + if (has.call(obj, key)) { + obj[key] = utils.combine(obj[key], val); + } else { + obj[key] = val; + } + } + + return obj; +}; + +var parseObject = function (chain, val, options, valuesParsed) { + var leaf = valuesParsed ? val : parseArrayValue(val, options); + + for (var i = chain.length - 1; i >= 0; --i) { + var obj; + var root = chain[i]; + + if (root === '[]' && options.parseArrays) { + obj = [].concat(leaf); + } else { + obj = options.plainObjects ? Object.create(null) : {}; + var cleanRoot = root.charAt(0) === '[' && root.charAt(root.length - 1) === ']' ? root.slice(1, -1) : root; + var index = parseInt(cleanRoot, 10); + if (!options.parseArrays && cleanRoot === '') { + obj = { 0: leaf }; + } else if ( + !isNaN(index) + && root !== cleanRoot + && String(index) === cleanRoot + && index >= 0 + && (options.parseArrays && index <= options.arrayLimit) + ) { + obj = []; + obj[index] = leaf; + } else { + obj[cleanRoot] = leaf; + } + } + + leaf = obj; // eslint-disable-line no-param-reassign + } + + return leaf; +}; + +var parseKeys = function parseQueryStringKeys(givenKey, val, options, valuesParsed) { + if (!givenKey) { + return; + } + + // Transform dot notation to bracket notation + var key = options.allowDots ? givenKey.replace(/\.([^.[]+)/g, '[$1]') : givenKey; + + // The regex chunks + + var brackets = /(\[[^[\]]*])/; + var child = /(\[[^[\]]*])/g; + + // Get the parent + + var segment = options.depth > 0 && brackets.exec(key); + var parent = segment ? 
key.slice(0, segment.index) : key; + + // Stash the parent if it exists + + var keys = []; + if (parent) { + // If we aren't using plain objects, optionally prefix keys that would overwrite object prototype properties + if (!options.plainObjects && has.call(Object.prototype, parent)) { + if (!options.allowPrototypes) { + return; + } + } + + keys.push(parent); + } + + // Loop through children appending to the array until we hit depth + + var i = 0; + while (options.depth > 0 && (segment = child.exec(key)) !== null && i < options.depth) { + i += 1; + if (!options.plainObjects && has.call(Object.prototype, segment[1].slice(1, -1))) { + if (!options.allowPrototypes) { + return; + } + } + keys.push(segment[1]); + } + + // If there's a remainder, just add whatever is left + + if (segment) { + keys.push('[' + key.slice(segment.index) + ']'); + } + + return parseObject(keys, val, options, valuesParsed); +}; + +var normalizeParseOptions = function normalizeParseOptions(opts) { + if (!opts) { + return defaults; + } + + if (opts.decoder !== null && opts.decoder !== undefined && typeof opts.decoder !== 'function') { + throw new TypeError('Decoder has to be a function.'); + } + + if (typeof opts.charset !== 'undefined' && opts.charset !== 'utf-8' && opts.charset !== 'iso-8859-1') { + throw new TypeError('The charset option must be either utf-8, iso-8859-1, or undefined'); + } + var charset = typeof opts.charset === 'undefined' ? defaults.charset : opts.charset; + + return { + allowDots: typeof opts.allowDots === 'undefined' ? defaults.allowDots : !!opts.allowDots, + allowPrototypes: typeof opts.allowPrototypes === 'boolean' ? opts.allowPrototypes : defaults.allowPrototypes, + arrayLimit: typeof opts.arrayLimit === 'number' ? opts.arrayLimit : defaults.arrayLimit, + charset: charset, + charsetSentinel: typeof opts.charsetSentinel === 'boolean' ? opts.charsetSentinel : defaults.charsetSentinel, + comma: typeof opts.comma === 'boolean' ? opts.comma : defaults.comma, + decoder: typeof opts.decoder === 'function' ? opts.decoder : defaults.decoder, + delimiter: typeof opts.delimiter === 'string' || utils.isRegExp(opts.delimiter) ? opts.delimiter : defaults.delimiter, + // eslint-disable-next-line no-implicit-coercion, no-extra-parens + depth: (typeof opts.depth === 'number' || opts.depth === false) ? +opts.depth : defaults.depth, + ignoreQueryPrefix: opts.ignoreQueryPrefix === true, + interpretNumericEntities: typeof opts.interpretNumericEntities === 'boolean' ? opts.interpretNumericEntities : defaults.interpretNumericEntities, + parameterLimit: typeof opts.parameterLimit === 'number' ? opts.parameterLimit : defaults.parameterLimit, + parseArrays: opts.parseArrays !== false, + plainObjects: typeof opts.plainObjects === 'boolean' ? opts.plainObjects : defaults.plainObjects, + strictNullHandling: typeof opts.strictNullHandling === 'boolean' ? opts.strictNullHandling : defaults.strictNullHandling + }; +}; + +module.exports = function (str, opts) { + var options = normalizeParseOptions(opts); + + if (str === '' || str === null || typeof str === 'undefined') { + return options.plainObjects ? Object.create(null) : {}; + } + + var tempObj = typeof str === 'string' ? parseValues(str, options) : str; + var obj = options.plainObjects ? 
Object.create(null) : {}; + + // Iterate over the keys and setup the new object + + var keys = Object.keys(tempObj); + for (var i = 0; i < keys.length; ++i) { + var key = keys[i]; + var newObj = parseKeys(key, tempObj[key], options, typeof str === 'string'); + obj = utils.merge(obj, newObj, options); + } + + return utils.compact(obj); +}; + +},{"./utils":5}],4:[function(require,module,exports){ +'use strict'; + +var utils = require('./utils'); +var formats = require('./formats'); +var has = Object.prototype.hasOwnProperty; + +var arrayPrefixGenerators = { + brackets: function brackets(prefix) { + return prefix + '[]'; + }, + comma: 'comma', + indices: function indices(prefix, key) { + return prefix + '[' + key + ']'; + }, + repeat: function repeat(prefix) { + return prefix; + } +}; + +var isArray = Array.isArray; +var push = Array.prototype.push; +var pushToArray = function (arr, valueOrArray) { + push.apply(arr, isArray(valueOrArray) ? valueOrArray : [valueOrArray]); +}; + +var toISO = Date.prototype.toISOString; + +var defaultFormat = formats['default']; +var defaults = { + addQueryPrefix: false, + allowDots: false, + charset: 'utf-8', + charsetSentinel: false, + delimiter: '&', + encode: true, + encoder: utils.encode, + encodeValuesOnly: false, + format: defaultFormat, + formatter: formats.formatters[defaultFormat], + // deprecated + indices: false, + serializeDate: function serializeDate(date) { + return toISO.call(date); + }, + skipNulls: false, + strictNullHandling: false +}; + +var isNonNullishPrimitive = function isNonNullishPrimitive(v) { + return typeof v === 'string' + || typeof v === 'number' + || typeof v === 'boolean' + || typeof v === 'symbol' + || typeof v === 'bigint'; +}; + +var stringify = function stringify( + object, + prefix, + generateArrayPrefix, + strictNullHandling, + skipNulls, + encoder, + filter, + sort, + allowDots, + serializeDate, + formatter, + encodeValuesOnly, + charset +) { + var obj = object; + if (typeof filter === 'function') { + obj = filter(prefix, obj); + } else if (obj instanceof Date) { + obj = serializeDate(obj); + } else if (generateArrayPrefix === 'comma' && isArray(obj)) { + obj = obj.join(','); + } + + if (obj === null) { + if (strictNullHandling) { + return encoder && !encodeValuesOnly ? encoder(prefix, defaults.encoder, charset, 'key') : prefix; + } + + obj = ''; + } + + if (isNonNullishPrimitive(obj) || utils.isBuffer(obj)) { + if (encoder) { + var keyValue = encodeValuesOnly ? prefix : encoder(prefix, defaults.encoder, charset, 'key'); + return [formatter(keyValue) + '=' + formatter(encoder(obj, defaults.encoder, charset, 'value'))]; + } + return [formatter(prefix) + '=' + formatter(String(obj))]; + } + + var values = []; + + if (typeof obj === 'undefined') { + return values; + } + + var objKeys; + if (isArray(filter)) { + objKeys = filter; + } else { + var keys = Object.keys(obj); + objKeys = sort ? keys.sort(sort) : keys; + } + + for (var i = 0; i < objKeys.length; ++i) { + var key = objKeys[i]; + + if (skipNulls && obj[key] === null) { + continue; + } + + if (isArray(obj)) { + pushToArray(values, stringify( + obj[key], + typeof generateArrayPrefix === 'function' ? generateArrayPrefix(prefix, key) : prefix, + generateArrayPrefix, + strictNullHandling, + skipNulls, + encoder, + filter, + sort, + allowDots, + serializeDate, + formatter, + encodeValuesOnly, + charset + )); + } else { + pushToArray(values, stringify( + obj[key], + prefix + (allowDots ? '.' 
+ key : '[' + key + ']'), + generateArrayPrefix, + strictNullHandling, + skipNulls, + encoder, + filter, + sort, + allowDots, + serializeDate, + formatter, + encodeValuesOnly, + charset + )); + } + } + + return values; +}; + +var normalizeStringifyOptions = function normalizeStringifyOptions(opts) { + if (!opts) { + return defaults; + } + + if (opts.encoder !== null && opts.encoder !== undefined && typeof opts.encoder !== 'function') { + throw new TypeError('Encoder has to be a function.'); + } + + var charset = opts.charset || defaults.charset; + if (typeof opts.charset !== 'undefined' && opts.charset !== 'utf-8' && opts.charset !== 'iso-8859-1') { + throw new TypeError('The charset option must be either utf-8, iso-8859-1, or undefined'); + } + + var format = formats['default']; + if (typeof opts.format !== 'undefined') { + if (!has.call(formats.formatters, opts.format)) { + throw new TypeError('Unknown format option provided.'); + } + format = opts.format; + } + var formatter = formats.formatters[format]; + + var filter = defaults.filter; + if (typeof opts.filter === 'function' || isArray(opts.filter)) { + filter = opts.filter; + } + + return { + addQueryPrefix: typeof opts.addQueryPrefix === 'boolean' ? opts.addQueryPrefix : defaults.addQueryPrefix, + allowDots: typeof opts.allowDots === 'undefined' ? defaults.allowDots : !!opts.allowDots, + charset: charset, + charsetSentinel: typeof opts.charsetSentinel === 'boolean' ? opts.charsetSentinel : defaults.charsetSentinel, + delimiter: typeof opts.delimiter === 'undefined' ? defaults.delimiter : opts.delimiter, + encode: typeof opts.encode === 'boolean' ? opts.encode : defaults.encode, + encoder: typeof opts.encoder === 'function' ? opts.encoder : defaults.encoder, + encodeValuesOnly: typeof opts.encodeValuesOnly === 'boolean' ? opts.encodeValuesOnly : defaults.encodeValuesOnly, + filter: filter, + formatter: formatter, + serializeDate: typeof opts.serializeDate === 'function' ? opts.serializeDate : defaults.serializeDate, + skipNulls: typeof opts.skipNulls === 'boolean' ? opts.skipNulls : defaults.skipNulls, + sort: typeof opts.sort === 'function' ? opts.sort : null, + strictNullHandling: typeof opts.strictNullHandling === 'boolean' ? opts.strictNullHandling : defaults.strictNullHandling + }; +}; + +module.exports = function (object, opts) { + var obj = object; + var options = normalizeStringifyOptions(opts); + + var objKeys; + var filter; + + if (typeof options.filter === 'function') { + filter = options.filter; + obj = filter('', obj); + } else if (isArray(options.filter)) { + filter = options.filter; + objKeys = filter; + } + + var keys = []; + + if (typeof obj !== 'object' || obj === null) { + return ''; + } + + var arrayFormat; + if (opts && opts.arrayFormat in arrayPrefixGenerators) { + arrayFormat = opts.arrayFormat; + } else if (opts && 'indices' in opts) { + arrayFormat = opts.indices ? 'indices' : 'repeat'; + } else { + arrayFormat = 'indices'; + } + + var generateArrayPrefix = arrayPrefixGenerators[arrayFormat]; + + if (!objKeys) { + objKeys = Object.keys(obj); + } + + if (options.sort) { + objKeys.sort(options.sort); + } + + for (var i = 0; i < objKeys.length; ++i) { + var key = objKeys[i]; + + if (options.skipNulls && obj[key] === null) { + continue; + } + pushToArray(keys, stringify( + obj[key], + key, + generateArrayPrefix, + options.strictNullHandling, + options.skipNulls, + options.encode ? 
options.encoder : null, + options.filter, + options.sort, + options.allowDots, + options.serializeDate, + options.formatter, + options.encodeValuesOnly, + options.charset + )); + } + + var joined = keys.join(options.delimiter); + var prefix = options.addQueryPrefix === true ? '?' : ''; + + if (options.charsetSentinel) { + if (options.charset === 'iso-8859-1') { + // encodeURIComponent('✓'), the "numeric entity" representation of a checkmark + prefix += 'utf8=%26%2310003%3B&'; + } else { + // encodeURIComponent('✓') + prefix += 'utf8=%E2%9C%93&'; + } + } + + return joined.length > 0 ? prefix + joined : ''; +}; + +},{"./formats":1,"./utils":5}],5:[function(require,module,exports){ +'use strict'; + +var has = Object.prototype.hasOwnProperty; +var isArray = Array.isArray; + +var hexTable = (function () { + var array = []; + for (var i = 0; i < 256; ++i) { + array.push('%' + ((i < 16 ? '0' : '') + i.toString(16)).toUpperCase()); + } + + return array; +}()); + +var compactQueue = function compactQueue(queue) { + while (queue.length > 1) { + var item = queue.pop(); + var obj = item.obj[item.prop]; + + if (isArray(obj)) { + var compacted = []; + + for (var j = 0; j < obj.length; ++j) { + if (typeof obj[j] !== 'undefined') { + compacted.push(obj[j]); + } + } + + item.obj[item.prop] = compacted; + } + } +}; + +var arrayToObject = function arrayToObject(source, options) { + var obj = options && options.plainObjects ? Object.create(null) : {}; + for (var i = 0; i < source.length; ++i) { + if (typeof source[i] !== 'undefined') { + obj[i] = source[i]; + } + } + + return obj; +}; + +var merge = function merge(target, source, options) { + /* eslint no-param-reassign: 0 */ + if (!source) { + return target; + } + + if (typeof source !== 'object') { + if (isArray(target)) { + target.push(source); + } else if (target && typeof target === 'object') { + if ((options && (options.plainObjects || options.allowPrototypes)) || !has.call(Object.prototype, source)) { + target[source] = true; + } + } else { + return [target, source]; + } + + return target; + } + + if (!target || typeof target !== 'object') { + return [target].concat(source); + } + + var mergeTarget = target; + if (isArray(target) && !isArray(source)) { + mergeTarget = arrayToObject(target, options); + } + + if (isArray(target) && isArray(source)) { + source.forEach(function (item, i) { + if (has.call(target, i)) { + var targetItem = target[i]; + if (targetItem && typeof targetItem === 'object' && item && typeof item === 'object') { + target[i] = merge(targetItem, item, options); + } else { + target.push(item); + } + } else { + target[i] = item; + } + }); + return target; + } + + return Object.keys(source).reduce(function (acc, key) { + var value = source[key]; + + if (has.call(acc, key)) { + acc[key] = merge(acc[key], value, options); + } else { + acc[key] = value; + } + return acc; + }, mergeTarget); +}; + +var assign = function assignSingleSource(target, source) { + return Object.keys(source).reduce(function (acc, key) { + acc[key] = source[key]; + return acc; + }, target); +}; + +var decode = function (str, decoder, charset) { + var strWithoutPlus = str.replace(/\+/g, ' '); + if (charset === 'iso-8859-1') { + // unescape never throws, no try...catch needed: + return strWithoutPlus.replace(/%[0-9a-f]{2}/gi, unescape); + } + // utf-8 + try { + return decodeURIComponent(strWithoutPlus); + } catch (e) { + return strWithoutPlus; + } +}; + +var encode = function encode(str, defaultEncoder, charset) { + // This code was originally written by Brian White 
(mscdex) for the io.js core querystring library. + // It has been adapted here for stricter adherence to RFC 3986 + if (str.length === 0) { + return str; + } + + var string = str; + if (typeof str === 'symbol') { + string = Symbol.prototype.toString.call(str); + } else if (typeof str !== 'string') { + string = String(str); + } + + if (charset === 'iso-8859-1') { + return escape(string).replace(/%u[0-9a-f]{4}/gi, function ($0) { + return '%26%23' + parseInt($0.slice(2), 16) + '%3B'; + }); + } + + var out = ''; + for (var i = 0; i < string.length; ++i) { + var c = string.charCodeAt(i); + + if ( + c === 0x2D // - + || c === 0x2E // . + || c === 0x5F // _ + || c === 0x7E // ~ + || (c >= 0x30 && c <= 0x39) // 0-9 + || (c >= 0x41 && c <= 0x5A) // a-z + || (c >= 0x61 && c <= 0x7A) // A-Z + ) { + out += string.charAt(i); + continue; + } + + if (c < 0x80) { + out = out + hexTable[c]; + continue; + } + + if (c < 0x800) { + out = out + (hexTable[0xC0 | (c >> 6)] + hexTable[0x80 | (c & 0x3F)]); + continue; + } + + if (c < 0xD800 || c >= 0xE000) { + out = out + (hexTable[0xE0 | (c >> 12)] + hexTable[0x80 | ((c >> 6) & 0x3F)] + hexTable[0x80 | (c & 0x3F)]); + continue; + } + + i += 1; + c = 0x10000 + (((c & 0x3FF) << 10) | (string.charCodeAt(i) & 0x3FF)); + out += hexTable[0xF0 | (c >> 18)] + + hexTable[0x80 | ((c >> 12) & 0x3F)] + + hexTable[0x80 | ((c >> 6) & 0x3F)] + + hexTable[0x80 | (c & 0x3F)]; + } + + return out; +}; + +var compact = function compact(value) { + var queue = [{ obj: { o: value }, prop: 'o' }]; + var refs = []; + + for (var i = 0; i < queue.length; ++i) { + var item = queue[i]; + var obj = item.obj[item.prop]; + + var keys = Object.keys(obj); + for (var j = 0; j < keys.length; ++j) { + var key = keys[j]; + var val = obj[key]; + if (typeof val === 'object' && val !== null && refs.indexOf(val) === -1) { + queue.push({ obj: obj, prop: key }); + refs.push(val); + } + } + } + + compactQueue(queue); + + return value; +}; + +var isRegExp = function isRegExp(obj) { + return Object.prototype.toString.call(obj) === '[object RegExp]'; +}; + +var isBuffer = function isBuffer(obj) { + if (!obj || typeof obj !== 'object') { + return false; + } + + return !!(obj.constructor && obj.constructor.isBuffer && obj.constructor.isBuffer(obj)); +}; + +var combine = function combine(a, b) { + return [].concat(a, b); +}; + +module.exports = { + arrayToObject: arrayToObject, + assign: assign, + combine: combine, + compact: compact, + decode: decode, + encode: encode, + isBuffer: isBuffer, + isRegExp: isRegExp, + merge: merge +}; + +},{}]},{},[2])(2) +}); diff --git a/node_modules/formidable/node_modules/qs/lib/formats.js b/node_modules/formidable/node_modules/qs/lib/formats.js new file mode 100644 index 0000000..a4ecca7 --- /dev/null +++ b/node_modules/formidable/node_modules/qs/lib/formats.js @@ -0,0 +1,26 @@ +'use strict'; + +var replace = String.prototype.replace; +var percentTwenties = /%20/g; + +var util = require('./utils'); + +var Format = { + RFC1738: 'RFC1738', + RFC3986: 'RFC3986' +}; + +module.exports = util.assign( + { + 'default': Format.RFC3986, + formatters: { + RFC1738: function (value) { + return replace.call(value, percentTwenties, '+'); + }, + RFC3986: function (value) { + return String(value); + } + } + }, + Format +); diff --git a/node_modules/formidable/node_modules/qs/lib/index.js b/node_modules/formidable/node_modules/qs/lib/index.js new file mode 100644 index 0000000..0d6a97d --- /dev/null +++ b/node_modules/formidable/node_modules/qs/lib/index.js @@ -0,0 +1,11 @@ +'use strict'; + 
+var stringify = require('./stringify'); +var parse = require('./parse'); +var formats = require('./formats'); + +module.exports = { + formats: formats, + parse: parse, + stringify: stringify +}; diff --git a/node_modules/formidable/node_modules/qs/lib/parse.js b/node_modules/formidable/node_modules/qs/lib/parse.js new file mode 100644 index 0000000..49d5c04 --- /dev/null +++ b/node_modules/formidable/node_modules/qs/lib/parse.js @@ -0,0 +1,268 @@ +'use strict'; + +var utils = require('./utils'); + +var has = Object.prototype.hasOwnProperty; +var isArray = Array.isArray; + +var defaults = { + allowDots: false, + allowPrototypes: false, + arrayLimit: 20, + charset: 'utf-8', + charsetSentinel: false, + comma: false, + decoder: utils.decode, + delimiter: '&', + depth: 5, + ignoreQueryPrefix: false, + interpretNumericEntities: false, + parameterLimit: 1000, + parseArrays: true, + plainObjects: false, + strictNullHandling: false +}; + +var interpretNumericEntities = function (str) { + return str.replace(/&#(\d+);/g, function ($0, numberStr) { + return String.fromCharCode(parseInt(numberStr, 10)); + }); +}; + +var parseArrayValue = function (val, options) { + if (val && typeof val === 'string' && options.comma && val.indexOf(',') > -1) { + return val.split(','); + } + + return val; +}; + +var maybeMap = function maybeMap(val, fn) { + if (isArray(val)) { + var mapped = []; + for (var i = 0; i < val.length; i += 1) { + mapped.push(fn(val[i])); + } + return mapped; + } + return fn(val); +}; + +// This is what browsers will submit when the ✓ character occurs in an +// application/x-www-form-urlencoded body and the encoding of the page containing +// the form is iso-8859-1, or when the submitted form has an accept-charset +// attribute of iso-8859-1. Presumably also with other charsets that do not contain +// the ✓ character, such as us-ascii. +var isoSentinel = 'utf8=%26%2310003%3B'; // encodeURIComponent('✓') + +// These are the percent-encoded utf-8 octets representing a checkmark, indicating that the request actually is utf-8 encoded. +var charsetSentinel = 'utf8=%E2%9C%93'; // encodeURIComponent('✓') + +var parseValues = function parseQueryStringValues(str, options) { + var obj = {}; + var cleanStr = options.ignoreQueryPrefix ? str.replace(/^\?/, '') : str; + var limit = options.parameterLimit === Infinity ? undefined : options.parameterLimit; + var parts = cleanStr.split(options.delimiter, limit); + var skipIndex = -1; // Keep track of where the utf8 sentinel was found + var i; + + var charset = options.charset; + if (options.charsetSentinel) { + for (i = 0; i < parts.length; ++i) { + if (parts[i].indexOf('utf8=') === 0) { + if (parts[i] === charsetSentinel) { + charset = 'utf-8'; + } else if (parts[i] === isoSentinel) { + charset = 'iso-8859-1'; + } + skipIndex = i; + i = parts.length; // The eslint settings do not allow break; + } + } + } + + for (i = 0; i < parts.length; ++i) { + if (i === skipIndex) { + continue; + } + var part = parts[i]; + + var bracketEqualsPos = part.indexOf(']='); + var pos = bracketEqualsPos === -1 ? part.indexOf('=') : bracketEqualsPos + 1; + + var key, val; + if (pos === -1) { + key = options.decoder(part, defaults.decoder, charset, 'key'); + val = options.strictNullHandling ? 
null : ''; + } else { + key = options.decoder(part.slice(0, pos), defaults.decoder, charset, 'key'); + val = maybeMap( + parseArrayValue(part.slice(pos + 1), options), + function (encodedVal) { + return options.decoder(encodedVal, defaults.decoder, charset, 'value'); + } + ); + } + + if (val && options.interpretNumericEntities && charset === 'iso-8859-1') { + val = interpretNumericEntities(val); + } + + if (part.indexOf('[]=') > -1) { + val = isArray(val) ? [val] : val; + } + + if (has.call(obj, key)) { + obj[key] = utils.combine(obj[key], val); + } else { + obj[key] = val; + } + } + + return obj; +}; + +var parseObject = function (chain, val, options, valuesParsed) { + var leaf = valuesParsed ? val : parseArrayValue(val, options); + + for (var i = chain.length - 1; i >= 0; --i) { + var obj; + var root = chain[i]; + + if (root === '[]' && options.parseArrays) { + obj = [].concat(leaf); + } else { + obj = options.plainObjects ? Object.create(null) : {}; + var cleanRoot = root.charAt(0) === '[' && root.charAt(root.length - 1) === ']' ? root.slice(1, -1) : root; + var index = parseInt(cleanRoot, 10); + if (!options.parseArrays && cleanRoot === '') { + obj = { 0: leaf }; + } else if ( + !isNaN(index) + && root !== cleanRoot + && String(index) === cleanRoot + && index >= 0 + && (options.parseArrays && index <= options.arrayLimit) + ) { + obj = []; + obj[index] = leaf; + } else { + obj[cleanRoot] = leaf; + } + } + + leaf = obj; // eslint-disable-line no-param-reassign + } + + return leaf; +}; + +var parseKeys = function parseQueryStringKeys(givenKey, val, options, valuesParsed) { + if (!givenKey) { + return; + } + + // Transform dot notation to bracket notation + var key = options.allowDots ? givenKey.replace(/\.([^.[]+)/g, '[$1]') : givenKey; + + // The regex chunks + + var brackets = /(\[[^[\]]*])/; + var child = /(\[[^[\]]*])/g; + + // Get the parent + + var segment = options.depth > 0 && brackets.exec(key); + var parent = segment ? key.slice(0, segment.index) : key; + + // Stash the parent if it exists + + var keys = []; + if (parent) { + // If we aren't using plain objects, optionally prefix keys that would overwrite object prototype properties + if (!options.plainObjects && has.call(Object.prototype, parent)) { + if (!options.allowPrototypes) { + return; + } + } + + keys.push(parent); + } + + // Loop through children appending to the array until we hit depth + + var i = 0; + while (options.depth > 0 && (segment = child.exec(key)) !== null && i < options.depth) { + i += 1; + if (!options.plainObjects && has.call(Object.prototype, segment[1].slice(1, -1))) { + if (!options.allowPrototypes) { + return; + } + } + keys.push(segment[1]); + } + + // If there's a remainder, just add whatever is left + + if (segment) { + keys.push('[' + key.slice(segment.index) + ']'); + } + + return parseObject(keys, val, options, valuesParsed); +}; + +var normalizeParseOptions = function normalizeParseOptions(opts) { + if (!opts) { + return defaults; + } + + if (opts.decoder !== null && opts.decoder !== undefined && typeof opts.decoder !== 'function') { + throw new TypeError('Decoder has to be a function.'); + } + + if (typeof opts.charset !== 'undefined' && opts.charset !== 'utf-8' && opts.charset !== 'iso-8859-1') { + throw new TypeError('The charset option must be either utf-8, iso-8859-1, or undefined'); + } + var charset = typeof opts.charset === 'undefined' ? defaults.charset : opts.charset; + + return { + allowDots: typeof opts.allowDots === 'undefined' ? 
defaults.allowDots : !!opts.allowDots, + allowPrototypes: typeof opts.allowPrototypes === 'boolean' ? opts.allowPrototypes : defaults.allowPrototypes, + arrayLimit: typeof opts.arrayLimit === 'number' ? opts.arrayLimit : defaults.arrayLimit, + charset: charset, + charsetSentinel: typeof opts.charsetSentinel === 'boolean' ? opts.charsetSentinel : defaults.charsetSentinel, + comma: typeof opts.comma === 'boolean' ? opts.comma : defaults.comma, + decoder: typeof opts.decoder === 'function' ? opts.decoder : defaults.decoder, + delimiter: typeof opts.delimiter === 'string' || utils.isRegExp(opts.delimiter) ? opts.delimiter : defaults.delimiter, + // eslint-disable-next-line no-implicit-coercion, no-extra-parens + depth: (typeof opts.depth === 'number' || opts.depth === false) ? +opts.depth : defaults.depth, + ignoreQueryPrefix: opts.ignoreQueryPrefix === true, + interpretNumericEntities: typeof opts.interpretNumericEntities === 'boolean' ? opts.interpretNumericEntities : defaults.interpretNumericEntities, + parameterLimit: typeof opts.parameterLimit === 'number' ? opts.parameterLimit : defaults.parameterLimit, + parseArrays: opts.parseArrays !== false, + plainObjects: typeof opts.plainObjects === 'boolean' ? opts.plainObjects : defaults.plainObjects, + strictNullHandling: typeof opts.strictNullHandling === 'boolean' ? opts.strictNullHandling : defaults.strictNullHandling + }; +}; + +module.exports = function (str, opts) { + var options = normalizeParseOptions(opts); + + if (str === '' || str === null || typeof str === 'undefined') { + return options.plainObjects ? Object.create(null) : {}; + } + + var tempObj = typeof str === 'string' ? parseValues(str, options) : str; + var obj = options.plainObjects ? Object.create(null) : {}; + + // Iterate over the keys and setup the new object + + var keys = Object.keys(tempObj); + for (var i = 0; i < keys.length; ++i) { + var key = keys[i]; + var newObj = parseKeys(key, tempObj[key], options, typeof str === 'string'); + obj = utils.merge(obj, newObj, options); + } + + return utils.compact(obj); +}; diff --git a/node_modules/formidable/node_modules/qs/lib/stringify.js b/node_modules/formidable/node_modules/qs/lib/stringify.js new file mode 100644 index 0000000..72ded14 --- /dev/null +++ b/node_modules/formidable/node_modules/qs/lib/stringify.js @@ -0,0 +1,279 @@ +'use strict'; + +var utils = require('./utils'); +var formats = require('./formats'); +var has = Object.prototype.hasOwnProperty; + +var arrayPrefixGenerators = { + brackets: function brackets(prefix) { + return prefix + '[]'; + }, + comma: 'comma', + indices: function indices(prefix, key) { + return prefix + '[' + key + ']'; + }, + repeat: function repeat(prefix) { + return prefix; + } +}; + +var isArray = Array.isArray; +var push = Array.prototype.push; +var pushToArray = function (arr, valueOrArray) { + push.apply(arr, isArray(valueOrArray) ? 
valueOrArray : [valueOrArray]); +}; + +var toISO = Date.prototype.toISOString; + +var defaultFormat = formats['default']; +var defaults = { + addQueryPrefix: false, + allowDots: false, + charset: 'utf-8', + charsetSentinel: false, + delimiter: '&', + encode: true, + encoder: utils.encode, + encodeValuesOnly: false, + format: defaultFormat, + formatter: formats.formatters[defaultFormat], + // deprecated + indices: false, + serializeDate: function serializeDate(date) { + return toISO.call(date); + }, + skipNulls: false, + strictNullHandling: false +}; + +var isNonNullishPrimitive = function isNonNullishPrimitive(v) { + return typeof v === 'string' + || typeof v === 'number' + || typeof v === 'boolean' + || typeof v === 'symbol' + || typeof v === 'bigint'; +}; + +var stringify = function stringify( + object, + prefix, + generateArrayPrefix, + strictNullHandling, + skipNulls, + encoder, + filter, + sort, + allowDots, + serializeDate, + formatter, + encodeValuesOnly, + charset +) { + var obj = object; + if (typeof filter === 'function') { + obj = filter(prefix, obj); + } else if (obj instanceof Date) { + obj = serializeDate(obj); + } else if (generateArrayPrefix === 'comma' && isArray(obj)) { + obj = obj.join(','); + } + + if (obj === null) { + if (strictNullHandling) { + return encoder && !encodeValuesOnly ? encoder(prefix, defaults.encoder, charset, 'key') : prefix; + } + + obj = ''; + } + + if (isNonNullishPrimitive(obj) || utils.isBuffer(obj)) { + if (encoder) { + var keyValue = encodeValuesOnly ? prefix : encoder(prefix, defaults.encoder, charset, 'key'); + return [formatter(keyValue) + '=' + formatter(encoder(obj, defaults.encoder, charset, 'value'))]; + } + return [formatter(prefix) + '=' + formatter(String(obj))]; + } + + var values = []; + + if (typeof obj === 'undefined') { + return values; + } + + var objKeys; + if (isArray(filter)) { + objKeys = filter; + } else { + var keys = Object.keys(obj); + objKeys = sort ? keys.sort(sort) : keys; + } + + for (var i = 0; i < objKeys.length; ++i) { + var key = objKeys[i]; + + if (skipNulls && obj[key] === null) { + continue; + } + + if (isArray(obj)) { + pushToArray(values, stringify( + obj[key], + typeof generateArrayPrefix === 'function' ? generateArrayPrefix(prefix, key) : prefix, + generateArrayPrefix, + strictNullHandling, + skipNulls, + encoder, + filter, + sort, + allowDots, + serializeDate, + formatter, + encodeValuesOnly, + charset + )); + } else { + pushToArray(values, stringify( + obj[key], + prefix + (allowDots ? '.' 
+ key : '[' + key + ']'), + generateArrayPrefix, + strictNullHandling, + skipNulls, + encoder, + filter, + sort, + allowDots, + serializeDate, + formatter, + encodeValuesOnly, + charset + )); + } + } + + return values; +}; + +var normalizeStringifyOptions = function normalizeStringifyOptions(opts) { + if (!opts) { + return defaults; + } + + if (opts.encoder !== null && opts.encoder !== undefined && typeof opts.encoder !== 'function') { + throw new TypeError('Encoder has to be a function.'); + } + + var charset = opts.charset || defaults.charset; + if (typeof opts.charset !== 'undefined' && opts.charset !== 'utf-8' && opts.charset !== 'iso-8859-1') { + throw new TypeError('The charset option must be either utf-8, iso-8859-1, or undefined'); + } + + var format = formats['default']; + if (typeof opts.format !== 'undefined') { + if (!has.call(formats.formatters, opts.format)) { + throw new TypeError('Unknown format option provided.'); + } + format = opts.format; + } + var formatter = formats.formatters[format]; + + var filter = defaults.filter; + if (typeof opts.filter === 'function' || isArray(opts.filter)) { + filter = opts.filter; + } + + return { + addQueryPrefix: typeof opts.addQueryPrefix === 'boolean' ? opts.addQueryPrefix : defaults.addQueryPrefix, + allowDots: typeof opts.allowDots === 'undefined' ? defaults.allowDots : !!opts.allowDots, + charset: charset, + charsetSentinel: typeof opts.charsetSentinel === 'boolean' ? opts.charsetSentinel : defaults.charsetSentinel, + delimiter: typeof opts.delimiter === 'undefined' ? defaults.delimiter : opts.delimiter, + encode: typeof opts.encode === 'boolean' ? opts.encode : defaults.encode, + encoder: typeof opts.encoder === 'function' ? opts.encoder : defaults.encoder, + encodeValuesOnly: typeof opts.encodeValuesOnly === 'boolean' ? opts.encodeValuesOnly : defaults.encodeValuesOnly, + filter: filter, + formatter: formatter, + serializeDate: typeof opts.serializeDate === 'function' ? opts.serializeDate : defaults.serializeDate, + skipNulls: typeof opts.skipNulls === 'boolean' ? opts.skipNulls : defaults.skipNulls, + sort: typeof opts.sort === 'function' ? opts.sort : null, + strictNullHandling: typeof opts.strictNullHandling === 'boolean' ? opts.strictNullHandling : defaults.strictNullHandling + }; +}; + +module.exports = function (object, opts) { + var obj = object; + var options = normalizeStringifyOptions(opts); + + var objKeys; + var filter; + + if (typeof options.filter === 'function') { + filter = options.filter; + obj = filter('', obj); + } else if (isArray(options.filter)) { + filter = options.filter; + objKeys = filter; + } + + var keys = []; + + if (typeof obj !== 'object' || obj === null) { + return ''; + } + + var arrayFormat; + if (opts && opts.arrayFormat in arrayPrefixGenerators) { + arrayFormat = opts.arrayFormat; + } else if (opts && 'indices' in opts) { + arrayFormat = opts.indices ? 'indices' : 'repeat'; + } else { + arrayFormat = 'indices'; + } + + var generateArrayPrefix = arrayPrefixGenerators[arrayFormat]; + + if (!objKeys) { + objKeys = Object.keys(obj); + } + + if (options.sort) { + objKeys.sort(options.sort); + } + + for (var i = 0; i < objKeys.length; ++i) { + var key = objKeys[i]; + + if (options.skipNulls && obj[key] === null) { + continue; + } + pushToArray(keys, stringify( + obj[key], + key, + generateArrayPrefix, + options.strictNullHandling, + options.skipNulls, + options.encode ? 
options.encoder : null, + options.filter, + options.sort, + options.allowDots, + options.serializeDate, + options.formatter, + options.encodeValuesOnly, + options.charset + )); + } + + var joined = keys.join(options.delimiter); + var prefix = options.addQueryPrefix === true ? '?' : ''; + + if (options.charsetSentinel) { + if (options.charset === 'iso-8859-1') { + // encodeURIComponent('✓'), the "numeric entity" representation of a checkmark + prefix += 'utf8=%26%2310003%3B&'; + } else { + // encodeURIComponent('✓') + prefix += 'utf8=%E2%9C%93&'; + } + } + + return joined.length > 0 ? prefix + joined : ''; +}; diff --git a/node_modules/formidable/node_modules/qs/lib/utils.js b/node_modules/formidable/node_modules/qs/lib/utils.js new file mode 100644 index 0000000..bb97241 --- /dev/null +++ b/node_modules/formidable/node_modules/qs/lib/utils.js @@ -0,0 +1,236 @@ +'use strict'; + +var has = Object.prototype.hasOwnProperty; +var isArray = Array.isArray; + +var hexTable = (function () { + var array = []; + for (var i = 0; i < 256; ++i) { + array.push('%' + ((i < 16 ? '0' : '') + i.toString(16)).toUpperCase()); + } + + return array; +}()); + +var compactQueue = function compactQueue(queue) { + while (queue.length > 1) { + var item = queue.pop(); + var obj = item.obj[item.prop]; + + if (isArray(obj)) { + var compacted = []; + + for (var j = 0; j < obj.length; ++j) { + if (typeof obj[j] !== 'undefined') { + compacted.push(obj[j]); + } + } + + item.obj[item.prop] = compacted; + } + } +}; + +var arrayToObject = function arrayToObject(source, options) { + var obj = options && options.plainObjects ? Object.create(null) : {}; + for (var i = 0; i < source.length; ++i) { + if (typeof source[i] !== 'undefined') { + obj[i] = source[i]; + } + } + + return obj; +}; + +var merge = function merge(target, source, options) { + /* eslint no-param-reassign: 0 */ + if (!source) { + return target; + } + + if (typeof source !== 'object') { + if (isArray(target)) { + target.push(source); + } else if (target && typeof target === 'object') { + if ((options && (options.plainObjects || options.allowPrototypes)) || !has.call(Object.prototype, source)) { + target[source] = true; + } + } else { + return [target, source]; + } + + return target; + } + + if (!target || typeof target !== 'object') { + return [target].concat(source); + } + + var mergeTarget = target; + if (isArray(target) && !isArray(source)) { + mergeTarget = arrayToObject(target, options); + } + + if (isArray(target) && isArray(source)) { + source.forEach(function (item, i) { + if (has.call(target, i)) { + var targetItem = target[i]; + if (targetItem && typeof targetItem === 'object' && item && typeof item === 'object') { + target[i] = merge(targetItem, item, options); + } else { + target.push(item); + } + } else { + target[i] = item; + } + }); + return target; + } + + return Object.keys(source).reduce(function (acc, key) { + var value = source[key]; + + if (has.call(acc, key)) { + acc[key] = merge(acc[key], value, options); + } else { + acc[key] = value; + } + return acc; + }, mergeTarget); +}; + +var assign = function assignSingleSource(target, source) { + return Object.keys(source).reduce(function (acc, key) { + acc[key] = source[key]; + return acc; + }, target); +}; + +var decode = function (str, decoder, charset) { + var strWithoutPlus = str.replace(/\+/g, ' '); + if (charset === 'iso-8859-1') { + // unescape never throws, no try...catch needed: + return strWithoutPlus.replace(/%[0-9a-f]{2}/gi, unescape); + } + // utf-8 + try { + return 
decodeURIComponent(strWithoutPlus); + } catch (e) { + return strWithoutPlus; + } +}; + +var encode = function encode(str, defaultEncoder, charset) { + // This code was originally written by Brian White (mscdex) for the io.js core querystring library. + // It has been adapted here for stricter adherence to RFC 3986 + if (str.length === 0) { + return str; + } + + var string = str; + if (typeof str === 'symbol') { + string = Symbol.prototype.toString.call(str); + } else if (typeof str !== 'string') { + string = String(str); + } + + if (charset === 'iso-8859-1') { + return escape(string).replace(/%u[0-9a-f]{4}/gi, function ($0) { + return '%26%23' + parseInt($0.slice(2), 16) + '%3B'; + }); + } + + var out = ''; + for (var i = 0; i < string.length; ++i) { + var c = string.charCodeAt(i); + + if ( + c === 0x2D // - + || c === 0x2E // . + || c === 0x5F // _ + || c === 0x7E // ~ + || (c >= 0x30 && c <= 0x39) // 0-9 + || (c >= 0x41 && c <= 0x5A) // a-z + || (c >= 0x61 && c <= 0x7A) // A-Z + ) { + out += string.charAt(i); + continue; + } + + if (c < 0x80) { + out = out + hexTable[c]; + continue; + } + + if (c < 0x800) { + out = out + (hexTable[0xC0 | (c >> 6)] + hexTable[0x80 | (c & 0x3F)]); + continue; + } + + if (c < 0xD800 || c >= 0xE000) { + out = out + (hexTable[0xE0 | (c >> 12)] + hexTable[0x80 | ((c >> 6) & 0x3F)] + hexTable[0x80 | (c & 0x3F)]); + continue; + } + + i += 1; + c = 0x10000 + (((c & 0x3FF) << 10) | (string.charCodeAt(i) & 0x3FF)); + out += hexTable[0xF0 | (c >> 18)] + + hexTable[0x80 | ((c >> 12) & 0x3F)] + + hexTable[0x80 | ((c >> 6) & 0x3F)] + + hexTable[0x80 | (c & 0x3F)]; + } + + return out; +}; + +var compact = function compact(value) { + var queue = [{ obj: { o: value }, prop: 'o' }]; + var refs = []; + + for (var i = 0; i < queue.length; ++i) { + var item = queue[i]; + var obj = item.obj[item.prop]; + + var keys = Object.keys(obj); + for (var j = 0; j < keys.length; ++j) { + var key = keys[j]; + var val = obj[key]; + if (typeof val === 'object' && val !== null && refs.indexOf(val) === -1) { + queue.push({ obj: obj, prop: key }); + refs.push(val); + } + } + } + + compactQueue(queue); + + return value; +}; + +var isRegExp = function isRegExp(obj) { + return Object.prototype.toString.call(obj) === '[object RegExp]'; +}; + +var isBuffer = function isBuffer(obj) { + if (!obj || typeof obj !== 'object') { + return false; + } + + return !!(obj.constructor && obj.constructor.isBuffer && obj.constructor.isBuffer(obj)); +}; + +var combine = function combine(a, b) { + return [].concat(a, b); +}; + +module.exports = { + arrayToObject: arrayToObject, + assign: assign, + combine: combine, + compact: compact, + decode: decode, + encode: encode, + isBuffer: isBuffer, + isRegExp: isRegExp, + merge: merge +}; diff --git a/node_modules/formidable/node_modules/qs/package.json b/node_modules/formidable/node_modules/qs/package.json new file mode 100644 index 0000000..443cba7 --- /dev/null +++ b/node_modules/formidable/node_modules/qs/package.json @@ -0,0 +1,69 @@ +{ + "name": "qs", + "description": "A querystring parser that supports nesting and arrays, with a depth limit", + "homepage": "https://github.com/ljharb/qs", + "version": "6.9.3", + "repository": { + "type": "git", + "url": "https://github.com/ljharb/qs.git" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + }, + "main": "lib/index.js", + "contributors": [ + { + "name": "Jordan Harband", + "email": "ljharb@gmail.com", + "url": "http://ljharb.codes" + } + ], + "keywords": [ + "querystring", + "qs", + "query", + "url", + 
"parse", + "stringify" + ], + "engines": { + "node": ">=0.6" + }, + "dependencies": {}, + "devDependencies": { + "@ljharb/eslint-config": "^16.0.0", + "browserify": "^16.5.0", + "covert": "^1.1.1", + "eclint": "^2.8.1", + "eslint": "^6.8.0", + "evalmd": "^0.0.19", + "for-each": "^0.3.3", + "has-symbols": "^1.0.1", + "iconv-lite": "^0.5.1", + "mkdirp": "^0.5.4", + "object-inspect": "^1.7.0", + "qs-iconv": "^1.0.4", + "safe-publish-latest": "^1.1.4", + "safer-buffer": "^2.1.2", + "tape": "^5.0.0-next.5" + }, + "scripts": { + "prepublish": "safe-publish-latest && npm run dist", + "pretest": "npm run --silent readme && npm run --silent lint", + "test": "npm run --silent coverage", + "tests-only": "node test", + "posttest": "npx aud --production", + "readme": "evalmd README.md", + "postlint": "eclint check * lib/* test/*", + "lint": "eslint lib/*.js test/*.js", + "coverage": "covert test", + "dist": "mkdirp dist && browserify --standalone Qs lib/index.js > dist/qs.js" + }, + "license": "BSD-3-Clause", + "greenkeeper": { + "ignore": [ + "iconv-lite", + "mkdirp" + ] + } +} diff --git a/node_modules/formidable/node_modules/qs/test/.eslintrc b/node_modules/formidable/node_modules/qs/test/.eslintrc new file mode 100644 index 0000000..8ab8536 --- /dev/null +++ b/node_modules/formidable/node_modules/qs/test/.eslintrc @@ -0,0 +1,18 @@ +{ + "rules": { + "array-bracket-newline": 0, + "array-element-newline": 0, + "consistent-return": 2, + "function-paren-newline": 0, + "max-lines": 0, + "max-lines-per-function": 0, + "max-nested-callbacks": [2, 3], + "max-statements": 0, + "no-buffer-constructor": 0, + "no-extend-native": 0, + "no-magic-numbers": 0, + "no-throw-literal": 0, + "object-curly-newline": 0, + "sort-keys": 0, + } +} diff --git a/node_modules/formidable/node_modules/qs/test/index.js b/node_modules/formidable/node_modules/qs/test/index.js new file mode 100644 index 0000000..5e6bc8f --- /dev/null +++ b/node_modules/formidable/node_modules/qs/test/index.js @@ -0,0 +1,7 @@ +'use strict'; + +require('./parse'); + +require('./stringify'); + +require('./utils'); diff --git a/node_modules/formidable/node_modules/qs/test/parse.js b/node_modules/formidable/node_modules/qs/test/parse.js new file mode 100644 index 0000000..b6ec1b7 --- /dev/null +++ b/node_modules/formidable/node_modules/qs/test/parse.js @@ -0,0 +1,772 @@ +'use strict'; + +var test = require('tape'); +var qs = require('../'); +var utils = require('../lib/utils'); +var iconv = require('iconv-lite'); +var SaferBuffer = require('safer-buffer').Buffer; + +test('parse()', function (t) { + t.test('parses a simple string', function (st) { + st.deepEqual(qs.parse('0=foo'), { 0: 'foo' }); + st.deepEqual(qs.parse('foo=c++'), { foo: 'c ' }); + st.deepEqual(qs.parse('a[>=]=23'), { a: { '>=': '23' } }); + st.deepEqual(qs.parse('a[<=>]==23'), { a: { '<=>': '=23' } }); + st.deepEqual(qs.parse('a[==]=23'), { a: { '==': '23' } }); + st.deepEqual(qs.parse('foo', { strictNullHandling: true }), { foo: null }); + st.deepEqual(qs.parse('foo'), { foo: '' }); + st.deepEqual(qs.parse('foo='), { foo: '' }); + st.deepEqual(qs.parse('foo=bar'), { foo: 'bar' }); + st.deepEqual(qs.parse(' foo = bar = baz '), { ' foo ': ' bar = baz ' }); + st.deepEqual(qs.parse('foo=bar=baz'), { foo: 'bar=baz' }); + st.deepEqual(qs.parse('foo=bar&bar=baz'), { foo: 'bar', bar: 'baz' }); + st.deepEqual(qs.parse('foo2=bar2&baz2='), { foo2: 'bar2', baz2: '' }); + st.deepEqual(qs.parse('foo=bar&baz', { strictNullHandling: true }), { foo: 'bar', baz: null }); + 
st.deepEqual(qs.parse('foo=bar&baz'), { foo: 'bar', baz: '' }); + st.deepEqual(qs.parse('cht=p3&chd=t:60,40&chs=250x100&chl=Hello|World'), { + cht: 'p3', + chd: 't:60,40', + chs: '250x100', + chl: 'Hello|World' + }); + st.end(); + }); + + t.test('arrayFormat: brackets allows only explicit arrays', function (st) { + st.deepEqual(qs.parse('a[]=b&a[]=c', { arrayFormat: 'brackets' }), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a[0]=b&a[1]=c', { arrayFormat: 'brackets' }), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a=b,c', { arrayFormat: 'brackets' }), { a: 'b,c' }); + st.deepEqual(qs.parse('a=b&a=c', { arrayFormat: 'brackets' }), { a: ['b', 'c'] }); + st.end(); + }); + + t.test('arrayFormat: indices allows only indexed arrays', function (st) { + st.deepEqual(qs.parse('a[]=b&a[]=c', { arrayFormat: 'indices' }), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a[0]=b&a[1]=c', { arrayFormat: 'indices' }), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a=b,c', { arrayFormat: 'indices' }), { a: 'b,c' }); + st.deepEqual(qs.parse('a=b&a=c', { arrayFormat: 'indices' }), { a: ['b', 'c'] }); + st.end(); + }); + + t.test('arrayFormat: comma allows only comma-separated arrays', function (st) { + st.deepEqual(qs.parse('a[]=b&a[]=c', { arrayFormat: 'comma' }), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a[0]=b&a[1]=c', { arrayFormat: 'comma' }), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a=b,c', { arrayFormat: 'comma' }), { a: 'b,c' }); + st.deepEqual(qs.parse('a=b&a=c', { arrayFormat: 'comma' }), { a: ['b', 'c'] }); + st.end(); + }); + + t.test('arrayFormat: repeat allows only repeated values', function (st) { + st.deepEqual(qs.parse('a[]=b&a[]=c', { arrayFormat: 'repeat' }), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a[0]=b&a[1]=c', { arrayFormat: 'repeat' }), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a=b,c', { arrayFormat: 'repeat' }), { a: 'b,c' }); + st.deepEqual(qs.parse('a=b&a=c', { arrayFormat: 'repeat' }), { a: ['b', 'c'] }); + st.end(); + }); + + t.test('allows enabling dot notation', function (st) { + st.deepEqual(qs.parse('a.b=c'), { 'a.b': 'c' }); + st.deepEqual(qs.parse('a.b=c', { allowDots: true }), { a: { b: 'c' } }); + st.end(); + }); + + t.deepEqual(qs.parse('a[b]=c'), { a: { b: 'c' } }, 'parses a single nested string'); + t.deepEqual(qs.parse('a[b][c]=d'), { a: { b: { c: 'd' } } }, 'parses a double nested string'); + t.deepEqual( + qs.parse('a[b][c][d][e][f][g][h]=i'), + { a: { b: { c: { d: { e: { f: { '[g][h]': 'i' } } } } } } }, + 'defaults to a depth of 5' + ); + + t.test('only parses one level when depth = 1', function (st) { + st.deepEqual(qs.parse('a[b][c]=d', { depth: 1 }), { a: { b: { '[c]': 'd' } } }); + st.deepEqual(qs.parse('a[b][c][d]=e', { depth: 1 }), { a: { b: { '[c][d]': 'e' } } }); + st.end(); + }); + + t.test('uses original key when depth = 0', function (st) { + st.deepEqual(qs.parse('a[0]=b&a[1]=c', { depth: 0 }), { 'a[0]': 'b', 'a[1]': 'c' }); + st.deepEqual(qs.parse('a[0][0]=b&a[0][1]=c&a[1]=d&e=2', { depth: 0 }), { 'a[0][0]': 'b', 'a[0][1]': 'c', 'a[1]': 'd', e: '2' }); + st.end(); + }); + + t.test('uses original key when depth = false', function (st) { + st.deepEqual(qs.parse('a[0]=b&a[1]=c', { depth: false }), { 'a[0]': 'b', 'a[1]': 'c' }); + st.deepEqual(qs.parse('a[0][0]=b&a[0][1]=c&a[1]=d&e=2', { depth: false }), { 'a[0][0]': 'b', 'a[0][1]': 'c', 'a[1]': 'd', e: '2' }); + st.end(); + }); + + t.deepEqual(qs.parse('a=b&a=c'), { a: ['b', 'c'] }, 'parses a simple array'); + + t.test('parses an explicit array', function (st) { + st.deepEqual(qs.parse('a[]=b'), 
{ a: ['b'] }); + st.deepEqual(qs.parse('a[]=b&a[]=c'), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a[]=b&a[]=c&a[]=d'), { a: ['b', 'c', 'd'] }); + st.end(); + }); + + t.test('parses a mix of simple and explicit arrays', function (st) { + st.deepEqual(qs.parse('a=b&a[]=c'), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a[]=b&a=c'), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a[0]=b&a=c'), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a=b&a[0]=c'), { a: ['b', 'c'] }); + + st.deepEqual(qs.parse('a[1]=b&a=c', { arrayLimit: 20 }), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a[]=b&a=c', { arrayLimit: 0 }), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a[]=b&a=c'), { a: ['b', 'c'] }); + + st.deepEqual(qs.parse('a=b&a[1]=c', { arrayLimit: 20 }), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a=b&a[]=c', { arrayLimit: 0 }), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a=b&a[]=c'), { a: ['b', 'c'] }); + + st.end(); + }); + + t.test('parses a nested array', function (st) { + st.deepEqual(qs.parse('a[b][]=c&a[b][]=d'), { a: { b: ['c', 'd'] } }); + st.deepEqual(qs.parse('a[>=]=25'), { a: { '>=': '25' } }); + st.end(); + }); + + t.test('allows to specify array indices', function (st) { + st.deepEqual(qs.parse('a[1]=c&a[0]=b&a[2]=d'), { a: ['b', 'c', 'd'] }); + st.deepEqual(qs.parse('a[1]=c&a[0]=b'), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a[1]=c', { arrayLimit: 20 }), { a: ['c'] }); + st.deepEqual(qs.parse('a[1]=c', { arrayLimit: 0 }), { a: { 1: 'c' } }); + st.deepEqual(qs.parse('a[1]=c'), { a: ['c'] }); + st.end(); + }); + + t.test('limits specific array indices to arrayLimit', function (st) { + st.deepEqual(qs.parse('a[20]=a', { arrayLimit: 20 }), { a: ['a'] }); + st.deepEqual(qs.parse('a[21]=a', { arrayLimit: 20 }), { a: { 21: 'a' } }); + st.end(); + }); + + t.deepEqual(qs.parse('a[12b]=c'), { a: { '12b': 'c' } }, 'supports keys that begin with a number'); + + t.test('supports encoded = signs', function (st) { + st.deepEqual(qs.parse('he%3Dllo=th%3Dere'), { 'he=llo': 'th=ere' }); + st.end(); + }); + + t.test('is ok with url encoded strings', function (st) { + st.deepEqual(qs.parse('a[b%20c]=d'), { a: { 'b c': 'd' } }); + st.deepEqual(qs.parse('a[b]=c%20d'), { a: { b: 'c d' } }); + st.end(); + }); + + t.test('allows brackets in the value', function (st) { + st.deepEqual(qs.parse('pets=["tobi"]'), { pets: '["tobi"]' }); + st.deepEqual(qs.parse('operators=[">=", "<="]'), { operators: '[">=", "<="]' }); + st.end(); + }); + + t.test('allows empty values', function (st) { + st.deepEqual(qs.parse(''), {}); + st.deepEqual(qs.parse(null), {}); + st.deepEqual(qs.parse(undefined), {}); + st.end(); + }); + + t.test('transforms arrays to objects', function (st) { + st.deepEqual(qs.parse('foo[0]=bar&foo[bad]=baz'), { foo: { 0: 'bar', bad: 'baz' } }); + st.deepEqual(qs.parse('foo[bad]=baz&foo[0]=bar'), { foo: { bad: 'baz', 0: 'bar' } }); + st.deepEqual(qs.parse('foo[bad]=baz&foo[]=bar'), { foo: { bad: 'baz', 0: 'bar' } }); + st.deepEqual(qs.parse('foo[]=bar&foo[bad]=baz'), { foo: { 0: 'bar', bad: 'baz' } }); + st.deepEqual(qs.parse('foo[bad]=baz&foo[]=bar&foo[]=foo'), { foo: { bad: 'baz', 0: 'bar', 1: 'foo' } }); + st.deepEqual(qs.parse('foo[0][a]=a&foo[0][b]=b&foo[1][a]=aa&foo[1][b]=bb'), { foo: [{ a: 'a', b: 'b' }, { a: 'aa', b: 'bb' }] }); + + st.deepEqual(qs.parse('a[]=b&a[t]=u&a[hasOwnProperty]=c', { allowPrototypes: false }), { a: { 0: 'b', t: 'u' } }); + st.deepEqual(qs.parse('a[]=b&a[t]=u&a[hasOwnProperty]=c', { allowPrototypes: true }), { a: { 0: 'b', t: 'u', hasOwnProperty: 'c' } }); + 
st.deepEqual(qs.parse('a[]=b&a[hasOwnProperty]=c&a[x]=y', { allowPrototypes: false }), { a: { 0: 'b', x: 'y' } }); + st.deepEqual(qs.parse('a[]=b&a[hasOwnProperty]=c&a[x]=y', { allowPrototypes: true }), { a: { 0: 'b', hasOwnProperty: 'c', x: 'y' } }); + st.end(); + }); + + t.test('transforms arrays to objects (dot notation)', function (st) { + st.deepEqual(qs.parse('foo[0].baz=bar&fool.bad=baz', { allowDots: true }), { foo: [{ baz: 'bar' }], fool: { bad: 'baz' } }); + st.deepEqual(qs.parse('foo[0].baz=bar&fool.bad.boo=baz', { allowDots: true }), { foo: [{ baz: 'bar' }], fool: { bad: { boo: 'baz' } } }); + st.deepEqual(qs.parse('foo[0][0].baz=bar&fool.bad=baz', { allowDots: true }), { foo: [[{ baz: 'bar' }]], fool: { bad: 'baz' } }); + st.deepEqual(qs.parse('foo[0].baz[0]=15&foo[0].bar=2', { allowDots: true }), { foo: [{ baz: ['15'], bar: '2' }] }); + st.deepEqual(qs.parse('foo[0].baz[0]=15&foo[0].baz[1]=16&foo[0].bar=2', { allowDots: true }), { foo: [{ baz: ['15', '16'], bar: '2' }] }); + st.deepEqual(qs.parse('foo.bad=baz&foo[0]=bar', { allowDots: true }), { foo: { bad: 'baz', 0: 'bar' } }); + st.deepEqual(qs.parse('foo.bad=baz&foo[]=bar', { allowDots: true }), { foo: { bad: 'baz', 0: 'bar' } }); + st.deepEqual(qs.parse('foo[]=bar&foo.bad=baz', { allowDots: true }), { foo: { 0: 'bar', bad: 'baz' } }); + st.deepEqual(qs.parse('foo.bad=baz&foo[]=bar&foo[]=foo', { allowDots: true }), { foo: { bad: 'baz', 0: 'bar', 1: 'foo' } }); + st.deepEqual(qs.parse('foo[0].a=a&foo[0].b=b&foo[1].a=aa&foo[1].b=bb', { allowDots: true }), { foo: [{ a: 'a', b: 'b' }, { a: 'aa', b: 'bb' }] }); + st.end(); + }); + + t.test('correctly prunes undefined values when converting an array to an object', function (st) { + st.deepEqual(qs.parse('a[2]=b&a[99999999]=c'), { a: { 2: 'b', 99999999: 'c' } }); + st.end(); + }); + + t.test('supports malformed uri characters', function (st) { + st.deepEqual(qs.parse('{%:%}', { strictNullHandling: true }), { '{%:%}': null }); + st.deepEqual(qs.parse('{%:%}='), { '{%:%}': '' }); + st.deepEqual(qs.parse('foo=%:%}'), { foo: '%:%}' }); + st.end(); + }); + + t.test('doesn\'t produce empty keys', function (st) { + st.deepEqual(qs.parse('_r=1&'), { _r: '1' }); + st.end(); + }); + + t.test('cannot access Object prototype', function (st) { + qs.parse('constructor[prototype][bad]=bad'); + qs.parse('bad[constructor][prototype][bad]=bad'); + st.equal(typeof Object.prototype.bad, 'undefined'); + st.end(); + }); + + t.test('parses arrays of objects', function (st) { + st.deepEqual(qs.parse('a[][b]=c'), { a: [{ b: 'c' }] }); + st.deepEqual(qs.parse('a[0][b]=c'), { a: [{ b: 'c' }] }); + st.end(); + }); + + t.test('allows for empty strings in arrays', function (st) { + st.deepEqual(qs.parse('a[]=b&a[]=&a[]=c'), { a: ['b', '', 'c'] }); + + st.deepEqual( + qs.parse('a[0]=b&a[1]&a[2]=c&a[19]=', { strictNullHandling: true, arrayLimit: 20 }), + { a: ['b', null, 'c', ''] }, + 'with arrayLimit 20 + array indices: null then empty string works' + ); + st.deepEqual( + qs.parse('a[]=b&a[]&a[]=c&a[]=', { strictNullHandling: true, arrayLimit: 0 }), + { a: ['b', null, 'c', ''] }, + 'with arrayLimit 0 + array brackets: null then empty string works' + ); + + st.deepEqual( + qs.parse('a[0]=b&a[1]=&a[2]=c&a[19]', { strictNullHandling: true, arrayLimit: 20 }), + { a: ['b', '', 'c', null] }, + 'with arrayLimit 20 + array indices: empty string then null works' + ); + st.deepEqual( + qs.parse('a[]=b&a[]=&a[]=c&a[]', { strictNullHandling: true, arrayLimit: 0 }), + { a: ['b', '', 'c', null] }, + 'with arrayLimit 0 + 
array brackets: empty string then null works' + ); + + st.deepEqual( + qs.parse('a[]=&a[]=b&a[]=c'), + { a: ['', 'b', 'c'] }, + 'array brackets: empty strings work' + ); + st.end(); + }); + + t.test('compacts sparse arrays', function (st) { + st.deepEqual(qs.parse('a[10]=1&a[2]=2', { arrayLimit: 20 }), { a: ['2', '1'] }); + st.deepEqual(qs.parse('a[1][b][2][c]=1', { arrayLimit: 20 }), { a: [{ b: [{ c: '1' }] }] }); + st.deepEqual(qs.parse('a[1][2][3][c]=1', { arrayLimit: 20 }), { a: [[[{ c: '1' }]]] }); + st.deepEqual(qs.parse('a[1][2][3][c][1]=1', { arrayLimit: 20 }), { a: [[[{ c: ['1'] }]]] }); + st.end(); + }); + + t.test('parses semi-parsed strings', function (st) { + st.deepEqual(qs.parse({ 'a[b]': 'c' }), { a: { b: 'c' } }); + st.deepEqual(qs.parse({ 'a[b]': 'c', 'a[d]': 'e' }), { a: { b: 'c', d: 'e' } }); + st.end(); + }); + + t.test('parses buffers correctly', function (st) { + var b = SaferBuffer.from('test'); + st.deepEqual(qs.parse({ a: b }), { a: b }); + st.end(); + }); + + t.test('parses jquery-param strings', function (st) { + // readable = 'filter[0][]=int1&filter[0][]==&filter[0][]=77&filter[]=and&filter[2][]=int2&filter[2][]==&filter[2][]=8' + var encoded = 'filter%5B0%5D%5B%5D=int1&filter%5B0%5D%5B%5D=%3D&filter%5B0%5D%5B%5D=77&filter%5B%5D=and&filter%5B2%5D%5B%5D=int2&filter%5B2%5D%5B%5D=%3D&filter%5B2%5D%5B%5D=8'; + var expected = { filter: [['int1', '=', '77'], 'and', ['int2', '=', '8']] }; + st.deepEqual(qs.parse(encoded), expected); + st.end(); + }); + + t.test('continues parsing when no parent is found', function (st) { + st.deepEqual(qs.parse('[]=&a=b'), { 0: '', a: 'b' }); + st.deepEqual(qs.parse('[]&a=b', { strictNullHandling: true }), { 0: null, a: 'b' }); + st.deepEqual(qs.parse('[foo]=bar'), { foo: 'bar' }); + st.end(); + }); + + t.test('does not error when parsing a very long array', function (st) { + var str = 'a[]=a'; + while (Buffer.byteLength(str) < 128 * 1024) { + str = str + '&' + str; + } + + st.doesNotThrow(function () { + qs.parse(str); + }); + + st.end(); + }); + + t.test('should not throw when a native prototype has an enumerable property', function (st) { + Object.prototype.crash = ''; + Array.prototype.crash = ''; + st.doesNotThrow(qs.parse.bind(null, 'a=b')); + st.deepEqual(qs.parse('a=b'), { a: 'b' }); + st.doesNotThrow(qs.parse.bind(null, 'a[][b]=c')); + st.deepEqual(qs.parse('a[][b]=c'), { a: [{ b: 'c' }] }); + delete Object.prototype.crash; + delete Array.prototype.crash; + st.end(); + }); + + t.test('parses a string with an alternative string delimiter', function (st) { + st.deepEqual(qs.parse('a=b;c=d', { delimiter: ';' }), { a: 'b', c: 'd' }); + st.end(); + }); + + t.test('parses a string with an alternative RegExp delimiter', function (st) { + st.deepEqual(qs.parse('a=b; c=d', { delimiter: /[;,] */ }), { a: 'b', c: 'd' }); + st.end(); + }); + + t.test('does not use non-splittable objects as delimiters', function (st) { + st.deepEqual(qs.parse('a=b&c=d', { delimiter: true }), { a: 'b', c: 'd' }); + st.end(); + }); + + t.test('allows overriding parameter limit', function (st) { + st.deepEqual(qs.parse('a=b&c=d', { parameterLimit: 1 }), { a: 'b' }); + st.end(); + }); + + t.test('allows setting the parameter limit to Infinity', function (st) { + st.deepEqual(qs.parse('a=b&c=d', { parameterLimit: Infinity }), { a: 'b', c: 'd' }); + st.end(); + }); + + t.test('allows overriding array limit', function (st) { + st.deepEqual(qs.parse('a[0]=b', { arrayLimit: -1 }), { a: { 0: 'b' } }); + st.deepEqual(qs.parse('a[-1]=b', { arrayLimit: -1 }), { a: { 
'-1': 'b' } }); + st.deepEqual(qs.parse('a[0]=b&a[1]=c', { arrayLimit: 0 }), { a: { 0: 'b', 1: 'c' } }); + st.end(); + }); + + t.test('allows disabling array parsing', function (st) { + var indices = qs.parse('a[0]=b&a[1]=c', { parseArrays: false }); + st.deepEqual(indices, { a: { 0: 'b', 1: 'c' } }); + st.equal(Array.isArray(indices.a), false, 'parseArrays:false, indices case is not an array'); + + var emptyBrackets = qs.parse('a[]=b', { parseArrays: false }); + st.deepEqual(emptyBrackets, { a: { 0: 'b' } }); + st.equal(Array.isArray(emptyBrackets.a), false, 'parseArrays:false, empty brackets case is not an array'); + + st.end(); + }); + + t.test('allows for query string prefix', function (st) { + st.deepEqual(qs.parse('?foo=bar', { ignoreQueryPrefix: true }), { foo: 'bar' }); + st.deepEqual(qs.parse('foo=bar', { ignoreQueryPrefix: true }), { foo: 'bar' }); + st.deepEqual(qs.parse('?foo=bar', { ignoreQueryPrefix: false }), { '?foo': 'bar' }); + st.end(); + }); + + t.test('parses an object', function (st) { + var input = { + 'user[name]': { 'pop[bob]': 3 }, + 'user[email]': null + }; + + var expected = { + user: { + name: { 'pop[bob]': 3 }, + email: null + } + }; + + var result = qs.parse(input); + + st.deepEqual(result, expected); + st.end(); + }); + + t.test('parses string with comma as array divider', function (st) { + st.deepEqual(qs.parse('foo=bar,tee', { comma: true }), { foo: ['bar', 'tee'] }); + st.deepEqual(qs.parse('foo[bar]=coffee,tee', { comma: true }), { foo: { bar: ['coffee', 'tee'] } }); + st.deepEqual(qs.parse('foo=', { comma: true }), { foo: '' }); + st.deepEqual(qs.parse('foo', { comma: true }), { foo: '' }); + st.deepEqual(qs.parse('foo', { comma: true, strictNullHandling: true }), { foo: null }); + st.end(); + }); + + t.test('parses values with comma as array divider', function (st) { + st.deepEqual(qs.parse({ foo: 'bar,tee' }, { comma: false }), { foo: 'bar,tee' }); + st.deepEqual(qs.parse({ foo: 'bar,tee' }, { comma: true }), { foo: ['bar', 'tee'] }); + st.end(); + }); + + t.test('use number decoder, parses string that has one number with comma option enabled', function (st) { + var decoder = function (str, defaultDecoder, charset, type) { + if (!isNaN(Number(str))) { + return parseFloat(str); + } + return defaultDecoder(str, defaultDecoder, charset, type); + }; + + st.deepEqual(qs.parse('foo=1', { comma: true, decoder: decoder }), { foo: 1 }); + st.deepEqual(qs.parse('foo=0', { comma: true, decoder: decoder }), { foo: 0 }); + + st.end(); + }); + + t.test('parses brackets holds array of arrays when having two parts of strings with comma as array divider', function (st) { + st.deepEqual(qs.parse('foo[]=1,2,3&foo[]=4,5,6', { comma: true }), { foo: [['1', '2', '3'], ['4', '5', '6']] }); + st.deepEqual(qs.parse('foo[]=1,2,3&foo[]=', { comma: true }), { foo: [['1', '2', '3'], ''] }); + st.deepEqual(qs.parse('foo[]=1,2,3&foo[]=,', { comma: true }), { foo: [['1', '2', '3'], ['', '']] }); + st.deepEqual(qs.parse('foo[]=1,2,3&foo[]=a', { comma: true }), { foo: [['1', '2', '3'], 'a'] }); + + st.end(); + }); + + t.test('parses comma delimited array while having percent-encoded comma treated as normal text', function (st) { + st.deepEqual(qs.parse('foo=a%2Cb', { comma: true }), { foo: 'a,b' }); + st.deepEqual(qs.parse('foo=a%2C%20b,d', { comma: true }), { foo: ['a, b', 'd'] }); + st.deepEqual(qs.parse('foo=a%2C%20b,c%2C%20d', { comma: true }), { foo: ['a, b', 'c, d'] }); + + st.end(); + }); + + t.test('parses an object in dot notation', function (st) { + var input = { + 
'user.name': { 'pop[bob]': 3 }, + 'user.email.': null + }; + + var expected = { + user: { + name: { 'pop[bob]': 3 }, + email: null + } + }; + + var result = qs.parse(input, { allowDots: true }); + + st.deepEqual(result, expected); + st.end(); + }); + + t.test('parses an object and not child values', function (st) { + var input = { + 'user[name]': { 'pop[bob]': { test: 3 } }, + 'user[email]': null + }; + + var expected = { + user: { + name: { 'pop[bob]': { test: 3 } }, + email: null + } + }; + + var result = qs.parse(input); + + st.deepEqual(result, expected); + st.end(); + }); + + t.test('does not blow up when Buffer global is missing', function (st) { + var tempBuffer = global.Buffer; + delete global.Buffer; + var result = qs.parse('a=b&c=d'); + global.Buffer = tempBuffer; + st.deepEqual(result, { a: 'b', c: 'd' }); + st.end(); + }); + + t.test('does not crash when parsing circular references', function (st) { + var a = {}; + a.b = a; + + var parsed; + + st.doesNotThrow(function () { + parsed = qs.parse({ 'foo[bar]': 'baz', 'foo[baz]': a }); + }); + + st.equal('foo' in parsed, true, 'parsed has "foo" property'); + st.equal('bar' in parsed.foo, true); + st.equal('baz' in parsed.foo, true); + st.equal(parsed.foo.bar, 'baz'); + st.deepEqual(parsed.foo.baz, a); + st.end(); + }); + + t.test('does not crash when parsing deep objects', function (st) { + var parsed; + var str = 'foo'; + + for (var i = 0; i < 5000; i++) { + str += '[p]'; + } + + str += '=bar'; + + st.doesNotThrow(function () { + parsed = qs.parse(str, { depth: 5000 }); + }); + + st.equal('foo' in parsed, true, 'parsed has "foo" property'); + + var depth = 0; + var ref = parsed.foo; + while ((ref = ref.p)) { + depth += 1; + } + + st.equal(depth, 5000, 'parsed is 5000 properties deep'); + + st.end(); + }); + + t.test('parses null objects correctly', { skip: !Object.create }, function (st) { + var a = Object.create(null); + a.b = 'c'; + + st.deepEqual(qs.parse(a), { b: 'c' }); + var result = qs.parse({ a: a }); + st.equal('a' in result, true, 'result has "a" property'); + st.deepEqual(result.a, a); + st.end(); + }); + + t.test('parses dates correctly', function (st) { + var now = new Date(); + st.deepEqual(qs.parse({ a: now }), { a: now }); + st.end(); + }); + + t.test('parses regular expressions correctly', function (st) { + var re = /^test$/; + st.deepEqual(qs.parse({ a: re }), { a: re }); + st.end(); + }); + + t.test('does not allow overwriting prototype properties', function (st) { + st.deepEqual(qs.parse('a[hasOwnProperty]=b', { allowPrototypes: false }), {}); + st.deepEqual(qs.parse('hasOwnProperty=b', { allowPrototypes: false }), {}); + + st.deepEqual( + qs.parse('toString', { allowPrototypes: false }), + {}, + 'bare "toString" results in {}' + ); + + st.end(); + }); + + t.test('can allow overwriting prototype properties', function (st) { + st.deepEqual(qs.parse('a[hasOwnProperty]=b', { allowPrototypes: true }), { a: { hasOwnProperty: 'b' } }); + st.deepEqual(qs.parse('hasOwnProperty=b', { allowPrototypes: true }), { hasOwnProperty: 'b' }); + + st.deepEqual( + qs.parse('toString', { allowPrototypes: true }), + { toString: '' }, + 'bare "toString" results in { toString: "" }' + ); + + st.end(); + }); + + t.test('params starting with a closing bracket', function (st) { + st.deepEqual(qs.parse(']=toString'), { ']': 'toString' }); + st.deepEqual(qs.parse(']]=toString'), { ']]': 'toString' }); + st.deepEqual(qs.parse(']hello]=toString'), { ']hello]': 'toString' }); + st.end(); + }); + + t.test('params starting with a starting 
bracket', function (st) { + st.deepEqual(qs.parse('[=toString'), { '[': 'toString' }); + st.deepEqual(qs.parse('[[=toString'), { '[[': 'toString' }); + st.deepEqual(qs.parse('[hello[=toString'), { '[hello[': 'toString' }); + st.end(); + }); + + t.test('add keys to objects', function (st) { + st.deepEqual( + qs.parse('a[b]=c&a=d'), + { a: { b: 'c', d: true } }, + 'can add keys to objects' + ); + + st.deepEqual( + qs.parse('a[b]=c&a=toString'), + { a: { b: 'c' } }, + 'can not overwrite prototype' + ); + + st.deepEqual( + qs.parse('a[b]=c&a=toString', { allowPrototypes: true }), + { a: { b: 'c', toString: true } }, + 'can overwrite prototype with allowPrototypes true' + ); + + st.deepEqual( + qs.parse('a[b]=c&a=toString', { plainObjects: true }), + { __proto__: null, a: { __proto__: null, b: 'c', toString: true } }, + 'can overwrite prototype with plainObjects true' + ); + + st.end(); + }); + + t.test('can return null objects', { skip: !Object.create }, function (st) { + var expected = Object.create(null); + expected.a = Object.create(null); + expected.a.b = 'c'; + expected.a.hasOwnProperty = 'd'; + st.deepEqual(qs.parse('a[b]=c&a[hasOwnProperty]=d', { plainObjects: true }), expected); + st.deepEqual(qs.parse(null, { plainObjects: true }), Object.create(null)); + var expectedArray = Object.create(null); + expectedArray.a = Object.create(null); + expectedArray.a[0] = 'b'; + expectedArray.a.c = 'd'; + st.deepEqual(qs.parse('a[]=b&a[c]=d', { plainObjects: true }), expectedArray); + st.end(); + }); + + t.test('can parse with custom encoding', function (st) { + st.deepEqual(qs.parse('%8c%a7=%91%e5%8d%e3%95%7b', { + decoder: function (str) { + var reg = /%([0-9A-F]{2})/ig; + var result = []; + var parts = reg.exec(str); + while (parts) { + result.push(parseInt(parts[1], 16)); + parts = reg.exec(str); + } + return String(iconv.decode(SaferBuffer.from(result), 'shift_jis')); + } + }), { 県: '大阪府' }); + st.end(); + }); + + t.test('receives the default decoder as a second argument', function (st) { + st.plan(1); + qs.parse('a', { + decoder: function (str, defaultDecoder) { + st.equal(defaultDecoder, utils.decode); + } + }); + st.end(); + }); + + t.test('throws error with wrong decoder', function (st) { + st['throws'](function () { + qs.parse({}, { decoder: 'string' }); + }, new TypeError('Decoder has to be a function.')); + st.end(); + }); + + t.test('does not mutate the options argument', function (st) { + var options = {}; + qs.parse('a[b]=true', options); + st.deepEqual(options, {}); + st.end(); + }); + + t.test('throws if an invalid charset is specified', function (st) { + st['throws'](function () { + qs.parse('a=b', { charset: 'foobar' }); + }, new TypeError('The charset option must be either utf-8, iso-8859-1, or undefined')); + st.end(); + }); + + t.test('parses an iso-8859-1 string if asked to', function (st) { + st.deepEqual(qs.parse('%A2=%BD', { charset: 'iso-8859-1' }), { '¢': '½' }); + st.end(); + }); + + var urlEncodedCheckmarkInUtf8 = '%E2%9C%93'; + var urlEncodedOSlashInUtf8 = '%C3%B8'; + var urlEncodedNumCheckmark = '%26%2310003%3B'; + var urlEncodedNumSmiley = '%26%239786%3B'; + + t.test('prefers an utf-8 charset specified by the utf8 sentinel to a default charset of iso-8859-1', function (st) { + st.deepEqual(qs.parse('utf8=' + urlEncodedCheckmarkInUtf8 + '&' + urlEncodedOSlashInUtf8 + '=' + urlEncodedOSlashInUtf8, { charsetSentinel: true, charset: 'iso-8859-1' }), { ø: 'ø' }); + st.end(); + }); + + t.test('prefers an iso-8859-1 charset specified by the utf8 sentinel to a default 
charset of utf-8', function (st) { + st.deepEqual(qs.parse('utf8=' + urlEncodedNumCheckmark + '&' + urlEncodedOSlashInUtf8 + '=' + urlEncodedOSlashInUtf8, { charsetSentinel: true, charset: 'utf-8' }), { 'Ã¸': 'Ã¸' }); + st.end(); + }); + + t.test('does not require the utf8 sentinel to be defined before the parameters whose decoding it affects', function (st) { + st.deepEqual(qs.parse('a=' + urlEncodedOSlashInUtf8 + '&utf8=' + urlEncodedNumCheckmark, { charsetSentinel: true, charset: 'utf-8' }), { a: 'Ã¸' }); + st.end(); + }); + + t.test('should ignore an utf8 sentinel with an unknown value', function (st) { + st.deepEqual(qs.parse('utf8=foo&' + urlEncodedOSlashInUtf8 + '=' + urlEncodedOSlashInUtf8, { charsetSentinel: true, charset: 'utf-8' }), { ø: 'ø' }); + st.end(); + }); + + t.test('uses the utf8 sentinel to switch to utf-8 when no default charset is given', function (st) { + st.deepEqual(qs.parse('utf8=' + urlEncodedCheckmarkInUtf8 + '&' + urlEncodedOSlashInUtf8 + '=' + urlEncodedOSlashInUtf8, { charsetSentinel: true }), { ø: 'ø' }); + st.end(); + }); + + t.test('uses the utf8 sentinel to switch to iso-8859-1 when no default charset is given', function (st) { + st.deepEqual(qs.parse('utf8=' + urlEncodedNumCheckmark + '&' + urlEncodedOSlashInUtf8 + '=' + urlEncodedOSlashInUtf8, { charsetSentinel: true }), { 'Ã¸': 'Ã¸' }); + st.end(); + }); + + t.test('interprets numeric entities in iso-8859-1 when `interpretNumericEntities`', function (st) { + st.deepEqual(qs.parse('foo=' + urlEncodedNumSmiley, { charset: 'iso-8859-1', interpretNumericEntities: true }), { foo: '☺' }); + st.end(); + }); + + t.test('handles a custom decoder returning `null`, in the `iso-8859-1` charset, when `interpretNumericEntities`', function (st) { + st.deepEqual(qs.parse('foo=&bar=' + urlEncodedNumSmiley, { + charset: 'iso-8859-1', + decoder: function (str, defaultDecoder, charset) { + return str ? defaultDecoder(str, defaultDecoder, charset) : null; + }, + interpretNumericEntities: true + }), { foo: null, bar: '☺' }); + st.end(); + }); + + t.test('does not interpret numeric entities in iso-8859-1 when `interpretNumericEntities` is absent', function (st) { + st.deepEqual(qs.parse('foo=' + urlEncodedNumSmiley, { charset: 'iso-8859-1' }), { foo: '&#9786;' }); + st.end(); + }); + + t.test('does not interpret numeric entities when the charset is utf-8, even when `interpretNumericEntities`', function (st) { + st.deepEqual(qs.parse('foo=' + urlEncodedNumSmiley, { charset: 'utf-8', interpretNumericEntities: true }), { foo: '&#9786;' }); + st.end(); + }); + + t.test('does not interpret %uXXXX syntax in iso-8859-1 mode', function (st) { + st.deepEqual(qs.parse('%u263A=%u263A', { charset: 'iso-8859-1' }), { '%u263A': '%u263A' }); + st.end(); + }); + + t.test('allows for decoding keys and values differently', function (st) { + var decoder = function (str, defaultDecoder, charset, type) { + if (type === 'key') { + return defaultDecoder(str, defaultDecoder, charset, type).toLowerCase(); + } + if (type === 'value') { + return defaultDecoder(str, defaultDecoder, charset, type).toUpperCase(); + } + throw 'this should never happen! 
type: ' + type; + }; + + st.deepEqual(qs.parse('KeY=vAlUe', { decoder: decoder }), { key: 'VALUE' }); + st.end(); + }); + + t.end(); +}); diff --git a/node_modules/formidable/node_modules/qs/test/stringify.js b/node_modules/formidable/node_modules/qs/test/stringify.js new file mode 100644 index 0000000..760d08c --- /dev/null +++ b/node_modules/formidable/node_modules/qs/test/stringify.js @@ -0,0 +1,738 @@ +'use strict'; + +var test = require('tape'); +var qs = require('../'); +var utils = require('../lib/utils'); +var iconv = require('iconv-lite'); +var SaferBuffer = require('safer-buffer').Buffer; +var hasSymbols = require('has-symbols'); +var hasBigInt = typeof BigInt === 'function'; + +test('stringify()', function (t) { + t.test('stringifies a querystring object', function (st) { + st.equal(qs.stringify({ a: 'b' }), 'a=b'); + st.equal(qs.stringify({ a: 1 }), 'a=1'); + st.equal(qs.stringify({ a: 1, b: 2 }), 'a=1&b=2'); + st.equal(qs.stringify({ a: 'A_Z' }), 'a=A_Z'); + st.equal(qs.stringify({ a: '€' }), 'a=%E2%82%AC'); + st.equal(qs.stringify({ a: '' }), 'a=%EE%80%80'); + st.equal(qs.stringify({ a: 'א' }), 'a=%D7%90'); + st.equal(qs.stringify({ a: '𐐷' }), 'a=%F0%90%90%B7'); + st.end(); + }); + + t.test('stringifies falsy values', function (st) { + st.equal(qs.stringify(undefined), ''); + st.equal(qs.stringify(null), ''); + st.equal(qs.stringify(null, { strictNullHandling: true }), ''); + st.equal(qs.stringify(false), ''); + st.equal(qs.stringify(0), ''); + st.end(); + }); + + t.test('stringifies symbols', { skip: !hasSymbols() }, function (st) { + st.equal(qs.stringify(Symbol.iterator), ''); + st.equal(qs.stringify([Symbol.iterator]), '0=Symbol%28Symbol.iterator%29'); + st.equal(qs.stringify({ a: Symbol.iterator }), 'a=Symbol%28Symbol.iterator%29'); + st.equal( + qs.stringify({ a: [Symbol.iterator] }, { encodeValuesOnly: true, arrayFormat: 'brackets' }), + 'a[]=Symbol%28Symbol.iterator%29' + ); + st.end(); + }); + + t.test('stringifies bigints', { skip: !hasBigInt }, function (st) { + var three = BigInt(3); + var encodeWithN = function (value, defaultEncoder, charset) { + var result = defaultEncoder(value, defaultEncoder, charset); + return typeof value === 'bigint' ? 
result + 'n' : result; + }; + st.equal(qs.stringify(three), ''); + st.equal(qs.stringify([three]), '0=3'); + st.equal(qs.stringify([three], { encoder: encodeWithN }), '0=3n'); + st.equal(qs.stringify({ a: three }), 'a=3'); + st.equal(qs.stringify({ a: three }, { encoder: encodeWithN }), 'a=3n'); + st.equal( + qs.stringify({ a: [three] }, { encodeValuesOnly: true, arrayFormat: 'brackets' }), + 'a[]=3' + ); + st.equal( + qs.stringify({ a: [three] }, { encodeValuesOnly: true, encoder: encodeWithN, arrayFormat: 'brackets' }), + 'a[]=3n' + ); + st.end(); + }); + + t.test('adds query prefix', function (st) { + st.equal(qs.stringify({ a: 'b' }, { addQueryPrefix: true }), '?a=b'); + st.end(); + }); + + t.test('with query prefix, outputs blank string given an empty object', function (st) { + st.equal(qs.stringify({}, { addQueryPrefix: true }), ''); + st.end(); + }); + + t.test('stringifies nested falsy values', function (st) { + st.equal(qs.stringify({ a: { b: { c: null } } }), 'a%5Bb%5D%5Bc%5D='); + st.equal(qs.stringify({ a: { b: { c: null } } }, { strictNullHandling: true }), 'a%5Bb%5D%5Bc%5D'); + st.equal(qs.stringify({ a: { b: { c: false } } }), 'a%5Bb%5D%5Bc%5D=false'); + st.end(); + }); + + t.test('stringifies a nested object', function (st) { + st.equal(qs.stringify({ a: { b: 'c' } }), 'a%5Bb%5D=c'); + st.equal(qs.stringify({ a: { b: { c: { d: 'e' } } } }), 'a%5Bb%5D%5Bc%5D%5Bd%5D=e'); + st.end(); + }); + + t.test('stringifies a nested object with dots notation', function (st) { + st.equal(qs.stringify({ a: { b: 'c' } }, { allowDots: true }), 'a.b=c'); + st.equal(qs.stringify({ a: { b: { c: { d: 'e' } } } }, { allowDots: true }), 'a.b.c.d=e'); + st.end(); + }); + + t.test('stringifies an array value', function (st) { + st.equal( + qs.stringify({ a: ['b', 'c', 'd'] }, { arrayFormat: 'indices' }), + 'a%5B0%5D=b&a%5B1%5D=c&a%5B2%5D=d', + 'indices => indices' + ); + st.equal( + qs.stringify({ a: ['b', 'c', 'd'] }, { arrayFormat: 'brackets' }), + 'a%5B%5D=b&a%5B%5D=c&a%5B%5D=d', + 'brackets => brackets' + ); + st.equal( + qs.stringify({ a: ['b', 'c', 'd'] }, { arrayFormat: 'comma' }), + 'a=b%2Cc%2Cd', + 'comma => comma' + ); + st.equal( + qs.stringify({ a: ['b', 'c', 'd'] }), + 'a%5B0%5D=b&a%5B1%5D=c&a%5B2%5D=d', + 'default => indices' + ); + st.end(); + }); + + t.test('omits nulls when asked', function (st) { + st.equal(qs.stringify({ a: 'b', c: null }, { skipNulls: true }), 'a=b'); + st.end(); + }); + + t.test('omits nested nulls when asked', function (st) { + st.equal(qs.stringify({ a: { b: 'c', d: null } }, { skipNulls: true }), 'a%5Bb%5D=c'); + st.end(); + }); + + t.test('omits array indices when asked', function (st) { + st.equal(qs.stringify({ a: ['b', 'c', 'd'] }, { indices: false }), 'a=b&a=c&a=d'); + st.end(); + }); + + t.test('stringifies a nested array value', function (st) { + st.equal(qs.stringify({ a: { b: ['c', 'd'] } }, { arrayFormat: 'indices' }), 'a%5Bb%5D%5B0%5D=c&a%5Bb%5D%5B1%5D=d'); + st.equal(qs.stringify({ a: { b: ['c', 'd'] } }, { arrayFormat: 'brackets' }), 'a%5Bb%5D%5B%5D=c&a%5Bb%5D%5B%5D=d'); + st.equal(qs.stringify({ a: { b: ['c', 'd'] } }, { arrayFormat: 'comma' }), 'a%5Bb%5D=c%2Cd'); // a[b]=c,d + st.equal(qs.stringify({ a: { b: ['c', 'd'] } }), 'a%5Bb%5D%5B0%5D=c&a%5Bb%5D%5B1%5D=d'); + st.end(); + }); + + t.test('stringifies a nested array value with dots notation', function (st) { + st.equal( + qs.stringify( + { a: { b: ['c', 'd'] } }, + { allowDots: true, encode: false, arrayFormat: 'indices' } + ), + 'a.b[0]=c&a.b[1]=d', + 'indices: stringifies with dots + 
indices' + ); + st.equal( + qs.stringify( + { a: { b: ['c', 'd'] } }, + { allowDots: true, encode: false, arrayFormat: 'brackets' } + ), + 'a.b[]=c&a.b[]=d', + 'brackets: stringifies with dots + brackets' + ); + st.equal( + qs.stringify( + { a: { b: ['c', 'd'] } }, + { allowDots: true, encode: false, arrayFormat: 'comma' } + ), + 'a.b=c,d', + 'comma: stringifies with dots + comma' + ); + st.equal( + qs.stringify( + { a: { b: ['c', 'd'] } }, + { allowDots: true, encode: false } + ), + 'a.b[0]=c&a.b[1]=d', + 'default: stringifies with dots + indices' + ); + st.end(); + }); + + t.test('stringifies an object inside an array', function (st) { + st.equal( + qs.stringify({ a: [{ b: 'c' }] }, { arrayFormat: 'indices' }), + 'a%5B0%5D%5Bb%5D=c', // a[0][b]=c + 'indices => brackets' + ); + st.equal( + qs.stringify({ a: [{ b: 'c' }] }, { arrayFormat: 'brackets' }), + 'a%5B%5D%5Bb%5D=c', // a[][b]=c + 'brackets => brackets' + ); + st.equal( + qs.stringify({ a: [{ b: 'c' }] }), + 'a%5B0%5D%5Bb%5D=c', + 'default => indices' + ); + + st.equal( + qs.stringify({ a: [{ b: { c: [1] } }] }, { arrayFormat: 'indices' }), + 'a%5B0%5D%5Bb%5D%5Bc%5D%5B0%5D=1', + 'indices => indices' + ); + + st.equal( + qs.stringify({ a: [{ b: { c: [1] } }] }, { arrayFormat: 'brackets' }), + 'a%5B%5D%5Bb%5D%5Bc%5D%5B%5D=1', + 'brackets => brackets' + ); + + st.equal( + qs.stringify({ a: [{ b: { c: [1] } }] }), + 'a%5B0%5D%5Bb%5D%5Bc%5D%5B0%5D=1', + 'default => indices' + ); + + st.end(); + }); + + t.test('stringifies an array with mixed objects and primitives', function (st) { + st.equal( + qs.stringify({ a: [{ b: 1 }, 2, 3] }, { encode: false, arrayFormat: 'indices' }), + 'a[0][b]=1&a[1]=2&a[2]=3', + 'indices => indices' + ); + st.equal( + qs.stringify({ a: [{ b: 1 }, 2, 3] }, { encode: false, arrayFormat: 'brackets' }), + 'a[][b]=1&a[]=2&a[]=3', + 'brackets => brackets' + ); + st.equal( + qs.stringify({ a: [{ b: 1 }, 2, 3] }, { encode: false }), + 'a[0][b]=1&a[1]=2&a[2]=3', + 'default => indices' + ); + + st.end(); + }); + + t.test('stringifies an object inside an array with dots notation', function (st) { + st.equal( + qs.stringify( + { a: [{ b: 'c' }] }, + { allowDots: true, encode: false, arrayFormat: 'indices' } + ), + 'a[0].b=c', + 'indices => indices' + ); + st.equal( + qs.stringify( + { a: [{ b: 'c' }] }, + { allowDots: true, encode: false, arrayFormat: 'brackets' } + ), + 'a[].b=c', + 'brackets => brackets' + ); + st.equal( + qs.stringify( + { a: [{ b: 'c' }] }, + { allowDots: true, encode: false } + ), + 'a[0].b=c', + 'default => indices' + ); + + st.equal( + qs.stringify( + { a: [{ b: { c: [1] } }] }, + { allowDots: true, encode: false, arrayFormat: 'indices' } + ), + 'a[0].b.c[0]=1', + 'indices => indices' + ); + st.equal( + qs.stringify( + { a: [{ b: { c: [1] } }] }, + { allowDots: true, encode: false, arrayFormat: 'brackets' } + ), + 'a[].b.c[]=1', + 'brackets => brackets' + ); + st.equal( + qs.stringify( + { a: [{ b: { c: [1] } }] }, + { allowDots: true, encode: false } + ), + 'a[0].b.c[0]=1', + 'default => indices' + ); + + st.end(); + }); + + t.test('does not omit object keys when indices = false', function (st) { + st.equal(qs.stringify({ a: [{ b: 'c' }] }, { indices: false }), 'a%5Bb%5D=c'); + st.end(); + }); + + t.test('uses indices notation for arrays when indices=true', function (st) { + st.equal(qs.stringify({ a: ['b', 'c'] }, { indices: true }), 'a%5B0%5D=b&a%5B1%5D=c'); + st.end(); + }); + + t.test('uses indices notation for arrays when no arrayFormat is specified', function (st) { + 
st.equal(qs.stringify({ a: ['b', 'c'] }), 'a%5B0%5D=b&a%5B1%5D=c'); + st.end(); + }); + + t.test('uses indices notation for arrays when no arrayFormat=indices', function (st) { + st.equal(qs.stringify({ a: ['b', 'c'] }, { arrayFormat: 'indices' }), 'a%5B0%5D=b&a%5B1%5D=c'); + st.end(); + }); + + t.test('uses repeat notation for arrays when no arrayFormat=repeat', function (st) { + st.equal(qs.stringify({ a: ['b', 'c'] }, { arrayFormat: 'repeat' }), 'a=b&a=c'); + st.end(); + }); + + t.test('uses brackets notation for arrays when no arrayFormat=brackets', function (st) { + st.equal(qs.stringify({ a: ['b', 'c'] }, { arrayFormat: 'brackets' }), 'a%5B%5D=b&a%5B%5D=c'); + st.end(); + }); + + t.test('stringifies a complicated object', function (st) { + st.equal(qs.stringify({ a: { b: 'c', d: 'e' } }), 'a%5Bb%5D=c&a%5Bd%5D=e'); + st.end(); + }); + + t.test('stringifies an empty value', function (st) { + st.equal(qs.stringify({ a: '' }), 'a='); + st.equal(qs.stringify({ a: null }, { strictNullHandling: true }), 'a'); + + st.equal(qs.stringify({ a: '', b: '' }), 'a=&b='); + st.equal(qs.stringify({ a: null, b: '' }, { strictNullHandling: true }), 'a&b='); + + st.equal(qs.stringify({ a: { b: '' } }), 'a%5Bb%5D='); + st.equal(qs.stringify({ a: { b: null } }, { strictNullHandling: true }), 'a%5Bb%5D'); + st.equal(qs.stringify({ a: { b: null } }, { strictNullHandling: false }), 'a%5Bb%5D='); + + st.end(); + }); + + t.test('stringifies a null object', { skip: !Object.create }, function (st) { + var obj = Object.create(null); + obj.a = 'b'; + st.equal(qs.stringify(obj), 'a=b'); + st.end(); + }); + + t.test('returns an empty string for invalid input', function (st) { + st.equal(qs.stringify(undefined), ''); + st.equal(qs.stringify(false), ''); + st.equal(qs.stringify(null), ''); + st.equal(qs.stringify(''), ''); + st.end(); + }); + + t.test('stringifies an object with a null object as a child', { skip: !Object.create }, function (st) { + var obj = { a: Object.create(null) }; + + obj.a.b = 'c'; + st.equal(qs.stringify(obj), 'a%5Bb%5D=c'); + st.end(); + }); + + t.test('drops keys with a value of undefined', function (st) { + st.equal(qs.stringify({ a: undefined }), ''); + + st.equal(qs.stringify({ a: { b: undefined, c: null } }, { strictNullHandling: true }), 'a%5Bc%5D'); + st.equal(qs.stringify({ a: { b: undefined, c: null } }, { strictNullHandling: false }), 'a%5Bc%5D='); + st.equal(qs.stringify({ a: { b: undefined, c: '' } }), 'a%5Bc%5D='); + st.end(); + }); + + t.test('url encodes values', function (st) { + st.equal(qs.stringify({ a: 'b c' }), 'a=b%20c'); + st.end(); + }); + + t.test('stringifies a date', function (st) { + var now = new Date(); + var str = 'a=' + encodeURIComponent(now.toISOString()); + st.equal(qs.stringify({ a: now }), str); + st.end(); + }); + + t.test('stringifies the weird object from qs', function (st) { + st.equal(qs.stringify({ 'my weird field': '~q1!2"\'w$5&7/z8)?' 
}), 'my%20weird%20field=~q1%212%22%27w%245%267%2Fz8%29%3F'); + st.end(); + }); + + t.test('skips properties that are part of the object prototype', function (st) { + Object.prototype.crash = 'test'; + st.equal(qs.stringify({ a: 'b' }), 'a=b'); + st.equal(qs.stringify({ a: { b: 'c' } }), 'a%5Bb%5D=c'); + delete Object.prototype.crash; + st.end(); + }); + + t.test('stringifies boolean values', function (st) { + st.equal(qs.stringify({ a: true }), 'a=true'); + st.equal(qs.stringify({ a: { b: true } }), 'a%5Bb%5D=true'); + st.equal(qs.stringify({ b: false }), 'b=false'); + st.equal(qs.stringify({ b: { c: false } }), 'b%5Bc%5D=false'); + st.end(); + }); + + t.test('stringifies buffer values', function (st) { + st.equal(qs.stringify({ a: SaferBuffer.from('test') }), 'a=test'); + st.equal(qs.stringify({ a: { b: SaferBuffer.from('test') } }), 'a%5Bb%5D=test'); + st.end(); + }); + + t.test('stringifies an object using an alternative delimiter', function (st) { + st.equal(qs.stringify({ a: 'b', c: 'd' }, { delimiter: ';' }), 'a=b;c=d'); + st.end(); + }); + + t.test('doesn\'t blow up when Buffer global is missing', function (st) { + var tempBuffer = global.Buffer; + delete global.Buffer; + var result = qs.stringify({ a: 'b', c: 'd' }); + global.Buffer = tempBuffer; + st.equal(result, 'a=b&c=d'); + st.end(); + }); + + t.test('selects properties when filter=array', function (st) { + st.equal(qs.stringify({ a: 'b' }, { filter: ['a'] }), 'a=b'); + st.equal(qs.stringify({ a: 1 }, { filter: [] }), ''); + + st.equal( + qs.stringify( + { a: { b: [1, 2, 3, 4], c: 'd' }, c: 'f' }, + { filter: ['a', 'b', 0, 2], arrayFormat: 'indices' } + ), + 'a%5Bb%5D%5B0%5D=1&a%5Bb%5D%5B2%5D=3', + 'indices => indices' + ); + st.equal( + qs.stringify( + { a: { b: [1, 2, 3, 4], c: 'd' }, c: 'f' }, + { filter: ['a', 'b', 0, 2], arrayFormat: 'brackets' } + ), + 'a%5Bb%5D%5B%5D=1&a%5Bb%5D%5B%5D=3', + 'brackets => brackets' + ); + st.equal( + qs.stringify( + { a: { b: [1, 2, 3, 4], c: 'd' }, c: 'f' }, + { filter: ['a', 'b', 0, 2] } + ), + 'a%5Bb%5D%5B0%5D=1&a%5Bb%5D%5B2%5D=3', + 'default => indices' + ); + + st.end(); + }); + + t.test('supports custom representations when filter=function', function (st) { + var calls = 0; + var obj = { a: 'b', c: 'd', e: { f: new Date(1257894000000) } }; + var filterFunc = function (prefix, value) { + calls += 1; + if (calls === 1) { + st.equal(prefix, '', 'prefix is empty'); + st.equal(value, obj); + } else if (prefix === 'c') { + return void 0; + } else if (value instanceof Date) { + st.equal(prefix, 'e[f]'); + return value.getTime(); + } + return value; + }; + + st.equal(qs.stringify(obj, { filter: filterFunc }), 'a=b&e%5Bf%5D=1257894000000'); + st.equal(calls, 5); + st.end(); + }); + + t.test('can disable uri encoding', function (st) { + st.equal(qs.stringify({ a: 'b' }, { encode: false }), 'a=b'); + st.equal(qs.stringify({ a: { b: 'c' } }, { encode: false }), 'a[b]=c'); + st.equal(qs.stringify({ a: 'b', c: null }, { strictNullHandling: true, encode: false }), 'a=b&c'); + st.end(); + }); + + t.test('can sort the keys', function (st) { + var sort = function (a, b) { + return a.localeCompare(b); + }; + st.equal(qs.stringify({ a: 'c', z: 'y', b: 'f' }, { sort: sort }), 'a=c&b=f&z=y'); + st.equal(qs.stringify({ a: 'c', z: { j: 'a', i: 'b' }, b: 'f' }, { sort: sort }), 'a=c&b=f&z%5Bi%5D=b&z%5Bj%5D=a'); + st.end(); + }); + + t.test('can sort the keys at depth 3 or more too', function (st) { + var sort = function (a, b) { + return a.localeCompare(b); + }; + st.equal( + qs.stringify( + { a: 'a', z: { 
zj: { zjb: 'zjb', zja: 'zja' }, zi: { zib: 'zib', zia: 'zia' } }, b: 'b' }, + { sort: sort, encode: false } + ), + 'a=a&b=b&z[zi][zia]=zia&z[zi][zib]=zib&z[zj][zja]=zja&z[zj][zjb]=zjb' + ); + st.equal( + qs.stringify( + { a: 'a', z: { zj: { zjb: 'zjb', zja: 'zja' }, zi: { zib: 'zib', zia: 'zia' } }, b: 'b' }, + { sort: null, encode: false } + ), + 'a=a&z[zj][zjb]=zjb&z[zj][zja]=zja&z[zi][zib]=zib&z[zi][zia]=zia&b=b' + ); + st.end(); + }); + + t.test('can stringify with custom encoding', function (st) { + st.equal(qs.stringify({ 県: '大阪府', '': '' }, { + encoder: function (str) { + if (str.length === 0) { + return ''; + } + var buf = iconv.encode(str, 'shiftjis'); + var result = []; + for (var i = 0; i < buf.length; ++i) { + result.push(buf.readUInt8(i).toString(16)); + } + return '%' + result.join('%'); + } + }), '%8c%a7=%91%e5%8d%e3%95%7b&='); + st.end(); + }); + + t.test('receives the default encoder as a second argument', function (st) { + st.plan(2); + qs.stringify({ a: 1 }, { + encoder: function (str, defaultEncoder) { + st.equal(defaultEncoder, utils.encode); + } + }); + st.end(); + }); + + t.test('throws error with wrong encoder', function (st) { + st['throws'](function () { + qs.stringify({}, { encoder: 'string' }); + }, new TypeError('Encoder has to be a function.')); + st.end(); + }); + + t.test('can use custom encoder for a buffer object', { skip: typeof Buffer === 'undefined' }, function (st) { + st.equal(qs.stringify({ a: SaferBuffer.from([1]) }, { + encoder: function (buffer) { + if (typeof buffer === 'string') { + return buffer; + } + return String.fromCharCode(buffer.readUInt8(0) + 97); + } + }), 'a=b'); + + st.equal(qs.stringify({ a: SaferBuffer.from('a b') }, { + encoder: function (buffer) { + return buffer; + } + }), 'a=a b'); + st.end(); + }); + + t.test('serializeDate option', function (st) { + var date = new Date(); + st.equal( + qs.stringify({ a: date }), + 'a=' + date.toISOString().replace(/:/g, '%3A'), + 'default is toISOString' + ); + + var mutatedDate = new Date(); + mutatedDate.toISOString = function () { + throw new SyntaxError(); + }; + st['throws'](function () { + mutatedDate.toISOString(); + }, SyntaxError); + st.equal( + qs.stringify({ a: mutatedDate }), + 'a=' + Date.prototype.toISOString.call(mutatedDate).replace(/:/g, '%3A'), + 'toISOString works even when method is not locally present' + ); + + var specificDate = new Date(6); + st.equal( + qs.stringify( + { a: specificDate }, + { serializeDate: function (d) { return d.getTime() * 7; } } + ), + 'a=42', + 'custom serializeDate function called' + ); + + st.end(); + }); + + t.test('RFC 1738 spaces serialization', function (st) { + st.equal(qs.stringify({ a: 'b c' }, { format: qs.formats.RFC1738 }), 'a=b+c'); + st.equal(qs.stringify({ 'a b': 'c d' }, { format: qs.formats.RFC1738 }), 'a+b=c+d'); + st.equal(qs.stringify({ 'a b': SaferBuffer.from('a b') }, { format: qs.formats.RFC1738 }), 'a+b=a+b'); + st.end(); + }); + + t.test('RFC 3986 spaces serialization', function (st) { + st.equal(qs.stringify({ a: 'b c' }, { format: qs.formats.RFC3986 }), 'a=b%20c'); + st.equal(qs.stringify({ 'a b': 'c d' }, { format: qs.formats.RFC3986 }), 'a%20b=c%20d'); + st.equal(qs.stringify({ 'a b': SaferBuffer.from('a b') }, { format: qs.formats.RFC3986 }), 'a%20b=a%20b'); + st.end(); + }); + + t.test('Backward compatibility to RFC 3986', function (st) { + st.equal(qs.stringify({ a: 'b c' }), 'a=b%20c'); + st.equal(qs.stringify({ 'a b': SaferBuffer.from('a b') }), 'a%20b=a%20b'); + st.end(); + }); + + t.test('Edge cases and unknown 
formats', function (st) { + ['UFO1234', false, 1234, null, {}, []].forEach( + function (format) { + st['throws']( + function () { + qs.stringify({ a: 'b c' }, { format: format }); + }, + new TypeError('Unknown format option provided.') + ); + } + ); + st.end(); + }); + + t.test('encodeValuesOnly', function (st) { + st.equal( + qs.stringify( + { a: 'b', c: ['d', 'e=f'], f: [['g'], ['h']] }, + { encodeValuesOnly: true } + ), + 'a=b&c[0]=d&c[1]=e%3Df&f[0][0]=g&f[1][0]=h' + ); + st.equal( + qs.stringify( + { a: 'b', c: ['d', 'e'], f: [['g'], ['h']] } + ), + 'a=b&c%5B0%5D=d&c%5B1%5D=e&f%5B0%5D%5B0%5D=g&f%5B1%5D%5B0%5D=h' + ); + st.end(); + }); + + t.test('encodeValuesOnly - strictNullHandling', function (st) { + st.equal( + qs.stringify( + { a: { b: null } }, + { encodeValuesOnly: true, strictNullHandling: true } + ), + 'a[b]' + ); + st.end(); + }); + + t.test('throws if an invalid charset is specified', function (st) { + st['throws'](function () { + qs.stringify({ a: 'b' }, { charset: 'foobar' }); + }, new TypeError('The charset option must be either utf-8, iso-8859-1, or undefined')); + st.end(); + }); + + t.test('respects a charset of iso-8859-1', function (st) { + st.equal(qs.stringify({ æ: 'æ' }, { charset: 'iso-8859-1' }), '%E6=%E6'); + st.end(); + }); + + t.test('encodes unrepresentable chars as numeric entities in iso-8859-1 mode', function (st) { + st.equal(qs.stringify({ a: '☺' }, { charset: 'iso-8859-1' }), 'a=%26%239786%3B'); + st.end(); + }); + + t.test('respects an explicit charset of utf-8 (the default)', function (st) { + st.equal(qs.stringify({ a: 'æ' }, { charset: 'utf-8' }), 'a=%C3%A6'); + st.end(); + }); + + t.test('adds the right sentinel when instructed to and the charset is utf-8', function (st) { + st.equal(qs.stringify({ a: 'æ' }, { charsetSentinel: true, charset: 'utf-8' }), 'utf8=%E2%9C%93&a=%C3%A6'); + st.end(); + }); + + t.test('adds the right sentinel when instructed to and the charset is iso-8859-1', function (st) { + st.equal(qs.stringify({ a: 'æ' }, { charsetSentinel: true, charset: 'iso-8859-1' }), 'utf8=%26%2310003%3B&a=%E6'); + st.end(); + }); + + t.test('does not mutate the options argument', function (st) { + var options = {}; + qs.stringify({}, options); + st.deepEqual(options, {}); + st.end(); + }); + + t.test('strictNullHandling works with custom filter', function (st) { + var filter = function (prefix, value) { + return value; + }; + + var options = { strictNullHandling: true, filter: filter }; + st.equal(qs.stringify({ key: null }, options), 'key'); + st.end(); + }); + + t.test('strictNullHandling works with null serializeDate', function (st) { + var serializeDate = function () { + return null; + }; + var options = { strictNullHandling: true, serializeDate: serializeDate }; + var date = new Date(); + st.equal(qs.stringify({ key: date }, options), 'key'); + st.end(); + }); + + t.test('allows for encoding keys and values differently', function (st) { + var encoder = function (str, defaultEncoder, charset, type) { + if (type === 'key') { + return defaultEncoder(str, defaultEncoder, charset, type).toLowerCase(); + } + if (type === 'value') { + return defaultEncoder(str, defaultEncoder, charset, type).toUpperCase(); + } + throw 'this should never happen! 
type: ' + type; + }; + + st.deepEqual(qs.stringify({ KeY: 'vAlUe' }, { encoder: encoder }), 'key=VALUE'); + st.end(); + }); + + t.end(); +}); diff --git a/node_modules/formidable/node_modules/qs/test/utils.js b/node_modules/formidable/node_modules/qs/test/utils.js new file mode 100644 index 0000000..aa84dfd --- /dev/null +++ b/node_modules/formidable/node_modules/qs/test/utils.js @@ -0,0 +1,136 @@ +'use strict'; + +var test = require('tape'); +var inspect = require('object-inspect'); +var SaferBuffer = require('safer-buffer').Buffer; +var forEach = require('for-each'); +var utils = require('../lib/utils'); + +test('merge()', function (t) { + t.deepEqual(utils.merge(null, true), [null, true], 'merges true into null'); + + t.deepEqual(utils.merge(null, [42]), [null, 42], 'merges null into an array'); + + t.deepEqual(utils.merge({ a: 'b' }, { a: 'c' }), { a: ['b', 'c'] }, 'merges two objects with the same key'); + + var oneMerged = utils.merge({ foo: 'bar' }, { foo: { first: '123' } }); + t.deepEqual(oneMerged, { foo: ['bar', { first: '123' }] }, 'merges a standalone and an object into an array'); + + var twoMerged = utils.merge({ foo: ['bar', { first: '123' }] }, { foo: { second: '456' } }); + t.deepEqual(twoMerged, { foo: { 0: 'bar', 1: { first: '123' }, second: '456' } }, 'merges a standalone and two objects into an array'); + + var sandwiched = utils.merge({ foo: ['bar', { first: '123', second: '456' }] }, { foo: 'baz' }); + t.deepEqual(sandwiched, { foo: ['bar', { first: '123', second: '456' }, 'baz'] }, 'merges an object sandwiched by two standalones into an array'); + + var nestedArrays = utils.merge({ foo: ['baz'] }, { foo: ['bar', 'xyzzy'] }); + t.deepEqual(nestedArrays, { foo: ['baz', 'bar', 'xyzzy'] }); + + var noOptionsNonObjectSource = utils.merge({ foo: 'baz' }, 'bar'); + t.deepEqual(noOptionsNonObjectSource, { foo: 'baz', bar: true }); + + t.test( + 'avoids invoking array setters unnecessarily', + { skip: typeof Object.defineProperty !== 'function' }, + function (st) { + var setCount = 0; + var getCount = 0; + var observed = []; + Object.defineProperty(observed, 0, { + get: function () { + getCount += 1; + return { bar: 'baz' }; + }, + set: function () { setCount += 1; } + }); + utils.merge(observed, [null]); + st.equal(setCount, 0); + st.equal(getCount, 1); + observed[0] = observed[0]; // eslint-disable-line no-self-assign + st.equal(setCount, 1); + st.equal(getCount, 2); + st.end(); + } + ); + + t.end(); +}); + +test('assign()', function (t) { + var target = { a: 1, b: 2 }; + var source = { b: 3, c: 4 }; + var result = utils.assign(target, source); + + t.equal(result, target, 'returns the target'); + t.deepEqual(target, { a: 1, b: 3, c: 4 }, 'target and source are merged'); + t.deepEqual(source, { b: 3, c: 4 }, 'source is untouched'); + + t.end(); +}); + +test('combine()', function (t) { + t.test('both arrays', function (st) { + var a = [1]; + var b = [2]; + var combined = utils.combine(a, b); + + st.deepEqual(a, [1], 'a is not mutated'); + st.deepEqual(b, [2], 'b is not mutated'); + st.notEqual(a, combined, 'a !== combined'); + st.notEqual(b, combined, 'b !== combined'); + st.deepEqual(combined, [1, 2], 'combined is a + b'); + + st.end(); + }); + + t.test('one array, one non-array', function (st) { + var aN = 1; + var a = [aN]; + var bN = 2; + var b = [bN]; + + var combinedAnB = utils.combine(aN, b); + st.deepEqual(b, [bN], 'b is not mutated'); + st.notEqual(aN, combinedAnB, 'aN + b !== aN'); + st.notEqual(a, combinedAnB, 'aN + b !== a'); + st.notEqual(bN, combinedAnB, 'aN + 
b !== bN'); + st.notEqual(b, combinedAnB, 'aN + b !== b'); + st.deepEqual([1, 2], combinedAnB, 'first argument is array-wrapped when not an array'); + + var combinedABn = utils.combine(a, bN); + st.deepEqual(a, [aN], 'a is not mutated'); + st.notEqual(aN, combinedABn, 'a + bN !== aN'); + st.notEqual(a, combinedABn, 'a + bN !== a'); + st.notEqual(bN, combinedABn, 'a + bN !== bN'); + st.notEqual(b, combinedABn, 'a + bN !== b'); + st.deepEqual([1, 2], combinedABn, 'second argument is array-wrapped when not an array'); + + st.end(); + }); + + t.test('neither is an array', function (st) { + var combined = utils.combine(1, 2); + st.notEqual(1, combined, '1 + 2 !== 1'); + st.notEqual(2, combined, '1 + 2 !== 2'); + st.deepEqual([1, 2], combined, 'both arguments are array-wrapped when not an array'); + + st.end(); + }); + + t.end(); +}); + +test('isBuffer()', function (t) { + forEach([null, undefined, true, false, '', 'abc', 42, 0, NaN, {}, [], function () {}, /a/g], function (x) { + t.equal(utils.isBuffer(x), false, inspect(x) + ' is not a buffer'); + }); + + var fakeBuffer = { constructor: Buffer }; + t.equal(utils.isBuffer(fakeBuffer), false, 'fake buffer is not a buffer'); + + var saferBuffer = SaferBuffer.from('abc'); + t.equal(utils.isBuffer(saferBuffer), true, 'SaferBuffer instance is a buffer'); + + var buffer = Buffer.from && Buffer.alloc ? Buffer.from('abc') : new Buffer('abc'); + t.equal(utils.isBuffer(buffer), true, 'real Buffer instance is a buffer'); + t.end(); +}); diff --git a/node_modules/formidable/package.json b/node_modules/formidable/package.json new file mode 100644 index 0000000..79ed15e --- /dev/null +++ b/node_modules/formidable/package.json @@ -0,0 +1,98 @@ +{ + "name": "formidable", + "version": "2.0.1", + "license": "MIT", + "description": "A node.js module for parsing form data, especially file uploads.", + "homepage": "https://github.com/node-formidable/formidable", + "funding": "https://ko-fi.com/tunnckoCore/commissions", + "repository": "node-formidable/formidable", + "main": "./src/index.js", + "files": [ + "src" + ], + "publishConfig": { + "access": "public", + "tag": "v2" + }, + "scripts": { + "bench": "node benchmark", + "fmt": "yarn run fmt:prepare '**/*'", + "fmt:prepare": "prettier --write", + "lint": "yarn run lint:prepare .", + "lint:prepare": "eslint --cache --fix --quiet --format codeframe", + "reinstall": "del-cli ./node_modules ./yarn.lock", + "postreinstall": "yarn setup", + "setup": "yarn", + "pretest": "del-cli ./test/tmp && make-dir ./test/tmp", + "test": "jest --coverage", + "pretest:ci": "yarn run pretest", + "test:ci": "nyc jest --coverage", + "test:jest": "jest --coverage" + }, + "dependencies": { + "dezalgo": "1.0.3", + "hexoid": "1.0.0", + "once": "1.4.0", + "qs": "6.9.3" + }, + "devDependencies": { + "@commitlint/cli": "8.3.5", + "@commitlint/config-conventional": "8.3.4", + "@tunnckocore/prettier-config": "1.3.8", + "del-cli": "3.0.0", + "eslint": "6.8.0", + "eslint-config-airbnb-base": "14.1.0", + "eslint-config-prettier": "6.11.0", + "eslint-plugin-import": "2.20.2", + "eslint-plugin-prettier": "3.1.3", + "express": "4.17.1", + "husky": "4.2.5", + "jest": "25.4.0", + "koa": "2.11.0", + "lint-staged": "10.2.7", + "make-dir-cli": "2.0.0", + "nyc": "15.0.1", + "prettier": "2.0.5", + "prettier-plugin-pkgjson": "0.2.8", + "request": "2.88.2", + "supertest": "4.0.2" + }, + "jest": { + "verbose": true + }, + "husky": { + "hooks": { + "pre-commit": "git status --porcelain && yarn lint-staged", + "commit-msg": "yarn commitlint -E HUSKY_GIT_PARAMS" + 
} + }, + "commitlint": { + "extends": [ + "@commitlint/config-conventional" + ] + }, + "lint-staged": { + "!*.{js,jsx,ts,tsx}": [ + "yarn run fmt:prepare" + ], + "*.{js,jsx,ts,tsx}": [ + "yarn run lint" + ] + }, + "renovate": { + "extends": [ + "@tunnckocore", + ":pinAllExceptPeerDependencies" + ] + }, + "keywords": [ + "multipart", + "form", + "data", + "querystring", + "www", + "json", + "ulpoad", + "file" + ] +} diff --git a/node_modules/formidable/src/Formidable.js b/node_modules/formidable/src/Formidable.js new file mode 100644 index 0000000..0542700 --- /dev/null +++ b/node_modules/formidable/src/Formidable.js @@ -0,0 +1,617 @@ +/* eslint-disable class-methods-use-this */ +/* eslint-disable no-underscore-dangle */ + +'use strict'; + +const os = require('os'); +const path = require('path'); +const hexoid = require('hexoid'); +const once = require('once'); +const dezalgo = require('dezalgo'); +const { EventEmitter } = require('events'); +const { StringDecoder } = require('string_decoder'); +const qs = require('qs'); + +const toHexoId = hexoid(25); +const DEFAULT_OPTIONS = { + maxFields: 1000, + maxFieldsSize: 20 * 1024 * 1024, + maxFileSize: 200 * 1024 * 1024, + minFileSize: 1, + allowEmptyFiles: true, + keepExtensions: false, + encoding: 'utf-8', + hashAlgorithm: false, + uploadDir: os.tmpdir(), + multiples: false, + enabledPlugins: ['octetstream', 'querystring', 'multipart', 'json'], + fileWriteStreamHandler: null, + defaultInvalidName: 'invalid-name', + filter: function () { + return true; + }, +}; + +const PersistentFile = require('./PersistentFile'); +const VolatileFile = require('./VolatileFile'); +const DummyParser = require('./parsers/Dummy'); +const MultipartParser = require('./parsers/Multipart'); +const errors = require('./FormidableError.js'); + +const { FormidableError } = errors; + +function hasOwnProp(obj, key) { + return Object.prototype.hasOwnProperty.call(obj, key); +} + +class IncomingForm extends EventEmitter { + constructor(options = {}) { + super(); + + this.options = { ...DEFAULT_OPTIONS, ...options }; + + const dir = path.resolve( + this.options.uploadDir || this.options.uploaddir || os.tmpdir(), + ); + + this.uploaddir = dir; + this.uploadDir = dir; + + // initialize with null + [ + 'error', + 'headers', + 'type', + 'bytesExpected', + 'bytesReceived', + '_parser', + ].forEach((key) => { + this[key] = null; + }); + + this._setUpRename(); + + this._flushing = 0; + this._fieldsSize = 0; + this._fileSize = 0; + this._plugins = []; + this.openedFiles = []; + + this.options.enabledPlugins = [] + .concat(this.options.enabledPlugins) + .filter(Boolean); + + if (this.options.enabledPlugins.length === 0) { + throw new FormidableError( + 'expect at least 1 enabled builtin plugin, see options.enabledPlugins', + errors.missingPlugin, + ); + } + + this.options.enabledPlugins.forEach((pluginName) => { + const plgName = pluginName.toLowerCase(); + // eslint-disable-next-line import/no-dynamic-require, global-require + this.use(require(path.join(__dirname, 'plugins', `${plgName}.js`))); + }); + + this._setUpMaxFields(); + } + + use(plugin) { + if (typeof plugin !== 'function') { + throw new FormidableError( + '.use: expect `plugin` to be a function', + errors.pluginFunction, + ); + } + this._plugins.push(plugin.bind(this)); + return this; + } + + parse(req, cb) { + this.pause = () => { + try { + req.pause(); + } catch (err) { + // the stream was destroyed + if (!this.ended) { + // before it was completed, crash & burn + this._error(err); + } + return false; + } + return true; + 
}; + + this.resume = () => { + try { + req.resume(); + } catch (err) { + // the stream was destroyed + if (!this.ended) { + // before it was completed, crash & burn + this._error(err); + } + return false; + } + + return true; + }; + + // Setup callback first, so we don't miss anything from data events emitted immediately. + if (cb) { + const callback = once(dezalgo(cb)); + const fields = {}; + let mockFields = ''; + const files = {}; + + this.on('field', (name, value) => { + if ( + this.options.multiples && + (this.type === 'multipart' || this.type === 'urlencoded') + ) { + const mObj = { [name]: value }; + mockFields = mockFields + ? `${mockFields}&${qs.stringify(mObj)}` + : `${qs.stringify(mObj)}`; + } else { + fields[name] = value; + } + }); + this.on('file', (name, file) => { + // TODO: too much nesting + if (this.options.multiples) { + if (hasOwnProp(files, name)) { + if (!Array.isArray(files[name])) { + files[name] = [files[name]]; + } + files[name].push(file); + } else { + files[name] = file; + } + } else { + files[name] = file; + } + }); + this.on('error', (err) => { + callback(err, fields, files); + }); + this.on('end', () => { + if (this.options.multiples) { + Object.assign(fields, qs.parse(mockFields)); + } + callback(null, fields, files); + }); + } + + // Parse headers and setup the parser, ready to start listening for data. + this.writeHeaders(req.headers); + + // Start listening for data. + req + .on('error', (err) => { + this._error(err); + }) + .on('aborted', () => { + this.emit('aborted'); + this._error(new FormidableError('Request aborted', errors.aborted)); + }) + .on('data', (buffer) => { + try { + this.write(buffer); + } catch (err) { + this._error(err); + } + }) + .on('end', () => { + if (this.error) { + return; + } + if (this._parser) { + this._parser.end(); + } + this._maybeEnd(); + }); + + return this; + } + + writeHeaders(headers) { + this.headers = headers; + this._parseContentLength(); + this._parseContentType(); + + if (!this._parser) { + this._error( + new FormidableError( + 'no parser found', + errors.noParser, + 415, // Unsupported Media Type + ), + ); + return; + } + + this._parser.once('error', (error) => { + this._error(error); + }); + } + + write(buffer) { + if (this.error) { + return null; + } + if (!this._parser) { + this._error( + new FormidableError('uninitialized parser', errors.uninitializedParser), + ); + return null; + } + + this.bytesReceived += buffer.length; + this.emit('progress', this.bytesReceived, this.bytesExpected); + + this._parser.write(buffer); + + return this.bytesReceived; + } + + pause() { + // this does nothing, unless overwritten in IncomingForm.parse + return false; + } + + resume() { + // this does nothing, unless overwritten in IncomingForm.parse + return false; + } + + onPart(part) { + // this method can be overwritten by the user + this._handlePart(part); + } + + _handlePart(part) { + if (part.originalFilename && typeof part.originalFilename !== 'string') { + this._error( + new FormidableError( + `the part.originalFilename should be string when it exists`, + errors.filenameNotString, + ), + ); + return; + } + + // This MUST check exactly for undefined. You can not change it to !part.originalFilename. + + // todo: uncomment when switch tests to Jest + // console.log(part); + + // ? NOTE(@tunnckocore): no it can be any falsey value, it most probably depends on what's returned + // from somewhere else. Where recently I changed the return statements + // and such thing because code style + // ? 
NOTE(@tunnckocore): or even better, if there is no mimetype, then it's for sure a field + // ? NOTE(@tunnckocore): originalFilename is an empty string when a field? + if (!part.mimetype) { + let value = ''; + const decoder = new StringDecoder( + part.transferEncoding || this.options.encoding, + ); + + part.on('data', (buffer) => { + this._fieldsSize += buffer.length; + if (this._fieldsSize > this.options.maxFieldsSize) { + this._error( + new FormidableError( + `options.maxFieldsSize (${this.options.maxFieldsSize} bytes) exceeded, received ${this._fieldsSize} bytes of field data`, + errors.maxFieldsSizeExceeded, + 413, // Payload Too Large + ), + ); + return; + } + value += decoder.write(buffer); + }); + + part.on('end', () => { + this.emit('field', part.name, value); + }); + return; + } + + if (!this.options.filter(part)) { + return; + } + + this._flushing += 1; + + const newFilename = this._getNewName(part); + const filepath = this._joinDirectoryName(newFilename); + const file = this._newFile({ + newFilename, + filepath, + originalFilename: part.originalFilename, + mimetype: part.mimetype, + }); + file.on('error', (err) => { + this._error(err); + }); + this.emit('fileBegin', part.name, file); + + file.open(); + this.openedFiles.push(file); + + part.on('data', (buffer) => { + this._fileSize += buffer.length; + if (this._fileSize < this.options.minFileSize) { + this._error( + new FormidableError( + `options.minFileSize (${this.options.minFileSize} bytes) inferior, received ${this._fileSize} bytes of file data`, + errors.smallerThanMinFileSize, + 400, + ), + ); + return; + } + if (this._fileSize > this.options.maxFileSize) { + this._error( + new FormidableError( + `options.maxFileSize (${this.options.maxFileSize} bytes) exceeded, received ${this._fileSize} bytes of file data`, + errors.biggerThanMaxFileSize, + 413, + ), + ); + return; + } + if (buffer.length === 0) { + return; + } + this.pause(); + file.write(buffer, () => { + this.resume(); + }); + }); + + part.on('end', () => { + if (!this.options.allowEmptyFiles && this._fileSize === 0) { + this._error( + new FormidableError( + `options.allowEmptyFiles is false, file size should be greather than 0`, + errors.noEmptyFiles, + 400, + ), + ); + return; + } + + file.end(() => { + this._flushing -= 1; + this.emit('file', part.name, file); + this._maybeEnd(); + }); + }); + } + + // eslint-disable-next-line max-statements + _parseContentType() { + if (this.bytesExpected === 0) { + this._parser = new DummyParser(this, this.options); + return; + } + + if (!this.headers['content-type']) { + this._error( + new FormidableError( + 'bad content-type header, no content-type', + errors.missingContentType, + 400, + ), + ); + return; + } + + const results = []; + const _dummyParser = new DummyParser(this, this.options); + + // eslint-disable-next-line no-plusplus + for (let idx = 0; idx < this._plugins.length; idx++) { + const plugin = this._plugins[idx]; + + let pluginReturn = null; + + try { + pluginReturn = plugin(this, this.options) || this; + } catch (err) { + // directly throw from the `form.parse` method; + // there is no other better way, except a handle through options + const error = new FormidableError( + `plugin on index ${idx} failed with: ${err.message}`, + errors.pluginFailed, + 500, + ); + error.idx = idx; + throw error; + } + + Object.assign(this, pluginReturn); + + // todo: use Set/Map and pass plugin name instead of the `idx` index + this.emit('plugin', idx, pluginReturn); + results.push(pluginReturn); + } + + this.emit('pluginsResults', 
results); + + // NOTE: probably not needed, because we check options.enabledPlugins in the constructor + // if (results.length === 0 /* && results.length !== this._plugins.length */) { + // this._error( + // new Error( + // `bad content-type header, unknown content-type: ${this.headers['content-type']}`, + // ), + // ); + // } + } + + _error(err, eventName = 'error') { + // if (!err && this.error) { + // this.emit('error', this.error); + // return; + // } + if (this.error || this.ended) { + return; + } + + this.error = err; + this.emit(eventName, err); + + if (Array.isArray(this.openedFiles)) { + this.openedFiles.forEach((file) => { + file.destroy(); + }); + } + } + + _parseContentLength() { + this.bytesReceived = 0; + if (this.headers['content-length']) { + this.bytesExpected = parseInt(this.headers['content-length'], 10); + } else if (this.headers['transfer-encoding'] === undefined) { + this.bytesExpected = 0; + } + + if (this.bytesExpected !== null) { + this.emit('progress', this.bytesReceived, this.bytesExpected); + } + } + + _newParser() { + return new MultipartParser(this.options); + } + + _newFile({ filepath, originalFilename, mimetype, newFilename }) { + return this.options.fileWriteStreamHandler + ? new VolatileFile({ + newFilename, + filepath, + originalFilename, + mimetype, + createFileWriteStream: this.options.fileWriteStreamHandler, + hashAlgorithm: this.options.hashAlgorithm, + }) + : new PersistentFile({ + newFilename, + filepath, + originalFilename, + mimetype, + hashAlgorithm: this.options.hashAlgorithm, + }); + } + + _getFileName(headerValue) { + // matches either a quoted-string or a token (RFC 2616 section 19.5.1) + const m = headerValue.match( + /\bfilename=("(.*?)"|([^()<>{}[\]@,;:"?=\s/\t]+))($|;\s)/i, + ); + if (!m) return null; + + const match = m[2] || m[3] || ''; + let originalFilename = match.substr(match.lastIndexOf('\\') + 1); + originalFilename = originalFilename.replace(/%22/g, '"'); + originalFilename = originalFilename.replace(/&#([\d]{4});/g, (_, code) => + String.fromCharCode(code), + ); + + return originalFilename; + } + + _getExtension(str) { + if (!str) { + return ''; + } + + const basename = path.basename(str); + const firstDot = basename.indexOf('.'); + const lastDot = basename.lastIndexOf('.'); + const extname = path.extname(basename).replace(/(\.[a-z0-9]+).*/i, '$1'); + + if (firstDot === lastDot) { + return extname; + } + + return basename.slice(firstDot, lastDot) + extname; + } + + + + _joinDirectoryName(name) { + const newPath = path.join(this.uploadDir, name); + + // prevent directory traversal attacks + if (!newPath.startsWith(this.uploadDir)) { + return path.join(this.uploadDir, this.options.defaultInvalidName); + } + + return newPath; + } + + _setUpRename() { + const hasRename = typeof this.options.filename === 'function'; + if (hasRename) { + this._getNewName = (part) => { + let ext = ''; + let name = this.options.defaultInvalidName; + if (part.originalFilename) { + // can be null + ({ ext, name } = path.parse(part.originalFilename)); + if (this.options.keepExtensions !== true) { + ext = ''; + } + } + return this.options.filename.call(this, name, ext, part, this); + }; + } else { + this._getNewName = (part) => { + const name = toHexoId(); + + if (part && this.options.keepExtensions) { + const originalFilename = typeof part === 'string' ? 
part : part.originalFilename; + return `${name}${this._getExtension(originalFilename)}`; + } + + return name; + } + } + } + + _setUpMaxFields() { + if (this.options.maxFields !== 0) { + let fieldsCount = 0; + this.on('field', () => { + fieldsCount += 1; + if (fieldsCount > this.options.maxFields) { + this._error( + new FormidableError( + `options.maxFields (${this.options.maxFields}) exceeded`, + errors.maxFieldsExceeded, + 413, + ), + ); + } + }); + } + } + + _maybeEnd() { + // console.log('ended', this.ended); + // console.log('_flushing', this._flushing); + // console.log('error', this.error); + if (!this.ended || this._flushing || this.error) { + return; + } + + this.emit('end'); + } +} + +IncomingForm.DEFAULT_OPTIONS = DEFAULT_OPTIONS; +module.exports = IncomingForm; diff --git a/node_modules/formidable/src/FormidableError.js b/node_modules/formidable/src/FormidableError.js new file mode 100644 index 0000000..da5c25f --- /dev/null +++ b/node_modules/formidable/src/FormidableError.js @@ -0,0 +1,45 @@ +/* eslint-disable no-plusplus */ + +const missingPlugin = 1000; +const pluginFunction = 1001; +const aborted = 1002; +const noParser = 1003; +const uninitializedParser = 1004; +const filenameNotString = 1005; +const maxFieldsSizeExceeded = 1006; +const maxFieldsExceeded = 1007; +const smallerThanMinFileSize = 1008; +const biggerThanMaxFileSize = 1009; +const noEmptyFiles = 1010; +const missingContentType = 1011; +const malformedMultipart = 1012; +const missingMultipartBoundary = 1013; +const unknownTransferEncoding = 1014; + +const FormidableError = class extends Error { + constructor(message, internalCode, httpCode = 500) { + super(message); + this.code = internalCode; + this.httpCode = httpCode; + } +}; + +module.exports = { + missingPlugin, + pluginFunction, + aborted, + noParser, + uninitializedParser, + filenameNotString, + maxFieldsSizeExceeded, + maxFieldsExceeded, + smallerThanMinFileSize, + biggerThanMaxFileSize, + noEmptyFiles, + missingContentType, + malformedMultipart, + missingMultipartBoundary, + unknownTransferEncoding, + + FormidableError, +}; diff --git a/node_modules/formidable/src/PersistentFile.js b/node_modules/formidable/src/PersistentFile.js new file mode 100644 index 0000000..3a28aa7 --- /dev/null +++ b/node_modules/formidable/src/PersistentFile.js @@ -0,0 +1,87 @@ +/* eslint-disable no-underscore-dangle */ + +'use strict'; + +const fs = require('fs'); +const crypto = require('crypto'); +const { EventEmitter } = require('events'); + +class PersistentFile extends EventEmitter { + constructor({ filepath, newFilename, originalFilename, mimetype, hashAlgorithm }) { + super(); + + this.lastModifiedDate = null; + Object.assign(this, { filepath, newFilename, originalFilename, mimetype, hashAlgorithm }); + + this.size = 0; + this._writeStream = null; + + if (typeof this.hashAlgorithm === 'string') { + this.hash = crypto.createHash(this.hashAlgorithm); + } else { + this.hash = null; + } + } + + open() { + this._writeStream = new fs.WriteStream(this.filepath); + this._writeStream.on('error', (err) => { + this.emit('error', err); + }); + } + + toJSON() { + const json = { + size: this.size, + filepath: this.filepath, + newFilename: this.newFilename, + mimetype: this.mimetype, + mtime: this.lastModifiedDate, + length: this.length, + originalFilename: this.originalFilename, + }; + if (this.hash && this.hash !== '') { + json.hash = this.hash; + } + return json; + } + + toString() { + return `PersistentFile: ${this._file.newFilename}, Original: ${this._file.originalFilename}, 
Path: ${this._file.filepath}`; + } + + write(buffer, cb) { + if (this.hash) { + this.hash.update(buffer); + } + + if (this._writeStream.closed) { + cb(); + return; + } + + this._writeStream.write(buffer, () => { + this.lastModifiedDate = new Date(); + this.size += buffer.length; + this.emit('progress', this.size); + cb(); + }); + } + + end(cb) { + if (this.hash) { + this.hash = this.hash.digest('hex'); + } + this._writeStream.end(() => { + this.emit('end'); + cb(); + }); + } + + destroy() { + this._writeStream.destroy(); + fs.unlink(this.filepath, () => {}); + } +} + +module.exports = PersistentFile; diff --git a/node_modules/formidable/src/VolatileFile.js b/node_modules/formidable/src/VolatileFile.js new file mode 100644 index 0000000..01af428 --- /dev/null +++ b/node_modules/formidable/src/VolatileFile.js @@ -0,0 +1,82 @@ +/* eslint-disable no-underscore-dangle */ + +'use strict'; + +const crypto = require('crypto'); +const { EventEmitter } = require('events'); + +class VolatileFile extends EventEmitter { + constructor({ filepath, newFilename, originalFilename, mimetype, hashAlgorithm, createFileWriteStream }) { + super(); + + this.lastModifiedDate = null; + Object.assign(this, { filepath, newFilename, originalFilename, mimetype, hashAlgorithm, createFileWriteStream }); + + this.size = 0; + this._writeStream = null; + + if (typeof this.hashAlgorithm === 'string') { + this.hash = crypto.createHash(this.hashAlgorithm); + } else { + this.hash = null; + } + } + + open() { + this._writeStream = this.createFileWriteStream(this); + this._writeStream.on('error', (err) => { + this.emit('error', err); + }); + } + + destroy() { + this._writeStream.destroy(); + } + + toJSON() { + const json = { + size: this.size, + newFilename: this.newFilename, + length: this.length, + originalFilename: this.originalFilename, + mimetype: this.mimetype, + }; + if (this.hash && this.hash !== '') { + json.hash = this.hash; + } + return json; + } + + toString() { + return `VolatileFile: ${this.originalFilename}`; + } + + write(buffer, cb) { + if (this.hash) { + this.hash.update(buffer); + } + + if (this._writeStream.closed || this._writeStream.destroyed) { + cb(); + return; + } + + this._writeStream.write(buffer, () => { + this.size += buffer.length; + this.emit('progress', this.size); + cb(); + }); + } + + end(cb) { + if (this.hash) { + this.hash = this.hash.digest('hex'); + } + this._writeStream.end(() => { + this.emit('end'); + cb(); + }); + } +} + +module.exports = VolatileFile; diff --git a/node_modules/formidable/src/index.js b/node_modules/formidable/src/index.js new file mode 100644 index 0000000..bf555a9 --- /dev/null +++ b/node_modules/formidable/src/index.js @@ -0,0 +1,38 @@ +'use strict'; + +const PersistentFile = require('./PersistentFile'); +const VolatileFile = require('./VolatileFile'); +const Formidable = require('./Formidable'); +const FormidableError = require('./FormidableError'); + +const plugins = require('./plugins/index'); +const parsers = require('./parsers/index'); + +// make it available without requiring the `new` keyword +// if you want it access `const formidable.IncomingForm` as v1 +const formidable = (...args) => new Formidable(...args); + +module.exports = Object.assign(formidable, { + errors: FormidableError, + File: PersistentFile, + PersistentFile, + VolatileFile, + Formidable, + formidable, + + // alias + IncomingForm: Formidable, + + // parsers + ...parsers, + parsers, + + // misc + defaultOptions: Formidable.DEFAULT_OPTIONS, + enabledPlugins: 
Formidable.DEFAULT_OPTIONS.enabledPlugins, + + // plugins + plugins: { + ...plugins, + }, +}); diff --git a/node_modules/formidable/src/parsers/Dummy.js b/node_modules/formidable/src/parsers/Dummy.js new file mode 100644 index 0000000..6340959 --- /dev/null +++ b/node_modules/formidable/src/parsers/Dummy.js @@ -0,0 +1,21 @@ +/* eslint-disable no-underscore-dangle */ + +'use strict'; + +const { Transform } = require('stream'); + +class DummyParser extends Transform { + constructor(incomingForm, options = {}) { + super(); + this.globalOptions = { ...options }; + this.incomingForm = incomingForm; + } + + _flush(callback) { + this.incomingForm.ended = true; + this.incomingForm._maybeEnd(); + callback(); + } +} + +module.exports = DummyParser; diff --git a/node_modules/formidable/src/parsers/JSON.js b/node_modules/formidable/src/parsers/JSON.js new file mode 100644 index 0000000..9a096c2 --- /dev/null +++ b/node_modules/formidable/src/parsers/JSON.js @@ -0,0 +1,35 @@ +/* eslint-disable no-underscore-dangle */ + +'use strict'; + +const { Transform } = require('stream'); + +class JSONParser extends Transform { + constructor(options = {}) { + super({ readableObjectMode: true }); + this.chunks = []; + this.globalOptions = { ...options }; + } + + _transform(chunk, encoding, callback) { + this.chunks.push(String(chunk)); // todo consider using a string decoder + callback(); + } + + _flush(callback) { + try { + const fields = JSON.parse(this.chunks.join('')); + Object.keys(fields).forEach((key) => { + const value = fields[key]; + this.push({ key, value }); + }); + } catch (e) { + callback(e); + return; + } + this.chunks = null; + callback(); + } +} + +module.exports = JSONParser; diff --git a/node_modules/formidable/src/parsers/Multipart.js b/node_modules/formidable/src/parsers/Multipart.js new file mode 100644 index 0000000..23a298a --- /dev/null +++ b/node_modules/formidable/src/parsers/Multipart.js @@ -0,0 +1,347 @@ +/* eslint-disable no-fallthrough */ +/* eslint-disable no-bitwise */ +/* eslint-disable no-plusplus */ +/* eslint-disable no-underscore-dangle */ + +'use strict'; + +const { Transform } = require('stream'); +const errors = require('../FormidableError.js'); + +const { FormidableError } = errors; + +let s = 0; +const STATE = { + PARSER_UNINITIALIZED: s++, + START: s++, + START_BOUNDARY: s++, + HEADER_FIELD_START: s++, + HEADER_FIELD: s++, + HEADER_VALUE_START: s++, + HEADER_VALUE: s++, + HEADER_VALUE_ALMOST_DONE: s++, + HEADERS_ALMOST_DONE: s++, + PART_DATA_START: s++, + PART_DATA: s++, + PART_END: s++, + END: s++, +}; + +let f = 1; +const FBOUNDARY = { PART_BOUNDARY: f, LAST_BOUNDARY: (f *= 2) }; + +const LF = 10; +const CR = 13; +const SPACE = 32; +const HYPHEN = 45; +const COLON = 58; +const A = 97; +const Z = 122; + +function lower(c) { + return c | 0x20; +} + +exports.STATES = {}; + +Object.keys(STATE).forEach((stateName) => { + exports.STATES[stateName] = STATE[stateName]; +}); + +class MultipartParser extends Transform { + constructor(options = {}) { + super({ readableObjectMode: true }); + this.boundary = null; + this.boundaryChars = null; + this.lookbehind = null; + this.bufferLength = 0; + this.state = STATE.PARSER_UNINITIALIZED; + + this.globalOptions = { ...options }; + this.index = null; + this.flags = 0; + } + + _flush(done) { + if ( + (this.state === STATE.HEADER_FIELD_START && this.index === 0) || + (this.state === STATE.PART_DATA && this.index === this.boundary.length) + ) { + this._handleCallback('partEnd'); + this._handleCallback('end'); + done(); + } else if 
(this.state !== STATE.END) { + done( + new FormidableError( + `MultipartParser.end(): stream ended unexpectedly: ${this.explain()}`, + errors.malformedMultipart, + 400, + ), + ); + } + } + + initWithBoundary(str) { + this.boundary = Buffer.from(`\r\n--${str}`); + this.lookbehind = Buffer.alloc(this.boundary.length + 8); + this.state = STATE.START; + this.boundaryChars = {}; + + for (let i = 0; i < this.boundary.length; i++) { + this.boundaryChars[this.boundary[i]] = true; + } + } + + // eslint-disable-next-line max-params + _handleCallback(name, buf, start, end) { + if (start !== undefined && start === end) { + return; + } + this.push({ name, buffer: buf, start, end }); + } + + // eslint-disable-next-line max-statements + _transform(buffer, _, done) { + let i = 0; + let prevIndex = this.index; + let { index, state, flags } = this; + const { lookbehind, boundary, boundaryChars } = this; + const boundaryLength = boundary.length; + const boundaryEnd = boundaryLength - 1; + this.bufferLength = buffer.length; + let c = null; + let cl = null; + + const setMark = (name, idx) => { + this[`${name}Mark`] = typeof idx === 'number' ? idx : i; + }; + + const clearMarkSymbol = (name) => { + delete this[`${name}Mark`]; + }; + + const dataCallback = (name, shouldClear) => { + const markSymbol = `${name}Mark`; + if (!(markSymbol in this)) { + return; + } + + if (!shouldClear) { + this._handleCallback(name, buffer, this[markSymbol], buffer.length); + setMark(name, 0); + } else { + this._handleCallback(name, buffer, this[markSymbol], i); + clearMarkSymbol(name); + } + }; + + for (i = 0; i < this.bufferLength; i++) { + c = buffer[i]; + switch (state) { + case STATE.PARSER_UNINITIALIZED: + return i; + case STATE.START: + index = 0; + state = STATE.START_BOUNDARY; + case STATE.START_BOUNDARY: + if (index === boundary.length - 2) { + if (c === HYPHEN) { + flags |= FBOUNDARY.LAST_BOUNDARY; + } else if (c !== CR) { + return i; + } + index++; + break; + } else if (index - 1 === boundary.length - 2) { + if (flags & FBOUNDARY.LAST_BOUNDARY && c === HYPHEN) { + this._handleCallback('end'); + state = STATE.END; + flags = 0; + } else if (!(flags & FBOUNDARY.LAST_BOUNDARY) && c === LF) { + index = 0; + this._handleCallback('partBegin'); + state = STATE.HEADER_FIELD_START; + } else { + return i; + } + break; + } + + if (c !== boundary[index + 2]) { + index = -2; + } + if (c === boundary[index + 2]) { + index++; + } + break; + case STATE.HEADER_FIELD_START: + state = STATE.HEADER_FIELD; + setMark('headerField'); + index = 0; + case STATE.HEADER_FIELD: + if (c === CR) { + clearMarkSymbol('headerField'); + state = STATE.HEADERS_ALMOST_DONE; + break; + } + + index++; + if (c === HYPHEN) { + break; + } + + if (c === COLON) { + if (index === 1) { + // empty header field + return i; + } + dataCallback('headerField', true); + state = STATE.HEADER_VALUE_START; + break; + } + + cl = lower(c); + if (cl < A || cl > Z) { + return i; + } + break; + case STATE.HEADER_VALUE_START: + if (c === SPACE) { + break; + } + + setMark('headerValue'); + state = STATE.HEADER_VALUE; + case STATE.HEADER_VALUE: + if (c === CR) { + dataCallback('headerValue', true); + this._handleCallback('headerEnd'); + state = STATE.HEADER_VALUE_ALMOST_DONE; + } + break; + case STATE.HEADER_VALUE_ALMOST_DONE: + if (c !== LF) { + return i; + } + state = STATE.HEADER_FIELD_START; + break; + case STATE.HEADERS_ALMOST_DONE: + if (c !== LF) { + return i; + } + + this._handleCallback('headersEnd'); + state = STATE.PART_DATA_START; + break; + case STATE.PART_DATA_START: + 
state = STATE.PART_DATA; + setMark('partData'); + case STATE.PART_DATA: + prevIndex = index; + + if (index === 0) { + // boyer-moore derrived algorithm to safely skip non-boundary data + i += boundaryEnd; + while (i < this.bufferLength && !(buffer[i] in boundaryChars)) { + i += boundaryLength; + } + i -= boundaryEnd; + c = buffer[i]; + } + + if (index < boundary.length) { + if (boundary[index] === c) { + if (index === 0) { + dataCallback('partData', true); + } + index++; + } else { + index = 0; + } + } else if (index === boundary.length) { + index++; + if (c === CR) { + // CR = part boundary + flags |= FBOUNDARY.PART_BOUNDARY; + } else if (c === HYPHEN) { + // HYPHEN = end boundary + flags |= FBOUNDARY.LAST_BOUNDARY; + } else { + index = 0; + } + } else if (index - 1 === boundary.length) { + if (flags & FBOUNDARY.PART_BOUNDARY) { + index = 0; + if (c === LF) { + // unset the PART_BOUNDARY flag + flags &= ~FBOUNDARY.PART_BOUNDARY; + this._handleCallback('partEnd'); + this._handleCallback('partBegin'); + state = STATE.HEADER_FIELD_START; + break; + } + } else if (flags & FBOUNDARY.LAST_BOUNDARY) { + if (c === HYPHEN) { + this._handleCallback('partEnd'); + this._handleCallback('end'); + state = STATE.END; + flags = 0; + } else { + index = 0; + } + } else { + index = 0; + } + } + + if (index > 0) { + // when matching a possible boundary, keep a lookbehind reference + // in case it turns out to be a false lead + lookbehind[index - 1] = c; + } else if (prevIndex > 0) { + // if our boundary turned out to be rubbish, the captured lookbehind + // belongs to partData + this._handleCallback('partData', lookbehind, 0, prevIndex); + prevIndex = 0; + setMark('partData'); + + // reconsider the current character even so it interrupted the sequence + // it could be the beginning of a new sequence + i--; + } + + break; + case STATE.END: + break; + default: + return i; + } + } + + dataCallback('headerField'); + dataCallback('headerValue'); + dataCallback('partData'); + + this.index = index; + this.state = state; + this.flags = flags; + + done(); + return this.bufferLength; + } + + explain() { + return `state = ${MultipartParser.stateToString(this.state)}`; + } +} + +// eslint-disable-next-line consistent-return +MultipartParser.stateToString = (stateNumber) => { + // eslint-disable-next-line no-restricted-syntax, guard-for-in + for (const stateName in STATE) { + const number = STATE[stateName]; + if (number === stateNumber) return stateName; + } +}; + +module.exports = Object.assign(MultipartParser, { STATES: exports.STATES }); diff --git a/node_modules/formidable/src/parsers/OctetStream.js b/node_modules/formidable/src/parsers/OctetStream.js new file mode 100644 index 0000000..cdf55f2 --- /dev/null +++ b/node_modules/formidable/src/parsers/OctetStream.js @@ -0,0 +1,12 @@ +'use strict'; + +const { PassThrough } = require('stream'); + +class OctetStreamParser extends PassThrough { + constructor(options = {}) { + super(); + this.globalOptions = { ...options }; + } +} + +module.exports = OctetStreamParser; diff --git a/node_modules/formidable/src/parsers/Querystring.js b/node_modules/formidable/src/parsers/Querystring.js new file mode 100644 index 0000000..a0d4243 --- /dev/null +++ b/node_modules/formidable/src/parsers/Querystring.js @@ -0,0 +1,38 @@ +/* eslint-disable no-underscore-dangle */ + +'use strict'; + +const { Transform } = require('stream'); +const querystring = require('querystring'); + +// This is a buffering parser, not quite as nice as the multipart one. 
+// If I find time I'll rewrite this to be fully streaming as well +class QuerystringParser extends Transform { + constructor(options = {}) { + super({ readableObjectMode: true }); + this.globalOptions = { ...options }; + this.buffer = ''; + this.bufferLength = 0; + } + + _transform(buffer, encoding, callback) { + this.buffer += buffer.toString('ascii'); + this.bufferLength = this.buffer.length; + callback(); + } + + _flush(callback) { + const fields = querystring.parse(this.buffer, '&', '='); + // eslint-disable-next-line no-restricted-syntax, guard-for-in + for (const key in fields) { + this.push({ + key, + value: fields[key], + }); + } + this.buffer = ''; + callback(); + } +} + +module.exports = QuerystringParser; diff --git a/node_modules/formidable/src/parsers/StreamingQuerystring.js b/node_modules/formidable/src/parsers/StreamingQuerystring.js new file mode 100644 index 0000000..06d7577 --- /dev/null +++ b/node_modules/formidable/src/parsers/StreamingQuerystring.js @@ -0,0 +1,121 @@ +// not used +/* eslint-disable no-underscore-dangle */ + +'use strict'; + +const { Transform } = require('stream'); +const errors = require('../FormidableError.js'); + +const { FormidableError } = errors; + +const AMPERSAND = 38; +const EQUALS = 61; + +class QuerystringParser extends Transform { + constructor(options = {}) { + super({ readableObjectMode: true }); + + const { maxFieldSize } = options; + this.maxFieldLength = maxFieldSize; + this.buffer = Buffer.from(''); + this.fieldCount = 0; + this.sectionStart = 0; + this.key = ''; + this.readingKey = true; + } + + _transform(buffer, encoding, callback) { + let len = buffer.length; + if (this.buffer && this.buffer.length) { + // we have some data left over from the last write which we are in the middle of processing + len += this.buffer.length; + buffer = Buffer.concat([this.buffer, buffer], len); + } + + for (let i = this.buffer.length || 0; i < len; i += 1) { + const c = buffer[i]; + if (this.readingKey) { + // KEY, check for = + if (c === EQUALS) { + this.key = this.getSection(buffer, i); + this.readingKey = false; + this.sectionStart = i + 1; + } else if (c === AMPERSAND) { + // just key, no value. Prepare to read another key + this.emitField(this.getSection(buffer, i)); + this.sectionStart = i + 1; + } + // VALUE, check for & + } else if (c === AMPERSAND) { + this.emitField(this.key, this.getSection(buffer, i)); + this.sectionStart = i + 1; + } + + if ( + this.maxFieldLength && + i - this.sectionStart === this.maxFieldLength + ) { + callback( + new FormidableError( + `${ + this.readingKey ? 'Key' : `Value for ${this.key}` + } longer than maxFieldLength`, + ), + errors.maxFieldsSizeExceeded, + 413, + ); + } + } + + // Prepare the remaining key or value (from sectionStart to the end) for the next write() or for end() + len -= this.sectionStart; + if (len) { + // i.e. Unless the last character was a & or = + this.buffer = Buffer.from(this.buffer, 0, this.sectionStart); + } else this.buffer = null; + + this.sectionStart = 0; + callback(); + } + + _flush(callback) { + // Emit the last field + if (this.readingKey) { + // we only have a key if there's something in the buffer. 
We definitely have no value + if (this.buffer && this.buffer.length){ + this.emitField(this.buffer.toString('ascii')); + } + } else { + // We have a key, we may or may not have a value + this.emitField( + this.key, + this.buffer && this.buffer.length && this.buffer.toString('ascii'), + ); + } + this.buffer = ''; + callback(); + } + + getSection(buffer, i) { + if (i === this.sectionStart) return ''; + + return buffer.toString('ascii', this.sectionStart, i); + } + + emitField(key, val) { + this.key = ''; + this.readingKey = true; + this.push({ key, value: val || '' }); + } +} + +module.exports = QuerystringParser; + +// const q = new QuerystringParser({maxFieldSize: 100}); +// (async function() { +// for await (const chunk of q) { +// console.log(chunk); +// } +// })(); +// q.write("a=b&c=d") +// q.end() diff --git a/node_modules/formidable/src/parsers/index.js b/node_modules/formidable/src/parsers/index.js new file mode 100644 index 0000000..bbf9ef6 --- /dev/null +++ b/node_modules/formidable/src/parsers/index.js @@ -0,0 +1,17 @@ +'use strict'; + +const JSONParser = require('./JSON'); +const DummyParser = require('./Dummy'); +const MultipartParser = require('./Multipart'); +const OctetStreamParser = require('./OctetStream'); +const QueryStringParser = require('./Querystring'); + +Object.assign(exports, { + JSONParser, + DummyParser, + MultipartParser, + OctetStreamParser, + OctetstreamParser: OctetStreamParser, + QueryStringParser, + QuerystringParser: QueryStringParser, +}); diff --git a/node_modules/formidable/src/plugins/index.js b/node_modules/formidable/src/plugins/index.js new file mode 100644 index 0000000..cbd491a --- /dev/null +++ b/node_modules/formidable/src/plugins/index.js @@ -0,0 +1,13 @@ +'use strict'; + +const octetstream = require('./octetstream'); +const querystring = require('./querystring'); +const multipart = require('./multipart'); +const json = require('./json'); + +Object.assign(exports, { + octetstream, + querystring, + multipart, + json, +}); diff --git a/node_modules/formidable/src/plugins/json.js b/node_modules/formidable/src/plugins/json.js new file mode 100644 index 0000000..20d3b26 --- /dev/null +++ b/node_modules/formidable/src/plugins/json.js @@ -0,0 +1,38 @@ +/* eslint-disable no-underscore-dangle */ + +'use strict'; + +const JSONParser = require('../parsers/JSON'); + +// the `options` is also available through the `this.options` / `formidable.options` +module.exports = function plugin(formidable, options) { + // the `this` context is always formidable, as the first argument of a plugin + // but this allows us to customize/test each plugin + + /* istanbul ignore next */ + const self = this || formidable; + + if (/json/i.test(self.headers['content-type'])) { + init.call(self, self, options); + } +}; + +// Note that it's a good practice (but it's up to you) to use the `this.options` instead +// of the passed `options` (second) param, because when you decide +// to test the plugin you can pass custom `this` context to it (and so `this.options`) +function init(_self, _opts) { + this.type = 'json'; + + const parser = new JSONParser(this.options); + + parser.on('data', ({ key, value }) => { + this.emit('field', key, value); + }); + + parser.once('end', () => { + this.ended = true; + this._maybeEnd(); + }); + + this._parser = parser; +} diff --git a/node_modules/formidable/src/plugins/multipart.js b/node_modules/formidable/src/plugins/multipart.js new file mode 100644 index 0000000..ba07373 --- /dev/null +++ b/node_modules/formidable/src/plugins/multipart.js @@ 
-0,0 +1,173 @@ +/* eslint-disable no-underscore-dangle */ + +'use strict'; + +const { Stream } = require('stream'); +const MultipartParser = require('../parsers/Multipart'); +const errors = require('../FormidableError.js'); + +const { FormidableError } = errors; + +// the `options` is also available through the `options` / `formidable.options` +module.exports = function plugin(formidable, options) { + // the `this` context is always formidable, as the first argument of a plugin + // but this allows us to customize/test each plugin + + /* istanbul ignore next */ + const self = this || formidable; + + // NOTE: we (currently) support both multipart/form-data and multipart/related + const multipart = /multipart/i.test(self.headers['content-type']); + + if (multipart) { + const m = self.headers['content-type'].match( + /boundary=(?:"([^"]+)"|([^;]+))/i, + ); + if (m) { + const initMultipart = createInitMultipart(m[1] || m[2]); + initMultipart.call(self, self, options); // lgtm [js/superfluous-trailing-arguments] + } else { + const err = new FormidableError( + 'bad content-type header, no multipart boundary', + errors.missingMultipartBoundary, + 400, + ); + self._error(err); + } + } +}; + +// Note that it's a good practice (but it's up to you) to use the `this.options` instead +// of the passed `options` (second) param, because when you decide +// to test the plugin you can pass custom `this` context to it (and so `this.options`) +function createInitMultipart(boundary) { + return function initMultipart() { + this.type = 'multipart'; + + const parser = new MultipartParser(this.options); + let headerField; + let headerValue; + let part; + + parser.initWithBoundary(boundary); + + // eslint-disable-next-line max-statements, consistent-return + parser.on('data', ({ name, buffer, start, end }) => { + if (name === 'partBegin') { + part = new Stream(); + part.readable = true; + part.headers = {}; + part.name = null; + part.originalFilename = null; + part.mimetype = null; + + part.transferEncoding = this.options.encoding; + part.transferBuffer = ''; + + headerField = ''; + headerValue = ''; + } else if (name === 'headerField') { + headerField += buffer.toString(this.options.encoding, start, end); + } else if (name === 'headerValue') { + headerValue += buffer.toString(this.options.encoding, start, end); + } else if (name === 'headerEnd') { + headerField = headerField.toLowerCase(); + part.headers[headerField] = headerValue; + + // matches either a quoted-string or a token (RFC 2616 section 19.5.1) + const m = headerValue.match( + // eslint-disable-next-line no-useless-escape + /\bname=("([^"]*)"|([^\(\)<>@,;:\\"\/\[\]\?=\{\}\s\t/]+))/i, + ); + if (headerField === 'content-disposition') { + if (m) { + part.name = m[2] || m[3] || ''; + } + + part.originalFilename = this._getFileName(headerValue); + } else if (headerField === 'content-type') { + part.mimetype = headerValue; + } else if (headerField === 'content-transfer-encoding') { + part.transferEncoding = headerValue.toLowerCase(); + } + + headerField = ''; + headerValue = ''; + } else if (name === 'headersEnd') { + switch (part.transferEncoding) { + case 'binary': + case '7bit': + case '8bit': + case 'utf-8': { + const dataPropagation = (ctx) => { + if (ctx.name === 'partData') { + part.emit('data', ctx.buffer.slice(ctx.start, ctx.end)); + } + }; + const dataStopPropagation = (ctx) => { + if (ctx.name === 'partEnd') { + part.emit('end'); + parser.off('data', dataPropagation); + parser.off('data', dataStopPropagation); + } + }; + parser.on('data', 
dataPropagation); + parser.on('data', dataStopPropagation); + break; + } + case 'base64': { + const dataPropagation = (ctx) => { + if (ctx.name === 'partData') { + part.transferBuffer += ctx.buffer + .slice(ctx.start, ctx.end) + .toString('ascii'); + + /* + four bytes (chars) in base64 converts to three bytes in binary + encoding. So we should always work with a number of bytes that + can be divided by 4, it will result in a number of buytes that + can be divided vy 3. + */ + const offset = parseInt(part.transferBuffer.length / 4, 10) * 4; + part.emit( + 'data', + Buffer.from( + part.transferBuffer.substring(0, offset), + 'base64', + ), + ); + part.transferBuffer = part.transferBuffer.substring(offset); + } + }; + const dataStopPropagation = (ctx) => { + if (ctx.name === 'partEnd') { + part.emit('data', Buffer.from(part.transferBuffer, 'base64')); + part.emit('end'); + parser.off('data', dataPropagation); + parser.off('data', dataStopPropagation); + } + }; + parser.on('data', dataPropagation); + parser.on('data', dataStopPropagation); + break; + } + default: + return this._error( + new FormidableError( + 'unknown transfer-encoding', + errors.unknownTransferEncoding, + 501, + ), + ); + } + + this.onPart(part); + } else if (name === 'end') { + this.ended = true; + this._maybeEnd(); + } + }); + + this._parser = parser; + }; +} diff --git a/node_modules/formidable/src/plugins/octetstream.js b/node_modules/formidable/src/plugins/octetstream.js new file mode 100644 index 0000000..96fee40 --- /dev/null +++ b/node_modules/formidable/src/plugins/octetstream.js @@ -0,0 +1,86 @@ +/* eslint-disable no-underscore-dangle */ + +'use strict'; + +const OctetStreamParser = require('../parsers/OctetStream'); + +// the `options` is also available through the `options` / `formidable.options` +module.exports = function plugin(formidable, options) { + // the `this` context is always formidable, as the first argument of a plugin + // but this allows us to customize/test each plugin + + /* istanbul ignore next */ + const self = this || formidable; + + if (/octet-stream/i.test(self.headers['content-type'])) { + init.call(self, self, options); + } + + return self; +}; + +// Note that it's a good practice (but it's up to you) to use the `this.options` instead +// of the passed `options` (second) param, because when you decide +// to test the plugin you can pass custom `this` context to it (and so `this.options`) +function init(_self, _opts) { + this.type = 'octet-stream'; + const originalFilename = this.headers['x-file-name']; + const mimetype = this.headers['content-type']; + + const thisPart = { + originalFilename, + mimetype, + }; + const newFilename = this._getNewName(thisPart); + const filepath = this._joinDirectoryName(newFilename); + const file = this._newFile({ + newFilename, + filepath, + originalFilename, + mimetype, + }); + + this.emit('fileBegin', originalFilename, file); + file.open(); + this.openedFiles.push(file); + this._flushing += 1; + + this._parser = new OctetStreamParser(this.options); + + // Keep track of writes that haven't finished so we don't emit the file before it's done being written + let outstandingWrites = 0; + + this._parser.on('data', (buffer) => { + this.pause(); + outstandingWrites += 1; + + file.write(buffer, () => { + outstandingWrites -= 1; + this.resume(); + + if (this.ended) { + this._parser.emit('doneWritingFile'); + } + }); + }); + + this._parser.on('end', () => { + this._flushing -= 1; + this.ended = true; + + const done = () => { + file.end(() => { + this.emit('file', 
'file', file); + this._maybeEnd(); + }); + }; + + if (outstandingWrites === 0) { + done(); + } else { + this._parser.once('doneWritingFile', done); + } + }); + + return this; +} diff --git a/node_modules/formidable/src/plugins/querystring.js b/node_modules/formidable/src/plugins/querystring.js new file mode 100644 index 0000000..c9dcf1e --- /dev/null +++ b/node_modules/formidable/src/plugins/querystring.js @@ -0,0 +1,42 @@ +/* eslint-disable no-underscore-dangle */ + +'use strict'; + +const QuerystringParser = require('../parsers/Querystring'); + +// the `options` is also available through the `this.options` / `formidable.options` +module.exports = function plugin(formidable, options) { + // the `this` context is always formidable, as the first argument of a plugin + // but this allows us to customize/test each plugin + + /* istanbul ignore next */ + const self = this || formidable; + + if (/urlencoded/i.test(self.headers['content-type'])) { + init.call(self, self, options); + } + + return self; +}; + +// Note that it's a good practice (but it's up to you) to use the `this.options` instead +// of the passed `options` (second) param, because when you decide +// to test the plugin you can pass custom `this` context to it (and so `this.options`) +function init(_self, _opts) { + this.type = 'urlencoded'; + + const parser = new QuerystringParser(this.options); + + parser.on('data', ({ key, value }) => { + this.emit('field', key, value); + }); + + parser.once('end', () => { + this.ended = true; + this._maybeEnd(); + }); + + this._parser = parser; + + return this; +} diff --git a/node_modules/function-bind/.editorconfig b/node_modules/function-bind/.editorconfig new file mode 100644 index 0000000..ac29ade --- /dev/null +++ b/node_modules/function-bind/.editorconfig @@ -0,0 +1,20 @@ +root = true + +[*] +indent_style = tab +indent_size = 4 +end_of_line = lf +charset = utf-8 +trim_trailing_whitespace = true +insert_final_newline = true +max_line_length = 120 + +[CHANGELOG.md] +indent_style = space +indent_size = 2 + +[*.json] +max_line_length = off + +[Makefile] +max_line_length = off diff --git a/node_modules/function-bind/.eslintrc b/node_modules/function-bind/.eslintrc new file mode 100644 index 0000000..9b33d8e --- /dev/null +++ b/node_modules/function-bind/.eslintrc @@ -0,0 +1,15 @@ +{ + "root": true, + + "extends": "@ljharb", + + "rules": { + "func-name-matching": 0, + "indent": [2, 4], + "max-nested-callbacks": [2, 3], + "max-params": [2, 3], + "max-statements": [2, 20], + "no-new-func": [1], + "strict": [0] + } +} diff --git a/node_modules/function-bind/.jscs.json b/node_modules/function-bind/.jscs.json new file mode 100644 index 0000000..8c44794 --- /dev/null +++ b/node_modules/function-bind/.jscs.json @@ -0,0 +1,176 @@ +{ + "es3": true, + + "additionalRules": [], + + "requireSemicolons": true, + + "disallowMultipleSpaces": true, + + "disallowIdentifierNames": [], + + "requireCurlyBraces": { + "allExcept": [], + "keywords": ["if", "else", "for", "while", "do", "try", "catch"] + }, + + "requireSpaceAfterKeywords": ["if", "else", "for", "while", "do", "switch", "return", "try", "catch", "function"], + + "disallowSpaceAfterKeywords": [], + + "disallowSpaceBeforeComma": true, + "disallowSpaceAfterComma": false, + "disallowSpaceBeforeSemicolon": true, + + "disallowNodeTypes": [ + "DebuggerStatement", + "ForInStatement", + "LabeledStatement", + "SwitchCase", + "SwitchStatement", + "WithStatement" + ], + + "requireObjectKeysOnNewLine": { "allExcept": ["sameLine"] }, + + 
"requireSpacesInAnonymousFunctionExpression": { "beforeOpeningRoundBrace": true, "beforeOpeningCurlyBrace": true }, + "requireSpacesInNamedFunctionExpression": { "beforeOpeningCurlyBrace": true }, + "disallowSpacesInNamedFunctionExpression": { "beforeOpeningRoundBrace": true }, + "requireSpacesInFunctionDeclaration": { "beforeOpeningCurlyBrace": true }, + "disallowSpacesInFunctionDeclaration": { "beforeOpeningRoundBrace": true }, + + "requireSpaceBetweenArguments": true, + + "disallowSpacesInsideParentheses": true, + + "disallowSpacesInsideArrayBrackets": true, + + "disallowQuotedKeysInObjects": { "allExcept": ["reserved"] }, + + "disallowSpaceAfterObjectKeys": true, + + "requireCommaBeforeLineBreak": true, + + "disallowSpaceAfterPrefixUnaryOperators": ["++", "--", "+", "-", "~", "!"], + "requireSpaceAfterPrefixUnaryOperators": [], + + "disallowSpaceBeforePostfixUnaryOperators": ["++", "--"], + "requireSpaceBeforePostfixUnaryOperators": [], + + "disallowSpaceBeforeBinaryOperators": [], + "requireSpaceBeforeBinaryOperators": ["+", "-", "/", "*", "=", "==", "===", "!=", "!=="], + + "requireSpaceAfterBinaryOperators": ["+", "-", "/", "*", "=", "==", "===", "!=", "!=="], + "disallowSpaceAfterBinaryOperators": [], + + "disallowImplicitTypeConversion": ["binary", "string"], + + "disallowKeywords": ["with", "eval"], + + "requireKeywordsOnNewLine": [], + "disallowKeywordsOnNewLine": ["else"], + + "requireLineFeedAtFileEnd": true, + + "disallowTrailingWhitespace": true, + + "disallowTrailingComma": true, + + "excludeFiles": ["node_modules/**", "vendor/**"], + + "disallowMultipleLineStrings": true, + + "requireDotNotation": { "allExcept": ["keywords"] }, + + "requireParenthesesAroundIIFE": true, + + "validateLineBreaks": "LF", + + "validateQuoteMarks": { + "escape": true, + "mark": "'" + }, + + "disallowOperatorBeforeLineBreak": [], + + "requireSpaceBeforeKeywords": [ + "do", + "for", + "if", + "else", + "switch", + "case", + "try", + "catch", + "finally", + "while", + "with", + "return" + ], + + "validateAlignedFunctionParameters": { + "lineBreakAfterOpeningBraces": true, + "lineBreakBeforeClosingBraces": true + }, + + "requirePaddingNewLinesBeforeExport": true, + + "validateNewlineAfterArrayElements": { + "maximum": 8 + }, + + "requirePaddingNewLinesAfterUseStrict": true, + + "disallowArrowFunctions": true, + + "disallowMultiLineTernary": true, + + "validateOrderInObjectKeys": "asc-insensitive", + + "disallowIdenticalDestructuringNames": true, + + "disallowNestedTernaries": { "maxLevel": 1 }, + + "requireSpaceAfterComma": { "allExcept": ["trailing"] }, + "requireAlignedMultilineParams": false, + + "requireSpacesInGenerator": { + "afterStar": true + }, + + "disallowSpacesInGenerator": { + "beforeStar": true + }, + + "disallowVar": false, + + "requireArrayDestructuring": false, + + "requireEnhancedObjectLiterals": false, + + "requireObjectDestructuring": false, + + "requireEarlyReturn": false, + + "requireCapitalizedConstructorsNew": { + "allExcept": ["Function", "String", "Object", "Symbol", "Number", "Date", "RegExp", "Error", "Boolean", "Array"] + }, + + "requireImportAlphabetized": false, + + "requireSpaceBeforeObjectValues": true, + "requireSpaceBeforeDestructuredValues": true, + + "disallowSpacesInsideTemplateStringPlaceholders": true, + + "disallowArrayDestructuringReturn": false, + + "requireNewlineBeforeSingleStatementsInIf": false, + + "disallowUnusedVariables": true, + + "requireSpacesInsideImportedObjectBraces": true, + + "requireUseStrict": true +} + diff --git 
a/node_modules/function-bind/.npmignore b/node_modules/function-bind/.npmignore new file mode 100644 index 0000000..dbb555f --- /dev/null +++ b/node_modules/function-bind/.npmignore @@ -0,0 +1,22 @@ +# gitignore +.DS_Store +.monitor +.*.swp +.nodemonignore +releases +*.log +*.err +fleet.json +public/browserify +bin/*.json +.bin +build +compile +.lock-wscript +coverage +node_modules + +# Only apps should have lockfiles +npm-shrinkwrap.json +package-lock.json +yarn.lock diff --git a/node_modules/function-bind/.travis.yml b/node_modules/function-bind/.travis.yml new file mode 100644 index 0000000..85f70d2 --- /dev/null +++ b/node_modules/function-bind/.travis.yml @@ -0,0 +1,168 @@ +language: node_js +os: + - linux +node_js: + - "8.4" + - "7.10" + - "6.11" + - "5.12" + - "4.8" + - "iojs-v3.3" + - "iojs-v2.5" + - "iojs-v1.8" + - "0.12" + - "0.10" + - "0.8" +before_install: + - 'if [ "${TRAVIS_NODE_VERSION}" = "0.6" ]; then npm install -g npm@1.3 ; elif [ "${TRAVIS_NODE_VERSION}" != "0.9" ]; then case "$(npm --version)" in 1.*) npm install -g npm@1.4.28 ;; 2.*) npm install -g npm@2 ;; esac ; fi' + - 'if [ "${TRAVIS_NODE_VERSION}" != "0.6" ] && [ "${TRAVIS_NODE_VERSION}" != "0.9" ]; then if [ "${TRAVIS_NODE_VERSION%${TRAVIS_NODE_VERSION#[0-9]}}" = "0" ] || [ "${TRAVIS_NODE_VERSION:0:4}" = "iojs" ]; then npm install -g npm@4.5 ; else npm install -g npm; fi; fi' +install: + - 'if [ "${TRAVIS_NODE_VERSION}" = "0.6" ]; then nvm install 0.8 && npm install -g npm@1.3 && npm install -g npm@1.4.28 && npm install -g npm@2 && npm install && nvm use "${TRAVIS_NODE_VERSION}"; else npm install; fi;' +script: + - 'if [ -n "${PRETEST-}" ]; then npm run pretest ; fi' + - 'if [ -n "${POSTTEST-}" ]; then npm run posttest ; fi' + - 'if [ -n "${COVERAGE-}" ]; then npm run coverage ; fi' + - 'if [ -n "${TEST-}" ]; then npm run tests-only ; fi' +sudo: false +env: + - TEST=true +matrix: + fast_finish: true + include: + - node_js: "node" + env: PRETEST=true + - node_js: "4" + env: COVERAGE=true + - node_js: "8.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.9" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.8" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.10" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.9" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.8" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.11" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.10" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.9" + env: TEST=true ALLOW_FAILURE=true + - node_js: 
"5.8" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v3.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v3.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v3.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v2.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v2.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v2.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v2.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v2.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "0.11" + env: TEST=true ALLOW_FAILURE=true + - node_js: "0.9" + env: TEST=true ALLOW_FAILURE=true + - node_js: "0.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "0.4" + env: TEST=true ALLOW_FAILURE=true + allow_failures: + - os: osx + - env: TEST=true ALLOW_FAILURE=true diff --git a/node_modules/function-bind/LICENSE b/node_modules/function-bind/LICENSE new file mode 100644 index 0000000..62d6d23 --- /dev/null +++ b/node_modules/function-bind/LICENSE @@ -0,0 +1,20 @@ +Copyright (c) 2013 Raynos. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
+ diff --git a/node_modules/function-bind/README.md b/node_modules/function-bind/README.md new file mode 100644 index 0000000..81862a0 --- /dev/null +++ b/node_modules/function-bind/README.md @@ -0,0 +1,48 @@ +# function-bind + + + + + +Implementation of function.prototype.bind + +## Example + +I mainly do this for unit tests I run on phantomjs. +PhantomJS does not have Function.prototype.bind :( + +```js +Function.prototype.bind = require("function-bind") +``` + +## Installation + +`npm install function-bind` + +## Contributors + + - Raynos + +## MIT Licenced + + [travis-svg]: https://travis-ci.org/Raynos/function-bind.svg + [travis-url]: https://travis-ci.org/Raynos/function-bind + [npm-badge-svg]: https://badge.fury.io/js/function-bind.svg + [npm-url]: https://npmjs.org/package/function-bind + [5]: https://coveralls.io/repos/Raynos/function-bind/badge.png + [6]: https://coveralls.io/r/Raynos/function-bind + [7]: https://gemnasium.com/Raynos/function-bind.png + [8]: https://gemnasium.com/Raynos/function-bind + [deps-svg]: https://david-dm.org/Raynos/function-bind.svg + [deps-url]: https://david-dm.org/Raynos/function-bind + [dev-deps-svg]: https://david-dm.org/Raynos/function-bind/dev-status.svg + [dev-deps-url]: https://david-dm.org/Raynos/function-bind#info=devDependencies + [11]: https://ci.testling.com/Raynos/function-bind.png + [12]: https://ci.testling.com/Raynos/function-bind diff --git a/node_modules/function-bind/implementation.js b/node_modules/function-bind/implementation.js new file mode 100644 index 0000000..cc4daec --- /dev/null +++ b/node_modules/function-bind/implementation.js @@ -0,0 +1,52 @@ +'use strict'; + +/* eslint no-invalid-this: 1 */ + +var ERROR_MESSAGE = 'Function.prototype.bind called on incompatible '; +var slice = Array.prototype.slice; +var toStr = Object.prototype.toString; +var funcType = '[object Function]'; + +module.exports = function bind(that) { + var target = this; + if (typeof target !== 'function' || toStr.call(target) !== funcType) { + throw new TypeError(ERROR_MESSAGE + target); + } + var args = slice.call(arguments, 1); + + var bound; + var binder = function () { + if (this instanceof bound) { + var result = target.apply( + this, + args.concat(slice.call(arguments)) + ); + if (Object(result) === result) { + return result; + } + return this; + } else { + return target.apply( + that, + args.concat(slice.call(arguments)) + ); + } + }; + + var boundLength = Math.max(0, target.length - args.length); + var boundArgs = []; + for (var i = 0; i < boundLength; i++) { + boundArgs.push('$' + i); + } + + bound = Function('binder', 'return function (' + boundArgs.join(',') + '){ return binder.apply(this,arguments); }')(binder); + + if (target.prototype) { + var Empty = function Empty() {}; + Empty.prototype = target.prototype; + bound.prototype = new Empty(); + Empty.prototype = null; + } + + return bound; +}; diff --git a/node_modules/function-bind/index.js b/node_modules/function-bind/index.js new file mode 100644 index 0000000..3bb6b96 --- /dev/null +++ b/node_modules/function-bind/index.js @@ -0,0 +1,5 @@ +'use strict'; + +var implementation = require('./implementation'); + +module.exports = Function.prototype.bind || implementation; diff --git a/node_modules/function-bind/package.json b/node_modules/function-bind/package.json new file mode 100644 index 0000000..20a1727 --- /dev/null +++ b/node_modules/function-bind/package.json @@ -0,0 +1,63 @@ +{ + "name": "function-bind", + "version": "1.1.1", + "description": "Implementation of 
Function.prototype.bind", + "keywords": [ + "function", + "bind", + "shim", + "es5" + ], + "author": "Raynos ", + "repository": "git://github.com/Raynos/function-bind.git", + "main": "index", + "homepage": "https://github.com/Raynos/function-bind", + "contributors": [ + { + "name": "Raynos" + }, + { + "name": "Jordan Harband", + "url": "https://github.com/ljharb" + } + ], + "bugs": { + "url": "https://github.com/Raynos/function-bind/issues", + "email": "raynos2@gmail.com" + }, + "dependencies": {}, + "devDependencies": { + "@ljharb/eslint-config": "^12.2.1", + "covert": "^1.1.0", + "eslint": "^4.5.0", + "jscs": "^3.0.7", + "tape": "^4.8.0" + }, + "license": "MIT", + "scripts": { + "pretest": "npm run lint", + "test": "npm run tests-only", + "posttest": "npm run coverage -- --quiet", + "tests-only": "node test", + "coverage": "covert test/*.js", + "lint": "npm run jscs && npm run eslint", + "jscs": "jscs *.js */*.js", + "eslint": "eslint *.js */*.js" + }, + "testling": { + "files": "test/index.js", + "browsers": [ + "ie/8..latest", + "firefox/16..latest", + "firefox/nightly", + "chrome/22..latest", + "chrome/canary", + "opera/12..latest", + "opera/next", + "safari/5.1..latest", + "ipad/6.0..latest", + "iphone/6.0..latest", + "android-browser/4.2..latest" + ] + } +} diff --git a/node_modules/function-bind/test/.eslintrc b/node_modules/function-bind/test/.eslintrc new file mode 100644 index 0000000..8a56d5b --- /dev/null +++ b/node_modules/function-bind/test/.eslintrc @@ -0,0 +1,9 @@ +{ + "rules": { + "array-bracket-newline": 0, + "array-element-newline": 0, + "max-statements-per-line": [2, { "max": 2 }], + "no-invalid-this": 0, + "no-magic-numbers": 0, + } +} diff --git a/node_modules/function-bind/test/index.js b/node_modules/function-bind/test/index.js new file mode 100644 index 0000000..2edecce --- /dev/null +++ b/node_modules/function-bind/test/index.js @@ -0,0 +1,252 @@ +// jscs:disable requireUseStrict + +var test = require('tape'); + +var functionBind = require('../implementation'); +var getCurrentContext = function () { return this; }; + +test('functionBind is a function', function (t) { + t.equal(typeof functionBind, 'function'); + t.end(); +}); + +test('non-functions', function (t) { + var nonFunctions = [true, false, [], {}, 42, 'foo', NaN, /a/g]; + t.plan(nonFunctions.length); + for (var i = 0; i < nonFunctions.length; ++i) { + try { functionBind.call(nonFunctions[i]); } catch (ex) { + t.ok(ex instanceof TypeError, 'throws when given ' + String(nonFunctions[i])); + } + } + t.end(); +}); + +test('without a context', function (t) { + t.test('binds properly', function (st) { + var args, context; + var namespace = { + func: functionBind.call(function () { + args = Array.prototype.slice.call(arguments); + context = this; + }) + }; + namespace.func(1, 2, 3); + st.deepEqual(args, [1, 2, 3]); + st.equal(context, getCurrentContext.call()); + st.end(); + }); + + t.test('binds properly, and still supplies bound arguments', function (st) { + var args, context; + var namespace = { + func: functionBind.call(function () { + args = Array.prototype.slice.call(arguments); + context = this; + }, undefined, 1, 2, 3) + }; + namespace.func(4, 5, 6); + st.deepEqual(args, [1, 2, 3, 4, 5, 6]); + st.equal(context, getCurrentContext.call()); + st.end(); + }); + + t.test('returns properly', function (st) { + var args; + var namespace = { + func: functionBind.call(function () { + args = Array.prototype.slice.call(arguments); + return this; + }, null) + }; + var context = namespace.func(1, 2, 3); + 
st.equal(context, getCurrentContext.call(), 'returned context is namespaced context'); + st.deepEqual(args, [1, 2, 3], 'passed arguments are correct'); + st.end(); + }); + + t.test('returns properly with bound arguments', function (st) { + var args; + var namespace = { + func: functionBind.call(function () { + args = Array.prototype.slice.call(arguments); + return this; + }, null, 1, 2, 3) + }; + var context = namespace.func(4, 5, 6); + st.equal(context, getCurrentContext.call(), 'returned context is namespaced context'); + st.deepEqual(args, [1, 2, 3, 4, 5, 6], 'passed arguments are correct'); + st.end(); + }); + + t.test('called as a constructor', function (st) { + var thunkify = function (value) { + return function () { return value; }; + }; + st.test('returns object value', function (sst) { + var expectedReturnValue = [1, 2, 3]; + var Constructor = functionBind.call(thunkify(expectedReturnValue), null); + var result = new Constructor(); + sst.equal(result, expectedReturnValue); + sst.end(); + }); + + st.test('does not return primitive value', function (sst) { + var Constructor = functionBind.call(thunkify(42), null); + var result = new Constructor(); + sst.notEqual(result, 42); + sst.end(); + }); + + st.test('object from bound constructor is instance of original and bound constructor', function (sst) { + var A = function (x) { + this.name = x || 'A'; + }; + var B = functionBind.call(A, null, 'B'); + + var result = new B(); + sst.ok(result instanceof B, 'result is instance of bound constructor'); + sst.ok(result instanceof A, 'result is instance of original constructor'); + sst.end(); + }); + + st.end(); + }); + + t.end(); +}); + +test('with a context', function (t) { + t.test('with no bound arguments', function (st) { + var args, context; + var boundContext = {}; + var namespace = { + func: functionBind.call(function () { + args = Array.prototype.slice.call(arguments); + context = this; + }, boundContext) + }; + namespace.func(1, 2, 3); + st.equal(context, boundContext, 'binds a context properly'); + st.deepEqual(args, [1, 2, 3], 'supplies passed arguments'); + st.end(); + }); + + t.test('with bound arguments', function (st) { + var args, context; + var boundContext = {}; + var namespace = { + func: functionBind.call(function () { + args = Array.prototype.slice.call(arguments); + context = this; + }, boundContext, 1, 2, 3) + }; + namespace.func(4, 5, 6); + st.equal(context, boundContext, 'binds a context properly'); + st.deepEqual(args, [1, 2, 3, 4, 5, 6], 'supplies bound and passed arguments'); + st.end(); + }); + + t.test('returns properly', function (st) { + var boundContext = {}; + var args; + var namespace = { + func: functionBind.call(function () { + args = Array.prototype.slice.call(arguments); + return this; + }, boundContext) + }; + var context = namespace.func(1, 2, 3); + st.equal(context, boundContext, 'returned context is bound context'); + st.notEqual(context, getCurrentContext.call(), 'returned context is not lexical context'); + st.deepEqual(args, [1, 2, 3], 'passed arguments are correct'); + st.end(); + }); + + t.test('returns properly with bound arguments', function (st) { + var boundContext = {}; + var args; + var namespace = { + func: functionBind.call(function () { + args = Array.prototype.slice.call(arguments); + return this; + }, boundContext, 1, 2, 3) + }; + var context = namespace.func(4, 5, 6); + st.equal(context, boundContext, 'returned context is bound context'); + st.notEqual(context, getCurrentContext.call(), 'returned context is not lexical context'); + 
st.deepEqual(args, [1, 2, 3, 4, 5, 6], 'passed arguments are correct'); + st.end(); + }); + + t.test('passes the correct arguments when called as a constructor', function (st) { + var expected = { name: 'Correct' }; + var namespace = { + Func: functionBind.call(function (arg) { + return arg; + }, { name: 'Incorrect' }) + }; + var returned = new namespace.Func(expected); + st.equal(returned, expected, 'returns the right arg when called as a constructor'); + st.end(); + }); + + t.test('has the new instance\'s context when called as a constructor', function (st) { + var actualContext; + var expectedContext = { foo: 'bar' }; + var namespace = { + Func: functionBind.call(function () { + actualContext = this; + }, expectedContext) + }; + var result = new namespace.Func(); + st.equal(result instanceof namespace.Func, true); + st.notEqual(actualContext, expectedContext); + st.end(); + }); + + t.end(); +}); + +test('bound function length', function (t) { + t.test('sets a correct length without thisArg', function (st) { + var subject = functionBind.call(function (a, b, c) { return a + b + c; }); + st.equal(subject.length, 3); + st.equal(subject(1, 2, 3), 6); + st.end(); + }); + + t.test('sets a correct length with thisArg', function (st) { + var subject = functionBind.call(function (a, b, c) { return a + b + c; }, {}); + st.equal(subject.length, 3); + st.equal(subject(1, 2, 3), 6); + st.end(); + }); + + t.test('sets a correct length without thisArg and first argument', function (st) { + var subject = functionBind.call(function (a, b, c) { return a + b + c; }, undefined, 1); + st.equal(subject.length, 2); + st.equal(subject(2, 3), 6); + st.end(); + }); + + t.test('sets a correct length with thisArg and first argument', function (st) { + var subject = functionBind.call(function (a, b, c) { return a + b + c; }, {}, 1); + st.equal(subject.length, 2); + st.equal(subject(2, 3), 6); + st.end(); + }); + + t.test('sets a correct length without thisArg and too many arguments', function (st) { + var subject = functionBind.call(function (a, b, c) { return a + b + c; }, undefined, 1, 2, 3, 4); + st.equal(subject.length, 0); + st.equal(subject(), 6); + st.end(); + }); + + t.test('sets a correct length with thisArg and too many arguments', function (st) { + var subject = functionBind.call(function (a, b, c) { return a + b + c; }, {}, 1, 2, 3, 4); + st.equal(subject.length, 0); + st.equal(subject(), 6); + st.end(); + }); +}); diff --git a/node_modules/get-intrinsic/.eslintrc b/node_modules/get-intrinsic/.eslintrc new file mode 100644 index 0000000..0ab0876 --- /dev/null +++ b/node_modules/get-intrinsic/.eslintrc @@ -0,0 +1,37 @@ +{ + "root": true, + + "extends": "@ljharb", + + "env": { + "es6": true, + "es2017": true, + "es2020": true, + "es2021": true, + "es2022": true, + }, + + "rules": { + "array-bracket-newline": 0, + "complexity": 0, + "eqeqeq": [2, "allow-null"], + "func-name-matching": 0, + "id-length": 0, + "max-lines-per-function": [2, 90], + "max-params": [2, 4], + "max-statements": 0, + "max-statements-per-line": [2, { "max": 2 }], + "multiline-comment-style": 0, + "no-magic-numbers": 0, + "sort-keys": 0, + }, + + "overrides": [ + { + "files": "test/**", + "rules": { + "new-cap": 0, + }, + }, + ], +} diff --git a/node_modules/get-intrinsic/.github/FUNDING.yml b/node_modules/get-intrinsic/.github/FUNDING.yml new file mode 100644 index 0000000..8e8da0d --- /dev/null +++ b/node_modules/get-intrinsic/.github/FUNDING.yml @@ -0,0 +1,12 @@ +# These are supported funding model platforms + +github: [ljharb] 
+patreon: # Replace with a single Patreon username +open_collective: # Replace with a single Open Collective username +ko_fi: # Replace with a single Ko-fi username +tidelift: npm/get-intrinsic +community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry +liberapay: # Replace with a single Liberapay username +issuehunt: # Replace with a single IssueHunt username +otechie: # Replace with a single Otechie username +custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2'] diff --git a/node_modules/get-intrinsic/.nycrc b/node_modules/get-intrinsic/.nycrc new file mode 100644 index 0000000..bdd626c --- /dev/null +++ b/node_modules/get-intrinsic/.nycrc @@ -0,0 +1,9 @@ +{ + "all": true, + "check-coverage": false, + "reporter": ["text-summary", "text", "html", "json"], + "exclude": [ + "coverage", + "test" + ] +} diff --git a/node_modules/get-intrinsic/CHANGELOG.md b/node_modules/get-intrinsic/CHANGELOG.md new file mode 100644 index 0000000..1bb52d3 --- /dev/null +++ b/node_modules/get-intrinsic/CHANGELOG.md @@ -0,0 +1,98 @@ +# Changelog + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). + +## [v1.1.3](https://github.com/ljharb/get-intrinsic/compare/v1.1.2...v1.1.3) - 2022-09-12 + +### Commits + +- [Dev Deps] update `es-abstract`, `es-value-fixtures`, `tape` [`07ff291`](https://github.com/ljharb/get-intrinsic/commit/07ff291816406ebe5a12d7f16965bde0942dd688) +- [Fix] properly check for % signs [`50ac176`](https://github.com/ljharb/get-intrinsic/commit/50ac1760fe99c227e64eabde76e9c0e44cd881b5) + +## [v1.1.2](https://github.com/ljharb/get-intrinsic/compare/v1.1.1...v1.1.2) - 2022-06-08 + +### Fixed + +- [Fix] properly validate against extra % signs [`#16`](https://github.com/ljharb/get-intrinsic/issues/16) + +### Commits + +- [actions] reuse common workflows [`0972547`](https://github.com/ljharb/get-intrinsic/commit/0972547efd0abc863fe4c445a6ca7eb4f8c6901d) +- [meta] use `npmignore` to autogenerate an npmignore file [`5ba0b51`](https://github.com/ljharb/get-intrinsic/commit/5ba0b51d8d8d4f1c31d426d74abc0770fd106bad) +- [actions] use `node/install` instead of `node/run`; use `codecov` action [`c364492`](https://github.com/ljharb/get-intrinsic/commit/c364492af4af51333e6f81c0bf21fd3d602c3661) +- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `aud`, `auto-changelog`, `es-abstract`, `object-inspect`, `tape` [`dc04dad`](https://github.com/ljharb/get-intrinsic/commit/dc04dad86f6e5608775a2640cb0db5927ae29ed9) +- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `es-abstract`, `object-inspect`, `safe-publish-latest`, `tape` [`1c14059`](https://github.com/ljharb/get-intrinsic/commit/1c1405984e86dd2dc9366c15d8a0294a96a146a5) +- [Tests] use `mock-property` [`b396ef0`](https://github.com/ljharb/get-intrinsic/commit/b396ef05bb73b1d699811abd64b0d9b97997fdda) +- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `aud`, `auto-changelog`, `object-inspect`, `tape` [`c2c758d`](https://github.com/ljharb/get-intrinsic/commit/c2c758d3b90af4fef0a76910d8d3c292ec8d1d3e) +- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `aud`, `es-abstract`, `es-value-fixtures`, `object-inspect`, `tape` [`29e3c09`](https://github.com/ljharb/get-intrinsic/commit/29e3c091c2bf3e17099969847e8729d0e46896de) +- [actions] update codecov uploader 
[`8cbc141`](https://github.com/ljharb/get-intrinsic/commit/8cbc1418940d7a8941f3a7985cbc4ac095c5e13d) +- [Dev Deps] update `@ljharb/eslint-config`, `es-abstract`, `es-value-fixtures`, `object-inspect`, `tape` [`10b6f5c`](https://github.com/ljharb/get-intrinsic/commit/10b6f5c02593fb3680c581d696ac124e30652932) +- [readme] add github actions/codecov badges [`4e25400`](https://github.com/ljharb/get-intrinsic/commit/4e25400d9f51ae9eb059cbe22d9144e70ea214e8) +- [Tests] use `for-each` instead of `foreach` [`c05b957`](https://github.com/ljharb/get-intrinsic/commit/c05b957ad9a7bc7721af7cc9e9be1edbfe057496) +- [Dev Deps] update `es-abstract` [`29b05ae`](https://github.com/ljharb/get-intrinsic/commit/29b05aec3e7330e9ad0b8e0f685a9112c20cdd97) +- [meta] use `prepublishOnly` script for npm 7+ [`95c285d`](https://github.com/ljharb/get-intrinsic/commit/95c285da810516057d3bbfa871176031af38f05d) +- [Deps] update `has-symbols` [`593cb4f`](https://github.com/ljharb/get-intrinsic/commit/593cb4fb38e7922e40e42c183f45274b636424cd) +- [readme] fix repo URLs [`1c8305b`](https://github.com/ljharb/get-intrinsic/commit/1c8305b5365827c9b6fc785434aac0e1328ff2f5) +- [Deps] update `has-symbols` [`c7138b6`](https://github.com/ljharb/get-intrinsic/commit/c7138b6c6d73132d859471fb8c13304e1e7c8b20) +- [Dev Deps] remove unused `has-bigints` [`bd63aff`](https://github.com/ljharb/get-intrinsic/commit/bd63aff6ad8f3a986c557fcda2914187bdaab359) + +## [v1.1.1](https://github.com/ljharb/get-intrinsic/compare/v1.1.0...v1.1.1) - 2021-02-03 + +### Fixed + +- [meta] export `./package.json` [`#9`](https://github.com/ljharb/get-intrinsic/issues/9) + +### Commits + +- [readme] flesh out the readme; use `evalmd` [`d12f12c`](https://github.com/ljharb/get-intrinsic/commit/d12f12c15345a0a0772cc65a7c64369529abd614) +- [eslint] set up proper globals config [`5a8c098`](https://github.com/ljharb/get-intrinsic/commit/5a8c0984e3319d1ac0e64b102f8ec18b64e79f36) +- [Dev Deps] update `eslint` [`7b9a5c0`](https://github.com/ljharb/get-intrinsic/commit/7b9a5c0d31a90ca1a1234181c74988fb046701cd) + +## [v1.1.0](https://github.com/ljharb/get-intrinsic/compare/v1.0.2...v1.1.0) - 2021-01-25 + +### Fixed + +- [Refactor] delay `Function` eval until syntax-derived values are requested [`#3`](https://github.com/ljharb/get-intrinsic/issues/3) + +### Commits + +- [Tests] migrate tests to Github Actions [`2ab762b`](https://github.com/ljharb/get-intrinsic/commit/2ab762b48164aea8af37a40ba105bbc8246ab8c4) +- [meta] do not publish github action workflow files [`5e7108e`](https://github.com/ljharb/get-intrinsic/commit/5e7108e4768b244d48d9567ba4f8a6cab9c65b8e) +- [Tests] add some coverage [`01ac7a8`](https://github.com/ljharb/get-intrinsic/commit/01ac7a87ac29738567e8524cd8c9e026b1fa8cb3) +- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `call-bind`, `es-abstract`, `tape`; add `call-bind` [`911b672`](https://github.com/ljharb/get-intrinsic/commit/911b672fbffae433a96924c6ce013585e425f4b7) +- [Refactor] rearrange evalled constructors a bit [`7e7e4bf`](https://github.com/ljharb/get-intrinsic/commit/7e7e4bf583f3799c8ac1c6c5e10d2cb553957347) +- [meta] add Automatic Rebase and Require Allow Edits workflows [`0199968`](https://github.com/ljharb/get-intrinsic/commit/01999687a263ffce0a3cb011dfbcb761754aedbc) + +## [v1.0.2](https://github.com/ljharb/get-intrinsic/compare/v1.0.1...v1.0.2) - 2020-12-17 + +### Commits + +- [Fix] Throw for non‑existent intrinsics [`68f873b`](https://github.com/ljharb/get-intrinsic/commit/68f873b013c732a05ad6f5fc54f697e55515461b) +- [Fix] Throw for 
non‑existent segments in the intrinsic path [`8325dee`](https://github.com/ljharb/get-intrinsic/commit/8325deee43128f3654d3399aa9591741ebe17b21) +- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `aud`, `has-bigints`, `object-inspect` [`0c227a7`](https://github.com/ljharb/get-intrinsic/commit/0c227a7d8b629166f25715fd242553892e458525) +- [meta] do not lint coverage output [`70d2419`](https://github.com/ljharb/get-intrinsic/commit/70d24199b620043cd9110fc5f426d214ebe21dc9) + +## [v1.0.1](https://github.com/ljharb/get-intrinsic/compare/v1.0.0...v1.0.1) - 2020-10-30 + +### Commits + +- [Tests] gather coverage data on every job [`d1d280d`](https://github.com/ljharb/get-intrinsic/commit/d1d280dec714e3f0519cc877dbcb193057d9cac6) +- [Fix] add missing dependencies [`5031771`](https://github.com/ljharb/get-intrinsic/commit/5031771bb1095b38be88ce7c41d5de88718e432e) +- [Tests] use `es-value-fixtures` [`af48765`](https://github.com/ljharb/get-intrinsic/commit/af48765a23c5323fb0b6b38dbf00eb5099c7bebc) + +## v1.0.0 - 2020-10-29 + +### Commits + +- Implementation [`bbce57c`](https://github.com/ljharb/get-intrinsic/commit/bbce57c6f33d05b2d8d3efa273ceeb3ee01127bb) +- Tests [`17b4f0d`](https://github.com/ljharb/get-intrinsic/commit/17b4f0d56dea6b4059b56fc30ef3ee4d9500ebc2) +- Initial commit [`3153294`](https://github.com/ljharb/get-intrinsic/commit/31532948de363b0a27dd9fd4649e7b7028ec4b44) +- npm init [`fb326c4`](https://github.com/ljharb/get-intrinsic/commit/fb326c4d2817c8419ec31de1295f06bb268a7902) +- [meta] add Automatic Rebase and Require Allow Edits workflows [`48862fb`](https://github.com/ljharb/get-intrinsic/commit/48862fb2508c8f6a57968e6d08b7c883afc9d550) +- [meta] add `auto-changelog` [`5f28ad0`](https://github.com/ljharb/get-intrinsic/commit/5f28ad019e060a353d8028f9f2591a9cc93074a1) +- [meta] add "funding"; create `FUNDING.yml` [`c2bbdde`](https://github.com/ljharb/get-intrinsic/commit/c2bbddeba73a875be61484ee4680b129a6d4e0a1) +- [Tests] add `npm run lint` [`0a84b98`](https://github.com/ljharb/get-intrinsic/commit/0a84b98b22b7cf7a748666f705b0003a493c35fd) +- Only apps should have lockfiles [`9586c75`](https://github.com/ljharb/get-intrinsic/commit/9586c75866c1ee678e4d5d4dbbdef6997e511b05) diff --git a/node_modules/get-intrinsic/LICENSE b/node_modules/get-intrinsic/LICENSE new file mode 100644 index 0000000..48f05d0 --- /dev/null +++ b/node_modules/get-intrinsic/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 Jordan Harband + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/node_modules/get-intrinsic/README.md b/node_modules/get-intrinsic/README.md new file mode 100644 index 0000000..3aa0bba --- /dev/null +++ b/node_modules/get-intrinsic/README.md @@ -0,0 +1,71 @@ +# get-intrinsic [![Version Badge][npm-version-svg]][package-url] + +[![github actions][actions-image]][actions-url] +[![coverage][codecov-image]][codecov-url] +[![dependency status][deps-svg]][deps-url] +[![dev dependency status][dev-deps-svg]][dev-deps-url] +[![License][license-image]][license-url] +[![Downloads][downloads-image]][downloads-url] + +[![npm badge][npm-badge-png]][package-url] + +Get and robustly cache all JS language-level intrinsics at first require time. + +See the syntax described [in the JS spec](https://tc39.es/ecma262/#sec-well-known-intrinsic-objects) for reference. + +## Example + +```js +var GetIntrinsic = require('get-intrinsic'); +var assert = require('assert'); + +// static methods +assert.equal(GetIntrinsic('%Math.pow%'), Math.pow); +assert.equal(Math.pow(2, 3), 8); +assert.equal(GetIntrinsic('%Math.pow%')(2, 3), 8); +delete Math.pow; +assert.equal(GetIntrinsic('%Math.pow%')(2, 3), 8); + +// instance methods +var arr = [1]; +assert.equal(GetIntrinsic('%Array.prototype.push%'), Array.prototype.push); +assert.deepEqual(arr, [1]); + +arr.push(2); +assert.deepEqual(arr, [1, 2]); + +GetIntrinsic('%Array.prototype.push%').call(arr, 3); +assert.deepEqual(arr, [1, 2, 3]); + +delete Array.prototype.push; +GetIntrinsic('%Array.prototype.push%').call(arr, 4); +assert.deepEqual(arr, [1, 2, 3, 4]); + +// missing features +delete JSON.parse; // to simulate a real intrinsic that is missing in the environment +assert.throws(() => GetIntrinsic('%JSON.parse%')); +assert.equal(undefined, GetIntrinsic('%JSON.parse%', true)); +``` + +## Tests +Simply clone the repo, `npm install`, and run `npm test` + +## Security + +Please email [@ljharb](https://github.com/ljharb) or see https://tidelift.com/security if you have a potential security vulnerability to report. 
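The README above shows one-off lookups; the package's stated purpose is to capture intrinsics once, at first require time, and keep using the cached references even if the globals are later deleted or overridden. Below is a minimal usage sketch along those lines, assuming only the `get-intrinsic` API documented above; the helper name `firstTwo` is purely illustrative.

```js
// Minimal sketch: cache intrinsics at load time, then use the cached
// references later. Assumes get-intrinsic is installed; `firstTwo` is a
// hypothetical helper used only for illustration.
var GetIntrinsic = require('get-intrinsic');

var $slice = GetIntrinsic('%Array.prototype.slice%');
var $hasOwn = GetIntrinsic('%Object.prototype.hasOwnProperty%');

function firstTwo(list) {
    // calls the cached intrinsic rather than list.slice, so later tampering
    // with Array.prototype.slice does not affect this function
    return $slice.call(list, 0, 2);
}

console.log(firstTwo([1, 2, 3]));             // [ 1, 2 ]
console.log($hasOwn.call({ a: 1 }, 'a'));     // true

// Missing intrinsics throw by default; pass `true` to get undefined instead.
console.log(GetIntrinsic('%WeakRef%', true)); // WeakRef, or undefined on older engines
```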
+ +[package-url]: https://npmjs.org/package/get-intrinsic +[npm-version-svg]: https://versionbadg.es/ljharb/get-intrinsic.svg +[deps-svg]: https://david-dm.org/ljharb/get-intrinsic.svg +[deps-url]: https://david-dm.org/ljharb/get-intrinsic +[dev-deps-svg]: https://david-dm.org/ljharb/get-intrinsic/dev-status.svg +[dev-deps-url]: https://david-dm.org/ljharb/get-intrinsic#info=devDependencies +[npm-badge-png]: https://nodei.co/npm/get-intrinsic.png?downloads=true&stars=true +[license-image]: https://img.shields.io/npm/l/get-intrinsic.svg +[license-url]: LICENSE +[downloads-image]: https://img.shields.io/npm/dm/get-intrinsic.svg +[downloads-url]: https://npm-stat.com/charts.html?package=get-intrinsic +[codecov-image]: https://codecov.io/gh/ljharb/get-intrinsic/branch/main/graphs/badge.svg +[codecov-url]: https://app.codecov.io/gh/ljharb/get-intrinsic/ +[actions-image]: https://img.shields.io/endpoint?url=https://github-actions-badge-u3jn4tfpocch.runkit.sh/ljharb/get-intrinsic +[actions-url]: https://github.com/ljharb/get-intrinsic/actions diff --git a/node_modules/get-intrinsic/index.js b/node_modules/get-intrinsic/index.js new file mode 100644 index 0000000..3c3d93e --- /dev/null +++ b/node_modules/get-intrinsic/index.js @@ -0,0 +1,334 @@ +'use strict'; + +var undefined; + +var $SyntaxError = SyntaxError; +var $Function = Function; +var $TypeError = TypeError; + +// eslint-disable-next-line consistent-return +var getEvalledConstructor = function (expressionSyntax) { + try { + return $Function('"use strict"; return (' + expressionSyntax + ').constructor;')(); + } catch (e) {} +}; + +var $gOPD = Object.getOwnPropertyDescriptor; +if ($gOPD) { + try { + $gOPD({}, ''); + } catch (e) { + $gOPD = null; // this is IE 8, which has a broken gOPD + } +} + +var throwTypeError = function () { + throw new $TypeError(); +}; +var ThrowTypeError = $gOPD + ? (function () { + try { + // eslint-disable-next-line no-unused-expressions, no-caller, no-restricted-properties + arguments.callee; // IE 8 does not throw here + return throwTypeError; + } catch (calleeThrows) { + try { + // IE 8 throws on Object.getOwnPropertyDescriptor(arguments, '') + return $gOPD(arguments, 'callee').get; + } catch (gOPDthrows) { + return throwTypeError; + } + } + }()) + : throwTypeError; + +var hasSymbols = require('has-symbols')(); + +var getProto = Object.getPrototypeOf || function (x) { return x.__proto__; }; // eslint-disable-line no-proto + +var needsEval = {}; + +var TypedArray = typeof Uint8Array === 'undefined' ? undefined : getProto(Uint8Array); + +var INTRINSICS = { + '%AggregateError%': typeof AggregateError === 'undefined' ? undefined : AggregateError, + '%Array%': Array, + '%ArrayBuffer%': typeof ArrayBuffer === 'undefined' ? undefined : ArrayBuffer, + '%ArrayIteratorPrototype%': hasSymbols ? getProto([][Symbol.iterator]()) : undefined, + '%AsyncFromSyncIteratorPrototype%': undefined, + '%AsyncFunction%': needsEval, + '%AsyncGenerator%': needsEval, + '%AsyncGeneratorFunction%': needsEval, + '%AsyncIteratorPrototype%': needsEval, + '%Atomics%': typeof Atomics === 'undefined' ? undefined : Atomics, + '%BigInt%': typeof BigInt === 'undefined' ? undefined : BigInt, + '%Boolean%': Boolean, + '%DataView%': typeof DataView === 'undefined' ? 
undefined : DataView, + '%Date%': Date, + '%decodeURI%': decodeURI, + '%decodeURIComponent%': decodeURIComponent, + '%encodeURI%': encodeURI, + '%encodeURIComponent%': encodeURIComponent, + '%Error%': Error, + '%eval%': eval, // eslint-disable-line no-eval + '%EvalError%': EvalError, + '%Float32Array%': typeof Float32Array === 'undefined' ? undefined : Float32Array, + '%Float64Array%': typeof Float64Array === 'undefined' ? undefined : Float64Array, + '%FinalizationRegistry%': typeof FinalizationRegistry === 'undefined' ? undefined : FinalizationRegistry, + '%Function%': $Function, + '%GeneratorFunction%': needsEval, + '%Int8Array%': typeof Int8Array === 'undefined' ? undefined : Int8Array, + '%Int16Array%': typeof Int16Array === 'undefined' ? undefined : Int16Array, + '%Int32Array%': typeof Int32Array === 'undefined' ? undefined : Int32Array, + '%isFinite%': isFinite, + '%isNaN%': isNaN, + '%IteratorPrototype%': hasSymbols ? getProto(getProto([][Symbol.iterator]())) : undefined, + '%JSON%': typeof JSON === 'object' ? JSON : undefined, + '%Map%': typeof Map === 'undefined' ? undefined : Map, + '%MapIteratorPrototype%': typeof Map === 'undefined' || !hasSymbols ? undefined : getProto(new Map()[Symbol.iterator]()), + '%Math%': Math, + '%Number%': Number, + '%Object%': Object, + '%parseFloat%': parseFloat, + '%parseInt%': parseInt, + '%Promise%': typeof Promise === 'undefined' ? undefined : Promise, + '%Proxy%': typeof Proxy === 'undefined' ? undefined : Proxy, + '%RangeError%': RangeError, + '%ReferenceError%': ReferenceError, + '%Reflect%': typeof Reflect === 'undefined' ? undefined : Reflect, + '%RegExp%': RegExp, + '%Set%': typeof Set === 'undefined' ? undefined : Set, + '%SetIteratorPrototype%': typeof Set === 'undefined' || !hasSymbols ? undefined : getProto(new Set()[Symbol.iterator]()), + '%SharedArrayBuffer%': typeof SharedArrayBuffer === 'undefined' ? undefined : SharedArrayBuffer, + '%String%': String, + '%StringIteratorPrototype%': hasSymbols ? getProto(''[Symbol.iterator]()) : undefined, + '%Symbol%': hasSymbols ? Symbol : undefined, + '%SyntaxError%': $SyntaxError, + '%ThrowTypeError%': ThrowTypeError, + '%TypedArray%': TypedArray, + '%TypeError%': $TypeError, + '%Uint8Array%': typeof Uint8Array === 'undefined' ? undefined : Uint8Array, + '%Uint8ClampedArray%': typeof Uint8ClampedArray === 'undefined' ? undefined : Uint8ClampedArray, + '%Uint16Array%': typeof Uint16Array === 'undefined' ? undefined : Uint16Array, + '%Uint32Array%': typeof Uint32Array === 'undefined' ? undefined : Uint32Array, + '%URIError%': URIError, + '%WeakMap%': typeof WeakMap === 'undefined' ? undefined : WeakMap, + '%WeakRef%': typeof WeakRef === 'undefined' ? undefined : WeakRef, + '%WeakSet%': typeof WeakSet === 'undefined' ? 
undefined : WeakSet +}; + +var doEval = function doEval(name) { + var value; + if (name === '%AsyncFunction%') { + value = getEvalledConstructor('async function () {}'); + } else if (name === '%GeneratorFunction%') { + value = getEvalledConstructor('function* () {}'); + } else if (name === '%AsyncGeneratorFunction%') { + value = getEvalledConstructor('async function* () {}'); + } else if (name === '%AsyncGenerator%') { + var fn = doEval('%AsyncGeneratorFunction%'); + if (fn) { + value = fn.prototype; + } + } else if (name === '%AsyncIteratorPrototype%') { + var gen = doEval('%AsyncGenerator%'); + if (gen) { + value = getProto(gen.prototype); + } + } + + INTRINSICS[name] = value; + + return value; +}; + +var LEGACY_ALIASES = { + '%ArrayBufferPrototype%': ['ArrayBuffer', 'prototype'], + '%ArrayPrototype%': ['Array', 'prototype'], + '%ArrayProto_entries%': ['Array', 'prototype', 'entries'], + '%ArrayProto_forEach%': ['Array', 'prototype', 'forEach'], + '%ArrayProto_keys%': ['Array', 'prototype', 'keys'], + '%ArrayProto_values%': ['Array', 'prototype', 'values'], + '%AsyncFunctionPrototype%': ['AsyncFunction', 'prototype'], + '%AsyncGenerator%': ['AsyncGeneratorFunction', 'prototype'], + '%AsyncGeneratorPrototype%': ['AsyncGeneratorFunction', 'prototype', 'prototype'], + '%BooleanPrototype%': ['Boolean', 'prototype'], + '%DataViewPrototype%': ['DataView', 'prototype'], + '%DatePrototype%': ['Date', 'prototype'], + '%ErrorPrototype%': ['Error', 'prototype'], + '%EvalErrorPrototype%': ['EvalError', 'prototype'], + '%Float32ArrayPrototype%': ['Float32Array', 'prototype'], + '%Float64ArrayPrototype%': ['Float64Array', 'prototype'], + '%FunctionPrototype%': ['Function', 'prototype'], + '%Generator%': ['GeneratorFunction', 'prototype'], + '%GeneratorPrototype%': ['GeneratorFunction', 'prototype', 'prototype'], + '%Int8ArrayPrototype%': ['Int8Array', 'prototype'], + '%Int16ArrayPrototype%': ['Int16Array', 'prototype'], + '%Int32ArrayPrototype%': ['Int32Array', 'prototype'], + '%JSONParse%': ['JSON', 'parse'], + '%JSONStringify%': ['JSON', 'stringify'], + '%MapPrototype%': ['Map', 'prototype'], + '%NumberPrototype%': ['Number', 'prototype'], + '%ObjectPrototype%': ['Object', 'prototype'], + '%ObjProto_toString%': ['Object', 'prototype', 'toString'], + '%ObjProto_valueOf%': ['Object', 'prototype', 'valueOf'], + '%PromisePrototype%': ['Promise', 'prototype'], + '%PromiseProto_then%': ['Promise', 'prototype', 'then'], + '%Promise_all%': ['Promise', 'all'], + '%Promise_reject%': ['Promise', 'reject'], + '%Promise_resolve%': ['Promise', 'resolve'], + '%RangeErrorPrototype%': ['RangeError', 'prototype'], + '%ReferenceErrorPrototype%': ['ReferenceError', 'prototype'], + '%RegExpPrototype%': ['RegExp', 'prototype'], + '%SetPrototype%': ['Set', 'prototype'], + '%SharedArrayBufferPrototype%': ['SharedArrayBuffer', 'prototype'], + '%StringPrototype%': ['String', 'prototype'], + '%SymbolPrototype%': ['Symbol', 'prototype'], + '%SyntaxErrorPrototype%': ['SyntaxError', 'prototype'], + '%TypedArrayPrototype%': ['TypedArray', 'prototype'], + '%TypeErrorPrototype%': ['TypeError', 'prototype'], + '%Uint8ArrayPrototype%': ['Uint8Array', 'prototype'], + '%Uint8ClampedArrayPrototype%': ['Uint8ClampedArray', 'prototype'], + '%Uint16ArrayPrototype%': ['Uint16Array', 'prototype'], + '%Uint32ArrayPrototype%': ['Uint32Array', 'prototype'], + '%URIErrorPrototype%': ['URIError', 'prototype'], + '%WeakMapPrototype%': ['WeakMap', 'prototype'], + '%WeakSetPrototype%': ['WeakSet', 'prototype'] +}; + +var bind = 
require('function-bind'); +var hasOwn = require('has'); +var $concat = bind.call(Function.call, Array.prototype.concat); +var $spliceApply = bind.call(Function.apply, Array.prototype.splice); +var $replace = bind.call(Function.call, String.prototype.replace); +var $strSlice = bind.call(Function.call, String.prototype.slice); +var $exec = bind.call(Function.call, RegExp.prototype.exec); + +/* adapted from https://github.com/lodash/lodash/blob/4.17.15/dist/lodash.js#L6735-L6744 */ +var rePropName = /[^%.[\]]+|\[(?:(-?\d+(?:\.\d+)?)|(["'])((?:(?!\2)[^\\]|\\.)*?)\2)\]|(?=(?:\.|\[\])(?:\.|\[\]|%$))/g; +var reEscapeChar = /\\(\\)?/g; /** Used to match backslashes in property paths. */ +var stringToPath = function stringToPath(string) { + var first = $strSlice(string, 0, 1); + var last = $strSlice(string, -1); + if (first === '%' && last !== '%') { + throw new $SyntaxError('invalid intrinsic syntax, expected closing `%`'); + } else if (last === '%' && first !== '%') { + throw new $SyntaxError('invalid intrinsic syntax, expected opening `%`'); + } + var result = []; + $replace(string, rePropName, function (match, number, quote, subString) { + result[result.length] = quote ? $replace(subString, reEscapeChar, '$1') : number || match; + }); + return result; +}; +/* end adaptation */ + +var getBaseIntrinsic = function getBaseIntrinsic(name, allowMissing) { + var intrinsicName = name; + var alias; + if (hasOwn(LEGACY_ALIASES, intrinsicName)) { + alias = LEGACY_ALIASES[intrinsicName]; + intrinsicName = '%' + alias[0] + '%'; + } + + if (hasOwn(INTRINSICS, intrinsicName)) { + var value = INTRINSICS[intrinsicName]; + if (value === needsEval) { + value = doEval(intrinsicName); + } + if (typeof value === 'undefined' && !allowMissing) { + throw new $TypeError('intrinsic ' + name + ' exists, but is not available. Please file an issue!'); + } + + return { + alias: alias, + name: intrinsicName, + value: value + }; + } + + throw new $SyntaxError('intrinsic ' + name + ' does not exist!'); +}; + +module.exports = function GetIntrinsic(name, allowMissing) { + if (typeof name !== 'string' || name.length === 0) { + throw new $TypeError('intrinsic name must be a non-empty string'); + } + if (arguments.length > 1 && typeof allowMissing !== 'boolean') { + throw new $TypeError('"allowMissing" argument must be a boolean'); + } + + if ($exec(/^%?[^%]*%?$/, name) === null) { + throw new $SyntaxError('`%` may not be present anywhere but at the beginning and end of the intrinsic name'); + } + var parts = stringToPath(name); + var intrinsicBaseName = parts.length > 0 ? parts[0] : ''; + + var intrinsic = getBaseIntrinsic('%' + intrinsicBaseName + '%', allowMissing); + var intrinsicRealName = intrinsic.name; + var value = intrinsic.value; + var skipFurtherCaching = false; + + var alias = intrinsic.alias; + if (alias) { + intrinsicBaseName = alias[0]; + $spliceApply(parts, $concat([0, 1], alias)); + } + + for (var i = 1, isOwn = true; i < parts.length; i += 1) { + var part = parts[i]; + var first = $strSlice(part, 0, 1); + var last = $strSlice(part, -1); + if ( + ( + (first === '"' || first === "'" || first === '`') + || (last === '"' || last === "'" || last === '`') + ) + && first !== last + ) { + throw new $SyntaxError('property names with quotes must have matching quotes'); + } + if (part === 'constructor' || !isOwn) { + skipFurtherCaching = true; + } + + intrinsicBaseName += '.' 
+ part; + intrinsicRealName = '%' + intrinsicBaseName + '%'; + + if (hasOwn(INTRINSICS, intrinsicRealName)) { + value = INTRINSICS[intrinsicRealName]; + } else if (value != null) { + if (!(part in value)) { + if (!allowMissing) { + throw new $TypeError('base intrinsic for ' + name + ' exists, but the property is not available.'); + } + return void undefined; + } + if ($gOPD && (i + 1) >= parts.length) { + var desc = $gOPD(value, part); + isOwn = !!desc; + + // By convention, when a data property is converted to an accessor + // property to emulate a data property that does not suffer from + // the override mistake, that accessor's getter is marked with + // an `originalValue` property. Here, when we detect this, we + // uphold the illusion by pretending to see that original data + // property, i.e., returning the value rather than the getter + // itself. + if (isOwn && 'get' in desc && !('originalValue' in desc.get)) { + value = desc.get; + } else { + value = value[part]; + } + } else { + isOwn = hasOwn(value, part); + value = value[part]; + } + + if (isOwn && !skipFurtherCaching) { + INTRINSICS[intrinsicRealName] = value; + } + } + } + return value; +}; diff --git a/node_modules/get-intrinsic/package.json b/node_modules/get-intrinsic/package.json new file mode 100644 index 0000000..dfde032 --- /dev/null +++ b/node_modules/get-intrinsic/package.json @@ -0,0 +1,91 @@ +{ + "name": "get-intrinsic", + "version": "1.1.3", + "description": "Get and robustly cache all JS language-level intrinsics at first require time", + "main": "index.js", + "exports": { + ".": [ + { + "default": "./index.js" + }, + "./index.js" + ], + "./package.json": "./package.json" + }, + "scripts": { + "prepack": "npmignore --auto --commentLines=autogenerated", + "prepublish": "not-in-publish || npm run prepublishOnly", + "prepublishOnly": "safe-publish-latest", + "prelint": "evalmd README.md", + "lint": "eslint --ext=.js,.mjs .", + "pretest": "npm run lint", + "tests-only": "nyc tape 'test/**/*.js'", + "test": "npm run tests-only", + "posttest": "aud --production", + "version": "auto-changelog && git add CHANGELOG.md", + "postversion": "auto-changelog && git add CHANGELOG.md && git commit --no-edit --amend && git tag -f \"v$(node -e \"console.log(require('./package.json').version)\")\"" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/ljharb/get-intrinsic.git" + }, + "keywords": [ + "javascript", + "ecmascript", + "es", + "js", + "intrinsic", + "getintrinsic", + "es-abstract" + ], + "author": "Jordan Harband ", + "funding": { + "url": "https://github.com/sponsors/ljharb" + }, + "license": "MIT", + "bugs": { + "url": "https://github.com/ljharb/get-intrinsic/issues" + }, + "homepage": "https://github.com/ljharb/get-intrinsic#readme", + "devDependencies": { + "@ljharb/eslint-config": "^21.0.0", + "aud": "^2.0.0", + "auto-changelog": "^2.4.0", + "call-bind": "^1.0.2", + "es-abstract": "^1.20.2", + "es-value-fixtures": "^1.4.2", + "eslint": "=8.8.0", + "evalmd": "^0.0.19", + "for-each": "^0.3.3", + "make-async-function": "^1.0.0", + "make-async-generator-function": "^1.0.0", + "make-generator-function": "^2.0.0", + "mock-property": "^1.0.0", + "npmignore": "^0.3.0", + "nyc": "^10.3.2", + "object-inspect": "^1.12.2", + "safe-publish-latest": "^2.0.0", + "tape": "^5.6.0" + }, + "auto-changelog": { + "output": "CHANGELOG.md", + "template": "keepachangelog", + "unreleased": false, + "commitLimit": false, + "backfillLimit": false, + "hideCredit": true + }, + "dependencies": { + "function-bind": "^1.1.1", + 
"has": "^1.0.3", + "has-symbols": "^1.0.3" + }, + "testling": { + "files": "test/GetIntrinsic.js" + }, + "publishConfig": { + "ignore": [ + ".github/workflows" + ] + } +} diff --git a/node_modules/get-intrinsic/test/GetIntrinsic.js b/node_modules/get-intrinsic/test/GetIntrinsic.js new file mode 100644 index 0000000..7e0ea30 --- /dev/null +++ b/node_modules/get-intrinsic/test/GetIntrinsic.js @@ -0,0 +1,274 @@ +'use strict'; + +var GetIntrinsic = require('../'); + +var test = require('tape'); +var forEach = require('for-each'); +var debug = require('object-inspect'); +var generatorFns = require('make-generator-function')(); +var asyncFns = require('make-async-function').list(); +var asyncGenFns = require('make-async-generator-function')(); +var mockProperty = require('mock-property'); + +var callBound = require('call-bind/callBound'); +var v = require('es-value-fixtures'); +var $gOPD = require('es-abstract/helpers/getOwnPropertyDescriptor'); +var DefinePropertyOrThrow = require('es-abstract/2021/DefinePropertyOrThrow'); + +var $isProto = callBound('%Object.prototype.isPrototypeOf%'); + +test('export', function (t) { + t.equal(typeof GetIntrinsic, 'function', 'it is a function'); + t.equal(GetIntrinsic.length, 2, 'function has length of 2'); + + t.end(); +}); + +test('throws', function (t) { + t['throws']( + function () { GetIntrinsic('not an intrinsic'); }, + SyntaxError, + 'nonexistent intrinsic throws a syntax error' + ); + + t['throws']( + function () { GetIntrinsic(''); }, + TypeError, + 'empty string intrinsic throws a type error' + ); + + t['throws']( + function () { GetIntrinsic('.'); }, + SyntaxError, + '"just a dot" intrinsic throws a syntax error' + ); + + t['throws']( + function () { GetIntrinsic('%String'); }, + SyntaxError, + 'Leading % without trailing % throws a syntax error' + ); + + t['throws']( + function () { GetIntrinsic('String%'); }, + SyntaxError, + 'Trailing % without leading % throws a syntax error' + ); + + t['throws']( + function () { GetIntrinsic("String['prototype]"); }, + SyntaxError, + 'Dynamic property access is disallowed for intrinsics (unterminated string)' + ); + + t['throws']( + function () { GetIntrinsic('%Proxy.prototype.undefined%'); }, + TypeError, + "Throws when middle part doesn't exist (%Proxy.prototype.undefined%)" + ); + + t['throws']( + function () { GetIntrinsic('%Array.prototype%garbage%'); }, + SyntaxError, + 'Throws with extra percent signs' + ); + + t['throws']( + function () { GetIntrinsic('%Array.prototype%push%'); }, + SyntaxError, + 'Throws with extra percent signs, even on an existing intrinsic' + ); + + forEach(v.nonStrings, function (nonString) { + t['throws']( + function () { GetIntrinsic(nonString); }, + TypeError, + debug(nonString) + ' is not a String' + ); + }); + + forEach(v.nonBooleans, function (nonBoolean) { + t['throws']( + function () { GetIntrinsic('%', nonBoolean); }, + TypeError, + debug(nonBoolean) + ' is not a Boolean' + ); + }); + + forEach([ + 'toString', + 'propertyIsEnumerable', + 'hasOwnProperty' + ], function (objectProtoMember) { + t['throws']( + function () { GetIntrinsic(objectProtoMember); }, + SyntaxError, + debug(objectProtoMember) + ' is not an intrinsic' + ); + }); + + t.end(); +}); + +test('base intrinsics', function (t) { + t.equal(GetIntrinsic('%Object%'), Object, '%Object% yields Object'); + t.equal(GetIntrinsic('Object'), Object, 'Object yields Object'); + t.equal(GetIntrinsic('%Array%'), Array, '%Array% yields Array'); + t.equal(GetIntrinsic('Array'), Array, 'Array yields Array'); + + t.end(); +}); 
+ +test('dotted paths', function (t) { + t.equal(GetIntrinsic('%Object.prototype.toString%'), Object.prototype.toString, '%Object.prototype.toString% yields Object.prototype.toString'); + t.equal(GetIntrinsic('Object.prototype.toString'), Object.prototype.toString, 'Object.prototype.toString yields Object.prototype.toString'); + t.equal(GetIntrinsic('%Array.prototype.push%'), Array.prototype.push, '%Array.prototype.push% yields Array.prototype.push'); + t.equal(GetIntrinsic('Array.prototype.push'), Array.prototype.push, 'Array.prototype.push yields Array.prototype.push'); + + test('underscore paths are aliases for dotted paths', { skip: !Object.isFrozen || Object.isFrozen(Object.prototype) }, function (st) { + var original = GetIntrinsic('%ObjProto_toString%'); + + forEach([ + '%Object.prototype.toString%', + 'Object.prototype.toString', + '%ObjectPrototype.toString%', + 'ObjectPrototype.toString', + '%ObjProto_toString%', + 'ObjProto_toString' + ], function (name) { + DefinePropertyOrThrow(Object.prototype, 'toString', { + '[[Value]]': function toString() { + return original.apply(this, arguments); + } + }); + st.equal(GetIntrinsic(name), original, name + ' yields original Object.prototype.toString'); + }); + + DefinePropertyOrThrow(Object.prototype, 'toString', { '[[Value]]': original }); + st.end(); + }); + + test('dotted paths cache', { skip: !Object.isFrozen || Object.isFrozen(Object.prototype) }, function (st) { + var original = GetIntrinsic('%Object.prototype.propertyIsEnumerable%'); + + forEach([ + '%Object.prototype.propertyIsEnumerable%', + 'Object.prototype.propertyIsEnumerable', + '%ObjectPrototype.propertyIsEnumerable%', + 'ObjectPrototype.propertyIsEnumerable' + ], function (name) { + var restore = mockProperty(Object.prototype, 'propertyIsEnumerable', { + value: function propertyIsEnumerable() { + return original.apply(this, arguments); + } + }); + st.equal(GetIntrinsic(name), original, name + ' yields cached Object.prototype.propertyIsEnumerable'); + + restore(); + }); + + st.end(); + }); + + test('dotted path reports correct error', function (st) { + st['throws'](function () { + GetIntrinsic('%NonExistentIntrinsic.prototype.property%'); + }, /%NonExistentIntrinsic%/, 'The base intrinsic of %NonExistentIntrinsic.prototype.property% is %NonExistentIntrinsic%'); + + st['throws'](function () { + GetIntrinsic('%NonExistentIntrinsicPrototype.property%'); + }, /%NonExistentIntrinsicPrototype%/, 'The base intrinsic of %NonExistentIntrinsicPrototype.property% is %NonExistentIntrinsicPrototype%'); + + st.end(); + }); + + t.end(); +}); + +test('accessors', { skip: !$gOPD || typeof Map !== 'function' }, function (t) { + var actual = $gOPD(Map.prototype, 'size'); + t.ok(actual, 'Map.prototype.size has a descriptor'); + t.equal(typeof actual.get, 'function', 'Map.prototype.size has a getter function'); + t.equal(GetIntrinsic('%Map.prototype.size%'), actual.get, '%Map.prototype.size% yields the getter for it'); + t.equal(GetIntrinsic('Map.prototype.size'), actual.get, 'Map.prototype.size yields the getter for it'); + + t.end(); +}); + +test('generator functions', { skip: !generatorFns.length }, function (t) { + var $GeneratorFunction = GetIntrinsic('%GeneratorFunction%'); + var $GeneratorFunctionPrototype = GetIntrinsic('%Generator%'); + var $GeneratorPrototype = GetIntrinsic('%GeneratorPrototype%'); + + forEach(generatorFns, function (genFn) { + var fnName = genFn.name; + fnName = fnName ? 
"'" + fnName + "'" : 'genFn'; + + t.ok(genFn instanceof $GeneratorFunction, fnName + ' instanceof %GeneratorFunction%'); + t.ok($isProto($GeneratorFunctionPrototype, genFn), '%Generator% is prototype of ' + fnName); + t.ok($isProto($GeneratorPrototype, genFn.prototype), '%GeneratorPrototype% is prototype of ' + fnName + '.prototype'); + }); + + t.end(); +}); + +test('async functions', { skip: !asyncFns.length }, function (t) { + var $AsyncFunction = GetIntrinsic('%AsyncFunction%'); + var $AsyncFunctionPrototype = GetIntrinsic('%AsyncFunctionPrototype%'); + + forEach(asyncFns, function (asyncFn) { + var fnName = asyncFn.name; + fnName = fnName ? "'" + fnName + "'" : 'asyncFn'; + + t.ok(asyncFn instanceof $AsyncFunction, fnName + ' instanceof %AsyncFunction%'); + t.ok($isProto($AsyncFunctionPrototype, asyncFn), '%AsyncFunctionPrototype% is prototype of ' + fnName); + }); + + t.end(); +}); + +test('async generator functions', { skip: asyncGenFns.length === 0 }, function (t) { + var $AsyncGeneratorFunction = GetIntrinsic('%AsyncGeneratorFunction%'); + var $AsyncGeneratorFunctionPrototype = GetIntrinsic('%AsyncGenerator%'); + var $AsyncGeneratorPrototype = GetIntrinsic('%AsyncGeneratorPrototype%'); + + forEach(asyncGenFns, function (asyncGenFn) { + var fnName = asyncGenFn.name; + fnName = fnName ? "'" + fnName + "'" : 'asyncGenFn'; + + t.ok(asyncGenFn instanceof $AsyncGeneratorFunction, fnName + ' instanceof %AsyncGeneratorFunction%'); + t.ok($isProto($AsyncGeneratorFunctionPrototype, asyncGenFn), '%AsyncGenerator% is prototype of ' + fnName); + t.ok($isProto($AsyncGeneratorPrototype, asyncGenFn.prototype), '%AsyncGeneratorPrototype% is prototype of ' + fnName + '.prototype'); + }); + + t.end(); +}); + +test('%ThrowTypeError%', function (t) { + var $ThrowTypeError = GetIntrinsic('%ThrowTypeError%'); + + t.equal(typeof $ThrowTypeError, 'function', 'is a function'); + t['throws']( + $ThrowTypeError, + TypeError, + '%ThrowTypeError% throws a TypeError' + ); + + t.end(); +}); + +test('allowMissing', { skip: asyncGenFns.length > 0 }, function (t) { + t['throws']( + function () { GetIntrinsic('%AsyncGeneratorPrototype%'); }, + TypeError, + 'throws when missing' + ); + + t.equal( + GetIntrinsic('%AsyncGeneratorPrototype%', true), + undefined, + 'does not throw when allowMissing' + ); + + t.end(); +}); diff --git a/node_modules/github-auto-sync b/node_modules/github-auto-sync new file mode 120000 index 0000000..48c5675 --- /dev/null +++ b/node_modules/github-auto-sync @@ -0,0 +1 @@ +../tools/github \ No newline at end of file diff --git a/node_modules/has-symbols/.eslintrc b/node_modules/has-symbols/.eslintrc new file mode 100644 index 0000000..2d9a66a --- /dev/null +++ b/node_modules/has-symbols/.eslintrc @@ -0,0 +1,11 @@ +{ + "root": true, + + "extends": "@ljharb", + + "rules": { + "max-statements-per-line": [2, { "max": 2 }], + "no-magic-numbers": 0, + "multiline-comment-style": 0, + } +} diff --git a/node_modules/has-symbols/.github/FUNDING.yml b/node_modules/has-symbols/.github/FUNDING.yml new file mode 100644 index 0000000..04cf87e --- /dev/null +++ b/node_modules/has-symbols/.github/FUNDING.yml @@ -0,0 +1,12 @@ +# These are supported funding model platforms + +github: [ljharb] +patreon: # Replace with a single Patreon username +open_collective: # Replace with a single Open Collective username +ko_fi: # Replace with a single Ko-fi username +tidelift: npm/has-symbols +community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry +liberapay: # Replace with a single 
Liberapay username +issuehunt: # Replace with a single IssueHunt username +otechie: # Replace with a single Otechie username +custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2'] diff --git a/node_modules/has-symbols/.nycrc b/node_modules/has-symbols/.nycrc new file mode 100644 index 0000000..bdd626c --- /dev/null +++ b/node_modules/has-symbols/.nycrc @@ -0,0 +1,9 @@ +{ + "all": true, + "check-coverage": false, + "reporter": ["text-summary", "text", "html", "json"], + "exclude": [ + "coverage", + "test" + ] +} diff --git a/node_modules/has-symbols/CHANGELOG.md b/node_modules/has-symbols/CHANGELOG.md new file mode 100644 index 0000000..cd532a2 --- /dev/null +++ b/node_modules/has-symbols/CHANGELOG.md @@ -0,0 +1,75 @@ +# Changelog + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). + +## [v1.0.3](https://github.com/inspect-js/has-symbols/compare/v1.0.2...v1.0.3) - 2022-03-01 + +### Commits + +- [actions] use `node/install` instead of `node/run`; use `codecov` action [`518b28f`](https://github.com/inspect-js/has-symbols/commit/518b28f6c5a516cbccae30794e40aa9f738b1693) +- [meta] add `bugs` and `homepage` fields; reorder package.json [`c480b13`](https://github.com/inspect-js/has-symbols/commit/c480b13fd6802b557e1cef9749872cb5fdeef744) +- [actions] reuse common workflows [`01d0ee0`](https://github.com/inspect-js/has-symbols/commit/01d0ee0a8d97c0947f5edb73eb722027a77b2b07) +- [actions] update codecov uploader [`6424ebe`](https://github.com/inspect-js/has-symbols/commit/6424ebe86b2c9c7c3d2e9bd4413a4e4f168cb275) +- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `aud`, `auto-changelog`, `tape` [`dfa7e7f`](https://github.com/inspect-js/has-symbols/commit/dfa7e7ff38b594645d8c8222aab895157fa7e282) +- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `safe-publish-latest`, `tape` [`0c8d436`](https://github.com/inspect-js/has-symbols/commit/0c8d43685c45189cea9018191d4fd7eca91c9d02) +- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `aud`, `tape` [`9026554`](https://github.com/inspect-js/has-symbols/commit/902655442a1bf88e72b42345494ef0c60f5d36ab) +- [readme] add actions and codecov badges [`eaa9682`](https://github.com/inspect-js/has-symbols/commit/eaa9682f990f481d3acf7a1c7600bec36f7b3adc) +- [Dev Deps] update `eslint`, `tape` [`bc7a3ba`](https://github.com/inspect-js/has-symbols/commit/bc7a3ba46f27b7743f8a2579732d59d1b9ac791e) +- [Dev Deps] update `eslint`, `auto-changelog` [`0ace00a`](https://github.com/inspect-js/has-symbols/commit/0ace00af08a88cdd1e6ce0d60357d941c60c2d9f) +- [meta] use `prepublishOnly` script for npm 7+ [`093f72b`](https://github.com/inspect-js/has-symbols/commit/093f72bc2b0ed00c781f444922a5034257bf561d) +- [Tests] test on all 16 minors [`9b80d3d`](https://github.com/inspect-js/has-symbols/commit/9b80d3d9102529f04c20ec5b1fcc6e38426c6b03) + +## [v1.0.2](https://github.com/inspect-js/has-symbols/compare/v1.0.1...v1.0.2) - 2021-02-27 + +### Fixed + +- [Fix] use a universal way to get the original Symbol [`#11`](https://github.com/inspect-js/has-symbols/issues/11) + +### Commits + +- [Tests] migrate tests to Github Actions [`90ae798`](https://github.com/inspect-js/has-symbols/commit/90ae79820bdfe7bc703d67f5f3c5e205f98556d3) +- [meta] do not publish github action workflow files 
[`29e60a1`](https://github.com/inspect-js/has-symbols/commit/29e60a1b7c25c7f1acf7acff4a9320d0d10c49b4) +- [Tests] run `nyc` on all tests [`8476b91`](https://github.com/inspect-js/has-symbols/commit/8476b915650d360915abe2522505abf4b0e8f0ae) +- [readme] fix repo URLs, remove defunct badges [`126288e`](https://github.com/inspect-js/has-symbols/commit/126288ecc1797c0a40247a6b78bcb2e0bc5d7036) +- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `aud`, `auto-changelog`, `core-js`, `get-own-property-symbols` [`d84bdfa`](https://github.com/inspect-js/has-symbols/commit/d84bdfa48ac5188abbb4904b42614cd6c030940a) +- [Tests] fix linting errors [`0df3070`](https://github.com/inspect-js/has-symbols/commit/0df3070b981b6c9f2ee530c09189a7f5c6def839) +- [actions] add "Allow Edits" workflow [`1e6bc29`](https://github.com/inspect-js/has-symbols/commit/1e6bc29b188f32b9648657b07eda08504be5aa9c) +- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `tape` [`36cea2a`](https://github.com/inspect-js/has-symbols/commit/36cea2addd4e6ec435f35a2656b4e9ef82498e9b) +- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `aud`, `tape` [`1278338`](https://github.com/inspect-js/has-symbols/commit/127833801865fbc2cc8979beb9ca869c7bfe8222) +- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `aud`, `tape` [`1493254`](https://github.com/inspect-js/has-symbols/commit/1493254eda13db5fb8fc5e4a3e8324b3d196029d) +- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `core-js` [`b090bf2`](https://github.com/inspect-js/has-symbols/commit/b090bf214d3679a30edc1e2d729d466ab5183e1d) +- [actions] switch Automatic Rebase workflow to `pull_request_target` event [`4addb7a`](https://github.com/inspect-js/has-symbols/commit/4addb7ab4dc73f927ae99928d68817554fc21dc0) +- [Dev Deps] update `auto-changelog`, `tape` [`81d0baf`](https://github.com/inspect-js/has-symbols/commit/81d0baf3816096a89a8558e8043895f7a7d10d8b) +- [Dev Deps] update `auto-changelog`; add `aud` [`1a4e561`](https://github.com/inspect-js/has-symbols/commit/1a4e5612c25d91c3a03d509721d02630bc4fe3da) +- [readme] remove unused testling URLs [`3000941`](https://github.com/inspect-js/has-symbols/commit/3000941f958046e923ed8152edb1ef4a599e6fcc) +- [Tests] only audit prod deps [`692e974`](https://github.com/inspect-js/has-symbols/commit/692e9743c912410e9440207631a643a34b4741a1) +- [Dev Deps] update `@ljharb/eslint-config` [`51c946c`](https://github.com/inspect-js/has-symbols/commit/51c946c7f6baa793ec5390bb5a45cdce16b4ba76) + +## [v1.0.1](https://github.com/inspect-js/has-symbols/compare/v1.0.0...v1.0.1) - 2019-11-16 + +### Commits + +- [Tests] use shared travis-ci configs [`ce396c9`](https://github.com/inspect-js/has-symbols/commit/ce396c9419ff11c43d0da5d05cdbb79f7fb42229) +- [Tests] up to `node` `v12.4`, `v11.15`, `v10.15`, `v9.11`, `v8.15`, `v7.10`, `v6.17`, `v4.9`; use `nvm install-latest-npm` [`0690732`](https://github.com/inspect-js/has-symbols/commit/0690732801f47ab429f39ba1962f522d5c462d6b) +- [meta] add `auto-changelog` [`2163d0b`](https://github.com/inspect-js/has-symbols/commit/2163d0b7f36343076b8f947cd1667dd1750f26fc) +- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `core-js`, `safe-publish-latest`, `tape` [`8e0951f`](https://github.com/inspect-js/has-symbols/commit/8e0951f1a7a2e52068222b7bb73511761e6e4d9c) +- [actions] add automatic rebasing / merge commit blocking [`b09cdb7`](https://github.com/inspect-js/has-symbols/commit/b09cdb7cd7ee39e7a769878f56e2d6066f5ccd1d) +- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `safe-publish-latest`, `core-js`, 
`get-own-property-symbols`, `tape` [`1dd42cd`](https://github.com/inspect-js/has-symbols/commit/1dd42cd86183ed0c50f99b1062345c458babca91) +- [meta] create FUNDING.yml [`aa57a17`](https://github.com/inspect-js/has-symbols/commit/aa57a17b19708906d1927f821ea8e73394d84ca4) +- Only apps should have lockfiles [`a2d8bea`](https://github.com/inspect-js/has-symbols/commit/a2d8bea23a97d15c09eaf60f5b107fcf9a4d57aa) +- [Tests] use `npx aud` instead of `nsp` or `npm audit` with hoops [`9e96cb7`](https://github.com/inspect-js/has-symbols/commit/9e96cb783746cbed0c10ef78e599a8eaa7ebe193) +- [meta] add `funding` field [`a0b32cf`](https://github.com/inspect-js/has-symbols/commit/a0b32cf68e803f963c1639b6d47b0a9d6440bab0) +- [Dev Deps] update `safe-publish-latest` [`cb9f0a5`](https://github.com/inspect-js/has-symbols/commit/cb9f0a521a3a1790f1064d437edd33bb6c3d6af0) + +## v1.0.0 - 2016-09-19 + +### Commits + +- Tests. [`ecb6eb9`](https://github.com/inspect-js/has-symbols/commit/ecb6eb934e4883137f3f93b965ba5e0a98df430d) +- package.json [`88a337c`](https://github.com/inspect-js/has-symbols/commit/88a337cee0864a0da35f5d19e69ff0ef0150e46a) +- Initial commit [`42e1e55`](https://github.com/inspect-js/has-symbols/commit/42e1e5502536a2b8ac529c9443984acd14836b1c) +- Initial implementation. [`33f5cc6`](https://github.com/inspect-js/has-symbols/commit/33f5cc6cdff86e2194b081ee842bfdc63caf43fb) +- read me [`01f1170`](https://github.com/inspect-js/has-symbols/commit/01f1170188ff7cb1558aa297f6ba5b516c6d7b0c) diff --git a/node_modules/has-symbols/LICENSE b/node_modules/has-symbols/LICENSE new file mode 100644 index 0000000..df31cbf --- /dev/null +++ b/node_modules/has-symbols/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2016 Jordan Harband + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/has-symbols/README.md b/node_modules/has-symbols/README.md new file mode 100644 index 0000000..33905f0 --- /dev/null +++ b/node_modules/has-symbols/README.md @@ -0,0 +1,46 @@ +# has-symbols [![Version Badge][2]][1] + +[![github actions][actions-image]][actions-url] +[![coverage][codecov-image]][codecov-url] +[![dependency status][5]][6] +[![dev dependency status][7]][8] +[![License][license-image]][license-url] +[![Downloads][downloads-image]][downloads-url] + +[![npm badge][11]][1] + +Determine if the JS environment has Symbol support. Supports spec, or shams. + +## Example + +```js +var hasSymbols = require('has-symbols'); + +hasSymbols() === true; // if the environment has native Symbol support. Not polyfillable, not forgeable. 
+ +var hasSymbolsKinda = require('has-symbols/shams'); +hasSymbolsKinda() === true; // if the environment has a Symbol sham that mostly follows the spec. +``` + +## Supported Symbol shams + - get-own-property-symbols [npm](https://www.npmjs.com/package/get-own-property-symbols) | [github](https://github.com/WebReflection/get-own-property-symbols) + - core-js [npm](https://www.npmjs.com/package/core-js) | [github](https://github.com/zloirock/core-js) + +## Tests +Simply clone the repo, `npm install`, and run `npm test` + +[1]: https://npmjs.org/package/has-symbols +[2]: https://versionbadg.es/inspect-js/has-symbols.svg +[5]: https://david-dm.org/inspect-js/has-symbols.svg +[6]: https://david-dm.org/inspect-js/has-symbols +[7]: https://david-dm.org/inspect-js/has-symbols/dev-status.svg +[8]: https://david-dm.org/inspect-js/has-symbols#info=devDependencies +[11]: https://nodei.co/npm/has-symbols.png?downloads=true&stars=true +[license-image]: https://img.shields.io/npm/l/has-symbols.svg +[license-url]: LICENSE +[downloads-image]: https://img.shields.io/npm/dm/has-symbols.svg +[downloads-url]: https://npm-stat.com/charts.html?package=has-symbols +[codecov-image]: https://codecov.io/gh/inspect-js/has-symbols/branch/main/graphs/badge.svg +[codecov-url]: https://app.codecov.io/gh/inspect-js/has-symbols/ +[actions-image]: https://img.shields.io/endpoint?url=https://github-actions-badge-u3jn4tfpocch.runkit.sh/inspect-js/has-symbols +[actions-url]: https://github.com/inspect-js/has-symbols/actions diff --git a/node_modules/has-symbols/index.js b/node_modules/has-symbols/index.js new file mode 100644 index 0000000..17044fa --- /dev/null +++ b/node_modules/has-symbols/index.js @@ -0,0 +1,13 @@ +'use strict'; + +var origSymbol = typeof Symbol !== 'undefined' && Symbol; +var hasSymbolSham = require('./shams'); + +module.exports = function hasNativeSymbols() { + if (typeof origSymbol !== 'function') { return false; } + if (typeof Symbol !== 'function') { return false; } + if (typeof origSymbol('foo') !== 'symbol') { return false; } + if (typeof Symbol('bar') !== 'symbol') { return false; } + + return hasSymbolSham(); +}; diff --git a/node_modules/has-symbols/package.json b/node_modules/has-symbols/package.json new file mode 100644 index 0000000..fe7004a --- /dev/null +++ b/node_modules/has-symbols/package.json @@ -0,0 +1,101 @@ +{ + "name": "has-symbols", + "version": "1.0.3", + "description": "Determine if the JS environment has Symbol support. 
Supports spec, or shams.", + "main": "index.js", + "scripts": { + "prepublishOnly": "safe-publish-latest", + "prepublish": "not-in-publish || npm run prepublishOnly", + "pretest": "npm run --silent lint", + "test": "npm run tests-only", + "posttest": "aud --production", + "tests-only": "npm run test:stock && npm run test:staging && npm run test:shams", + "test:stock": "nyc node test", + "test:staging": "nyc node --harmony --es-staging test", + "test:shams": "npm run --silent test:shams:getownpropertysymbols && npm run --silent test:shams:corejs", + "test:shams:corejs": "nyc node test/shams/core-js.js", + "test:shams:getownpropertysymbols": "nyc node test/shams/get-own-property-symbols.js", + "lint": "eslint --ext=js,mjs .", + "version": "auto-changelog && git add CHANGELOG.md", + "postversion": "auto-changelog && git add CHANGELOG.md && git commit --no-edit --amend && git tag -f \"v$(node -e \"console.log(require('./package.json').version)\")\"" + }, + "repository": { + "type": "git", + "url": "git://github.com/inspect-js/has-symbols.git" + }, + "keywords": [ + "Symbol", + "symbols", + "typeof", + "sham", + "polyfill", + "native", + "core-js", + "ES6" + ], + "author": { + "name": "Jordan Harband", + "email": "ljharb@gmail.com", + "url": "http://ljharb.codes" + }, + "contributors": [ + { + "name": "Jordan Harband", + "email": "ljharb@gmail.com", + "url": "http://ljharb.codes" + } + ], + "funding": { + "url": "https://github.com/sponsors/ljharb" + }, + "license": "MIT", + "bugs": { + "url": "https://github.com/ljharb/has-symbols/issues" + }, + "homepage": "https://github.com/ljharb/has-symbols#readme", + "devDependencies": { + "@ljharb/eslint-config": "^20.2.3", + "aud": "^2.0.0", + "auto-changelog": "^2.4.0", + "core-js": "^2.6.12", + "eslint": "=8.8.0", + "get-own-property-symbols": "^0.9.5", + "nyc": "^10.3.2", + "safe-publish-latest": "^2.0.0", + "tape": "^5.5.2" + }, + "testling": { + "files": "test/index.js", + "browsers": [ + "iexplore/6.0..latest", + "firefox/3.0..6.0", + "firefox/15.0..latest", + "firefox/nightly", + "chrome/4.0..10.0", + "chrome/20.0..latest", + "chrome/canary", + "opera/10.0..latest", + "opera/next", + "safari/4.0..latest", + "ipad/6.0..latest", + "iphone/6.0..latest", + "android-browser/4.2" + ] + }, + "engines": { + "node": ">= 0.4" + }, + "auto-changelog": { + "output": "CHANGELOG.md", + "template": "keepachangelog", + "unreleased": false, + "commitLimit": false, + "backfillLimit": false, + "hideCredit": true + }, + "greenkeeper": { + "ignore": [ + "core-js" + ] + } +} diff --git a/node_modules/has-symbols/shams.js b/node_modules/has-symbols/shams.js new file mode 100644 index 0000000..1285210 --- /dev/null +++ b/node_modules/has-symbols/shams.js @@ -0,0 +1,42 @@ +'use strict'; + +/* eslint complexity: [2, 18], max-statements: [2, 33] */ +module.exports = function hasSymbols() { + if (typeof Symbol !== 'function' || typeof Object.getOwnPropertySymbols !== 'function') { return false; } + if (typeof Symbol.iterator === 'symbol') { return true; } + + var obj = {}; + var sym = Symbol('test'); + var symObj = Object(sym); + if (typeof sym === 'string') { return false; } + + if (Object.prototype.toString.call(sym) !== '[object Symbol]') { return false; } + if (Object.prototype.toString.call(symObj) !== '[object Symbol]') { return false; } + + // temp disabled per https://github.com/ljharb/object.assign/issues/17 + // if (sym instanceof Symbol) { return false; } + // temp disabled per https://github.com/WebReflection/get-own-property-symbols/issues/4 + // if (!(symObj 
instanceof Symbol)) { return false; } + + // if (typeof Symbol.prototype.toString !== 'function') { return false; } + // if (String(sym) !== Symbol.prototype.toString.call(sym)) { return false; } + + var symVal = 42; + obj[sym] = symVal; + for (sym in obj) { return false; } // eslint-disable-line no-restricted-syntax, no-unreachable-loop + if (typeof Object.keys === 'function' && Object.keys(obj).length !== 0) { return false; } + + if (typeof Object.getOwnPropertyNames === 'function' && Object.getOwnPropertyNames(obj).length !== 0) { return false; } + + var syms = Object.getOwnPropertySymbols(obj); + if (syms.length !== 1 || syms[0] !== sym) { return false; } + + if (!Object.prototype.propertyIsEnumerable.call(obj, sym)) { return false; } + + if (typeof Object.getOwnPropertyDescriptor === 'function') { + var descriptor = Object.getOwnPropertyDescriptor(obj, sym); + if (descriptor.value !== symVal || descriptor.enumerable !== true) { return false; } + } + + return true; +}; diff --git a/node_modules/has-symbols/test/index.js b/node_modules/has-symbols/test/index.js new file mode 100644 index 0000000..352129c --- /dev/null +++ b/node_modules/has-symbols/test/index.js @@ -0,0 +1,22 @@ +'use strict'; + +var test = require('tape'); +var hasSymbols = require('../'); +var runSymbolTests = require('./tests'); + +test('interface', function (t) { + t.equal(typeof hasSymbols, 'function', 'is a function'); + t.equal(typeof hasSymbols(), 'boolean', 'returns a boolean'); + t.end(); +}); + +test('Symbols are supported', { skip: !hasSymbols() }, function (t) { + runSymbolTests(t); + t.end(); +}); + +test('Symbols are not supported', { skip: hasSymbols() }, function (t) { + t.equal(typeof Symbol, 'undefined', 'global Symbol is undefined'); + t.equal(typeof Object.getOwnPropertySymbols, 'undefined', 'Object.getOwnPropertySymbols does not exist'); + t.end(); +}); diff --git a/node_modules/has-symbols/test/shams/core-js.js b/node_modules/has-symbols/test/shams/core-js.js new file mode 100644 index 0000000..df5365c --- /dev/null +++ b/node_modules/has-symbols/test/shams/core-js.js @@ -0,0 +1,28 @@ +'use strict'; + +var test = require('tape'); + +if (typeof Symbol === 'function' && typeof Symbol() === 'symbol') { + test('has native Symbol support', function (t) { + t.equal(typeof Symbol, 'function'); + t.equal(typeof Symbol(), 'symbol'); + t.end(); + }); + return; +} + +var hasSymbols = require('../../shams'); + +test('polyfilled Symbols', function (t) { + /* eslint-disable global-require */ + t.equal(hasSymbols(), false, 'hasSymbols is false before polyfilling'); + require('core-js/fn/symbol'); + require('core-js/fn/symbol/to-string-tag'); + + require('../tests')(t); + + var hasSymbolsAfter = hasSymbols(); + t.equal(hasSymbolsAfter, true, 'hasSymbols is true after polyfilling'); + /* eslint-enable global-require */ + t.end(); +}); diff --git a/node_modules/has-symbols/test/shams/get-own-property-symbols.js b/node_modules/has-symbols/test/shams/get-own-property-symbols.js new file mode 100644 index 0000000..9191b24 --- /dev/null +++ b/node_modules/has-symbols/test/shams/get-own-property-symbols.js @@ -0,0 +1,28 @@ +'use strict'; + +var test = require('tape'); + +if (typeof Symbol === 'function' && typeof Symbol() === 'symbol') { + test('has native Symbol support', function (t) { + t.equal(typeof Symbol, 'function'); + t.equal(typeof Symbol(), 'symbol'); + t.end(); + }); + return; +} + +var hasSymbols = require('../../shams'); + +test('polyfilled Symbols', function (t) { + /* eslint-disable global-require */ + 
t.equal(hasSymbols(), false, 'hasSymbols is false before polyfilling'); + + require('get-own-property-symbols'); + + require('../tests')(t); + + var hasSymbolsAfter = hasSymbols(); + t.equal(hasSymbolsAfter, true, 'hasSymbols is true after polyfilling'); + /* eslint-enable global-require */ + t.end(); +}); diff --git a/node_modules/has-symbols/test/tests.js b/node_modules/has-symbols/test/tests.js new file mode 100644 index 0000000..89edd12 --- /dev/null +++ b/node_modules/has-symbols/test/tests.js @@ -0,0 +1,56 @@ +'use strict'; + +// eslint-disable-next-line consistent-return +module.exports = function runSymbolTests(t) { + t.equal(typeof Symbol, 'function', 'global Symbol is a function'); + + if (typeof Symbol !== 'function') { return false; } + + t.notEqual(Symbol(), Symbol(), 'two symbols are not equal'); + + /* + t.equal( + Symbol.prototype.toString.call(Symbol('foo')), + Symbol.prototype.toString.call(Symbol('foo')), + 'two symbols with the same description stringify the same' + ); + */ + + /* + var foo = Symbol('foo'); + + t.notEqual( + String(foo), + String(Symbol('bar')), + 'two symbols with different descriptions do not stringify the same' + ); + */ + + t.equal(typeof Symbol.prototype.toString, 'function', 'Symbol#toString is a function'); + // t.equal(String(foo), Symbol.prototype.toString.call(foo), 'Symbol#toString equals String of the same symbol'); + + t.equal(typeof Object.getOwnPropertySymbols, 'function', 'Object.getOwnPropertySymbols is a function'); + + var obj = {}; + var sym = Symbol('test'); + var symObj = Object(sym); + t.notEqual(typeof sym, 'string', 'Symbol is not a string'); + t.equal(Object.prototype.toString.call(sym), '[object Symbol]', 'symbol primitive Object#toStrings properly'); + t.equal(Object.prototype.toString.call(symObj), '[object Symbol]', 'symbol primitive Object#toStrings properly'); + + var symVal = 42; + obj[sym] = symVal; + // eslint-disable-next-line no-restricted-syntax + for (sym in obj) { t.fail('symbol property key was found in for..in of object'); } + + t.deepEqual(Object.keys(obj), [], 'no enumerable own keys on symbol-valued object'); + t.deepEqual(Object.getOwnPropertyNames(obj), [], 'no own names on symbol-valued object'); + t.deepEqual(Object.getOwnPropertySymbols(obj), [sym], 'one own symbol on symbol-valued object'); + t.equal(Object.prototype.propertyIsEnumerable.call(obj, sym), true, 'symbol is enumerable'); + t.deepEqual(Object.getOwnPropertyDescriptor(obj, sym), { + configurable: true, + enumerable: true, + value: 42, + writable: true + }, 'property descriptor is correct'); +}; diff --git a/node_modules/has/LICENSE-MIT b/node_modules/has/LICENSE-MIT new file mode 100644 index 0000000..ae7014d --- /dev/null +++ b/node_modules/has/LICENSE-MIT @@ -0,0 +1,22 @@ +Copyright (c) 2013 Thiago de Arruda + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/has/README.md b/node_modules/has/README.md new file mode 100644 index 0000000..635e3a4 --- /dev/null +++ b/node_modules/has/README.md @@ -0,0 +1,18 @@ +# has + +> Object.prototype.hasOwnProperty.call shortcut + +## Installation + +```sh +npm install --save has +``` + +## Usage + +```js +var has = require('has'); + +has({}, 'hasOwnProperty'); // false +has(Object.prototype, 'hasOwnProperty'); // true +``` diff --git a/node_modules/has/package.json b/node_modules/has/package.json new file mode 100644 index 0000000..7c4592f --- /dev/null +++ b/node_modules/has/package.json @@ -0,0 +1,48 @@ +{ + "name": "has", + "description": "Object.prototype.hasOwnProperty.call shortcut", + "version": "1.0.3", + "homepage": "https://github.com/tarruda/has", + "author": { + "name": "Thiago de Arruda", + "email": "tpadilha84@gmail.com" + }, + "contributors": [ + { + "name": "Jordan Harband", + "email": "ljharb@gmail.com", + "url": "http://ljharb.codes" + } + ], + "repository": { + "type": "git", + "url": "git://github.com/tarruda/has.git" + }, + "bugs": { + "url": "https://github.com/tarruda/has/issues" + }, + "license": "MIT", + "licenses": [ + { + "type": "MIT", + "url": "https://github.com/tarruda/has/blob/master/LICENSE-MIT" + } + ], + "main": "./src", + "dependencies": { + "function-bind": "^1.1.1" + }, + "devDependencies": { + "@ljharb/eslint-config": "^12.2.1", + "eslint": "^4.19.1", + "tape": "^4.9.0" + }, + "engines": { + "node": ">= 0.4.0" + }, + "scripts": { + "lint": "eslint .", + "pretest": "npm run lint", + "test": "tape test" + } +} diff --git a/node_modules/has/src/index.js b/node_modules/has/src/index.js new file mode 100644 index 0000000..dd92dd9 --- /dev/null +++ b/node_modules/has/src/index.js @@ -0,0 +1,5 @@ +'use strict'; + +var bind = require('function-bind'); + +module.exports = bind.call(Function.call, Object.prototype.hasOwnProperty); diff --git a/node_modules/has/test/index.js b/node_modules/has/test/index.js new file mode 100644 index 0000000..43d480b --- /dev/null +++ b/node_modules/has/test/index.js @@ -0,0 +1,10 @@ +'use strict'; + +var test = require('tape'); +var has = require('../'); + +test('has', function (t) { + t.equal(has({}, 'hasOwnProperty'), false, 'object literal does not have own property "hasOwnProperty"'); + t.equal(has(Object.prototype, 'hasOwnProperty'), true, 'Object.prototype has own property "hasOwnProperty"'); + t.end(); +}); diff --git a/node_modules/hexoid/dist/index.js b/node_modules/hexoid/dist/index.js new file mode 100644 index 0000000..a867b57 --- /dev/null +++ b/node_modules/hexoid/dist/index.js @@ -0,0 +1,15 @@ +var IDX=256, HEX=[]; +while (IDX--) HEX[IDX] = (IDX + 256).toString(16).substring(1); + +module.exports = function (len) { + len = len || 16; + var str='', num=0; + return function () { + if (!str || num === 256) { + str=''; num=(1+len)/2 | 0; + while (num--) str += HEX[256 * Math.random() | 0]; + str = str.substring(num=0, len-2); + } + return str + HEX[num++]; + }; +} diff --git a/node_modules/hexoid/dist/index.min.js 
b/node_modules/hexoid/dist/index.min.js new file mode 100644 index 0000000..4df2f2d --- /dev/null +++ b/node_modules/hexoid/dist/index.min.js @@ -0,0 +1 @@ +!function(n,e){"object"==typeof exports&&"undefined"!=typeof module?module.exports=e():"function"==typeof define&&define.amd?define(e):n.hexoid=e()}(this,(function(){for(var n=256,e=[];n--;)e[n]=(n+256).toString(16).substring(1);return function(n){n=n||16;var t="",o=0;return function(){if(!t||256===o){for(t="",o=(1+n)/2|0;o--;)t+=e[256*Math.random()|0];t=t.substring(o=0,n-2)}return t+e[o++]}}})); \ No newline at end of file diff --git a/node_modules/hexoid/dist/index.mjs b/node_modules/hexoid/dist/index.mjs new file mode 100644 index 0000000..b5e3fea --- /dev/null +++ b/node_modules/hexoid/dist/index.mjs @@ -0,0 +1,15 @@ +var IDX=256, HEX=[]; +while (IDX--) HEX[IDX] = (IDX + 256).toString(16).substring(1); + +export default function (len) { + len = len || 16; + var str='', num=0; + return function () { + if (!str || num === 256) { + str=''; num=(1+len)/2 | 0; + while (num--) str += HEX[256 * Math.random() | 0]; + str = str.substring(num=0, len-2); + } + return str + HEX[num++]; + }; +} diff --git a/node_modules/hexoid/hexoid.d.ts b/node_modules/hexoid/hexoid.d.ts new file mode 100644 index 0000000..9b03069 --- /dev/null +++ b/node_modules/hexoid/hexoid.d.ts @@ -0,0 +1 @@ +export default function (len?: number): () => string; diff --git a/node_modules/hexoid/license b/node_modules/hexoid/license new file mode 100644 index 0000000..fa6089f --- /dev/null +++ b/node_modules/hexoid/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Luke Edwards (lukeed.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
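The `hexoid` implementation added above is terse: it precomputes the 256 two-character hex pairs, builds a random prefix of `len - 2` hex characters, and appends an incrementing pair (`00` through `ff`) on each call, regenerating the prefix every 256 IDs. A small sketch of that observable behaviour, assuming only the `hexoid` API shown in this patch (the sample ID values are the README's own and will differ per run):

```js
// Minimal sketch of hexoid's behaviour: consecutive IDs share a random prefix
// and differ only in the trailing, incrementing hex pair.
const hexoid = require('hexoid');

const toID = hexoid(16);   // 16 is also the default length
const a = toID();          // e.g. '52032fedb951da00'
const b = toID();          // e.g. '52032fedb951da01'

console.log(a.length);                          // 16
console.log(a.slice(0, 14) === b.slice(0, 14)); // true: same random prefix
console.log(a.slice(14), b.slice(14));          // consecutive pairs, e.g. '00' '01'
```

Because the prefix comes from `Math.random()`, generation is fast but, as the README notes below, not cryptographically secure.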
diff --git a/node_modules/hexoid/package.json b/node_modules/hexoid/package.json new file mode 100644 index 0000000..df7097c --- /dev/null +++ b/node_modules/hexoid/package.json @@ -0,0 +1,41 @@ +{ + "name": "hexoid", + "version": "1.0.0", + "repository": "lukeed/hexoid", + "description": "A tiny (190B) and extremely fast utility to generate random IDs of fixed length", + "unpkg": "dist/index.min.js", + "module": "dist/index.mjs", + "main": "dist/index.js", + "types": "hexoid.d.ts", + "license": "MIT", + "author": { + "name": "Luke Edwards", + "email": "luke.edwards05@gmail.com", + "url": "https://lukeed.com" + }, + "engines": { + "node": ">=8" + }, + "scripts": { + "build": "bundt", + "pretest": "npm run build", + "test": "tape -r esm test/*.js | tap-spec" + }, + "files": [ + "*.d.ts", + "dist" + ], + "keywords": [ + "id", + "uid", + "uuid", + "random", + "generate" + ], + "devDependencies": { + "bundt": "1.0.0", + "esm": "3.2.25", + "tap-spec": "5.0.0", + "tape": "4.9.1" + } +} diff --git a/node_modules/hexoid/readme.md b/node_modules/hexoid/readme.md new file mode 100644 index 0000000..b5a48fa --- /dev/null +++ b/node_modules/hexoid/readme.md @@ -0,0 +1,119 @@ +# hexoid [![build status](https://badgen.net/github/status/lukeed/hexoid)](https://github.com/lukeed/hexoid/actions) [![codecov](https://badgen.now.sh/codecov/c/github/lukeed/hexoid)](https://codecov.io/gh/lukeed/hexoid) + +> A tiny (190B) and [extremely fast](#benchmarks) utility to generate random IDs of fixed length + +_**Hexadecimal object IDs.** Available for Node.js and the browser._
Generate randomized output strings of fixed length using lowercased hexadecimal pairs. + +> **Notice:** Please note that this is not a cryptographically secure (CSPRNG) generator. + +Additionally, this module is delivered as: + +* **CommonJS**: [`dist/index.js`](https://unpkg.com/hexoid/dist/index.js) +* **ES Module**: [`dist/index.mjs`](https://unpkg.com/hexoid/dist/index.mjs) +* **UMD**: [`dist/index.min.js`](https://unpkg.com/hexoid/dist/index.min.js) + +## Install + +``` +$ npm install --save hexoid +``` + + +## Usage + +```js +import hexoid from 'hexoid'; + +const toID = hexoid(); +// length = 16 (default) +toID(); //=> '52032fedb951da00' +toID(); //=> '52032fedb951da01' +toID(); //=> '52032fedb951da02' + +// customize length +hexoid(25)(); //=> '065359875047c63a037200e00' +hexoid(32)(); //=> 'ca8e4aec7f139d94fcab9cab2eb89f00' +hexoid(48)(); //=> 'c19a4deb5cdeca68534930e67bd0a2f4ed45988724d8d200' +``` + + +## API + +### hexoid(length?) +Returns: `() => string` + +Creates the function that will generate strings. + +#### length +Type: `Number`
+Default: `16` + +Then length of the output string. + +> **Important:** Your risk of collisions decreases with longer strings!
Please be aware of the [Birthday Problem](https://betterexplained.com/articles/understanding-the-birthday-paradox/)! You may need more combinations than you'd expect. + +The **maximum combinations** are known given the following formula: + +```js +const combos = 256 ** (len/2); +``` + + +## Benchmarks + +> Running on Node.js v10.13.0 + +``` +Validation (length = 16): + ✔ hashids/fixed (example: "LkQWjnegYbwZ1p0G") + ✔ nanoid/non-secure (example: "sLlVL5X3M5k2fo58") + ✔ uid (example: "3d0ckwcnjiuu91hj") + ✔ hexoid (example: "de96b62e663ef300") +Benchmark (length = 16): + hashids/fixed x 349,462 ops/sec ±0.28% (93 runs sampled) + nanoid/non-secure x 3,337,573 ops/sec ±0.28% (96 runs sampled) + uid x 3,553,482 ops/sec ±0.51% (90 runs sampled) + hexoid x 81,081,364 ops/sec ±0.18% (96 runs sampled) + + +Validation (length = 25): + ✔ cuid (example: "ck7lj5hbf00000v7c9gox6yfh") + ✔ hashids/fixed (example: "r9JOyLkQWjnegYbwZ1p0GDXNm") + ✔ nanoid/non-secure (example: "hI202PVPJQRNrP6o6z4pXz4m0") + ✔ uid (example: "9904e9w130buxaw7n8358mn2f") + ✔ hexoid (example: "01dfab2c14e37768eb7605a00") +Benchmark (length = 25): + cuid x 161,636 ops/sec ±1.36% (89 runs sampled) + hashids/fixed x 335,439 ops/sec ±2.40% (94 runs sampled) + nanoid/non-secure x 2,254,073 ops/sec ±0.23% (96 runs sampled) + uid x 2,483,275 ops/sec ±0.38% (95 runs sampled) + hexoid x 75,715,843 ops/sec ±0.27% (95 runs sampled) + + +Validation (length = 36): + ✔ uuid/v1 (example: "c3dc1ed0-629a-11ea-8bfb-8ffc49585f54") + ✔ uuid/v4 (example: "8c89f0ca-f01e-4c84-bd71-e645bab84552") + ✔ hashids/fixed (example: "EVq3Pr9JOyLkQWjnegYbwZ1p0GDXNmRBlAxg") + ✔ @lukeed/uuid (example: "069ad676-48f9-4452-b11d-f20c3872dc1f") + ✔ nanoid/non-secure (example: "jAZjrcDmHH6P1rT9EFdCdHUpF440SjAKwb2A") + ✔ uid (example: "5mhi30lgy5d0glmuy81llelbzdko518ow1sx") + ✔ hexoid (example: "615209331f0b4630acf69999ccfc95a23200") +Benchmark (length = 36): + uuid/v1 x 1,487,947 ops/sec ±0.18% (98 runs sampled) + uuid/v4 x 334,868 ops/sec ±1.08% (90 runs sampled) + @lukeed/uuid x 6,352,445 ops/sec ±0.27% (91 runs sampled) + hashids/fixed x 322,914 ops/sec ±0.27% (93 runs sampled) + nanoid/non-secure x 1,592,708 ops/sec ±0.25% (91 runs sampled) + uid x 1,789,492 ops/sec ±0.29% (92 runs sampled) + hexoid x 71,746,692 ops/sec ±0.29% (93 runs sampled) +``` + +## Related + +- [uid](https://github.com/lukeed/uid) - A smaller (134B) but slower variant of this module with a different API +- [@lukeed/uuid](https://github.com/lukeed/uuid) - A tiny (230B), fast, and cryptographically secure UUID (V4) generator for Node and the browser + + +## License + +MIT © [Luke Edwards](https://lukeed.com) diff --git a/node_modules/indent-string/index.d.ts b/node_modules/indent-string/index.d.ts new file mode 100644 index 0000000..1185231 --- /dev/null +++ b/node_modules/indent-string/index.d.ts @@ -0,0 +1,42 @@ +declare namespace indentString { + interface Options { + /** + The string to use for the indent. + + @default ' ' + */ + readonly indent?: string; + + /** + Also indent empty lines. + + @default false + */ + readonly includeEmptyLines?: boolean; + } +} + +/** +Indent each line in a string. + +@param string - The string to indent. +@param count - How many times you want `options.indent` repeated. Default: `1`. 
+ +@example +``` +import indentString = require('indent-string'); + +indentString('Unicorns\nRainbows', 4); +//=> ' Unicorns\n Rainbows' + +indentString('Unicorns\nRainbows', 4, {indent: '♥'}); +//=> '♥♥♥♥Unicorns\n♥♥♥♥Rainbows' +``` +*/ +declare function indentString( + string: string, + count?: number, + options?: indentString.Options +): string; + +export = indentString; diff --git a/node_modules/indent-string/index.js b/node_modules/indent-string/index.js new file mode 100644 index 0000000..e1ab804 --- /dev/null +++ b/node_modules/indent-string/index.js @@ -0,0 +1,35 @@ +'use strict'; + +module.exports = (string, count = 1, options) => { + options = { + indent: ' ', + includeEmptyLines: false, + ...options + }; + + if (typeof string !== 'string') { + throw new TypeError( + `Expected \`input\` to be a \`string\`, got \`${typeof string}\`` + ); + } + + if (typeof count !== 'number') { + throw new TypeError( + `Expected \`count\` to be a \`number\`, got \`${typeof count}\`` + ); + } + + if (typeof options.indent !== 'string') { + throw new TypeError( + `Expected \`options.indent\` to be a \`string\`, got \`${typeof options.indent}\`` + ); + } + + if (count === 0) { + return string; + } + + const regex = options.includeEmptyLines ? /^/gm : /^(?!\s*$)/gm; + + return string.replace(regex, options.indent.repeat(count)); +}; diff --git a/node_modules/indent-string/license b/node_modules/indent-string/license new file mode 100644 index 0000000..e7af2f7 --- /dev/null +++ b/node_modules/indent-string/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
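A minimal usage sketch of the indent-string module vendored above (not part of the vendored files), showing the default indent and the `includeEmptyLines` option implemented in index.js:

```js
// Sketch only: exercising node_modules/indent-string/index.js added above.
const indentString = require('indent-string');

// Default indent is a single space, repeated `count` times per line.
console.log(indentString('Unicorns\nRainbows', 4));
//=> '    Unicorns\n    Rainbows'

// A custom indent string, applied to empty lines as well.
console.log(indentString('a\n\nb', 2, { indent: '\t', includeEmptyLines: true }));
//=> '\t\ta\n\t\t\n\t\tb'
```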
diff --git a/node_modules/indent-string/package.json b/node_modules/indent-string/package.json new file mode 100644 index 0000000..497bb83 --- /dev/null +++ b/node_modules/indent-string/package.json @@ -0,0 +1,37 @@ +{ + "name": "indent-string", + "version": "4.0.0", + "description": "Indent each line in a string", + "license": "MIT", + "repository": "sindresorhus/indent-string", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=8" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "indent", + "string", + "pad", + "align", + "line", + "text", + "each", + "every" + ], + "devDependencies": { + "ava": "^1.4.1", + "tsd": "^0.7.2", + "xo": "^0.24.0" + } +} diff --git a/node_modules/indent-string/readme.md b/node_modules/indent-string/readme.md new file mode 100644 index 0000000..49967de --- /dev/null +++ b/node_modules/indent-string/readme.md @@ -0,0 +1,70 @@ +# indent-string [![Build Status](https://travis-ci.org/sindresorhus/indent-string.svg?branch=master)](https://travis-ci.org/sindresorhus/indent-string) + +> Indent each line in a string + + +## Install + +``` +$ npm install indent-string +``` + + +## Usage + +```js +const indentString = require('indent-string'); + +indentString('Unicorns\nRainbows', 4); +//=> ' Unicorns\n Rainbows' + +indentString('Unicorns\nRainbows', 4, {indent: '♥'}); +//=> '♥♥♥♥Unicorns\n♥♥♥♥Rainbows' +``` + + +## API + +### indentString(string, [count], [options]) + +#### string + +Type: `string` + +The string to indent. + +#### count + +Type: `number`
+Default: `1` + +How many times you want `options.indent` repeated. + +#### options + +Type: `object` + +##### indent + +Type: `string`
+Default: `' '` + +The string to use for the indent. + +##### includeEmptyLines + +Type: `boolean`
+Default: `false` + +Also indent empty lines. + + +## Related + +- [indent-string-cli](https://github.com/sindresorhus/indent-string-cli) - CLI for this module +- [strip-indent](https://github.com/sindresorhus/strip-indent) - Strip leading whitespace from every line in a string + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/inherits/LICENSE b/node_modules/inherits/LICENSE new file mode 100644 index 0000000..dea3013 --- /dev/null +++ b/node_modules/inherits/LICENSE @@ -0,0 +1,16 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. + diff --git a/node_modules/inherits/README.md b/node_modules/inherits/README.md new file mode 100644 index 0000000..b1c5665 --- /dev/null +++ b/node_modules/inherits/README.md @@ -0,0 +1,42 @@ +Browser-friendly inheritance fully compatible with standard node.js +[inherits](http://nodejs.org/api/util.html#util_util_inherits_constructor_superconstructor). + +This package exports standard `inherits` from node.js `util` module in +node environment, but also provides alternative browser-friendly +implementation through [browser +field](https://gist.github.com/shtylman/4339901). Alternative +implementation is a literal copy of standard one located in standalone +module to avoid requiring of `util`. It also has a shim for old +browsers with no `Object.create` support. + +While keeping you sure you are using standard `inherits` +implementation in node.js environment, it allows bundlers such as +[browserify](https://github.com/substack/node-browserify) to not +include full `util` package to your client code if all you need is +just `inherits` function. It worth, because browser shim for `util` +package is large and `inherits` is often the single function you need +from it. + +It's recommended to use this package instead of +`require('util').inherits` for any code that has chances to be used +not only in node.js but in browser too. + +## usage + +```js +var inherits = require('inherits'); +// then use exactly as the standard one +``` + +## note on version ~1.0 + +Version ~1.0 had completely different motivation and is not compatible +neither with 2.0 nor with standard node.js `inherits`. 
+ +If you are using version ~1.0 and planning to switch to ~2.0, be +careful: + +* new version uses `super_` instead of `super` for referencing + superclass +* new version overwrites current prototype while old one preserves any + existing fields on it diff --git a/node_modules/inherits/inherits.js b/node_modules/inherits/inherits.js new file mode 100644 index 0000000..f71f2d9 --- /dev/null +++ b/node_modules/inherits/inherits.js @@ -0,0 +1,9 @@ +try { + var util = require('util'); + /* istanbul ignore next */ + if (typeof util.inherits !== 'function') throw ''; + module.exports = util.inherits; +} catch (e) { + /* istanbul ignore next */ + module.exports = require('./inherits_browser.js'); +} diff --git a/node_modules/inherits/inherits_browser.js b/node_modules/inherits/inherits_browser.js new file mode 100644 index 0000000..86bbb3d --- /dev/null +++ b/node_modules/inherits/inherits_browser.js @@ -0,0 +1,27 @@ +if (typeof Object.create === 'function') { + // implementation from standard node.js 'util' module + module.exports = function inherits(ctor, superCtor) { + if (superCtor) { + ctor.super_ = superCtor + ctor.prototype = Object.create(superCtor.prototype, { + constructor: { + value: ctor, + enumerable: false, + writable: true, + configurable: true + } + }) + } + }; +} else { + // old school shim for old browsers + module.exports = function inherits(ctor, superCtor) { + if (superCtor) { + ctor.super_ = superCtor + var TempCtor = function () {} + TempCtor.prototype = superCtor.prototype + ctor.prototype = new TempCtor() + ctor.prototype.constructor = ctor + } + } +} diff --git a/node_modules/inherits/package.json b/node_modules/inherits/package.json new file mode 100644 index 0000000..37b4366 --- /dev/null +++ b/node_modules/inherits/package.json @@ -0,0 +1,29 @@ +{ + "name": "inherits", + "description": "Browser-friendly inheritance fully compatible with standard node.js inherits()", + "version": "2.0.4", + "keywords": [ + "inheritance", + "class", + "klass", + "oop", + "object-oriented", + "inherits", + "browser", + "browserify" + ], + "main": "./inherits.js", + "browser": "./inherits_browser.js", + "repository": "git://github.com/isaacs/inherits", + "license": "ISC", + "scripts": { + "test": "tap" + }, + "devDependencies": { + "tap": "^14.2.4" + }, + "files": [ + "inherits.js", + "inherits_browser.js" + ] +} diff --git a/node_modules/lru-cache/LICENSE b/node_modules/lru-cache/LICENSE new file mode 100644 index 0000000..19129e3 --- /dev/null +++ b/node_modules/lru-cache/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
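A minimal usage sketch of the inherits module vendored in the hunks above (not part of the vendored files), following the standard `util.inherits` pattern its README describes; `EventEmitter` is used here only as a convenient built-in superclass:

```js
// Sketch only: typical constructor inheritance with node_modules/inherits.
const inherits = require('inherits');
const EventEmitter = require('events');

function Stream() {
  EventEmitter.call(this); // run the superclass constructor on `this`
}
inherits(Stream, EventEmitter); // sets up the prototype chain and Stream.super_

const s = new Stream();
console.log(s instanceof EventEmitter);      // true
console.log(Stream.super_ === EventEmitter); // true
s.on('ping', () => console.log('pong'));
s.emit('ping');                              // "pong"
```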
diff --git a/node_modules/lru-cache/README.md b/node_modules/lru-cache/README.md new file mode 100644 index 0000000..435dfeb --- /dev/null +++ b/node_modules/lru-cache/README.md @@ -0,0 +1,166 @@ +# lru cache + +A cache object that deletes the least-recently-used items. + +[![Build Status](https://travis-ci.org/isaacs/node-lru-cache.svg?branch=master)](https://travis-ci.org/isaacs/node-lru-cache) [![Coverage Status](https://coveralls.io/repos/isaacs/node-lru-cache/badge.svg?service=github)](https://coveralls.io/github/isaacs/node-lru-cache) + +## Installation: + +```javascript +npm install lru-cache --save +``` + +## Usage: + +```javascript +var LRU = require("lru-cache") + , options = { max: 500 + , length: function (n, key) { return n * 2 + key.length } + , dispose: function (key, n) { n.close() } + , maxAge: 1000 * 60 * 60 } + , cache = new LRU(options) + , otherCache = new LRU(50) // sets just the max size + +cache.set("key", "value") +cache.get("key") // "value" + +// non-string keys ARE fully supported +// but note that it must be THE SAME object, not +// just a JSON-equivalent object. +var someObject = { a: 1 } +cache.set(someObject, 'a value') +// Object keys are not toString()-ed +cache.set('[object Object]', 'a different value') +assert.equal(cache.get(someObject), 'a value') +// A similar object with same keys/values won't work, +// because it's a different object identity +assert.equal(cache.get({ a: 1 }), undefined) + +cache.reset() // empty the cache +``` + +If you put more stuff in it, then items will fall out. + +If you try to put an oversized thing in it, then it'll fall out right +away. + +## Options + +* `max` The maximum size of the cache, checked by applying the length + function to all values in the cache. Not setting this is kind of + silly, since that's the whole purpose of this lib, but it defaults + to `Infinity`. Setting it to a non-number or negative number will + throw a `TypeError`. Setting it to 0 makes it be `Infinity`. +* `maxAge` Maximum age in ms. Items are not pro-actively pruned out + as they age, but if you try to get an item that is too old, it'll + drop it and return undefined instead of giving it to you. + Setting this to a negative value will make everything seem old! + Setting it to a non-number will throw a `TypeError`. +* `length` Function that is used to calculate the length of stored + items. If you're storing strings or buffers, then you probably want + to do something like `function(n, key){return n.length}`. The default is + `function(){return 1}`, which is fine if you want to store `max` + like-sized things. The item is passed as the first argument, and + the key is passed as the second argumnet. +* `dispose` Function that is called on items when they are dropped + from the cache. This can be handy if you want to close file + descriptors or do other cleanup tasks when items are no longer + accessible. Called with `key, value`. It's called *before* + actually removing the item from the internal cache, so if you want + to immediately put it back in, you'll have to do that in a + `nextTick` or `setTimeout` callback or it won't do anything. +* `stale` By default, if you set a `maxAge`, it'll only actually pull + stale items out of the cache when you `get(key)`. (That is, it's + not pre-emptively doing a `setTimeout` or anything.) If you set + `stale:true`, it'll return the stale value before deleting it. If + you don't set this, then it'll return `undefined` when you try to + get a stale entry, as if it had already been deleted. 
+* `noDisposeOnSet` By default, if you set a `dispose()` method, then + it'll be called whenever a `set()` operation overwrites an existing + key. If you set this option, `dispose()` will only be called when a + key falls out of the cache, not when it is overwritten. +* `updateAgeOnGet` When using time-expiring entries with `maxAge`, + setting this to `true` will make each item's effective time update + to the current time whenever it is retrieved from cache, causing it + to not expire. (It can still fall out of cache based on recency of + use, of course.) + +## API + +* `set(key, value, maxAge)` +* `get(key) => value` + + Both of these will update the "recently used"-ness of the key. + They do what you think. `maxAge` is optional and overrides the + cache `maxAge` option if provided. + + If the key is not found, `get()` will return `undefined`. + + The key and val can be any value. + +* `peek(key)` + + Returns the key value (or `undefined` if not found) without + updating the "recently used"-ness of the key. + + (If you find yourself using this a lot, you *might* be using the + wrong sort of data structure, but there are some use cases where + it's handy.) + +* `del(key)` + + Deletes a key out of the cache. + +* `reset()` + + Clear the cache entirely, throwing away all values. + +* `has(key)` + + Check if a key is in the cache, without updating the recent-ness + or deleting it for being stale. + +* `forEach(function(value,key,cache), [thisp])` + + Just like `Array.prototype.forEach`. Iterates over all the keys + in the cache, in order of recent-ness. (Ie, more recently used + items are iterated over first.) + +* `rforEach(function(value,key,cache), [thisp])` + + The same as `cache.forEach(...)` but items are iterated over in + reverse order. (ie, less recently used items are iterated over + first.) + +* `keys()` + + Return an array of the keys in the cache. + +* `values()` + + Return an array of the values in the cache. + +* `length` + + Return total length of objects in cache taking into account + `length` options function. + +* `itemCount` + + Return total quantity of objects currently in cache. Note, that + `stale` (see options) items are returned as part of this item + count. + +* `dump()` + + Return an array of the cache entries ready for serialization and usage + with 'destinationCache.load(arr)`. + +* `load(cacheEntriesArray)` + + Loads another cache entries array, obtained with `sourceCache.dump()`, + into the cache. The destination cache is reset before loading new entries + +* `prune()` + + Manually iterates over the entire cache proactively pruning old entries diff --git a/node_modules/lru-cache/index.js b/node_modules/lru-cache/index.js new file mode 100644 index 0000000..573b6b8 --- /dev/null +++ b/node_modules/lru-cache/index.js @@ -0,0 +1,334 @@ +'use strict' + +// A linked list to keep track of recently-used-ness +const Yallist = require('yallist') + +const MAX = Symbol('max') +const LENGTH = Symbol('length') +const LENGTH_CALCULATOR = Symbol('lengthCalculator') +const ALLOW_STALE = Symbol('allowStale') +const MAX_AGE = Symbol('maxAge') +const DISPOSE = Symbol('dispose') +const NO_DISPOSE_ON_SET = Symbol('noDisposeOnSet') +const LRU_LIST = Symbol('lruList') +const CACHE = Symbol('cache') +const UPDATE_AGE_ON_GET = Symbol('updateAgeOnGet') + +const naiveLength = () => 1 + +// lruList is a yallist where the head is the youngest +// item, and the tail is the oldest. the list contains the Hit +// objects as the entries. +// Each Hit object has a reference to its Yallist.Node. 
This +// never changes. +// +// cache is a Map (or PseudoMap) that matches the keys to +// the Yallist.Node object. +class LRUCache { + constructor (options) { + if (typeof options === 'number') + options = { max: options } + + if (!options) + options = {} + + if (options.max && (typeof options.max !== 'number' || options.max < 0)) + throw new TypeError('max must be a non-negative number') + // Kind of weird to have a default max of Infinity, but oh well. + const max = this[MAX] = options.max || Infinity + + const lc = options.length || naiveLength + this[LENGTH_CALCULATOR] = (typeof lc !== 'function') ? naiveLength : lc + this[ALLOW_STALE] = options.stale || false + if (options.maxAge && typeof options.maxAge !== 'number') + throw new TypeError('maxAge must be a number') + this[MAX_AGE] = options.maxAge || 0 + this[DISPOSE] = options.dispose + this[NO_DISPOSE_ON_SET] = options.noDisposeOnSet || false + this[UPDATE_AGE_ON_GET] = options.updateAgeOnGet || false + this.reset() + } + + // resize the cache when the max changes. + set max (mL) { + if (typeof mL !== 'number' || mL < 0) + throw new TypeError('max must be a non-negative number') + + this[MAX] = mL || Infinity + trim(this) + } + get max () { + return this[MAX] + } + + set allowStale (allowStale) { + this[ALLOW_STALE] = !!allowStale + } + get allowStale () { + return this[ALLOW_STALE] + } + + set maxAge (mA) { + if (typeof mA !== 'number') + throw new TypeError('maxAge must be a non-negative number') + + this[MAX_AGE] = mA + trim(this) + } + get maxAge () { + return this[MAX_AGE] + } + + // resize the cache when the lengthCalculator changes. + set lengthCalculator (lC) { + if (typeof lC !== 'function') + lC = naiveLength + + if (lC !== this[LENGTH_CALCULATOR]) { + this[LENGTH_CALCULATOR] = lC + this[LENGTH] = 0 + this[LRU_LIST].forEach(hit => { + hit.length = this[LENGTH_CALCULATOR](hit.value, hit.key) + this[LENGTH] += hit.length + }) + } + trim(this) + } + get lengthCalculator () { return this[LENGTH_CALCULATOR] } + + get length () { return this[LENGTH] } + get itemCount () { return this[LRU_LIST].length } + + rforEach (fn, thisp) { + thisp = thisp || this + for (let walker = this[LRU_LIST].tail; walker !== null;) { + const prev = walker.prev + forEachStep(this, fn, walker, thisp) + walker = prev + } + } + + forEach (fn, thisp) { + thisp = thisp || this + for (let walker = this[LRU_LIST].head; walker !== null;) { + const next = walker.next + forEachStep(this, fn, walker, thisp) + walker = next + } + } + + keys () { + return this[LRU_LIST].toArray().map(k => k.key) + } + + values () { + return this[LRU_LIST].toArray().map(k => k.value) + } + + reset () { + if (this[DISPOSE] && + this[LRU_LIST] && + this[LRU_LIST].length) { + this[LRU_LIST].forEach(hit => this[DISPOSE](hit.key, hit.value)) + } + + this[CACHE] = new Map() // hash of items by key + this[LRU_LIST] = new Yallist() // list of items in order of use recency + this[LENGTH] = 0 // length of items in the list + } + + dump () { + return this[LRU_LIST].map(hit => + isStale(this, hit) ? false : { + k: hit.key, + v: hit.value, + e: hit.now + (hit.maxAge || 0) + }).toArray().filter(h => h) + } + + dumpLru () { + return this[LRU_LIST] + } + + set (key, value, maxAge) { + maxAge = maxAge || this[MAX_AGE] + + if (maxAge && typeof maxAge !== 'number') + throw new TypeError('maxAge must be a number') + + const now = maxAge ? 
Date.now() : 0 + const len = this[LENGTH_CALCULATOR](value, key) + + if (this[CACHE].has(key)) { + if (len > this[MAX]) { + del(this, this[CACHE].get(key)) + return false + } + + const node = this[CACHE].get(key) + const item = node.value + + // dispose of the old one before overwriting + // split out into 2 ifs for better coverage tracking + if (this[DISPOSE]) { + if (!this[NO_DISPOSE_ON_SET]) + this[DISPOSE](key, item.value) + } + + item.now = now + item.maxAge = maxAge + item.value = value + this[LENGTH] += len - item.length + item.length = len + this.get(key) + trim(this) + return true + } + + const hit = new Entry(key, value, len, now, maxAge) + + // oversized objects fall out of cache automatically. + if (hit.length > this[MAX]) { + if (this[DISPOSE]) + this[DISPOSE](key, value) + + return false + } + + this[LENGTH] += hit.length + this[LRU_LIST].unshift(hit) + this[CACHE].set(key, this[LRU_LIST].head) + trim(this) + return true + } + + has (key) { + if (!this[CACHE].has(key)) return false + const hit = this[CACHE].get(key).value + return !isStale(this, hit) + } + + get (key) { + return get(this, key, true) + } + + peek (key) { + return get(this, key, false) + } + + pop () { + const node = this[LRU_LIST].tail + if (!node) + return null + + del(this, node) + return node.value + } + + del (key) { + del(this, this[CACHE].get(key)) + } + + load (arr) { + // reset the cache + this.reset() + + const now = Date.now() + // A previous serialized cache has the most recent items first + for (let l = arr.length - 1; l >= 0; l--) { + const hit = arr[l] + const expiresAt = hit.e || 0 + if (expiresAt === 0) + // the item was created without expiration in a non aged cache + this.set(hit.k, hit.v) + else { + const maxAge = expiresAt - now + // dont add already expired items + if (maxAge > 0) { + this.set(hit.k, hit.v, maxAge) + } + } + } + } + + prune () { + this[CACHE].forEach((value, key) => get(this, key, false)) + } +} + +const get = (self, key, doUse) => { + const node = self[CACHE].get(key) + if (node) { + const hit = node.value + if (isStale(self, hit)) { + del(self, node) + if (!self[ALLOW_STALE]) + return undefined + } else { + if (doUse) { + if (self[UPDATE_AGE_ON_GET]) + node.value.now = Date.now() + self[LRU_LIST].unshiftNode(node) + } + } + return hit.value + } +} + +const isStale = (self, hit) => { + if (!hit || (!hit.maxAge && !self[MAX_AGE])) + return false + + const diff = Date.now() - hit.now + return hit.maxAge ? diff > hit.maxAge + : self[MAX_AGE] && (diff > self[MAX_AGE]) +} + +const trim = self => { + if (self[LENGTH] > self[MAX]) { + for (let walker = self[LRU_LIST].tail; + self[LENGTH] > self[MAX] && walker !== null;) { + // We know that we're about to delete this one, and also + // what the next least recently used key will be, so just + // go ahead and set it now. 
+ const prev = walker.prev + del(self, walker) + walker = prev + } + } +} + +const del = (self, node) => { + if (node) { + const hit = node.value + if (self[DISPOSE]) + self[DISPOSE](hit.key, hit.value) + + self[LENGTH] -= hit.length + self[CACHE].delete(hit.key) + self[LRU_LIST].removeNode(node) + } +} + +class Entry { + constructor (key, value, length, now, maxAge) { + this.key = key + this.value = value + this.length = length + this.now = now + this.maxAge = maxAge || 0 + } +} + +const forEachStep = (self, fn, node, thisp) => { + let hit = node.value + if (isStale(self, hit)) { + del(self, node) + if (!self[ALLOW_STALE]) + hit = undefined + } + if (hit) + fn.call(thisp, hit.value, hit.key, self) +} + +module.exports = LRUCache diff --git a/node_modules/lru-cache/package.json b/node_modules/lru-cache/package.json new file mode 100644 index 0000000..43b7502 --- /dev/null +++ b/node_modules/lru-cache/package.json @@ -0,0 +1,34 @@ +{ + "name": "lru-cache", + "description": "A cache object that deletes the least-recently-used items.", + "version": "6.0.0", + "author": "Isaac Z. Schlueter ", + "keywords": [ + "mru", + "lru", + "cache" + ], + "scripts": { + "test": "tap", + "snap": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags" + }, + "main": "index.js", + "repository": "git://github.com/isaacs/node-lru-cache.git", + "devDependencies": { + "benchmark": "^2.1.4", + "tap": "^14.10.7" + }, + "license": "ISC", + "dependencies": { + "yallist": "^4.0.0" + }, + "files": [ + "index.js" + ], + "engines": { + "node": ">=10" + } +} diff --git a/node_modules/methods/HISTORY.md b/node_modules/methods/HISTORY.md new file mode 100644 index 0000000..c0ecf07 --- /dev/null +++ b/node_modules/methods/HISTORY.md @@ -0,0 +1,29 @@ +1.1.2 / 2016-01-17 +================== + + * perf: enable strict mode + +1.1.1 / 2014-12-30 +================== + + * Improve `browserify` support + +1.1.0 / 2014-07-05 +================== + + * Add `CONNECT` method + +1.0.1 / 2014-06-02 +================== + + * Fix module to work with harmony transform + +1.0.0 / 2014-05-08 +================== + + * Add `PURGE` method + +0.1.0 / 2013-10-28 +================== + + * Add `http.METHODS` support diff --git a/node_modules/methods/LICENSE b/node_modules/methods/LICENSE new file mode 100644 index 0000000..220dc1a --- /dev/null +++ b/node_modules/methods/LICENSE @@ -0,0 +1,24 @@ +(The MIT License) + +Copyright (c) 2013-2014 TJ Holowaychuk +Copyright (c) 2015-2016 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + diff --git a/node_modules/methods/README.md b/node_modules/methods/README.md new file mode 100644 index 0000000..672a32b --- /dev/null +++ b/node_modules/methods/README.md @@ -0,0 +1,51 @@ +# Methods + +[![NPM Version][npm-image]][npm-url] +[![NPM Downloads][downloads-image]][downloads-url] +[![Node.js Version][node-version-image]][node-version-url] +[![Build Status][travis-image]][travis-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +HTTP verbs that Node.js core's HTTP parser supports. + +This module provides an export that is just like `http.METHODS` from Node.js core, +with the following differences: + + * All method names are lower-cased. + * Contains a fallback list of methods for Node.js versions that do not have a + `http.METHODS` export (0.10 and lower). + * Provides the fallback list when using tools like `browserify` without pulling + in the `http` shim module. + +## Install + +```bash +$ npm install methods +``` + +## API + +```js +var methods = require('methods') +``` + +### methods + +This is an array of lower-cased method names that Node.js supports. If Node.js +provides the `http.METHODS` export, then this is the same array lower-cased, +otherwise it is a snapshot of the verbs from Node.js 0.10. + +## License + +[MIT](LICENSE) + +[npm-image]: https://img.shields.io/npm/v/methods.svg?style=flat +[npm-url]: https://npmjs.org/package/methods +[node-version-image]: https://img.shields.io/node/v/methods.svg?style=flat +[node-version-url]: https://nodejs.org/en/download/ +[travis-image]: https://img.shields.io/travis/jshttp/methods.svg?style=flat +[travis-url]: https://travis-ci.org/jshttp/methods +[coveralls-image]: https://img.shields.io/coveralls/jshttp/methods.svg?style=flat +[coveralls-url]: https://coveralls.io/r/jshttp/methods?branch=master +[downloads-image]: https://img.shields.io/npm/dm/methods.svg?style=flat +[downloads-url]: https://npmjs.org/package/methods diff --git a/node_modules/methods/index.js b/node_modules/methods/index.js new file mode 100644 index 0000000..667a50b --- /dev/null +++ b/node_modules/methods/index.js @@ -0,0 +1,69 @@ +/*! + * methods + * Copyright(c) 2013-2014 TJ Holowaychuk + * Copyright(c) 2015-2016 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict'; + +/** + * Module dependencies. + * @private + */ + +var http = require('http'); + +/** + * Module exports. + * @public + */ + +module.exports = getCurrentNodeMethods() || getBasicNodeMethods(); + +/** + * Get the current Node.js methods. + * @private + */ + +function getCurrentNodeMethods() { + return http.METHODS && http.METHODS.map(function lowerCaseMethod(method) { + return method.toLowerCase(); + }); +} + +/** + * Get the "basic" Node.js methods, a snapshot from Node.js 0.10. 
+ * @private + */ + +function getBasicNodeMethods() { + return [ + 'get', + 'post', + 'put', + 'head', + 'delete', + 'options', + 'trace', + 'copy', + 'lock', + 'mkcol', + 'move', + 'purge', + 'propfind', + 'proppatch', + 'unlock', + 'report', + 'mkactivity', + 'checkout', + 'merge', + 'm-search', + 'notify', + 'subscribe', + 'unsubscribe', + 'patch', + 'search', + 'connect' + ]; +} diff --git a/node_modules/methods/package.json b/node_modules/methods/package.json new file mode 100644 index 0000000..c4ce6f0 --- /dev/null +++ b/node_modules/methods/package.json @@ -0,0 +1,36 @@ +{ + "name": "methods", + "description": "HTTP methods that node supports", + "version": "1.1.2", + "contributors": [ + "Douglas Christopher Wilson ", + "Jonathan Ong (http://jongleberry.com)", + "TJ Holowaychuk (http://tjholowaychuk.com)" + ], + "license": "MIT", + "repository": "jshttp/methods", + "devDependencies": { + "istanbul": "0.4.1", + "mocha": "1.21.5" + }, + "files": [ + "index.js", + "HISTORY.md", + "LICENSE" + ], + "engines": { + "node": ">= 0.6" + }, + "scripts": { + "test": "mocha --reporter spec --bail --check-leaks test/", + "test-cov": "istanbul cover node_modules/mocha/bin/_mocha -- --reporter dot --check-leaks test/", + "test-travis": "istanbul cover node_modules/mocha/bin/_mocha --report lcovonly -- --reporter spec --check-leaks test/" + }, + "browser": { + "http": false + }, + "keywords": [ + "http", + "methods" + ] +} diff --git a/node_modules/mime-db/HISTORY.md b/node_modules/mime-db/HISTORY.md new file mode 100644 index 0000000..7436f64 --- /dev/null +++ b/node_modules/mime-db/HISTORY.md @@ -0,0 +1,507 @@ +1.52.0 / 2022-02-21 +=================== + + * Add extensions from IANA for more `image/*` types + * Add extension `.asc` to `application/pgp-keys` + * Add extensions to various XML types + * Add new upstream MIME types + +1.51.0 / 2021-11-08 +=================== + + * Add new upstream MIME types + * Mark `image/vnd.microsoft.icon` as compressible + * Mark `image/vnd.ms-dds` as compressible + +1.50.0 / 2021-09-15 +=================== + + * Add deprecated iWorks mime types and extensions + * Add new upstream MIME types + +1.49.0 / 2021-07-26 +=================== + + * Add extension `.trig` to `application/trig` + * Add new upstream MIME types + +1.48.0 / 2021-05-30 +=================== + + * Add extension `.mvt` to `application/vnd.mapbox-vector-tile` + * Add new upstream MIME types + * Mark `text/yaml` as compressible + +1.47.0 / 2021-04-01 +=================== + + * Add new upstream MIME types + * Remove ambigious extensions from IANA for `application/*+xml` types + * Update primary extension to `.es` for `application/ecmascript` + +1.46.0 / 2021-02-13 +=================== + + * Add extension `.amr` to `audio/amr` + * Add extension `.m4s` to `video/iso.segment` + * Add extension `.opus` to `audio/ogg` + * Add new upstream MIME types + +1.45.0 / 2020-09-22 +=================== + + * Add `application/ubjson` with extension `.ubj` + * Add `image/avif` with extension `.avif` + * Add `image/ktx2` with extension `.ktx2` + * Add extension `.dbf` to `application/vnd.dbf` + * Add extension `.rar` to `application/vnd.rar` + * Add extension `.td` to `application/urc-targetdesc+xml` + * Add new upstream MIME types + * Fix extension of `application/vnd.apple.keynote` to be `.key` + +1.44.0 / 2020-04-22 +=================== + + * Add charsets from IANA + * Add extension `.cjs` to `application/node` + * Add new upstream MIME types + +1.43.0 / 2020-01-05 +=================== + + * Add 
`application/x-keepass2` with extension `.kdbx` + * Add extension `.mxmf` to `audio/mobile-xmf` + * Add extensions from IANA for `application/*+xml` types + * Add new upstream MIME types + +1.42.0 / 2019-09-25 +=================== + + * Add `image/vnd.ms-dds` with extension `.dds` + * Add new upstream MIME types + * Remove compressible from `multipart/mixed` + +1.41.0 / 2019-08-30 +=================== + + * Add new upstream MIME types + * Add `application/toml` with extension `.toml` + * Mark `font/ttf` as compressible + +1.40.0 / 2019-04-20 +=================== + + * Add extensions from IANA for `model/*` types + * Add `text/mdx` with extension `.mdx` + +1.39.0 / 2019-04-04 +=================== + + * Add extensions `.siv` and `.sieve` to `application/sieve` + * Add new upstream MIME types + +1.38.0 / 2019-02-04 +=================== + + * Add extension `.nq` to `application/n-quads` + * Add extension `.nt` to `application/n-triples` + * Add new upstream MIME types + * Mark `text/less` as compressible + +1.37.0 / 2018-10-19 +=================== + + * Add extensions to HEIC image types + * Add new upstream MIME types + +1.36.0 / 2018-08-20 +=================== + + * Add Apple file extensions from IANA + * Add extensions from IANA for `image/*` types + * Add new upstream MIME types + +1.35.0 / 2018-07-15 +=================== + + * Add extension `.owl` to `application/rdf+xml` + * Add new upstream MIME types + - Removes extension `.woff` from `application/font-woff` + +1.34.0 / 2018-06-03 +=================== + + * Add extension `.csl` to `application/vnd.citationstyles.style+xml` + * Add extension `.es` to `application/ecmascript` + * Add new upstream MIME types + * Add `UTF-8` as default charset for `text/turtle` + * Mark all XML-derived types as compressible + +1.33.0 / 2018-02-15 +=================== + + * Add extensions from IANA for `message/*` types + * Add new upstream MIME types + * Fix some incorrect OOXML types + * Remove `application/font-woff2` + +1.32.0 / 2017-11-29 +=================== + + * Add new upstream MIME types + * Update `text/hjson` to registered `application/hjson` + * Add `text/shex` with extension `.shex` + +1.31.0 / 2017-10-25 +=================== + + * Add `application/raml+yaml` with extension `.raml` + * Add `application/wasm` with extension `.wasm` + * Add new `font` type from IANA + * Add new upstream font extensions + * Add new upstream MIME types + * Add extensions for JPEG-2000 images + +1.30.0 / 2017-08-27 +=================== + + * Add `application/vnd.ms-outlook` + * Add `application/x-arj` + * Add extension `.mjs` to `application/javascript` + * Add glTF types and extensions + * Add new upstream MIME types + * Add `text/x-org` + * Add VirtualBox MIME types + * Fix `source` records for `video/*` types that are IANA + * Update `font/opentype` to registered `font/otf` + +1.29.0 / 2017-07-10 +=================== + + * Add `application/fido.trusted-apps+json` + * Add extension `.wadl` to `application/vnd.sun.wadl+xml` + * Add new upstream MIME types + * Add `UTF-8` as default charset for `text/css` + +1.28.0 / 2017-05-14 +=================== + + * Add new upstream MIME types + * Add extension `.gz` to `application/gzip` + * Update extensions `.md` and `.markdown` to be `text/markdown` + +1.27.0 / 2017-03-16 +=================== + + * Add new upstream MIME types + * Add `image/apng` with extension `.apng` + +1.26.0 / 2017-01-14 +=================== + + * Add new upstream MIME types + * Add extension `.geojson` to `application/geo+json` + +1.25.0 / 2016-11-11 
+=================== + + * Add new upstream MIME types + +1.24.0 / 2016-09-18 +=================== + + * Add `audio/mp3` + * Add new upstream MIME types + +1.23.0 / 2016-05-01 +=================== + + * Add new upstream MIME types + * Add extension `.3gpp` to `audio/3gpp` + +1.22.0 / 2016-02-15 +=================== + + * Add `text/slim` + * Add extension `.rng` to `application/xml` + * Add new upstream MIME types + * Fix extension of `application/dash+xml` to be `.mpd` + * Update primary extension to `.m4a` for `audio/mp4` + +1.21.0 / 2016-01-06 +=================== + + * Add Google document types + * Add new upstream MIME types + +1.20.0 / 2015-11-10 +=================== + + * Add `text/x-suse-ymp` + * Add new upstream MIME types + +1.19.0 / 2015-09-17 +=================== + + * Add `application/vnd.apple.pkpass` + * Add new upstream MIME types + +1.18.0 / 2015-09-03 +=================== + + * Add new upstream MIME types + +1.17.0 / 2015-08-13 +=================== + + * Add `application/x-msdos-program` + * Add `audio/g711-0` + * Add `image/vnd.mozilla.apng` + * Add extension `.exe` to `application/x-msdos-program` + +1.16.0 / 2015-07-29 +=================== + + * Add `application/vnd.uri-map` + +1.15.0 / 2015-07-13 +=================== + + * Add `application/x-httpd-php` + +1.14.0 / 2015-06-25 +=================== + + * Add `application/scim+json` + * Add `application/vnd.3gpp.ussd+xml` + * Add `application/vnd.biopax.rdf+xml` + * Add `text/x-processing` + +1.13.0 / 2015-06-07 +=================== + + * Add nginx as a source + * Add `application/x-cocoa` + * Add `application/x-java-archive-diff` + * Add `application/x-makeself` + * Add `application/x-perl` + * Add `application/x-pilot` + * Add `application/x-redhat-package-manager` + * Add `application/x-sea` + * Add `audio/x-m4a` + * Add `audio/x-realaudio` + * Add `image/x-jng` + * Add `text/mathml` + +1.12.0 / 2015-06-05 +=================== + + * Add `application/bdoc` + * Add `application/vnd.hyperdrive+json` + * Add `application/x-bdoc` + * Add extension `.rtf` to `text/rtf` + +1.11.0 / 2015-05-31 +=================== + + * Add `audio/wav` + * Add `audio/wave` + * Add extension `.litcoffee` to `text/coffeescript` + * Add extension `.sfd-hdstx` to `application/vnd.hydrostatix.sof-data` + * Add extension `.n-gage` to `application/vnd.nokia.n-gage.symbian.install` + +1.10.0 / 2015-05-19 +=================== + + * Add `application/vnd.balsamiq.bmpr` + * Add `application/vnd.microsoft.portable-executable` + * Add `application/x-ns-proxy-autoconfig` + +1.9.1 / 2015-04-19 +================== + + * Remove `.json` extension from `application/manifest+json` + - This is causing bugs downstream + +1.9.0 / 2015-04-19 +================== + + * Add `application/manifest+json` + * Add `application/vnd.micro+json` + * Add `image/vnd.zbrush.pcx` + * Add `image/x-ms-bmp` + +1.8.0 / 2015-03-13 +================== + + * Add `application/vnd.citationstyles.style+xml` + * Add `application/vnd.fastcopy-disk-image` + * Add `application/vnd.gov.sk.xmldatacontainer+xml` + * Add extension `.jsonld` to `application/ld+json` + +1.7.0 / 2015-02-08 +================== + + * Add `application/vnd.gerber` + * Add `application/vnd.msa-disk-image` + +1.6.1 / 2015-02-05 +================== + + * Community extensions ownership transferred from `node-mime` + +1.6.0 / 2015-01-29 +================== + + * Add `application/jose` + * Add `application/jose+json` + * Add `application/json-seq` + * Add `application/jwk+json` + * Add `application/jwk-set+json` + * Add 
`application/jwt` + * Add `application/rdap+json` + * Add `application/vnd.gov.sk.e-form+xml` + * Add `application/vnd.ims.imsccv1p3` + +1.5.0 / 2014-12-30 +================== + + * Add `application/vnd.oracle.resource+json` + * Fix various invalid MIME type entries + - `application/mbox+xml` + - `application/oscp-response` + - `application/vwg-multiplexed` + - `audio/g721` + +1.4.0 / 2014-12-21 +================== + + * Add `application/vnd.ims.imsccv1p2` + * Fix various invalid MIME type entries + - `application/vnd-acucobol` + - `application/vnd-curl` + - `application/vnd-dart` + - `application/vnd-dxr` + - `application/vnd-fdf` + - `application/vnd-mif` + - `application/vnd-sema` + - `application/vnd-wap-wmlc` + - `application/vnd.adobe.flash-movie` + - `application/vnd.dece-zip` + - `application/vnd.dvb_service` + - `application/vnd.micrografx-igx` + - `application/vnd.sealed-doc` + - `application/vnd.sealed-eml` + - `application/vnd.sealed-mht` + - `application/vnd.sealed-ppt` + - `application/vnd.sealed-tiff` + - `application/vnd.sealed-xls` + - `application/vnd.sealedmedia.softseal-html` + - `application/vnd.sealedmedia.softseal-pdf` + - `application/vnd.wap-slc` + - `application/vnd.wap-wbxml` + - `audio/vnd.sealedmedia.softseal-mpeg` + - `image/vnd-djvu` + - `image/vnd-svf` + - `image/vnd-wap-wbmp` + - `image/vnd.sealed-png` + - `image/vnd.sealedmedia.softseal-gif` + - `image/vnd.sealedmedia.softseal-jpg` + - `model/vnd-dwf` + - `model/vnd.parasolid.transmit-binary` + - `model/vnd.parasolid.transmit-text` + - `text/vnd-a` + - `text/vnd-curl` + - `text/vnd.wap-wml` + * Remove example template MIME types + - `application/example` + - `audio/example` + - `image/example` + - `message/example` + - `model/example` + - `multipart/example` + - `text/example` + - `video/example` + +1.3.1 / 2014-12-16 +================== + + * Fix missing extensions + - `application/json5` + - `text/hjson` + +1.3.0 / 2014-12-07 +================== + + * Add `application/a2l` + * Add `application/aml` + * Add `application/atfx` + * Add `application/atxml` + * Add `application/cdfx+xml` + * Add `application/dii` + * Add `application/json5` + * Add `application/lxf` + * Add `application/mf4` + * Add `application/vnd.apache.thrift.compact` + * Add `application/vnd.apache.thrift.json` + * Add `application/vnd.coffeescript` + * Add `application/vnd.enphase.envoy` + * Add `application/vnd.ims.imsccv1p1` + * Add `text/csv-schema` + * Add `text/hjson` + * Add `text/markdown` + * Add `text/yaml` + +1.2.0 / 2014-11-09 +================== + + * Add `application/cea` + * Add `application/dit` + * Add `application/vnd.gov.sk.e-form+zip` + * Add `application/vnd.tmd.mediaflex.api+xml` + * Type `application/epub+zip` is now IANA-registered + +1.1.2 / 2014-10-23 +================== + + * Rebuild database for `application/x-www-form-urlencoded` change + +1.1.1 / 2014-10-20 +================== + + * Mark `application/x-www-form-urlencoded` as compressible. 
+ +1.1.0 / 2014-09-28 +================== + + * Add `application/font-woff2` + +1.0.3 / 2014-09-25 +================== + + * Fix engine requirement in package + +1.0.2 / 2014-09-25 +================== + + * Add `application/coap-group+json` + * Add `application/dcd` + * Add `application/vnd.apache.thrift.binary` + * Add `image/vnd.tencent.tap` + * Mark all JSON-derived types as compressible + * Update `text/vtt` data + +1.0.1 / 2014-08-30 +================== + + * Fix extension ordering + +1.0.0 / 2014-08-30 +================== + + * Add `application/atf` + * Add `application/merge-patch+json` + * Add `multipart/x-mixed-replace` + * Add `source: 'apache'` metadata + * Add `source: 'iana'` metadata + * Remove badly-assumed charset data diff --git a/node_modules/mime-db/LICENSE b/node_modules/mime-db/LICENSE new file mode 100644 index 0000000..0751cb1 --- /dev/null +++ b/node_modules/mime-db/LICENSE @@ -0,0 +1,23 @@ +(The MIT License) + +Copyright (c) 2014 Jonathan Ong +Copyright (c) 2015-2022 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/mime-db/README.md b/node_modules/mime-db/README.md new file mode 100644 index 0000000..5a8fcfe --- /dev/null +++ b/node_modules/mime-db/README.md @@ -0,0 +1,100 @@ +# mime-db + +[![NPM Version][npm-version-image]][npm-url] +[![NPM Downloads][npm-downloads-image]][npm-url] +[![Node.js Version][node-image]][node-url] +[![Build Status][ci-image]][ci-url] +[![Coverage Status][coveralls-image]][coveralls-url] + +This is a large database of mime types and information about them. +It consists of a single, public JSON file and does not include any logic, +allowing it to remain as un-opinionated as possible with an API. +It aggregates data from the following sources: + +- http://www.iana.org/assignments/media-types/media-types.xhtml +- http://svn.apache.org/repos/asf/httpd/httpd/trunk/docs/conf/mime.types +- http://hg.nginx.org/nginx/raw-file/default/conf/mime.types + +## Installation + +```bash +npm install mime-db +``` + +### Database Download + +If you're crazy enough to use this in the browser, you can just grab the +JSON file using [jsDelivr](https://www.jsdelivr.com/). It is recommended to +replace `master` with [a release tag](https://github.com/jshttp/mime-db/tags) +as the JSON format may change in the future. 
+ +``` +https://cdn.jsdelivr.net/gh/jshttp/mime-db@master/db.json +``` + +## Usage + +```js +var db = require('mime-db') + +// grab data on .js files +var data = db['application/javascript'] +``` + +## Data Structure + +The JSON file is a map lookup for lowercased mime types. +Each mime type has the following properties: + +- `.source` - where the mime type is defined. + If not set, it's probably a custom media type. + - `apache` - [Apache common media types](http://svn.apache.org/repos/asf/httpd/httpd/trunk/docs/conf/mime.types) + - `iana` - [IANA-defined media types](http://www.iana.org/assignments/media-types/media-types.xhtml) + - `nginx` - [nginx media types](http://hg.nginx.org/nginx/raw-file/default/conf/mime.types) +- `.extensions[]` - known extensions associated with this mime type. +- `.compressible` - whether a file of this type can be gzipped. +- `.charset` - the default charset associated with this type, if any. + +If unknown, every property could be `undefined`. + +## Contributing + +To edit the database, only make PRs against `src/custom-types.json` or +`src/custom-suffix.json`. + +The `src/custom-types.json` file is a JSON object with the MIME type as the +keys and the values being an object with the following keys: + +- `compressible` - leave out if you don't know, otherwise `true`/`false` to + indicate whether the data represented by the type is typically compressible. +- `extensions` - include an array of file extensions that are associated with + the type. +- `notes` - human-readable notes about the type, typically what the type is. +- `sources` - include an array of URLs of where the MIME type and the associated + extensions are sourced from. This needs to be a [primary source](https://en.wikipedia.org/wiki/Primary_source); + links to type aggregating sites and Wikipedia are _not acceptable_. + +To update the build, run `npm run build`. + +### Adding Custom Media Types + +The best way to get new media types included in this library is to register +them with the IANA. The community registration procedure is outlined in +[RFC 6838 section 5](http://tools.ietf.org/html/rfc6838#section-5). Types +registered with the IANA are automatically pulled into this library. + +If that is not possible / feasible, they can be added directly here as a +"custom" type. To do this, it is required to have a primary source that +definitively lists the media type. If an extension is going to be listed as +associateed with this media type, the source must definitively link the +media type and extension as well. 
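A minimal lookup sketch against the mime-db table vendored below (not part of the vendored files; the example values follow the Data Structure section above and every property may be `undefined`):

```js
// Sketch only: reading the node_modules/mime-db lookup table.
const db = require('mime-db');

// Keys are lowercased MIME types.
const entry = db['application/json'];
console.log(entry.source);        // e.g. "iana"
console.log(entry.compressible);  // true for JSON-derived types
console.log(entry.extensions);    // e.g. ["json", "map"]

// Reverse lookup: which MIME types claim a given file extension?
const typesFor = ext =>
  Object.keys(db).filter(type => (db[type].extensions || []).includes(ext));
console.log(typesFor('md'));      // e.g. ["text/markdown", ...]
```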
+ +[ci-image]: https://badgen.net/github/checks/jshttp/mime-db/master?label=ci +[ci-url]: https://github.com/jshttp/mime-db/actions?query=workflow%3Aci +[coveralls-image]: https://badgen.net/coveralls/c/github/jshttp/mime-db/master +[coveralls-url]: https://coveralls.io/r/jshttp/mime-db?branch=master +[node-image]: https://badgen.net/npm/node/mime-db +[node-url]: https://nodejs.org/en/download +[npm-downloads-image]: https://badgen.net/npm/dm/mime-db +[npm-url]: https://npmjs.org/package/mime-db +[npm-version-image]: https://badgen.net/npm/v/mime-db diff --git a/node_modules/mime-db/db.json b/node_modules/mime-db/db.json new file mode 100644 index 0000000..eb9c42c --- /dev/null +++ b/node_modules/mime-db/db.json @@ -0,0 +1,8519 @@ +{ + "application/1d-interleaved-parityfec": { + "source": "iana" + }, + "application/3gpdash-qoe-report+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/3gpp-ims+xml": { + "source": "iana", + "compressible": true + }, + "application/3gpphal+json": { + "source": "iana", + "compressible": true + }, + "application/3gpphalforms+json": { + "source": "iana", + "compressible": true + }, + "application/a2l": { + "source": "iana" + }, + "application/ace+cbor": { + "source": "iana" + }, + "application/activemessage": { + "source": "iana" + }, + "application/activity+json": { + "source": "iana", + "compressible": true + }, + "application/alto-costmap+json": { + "source": "iana", + "compressible": true + }, + "application/alto-costmapfilter+json": { + "source": "iana", + "compressible": true + }, + "application/alto-directory+json": { + "source": "iana", + "compressible": true + }, + "application/alto-endpointcost+json": { + "source": "iana", + "compressible": true + }, + "application/alto-endpointcostparams+json": { + "source": "iana", + "compressible": true + }, + "application/alto-endpointprop+json": { + "source": "iana", + "compressible": true + }, + "application/alto-endpointpropparams+json": { + "source": "iana", + "compressible": true + }, + "application/alto-error+json": { + "source": "iana", + "compressible": true + }, + "application/alto-networkmap+json": { + "source": "iana", + "compressible": true + }, + "application/alto-networkmapfilter+json": { + "source": "iana", + "compressible": true + }, + "application/alto-updatestreamcontrol+json": { + "source": "iana", + "compressible": true + }, + "application/alto-updatestreamparams+json": { + "source": "iana", + "compressible": true + }, + "application/aml": { + "source": "iana" + }, + "application/andrew-inset": { + "source": "iana", + "extensions": ["ez"] + }, + "application/applefile": { + "source": "iana" + }, + "application/applixware": { + "source": "apache", + "extensions": ["aw"] + }, + "application/at+jwt": { + "source": "iana" + }, + "application/atf": { + "source": "iana" + }, + "application/atfx": { + "source": "iana" + }, + "application/atom+xml": { + "source": "iana", + "compressible": true, + "extensions": ["atom"] + }, + "application/atomcat+xml": { + "source": "iana", + "compressible": true, + "extensions": ["atomcat"] + }, + "application/atomdeleted+xml": { + "source": "iana", + "compressible": true, + "extensions": ["atomdeleted"] + }, + "application/atomicmail": { + "source": "iana" + }, + "application/atomsvc+xml": { + "source": "iana", + "compressible": true, + "extensions": ["atomsvc"] + }, + "application/atsc-dwd+xml": { + "source": "iana", + "compressible": true, + "extensions": ["dwd"] + }, + "application/atsc-dynamic-event-message": { + "source": 
"iana" + }, + "application/atsc-held+xml": { + "source": "iana", + "compressible": true, + "extensions": ["held"] + }, + "application/atsc-rdt+json": { + "source": "iana", + "compressible": true + }, + "application/atsc-rsat+xml": { + "source": "iana", + "compressible": true, + "extensions": ["rsat"] + }, + "application/atxml": { + "source": "iana" + }, + "application/auth-policy+xml": { + "source": "iana", + "compressible": true + }, + "application/bacnet-xdd+zip": { + "source": "iana", + "compressible": false + }, + "application/batch-smtp": { + "source": "iana" + }, + "application/bdoc": { + "compressible": false, + "extensions": ["bdoc"] + }, + "application/beep+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/calendar+json": { + "source": "iana", + "compressible": true + }, + "application/calendar+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xcs"] + }, + "application/call-completion": { + "source": "iana" + }, + "application/cals-1840": { + "source": "iana" + }, + "application/captive+json": { + "source": "iana", + "compressible": true + }, + "application/cbor": { + "source": "iana" + }, + "application/cbor-seq": { + "source": "iana" + }, + "application/cccex": { + "source": "iana" + }, + "application/ccmp+xml": { + "source": "iana", + "compressible": true + }, + "application/ccxml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["ccxml"] + }, + "application/cdfx+xml": { + "source": "iana", + "compressible": true, + "extensions": ["cdfx"] + }, + "application/cdmi-capability": { + "source": "iana", + "extensions": ["cdmia"] + }, + "application/cdmi-container": { + "source": "iana", + "extensions": ["cdmic"] + }, + "application/cdmi-domain": { + "source": "iana", + "extensions": ["cdmid"] + }, + "application/cdmi-object": { + "source": "iana", + "extensions": ["cdmio"] + }, + "application/cdmi-queue": { + "source": "iana", + "extensions": ["cdmiq"] + }, + "application/cdni": { + "source": "iana" + }, + "application/cea": { + "source": "iana" + }, + "application/cea-2018+xml": { + "source": "iana", + "compressible": true + }, + "application/cellml+xml": { + "source": "iana", + "compressible": true + }, + "application/cfw": { + "source": "iana" + }, + "application/city+json": { + "source": "iana", + "compressible": true + }, + "application/clr": { + "source": "iana" + }, + "application/clue+xml": { + "source": "iana", + "compressible": true + }, + "application/clue_info+xml": { + "source": "iana", + "compressible": true + }, + "application/cms": { + "source": "iana" + }, + "application/cnrp+xml": { + "source": "iana", + "compressible": true + }, + "application/coap-group+json": { + "source": "iana", + "compressible": true + }, + "application/coap-payload": { + "source": "iana" + }, + "application/commonground": { + "source": "iana" + }, + "application/conference-info+xml": { + "source": "iana", + "compressible": true + }, + "application/cose": { + "source": "iana" + }, + "application/cose-key": { + "source": "iana" + }, + "application/cose-key-set": { + "source": "iana" + }, + "application/cpl+xml": { + "source": "iana", + "compressible": true, + "extensions": ["cpl"] + }, + "application/csrattrs": { + "source": "iana" + }, + "application/csta+xml": { + "source": "iana", + "compressible": true + }, + "application/cstadata+xml": { + "source": "iana", + "compressible": true + }, + "application/csvm+json": { + "source": "iana", + "compressible": true + }, + "application/cu-seeme": { + "source": "apache", 
+ "extensions": ["cu"] + }, + "application/cwt": { + "source": "iana" + }, + "application/cybercash": { + "source": "iana" + }, + "application/dart": { + "compressible": true + }, + "application/dash+xml": { + "source": "iana", + "compressible": true, + "extensions": ["mpd"] + }, + "application/dash-patch+xml": { + "source": "iana", + "compressible": true, + "extensions": ["mpp"] + }, + "application/dashdelta": { + "source": "iana" + }, + "application/davmount+xml": { + "source": "iana", + "compressible": true, + "extensions": ["davmount"] + }, + "application/dca-rft": { + "source": "iana" + }, + "application/dcd": { + "source": "iana" + }, + "application/dec-dx": { + "source": "iana" + }, + "application/dialog-info+xml": { + "source": "iana", + "compressible": true + }, + "application/dicom": { + "source": "iana" + }, + "application/dicom+json": { + "source": "iana", + "compressible": true + }, + "application/dicom+xml": { + "source": "iana", + "compressible": true + }, + "application/dii": { + "source": "iana" + }, + "application/dit": { + "source": "iana" + }, + "application/dns": { + "source": "iana" + }, + "application/dns+json": { + "source": "iana", + "compressible": true + }, + "application/dns-message": { + "source": "iana" + }, + "application/docbook+xml": { + "source": "apache", + "compressible": true, + "extensions": ["dbk"] + }, + "application/dots+cbor": { + "source": "iana" + }, + "application/dskpp+xml": { + "source": "iana", + "compressible": true + }, + "application/dssc+der": { + "source": "iana", + "extensions": ["dssc"] + }, + "application/dssc+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xdssc"] + }, + "application/dvcs": { + "source": "iana" + }, + "application/ecmascript": { + "source": "iana", + "compressible": true, + "extensions": ["es","ecma"] + }, + "application/edi-consent": { + "source": "iana" + }, + "application/edi-x12": { + "source": "iana", + "compressible": false + }, + "application/edifact": { + "source": "iana", + "compressible": false + }, + "application/efi": { + "source": "iana" + }, + "application/elm+json": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/elm+xml": { + "source": "iana", + "compressible": true + }, + "application/emergencycalldata.cap+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/emergencycalldata.comment+xml": { + "source": "iana", + "compressible": true + }, + "application/emergencycalldata.control+xml": { + "source": "iana", + "compressible": true + }, + "application/emergencycalldata.deviceinfo+xml": { + "source": "iana", + "compressible": true + }, + "application/emergencycalldata.ecall.msd": { + "source": "iana" + }, + "application/emergencycalldata.providerinfo+xml": { + "source": "iana", + "compressible": true + }, + "application/emergencycalldata.serviceinfo+xml": { + "source": "iana", + "compressible": true + }, + "application/emergencycalldata.subscriberinfo+xml": { + "source": "iana", + "compressible": true + }, + "application/emergencycalldata.veds+xml": { + "source": "iana", + "compressible": true + }, + "application/emma+xml": { + "source": "iana", + "compressible": true, + "extensions": ["emma"] + }, + "application/emotionml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["emotionml"] + }, + "application/encaprtp": { + "source": "iana" + }, + "application/epp+xml": { + "source": "iana", + "compressible": true + }, + "application/epub+zip": { + "source": "iana", + "compressible": false, 
+ "extensions": ["epub"] + }, + "application/eshop": { + "source": "iana" + }, + "application/exi": { + "source": "iana", + "extensions": ["exi"] + }, + "application/expect-ct-report+json": { + "source": "iana", + "compressible": true + }, + "application/express": { + "source": "iana", + "extensions": ["exp"] + }, + "application/fastinfoset": { + "source": "iana" + }, + "application/fastsoap": { + "source": "iana" + }, + "application/fdt+xml": { + "source": "iana", + "compressible": true, + "extensions": ["fdt"] + }, + "application/fhir+json": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/fhir+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/fido.trusted-apps+json": { + "compressible": true + }, + "application/fits": { + "source": "iana" + }, + "application/flexfec": { + "source": "iana" + }, + "application/font-sfnt": { + "source": "iana" + }, + "application/font-tdpfr": { + "source": "iana", + "extensions": ["pfr"] + }, + "application/font-woff": { + "source": "iana", + "compressible": false + }, + "application/framework-attributes+xml": { + "source": "iana", + "compressible": true + }, + "application/geo+json": { + "source": "iana", + "compressible": true, + "extensions": ["geojson"] + }, + "application/geo+json-seq": { + "source": "iana" + }, + "application/geopackage+sqlite3": { + "source": "iana" + }, + "application/geoxacml+xml": { + "source": "iana", + "compressible": true + }, + "application/gltf-buffer": { + "source": "iana" + }, + "application/gml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["gml"] + }, + "application/gpx+xml": { + "source": "apache", + "compressible": true, + "extensions": ["gpx"] + }, + "application/gxf": { + "source": "apache", + "extensions": ["gxf"] + }, + "application/gzip": { + "source": "iana", + "compressible": false, + "extensions": ["gz"] + }, + "application/h224": { + "source": "iana" + }, + "application/held+xml": { + "source": "iana", + "compressible": true + }, + "application/hjson": { + "extensions": ["hjson"] + }, + "application/http": { + "source": "iana" + }, + "application/hyperstudio": { + "source": "iana", + "extensions": ["stk"] + }, + "application/ibe-key-request+xml": { + "source": "iana", + "compressible": true + }, + "application/ibe-pkg-reply+xml": { + "source": "iana", + "compressible": true + }, + "application/ibe-pp-data": { + "source": "iana" + }, + "application/iges": { + "source": "iana" + }, + "application/im-iscomposing+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/index": { + "source": "iana" + }, + "application/index.cmd": { + "source": "iana" + }, + "application/index.obj": { + "source": "iana" + }, + "application/index.response": { + "source": "iana" + }, + "application/index.vnd": { + "source": "iana" + }, + "application/inkml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["ink","inkml"] + }, + "application/iotp": { + "source": "iana" + }, + "application/ipfix": { + "source": "iana", + "extensions": ["ipfix"] + }, + "application/ipp": { + "source": "iana" + }, + "application/isup": { + "source": "iana" + }, + "application/its+xml": { + "source": "iana", + "compressible": true, + "extensions": ["its"] + }, + "application/java-archive": { + "source": "apache", + "compressible": false, + "extensions": ["jar","war","ear"] + }, + "application/java-serialized-object": { + "source": "apache", + "compressible": false, + "extensions": ["ser"] + }, + 
"application/java-vm": { + "source": "apache", + "compressible": false, + "extensions": ["class"] + }, + "application/javascript": { + "source": "iana", + "charset": "UTF-8", + "compressible": true, + "extensions": ["js","mjs"] + }, + "application/jf2feed+json": { + "source": "iana", + "compressible": true + }, + "application/jose": { + "source": "iana" + }, + "application/jose+json": { + "source": "iana", + "compressible": true + }, + "application/jrd+json": { + "source": "iana", + "compressible": true + }, + "application/jscalendar+json": { + "source": "iana", + "compressible": true + }, + "application/json": { + "source": "iana", + "charset": "UTF-8", + "compressible": true, + "extensions": ["json","map"] + }, + "application/json-patch+json": { + "source": "iana", + "compressible": true + }, + "application/json-seq": { + "source": "iana" + }, + "application/json5": { + "extensions": ["json5"] + }, + "application/jsonml+json": { + "source": "apache", + "compressible": true, + "extensions": ["jsonml"] + }, + "application/jwk+json": { + "source": "iana", + "compressible": true + }, + "application/jwk-set+json": { + "source": "iana", + "compressible": true + }, + "application/jwt": { + "source": "iana" + }, + "application/kpml-request+xml": { + "source": "iana", + "compressible": true + }, + "application/kpml-response+xml": { + "source": "iana", + "compressible": true + }, + "application/ld+json": { + "source": "iana", + "compressible": true, + "extensions": ["jsonld"] + }, + "application/lgr+xml": { + "source": "iana", + "compressible": true, + "extensions": ["lgr"] + }, + "application/link-format": { + "source": "iana" + }, + "application/load-control+xml": { + "source": "iana", + "compressible": true + }, + "application/lost+xml": { + "source": "iana", + "compressible": true, + "extensions": ["lostxml"] + }, + "application/lostsync+xml": { + "source": "iana", + "compressible": true + }, + "application/lpf+zip": { + "source": "iana", + "compressible": false + }, + "application/lxf": { + "source": "iana" + }, + "application/mac-binhex40": { + "source": "iana", + "extensions": ["hqx"] + }, + "application/mac-compactpro": { + "source": "apache", + "extensions": ["cpt"] + }, + "application/macwriteii": { + "source": "iana" + }, + "application/mads+xml": { + "source": "iana", + "compressible": true, + "extensions": ["mads"] + }, + "application/manifest+json": { + "source": "iana", + "charset": "UTF-8", + "compressible": true, + "extensions": ["webmanifest"] + }, + "application/marc": { + "source": "iana", + "extensions": ["mrc"] + }, + "application/marcxml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["mrcx"] + }, + "application/mathematica": { + "source": "iana", + "extensions": ["ma","nb","mb"] + }, + "application/mathml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["mathml"] + }, + "application/mathml-content+xml": { + "source": "iana", + "compressible": true + }, + "application/mathml-presentation+xml": { + "source": "iana", + "compressible": true + }, + "application/mbms-associated-procedure-description+xml": { + "source": "iana", + "compressible": true + }, + "application/mbms-deregister+xml": { + "source": "iana", + "compressible": true + }, + "application/mbms-envelope+xml": { + "source": "iana", + "compressible": true + }, + "application/mbms-msk+xml": { + "source": "iana", + "compressible": true + }, + "application/mbms-msk-response+xml": { + "source": "iana", + "compressible": true + }, + "application/mbms-protection-description+xml": { 
+ "source": "iana", + "compressible": true + }, + "application/mbms-reception-report+xml": { + "source": "iana", + "compressible": true + }, + "application/mbms-register+xml": { + "source": "iana", + "compressible": true + }, + "application/mbms-register-response+xml": { + "source": "iana", + "compressible": true + }, + "application/mbms-schedule+xml": { + "source": "iana", + "compressible": true + }, + "application/mbms-user-service-description+xml": { + "source": "iana", + "compressible": true + }, + "application/mbox": { + "source": "iana", + "extensions": ["mbox"] + }, + "application/media-policy-dataset+xml": { + "source": "iana", + "compressible": true, + "extensions": ["mpf"] + }, + "application/media_control+xml": { + "source": "iana", + "compressible": true + }, + "application/mediaservercontrol+xml": { + "source": "iana", + "compressible": true, + "extensions": ["mscml"] + }, + "application/merge-patch+json": { + "source": "iana", + "compressible": true + }, + "application/metalink+xml": { + "source": "apache", + "compressible": true, + "extensions": ["metalink"] + }, + "application/metalink4+xml": { + "source": "iana", + "compressible": true, + "extensions": ["meta4"] + }, + "application/mets+xml": { + "source": "iana", + "compressible": true, + "extensions": ["mets"] + }, + "application/mf4": { + "source": "iana" + }, + "application/mikey": { + "source": "iana" + }, + "application/mipc": { + "source": "iana" + }, + "application/missing-blocks+cbor-seq": { + "source": "iana" + }, + "application/mmt-aei+xml": { + "source": "iana", + "compressible": true, + "extensions": ["maei"] + }, + "application/mmt-usd+xml": { + "source": "iana", + "compressible": true, + "extensions": ["musd"] + }, + "application/mods+xml": { + "source": "iana", + "compressible": true, + "extensions": ["mods"] + }, + "application/moss-keys": { + "source": "iana" + }, + "application/moss-signature": { + "source": "iana" + }, + "application/mosskey-data": { + "source": "iana" + }, + "application/mosskey-request": { + "source": "iana" + }, + "application/mp21": { + "source": "iana", + "extensions": ["m21","mp21"] + }, + "application/mp4": { + "source": "iana", + "extensions": ["mp4s","m4p"] + }, + "application/mpeg4-generic": { + "source": "iana" + }, + "application/mpeg4-iod": { + "source": "iana" + }, + "application/mpeg4-iod-xmt": { + "source": "iana" + }, + "application/mrb-consumer+xml": { + "source": "iana", + "compressible": true + }, + "application/mrb-publish+xml": { + "source": "iana", + "compressible": true + }, + "application/msc-ivr+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/msc-mixer+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/msword": { + "source": "iana", + "compressible": false, + "extensions": ["doc","dot"] + }, + "application/mud+json": { + "source": "iana", + "compressible": true + }, + "application/multipart-core": { + "source": "iana" + }, + "application/mxf": { + "source": "iana", + "extensions": ["mxf"] + }, + "application/n-quads": { + "source": "iana", + "extensions": ["nq"] + }, + "application/n-triples": { + "source": "iana", + "extensions": ["nt"] + }, + "application/nasdata": { + "source": "iana" + }, + "application/news-checkgroups": { + "source": "iana", + "charset": "US-ASCII" + }, + "application/news-groupinfo": { + "source": "iana", + "charset": "US-ASCII" + }, + "application/news-transmission": { + "source": "iana" + }, + "application/nlsml+xml": { + "source": "iana", + 
"compressible": true + }, + "application/node": { + "source": "iana", + "extensions": ["cjs"] + }, + "application/nss": { + "source": "iana" + }, + "application/oauth-authz-req+jwt": { + "source": "iana" + }, + "application/oblivious-dns-message": { + "source": "iana" + }, + "application/ocsp-request": { + "source": "iana" + }, + "application/ocsp-response": { + "source": "iana" + }, + "application/octet-stream": { + "source": "iana", + "compressible": false, + "extensions": ["bin","dms","lrf","mar","so","dist","distz","pkg","bpk","dump","elc","deploy","exe","dll","deb","dmg","iso","img","msi","msp","msm","buffer"] + }, + "application/oda": { + "source": "iana", + "extensions": ["oda"] + }, + "application/odm+xml": { + "source": "iana", + "compressible": true + }, + "application/odx": { + "source": "iana" + }, + "application/oebps-package+xml": { + "source": "iana", + "compressible": true, + "extensions": ["opf"] + }, + "application/ogg": { + "source": "iana", + "compressible": false, + "extensions": ["ogx"] + }, + "application/omdoc+xml": { + "source": "apache", + "compressible": true, + "extensions": ["omdoc"] + }, + "application/onenote": { + "source": "apache", + "extensions": ["onetoc","onetoc2","onetmp","onepkg"] + }, + "application/opc-nodeset+xml": { + "source": "iana", + "compressible": true + }, + "application/oscore": { + "source": "iana" + }, + "application/oxps": { + "source": "iana", + "extensions": ["oxps"] + }, + "application/p21": { + "source": "iana" + }, + "application/p21+zip": { + "source": "iana", + "compressible": false + }, + "application/p2p-overlay+xml": { + "source": "iana", + "compressible": true, + "extensions": ["relo"] + }, + "application/parityfec": { + "source": "iana" + }, + "application/passport": { + "source": "iana" + }, + "application/patch-ops-error+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xer"] + }, + "application/pdf": { + "source": "iana", + "compressible": false, + "extensions": ["pdf"] + }, + "application/pdx": { + "source": "iana" + }, + "application/pem-certificate-chain": { + "source": "iana" + }, + "application/pgp-encrypted": { + "source": "iana", + "compressible": false, + "extensions": ["pgp"] + }, + "application/pgp-keys": { + "source": "iana", + "extensions": ["asc"] + }, + "application/pgp-signature": { + "source": "iana", + "extensions": ["asc","sig"] + }, + "application/pics-rules": { + "source": "apache", + "extensions": ["prf"] + }, + "application/pidf+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/pidf-diff+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/pkcs10": { + "source": "iana", + "extensions": ["p10"] + }, + "application/pkcs12": { + "source": "iana" + }, + "application/pkcs7-mime": { + "source": "iana", + "extensions": ["p7m","p7c"] + }, + "application/pkcs7-signature": { + "source": "iana", + "extensions": ["p7s"] + }, + "application/pkcs8": { + "source": "iana", + "extensions": ["p8"] + }, + "application/pkcs8-encrypted": { + "source": "iana" + }, + "application/pkix-attr-cert": { + "source": "iana", + "extensions": ["ac"] + }, + "application/pkix-cert": { + "source": "iana", + "extensions": ["cer"] + }, + "application/pkix-crl": { + "source": "iana", + "extensions": ["crl"] + }, + "application/pkix-pkipath": { + "source": "iana", + "extensions": ["pkipath"] + }, + "application/pkixcmp": { + "source": "iana", + "extensions": ["pki"] + }, + "application/pls+xml": { + "source": "iana", + "compressible": 
true, + "extensions": ["pls"] + }, + "application/poc-settings+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/postscript": { + "source": "iana", + "compressible": true, + "extensions": ["ai","eps","ps"] + }, + "application/ppsp-tracker+json": { + "source": "iana", + "compressible": true + }, + "application/problem+json": { + "source": "iana", + "compressible": true + }, + "application/problem+xml": { + "source": "iana", + "compressible": true + }, + "application/provenance+xml": { + "source": "iana", + "compressible": true, + "extensions": ["provx"] + }, + "application/prs.alvestrand.titrax-sheet": { + "source": "iana" + }, + "application/prs.cww": { + "source": "iana", + "extensions": ["cww"] + }, + "application/prs.cyn": { + "source": "iana", + "charset": "7-BIT" + }, + "application/prs.hpub+zip": { + "source": "iana", + "compressible": false + }, + "application/prs.nprend": { + "source": "iana" + }, + "application/prs.plucker": { + "source": "iana" + }, + "application/prs.rdf-xml-crypt": { + "source": "iana" + }, + "application/prs.xsf+xml": { + "source": "iana", + "compressible": true + }, + "application/pskc+xml": { + "source": "iana", + "compressible": true, + "extensions": ["pskcxml"] + }, + "application/pvd+json": { + "source": "iana", + "compressible": true + }, + "application/qsig": { + "source": "iana" + }, + "application/raml+yaml": { + "compressible": true, + "extensions": ["raml"] + }, + "application/raptorfec": { + "source": "iana" + }, + "application/rdap+json": { + "source": "iana", + "compressible": true + }, + "application/rdf+xml": { + "source": "iana", + "compressible": true, + "extensions": ["rdf","owl"] + }, + "application/reginfo+xml": { + "source": "iana", + "compressible": true, + "extensions": ["rif"] + }, + "application/relax-ng-compact-syntax": { + "source": "iana", + "extensions": ["rnc"] + }, + "application/remote-printing": { + "source": "iana" + }, + "application/reputon+json": { + "source": "iana", + "compressible": true + }, + "application/resource-lists+xml": { + "source": "iana", + "compressible": true, + "extensions": ["rl"] + }, + "application/resource-lists-diff+xml": { + "source": "iana", + "compressible": true, + "extensions": ["rld"] + }, + "application/rfc+xml": { + "source": "iana", + "compressible": true + }, + "application/riscos": { + "source": "iana" + }, + "application/rlmi+xml": { + "source": "iana", + "compressible": true + }, + "application/rls-services+xml": { + "source": "iana", + "compressible": true, + "extensions": ["rs"] + }, + "application/route-apd+xml": { + "source": "iana", + "compressible": true, + "extensions": ["rapd"] + }, + "application/route-s-tsid+xml": { + "source": "iana", + "compressible": true, + "extensions": ["sls"] + }, + "application/route-usd+xml": { + "source": "iana", + "compressible": true, + "extensions": ["rusd"] + }, + "application/rpki-ghostbusters": { + "source": "iana", + "extensions": ["gbr"] + }, + "application/rpki-manifest": { + "source": "iana", + "extensions": ["mft"] + }, + "application/rpki-publication": { + "source": "iana" + }, + "application/rpki-roa": { + "source": "iana", + "extensions": ["roa"] + }, + "application/rpki-updown": { + "source": "iana" + }, + "application/rsd+xml": { + "source": "apache", + "compressible": true, + "extensions": ["rsd"] + }, + "application/rss+xml": { + "source": "apache", + "compressible": true, + "extensions": ["rss"] + }, + "application/rtf": { + "source": "iana", + "compressible": true, + "extensions": ["rtf"] + 
}, + "application/rtploopback": { + "source": "iana" + }, + "application/rtx": { + "source": "iana" + }, + "application/samlassertion+xml": { + "source": "iana", + "compressible": true + }, + "application/samlmetadata+xml": { + "source": "iana", + "compressible": true + }, + "application/sarif+json": { + "source": "iana", + "compressible": true + }, + "application/sarif-external-properties+json": { + "source": "iana", + "compressible": true + }, + "application/sbe": { + "source": "iana" + }, + "application/sbml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["sbml"] + }, + "application/scaip+xml": { + "source": "iana", + "compressible": true + }, + "application/scim+json": { + "source": "iana", + "compressible": true + }, + "application/scvp-cv-request": { + "source": "iana", + "extensions": ["scq"] + }, + "application/scvp-cv-response": { + "source": "iana", + "extensions": ["scs"] + }, + "application/scvp-vp-request": { + "source": "iana", + "extensions": ["spq"] + }, + "application/scvp-vp-response": { + "source": "iana", + "extensions": ["spp"] + }, + "application/sdp": { + "source": "iana", + "extensions": ["sdp"] + }, + "application/secevent+jwt": { + "source": "iana" + }, + "application/senml+cbor": { + "source": "iana" + }, + "application/senml+json": { + "source": "iana", + "compressible": true + }, + "application/senml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["senmlx"] + }, + "application/senml-etch+cbor": { + "source": "iana" + }, + "application/senml-etch+json": { + "source": "iana", + "compressible": true + }, + "application/senml-exi": { + "source": "iana" + }, + "application/sensml+cbor": { + "source": "iana" + }, + "application/sensml+json": { + "source": "iana", + "compressible": true + }, + "application/sensml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["sensmlx"] + }, + "application/sensml-exi": { + "source": "iana" + }, + "application/sep+xml": { + "source": "iana", + "compressible": true + }, + "application/sep-exi": { + "source": "iana" + }, + "application/session-info": { + "source": "iana" + }, + "application/set-payment": { + "source": "iana" + }, + "application/set-payment-initiation": { + "source": "iana", + "extensions": ["setpay"] + }, + "application/set-registration": { + "source": "iana" + }, + "application/set-registration-initiation": { + "source": "iana", + "extensions": ["setreg"] + }, + "application/sgml": { + "source": "iana" + }, + "application/sgml-open-catalog": { + "source": "iana" + }, + "application/shf+xml": { + "source": "iana", + "compressible": true, + "extensions": ["shf"] + }, + "application/sieve": { + "source": "iana", + "extensions": ["siv","sieve"] + }, + "application/simple-filter+xml": { + "source": "iana", + "compressible": true + }, + "application/simple-message-summary": { + "source": "iana" + }, + "application/simplesymbolcontainer": { + "source": "iana" + }, + "application/sipc": { + "source": "iana" + }, + "application/slate": { + "source": "iana" + }, + "application/smil": { + "source": "iana" + }, + "application/smil+xml": { + "source": "iana", + "compressible": true, + "extensions": ["smi","smil"] + }, + "application/smpte336m": { + "source": "iana" + }, + "application/soap+fastinfoset": { + "source": "iana" + }, + "application/soap+xml": { + "source": "iana", + "compressible": true + }, + "application/sparql-query": { + "source": "iana", + "extensions": ["rq"] + }, + "application/sparql-results+xml": { + "source": "iana", + "compressible": true, + 
"extensions": ["srx"] + }, + "application/spdx+json": { + "source": "iana", + "compressible": true + }, + "application/spirits-event+xml": { + "source": "iana", + "compressible": true + }, + "application/sql": { + "source": "iana" + }, + "application/srgs": { + "source": "iana", + "extensions": ["gram"] + }, + "application/srgs+xml": { + "source": "iana", + "compressible": true, + "extensions": ["grxml"] + }, + "application/sru+xml": { + "source": "iana", + "compressible": true, + "extensions": ["sru"] + }, + "application/ssdl+xml": { + "source": "apache", + "compressible": true, + "extensions": ["ssdl"] + }, + "application/ssml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["ssml"] + }, + "application/stix+json": { + "source": "iana", + "compressible": true + }, + "application/swid+xml": { + "source": "iana", + "compressible": true, + "extensions": ["swidtag"] + }, + "application/tamp-apex-update": { + "source": "iana" + }, + "application/tamp-apex-update-confirm": { + "source": "iana" + }, + "application/tamp-community-update": { + "source": "iana" + }, + "application/tamp-community-update-confirm": { + "source": "iana" + }, + "application/tamp-error": { + "source": "iana" + }, + "application/tamp-sequence-adjust": { + "source": "iana" + }, + "application/tamp-sequence-adjust-confirm": { + "source": "iana" + }, + "application/tamp-status-query": { + "source": "iana" + }, + "application/tamp-status-response": { + "source": "iana" + }, + "application/tamp-update": { + "source": "iana" + }, + "application/tamp-update-confirm": { + "source": "iana" + }, + "application/tar": { + "compressible": true + }, + "application/taxii+json": { + "source": "iana", + "compressible": true + }, + "application/td+json": { + "source": "iana", + "compressible": true + }, + "application/tei+xml": { + "source": "iana", + "compressible": true, + "extensions": ["tei","teicorpus"] + }, + "application/tetra_isi": { + "source": "iana" + }, + "application/thraud+xml": { + "source": "iana", + "compressible": true, + "extensions": ["tfi"] + }, + "application/timestamp-query": { + "source": "iana" + }, + "application/timestamp-reply": { + "source": "iana" + }, + "application/timestamped-data": { + "source": "iana", + "extensions": ["tsd"] + }, + "application/tlsrpt+gzip": { + "source": "iana" + }, + "application/tlsrpt+json": { + "source": "iana", + "compressible": true + }, + "application/tnauthlist": { + "source": "iana" + }, + "application/token-introspection+jwt": { + "source": "iana" + }, + "application/toml": { + "compressible": true, + "extensions": ["toml"] + }, + "application/trickle-ice-sdpfrag": { + "source": "iana" + }, + "application/trig": { + "source": "iana", + "extensions": ["trig"] + }, + "application/ttml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["ttml"] + }, + "application/tve-trigger": { + "source": "iana" + }, + "application/tzif": { + "source": "iana" + }, + "application/tzif-leap": { + "source": "iana" + }, + "application/ubjson": { + "compressible": false, + "extensions": ["ubj"] + }, + "application/ulpfec": { + "source": "iana" + }, + "application/urc-grpsheet+xml": { + "source": "iana", + "compressible": true + }, + "application/urc-ressheet+xml": { + "source": "iana", + "compressible": true, + "extensions": ["rsheet"] + }, + "application/urc-targetdesc+xml": { + "source": "iana", + "compressible": true, + "extensions": ["td"] + }, + "application/urc-uisocketdesc+xml": { + "source": "iana", + "compressible": true + }, + "application/vcard+json": 
{ + "source": "iana", + "compressible": true + }, + "application/vcard+xml": { + "source": "iana", + "compressible": true + }, + "application/vemmi": { + "source": "iana" + }, + "application/vividence.scriptfile": { + "source": "apache" + }, + "application/vnd.1000minds.decision-model+xml": { + "source": "iana", + "compressible": true, + "extensions": ["1km"] + }, + "application/vnd.3gpp-prose+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp-prose-pc3ch+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp-v2x-local-service-information": { + "source": "iana" + }, + "application/vnd.3gpp.5gnas": { + "source": "iana" + }, + "application/vnd.3gpp.access-transfer-events+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.bsf+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.gmop+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.gtpc": { + "source": "iana" + }, + "application/vnd.3gpp.interworking-data": { + "source": "iana" + }, + "application/vnd.3gpp.lpp": { + "source": "iana" + }, + "application/vnd.3gpp.mc-signalling-ear": { + "source": "iana" + }, + "application/vnd.3gpp.mcdata-affiliation-command+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcdata-info+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcdata-payload": { + "source": "iana" + }, + "application/vnd.3gpp.mcdata-service-config+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcdata-signalling": { + "source": "iana" + }, + "application/vnd.3gpp.mcdata-ue-config+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcdata-user-profile+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcptt-affiliation-command+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcptt-floor-request+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcptt-info+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcptt-location-info+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcptt-mbms-usage-info+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcptt-service-config+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcptt-signed+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcptt-ue-config+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcptt-ue-init-config+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcptt-user-profile+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcvideo-affiliation-command+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcvideo-affiliation-info+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcvideo-info+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcvideo-location-info+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcvideo-mbms-usage-info+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcvideo-service-config+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcvideo-transmission-request+xml": { + "source": "iana", + "compressible": true + 
}, + "application/vnd.3gpp.mcvideo-ue-config+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcvideo-user-profile+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mid-call+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.ngap": { + "source": "iana" + }, + "application/vnd.3gpp.pfcp": { + "source": "iana" + }, + "application/vnd.3gpp.pic-bw-large": { + "source": "iana", + "extensions": ["plb"] + }, + "application/vnd.3gpp.pic-bw-small": { + "source": "iana", + "extensions": ["psb"] + }, + "application/vnd.3gpp.pic-bw-var": { + "source": "iana", + "extensions": ["pvb"] + }, + "application/vnd.3gpp.s1ap": { + "source": "iana" + }, + "application/vnd.3gpp.sms": { + "source": "iana" + }, + "application/vnd.3gpp.sms+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.srvcc-ext+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.srvcc-info+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.state-and-event-info+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.ussd+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp2.bcmcsinfo+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp2.sms": { + "source": "iana" + }, + "application/vnd.3gpp2.tcap": { + "source": "iana", + "extensions": ["tcap"] + }, + "application/vnd.3lightssoftware.imagescal": { + "source": "iana" + }, + "application/vnd.3m.post-it-notes": { + "source": "iana", + "extensions": ["pwn"] + }, + "application/vnd.accpac.simply.aso": { + "source": "iana", + "extensions": ["aso"] + }, + "application/vnd.accpac.simply.imp": { + "source": "iana", + "extensions": ["imp"] + }, + "application/vnd.acucobol": { + "source": "iana", + "extensions": ["acu"] + }, + "application/vnd.acucorp": { + "source": "iana", + "extensions": ["atc","acutc"] + }, + "application/vnd.adobe.air-application-installer-package+zip": { + "source": "apache", + "compressible": false, + "extensions": ["air"] + }, + "application/vnd.adobe.flash.movie": { + "source": "iana" + }, + "application/vnd.adobe.formscentral.fcdt": { + "source": "iana", + "extensions": ["fcdt"] + }, + "application/vnd.adobe.fxp": { + "source": "iana", + "extensions": ["fxp","fxpl"] + }, + "application/vnd.adobe.partial-upload": { + "source": "iana" + }, + "application/vnd.adobe.xdp+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xdp"] + }, + "application/vnd.adobe.xfdf": { + "source": "iana", + "extensions": ["xfdf"] + }, + "application/vnd.aether.imp": { + "source": "iana" + }, + "application/vnd.afpc.afplinedata": { + "source": "iana" + }, + "application/vnd.afpc.afplinedata-pagedef": { + "source": "iana" + }, + "application/vnd.afpc.cmoca-cmresource": { + "source": "iana" + }, + "application/vnd.afpc.foca-charset": { + "source": "iana" + }, + "application/vnd.afpc.foca-codedfont": { + "source": "iana" + }, + "application/vnd.afpc.foca-codepage": { + "source": "iana" + }, + "application/vnd.afpc.modca": { + "source": "iana" + }, + "application/vnd.afpc.modca-cmtable": { + "source": "iana" + }, + "application/vnd.afpc.modca-formdef": { + "source": "iana" + }, + "application/vnd.afpc.modca-mediummap": { + "source": "iana" + }, + "application/vnd.afpc.modca-objectcontainer": { + "source": "iana" + }, + "application/vnd.afpc.modca-overlay": { + "source": "iana" + }, + "application/vnd.afpc.modca-pagesegment": { + "source": 
"iana" + }, + "application/vnd.age": { + "source": "iana", + "extensions": ["age"] + }, + "application/vnd.ah-barcode": { + "source": "iana" + }, + "application/vnd.ahead.space": { + "source": "iana", + "extensions": ["ahead"] + }, + "application/vnd.airzip.filesecure.azf": { + "source": "iana", + "extensions": ["azf"] + }, + "application/vnd.airzip.filesecure.azs": { + "source": "iana", + "extensions": ["azs"] + }, + "application/vnd.amadeus+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.amazon.ebook": { + "source": "apache", + "extensions": ["azw"] + }, + "application/vnd.amazon.mobi8-ebook": { + "source": "iana" + }, + "application/vnd.americandynamics.acc": { + "source": "iana", + "extensions": ["acc"] + }, + "application/vnd.amiga.ami": { + "source": "iana", + "extensions": ["ami"] + }, + "application/vnd.amundsen.maze+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.android.ota": { + "source": "iana" + }, + "application/vnd.android.package-archive": { + "source": "apache", + "compressible": false, + "extensions": ["apk"] + }, + "application/vnd.anki": { + "source": "iana" + }, + "application/vnd.anser-web-certificate-issue-initiation": { + "source": "iana", + "extensions": ["cii"] + }, + "application/vnd.anser-web-funds-transfer-initiation": { + "source": "apache", + "extensions": ["fti"] + }, + "application/vnd.antix.game-component": { + "source": "iana", + "extensions": ["atx"] + }, + "application/vnd.apache.arrow.file": { + "source": "iana" + }, + "application/vnd.apache.arrow.stream": { + "source": "iana" + }, + "application/vnd.apache.thrift.binary": { + "source": "iana" + }, + "application/vnd.apache.thrift.compact": { + "source": "iana" + }, + "application/vnd.apache.thrift.json": { + "source": "iana" + }, + "application/vnd.api+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.aplextor.warrp+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.apothekende.reservation+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.apple.installer+xml": { + "source": "iana", + "compressible": true, + "extensions": ["mpkg"] + }, + "application/vnd.apple.keynote": { + "source": "iana", + "extensions": ["key"] + }, + "application/vnd.apple.mpegurl": { + "source": "iana", + "extensions": ["m3u8"] + }, + "application/vnd.apple.numbers": { + "source": "iana", + "extensions": ["numbers"] + }, + "application/vnd.apple.pages": { + "source": "iana", + "extensions": ["pages"] + }, + "application/vnd.apple.pkpass": { + "compressible": false, + "extensions": ["pkpass"] + }, + "application/vnd.arastra.swi": { + "source": "iana" + }, + "application/vnd.aristanetworks.swi": { + "source": "iana", + "extensions": ["swi"] + }, + "application/vnd.artisan+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.artsquare": { + "source": "iana" + }, + "application/vnd.astraea-software.iota": { + "source": "iana", + "extensions": ["iota"] + }, + "application/vnd.audiograph": { + "source": "iana", + "extensions": ["aep"] + }, + "application/vnd.autopackage": { + "source": "iana" + }, + "application/vnd.avalon+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.avistar+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.balsamiq.bmml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["bmml"] + }, + "application/vnd.balsamiq.bmpr": { + "source": "iana" + }, + "application/vnd.banana-accounting": { + 
"source": "iana" + }, + "application/vnd.bbf.usp.error": { + "source": "iana" + }, + "application/vnd.bbf.usp.msg": { + "source": "iana" + }, + "application/vnd.bbf.usp.msg+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.bekitzur-stech+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.bint.med-content": { + "source": "iana" + }, + "application/vnd.biopax.rdf+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.blink-idb-value-wrapper": { + "source": "iana" + }, + "application/vnd.blueice.multipass": { + "source": "iana", + "extensions": ["mpm"] + }, + "application/vnd.bluetooth.ep.oob": { + "source": "iana" + }, + "application/vnd.bluetooth.le.oob": { + "source": "iana" + }, + "application/vnd.bmi": { + "source": "iana", + "extensions": ["bmi"] + }, + "application/vnd.bpf": { + "source": "iana" + }, + "application/vnd.bpf3": { + "source": "iana" + }, + "application/vnd.businessobjects": { + "source": "iana", + "extensions": ["rep"] + }, + "application/vnd.byu.uapi+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.cab-jscript": { + "source": "iana" + }, + "application/vnd.canon-cpdl": { + "source": "iana" + }, + "application/vnd.canon-lips": { + "source": "iana" + }, + "application/vnd.capasystems-pg+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.cendio.thinlinc.clientconf": { + "source": "iana" + }, + "application/vnd.century-systems.tcp_stream": { + "source": "iana" + }, + "application/vnd.chemdraw+xml": { + "source": "iana", + "compressible": true, + "extensions": ["cdxml"] + }, + "application/vnd.chess-pgn": { + "source": "iana" + }, + "application/vnd.chipnuts.karaoke-mmd": { + "source": "iana", + "extensions": ["mmd"] + }, + "application/vnd.ciedi": { + "source": "iana" + }, + "application/vnd.cinderella": { + "source": "iana", + "extensions": ["cdy"] + }, + "application/vnd.cirpack.isdn-ext": { + "source": "iana" + }, + "application/vnd.citationstyles.style+xml": { + "source": "iana", + "compressible": true, + "extensions": ["csl"] + }, + "application/vnd.claymore": { + "source": "iana", + "extensions": ["cla"] + }, + "application/vnd.cloanto.rp9": { + "source": "iana", + "extensions": ["rp9"] + }, + "application/vnd.clonk.c4group": { + "source": "iana", + "extensions": ["c4g","c4d","c4f","c4p","c4u"] + }, + "application/vnd.cluetrust.cartomobile-config": { + "source": "iana", + "extensions": ["c11amc"] + }, + "application/vnd.cluetrust.cartomobile-config-pkg": { + "source": "iana", + "extensions": ["c11amz"] + }, + "application/vnd.coffeescript": { + "source": "iana" + }, + "application/vnd.collabio.xodocuments.document": { + "source": "iana" + }, + "application/vnd.collabio.xodocuments.document-template": { + "source": "iana" + }, + "application/vnd.collabio.xodocuments.presentation": { + "source": "iana" + }, + "application/vnd.collabio.xodocuments.presentation-template": { + "source": "iana" + }, + "application/vnd.collabio.xodocuments.spreadsheet": { + "source": "iana" + }, + "application/vnd.collabio.xodocuments.spreadsheet-template": { + "source": "iana" + }, + "application/vnd.collection+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.collection.doc+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.collection.next+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.comicbook+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.comicbook-rar": { + 
"source": "iana" + }, + "application/vnd.commerce-battelle": { + "source": "iana" + }, + "application/vnd.commonspace": { + "source": "iana", + "extensions": ["csp"] + }, + "application/vnd.contact.cmsg": { + "source": "iana", + "extensions": ["cdbcmsg"] + }, + "application/vnd.coreos.ignition+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.cosmocaller": { + "source": "iana", + "extensions": ["cmc"] + }, + "application/vnd.crick.clicker": { + "source": "iana", + "extensions": ["clkx"] + }, + "application/vnd.crick.clicker.keyboard": { + "source": "iana", + "extensions": ["clkk"] + }, + "application/vnd.crick.clicker.palette": { + "source": "iana", + "extensions": ["clkp"] + }, + "application/vnd.crick.clicker.template": { + "source": "iana", + "extensions": ["clkt"] + }, + "application/vnd.crick.clicker.wordbank": { + "source": "iana", + "extensions": ["clkw"] + }, + "application/vnd.criticaltools.wbs+xml": { + "source": "iana", + "compressible": true, + "extensions": ["wbs"] + }, + "application/vnd.cryptii.pipe+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.crypto-shade-file": { + "source": "iana" + }, + "application/vnd.cryptomator.encrypted": { + "source": "iana" + }, + "application/vnd.cryptomator.vault": { + "source": "iana" + }, + "application/vnd.ctc-posml": { + "source": "iana", + "extensions": ["pml"] + }, + "application/vnd.ctct.ws+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.cups-pdf": { + "source": "iana" + }, + "application/vnd.cups-postscript": { + "source": "iana" + }, + "application/vnd.cups-ppd": { + "source": "iana", + "extensions": ["ppd"] + }, + "application/vnd.cups-raster": { + "source": "iana" + }, + "application/vnd.cups-raw": { + "source": "iana" + }, + "application/vnd.curl": { + "source": "iana" + }, + "application/vnd.curl.car": { + "source": "apache", + "extensions": ["car"] + }, + "application/vnd.curl.pcurl": { + "source": "apache", + "extensions": ["pcurl"] + }, + "application/vnd.cyan.dean.root+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.cybank": { + "source": "iana" + }, + "application/vnd.cyclonedx+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.cyclonedx+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.d2l.coursepackage1p0+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.d3m-dataset": { + "source": "iana" + }, + "application/vnd.d3m-problem": { + "source": "iana" + }, + "application/vnd.dart": { + "source": "iana", + "compressible": true, + "extensions": ["dart"] + }, + "application/vnd.data-vision.rdz": { + "source": "iana", + "extensions": ["rdz"] + }, + "application/vnd.datapackage+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.dataresource+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.dbf": { + "source": "iana", + "extensions": ["dbf"] + }, + "application/vnd.debian.binary-package": { + "source": "iana" + }, + "application/vnd.dece.data": { + "source": "iana", + "extensions": ["uvf","uvvf","uvd","uvvd"] + }, + "application/vnd.dece.ttml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["uvt","uvvt"] + }, + "application/vnd.dece.unspecified": { + "source": "iana", + "extensions": ["uvx","uvvx"] + }, + "application/vnd.dece.zip": { + "source": "iana", + "extensions": ["uvz","uvvz"] + }, + "application/vnd.denovo.fcselayout-link": { + "source": "iana", + "extensions": ["fe_launch"] 
+ }, + "application/vnd.desmume.movie": { + "source": "iana" + }, + "application/vnd.dir-bi.plate-dl-nosuffix": { + "source": "iana" + }, + "application/vnd.dm.delegation+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.dna": { + "source": "iana", + "extensions": ["dna"] + }, + "application/vnd.document+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.dolby.mlp": { + "source": "apache", + "extensions": ["mlp"] + }, + "application/vnd.dolby.mobile.1": { + "source": "iana" + }, + "application/vnd.dolby.mobile.2": { + "source": "iana" + }, + "application/vnd.doremir.scorecloud-binary-document": { + "source": "iana" + }, + "application/vnd.dpgraph": { + "source": "iana", + "extensions": ["dpg"] + }, + "application/vnd.dreamfactory": { + "source": "iana", + "extensions": ["dfac"] + }, + "application/vnd.drive+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.ds-keypoint": { + "source": "apache", + "extensions": ["kpxx"] + }, + "application/vnd.dtg.local": { + "source": "iana" + }, + "application/vnd.dtg.local.flash": { + "source": "iana" + }, + "application/vnd.dtg.local.html": { + "source": "iana" + }, + "application/vnd.dvb.ait": { + "source": "iana", + "extensions": ["ait"] + }, + "application/vnd.dvb.dvbisl+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.dvb.dvbj": { + "source": "iana" + }, + "application/vnd.dvb.esgcontainer": { + "source": "iana" + }, + "application/vnd.dvb.ipdcdftnotifaccess": { + "source": "iana" + }, + "application/vnd.dvb.ipdcesgaccess": { + "source": "iana" + }, + "application/vnd.dvb.ipdcesgaccess2": { + "source": "iana" + }, + "application/vnd.dvb.ipdcesgpdd": { + "source": "iana" + }, + "application/vnd.dvb.ipdcroaming": { + "source": "iana" + }, + "application/vnd.dvb.iptv.alfec-base": { + "source": "iana" + }, + "application/vnd.dvb.iptv.alfec-enhancement": { + "source": "iana" + }, + "application/vnd.dvb.notif-aggregate-root+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.dvb.notif-container+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.dvb.notif-generic+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.dvb.notif-ia-msglist+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.dvb.notif-ia-registration-request+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.dvb.notif-ia-registration-response+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.dvb.notif-init+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.dvb.pfr": { + "source": "iana" + }, + "application/vnd.dvb.service": { + "source": "iana", + "extensions": ["svc"] + }, + "application/vnd.dxr": { + "source": "iana" + }, + "application/vnd.dynageo": { + "source": "iana", + "extensions": ["geo"] + }, + "application/vnd.dzr": { + "source": "iana" + }, + "application/vnd.easykaraoke.cdgdownload": { + "source": "iana" + }, + "application/vnd.ecdis-update": { + "source": "iana" + }, + "application/vnd.ecip.rlp": { + "source": "iana" + }, + "application/vnd.eclipse.ditto+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.ecowin.chart": { + "source": "iana", + "extensions": ["mag"] + }, + "application/vnd.ecowin.filerequest": { + "source": "iana" + }, + "application/vnd.ecowin.fileupdate": { + "source": "iana" + }, + "application/vnd.ecowin.series": { + "source": "iana" + }, + 
"application/vnd.ecowin.seriesrequest": { + "source": "iana" + }, + "application/vnd.ecowin.seriesupdate": { + "source": "iana" + }, + "application/vnd.efi.img": { + "source": "iana" + }, + "application/vnd.efi.iso": { + "source": "iana" + }, + "application/vnd.emclient.accessrequest+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.enliven": { + "source": "iana", + "extensions": ["nml"] + }, + "application/vnd.enphase.envoy": { + "source": "iana" + }, + "application/vnd.eprints.data+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.epson.esf": { + "source": "iana", + "extensions": ["esf"] + }, + "application/vnd.epson.msf": { + "source": "iana", + "extensions": ["msf"] + }, + "application/vnd.epson.quickanime": { + "source": "iana", + "extensions": ["qam"] + }, + "application/vnd.epson.salt": { + "source": "iana", + "extensions": ["slt"] + }, + "application/vnd.epson.ssf": { + "source": "iana", + "extensions": ["ssf"] + }, + "application/vnd.ericsson.quickcall": { + "source": "iana" + }, + "application/vnd.espass-espass+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.eszigno3+xml": { + "source": "iana", + "compressible": true, + "extensions": ["es3","et3"] + }, + "application/vnd.etsi.aoc+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.asic-e+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.etsi.asic-s+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.etsi.cug+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.iptvcommand+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.iptvdiscovery+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.iptvprofile+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.iptvsad-bc+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.iptvsad-cod+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.iptvsad-npvr+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.iptvservice+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.iptvsync+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.iptvueprofile+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.mcid+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.mheg5": { + "source": "iana" + }, + "application/vnd.etsi.overload-control-policy-dataset+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.pstn+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.sci+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.simservs+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.timestamp-token": { + "source": "iana" + }, + "application/vnd.etsi.tsl+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.tsl.der": { + "source": "iana" + }, + "application/vnd.eu.kasparian.car+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.eudora.data": { + "source": "iana" + }, + "application/vnd.evolv.ecig.profile": { + "source": "iana" + }, + "application/vnd.evolv.ecig.settings": { + "source": "iana" + }, + "application/vnd.evolv.ecig.theme": { + "source": "iana" + }, + "application/vnd.exstream-empower+zip": { + 
"source": "iana", + "compressible": false + }, + "application/vnd.exstream-package": { + "source": "iana" + }, + "application/vnd.ezpix-album": { + "source": "iana", + "extensions": ["ez2"] + }, + "application/vnd.ezpix-package": { + "source": "iana", + "extensions": ["ez3"] + }, + "application/vnd.f-secure.mobile": { + "source": "iana" + }, + "application/vnd.familysearch.gedcom+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.fastcopy-disk-image": { + "source": "iana" + }, + "application/vnd.fdf": { + "source": "iana", + "extensions": ["fdf"] + }, + "application/vnd.fdsn.mseed": { + "source": "iana", + "extensions": ["mseed"] + }, + "application/vnd.fdsn.seed": { + "source": "iana", + "extensions": ["seed","dataless"] + }, + "application/vnd.ffsns": { + "source": "iana" + }, + "application/vnd.ficlab.flb+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.filmit.zfc": { + "source": "iana" + }, + "application/vnd.fints": { + "source": "iana" + }, + "application/vnd.firemonkeys.cloudcell": { + "source": "iana" + }, + "application/vnd.flographit": { + "source": "iana", + "extensions": ["gph"] + }, + "application/vnd.fluxtime.clip": { + "source": "iana", + "extensions": ["ftc"] + }, + "application/vnd.font-fontforge-sfd": { + "source": "iana" + }, + "application/vnd.framemaker": { + "source": "iana", + "extensions": ["fm","frame","maker","book"] + }, + "application/vnd.frogans.fnc": { + "source": "iana", + "extensions": ["fnc"] + }, + "application/vnd.frogans.ltf": { + "source": "iana", + "extensions": ["ltf"] + }, + "application/vnd.fsc.weblaunch": { + "source": "iana", + "extensions": ["fsc"] + }, + "application/vnd.fujifilm.fb.docuworks": { + "source": "iana" + }, + "application/vnd.fujifilm.fb.docuworks.binder": { + "source": "iana" + }, + "application/vnd.fujifilm.fb.docuworks.container": { + "source": "iana" + }, + "application/vnd.fujifilm.fb.jfi+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.fujitsu.oasys": { + "source": "iana", + "extensions": ["oas"] + }, + "application/vnd.fujitsu.oasys2": { + "source": "iana", + "extensions": ["oa2"] + }, + "application/vnd.fujitsu.oasys3": { + "source": "iana", + "extensions": ["oa3"] + }, + "application/vnd.fujitsu.oasysgp": { + "source": "iana", + "extensions": ["fg5"] + }, + "application/vnd.fujitsu.oasysprs": { + "source": "iana", + "extensions": ["bh2"] + }, + "application/vnd.fujixerox.art-ex": { + "source": "iana" + }, + "application/vnd.fujixerox.art4": { + "source": "iana" + }, + "application/vnd.fujixerox.ddd": { + "source": "iana", + "extensions": ["ddd"] + }, + "application/vnd.fujixerox.docuworks": { + "source": "iana", + "extensions": ["xdw"] + }, + "application/vnd.fujixerox.docuworks.binder": { + "source": "iana", + "extensions": ["xbd"] + }, + "application/vnd.fujixerox.docuworks.container": { + "source": "iana" + }, + "application/vnd.fujixerox.hbpl": { + "source": "iana" + }, + "application/vnd.fut-misnet": { + "source": "iana" + }, + "application/vnd.futoin+cbor": { + "source": "iana" + }, + "application/vnd.futoin+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.fuzzysheet": { + "source": "iana", + "extensions": ["fzs"] + }, + "application/vnd.genomatix.tuxedo": { + "source": "iana", + "extensions": ["txd"] + }, + "application/vnd.gentics.grd+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.geo+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.geocube+xml": { + 
"source": "iana", + "compressible": true + }, + "application/vnd.geogebra.file": { + "source": "iana", + "extensions": ["ggb"] + }, + "application/vnd.geogebra.slides": { + "source": "iana" + }, + "application/vnd.geogebra.tool": { + "source": "iana", + "extensions": ["ggt"] + }, + "application/vnd.geometry-explorer": { + "source": "iana", + "extensions": ["gex","gre"] + }, + "application/vnd.geonext": { + "source": "iana", + "extensions": ["gxt"] + }, + "application/vnd.geoplan": { + "source": "iana", + "extensions": ["g2w"] + }, + "application/vnd.geospace": { + "source": "iana", + "extensions": ["g3w"] + }, + "application/vnd.gerber": { + "source": "iana" + }, + "application/vnd.globalplatform.card-content-mgt": { + "source": "iana" + }, + "application/vnd.globalplatform.card-content-mgt-response": { + "source": "iana" + }, + "application/vnd.gmx": { + "source": "iana", + "extensions": ["gmx"] + }, + "application/vnd.google-apps.document": { + "compressible": false, + "extensions": ["gdoc"] + }, + "application/vnd.google-apps.presentation": { + "compressible": false, + "extensions": ["gslides"] + }, + "application/vnd.google-apps.spreadsheet": { + "compressible": false, + "extensions": ["gsheet"] + }, + "application/vnd.google-earth.kml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["kml"] + }, + "application/vnd.google-earth.kmz": { + "source": "iana", + "compressible": false, + "extensions": ["kmz"] + }, + "application/vnd.gov.sk.e-form+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.gov.sk.e-form+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.gov.sk.xmldatacontainer+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.grafeq": { + "source": "iana", + "extensions": ["gqf","gqs"] + }, + "application/vnd.gridmp": { + "source": "iana" + }, + "application/vnd.groove-account": { + "source": "iana", + "extensions": ["gac"] + }, + "application/vnd.groove-help": { + "source": "iana", + "extensions": ["ghf"] + }, + "application/vnd.groove-identity-message": { + "source": "iana", + "extensions": ["gim"] + }, + "application/vnd.groove-injector": { + "source": "iana", + "extensions": ["grv"] + }, + "application/vnd.groove-tool-message": { + "source": "iana", + "extensions": ["gtm"] + }, + "application/vnd.groove-tool-template": { + "source": "iana", + "extensions": ["tpl"] + }, + "application/vnd.groove-vcard": { + "source": "iana", + "extensions": ["vcg"] + }, + "application/vnd.hal+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.hal+xml": { + "source": "iana", + "compressible": true, + "extensions": ["hal"] + }, + "application/vnd.handheld-entertainment+xml": { + "source": "iana", + "compressible": true, + "extensions": ["zmm"] + }, + "application/vnd.hbci": { + "source": "iana", + "extensions": ["hbci"] + }, + "application/vnd.hc+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.hcl-bireports": { + "source": "iana" + }, + "application/vnd.hdt": { + "source": "iana" + }, + "application/vnd.heroku+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.hhe.lesson-player": { + "source": "iana", + "extensions": ["les"] + }, + "application/vnd.hl7cda+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/vnd.hl7v2+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/vnd.hp-hpgl": { + "source": "iana", + "extensions": ["hpgl"] + }, + 
"application/vnd.hp-hpid": { + "source": "iana", + "extensions": ["hpid"] + }, + "application/vnd.hp-hps": { + "source": "iana", + "extensions": ["hps"] + }, + "application/vnd.hp-jlyt": { + "source": "iana", + "extensions": ["jlt"] + }, + "application/vnd.hp-pcl": { + "source": "iana", + "extensions": ["pcl"] + }, + "application/vnd.hp-pclxl": { + "source": "iana", + "extensions": ["pclxl"] + }, + "application/vnd.httphone": { + "source": "iana" + }, + "application/vnd.hydrostatix.sof-data": { + "source": "iana", + "extensions": ["sfd-hdstx"] + }, + "application/vnd.hyper+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.hyper-item+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.hyperdrive+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.hzn-3d-crossword": { + "source": "iana" + }, + "application/vnd.ibm.afplinedata": { + "source": "iana" + }, + "application/vnd.ibm.electronic-media": { + "source": "iana" + }, + "application/vnd.ibm.minipay": { + "source": "iana", + "extensions": ["mpy"] + }, + "application/vnd.ibm.modcap": { + "source": "iana", + "extensions": ["afp","listafp","list3820"] + }, + "application/vnd.ibm.rights-management": { + "source": "iana", + "extensions": ["irm"] + }, + "application/vnd.ibm.secure-container": { + "source": "iana", + "extensions": ["sc"] + }, + "application/vnd.iccprofile": { + "source": "iana", + "extensions": ["icc","icm"] + }, + "application/vnd.ieee.1905": { + "source": "iana" + }, + "application/vnd.igloader": { + "source": "iana", + "extensions": ["igl"] + }, + "application/vnd.imagemeter.folder+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.imagemeter.image+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.immervision-ivp": { + "source": "iana", + "extensions": ["ivp"] + }, + "application/vnd.immervision-ivu": { + "source": "iana", + "extensions": ["ivu"] + }, + "application/vnd.ims.imsccv1p1": { + "source": "iana" + }, + "application/vnd.ims.imsccv1p2": { + "source": "iana" + }, + "application/vnd.ims.imsccv1p3": { + "source": "iana" + }, + "application/vnd.ims.lis.v2.result+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.ims.lti.v2.toolconsumerprofile+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.ims.lti.v2.toolproxy+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.ims.lti.v2.toolproxy.id+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.ims.lti.v2.toolsettings+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.ims.lti.v2.toolsettings.simple+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.informedcontrol.rms+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.informix-visionary": { + "source": "iana" + }, + "application/vnd.infotech.project": { + "source": "iana" + }, + "application/vnd.infotech.project+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.innopath.wamp.notification": { + "source": "iana" + }, + "application/vnd.insors.igm": { + "source": "iana", + "extensions": ["igm"] + }, + "application/vnd.intercon.formnet": { + "source": "iana", + "extensions": ["xpw","xpx"] + }, + "application/vnd.intergeo": { + "source": "iana", + "extensions": ["i2g"] + }, + "application/vnd.intertrust.digibox": { + "source": "iana" + }, + "application/vnd.intertrust.nncp": { + "source": "iana" + }, + 
"application/vnd.intu.qbo": { + "source": "iana", + "extensions": ["qbo"] + }, + "application/vnd.intu.qfx": { + "source": "iana", + "extensions": ["qfx"] + }, + "application/vnd.iptc.g2.catalogitem+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.iptc.g2.conceptitem+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.iptc.g2.knowledgeitem+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.iptc.g2.newsitem+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.iptc.g2.newsmessage+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.iptc.g2.packageitem+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.iptc.g2.planningitem+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.ipunplugged.rcprofile": { + "source": "iana", + "extensions": ["rcprofile"] + }, + "application/vnd.irepository.package+xml": { + "source": "iana", + "compressible": true, + "extensions": ["irp"] + }, + "application/vnd.is-xpr": { + "source": "iana", + "extensions": ["xpr"] + }, + "application/vnd.isac.fcs": { + "source": "iana", + "extensions": ["fcs"] + }, + "application/vnd.iso11783-10+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.jam": { + "source": "iana", + "extensions": ["jam"] + }, + "application/vnd.japannet-directory-service": { + "source": "iana" + }, + "application/vnd.japannet-jpnstore-wakeup": { + "source": "iana" + }, + "application/vnd.japannet-payment-wakeup": { + "source": "iana" + }, + "application/vnd.japannet-registration": { + "source": "iana" + }, + "application/vnd.japannet-registration-wakeup": { + "source": "iana" + }, + "application/vnd.japannet-setstore-wakeup": { + "source": "iana" + }, + "application/vnd.japannet-verification": { + "source": "iana" + }, + "application/vnd.japannet-verification-wakeup": { + "source": "iana" + }, + "application/vnd.jcp.javame.midlet-rms": { + "source": "iana", + "extensions": ["rms"] + }, + "application/vnd.jisp": { + "source": "iana", + "extensions": ["jisp"] + }, + "application/vnd.joost.joda-archive": { + "source": "iana", + "extensions": ["joda"] + }, + "application/vnd.jsk.isdn-ngn": { + "source": "iana" + }, + "application/vnd.kahootz": { + "source": "iana", + "extensions": ["ktz","ktr"] + }, + "application/vnd.kde.karbon": { + "source": "iana", + "extensions": ["karbon"] + }, + "application/vnd.kde.kchart": { + "source": "iana", + "extensions": ["chrt"] + }, + "application/vnd.kde.kformula": { + "source": "iana", + "extensions": ["kfo"] + }, + "application/vnd.kde.kivio": { + "source": "iana", + "extensions": ["flw"] + }, + "application/vnd.kde.kontour": { + "source": "iana", + "extensions": ["kon"] + }, + "application/vnd.kde.kpresenter": { + "source": "iana", + "extensions": ["kpr","kpt"] + }, + "application/vnd.kde.kspread": { + "source": "iana", + "extensions": ["ksp"] + }, + "application/vnd.kde.kword": { + "source": "iana", + "extensions": ["kwd","kwt"] + }, + "application/vnd.kenameaapp": { + "source": "iana", + "extensions": ["htke"] + }, + "application/vnd.kidspiration": { + "source": "iana", + "extensions": ["kia"] + }, + "application/vnd.kinar": { + "source": "iana", + "extensions": ["kne","knp"] + }, + "application/vnd.koan": { + "source": "iana", + "extensions": ["skp","skd","skt","skm"] + }, + "application/vnd.kodak-descriptor": { + "source": "iana", + "extensions": ["sse"] + }, + "application/vnd.las": { + "source": "iana" + }, + 
"application/vnd.las.las+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.las.las+xml": { + "source": "iana", + "compressible": true, + "extensions": ["lasxml"] + }, + "application/vnd.laszip": { + "source": "iana" + }, + "application/vnd.leap+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.liberty-request+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.llamagraphics.life-balance.desktop": { + "source": "iana", + "extensions": ["lbd"] + }, + "application/vnd.llamagraphics.life-balance.exchange+xml": { + "source": "iana", + "compressible": true, + "extensions": ["lbe"] + }, + "application/vnd.logipipe.circuit+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.loom": { + "source": "iana" + }, + "application/vnd.lotus-1-2-3": { + "source": "iana", + "extensions": ["123"] + }, + "application/vnd.lotus-approach": { + "source": "iana", + "extensions": ["apr"] + }, + "application/vnd.lotus-freelance": { + "source": "iana", + "extensions": ["pre"] + }, + "application/vnd.lotus-notes": { + "source": "iana", + "extensions": ["nsf"] + }, + "application/vnd.lotus-organizer": { + "source": "iana", + "extensions": ["org"] + }, + "application/vnd.lotus-screencam": { + "source": "iana", + "extensions": ["scm"] + }, + "application/vnd.lotus-wordpro": { + "source": "iana", + "extensions": ["lwp"] + }, + "application/vnd.macports.portpkg": { + "source": "iana", + "extensions": ["portpkg"] + }, + "application/vnd.mapbox-vector-tile": { + "source": "iana", + "extensions": ["mvt"] + }, + "application/vnd.marlin.drm.actiontoken+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.marlin.drm.conftoken+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.marlin.drm.license+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.marlin.drm.mdcf": { + "source": "iana" + }, + "application/vnd.mason+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.maxar.archive.3tz+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.maxmind.maxmind-db": { + "source": "iana" + }, + "application/vnd.mcd": { + "source": "iana", + "extensions": ["mcd"] + }, + "application/vnd.medcalcdata": { + "source": "iana", + "extensions": ["mc1"] + }, + "application/vnd.mediastation.cdkey": { + "source": "iana", + "extensions": ["cdkey"] + }, + "application/vnd.meridian-slingshot": { + "source": "iana" + }, + "application/vnd.mfer": { + "source": "iana", + "extensions": ["mwf"] + }, + "application/vnd.mfmp": { + "source": "iana", + "extensions": ["mfm"] + }, + "application/vnd.micro+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.micrografx.flo": { + "source": "iana", + "extensions": ["flo"] + }, + "application/vnd.micrografx.igx": { + "source": "iana", + "extensions": ["igx"] + }, + "application/vnd.microsoft.portable-executable": { + "source": "iana" + }, + "application/vnd.microsoft.windows.thumbnail-cache": { + "source": "iana" + }, + "application/vnd.miele+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.mif": { + "source": "iana", + "extensions": ["mif"] + }, + "application/vnd.minisoft-hp3000-save": { + "source": "iana" + }, + "application/vnd.mitsubishi.misty-guard.trustweb": { + "source": "iana" + }, + "application/vnd.mobius.daf": { + "source": "iana", + "extensions": ["daf"] + }, + "application/vnd.mobius.dis": { + "source": "iana", + "extensions": ["dis"] + }, + 
"application/vnd.mobius.mbk": { + "source": "iana", + "extensions": ["mbk"] + }, + "application/vnd.mobius.mqy": { + "source": "iana", + "extensions": ["mqy"] + }, + "application/vnd.mobius.msl": { + "source": "iana", + "extensions": ["msl"] + }, + "application/vnd.mobius.plc": { + "source": "iana", + "extensions": ["plc"] + }, + "application/vnd.mobius.txf": { + "source": "iana", + "extensions": ["txf"] + }, + "application/vnd.mophun.application": { + "source": "iana", + "extensions": ["mpn"] + }, + "application/vnd.mophun.certificate": { + "source": "iana", + "extensions": ["mpc"] + }, + "application/vnd.motorola.flexsuite": { + "source": "iana" + }, + "application/vnd.motorola.flexsuite.adsi": { + "source": "iana" + }, + "application/vnd.motorola.flexsuite.fis": { + "source": "iana" + }, + "application/vnd.motorola.flexsuite.gotap": { + "source": "iana" + }, + "application/vnd.motorola.flexsuite.kmr": { + "source": "iana" + }, + "application/vnd.motorola.flexsuite.ttc": { + "source": "iana" + }, + "application/vnd.motorola.flexsuite.wem": { + "source": "iana" + }, + "application/vnd.motorola.iprm": { + "source": "iana" + }, + "application/vnd.mozilla.xul+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xul"] + }, + "application/vnd.ms-3mfdocument": { + "source": "iana" + }, + "application/vnd.ms-artgalry": { + "source": "iana", + "extensions": ["cil"] + }, + "application/vnd.ms-asf": { + "source": "iana" + }, + "application/vnd.ms-cab-compressed": { + "source": "iana", + "extensions": ["cab"] + }, + "application/vnd.ms-color.iccprofile": { + "source": "apache" + }, + "application/vnd.ms-excel": { + "source": "iana", + "compressible": false, + "extensions": ["xls","xlm","xla","xlc","xlt","xlw"] + }, + "application/vnd.ms-excel.addin.macroenabled.12": { + "source": "iana", + "extensions": ["xlam"] + }, + "application/vnd.ms-excel.sheet.binary.macroenabled.12": { + "source": "iana", + "extensions": ["xlsb"] + }, + "application/vnd.ms-excel.sheet.macroenabled.12": { + "source": "iana", + "extensions": ["xlsm"] + }, + "application/vnd.ms-excel.template.macroenabled.12": { + "source": "iana", + "extensions": ["xltm"] + }, + "application/vnd.ms-fontobject": { + "source": "iana", + "compressible": true, + "extensions": ["eot"] + }, + "application/vnd.ms-htmlhelp": { + "source": "iana", + "extensions": ["chm"] + }, + "application/vnd.ms-ims": { + "source": "iana", + "extensions": ["ims"] + }, + "application/vnd.ms-lrm": { + "source": "iana", + "extensions": ["lrm"] + }, + "application/vnd.ms-office.activex+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.ms-officetheme": { + "source": "iana", + "extensions": ["thmx"] + }, + "application/vnd.ms-opentype": { + "source": "apache", + "compressible": true + }, + "application/vnd.ms-outlook": { + "compressible": false, + "extensions": ["msg"] + }, + "application/vnd.ms-package.obfuscated-opentype": { + "source": "apache" + }, + "application/vnd.ms-pki.seccat": { + "source": "apache", + "extensions": ["cat"] + }, + "application/vnd.ms-pki.stl": { + "source": "apache", + "extensions": ["stl"] + }, + "application/vnd.ms-playready.initiator+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.ms-powerpoint": { + "source": "iana", + "compressible": false, + "extensions": ["ppt","pps","pot"] + }, + "application/vnd.ms-powerpoint.addin.macroenabled.12": { + "source": "iana", + "extensions": ["ppam"] + }, + "application/vnd.ms-powerpoint.presentation.macroenabled.12": { + "source": "iana", + 
"extensions": ["pptm"] + }, + "application/vnd.ms-powerpoint.slide.macroenabled.12": { + "source": "iana", + "extensions": ["sldm"] + }, + "application/vnd.ms-powerpoint.slideshow.macroenabled.12": { + "source": "iana", + "extensions": ["ppsm"] + }, + "application/vnd.ms-powerpoint.template.macroenabled.12": { + "source": "iana", + "extensions": ["potm"] + }, + "application/vnd.ms-printdevicecapabilities+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.ms-printing.printticket+xml": { + "source": "apache", + "compressible": true + }, + "application/vnd.ms-printschematicket+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.ms-project": { + "source": "iana", + "extensions": ["mpp","mpt"] + }, + "application/vnd.ms-tnef": { + "source": "iana" + }, + "application/vnd.ms-windows.devicepairing": { + "source": "iana" + }, + "application/vnd.ms-windows.nwprinting.oob": { + "source": "iana" + }, + "application/vnd.ms-windows.printerpairing": { + "source": "iana" + }, + "application/vnd.ms-windows.wsd.oob": { + "source": "iana" + }, + "application/vnd.ms-wmdrm.lic-chlg-req": { + "source": "iana" + }, + "application/vnd.ms-wmdrm.lic-resp": { + "source": "iana" + }, + "application/vnd.ms-wmdrm.meter-chlg-req": { + "source": "iana" + }, + "application/vnd.ms-wmdrm.meter-resp": { + "source": "iana" + }, + "application/vnd.ms-word.document.macroenabled.12": { + "source": "iana", + "extensions": ["docm"] + }, + "application/vnd.ms-word.template.macroenabled.12": { + "source": "iana", + "extensions": ["dotm"] + }, + "application/vnd.ms-works": { + "source": "iana", + "extensions": ["wps","wks","wcm","wdb"] + }, + "application/vnd.ms-wpl": { + "source": "iana", + "extensions": ["wpl"] + }, + "application/vnd.ms-xpsdocument": { + "source": "iana", + "compressible": false, + "extensions": ["xps"] + }, + "application/vnd.msa-disk-image": { + "source": "iana" + }, + "application/vnd.mseq": { + "source": "iana", + "extensions": ["mseq"] + }, + "application/vnd.msign": { + "source": "iana" + }, + "application/vnd.multiad.creator": { + "source": "iana" + }, + "application/vnd.multiad.creator.cif": { + "source": "iana" + }, + "application/vnd.music-niff": { + "source": "iana" + }, + "application/vnd.musician": { + "source": "iana", + "extensions": ["mus"] + }, + "application/vnd.muvee.style": { + "source": "iana", + "extensions": ["msty"] + }, + "application/vnd.mynfc": { + "source": "iana", + "extensions": ["taglet"] + }, + "application/vnd.nacamar.ybrid+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.ncd.control": { + "source": "iana" + }, + "application/vnd.ncd.reference": { + "source": "iana" + }, + "application/vnd.nearst.inv+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.nebumind.line": { + "source": "iana" + }, + "application/vnd.nervana": { + "source": "iana" + }, + "application/vnd.netfpx": { + "source": "iana" + }, + "application/vnd.neurolanguage.nlu": { + "source": "iana", + "extensions": ["nlu"] + }, + "application/vnd.nimn": { + "source": "iana" + }, + "application/vnd.nintendo.nitro.rom": { + "source": "iana" + }, + "application/vnd.nintendo.snes.rom": { + "source": "iana" + }, + "application/vnd.nitf": { + "source": "iana", + "extensions": ["ntf","nitf"] + }, + "application/vnd.noblenet-directory": { + "source": "iana", + "extensions": ["nnd"] + }, + "application/vnd.noblenet-sealer": { + "source": "iana", + "extensions": ["nns"] + }, + "application/vnd.noblenet-web": { + "source": "iana", + 
"extensions": ["nnw"] + }, + "application/vnd.nokia.catalogs": { + "source": "iana" + }, + "application/vnd.nokia.conml+wbxml": { + "source": "iana" + }, + "application/vnd.nokia.conml+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.nokia.iptv.config+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.nokia.isds-radio-presets": { + "source": "iana" + }, + "application/vnd.nokia.landmark+wbxml": { + "source": "iana" + }, + "application/vnd.nokia.landmark+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.nokia.landmarkcollection+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.nokia.n-gage.ac+xml": { + "source": "iana", + "compressible": true, + "extensions": ["ac"] + }, + "application/vnd.nokia.n-gage.data": { + "source": "iana", + "extensions": ["ngdat"] + }, + "application/vnd.nokia.n-gage.symbian.install": { + "source": "iana", + "extensions": ["n-gage"] + }, + "application/vnd.nokia.ncd": { + "source": "iana" + }, + "application/vnd.nokia.pcd+wbxml": { + "source": "iana" + }, + "application/vnd.nokia.pcd+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.nokia.radio-preset": { + "source": "iana", + "extensions": ["rpst"] + }, + "application/vnd.nokia.radio-presets": { + "source": "iana", + "extensions": ["rpss"] + }, + "application/vnd.novadigm.edm": { + "source": "iana", + "extensions": ["edm"] + }, + "application/vnd.novadigm.edx": { + "source": "iana", + "extensions": ["edx"] + }, + "application/vnd.novadigm.ext": { + "source": "iana", + "extensions": ["ext"] + }, + "application/vnd.ntt-local.content-share": { + "source": "iana" + }, + "application/vnd.ntt-local.file-transfer": { + "source": "iana" + }, + "application/vnd.ntt-local.ogw_remote-access": { + "source": "iana" + }, + "application/vnd.ntt-local.sip-ta_remote": { + "source": "iana" + }, + "application/vnd.ntt-local.sip-ta_tcp_stream": { + "source": "iana" + }, + "application/vnd.oasis.opendocument.chart": { + "source": "iana", + "extensions": ["odc"] + }, + "application/vnd.oasis.opendocument.chart-template": { + "source": "iana", + "extensions": ["otc"] + }, + "application/vnd.oasis.opendocument.database": { + "source": "iana", + "extensions": ["odb"] + }, + "application/vnd.oasis.opendocument.formula": { + "source": "iana", + "extensions": ["odf"] + }, + "application/vnd.oasis.opendocument.formula-template": { + "source": "iana", + "extensions": ["odft"] + }, + "application/vnd.oasis.opendocument.graphics": { + "source": "iana", + "compressible": false, + "extensions": ["odg"] + }, + "application/vnd.oasis.opendocument.graphics-template": { + "source": "iana", + "extensions": ["otg"] + }, + "application/vnd.oasis.opendocument.image": { + "source": "iana", + "extensions": ["odi"] + }, + "application/vnd.oasis.opendocument.image-template": { + "source": "iana", + "extensions": ["oti"] + }, + "application/vnd.oasis.opendocument.presentation": { + "source": "iana", + "compressible": false, + "extensions": ["odp"] + }, + "application/vnd.oasis.opendocument.presentation-template": { + "source": "iana", + "extensions": ["otp"] + }, + "application/vnd.oasis.opendocument.spreadsheet": { + "source": "iana", + "compressible": false, + "extensions": ["ods"] + }, + "application/vnd.oasis.opendocument.spreadsheet-template": { + "source": "iana", + "extensions": ["ots"] + }, + "application/vnd.oasis.opendocument.text": { + "source": "iana", + "compressible": false, + "extensions": ["odt"] + }, + 
"application/vnd.oasis.opendocument.text-master": { + "source": "iana", + "extensions": ["odm"] + }, + "application/vnd.oasis.opendocument.text-template": { + "source": "iana", + "extensions": ["ott"] + }, + "application/vnd.oasis.opendocument.text-web": { + "source": "iana", + "extensions": ["oth"] + }, + "application/vnd.obn": { + "source": "iana" + }, + "application/vnd.ocf+cbor": { + "source": "iana" + }, + "application/vnd.oci.image.manifest.v1+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.oftn.l10n+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.oipf.contentaccessdownload+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oipf.contentaccessstreaming+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oipf.cspg-hexbinary": { + "source": "iana" + }, + "application/vnd.oipf.dae.svg+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oipf.dae.xhtml+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oipf.mippvcontrolmessage+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oipf.pae.gem": { + "source": "iana" + }, + "application/vnd.oipf.spdiscovery+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oipf.spdlist+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oipf.ueprofile+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oipf.userprofile+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.olpc-sugar": { + "source": "iana", + "extensions": ["xo"] + }, + "application/vnd.oma-scws-config": { + "source": "iana" + }, + "application/vnd.oma-scws-http-request": { + "source": "iana" + }, + "application/vnd.oma-scws-http-response": { + "source": "iana" + }, + "application/vnd.oma.bcast.associated-procedure-parameter+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.bcast.drm-trigger+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.bcast.imd+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.bcast.ltkm": { + "source": "iana" + }, + "application/vnd.oma.bcast.notification+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.bcast.provisioningtrigger": { + "source": "iana" + }, + "application/vnd.oma.bcast.sgboot": { + "source": "iana" + }, + "application/vnd.oma.bcast.sgdd+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.bcast.sgdu": { + "source": "iana" + }, + "application/vnd.oma.bcast.simple-symbol-container": { + "source": "iana" + }, + "application/vnd.oma.bcast.smartcard-trigger+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.bcast.sprov+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.bcast.stkm": { + "source": "iana" + }, + "application/vnd.oma.cab-address-book+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.cab-feature-handler+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.cab-pcc+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.cab-subs-invite+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.cab-user-prefs+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.dcd": { + "source": "iana" + }, + "application/vnd.oma.dcdc": { + "source": "iana" + }, + 
"application/vnd.oma.dd2+xml": { + "source": "iana", + "compressible": true, + "extensions": ["dd2"] + }, + "application/vnd.oma.drm.risd+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.group-usage-list+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.lwm2m+cbor": { + "source": "iana" + }, + "application/vnd.oma.lwm2m+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.lwm2m+tlv": { + "source": "iana" + }, + "application/vnd.oma.pal+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.poc.detailed-progress-report+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.poc.final-report+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.poc.groups+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.poc.invocation-descriptor+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.poc.optimized-progress-report+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.push": { + "source": "iana" + }, + "application/vnd.oma.scidm.messages+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.xcap-directory+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.omads-email+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/vnd.omads-file+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/vnd.omads-folder+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/vnd.omaloc-supl-init": { + "source": "iana" + }, + "application/vnd.onepager": { + "source": "iana" + }, + "application/vnd.onepagertamp": { + "source": "iana" + }, + "application/vnd.onepagertamx": { + "source": "iana" + }, + "application/vnd.onepagertat": { + "source": "iana" + }, + "application/vnd.onepagertatp": { + "source": "iana" + }, + "application/vnd.onepagertatx": { + "source": "iana" + }, + "application/vnd.openblox.game+xml": { + "source": "iana", + "compressible": true, + "extensions": ["obgx"] + }, + "application/vnd.openblox.game-binary": { + "source": "iana" + }, + "application/vnd.openeye.oeb": { + "source": "iana" + }, + "application/vnd.openofficeorg.extension": { + "source": "apache", + "extensions": ["oxt"] + }, + "application/vnd.openstreetmap.data+xml": { + "source": "iana", + "compressible": true, + "extensions": ["osm"] + }, + "application/vnd.opentimestamps.ots": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.custom-properties+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.customxmlproperties+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.drawing+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.drawingml.chart+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.drawingml.chartshapes+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.drawingml.diagramcolors+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.drawingml.diagramdata+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.drawingml.diagramlayout+xml": { + 
"source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.drawingml.diagramstyle+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.extended-properties+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.commentauthors+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.comments+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.handoutmaster+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.notesmaster+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.notesslide+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.presentation": { + "source": "iana", + "compressible": false, + "extensions": ["pptx"] + }, + "application/vnd.openxmlformats-officedocument.presentationml.presentation.main+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.presprops+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.slide": { + "source": "iana", + "extensions": ["sldx"] + }, + "application/vnd.openxmlformats-officedocument.presentationml.slide+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.slidelayout+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.slidemaster+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.slideshow": { + "source": "iana", + "extensions": ["ppsx"] + }, + "application/vnd.openxmlformats-officedocument.presentationml.slideshow.main+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.slideupdateinfo+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.tablestyles+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.tags+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.template": { + "source": "iana", + "extensions": ["potx"] + }, + "application/vnd.openxmlformats-officedocument.presentationml.template.main+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.viewprops+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.calcchain+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.chartsheet+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.comments+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.connections+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.dialogsheet+xml": { + 
"source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.externallink+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.pivotcachedefinition+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.pivotcacherecords+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.pivottable+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.querytable+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.revisionheaders+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.revisionlog+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.sharedstrings+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet": { + "source": "iana", + "compressible": false, + "extensions": ["xlsx"] + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet.main+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.sheetmetadata+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.styles+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.table+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.tablesinglecells+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.template": { + "source": "iana", + "extensions": ["xltx"] + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.template.main+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.usernames+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.volatiledependencies+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.worksheet+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.theme+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.themeoverride+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.vmldrawing": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.comments+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.document": { + "source": "iana", + "compressible": false, + "extensions": ["docx"] + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.document.glossary+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.document.main+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.endnotes+xml": { + "source": "iana", + 
"compressible": true + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.fonttable+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.footer+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.footnotes+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.numbering+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.settings+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.styles+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.template": { + "source": "iana", + "extensions": ["dotx"] + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.template.main+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.websettings+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-package.core-properties+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-package.digital-signature-xmlsignature+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-package.relationships+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oracle.resource+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.orange.indata": { + "source": "iana" + }, + "application/vnd.osa.netdeploy": { + "source": "iana" + }, + "application/vnd.osgeo.mapguide.package": { + "source": "iana", + "extensions": ["mgp"] + }, + "application/vnd.osgi.bundle": { + "source": "iana" + }, + "application/vnd.osgi.dp": { + "source": "iana", + "extensions": ["dp"] + }, + "application/vnd.osgi.subsystem": { + "source": "iana", + "extensions": ["esa"] + }, + "application/vnd.otps.ct-kip+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oxli.countgraph": { + "source": "iana" + }, + "application/vnd.pagerduty+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.palm": { + "source": "iana", + "extensions": ["pdb","pqa","oprc"] + }, + "application/vnd.panoply": { + "source": "iana" + }, + "application/vnd.paos.xml": { + "source": "iana" + }, + "application/vnd.patentdive": { + "source": "iana" + }, + "application/vnd.patientecommsdoc": { + "source": "iana" + }, + "application/vnd.pawaafile": { + "source": "iana", + "extensions": ["paw"] + }, + "application/vnd.pcos": { + "source": "iana" + }, + "application/vnd.pg.format": { + "source": "iana", + "extensions": ["str"] + }, + "application/vnd.pg.osasli": { + "source": "iana", + "extensions": ["ei6"] + }, + "application/vnd.piaccess.application-licence": { + "source": "iana" + }, + "application/vnd.picsel": { + "source": "iana", + "extensions": ["efif"] + }, + "application/vnd.pmi.widget": { + "source": "iana", + "extensions": ["wg"] + }, + "application/vnd.poc.group-advertisement+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.pocketlearn": { + "source": "iana", + "extensions": ["plf"] + }, + "application/vnd.powerbuilder6": { + "source": "iana", + "extensions": ["pbd"] + }, + "application/vnd.powerbuilder6-s": { + "source": "iana" + }, + 
"application/vnd.powerbuilder7": { + "source": "iana" + }, + "application/vnd.powerbuilder7-s": { + "source": "iana" + }, + "application/vnd.powerbuilder75": { + "source": "iana" + }, + "application/vnd.powerbuilder75-s": { + "source": "iana" + }, + "application/vnd.preminet": { + "source": "iana" + }, + "application/vnd.previewsystems.box": { + "source": "iana", + "extensions": ["box"] + }, + "application/vnd.proteus.magazine": { + "source": "iana", + "extensions": ["mgz"] + }, + "application/vnd.psfs": { + "source": "iana" + }, + "application/vnd.publishare-delta-tree": { + "source": "iana", + "extensions": ["qps"] + }, + "application/vnd.pvi.ptid1": { + "source": "iana", + "extensions": ["ptid"] + }, + "application/vnd.pwg-multiplexed": { + "source": "iana" + }, + "application/vnd.pwg-xhtml-print+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.qualcomm.brew-app-res": { + "source": "iana" + }, + "application/vnd.quarantainenet": { + "source": "iana" + }, + "application/vnd.quark.quarkxpress": { + "source": "iana", + "extensions": ["qxd","qxt","qwd","qwt","qxl","qxb"] + }, + "application/vnd.quobject-quoxdocument": { + "source": "iana" + }, + "application/vnd.radisys.moml+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-audit+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-audit-conf+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-audit-conn+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-audit-dialog+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-audit-stream+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-conf+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-dialog+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-dialog-base+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-dialog-fax-detect+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-dialog-fax-sendrecv+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-dialog-group+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-dialog-speech+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-dialog-transform+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.rainstor.data": { + "source": "iana" + }, + "application/vnd.rapid": { + "source": "iana" + }, + "application/vnd.rar": { + "source": "iana", + "extensions": ["rar"] + }, + "application/vnd.realvnc.bed": { + "source": "iana", + "extensions": ["bed"] + }, + "application/vnd.recordare.musicxml": { + "source": "iana", + "extensions": ["mxl"] + }, + "application/vnd.recordare.musicxml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["musicxml"] + }, + "application/vnd.renlearn.rlprint": { + "source": "iana" + }, + "application/vnd.resilient.logic": { + "source": "iana" + }, + "application/vnd.restful+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.rig.cryptonote": { + "source": "iana", + "extensions": ["cryptonote"] + }, + "application/vnd.rim.cod": { + "source": "apache", + "extensions": ["cod"] + }, + 
"application/vnd.rn-realmedia": { + "source": "apache", + "extensions": ["rm"] + }, + "application/vnd.rn-realmedia-vbr": { + "source": "apache", + "extensions": ["rmvb"] + }, + "application/vnd.route66.link66+xml": { + "source": "iana", + "compressible": true, + "extensions": ["link66"] + }, + "application/vnd.rs-274x": { + "source": "iana" + }, + "application/vnd.ruckus.download": { + "source": "iana" + }, + "application/vnd.s3sms": { + "source": "iana" + }, + "application/vnd.sailingtracker.track": { + "source": "iana", + "extensions": ["st"] + }, + "application/vnd.sar": { + "source": "iana" + }, + "application/vnd.sbm.cid": { + "source": "iana" + }, + "application/vnd.sbm.mid2": { + "source": "iana" + }, + "application/vnd.scribus": { + "source": "iana" + }, + "application/vnd.sealed.3df": { + "source": "iana" + }, + "application/vnd.sealed.csf": { + "source": "iana" + }, + "application/vnd.sealed.doc": { + "source": "iana" + }, + "application/vnd.sealed.eml": { + "source": "iana" + }, + "application/vnd.sealed.mht": { + "source": "iana" + }, + "application/vnd.sealed.net": { + "source": "iana" + }, + "application/vnd.sealed.ppt": { + "source": "iana" + }, + "application/vnd.sealed.tiff": { + "source": "iana" + }, + "application/vnd.sealed.xls": { + "source": "iana" + }, + "application/vnd.sealedmedia.softseal.html": { + "source": "iana" + }, + "application/vnd.sealedmedia.softseal.pdf": { + "source": "iana" + }, + "application/vnd.seemail": { + "source": "iana", + "extensions": ["see"] + }, + "application/vnd.seis+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.sema": { + "source": "iana", + "extensions": ["sema"] + }, + "application/vnd.semd": { + "source": "iana", + "extensions": ["semd"] + }, + "application/vnd.semf": { + "source": "iana", + "extensions": ["semf"] + }, + "application/vnd.shade-save-file": { + "source": "iana" + }, + "application/vnd.shana.informed.formdata": { + "source": "iana", + "extensions": ["ifm"] + }, + "application/vnd.shana.informed.formtemplate": { + "source": "iana", + "extensions": ["itp"] + }, + "application/vnd.shana.informed.interchange": { + "source": "iana", + "extensions": ["iif"] + }, + "application/vnd.shana.informed.package": { + "source": "iana", + "extensions": ["ipk"] + }, + "application/vnd.shootproof+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.shopkick+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.shp": { + "source": "iana" + }, + "application/vnd.shx": { + "source": "iana" + }, + "application/vnd.sigrok.session": { + "source": "iana" + }, + "application/vnd.simtech-mindmapper": { + "source": "iana", + "extensions": ["twd","twds"] + }, + "application/vnd.siren+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.smaf": { + "source": "iana", + "extensions": ["mmf"] + }, + "application/vnd.smart.notebook": { + "source": "iana" + }, + "application/vnd.smart.teacher": { + "source": "iana", + "extensions": ["teacher"] + }, + "application/vnd.snesdev-page-table": { + "source": "iana" + }, + "application/vnd.software602.filler.form+xml": { + "source": "iana", + "compressible": true, + "extensions": ["fo"] + }, + "application/vnd.software602.filler.form-xml-zip": { + "source": "iana" + }, + "application/vnd.solent.sdkm+xml": { + "source": "iana", + "compressible": true, + "extensions": ["sdkm","sdkd"] + }, + "application/vnd.spotfire.dxp": { + "source": "iana", + "extensions": ["dxp"] + }, + "application/vnd.spotfire.sfs": { + 
"source": "iana", + "extensions": ["sfs"] + }, + "application/vnd.sqlite3": { + "source": "iana" + }, + "application/vnd.sss-cod": { + "source": "iana" + }, + "application/vnd.sss-dtf": { + "source": "iana" + }, + "application/vnd.sss-ntf": { + "source": "iana" + }, + "application/vnd.stardivision.calc": { + "source": "apache", + "extensions": ["sdc"] + }, + "application/vnd.stardivision.draw": { + "source": "apache", + "extensions": ["sda"] + }, + "application/vnd.stardivision.impress": { + "source": "apache", + "extensions": ["sdd"] + }, + "application/vnd.stardivision.math": { + "source": "apache", + "extensions": ["smf"] + }, + "application/vnd.stardivision.writer": { + "source": "apache", + "extensions": ["sdw","vor"] + }, + "application/vnd.stardivision.writer-global": { + "source": "apache", + "extensions": ["sgl"] + }, + "application/vnd.stepmania.package": { + "source": "iana", + "extensions": ["smzip"] + }, + "application/vnd.stepmania.stepchart": { + "source": "iana", + "extensions": ["sm"] + }, + "application/vnd.street-stream": { + "source": "iana" + }, + "application/vnd.sun.wadl+xml": { + "source": "iana", + "compressible": true, + "extensions": ["wadl"] + }, + "application/vnd.sun.xml.calc": { + "source": "apache", + "extensions": ["sxc"] + }, + "application/vnd.sun.xml.calc.template": { + "source": "apache", + "extensions": ["stc"] + }, + "application/vnd.sun.xml.draw": { + "source": "apache", + "extensions": ["sxd"] + }, + "application/vnd.sun.xml.draw.template": { + "source": "apache", + "extensions": ["std"] + }, + "application/vnd.sun.xml.impress": { + "source": "apache", + "extensions": ["sxi"] + }, + "application/vnd.sun.xml.impress.template": { + "source": "apache", + "extensions": ["sti"] + }, + "application/vnd.sun.xml.math": { + "source": "apache", + "extensions": ["sxm"] + }, + "application/vnd.sun.xml.writer": { + "source": "apache", + "extensions": ["sxw"] + }, + "application/vnd.sun.xml.writer.global": { + "source": "apache", + "extensions": ["sxg"] + }, + "application/vnd.sun.xml.writer.template": { + "source": "apache", + "extensions": ["stw"] + }, + "application/vnd.sus-calendar": { + "source": "iana", + "extensions": ["sus","susp"] + }, + "application/vnd.svd": { + "source": "iana", + "extensions": ["svd"] + }, + "application/vnd.swiftview-ics": { + "source": "iana" + }, + "application/vnd.sycle+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.syft+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.symbian.install": { + "source": "apache", + "extensions": ["sis","sisx"] + }, + "application/vnd.syncml+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true, + "extensions": ["xsm"] + }, + "application/vnd.syncml.dm+wbxml": { + "source": "iana", + "charset": "UTF-8", + "extensions": ["bdm"] + }, + "application/vnd.syncml.dm+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true, + "extensions": ["xdm"] + }, + "application/vnd.syncml.dm.notification": { + "source": "iana" + }, + "application/vnd.syncml.dmddf+wbxml": { + "source": "iana" + }, + "application/vnd.syncml.dmddf+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true, + "extensions": ["ddf"] + }, + "application/vnd.syncml.dmtnds+wbxml": { + "source": "iana" + }, + "application/vnd.syncml.dmtnds+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/vnd.syncml.ds.notification": { + "source": "iana" + }, + "application/vnd.tableschema+json": { + "source": 
"iana", + "compressible": true + }, + "application/vnd.tao.intent-module-archive": { + "source": "iana", + "extensions": ["tao"] + }, + "application/vnd.tcpdump.pcap": { + "source": "iana", + "extensions": ["pcap","cap","dmp"] + }, + "application/vnd.think-cell.ppttc+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.tmd.mediaflex.api+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.tml": { + "source": "iana" + }, + "application/vnd.tmobile-livetv": { + "source": "iana", + "extensions": ["tmo"] + }, + "application/vnd.tri.onesource": { + "source": "iana" + }, + "application/vnd.trid.tpt": { + "source": "iana", + "extensions": ["tpt"] + }, + "application/vnd.triscape.mxs": { + "source": "iana", + "extensions": ["mxs"] + }, + "application/vnd.trueapp": { + "source": "iana", + "extensions": ["tra"] + }, + "application/vnd.truedoc": { + "source": "iana" + }, + "application/vnd.ubisoft.webplayer": { + "source": "iana" + }, + "application/vnd.ufdl": { + "source": "iana", + "extensions": ["ufd","ufdl"] + }, + "application/vnd.uiq.theme": { + "source": "iana", + "extensions": ["utz"] + }, + "application/vnd.umajin": { + "source": "iana", + "extensions": ["umj"] + }, + "application/vnd.unity": { + "source": "iana", + "extensions": ["unityweb"] + }, + "application/vnd.uoml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["uoml"] + }, + "application/vnd.uplanet.alert": { + "source": "iana" + }, + "application/vnd.uplanet.alert-wbxml": { + "source": "iana" + }, + "application/vnd.uplanet.bearer-choice": { + "source": "iana" + }, + "application/vnd.uplanet.bearer-choice-wbxml": { + "source": "iana" + }, + "application/vnd.uplanet.cacheop": { + "source": "iana" + }, + "application/vnd.uplanet.cacheop-wbxml": { + "source": "iana" + }, + "application/vnd.uplanet.channel": { + "source": "iana" + }, + "application/vnd.uplanet.channel-wbxml": { + "source": "iana" + }, + "application/vnd.uplanet.list": { + "source": "iana" + }, + "application/vnd.uplanet.list-wbxml": { + "source": "iana" + }, + "application/vnd.uplanet.listcmd": { + "source": "iana" + }, + "application/vnd.uplanet.listcmd-wbxml": { + "source": "iana" + }, + "application/vnd.uplanet.signal": { + "source": "iana" + }, + "application/vnd.uri-map": { + "source": "iana" + }, + "application/vnd.valve.source.material": { + "source": "iana" + }, + "application/vnd.vcx": { + "source": "iana", + "extensions": ["vcx"] + }, + "application/vnd.vd-study": { + "source": "iana" + }, + "application/vnd.vectorworks": { + "source": "iana" + }, + "application/vnd.vel+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.verimatrix.vcas": { + "source": "iana" + }, + "application/vnd.veritone.aion+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.veryant.thin": { + "source": "iana" + }, + "application/vnd.ves.encrypted": { + "source": "iana" + }, + "application/vnd.vidsoft.vidconference": { + "source": "iana" + }, + "application/vnd.visio": { + "source": "iana", + "extensions": ["vsd","vst","vss","vsw"] + }, + "application/vnd.visionary": { + "source": "iana", + "extensions": ["vis"] + }, + "application/vnd.vividence.scriptfile": { + "source": "iana" + }, + "application/vnd.vsf": { + "source": "iana", + "extensions": ["vsf"] + }, + "application/vnd.wap.sic": { + "source": "iana" + }, + "application/vnd.wap.slc": { + "source": "iana" + }, + "application/vnd.wap.wbxml": { + "source": "iana", + "charset": "UTF-8", + "extensions": ["wbxml"] + }, + 
"application/vnd.wap.wmlc": { + "source": "iana", + "extensions": ["wmlc"] + }, + "application/vnd.wap.wmlscriptc": { + "source": "iana", + "extensions": ["wmlsc"] + }, + "application/vnd.webturbo": { + "source": "iana", + "extensions": ["wtb"] + }, + "application/vnd.wfa.dpp": { + "source": "iana" + }, + "application/vnd.wfa.p2p": { + "source": "iana" + }, + "application/vnd.wfa.wsc": { + "source": "iana" + }, + "application/vnd.windows.devicepairing": { + "source": "iana" + }, + "application/vnd.wmc": { + "source": "iana" + }, + "application/vnd.wmf.bootstrap": { + "source": "iana" + }, + "application/vnd.wolfram.mathematica": { + "source": "iana" + }, + "application/vnd.wolfram.mathematica.package": { + "source": "iana" + }, + "application/vnd.wolfram.player": { + "source": "iana", + "extensions": ["nbp"] + }, + "application/vnd.wordperfect": { + "source": "iana", + "extensions": ["wpd"] + }, + "application/vnd.wqd": { + "source": "iana", + "extensions": ["wqd"] + }, + "application/vnd.wrq-hp3000-labelled": { + "source": "iana" + }, + "application/vnd.wt.stf": { + "source": "iana", + "extensions": ["stf"] + }, + "application/vnd.wv.csp+wbxml": { + "source": "iana" + }, + "application/vnd.wv.csp+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.wv.ssp+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.xacml+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.xara": { + "source": "iana", + "extensions": ["xar"] + }, + "application/vnd.xfdl": { + "source": "iana", + "extensions": ["xfdl"] + }, + "application/vnd.xfdl.webform": { + "source": "iana" + }, + "application/vnd.xmi+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.xmpie.cpkg": { + "source": "iana" + }, + "application/vnd.xmpie.dpkg": { + "source": "iana" + }, + "application/vnd.xmpie.plan": { + "source": "iana" + }, + "application/vnd.xmpie.ppkg": { + "source": "iana" + }, + "application/vnd.xmpie.xlim": { + "source": "iana" + }, + "application/vnd.yamaha.hv-dic": { + "source": "iana", + "extensions": ["hvd"] + }, + "application/vnd.yamaha.hv-script": { + "source": "iana", + "extensions": ["hvs"] + }, + "application/vnd.yamaha.hv-voice": { + "source": "iana", + "extensions": ["hvp"] + }, + "application/vnd.yamaha.openscoreformat": { + "source": "iana", + "extensions": ["osf"] + }, + "application/vnd.yamaha.openscoreformat.osfpvg+xml": { + "source": "iana", + "compressible": true, + "extensions": ["osfpvg"] + }, + "application/vnd.yamaha.remote-setup": { + "source": "iana" + }, + "application/vnd.yamaha.smaf-audio": { + "source": "iana", + "extensions": ["saf"] + }, + "application/vnd.yamaha.smaf-phrase": { + "source": "iana", + "extensions": ["spf"] + }, + "application/vnd.yamaha.through-ngn": { + "source": "iana" + }, + "application/vnd.yamaha.tunnel-udpencap": { + "source": "iana" + }, + "application/vnd.yaoweme": { + "source": "iana" + }, + "application/vnd.yellowriver-custom-menu": { + "source": "iana", + "extensions": ["cmp"] + }, + "application/vnd.youtube.yt": { + "source": "iana" + }, + "application/vnd.zul": { + "source": "iana", + "extensions": ["zir","zirz"] + }, + "application/vnd.zzazz.deck+xml": { + "source": "iana", + "compressible": true, + "extensions": ["zaz"] + }, + "application/voicexml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["vxml"] + }, + "application/voucher-cms+json": { + "source": "iana", + "compressible": true + }, + "application/vq-rtcpxr": { + "source": "iana" + }, + 
"application/wasm": { + "source": "iana", + "compressible": true, + "extensions": ["wasm"] + }, + "application/watcherinfo+xml": { + "source": "iana", + "compressible": true, + "extensions": ["wif"] + }, + "application/webpush-options+json": { + "source": "iana", + "compressible": true + }, + "application/whoispp-query": { + "source": "iana" + }, + "application/whoispp-response": { + "source": "iana" + }, + "application/widget": { + "source": "iana", + "extensions": ["wgt"] + }, + "application/winhlp": { + "source": "apache", + "extensions": ["hlp"] + }, + "application/wita": { + "source": "iana" + }, + "application/wordperfect5.1": { + "source": "iana" + }, + "application/wsdl+xml": { + "source": "iana", + "compressible": true, + "extensions": ["wsdl"] + }, + "application/wspolicy+xml": { + "source": "iana", + "compressible": true, + "extensions": ["wspolicy"] + }, + "application/x-7z-compressed": { + "source": "apache", + "compressible": false, + "extensions": ["7z"] + }, + "application/x-abiword": { + "source": "apache", + "extensions": ["abw"] + }, + "application/x-ace-compressed": { + "source": "apache", + "extensions": ["ace"] + }, + "application/x-amf": { + "source": "apache" + }, + "application/x-apple-diskimage": { + "source": "apache", + "extensions": ["dmg"] + }, + "application/x-arj": { + "compressible": false, + "extensions": ["arj"] + }, + "application/x-authorware-bin": { + "source": "apache", + "extensions": ["aab","x32","u32","vox"] + }, + "application/x-authorware-map": { + "source": "apache", + "extensions": ["aam"] + }, + "application/x-authorware-seg": { + "source": "apache", + "extensions": ["aas"] + }, + "application/x-bcpio": { + "source": "apache", + "extensions": ["bcpio"] + }, + "application/x-bdoc": { + "compressible": false, + "extensions": ["bdoc"] + }, + "application/x-bittorrent": { + "source": "apache", + "extensions": ["torrent"] + }, + "application/x-blorb": { + "source": "apache", + "extensions": ["blb","blorb"] + }, + "application/x-bzip": { + "source": "apache", + "compressible": false, + "extensions": ["bz"] + }, + "application/x-bzip2": { + "source": "apache", + "compressible": false, + "extensions": ["bz2","boz"] + }, + "application/x-cbr": { + "source": "apache", + "extensions": ["cbr","cba","cbt","cbz","cb7"] + }, + "application/x-cdlink": { + "source": "apache", + "extensions": ["vcd"] + }, + "application/x-cfs-compressed": { + "source": "apache", + "extensions": ["cfs"] + }, + "application/x-chat": { + "source": "apache", + "extensions": ["chat"] + }, + "application/x-chess-pgn": { + "source": "apache", + "extensions": ["pgn"] + }, + "application/x-chrome-extension": { + "extensions": ["crx"] + }, + "application/x-cocoa": { + "source": "nginx", + "extensions": ["cco"] + }, + "application/x-compress": { + "source": "apache" + }, + "application/x-conference": { + "source": "apache", + "extensions": ["nsc"] + }, + "application/x-cpio": { + "source": "apache", + "extensions": ["cpio"] + }, + "application/x-csh": { + "source": "apache", + "extensions": ["csh"] + }, + "application/x-deb": { + "compressible": false + }, + "application/x-debian-package": { + "source": "apache", + "extensions": ["deb","udeb"] + }, + "application/x-dgc-compressed": { + "source": "apache", + "extensions": ["dgc"] + }, + "application/x-director": { + "source": "apache", + "extensions": ["dir","dcr","dxr","cst","cct","cxt","w3d","fgd","swa"] + }, + "application/x-doom": { + "source": "apache", + "extensions": ["wad"] + }, + "application/x-dtbncx+xml": { + "source": "apache", 
+ "compressible": true, + "extensions": ["ncx"] + }, + "application/x-dtbook+xml": { + "source": "apache", + "compressible": true, + "extensions": ["dtb"] + }, + "application/x-dtbresource+xml": { + "source": "apache", + "compressible": true, + "extensions": ["res"] + }, + "application/x-dvi": { + "source": "apache", + "compressible": false, + "extensions": ["dvi"] + }, + "application/x-envoy": { + "source": "apache", + "extensions": ["evy"] + }, + "application/x-eva": { + "source": "apache", + "extensions": ["eva"] + }, + "application/x-font-bdf": { + "source": "apache", + "extensions": ["bdf"] + }, + "application/x-font-dos": { + "source": "apache" + }, + "application/x-font-framemaker": { + "source": "apache" + }, + "application/x-font-ghostscript": { + "source": "apache", + "extensions": ["gsf"] + }, + "application/x-font-libgrx": { + "source": "apache" + }, + "application/x-font-linux-psf": { + "source": "apache", + "extensions": ["psf"] + }, + "application/x-font-pcf": { + "source": "apache", + "extensions": ["pcf"] + }, + "application/x-font-snf": { + "source": "apache", + "extensions": ["snf"] + }, + "application/x-font-speedo": { + "source": "apache" + }, + "application/x-font-sunos-news": { + "source": "apache" + }, + "application/x-font-type1": { + "source": "apache", + "extensions": ["pfa","pfb","pfm","afm"] + }, + "application/x-font-vfont": { + "source": "apache" + }, + "application/x-freearc": { + "source": "apache", + "extensions": ["arc"] + }, + "application/x-futuresplash": { + "source": "apache", + "extensions": ["spl"] + }, + "application/x-gca-compressed": { + "source": "apache", + "extensions": ["gca"] + }, + "application/x-glulx": { + "source": "apache", + "extensions": ["ulx"] + }, + "application/x-gnumeric": { + "source": "apache", + "extensions": ["gnumeric"] + }, + "application/x-gramps-xml": { + "source": "apache", + "extensions": ["gramps"] + }, + "application/x-gtar": { + "source": "apache", + "extensions": ["gtar"] + }, + "application/x-gzip": { + "source": "apache" + }, + "application/x-hdf": { + "source": "apache", + "extensions": ["hdf"] + }, + "application/x-httpd-php": { + "compressible": true, + "extensions": ["php"] + }, + "application/x-install-instructions": { + "source": "apache", + "extensions": ["install"] + }, + "application/x-iso9660-image": { + "source": "apache", + "extensions": ["iso"] + }, + "application/x-iwork-keynote-sffkey": { + "extensions": ["key"] + }, + "application/x-iwork-numbers-sffnumbers": { + "extensions": ["numbers"] + }, + "application/x-iwork-pages-sffpages": { + "extensions": ["pages"] + }, + "application/x-java-archive-diff": { + "source": "nginx", + "extensions": ["jardiff"] + }, + "application/x-java-jnlp-file": { + "source": "apache", + "compressible": false, + "extensions": ["jnlp"] + }, + "application/x-javascript": { + "compressible": true + }, + "application/x-keepass2": { + "extensions": ["kdbx"] + }, + "application/x-latex": { + "source": "apache", + "compressible": false, + "extensions": ["latex"] + }, + "application/x-lua-bytecode": { + "extensions": ["luac"] + }, + "application/x-lzh-compressed": { + "source": "apache", + "extensions": ["lzh","lha"] + }, + "application/x-makeself": { + "source": "nginx", + "extensions": ["run"] + }, + "application/x-mie": { + "source": "apache", + "extensions": ["mie"] + }, + "application/x-mobipocket-ebook": { + "source": "apache", + "extensions": ["prc","mobi"] + }, + "application/x-mpegurl": { + "compressible": false + }, + "application/x-ms-application": { + "source": 
"apache", + "extensions": ["application"] + }, + "application/x-ms-shortcut": { + "source": "apache", + "extensions": ["lnk"] + }, + "application/x-ms-wmd": { + "source": "apache", + "extensions": ["wmd"] + }, + "application/x-ms-wmz": { + "source": "apache", + "extensions": ["wmz"] + }, + "application/x-ms-xbap": { + "source": "apache", + "extensions": ["xbap"] + }, + "application/x-msaccess": { + "source": "apache", + "extensions": ["mdb"] + }, + "application/x-msbinder": { + "source": "apache", + "extensions": ["obd"] + }, + "application/x-mscardfile": { + "source": "apache", + "extensions": ["crd"] + }, + "application/x-msclip": { + "source": "apache", + "extensions": ["clp"] + }, + "application/x-msdos-program": { + "extensions": ["exe"] + }, + "application/x-msdownload": { + "source": "apache", + "extensions": ["exe","dll","com","bat","msi"] + }, + "application/x-msmediaview": { + "source": "apache", + "extensions": ["mvb","m13","m14"] + }, + "application/x-msmetafile": { + "source": "apache", + "extensions": ["wmf","wmz","emf","emz"] + }, + "application/x-msmoney": { + "source": "apache", + "extensions": ["mny"] + }, + "application/x-mspublisher": { + "source": "apache", + "extensions": ["pub"] + }, + "application/x-msschedule": { + "source": "apache", + "extensions": ["scd"] + }, + "application/x-msterminal": { + "source": "apache", + "extensions": ["trm"] + }, + "application/x-mswrite": { + "source": "apache", + "extensions": ["wri"] + }, + "application/x-netcdf": { + "source": "apache", + "extensions": ["nc","cdf"] + }, + "application/x-ns-proxy-autoconfig": { + "compressible": true, + "extensions": ["pac"] + }, + "application/x-nzb": { + "source": "apache", + "extensions": ["nzb"] + }, + "application/x-perl": { + "source": "nginx", + "extensions": ["pl","pm"] + }, + "application/x-pilot": { + "source": "nginx", + "extensions": ["prc","pdb"] + }, + "application/x-pkcs12": { + "source": "apache", + "compressible": false, + "extensions": ["p12","pfx"] + }, + "application/x-pkcs7-certificates": { + "source": "apache", + "extensions": ["p7b","spc"] + }, + "application/x-pkcs7-certreqresp": { + "source": "apache", + "extensions": ["p7r"] + }, + "application/x-pki-message": { + "source": "iana" + }, + "application/x-rar-compressed": { + "source": "apache", + "compressible": false, + "extensions": ["rar"] + }, + "application/x-redhat-package-manager": { + "source": "nginx", + "extensions": ["rpm"] + }, + "application/x-research-info-systems": { + "source": "apache", + "extensions": ["ris"] + }, + "application/x-sea": { + "source": "nginx", + "extensions": ["sea"] + }, + "application/x-sh": { + "source": "apache", + "compressible": true, + "extensions": ["sh"] + }, + "application/x-shar": { + "source": "apache", + "extensions": ["shar"] + }, + "application/x-shockwave-flash": { + "source": "apache", + "compressible": false, + "extensions": ["swf"] + }, + "application/x-silverlight-app": { + "source": "apache", + "extensions": ["xap"] + }, + "application/x-sql": { + "source": "apache", + "extensions": ["sql"] + }, + "application/x-stuffit": { + "source": "apache", + "compressible": false, + "extensions": ["sit"] + }, + "application/x-stuffitx": { + "source": "apache", + "extensions": ["sitx"] + }, + "application/x-subrip": { + "source": "apache", + "extensions": ["srt"] + }, + "application/x-sv4cpio": { + "source": "apache", + "extensions": ["sv4cpio"] + }, + "application/x-sv4crc": { + "source": "apache", + "extensions": ["sv4crc"] + }, + "application/x-t3vm-image": { + "source": 
"apache", + "extensions": ["t3"] + }, + "application/x-tads": { + "source": "apache", + "extensions": ["gam"] + }, + "application/x-tar": { + "source": "apache", + "compressible": true, + "extensions": ["tar"] + }, + "application/x-tcl": { + "source": "apache", + "extensions": ["tcl","tk"] + }, + "application/x-tex": { + "source": "apache", + "extensions": ["tex"] + }, + "application/x-tex-tfm": { + "source": "apache", + "extensions": ["tfm"] + }, + "application/x-texinfo": { + "source": "apache", + "extensions": ["texinfo","texi"] + }, + "application/x-tgif": { + "source": "apache", + "extensions": ["obj"] + }, + "application/x-ustar": { + "source": "apache", + "extensions": ["ustar"] + }, + "application/x-virtualbox-hdd": { + "compressible": true, + "extensions": ["hdd"] + }, + "application/x-virtualbox-ova": { + "compressible": true, + "extensions": ["ova"] + }, + "application/x-virtualbox-ovf": { + "compressible": true, + "extensions": ["ovf"] + }, + "application/x-virtualbox-vbox": { + "compressible": true, + "extensions": ["vbox"] + }, + "application/x-virtualbox-vbox-extpack": { + "compressible": false, + "extensions": ["vbox-extpack"] + }, + "application/x-virtualbox-vdi": { + "compressible": true, + "extensions": ["vdi"] + }, + "application/x-virtualbox-vhd": { + "compressible": true, + "extensions": ["vhd"] + }, + "application/x-virtualbox-vmdk": { + "compressible": true, + "extensions": ["vmdk"] + }, + "application/x-wais-source": { + "source": "apache", + "extensions": ["src"] + }, + "application/x-web-app-manifest+json": { + "compressible": true, + "extensions": ["webapp"] + }, + "application/x-www-form-urlencoded": { + "source": "iana", + "compressible": true + }, + "application/x-x509-ca-cert": { + "source": "iana", + "extensions": ["der","crt","pem"] + }, + "application/x-x509-ca-ra-cert": { + "source": "iana" + }, + "application/x-x509-next-ca-cert": { + "source": "iana" + }, + "application/x-xfig": { + "source": "apache", + "extensions": ["fig"] + }, + "application/x-xliff+xml": { + "source": "apache", + "compressible": true, + "extensions": ["xlf"] + }, + "application/x-xpinstall": { + "source": "apache", + "compressible": false, + "extensions": ["xpi"] + }, + "application/x-xz": { + "source": "apache", + "extensions": ["xz"] + }, + "application/x-zmachine": { + "source": "apache", + "extensions": ["z1","z2","z3","z4","z5","z6","z7","z8"] + }, + "application/x400-bp": { + "source": "iana" + }, + "application/xacml+xml": { + "source": "iana", + "compressible": true + }, + "application/xaml+xml": { + "source": "apache", + "compressible": true, + "extensions": ["xaml"] + }, + "application/xcap-att+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xav"] + }, + "application/xcap-caps+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xca"] + }, + "application/xcap-diff+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xdf"] + }, + "application/xcap-el+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xel"] + }, + "application/xcap-error+xml": { + "source": "iana", + "compressible": true + }, + "application/xcap-ns+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xns"] + }, + "application/xcon-conference-info+xml": { + "source": "iana", + "compressible": true + }, + "application/xcon-conference-info-diff+xml": { + "source": "iana", + "compressible": true + }, + "application/xenc+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xenc"] + }, + 
"application/xhtml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xhtml","xht"] + }, + "application/xhtml-voice+xml": { + "source": "apache", + "compressible": true + }, + "application/xliff+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xlf"] + }, + "application/xml": { + "source": "iana", + "compressible": true, + "extensions": ["xml","xsl","xsd","rng"] + }, + "application/xml-dtd": { + "source": "iana", + "compressible": true, + "extensions": ["dtd"] + }, + "application/xml-external-parsed-entity": { + "source": "iana" + }, + "application/xml-patch+xml": { + "source": "iana", + "compressible": true + }, + "application/xmpp+xml": { + "source": "iana", + "compressible": true + }, + "application/xop+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xop"] + }, + "application/xproc+xml": { + "source": "apache", + "compressible": true, + "extensions": ["xpl"] + }, + "application/xslt+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xsl","xslt"] + }, + "application/xspf+xml": { + "source": "apache", + "compressible": true, + "extensions": ["xspf"] + }, + "application/xv+xml": { + "source": "iana", + "compressible": true, + "extensions": ["mxml","xhvml","xvml","xvm"] + }, + "application/yang": { + "source": "iana", + "extensions": ["yang"] + }, + "application/yang-data+json": { + "source": "iana", + "compressible": true + }, + "application/yang-data+xml": { + "source": "iana", + "compressible": true + }, + "application/yang-patch+json": { + "source": "iana", + "compressible": true + }, + "application/yang-patch+xml": { + "source": "iana", + "compressible": true + }, + "application/yin+xml": { + "source": "iana", + "compressible": true, + "extensions": ["yin"] + }, + "application/zip": { + "source": "iana", + "compressible": false, + "extensions": ["zip"] + }, + "application/zlib": { + "source": "iana" + }, + "application/zstd": { + "source": "iana" + }, + "audio/1d-interleaved-parityfec": { + "source": "iana" + }, + "audio/32kadpcm": { + "source": "iana" + }, + "audio/3gpp": { + "source": "iana", + "compressible": false, + "extensions": ["3gpp"] + }, + "audio/3gpp2": { + "source": "iana" + }, + "audio/aac": { + "source": "iana" + }, + "audio/ac3": { + "source": "iana" + }, + "audio/adpcm": { + "source": "apache", + "extensions": ["adp"] + }, + "audio/amr": { + "source": "iana", + "extensions": ["amr"] + }, + "audio/amr-wb": { + "source": "iana" + }, + "audio/amr-wb+": { + "source": "iana" + }, + "audio/aptx": { + "source": "iana" + }, + "audio/asc": { + "source": "iana" + }, + "audio/atrac-advanced-lossless": { + "source": "iana" + }, + "audio/atrac-x": { + "source": "iana" + }, + "audio/atrac3": { + "source": "iana" + }, + "audio/basic": { + "source": "iana", + "compressible": false, + "extensions": ["au","snd"] + }, + "audio/bv16": { + "source": "iana" + }, + "audio/bv32": { + "source": "iana" + }, + "audio/clearmode": { + "source": "iana" + }, + "audio/cn": { + "source": "iana" + }, + "audio/dat12": { + "source": "iana" + }, + "audio/dls": { + "source": "iana" + }, + "audio/dsr-es201108": { + "source": "iana" + }, + "audio/dsr-es202050": { + "source": "iana" + }, + "audio/dsr-es202211": { + "source": "iana" + }, + "audio/dsr-es202212": { + "source": "iana" + }, + "audio/dv": { + "source": "iana" + }, + "audio/dvi4": { + "source": "iana" + }, + "audio/eac3": { + "source": "iana" + }, + "audio/encaprtp": { + "source": "iana" + }, + "audio/evrc": { + "source": "iana" + }, + "audio/evrc-qcp": { + "source": 
"iana" + }, + "audio/evrc0": { + "source": "iana" + }, + "audio/evrc1": { + "source": "iana" + }, + "audio/evrcb": { + "source": "iana" + }, + "audio/evrcb0": { + "source": "iana" + }, + "audio/evrcb1": { + "source": "iana" + }, + "audio/evrcnw": { + "source": "iana" + }, + "audio/evrcnw0": { + "source": "iana" + }, + "audio/evrcnw1": { + "source": "iana" + }, + "audio/evrcwb": { + "source": "iana" + }, + "audio/evrcwb0": { + "source": "iana" + }, + "audio/evrcwb1": { + "source": "iana" + }, + "audio/evs": { + "source": "iana" + }, + "audio/flexfec": { + "source": "iana" + }, + "audio/fwdred": { + "source": "iana" + }, + "audio/g711-0": { + "source": "iana" + }, + "audio/g719": { + "source": "iana" + }, + "audio/g722": { + "source": "iana" + }, + "audio/g7221": { + "source": "iana" + }, + "audio/g723": { + "source": "iana" + }, + "audio/g726-16": { + "source": "iana" + }, + "audio/g726-24": { + "source": "iana" + }, + "audio/g726-32": { + "source": "iana" + }, + "audio/g726-40": { + "source": "iana" + }, + "audio/g728": { + "source": "iana" + }, + "audio/g729": { + "source": "iana" + }, + "audio/g7291": { + "source": "iana" + }, + "audio/g729d": { + "source": "iana" + }, + "audio/g729e": { + "source": "iana" + }, + "audio/gsm": { + "source": "iana" + }, + "audio/gsm-efr": { + "source": "iana" + }, + "audio/gsm-hr-08": { + "source": "iana" + }, + "audio/ilbc": { + "source": "iana" + }, + "audio/ip-mr_v2.5": { + "source": "iana" + }, + "audio/isac": { + "source": "apache" + }, + "audio/l16": { + "source": "iana" + }, + "audio/l20": { + "source": "iana" + }, + "audio/l24": { + "source": "iana", + "compressible": false + }, + "audio/l8": { + "source": "iana" + }, + "audio/lpc": { + "source": "iana" + }, + "audio/melp": { + "source": "iana" + }, + "audio/melp1200": { + "source": "iana" + }, + "audio/melp2400": { + "source": "iana" + }, + "audio/melp600": { + "source": "iana" + }, + "audio/mhas": { + "source": "iana" + }, + "audio/midi": { + "source": "apache", + "extensions": ["mid","midi","kar","rmi"] + }, + "audio/mobile-xmf": { + "source": "iana", + "extensions": ["mxmf"] + }, + "audio/mp3": { + "compressible": false, + "extensions": ["mp3"] + }, + "audio/mp4": { + "source": "iana", + "compressible": false, + "extensions": ["m4a","mp4a"] + }, + "audio/mp4a-latm": { + "source": "iana" + }, + "audio/mpa": { + "source": "iana" + }, + "audio/mpa-robust": { + "source": "iana" + }, + "audio/mpeg": { + "source": "iana", + "compressible": false, + "extensions": ["mpga","mp2","mp2a","mp3","m2a","m3a"] + }, + "audio/mpeg4-generic": { + "source": "iana" + }, + "audio/musepack": { + "source": "apache" + }, + "audio/ogg": { + "source": "iana", + "compressible": false, + "extensions": ["oga","ogg","spx","opus"] + }, + "audio/opus": { + "source": "iana" + }, + "audio/parityfec": { + "source": "iana" + }, + "audio/pcma": { + "source": "iana" + }, + "audio/pcma-wb": { + "source": "iana" + }, + "audio/pcmu": { + "source": "iana" + }, + "audio/pcmu-wb": { + "source": "iana" + }, + "audio/prs.sid": { + "source": "iana" + }, + "audio/qcelp": { + "source": "iana" + }, + "audio/raptorfec": { + "source": "iana" + }, + "audio/red": { + "source": "iana" + }, + "audio/rtp-enc-aescm128": { + "source": "iana" + }, + "audio/rtp-midi": { + "source": "iana" + }, + "audio/rtploopback": { + "source": "iana" + }, + "audio/rtx": { + "source": "iana" + }, + "audio/s3m": { + "source": "apache", + "extensions": ["s3m"] + }, + "audio/scip": { + "source": "iana" + }, + "audio/silk": { + "source": "apache", + "extensions": ["sil"] + 
}, + "audio/smv": { + "source": "iana" + }, + "audio/smv-qcp": { + "source": "iana" + }, + "audio/smv0": { + "source": "iana" + }, + "audio/sofa": { + "source": "iana" + }, + "audio/sp-midi": { + "source": "iana" + }, + "audio/speex": { + "source": "iana" + }, + "audio/t140c": { + "source": "iana" + }, + "audio/t38": { + "source": "iana" + }, + "audio/telephone-event": { + "source": "iana" + }, + "audio/tetra_acelp": { + "source": "iana" + }, + "audio/tetra_acelp_bb": { + "source": "iana" + }, + "audio/tone": { + "source": "iana" + }, + "audio/tsvcis": { + "source": "iana" + }, + "audio/uemclip": { + "source": "iana" + }, + "audio/ulpfec": { + "source": "iana" + }, + "audio/usac": { + "source": "iana" + }, + "audio/vdvi": { + "source": "iana" + }, + "audio/vmr-wb": { + "source": "iana" + }, + "audio/vnd.3gpp.iufp": { + "source": "iana" + }, + "audio/vnd.4sb": { + "source": "iana" + }, + "audio/vnd.audiokoz": { + "source": "iana" + }, + "audio/vnd.celp": { + "source": "iana" + }, + "audio/vnd.cisco.nse": { + "source": "iana" + }, + "audio/vnd.cmles.radio-events": { + "source": "iana" + }, + "audio/vnd.cns.anp1": { + "source": "iana" + }, + "audio/vnd.cns.inf1": { + "source": "iana" + }, + "audio/vnd.dece.audio": { + "source": "iana", + "extensions": ["uva","uvva"] + }, + "audio/vnd.digital-winds": { + "source": "iana", + "extensions": ["eol"] + }, + "audio/vnd.dlna.adts": { + "source": "iana" + }, + "audio/vnd.dolby.heaac.1": { + "source": "iana" + }, + "audio/vnd.dolby.heaac.2": { + "source": "iana" + }, + "audio/vnd.dolby.mlp": { + "source": "iana" + }, + "audio/vnd.dolby.mps": { + "source": "iana" + }, + "audio/vnd.dolby.pl2": { + "source": "iana" + }, + "audio/vnd.dolby.pl2x": { + "source": "iana" + }, + "audio/vnd.dolby.pl2z": { + "source": "iana" + }, + "audio/vnd.dolby.pulse.1": { + "source": "iana" + }, + "audio/vnd.dra": { + "source": "iana", + "extensions": ["dra"] + }, + "audio/vnd.dts": { + "source": "iana", + "extensions": ["dts"] + }, + "audio/vnd.dts.hd": { + "source": "iana", + "extensions": ["dtshd"] + }, + "audio/vnd.dts.uhd": { + "source": "iana" + }, + "audio/vnd.dvb.file": { + "source": "iana" + }, + "audio/vnd.everad.plj": { + "source": "iana" + }, + "audio/vnd.hns.audio": { + "source": "iana" + }, + "audio/vnd.lucent.voice": { + "source": "iana", + "extensions": ["lvp"] + }, + "audio/vnd.ms-playready.media.pya": { + "source": "iana", + "extensions": ["pya"] + }, + "audio/vnd.nokia.mobile-xmf": { + "source": "iana" + }, + "audio/vnd.nortel.vbk": { + "source": "iana" + }, + "audio/vnd.nuera.ecelp4800": { + "source": "iana", + "extensions": ["ecelp4800"] + }, + "audio/vnd.nuera.ecelp7470": { + "source": "iana", + "extensions": ["ecelp7470"] + }, + "audio/vnd.nuera.ecelp9600": { + "source": "iana", + "extensions": ["ecelp9600"] + }, + "audio/vnd.octel.sbc": { + "source": "iana" + }, + "audio/vnd.presonus.multitrack": { + "source": "iana" + }, + "audio/vnd.qcelp": { + "source": "iana" + }, + "audio/vnd.rhetorex.32kadpcm": { + "source": "iana" + }, + "audio/vnd.rip": { + "source": "iana", + "extensions": ["rip"] + }, + "audio/vnd.rn-realaudio": { + "compressible": false + }, + "audio/vnd.sealedmedia.softseal.mpeg": { + "source": "iana" + }, + "audio/vnd.vmx.cvsd": { + "source": "iana" + }, + "audio/vnd.wave": { + "compressible": false + }, + "audio/vorbis": { + "source": "iana", + "compressible": false + }, + "audio/vorbis-config": { + "source": "iana" + }, + "audio/wav": { + "compressible": false, + "extensions": ["wav"] + }, + "audio/wave": { + "compressible": false, + 
"extensions": ["wav"] + }, + "audio/webm": { + "source": "apache", + "compressible": false, + "extensions": ["weba"] + }, + "audio/x-aac": { + "source": "apache", + "compressible": false, + "extensions": ["aac"] + }, + "audio/x-aiff": { + "source": "apache", + "extensions": ["aif","aiff","aifc"] + }, + "audio/x-caf": { + "source": "apache", + "compressible": false, + "extensions": ["caf"] + }, + "audio/x-flac": { + "source": "apache", + "extensions": ["flac"] + }, + "audio/x-m4a": { + "source": "nginx", + "extensions": ["m4a"] + }, + "audio/x-matroska": { + "source": "apache", + "extensions": ["mka"] + }, + "audio/x-mpegurl": { + "source": "apache", + "extensions": ["m3u"] + }, + "audio/x-ms-wax": { + "source": "apache", + "extensions": ["wax"] + }, + "audio/x-ms-wma": { + "source": "apache", + "extensions": ["wma"] + }, + "audio/x-pn-realaudio": { + "source": "apache", + "extensions": ["ram","ra"] + }, + "audio/x-pn-realaudio-plugin": { + "source": "apache", + "extensions": ["rmp"] + }, + "audio/x-realaudio": { + "source": "nginx", + "extensions": ["ra"] + }, + "audio/x-tta": { + "source": "apache" + }, + "audio/x-wav": { + "source": "apache", + "extensions": ["wav"] + }, + "audio/xm": { + "source": "apache", + "extensions": ["xm"] + }, + "chemical/x-cdx": { + "source": "apache", + "extensions": ["cdx"] + }, + "chemical/x-cif": { + "source": "apache", + "extensions": ["cif"] + }, + "chemical/x-cmdf": { + "source": "apache", + "extensions": ["cmdf"] + }, + "chemical/x-cml": { + "source": "apache", + "extensions": ["cml"] + }, + "chemical/x-csml": { + "source": "apache", + "extensions": ["csml"] + }, + "chemical/x-pdb": { + "source": "apache" + }, + "chemical/x-xyz": { + "source": "apache", + "extensions": ["xyz"] + }, + "font/collection": { + "source": "iana", + "extensions": ["ttc"] + }, + "font/otf": { + "source": "iana", + "compressible": true, + "extensions": ["otf"] + }, + "font/sfnt": { + "source": "iana" + }, + "font/ttf": { + "source": "iana", + "compressible": true, + "extensions": ["ttf"] + }, + "font/woff": { + "source": "iana", + "extensions": ["woff"] + }, + "font/woff2": { + "source": "iana", + "extensions": ["woff2"] + }, + "image/aces": { + "source": "iana", + "extensions": ["exr"] + }, + "image/apng": { + "compressible": false, + "extensions": ["apng"] + }, + "image/avci": { + "source": "iana", + "extensions": ["avci"] + }, + "image/avcs": { + "source": "iana", + "extensions": ["avcs"] + }, + "image/avif": { + "source": "iana", + "compressible": false, + "extensions": ["avif"] + }, + "image/bmp": { + "source": "iana", + "compressible": true, + "extensions": ["bmp"] + }, + "image/cgm": { + "source": "iana", + "extensions": ["cgm"] + }, + "image/dicom-rle": { + "source": "iana", + "extensions": ["drle"] + }, + "image/emf": { + "source": "iana", + "extensions": ["emf"] + }, + "image/fits": { + "source": "iana", + "extensions": ["fits"] + }, + "image/g3fax": { + "source": "iana", + "extensions": ["g3"] + }, + "image/gif": { + "source": "iana", + "compressible": false, + "extensions": ["gif"] + }, + "image/heic": { + "source": "iana", + "extensions": ["heic"] + }, + "image/heic-sequence": { + "source": "iana", + "extensions": ["heics"] + }, + "image/heif": { + "source": "iana", + "extensions": ["heif"] + }, + "image/heif-sequence": { + "source": "iana", + "extensions": ["heifs"] + }, + "image/hej2k": { + "source": "iana", + "extensions": ["hej2"] + }, + "image/hsj2": { + "source": "iana", + "extensions": ["hsj2"] + }, + "image/ief": { + "source": "iana", + "extensions": ["ief"] 
+ }, + "image/jls": { + "source": "iana", + "extensions": ["jls"] + }, + "image/jp2": { + "source": "iana", + "compressible": false, + "extensions": ["jp2","jpg2"] + }, + "image/jpeg": { + "source": "iana", + "compressible": false, + "extensions": ["jpeg","jpg","jpe"] + }, + "image/jph": { + "source": "iana", + "extensions": ["jph"] + }, + "image/jphc": { + "source": "iana", + "extensions": ["jhc"] + }, + "image/jpm": { + "source": "iana", + "compressible": false, + "extensions": ["jpm"] + }, + "image/jpx": { + "source": "iana", + "compressible": false, + "extensions": ["jpx","jpf"] + }, + "image/jxr": { + "source": "iana", + "extensions": ["jxr"] + }, + "image/jxra": { + "source": "iana", + "extensions": ["jxra"] + }, + "image/jxrs": { + "source": "iana", + "extensions": ["jxrs"] + }, + "image/jxs": { + "source": "iana", + "extensions": ["jxs"] + }, + "image/jxsc": { + "source": "iana", + "extensions": ["jxsc"] + }, + "image/jxsi": { + "source": "iana", + "extensions": ["jxsi"] + }, + "image/jxss": { + "source": "iana", + "extensions": ["jxss"] + }, + "image/ktx": { + "source": "iana", + "extensions": ["ktx"] + }, + "image/ktx2": { + "source": "iana", + "extensions": ["ktx2"] + }, + "image/naplps": { + "source": "iana" + }, + "image/pjpeg": { + "compressible": false + }, + "image/png": { + "source": "iana", + "compressible": false, + "extensions": ["png"] + }, + "image/prs.btif": { + "source": "iana", + "extensions": ["btif"] + }, + "image/prs.pti": { + "source": "iana", + "extensions": ["pti"] + }, + "image/pwg-raster": { + "source": "iana" + }, + "image/sgi": { + "source": "apache", + "extensions": ["sgi"] + }, + "image/svg+xml": { + "source": "iana", + "compressible": true, + "extensions": ["svg","svgz"] + }, + "image/t38": { + "source": "iana", + "extensions": ["t38"] + }, + "image/tiff": { + "source": "iana", + "compressible": false, + "extensions": ["tif","tiff"] + }, + "image/tiff-fx": { + "source": "iana", + "extensions": ["tfx"] + }, + "image/vnd.adobe.photoshop": { + "source": "iana", + "compressible": true, + "extensions": ["psd"] + }, + "image/vnd.airzip.accelerator.azv": { + "source": "iana", + "extensions": ["azv"] + }, + "image/vnd.cns.inf2": { + "source": "iana" + }, + "image/vnd.dece.graphic": { + "source": "iana", + "extensions": ["uvi","uvvi","uvg","uvvg"] + }, + "image/vnd.djvu": { + "source": "iana", + "extensions": ["djvu","djv"] + }, + "image/vnd.dvb.subtitle": { + "source": "iana", + "extensions": ["sub"] + }, + "image/vnd.dwg": { + "source": "iana", + "extensions": ["dwg"] + }, + "image/vnd.dxf": { + "source": "iana", + "extensions": ["dxf"] + }, + "image/vnd.fastbidsheet": { + "source": "iana", + "extensions": ["fbs"] + }, + "image/vnd.fpx": { + "source": "iana", + "extensions": ["fpx"] + }, + "image/vnd.fst": { + "source": "iana", + "extensions": ["fst"] + }, + "image/vnd.fujixerox.edmics-mmr": { + "source": "iana", + "extensions": ["mmr"] + }, + "image/vnd.fujixerox.edmics-rlc": { + "source": "iana", + "extensions": ["rlc"] + }, + "image/vnd.globalgraphics.pgb": { + "source": "iana" + }, + "image/vnd.microsoft.icon": { + "source": "iana", + "compressible": true, + "extensions": ["ico"] + }, + "image/vnd.mix": { + "source": "iana" + }, + "image/vnd.mozilla.apng": { + "source": "iana" + }, + "image/vnd.ms-dds": { + "compressible": true, + "extensions": ["dds"] + }, + "image/vnd.ms-modi": { + "source": "iana", + "extensions": ["mdi"] + }, + "image/vnd.ms-photo": { + "source": "apache", + "extensions": ["wdp"] + }, + "image/vnd.net-fpx": { + "source": "iana", + 
"extensions": ["npx"] + }, + "image/vnd.pco.b16": { + "source": "iana", + "extensions": ["b16"] + }, + "image/vnd.radiance": { + "source": "iana" + }, + "image/vnd.sealed.png": { + "source": "iana" + }, + "image/vnd.sealedmedia.softseal.gif": { + "source": "iana" + }, + "image/vnd.sealedmedia.softseal.jpg": { + "source": "iana" + }, + "image/vnd.svf": { + "source": "iana" + }, + "image/vnd.tencent.tap": { + "source": "iana", + "extensions": ["tap"] + }, + "image/vnd.valve.source.texture": { + "source": "iana", + "extensions": ["vtf"] + }, + "image/vnd.wap.wbmp": { + "source": "iana", + "extensions": ["wbmp"] + }, + "image/vnd.xiff": { + "source": "iana", + "extensions": ["xif"] + }, + "image/vnd.zbrush.pcx": { + "source": "iana", + "extensions": ["pcx"] + }, + "image/webp": { + "source": "apache", + "extensions": ["webp"] + }, + "image/wmf": { + "source": "iana", + "extensions": ["wmf"] + }, + "image/x-3ds": { + "source": "apache", + "extensions": ["3ds"] + }, + "image/x-cmu-raster": { + "source": "apache", + "extensions": ["ras"] + }, + "image/x-cmx": { + "source": "apache", + "extensions": ["cmx"] + }, + "image/x-freehand": { + "source": "apache", + "extensions": ["fh","fhc","fh4","fh5","fh7"] + }, + "image/x-icon": { + "source": "apache", + "compressible": true, + "extensions": ["ico"] + }, + "image/x-jng": { + "source": "nginx", + "extensions": ["jng"] + }, + "image/x-mrsid-image": { + "source": "apache", + "extensions": ["sid"] + }, + "image/x-ms-bmp": { + "source": "nginx", + "compressible": true, + "extensions": ["bmp"] + }, + "image/x-pcx": { + "source": "apache", + "extensions": ["pcx"] + }, + "image/x-pict": { + "source": "apache", + "extensions": ["pic","pct"] + }, + "image/x-portable-anymap": { + "source": "apache", + "extensions": ["pnm"] + }, + "image/x-portable-bitmap": { + "source": "apache", + "extensions": ["pbm"] + }, + "image/x-portable-graymap": { + "source": "apache", + "extensions": ["pgm"] + }, + "image/x-portable-pixmap": { + "source": "apache", + "extensions": ["ppm"] + }, + "image/x-rgb": { + "source": "apache", + "extensions": ["rgb"] + }, + "image/x-tga": { + "source": "apache", + "extensions": ["tga"] + }, + "image/x-xbitmap": { + "source": "apache", + "extensions": ["xbm"] + }, + "image/x-xcf": { + "compressible": false + }, + "image/x-xpixmap": { + "source": "apache", + "extensions": ["xpm"] + }, + "image/x-xwindowdump": { + "source": "apache", + "extensions": ["xwd"] + }, + "message/cpim": { + "source": "iana" + }, + "message/delivery-status": { + "source": "iana" + }, + "message/disposition-notification": { + "source": "iana", + "extensions": [ + "disposition-notification" + ] + }, + "message/external-body": { + "source": "iana" + }, + "message/feedback-report": { + "source": "iana" + }, + "message/global": { + "source": "iana", + "extensions": ["u8msg"] + }, + "message/global-delivery-status": { + "source": "iana", + "extensions": ["u8dsn"] + }, + "message/global-disposition-notification": { + "source": "iana", + "extensions": ["u8mdn"] + }, + "message/global-headers": { + "source": "iana", + "extensions": ["u8hdr"] + }, + "message/http": { + "source": "iana", + "compressible": false + }, + "message/imdn+xml": { + "source": "iana", + "compressible": true + }, + "message/news": { + "source": "iana" + }, + "message/partial": { + "source": "iana", + "compressible": false + }, + "message/rfc822": { + "source": "iana", + "compressible": true, + "extensions": ["eml","mime"] + }, + "message/s-http": { + "source": "iana" + }, + "message/sip": { + "source": "iana" 
+ }, + "message/sipfrag": { + "source": "iana" + }, + "message/tracking-status": { + "source": "iana" + }, + "message/vnd.si.simp": { + "source": "iana" + }, + "message/vnd.wfa.wsc": { + "source": "iana", + "extensions": ["wsc"] + }, + "model/3mf": { + "source": "iana", + "extensions": ["3mf"] + }, + "model/e57": { + "source": "iana" + }, + "model/gltf+json": { + "source": "iana", + "compressible": true, + "extensions": ["gltf"] + }, + "model/gltf-binary": { + "source": "iana", + "compressible": true, + "extensions": ["glb"] + }, + "model/iges": { + "source": "iana", + "compressible": false, + "extensions": ["igs","iges"] + }, + "model/mesh": { + "source": "iana", + "compressible": false, + "extensions": ["msh","mesh","silo"] + }, + "model/mtl": { + "source": "iana", + "extensions": ["mtl"] + }, + "model/obj": { + "source": "iana", + "extensions": ["obj"] + }, + "model/step": { + "source": "iana" + }, + "model/step+xml": { + "source": "iana", + "compressible": true, + "extensions": ["stpx"] + }, + "model/step+zip": { + "source": "iana", + "compressible": false, + "extensions": ["stpz"] + }, + "model/step-xml+zip": { + "source": "iana", + "compressible": false, + "extensions": ["stpxz"] + }, + "model/stl": { + "source": "iana", + "extensions": ["stl"] + }, + "model/vnd.collada+xml": { + "source": "iana", + "compressible": true, + "extensions": ["dae"] + }, + "model/vnd.dwf": { + "source": "iana", + "extensions": ["dwf"] + }, + "model/vnd.flatland.3dml": { + "source": "iana" + }, + "model/vnd.gdl": { + "source": "iana", + "extensions": ["gdl"] + }, + "model/vnd.gs-gdl": { + "source": "apache" + }, + "model/vnd.gs.gdl": { + "source": "iana" + }, + "model/vnd.gtw": { + "source": "iana", + "extensions": ["gtw"] + }, + "model/vnd.moml+xml": { + "source": "iana", + "compressible": true + }, + "model/vnd.mts": { + "source": "iana", + "extensions": ["mts"] + }, + "model/vnd.opengex": { + "source": "iana", + "extensions": ["ogex"] + }, + "model/vnd.parasolid.transmit.binary": { + "source": "iana", + "extensions": ["x_b"] + }, + "model/vnd.parasolid.transmit.text": { + "source": "iana", + "extensions": ["x_t"] + }, + "model/vnd.pytha.pyox": { + "source": "iana" + }, + "model/vnd.rosette.annotated-data-model": { + "source": "iana" + }, + "model/vnd.sap.vds": { + "source": "iana", + "extensions": ["vds"] + }, + "model/vnd.usdz+zip": { + "source": "iana", + "compressible": false, + "extensions": ["usdz"] + }, + "model/vnd.valve.source.compiled-map": { + "source": "iana", + "extensions": ["bsp"] + }, + "model/vnd.vtu": { + "source": "iana", + "extensions": ["vtu"] + }, + "model/vrml": { + "source": "iana", + "compressible": false, + "extensions": ["wrl","vrml"] + }, + "model/x3d+binary": { + "source": "apache", + "compressible": false, + "extensions": ["x3db","x3dbz"] + }, + "model/x3d+fastinfoset": { + "source": "iana", + "extensions": ["x3db"] + }, + "model/x3d+vrml": { + "source": "apache", + "compressible": false, + "extensions": ["x3dv","x3dvz"] + }, + "model/x3d+xml": { + "source": "iana", + "compressible": true, + "extensions": ["x3d","x3dz"] + }, + "model/x3d-vrml": { + "source": "iana", + "extensions": ["x3dv"] + }, + "multipart/alternative": { + "source": "iana", + "compressible": false + }, + "multipart/appledouble": { + "source": "iana" + }, + "multipart/byteranges": { + "source": "iana" + }, + "multipart/digest": { + "source": "iana" + }, + "multipart/encrypted": { + "source": "iana", + "compressible": false + }, + "multipart/form-data": { + "source": "iana", + "compressible": false + }, + 
"multipart/header-set": { + "source": "iana" + }, + "multipart/mixed": { + "source": "iana" + }, + "multipart/multilingual": { + "source": "iana" + }, + "multipart/parallel": { + "source": "iana" + }, + "multipart/related": { + "source": "iana", + "compressible": false + }, + "multipart/report": { + "source": "iana" + }, + "multipart/signed": { + "source": "iana", + "compressible": false + }, + "multipart/vnd.bint.med-plus": { + "source": "iana" + }, + "multipart/voice-message": { + "source": "iana" + }, + "multipart/x-mixed-replace": { + "source": "iana" + }, + "text/1d-interleaved-parityfec": { + "source": "iana" + }, + "text/cache-manifest": { + "source": "iana", + "compressible": true, + "extensions": ["appcache","manifest"] + }, + "text/calendar": { + "source": "iana", + "extensions": ["ics","ifb"] + }, + "text/calender": { + "compressible": true + }, + "text/cmd": { + "compressible": true + }, + "text/coffeescript": { + "extensions": ["coffee","litcoffee"] + }, + "text/cql": { + "source": "iana" + }, + "text/cql-expression": { + "source": "iana" + }, + "text/cql-identifier": { + "source": "iana" + }, + "text/css": { + "source": "iana", + "charset": "UTF-8", + "compressible": true, + "extensions": ["css"] + }, + "text/csv": { + "source": "iana", + "compressible": true, + "extensions": ["csv"] + }, + "text/csv-schema": { + "source": "iana" + }, + "text/directory": { + "source": "iana" + }, + "text/dns": { + "source": "iana" + }, + "text/ecmascript": { + "source": "iana" + }, + "text/encaprtp": { + "source": "iana" + }, + "text/enriched": { + "source": "iana" + }, + "text/fhirpath": { + "source": "iana" + }, + "text/flexfec": { + "source": "iana" + }, + "text/fwdred": { + "source": "iana" + }, + "text/gff3": { + "source": "iana" + }, + "text/grammar-ref-list": { + "source": "iana" + }, + "text/html": { + "source": "iana", + "compressible": true, + "extensions": ["html","htm","shtml"] + }, + "text/jade": { + "extensions": ["jade"] + }, + "text/javascript": { + "source": "iana", + "compressible": true + }, + "text/jcr-cnd": { + "source": "iana" + }, + "text/jsx": { + "compressible": true, + "extensions": ["jsx"] + }, + "text/less": { + "compressible": true, + "extensions": ["less"] + }, + "text/markdown": { + "source": "iana", + "compressible": true, + "extensions": ["markdown","md"] + }, + "text/mathml": { + "source": "nginx", + "extensions": ["mml"] + }, + "text/mdx": { + "compressible": true, + "extensions": ["mdx"] + }, + "text/mizar": { + "source": "iana" + }, + "text/n3": { + "source": "iana", + "charset": "UTF-8", + "compressible": true, + "extensions": ["n3"] + }, + "text/parameters": { + "source": "iana", + "charset": "UTF-8" + }, + "text/parityfec": { + "source": "iana" + }, + "text/plain": { + "source": "iana", + "compressible": true, + "extensions": ["txt","text","conf","def","list","log","in","ini"] + }, + "text/provenance-notation": { + "source": "iana", + "charset": "UTF-8" + }, + "text/prs.fallenstein.rst": { + "source": "iana" + }, + "text/prs.lines.tag": { + "source": "iana", + "extensions": ["dsc"] + }, + "text/prs.prop.logic": { + "source": "iana" + }, + "text/raptorfec": { + "source": "iana" + }, + "text/red": { + "source": "iana" + }, + "text/rfc822-headers": { + "source": "iana" + }, + "text/richtext": { + "source": "iana", + "compressible": true, + "extensions": ["rtx"] + }, + "text/rtf": { + "source": "iana", + "compressible": true, + "extensions": ["rtf"] + }, + "text/rtp-enc-aescm128": { + "source": "iana" + }, + "text/rtploopback": { + "source": "iana" + }, + 
"text/rtx": { + "source": "iana" + }, + "text/sgml": { + "source": "iana", + "extensions": ["sgml","sgm"] + }, + "text/shaclc": { + "source": "iana" + }, + "text/shex": { + "source": "iana", + "extensions": ["shex"] + }, + "text/slim": { + "extensions": ["slim","slm"] + }, + "text/spdx": { + "source": "iana", + "extensions": ["spdx"] + }, + "text/strings": { + "source": "iana" + }, + "text/stylus": { + "extensions": ["stylus","styl"] + }, + "text/t140": { + "source": "iana" + }, + "text/tab-separated-values": { + "source": "iana", + "compressible": true, + "extensions": ["tsv"] + }, + "text/troff": { + "source": "iana", + "extensions": ["t","tr","roff","man","me","ms"] + }, + "text/turtle": { + "source": "iana", + "charset": "UTF-8", + "extensions": ["ttl"] + }, + "text/ulpfec": { + "source": "iana" + }, + "text/uri-list": { + "source": "iana", + "compressible": true, + "extensions": ["uri","uris","urls"] + }, + "text/vcard": { + "source": "iana", + "compressible": true, + "extensions": ["vcard"] + }, + "text/vnd.a": { + "source": "iana" + }, + "text/vnd.abc": { + "source": "iana" + }, + "text/vnd.ascii-art": { + "source": "iana" + }, + "text/vnd.curl": { + "source": "iana", + "extensions": ["curl"] + }, + "text/vnd.curl.dcurl": { + "source": "apache", + "extensions": ["dcurl"] + }, + "text/vnd.curl.mcurl": { + "source": "apache", + "extensions": ["mcurl"] + }, + "text/vnd.curl.scurl": { + "source": "apache", + "extensions": ["scurl"] + }, + "text/vnd.debian.copyright": { + "source": "iana", + "charset": "UTF-8" + }, + "text/vnd.dmclientscript": { + "source": "iana" + }, + "text/vnd.dvb.subtitle": { + "source": "iana", + "extensions": ["sub"] + }, + "text/vnd.esmertec.theme-descriptor": { + "source": "iana", + "charset": "UTF-8" + }, + "text/vnd.familysearch.gedcom": { + "source": "iana", + "extensions": ["ged"] + }, + "text/vnd.ficlab.flt": { + "source": "iana" + }, + "text/vnd.fly": { + "source": "iana", + "extensions": ["fly"] + }, + "text/vnd.fmi.flexstor": { + "source": "iana", + "extensions": ["flx"] + }, + "text/vnd.gml": { + "source": "iana" + }, + "text/vnd.graphviz": { + "source": "iana", + "extensions": ["gv"] + }, + "text/vnd.hans": { + "source": "iana" + }, + "text/vnd.hgl": { + "source": "iana" + }, + "text/vnd.in3d.3dml": { + "source": "iana", + "extensions": ["3dml"] + }, + "text/vnd.in3d.spot": { + "source": "iana", + "extensions": ["spot"] + }, + "text/vnd.iptc.newsml": { + "source": "iana" + }, + "text/vnd.iptc.nitf": { + "source": "iana" + }, + "text/vnd.latex-z": { + "source": "iana" + }, + "text/vnd.motorola.reflex": { + "source": "iana" + }, + "text/vnd.ms-mediapackage": { + "source": "iana" + }, + "text/vnd.net2phone.commcenter.command": { + "source": "iana" + }, + "text/vnd.radisys.msml-basic-layout": { + "source": "iana" + }, + "text/vnd.senx.warpscript": { + "source": "iana" + }, + "text/vnd.si.uricatalogue": { + "source": "iana" + }, + "text/vnd.sosi": { + "source": "iana" + }, + "text/vnd.sun.j2me.app-descriptor": { + "source": "iana", + "charset": "UTF-8", + "extensions": ["jad"] + }, + "text/vnd.trolltech.linguist": { + "source": "iana", + "charset": "UTF-8" + }, + "text/vnd.wap.si": { + "source": "iana" + }, + "text/vnd.wap.sl": { + "source": "iana" + }, + "text/vnd.wap.wml": { + "source": "iana", + "extensions": ["wml"] + }, + "text/vnd.wap.wmlscript": { + "source": "iana", + "extensions": ["wmls"] + }, + "text/vtt": { + "source": "iana", + "charset": "UTF-8", + "compressible": true, + "extensions": ["vtt"] + }, + "text/x-asm": { + "source": "apache", + 
"extensions": ["s","asm"] + }, + "text/x-c": { + "source": "apache", + "extensions": ["c","cc","cxx","cpp","h","hh","dic"] + }, + "text/x-component": { + "source": "nginx", + "extensions": ["htc"] + }, + "text/x-fortran": { + "source": "apache", + "extensions": ["f","for","f77","f90"] + }, + "text/x-gwt-rpc": { + "compressible": true + }, + "text/x-handlebars-template": { + "extensions": ["hbs"] + }, + "text/x-java-source": { + "source": "apache", + "extensions": ["java"] + }, + "text/x-jquery-tmpl": { + "compressible": true + }, + "text/x-lua": { + "extensions": ["lua"] + }, + "text/x-markdown": { + "compressible": true, + "extensions": ["mkd"] + }, + "text/x-nfo": { + "source": "apache", + "extensions": ["nfo"] + }, + "text/x-opml": { + "source": "apache", + "extensions": ["opml"] + }, + "text/x-org": { + "compressible": true, + "extensions": ["org"] + }, + "text/x-pascal": { + "source": "apache", + "extensions": ["p","pas"] + }, + "text/x-processing": { + "compressible": true, + "extensions": ["pde"] + }, + "text/x-sass": { + "extensions": ["sass"] + }, + "text/x-scss": { + "extensions": ["scss"] + }, + "text/x-setext": { + "source": "apache", + "extensions": ["etx"] + }, + "text/x-sfv": { + "source": "apache", + "extensions": ["sfv"] + }, + "text/x-suse-ymp": { + "compressible": true, + "extensions": ["ymp"] + }, + "text/x-uuencode": { + "source": "apache", + "extensions": ["uu"] + }, + "text/x-vcalendar": { + "source": "apache", + "extensions": ["vcs"] + }, + "text/x-vcard": { + "source": "apache", + "extensions": ["vcf"] + }, + "text/xml": { + "source": "iana", + "compressible": true, + "extensions": ["xml"] + }, + "text/xml-external-parsed-entity": { + "source": "iana" + }, + "text/yaml": { + "compressible": true, + "extensions": ["yaml","yml"] + }, + "video/1d-interleaved-parityfec": { + "source": "iana" + }, + "video/3gpp": { + "source": "iana", + "extensions": ["3gp","3gpp"] + }, + "video/3gpp-tt": { + "source": "iana" + }, + "video/3gpp2": { + "source": "iana", + "extensions": ["3g2"] + }, + "video/av1": { + "source": "iana" + }, + "video/bmpeg": { + "source": "iana" + }, + "video/bt656": { + "source": "iana" + }, + "video/celb": { + "source": "iana" + }, + "video/dv": { + "source": "iana" + }, + "video/encaprtp": { + "source": "iana" + }, + "video/ffv1": { + "source": "iana" + }, + "video/flexfec": { + "source": "iana" + }, + "video/h261": { + "source": "iana", + "extensions": ["h261"] + }, + "video/h263": { + "source": "iana", + "extensions": ["h263"] + }, + "video/h263-1998": { + "source": "iana" + }, + "video/h263-2000": { + "source": "iana" + }, + "video/h264": { + "source": "iana", + "extensions": ["h264"] + }, + "video/h264-rcdo": { + "source": "iana" + }, + "video/h264-svc": { + "source": "iana" + }, + "video/h265": { + "source": "iana" + }, + "video/iso.segment": { + "source": "iana", + "extensions": ["m4s"] + }, + "video/jpeg": { + "source": "iana", + "extensions": ["jpgv"] + }, + "video/jpeg2000": { + "source": "iana" + }, + "video/jpm": { + "source": "apache", + "extensions": ["jpm","jpgm"] + }, + "video/jxsv": { + "source": "iana" + }, + "video/mj2": { + "source": "iana", + "extensions": ["mj2","mjp2"] + }, + "video/mp1s": { + "source": "iana" + }, + "video/mp2p": { + "source": "iana" + }, + "video/mp2t": { + "source": "iana", + "extensions": ["ts"] + }, + "video/mp4": { + "source": "iana", + "compressible": false, + "extensions": ["mp4","mp4v","mpg4"] + }, + "video/mp4v-es": { + "source": "iana" + }, + "video/mpeg": { + "source": "iana", + "compressible": false, + 
"extensions": ["mpeg","mpg","mpe","m1v","m2v"] + }, + "video/mpeg4-generic": { + "source": "iana" + }, + "video/mpv": { + "source": "iana" + }, + "video/nv": { + "source": "iana" + }, + "video/ogg": { + "source": "iana", + "compressible": false, + "extensions": ["ogv"] + }, + "video/parityfec": { + "source": "iana" + }, + "video/pointer": { + "source": "iana" + }, + "video/quicktime": { + "source": "iana", + "compressible": false, + "extensions": ["qt","mov"] + }, + "video/raptorfec": { + "source": "iana" + }, + "video/raw": { + "source": "iana" + }, + "video/rtp-enc-aescm128": { + "source": "iana" + }, + "video/rtploopback": { + "source": "iana" + }, + "video/rtx": { + "source": "iana" + }, + "video/scip": { + "source": "iana" + }, + "video/smpte291": { + "source": "iana" + }, + "video/smpte292m": { + "source": "iana" + }, + "video/ulpfec": { + "source": "iana" + }, + "video/vc1": { + "source": "iana" + }, + "video/vc2": { + "source": "iana" + }, + "video/vnd.cctv": { + "source": "iana" + }, + "video/vnd.dece.hd": { + "source": "iana", + "extensions": ["uvh","uvvh"] + }, + "video/vnd.dece.mobile": { + "source": "iana", + "extensions": ["uvm","uvvm"] + }, + "video/vnd.dece.mp4": { + "source": "iana" + }, + "video/vnd.dece.pd": { + "source": "iana", + "extensions": ["uvp","uvvp"] + }, + "video/vnd.dece.sd": { + "source": "iana", + "extensions": ["uvs","uvvs"] + }, + "video/vnd.dece.video": { + "source": "iana", + "extensions": ["uvv","uvvv"] + }, + "video/vnd.directv.mpeg": { + "source": "iana" + }, + "video/vnd.directv.mpeg-tts": { + "source": "iana" + }, + "video/vnd.dlna.mpeg-tts": { + "source": "iana" + }, + "video/vnd.dvb.file": { + "source": "iana", + "extensions": ["dvb"] + }, + "video/vnd.fvt": { + "source": "iana", + "extensions": ["fvt"] + }, + "video/vnd.hns.video": { + "source": "iana" + }, + "video/vnd.iptvforum.1dparityfec-1010": { + "source": "iana" + }, + "video/vnd.iptvforum.1dparityfec-2005": { + "source": "iana" + }, + "video/vnd.iptvforum.2dparityfec-1010": { + "source": "iana" + }, + "video/vnd.iptvforum.2dparityfec-2005": { + "source": "iana" + }, + "video/vnd.iptvforum.ttsavc": { + "source": "iana" + }, + "video/vnd.iptvforum.ttsmpeg2": { + "source": "iana" + }, + "video/vnd.motorola.video": { + "source": "iana" + }, + "video/vnd.motorola.videop": { + "source": "iana" + }, + "video/vnd.mpegurl": { + "source": "iana", + "extensions": ["mxu","m4u"] + }, + "video/vnd.ms-playready.media.pyv": { + "source": "iana", + "extensions": ["pyv"] + }, + "video/vnd.nokia.interleaved-multimedia": { + "source": "iana" + }, + "video/vnd.nokia.mp4vr": { + "source": "iana" + }, + "video/vnd.nokia.videovoip": { + "source": "iana" + }, + "video/vnd.objectvideo": { + "source": "iana" + }, + "video/vnd.radgamettools.bink": { + "source": "iana" + }, + "video/vnd.radgamettools.smacker": { + "source": "iana" + }, + "video/vnd.sealed.mpeg1": { + "source": "iana" + }, + "video/vnd.sealed.mpeg4": { + "source": "iana" + }, + "video/vnd.sealed.swf": { + "source": "iana" + }, + "video/vnd.sealedmedia.softseal.mov": { + "source": "iana" + }, + "video/vnd.uvvu.mp4": { + "source": "iana", + "extensions": ["uvu","uvvu"] + }, + "video/vnd.vivo": { + "source": "iana", + "extensions": ["viv"] + }, + "video/vnd.youtube.yt": { + "source": "iana" + }, + "video/vp8": { + "source": "iana" + }, + "video/vp9": { + "source": "iana" + }, + "video/webm": { + "source": "apache", + "compressible": false, + "extensions": ["webm"] + }, + "video/x-f4v": { + "source": "apache", + "extensions": ["f4v"] + }, + 
"video/x-fli": { + "source": "apache", + "extensions": ["fli"] + }, + "video/x-flv": { + "source": "apache", + "compressible": false, + "extensions": ["flv"] + }, + "video/x-m4v": { + "source": "apache", + "extensions": ["m4v"] + }, + "video/x-matroska": { + "source": "apache", + "compressible": false, + "extensions": ["mkv","mk3d","mks"] + }, + "video/x-mng": { + "source": "apache", + "extensions": ["mng"] + }, + "video/x-ms-asf": { + "source": "apache", + "extensions": ["asf","asx"] + }, + "video/x-ms-vob": { + "source": "apache", + "extensions": ["vob"] + }, + "video/x-ms-wm": { + "source": "apache", + "extensions": ["wm"] + }, + "video/x-ms-wmv": { + "source": "apache", + "compressible": false, + "extensions": ["wmv"] + }, + "video/x-ms-wmx": { + "source": "apache", + "extensions": ["wmx"] + }, + "video/x-ms-wvx": { + "source": "apache", + "extensions": ["wvx"] + }, + "video/x-msvideo": { + "source": "apache", + "extensions": ["avi"] + }, + "video/x-sgi-movie": { + "source": "apache", + "extensions": ["movie"] + }, + "video/x-smv": { + "source": "apache", + "extensions": ["smv"] + }, + "x-conference/x-cooltalk": { + "source": "apache", + "extensions": ["ice"] + }, + "x-shader/x-fragment": { + "compressible": true + }, + "x-shader/x-vertex": { + "compressible": true + } +} diff --git a/node_modules/mime-db/index.js b/node_modules/mime-db/index.js new file mode 100644 index 0000000..ec2be30 --- /dev/null +++ b/node_modules/mime-db/index.js @@ -0,0 +1,12 @@ +/*! + * mime-db + * Copyright(c) 2014 Jonathan Ong + * Copyright(c) 2015-2022 Douglas Christopher Wilson + * MIT Licensed + */ + +/** + * Module exports. + */ + +module.exports = require('./db.json') diff --git a/node_modules/mime-db/package.json b/node_modules/mime-db/package.json new file mode 100644 index 0000000..32c14b8 --- /dev/null +++ b/node_modules/mime-db/package.json @@ -0,0 +1,60 @@ +{ + "name": "mime-db", + "description": "Media Type Database", + "version": "1.52.0", + "contributors": [ + "Douglas Christopher Wilson ", + "Jonathan Ong (http://jongleberry.com)", + "Robert Kieffer (http://github.com/broofa)" + ], + "license": "MIT", + "keywords": [ + "mime", + "db", + "type", + "types", + "database", + "charset", + "charsets" + ], + "repository": "jshttp/mime-db", + "devDependencies": { + "bluebird": "3.7.2", + "co": "4.6.0", + "cogent": "1.0.1", + "csv-parse": "4.16.3", + "eslint": "7.32.0", + "eslint-config-standard": "15.0.1", + "eslint-plugin-import": "2.25.4", + "eslint-plugin-markdown": "2.2.1", + "eslint-plugin-node": "11.1.0", + "eslint-plugin-promise": "5.1.1", + "eslint-plugin-standard": "4.1.0", + "gnode": "0.1.2", + "media-typer": "1.1.0", + "mocha": "9.2.1", + "nyc": "15.1.0", + "raw-body": "2.5.0", + "stream-to-array": "2.3.0" + }, + "files": [ + "HISTORY.md", + "LICENSE", + "README.md", + "db.json", + "index.js" + ], + "engines": { + "node": ">= 0.6" + }, + "scripts": { + "build": "node scripts/build", + "fetch": "node scripts/fetch-apache && gnode scripts/fetch-iana && node scripts/fetch-nginx", + "lint": "eslint .", + "test": "mocha --reporter spec --bail --check-leaks test/", + "test-ci": "nyc --reporter=lcov --reporter=text npm test", + "test-cov": "nyc --reporter=html --reporter=text npm test", + "update": "npm run fetch && npm run build", + "version": "node scripts/version-history.js && git add HISTORY.md" + } +} diff --git a/node_modules/mime-types/HISTORY.md b/node_modules/mime-types/HISTORY.md new file mode 100644 index 0000000..c5043b7 --- /dev/null +++ b/node_modules/mime-types/HISTORY.md @@ -0,0 
+1,397 @@ +2.1.35 / 2022-03-12 +=================== + + * deps: mime-db@1.52.0 + - Add extensions from IANA for more `image/*` types + - Add extension `.asc` to `application/pgp-keys` + - Add extensions to various XML types + - Add new upstream MIME types + +2.1.34 / 2021-11-08 +=================== + + * deps: mime-db@1.51.0 + - Add new upstream MIME types + +2.1.33 / 2021-10-01 +=================== + + * deps: mime-db@1.50.0 + - Add deprecated iWorks mime types and extensions + - Add new upstream MIME types + +2.1.32 / 2021-07-27 +=================== + + * deps: mime-db@1.49.0 + - Add extension `.trig` to `application/trig` + - Add new upstream MIME types + +2.1.31 / 2021-06-01 +=================== + + * deps: mime-db@1.48.0 + - Add extension `.mvt` to `application/vnd.mapbox-vector-tile` + - Add new upstream MIME types + +2.1.30 / 2021-04-02 +=================== + + * deps: mime-db@1.47.0 + - Add extension `.amr` to `audio/amr` + - Remove ambigious extensions from IANA for `application/*+xml` types + - Update primary extension to `.es` for `application/ecmascript` + +2.1.29 / 2021-02-17 +=================== + + * deps: mime-db@1.46.0 + - Add extension `.amr` to `audio/amr` + - Add extension `.m4s` to `video/iso.segment` + - Add extension `.opus` to `audio/ogg` + - Add new upstream MIME types + +2.1.28 / 2021-01-01 +=================== + + * deps: mime-db@1.45.0 + - Add `application/ubjson` with extension `.ubj` + - Add `image/avif` with extension `.avif` + - Add `image/ktx2` with extension `.ktx2` + - Add extension `.dbf` to `application/vnd.dbf` + - Add extension `.rar` to `application/vnd.rar` + - Add extension `.td` to `application/urc-targetdesc+xml` + - Add new upstream MIME types + - Fix extension of `application/vnd.apple.keynote` to be `.key` + +2.1.27 / 2020-04-23 +=================== + + * deps: mime-db@1.44.0 + - Add charsets from IANA + - Add extension `.cjs` to `application/node` + - Add new upstream MIME types + +2.1.26 / 2020-01-05 +=================== + + * deps: mime-db@1.43.0 + - Add `application/x-keepass2` with extension `.kdbx` + - Add extension `.mxmf` to `audio/mobile-xmf` + - Add extensions from IANA for `application/*+xml` types + - Add new upstream MIME types + +2.1.25 / 2019-11-12 +=================== + + * deps: mime-db@1.42.0 + - Add new upstream MIME types + - Add `application/toml` with extension `.toml` + - Add `image/vnd.ms-dds` with extension `.dds` + +2.1.24 / 2019-04-20 +=================== + + * deps: mime-db@1.40.0 + - Add extensions from IANA for `model/*` types + - Add `text/mdx` with extension `.mdx` + +2.1.23 / 2019-04-17 +=================== + + * deps: mime-db@~1.39.0 + - Add extensions `.siv` and `.sieve` to `application/sieve` + - Add new upstream MIME types + +2.1.22 / 2019-02-14 +=================== + + * deps: mime-db@~1.38.0 + - Add extension `.nq` to `application/n-quads` + - Add extension `.nt` to `application/n-triples` + - Add new upstream MIME types + +2.1.21 / 2018-10-19 +=================== + + * deps: mime-db@~1.37.0 + - Add extensions to HEIC image types + - Add new upstream MIME types + +2.1.20 / 2018-08-26 +=================== + + * deps: mime-db@~1.36.0 + - Add Apple file extensions from IANA + - Add extensions from IANA for `image/*` types + - Add new upstream MIME types + +2.1.19 / 2018-07-17 +=================== + + * deps: mime-db@~1.35.0 + - Add extension `.csl` to `application/vnd.citationstyles.style+xml` + - Add extension `.es` to `application/ecmascript` + - Add extension `.owl` to `application/rdf+xml` + - Add new 
upstream MIME types + - Add UTF-8 as default charset for `text/turtle` + +2.1.18 / 2018-02-16 +=================== + + * deps: mime-db@~1.33.0 + - Add `application/raml+yaml` with extension `.raml` + - Add `application/wasm` with extension `.wasm` + - Add `text/shex` with extension `.shex` + - Add extensions for JPEG-2000 images + - Add extensions from IANA for `message/*` types + - Add new upstream MIME types + - Update font MIME types + - Update `text/hjson` to registered `application/hjson` + +2.1.17 / 2017-09-01 +=================== + + * deps: mime-db@~1.30.0 + - Add `application/vnd.ms-outlook` + - Add `application/x-arj` + - Add extension `.mjs` to `application/javascript` + - Add glTF types and extensions + - Add new upstream MIME types + - Add `text/x-org` + - Add VirtualBox MIME types + - Fix `source` records for `video/*` types that are IANA + - Update `font/opentype` to registered `font/otf` + +2.1.16 / 2017-07-24 +=================== + + * deps: mime-db@~1.29.0 + - Add `application/fido.trusted-apps+json` + - Add extension `.wadl` to `application/vnd.sun.wadl+xml` + - Add extension `.gz` to `application/gzip` + - Add new upstream MIME types + - Update extensions `.md` and `.markdown` to be `text/markdown` + +2.1.15 / 2017-03-23 +=================== + + * deps: mime-db@~1.27.0 + - Add new mime types + - Add `image/apng` + +2.1.14 / 2017-01-14 +=================== + + * deps: mime-db@~1.26.0 + - Add new mime types + +2.1.13 / 2016-11-18 +=================== + + * deps: mime-db@~1.25.0 + - Add new mime types + +2.1.12 / 2016-09-18 +=================== + + * deps: mime-db@~1.24.0 + - Add new mime types + - Add `audio/mp3` + +2.1.11 / 2016-05-01 +=================== + + * deps: mime-db@~1.23.0 + - Add new mime types + +2.1.10 / 2016-02-15 +=================== + + * deps: mime-db@~1.22.0 + - Add new mime types + - Fix extension of `application/dash+xml` + - Update primary extension for `audio/mp4` + +2.1.9 / 2016-01-06 +================== + + * deps: mime-db@~1.21.0 + - Add new mime types + +2.1.8 / 2015-11-30 +================== + + * deps: mime-db@~1.20.0 + - Add new mime types + +2.1.7 / 2015-09-20 +================== + + * deps: mime-db@~1.19.0 + - Add new mime types + +2.1.6 / 2015-09-03 +================== + + * deps: mime-db@~1.18.0 + - Add new mime types + +2.1.5 / 2015-08-20 +================== + + * deps: mime-db@~1.17.0 + - Add new mime types + +2.1.4 / 2015-07-30 +================== + + * deps: mime-db@~1.16.0 + - Add new mime types + +2.1.3 / 2015-07-13 +================== + + * deps: mime-db@~1.15.0 + - Add new mime types + +2.1.2 / 2015-06-25 +================== + + * deps: mime-db@~1.14.0 + - Add new mime types + +2.1.1 / 2015-06-08 +================== + + * perf: fix deopt during mapping + +2.1.0 / 2015-06-07 +================== + + * Fix incorrectly treating extension-less file name as extension + - i.e. 
`'path/to/json'` will no longer return `application/json` + * Fix `.charset(type)` to accept parameters + * Fix `.charset(type)` to match case-insensitive + * Improve generation of extension to MIME mapping + * Refactor internals for readability and no argument reassignment + * Prefer `application/*` MIME types from the same source + * Prefer any type over `application/octet-stream` + * deps: mime-db@~1.13.0 + - Add nginx as a source + - Add new mime types + +2.0.14 / 2015-06-06 +=================== + + * deps: mime-db@~1.12.0 + - Add new mime types + +2.0.13 / 2015-05-31 +=================== + + * deps: mime-db@~1.11.0 + - Add new mime types + +2.0.12 / 2015-05-19 +=================== + + * deps: mime-db@~1.10.0 + - Add new mime types + +2.0.11 / 2015-05-05 +=================== + + * deps: mime-db@~1.9.1 + - Add new mime types + +2.0.10 / 2015-03-13 +=================== + + * deps: mime-db@~1.8.0 + - Add new mime types + +2.0.9 / 2015-02-09 +================== + + * deps: mime-db@~1.7.0 + - Add new mime types + - Community extensions ownership transferred from `node-mime` + +2.0.8 / 2015-01-29 +================== + + * deps: mime-db@~1.6.0 + - Add new mime types + +2.0.7 / 2014-12-30 +================== + + * deps: mime-db@~1.5.0 + - Add new mime types + - Fix various invalid MIME type entries + +2.0.6 / 2014-12-30 +================== + + * deps: mime-db@~1.4.0 + - Add new mime types + - Fix various invalid MIME type entries + - Remove example template MIME types + +2.0.5 / 2014-12-29 +================== + + * deps: mime-db@~1.3.1 + - Fix missing extensions + +2.0.4 / 2014-12-10 +================== + + * deps: mime-db@~1.3.0 + - Add new mime types + +2.0.3 / 2014-11-09 +================== + + * deps: mime-db@~1.2.0 + - Add new mime types + +2.0.2 / 2014-09-28 +================== + + * deps: mime-db@~1.1.0 + - Add new mime types + - Update charsets + +2.0.1 / 2014-09-07 +================== + + * Support Node.js 0.6 + +2.0.0 / 2014-09-02 +================== + + * Use `mime-db` + * Remove `.define()` + +1.0.2 / 2014-08-04 +================== + + * Set charset=utf-8 for `text/javascript` + +1.0.1 / 2014-06-24 +================== + + * Add `text/jsx` type + +1.0.0 / 2014-05-12 +================== + + * Return `false` for unknown types + * Set charset=utf-8 for `application/json` + +0.1.0 / 2014-05-02 +================== + + * Initial release diff --git a/node_modules/mime-types/LICENSE b/node_modules/mime-types/LICENSE new file mode 100644 index 0000000..0616607 --- /dev/null +++ b/node_modules/mime-types/LICENSE @@ -0,0 +1,23 @@ +(The MIT License) + +Copyright (c) 2014 Jonathan Ong +Copyright (c) 2015 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/mime-types/README.md b/node_modules/mime-types/README.md new file mode 100644 index 0000000..48d2fb4 --- /dev/null +++ b/node_modules/mime-types/README.md @@ -0,0 +1,113 @@ +# mime-types + +[![NPM Version][npm-version-image]][npm-url] +[![NPM Downloads][npm-downloads-image]][npm-url] +[![Node.js Version][node-version-image]][node-version-url] +[![Build Status][ci-image]][ci-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +The ultimate javascript content-type utility. + +Similar to [the `mime@1.x` module](https://www.npmjs.com/package/mime), except: + +- __No fallbacks.__ Instead of naively returning the first available type, + `mime-types` simply returns `false`, so do + `var type = mime.lookup('unrecognized') || 'application/octet-stream'`. +- No `new Mime()` business, so you could do `var lookup = require('mime-types').lookup`. +- No `.define()` functionality +- Bug fixes for `.lookup(path)` + +Otherwise, the API is compatible with `mime` 1.x. + +## Install + +This is a [Node.js](https://nodejs.org/en/) module available through the +[npm registry](https://www.npmjs.com/). Installation is done using the +[`npm install` command](https://docs.npmjs.com/getting-started/installing-npm-packages-locally): + +```sh +$ npm install mime-types +``` + +## Adding Types + +All mime types are based on [mime-db](https://www.npmjs.com/package/mime-db), +so open a PR there if you'd like to add mime types. + +## API + +```js +var mime = require('mime-types') +``` + +All functions return `false` if input is invalid or not found. + +### mime.lookup(path) + +Lookup the content-type associated with a file. + +```js +mime.lookup('json') // 'application/json' +mime.lookup('.md') // 'text/markdown' +mime.lookup('file.html') // 'text/html' +mime.lookup('folder/file.js') // 'application/javascript' +mime.lookup('folder/.htaccess') // false + +mime.lookup('cats') // false +``` + +### mime.contentType(type) + +Create a full content-type header given a content-type or extension. +When given an extension, `mime.lookup` is used to get the matching +content-type, otherwise the given content-type is used. Then if the +content-type does not already have a `charset` parameter, `mime.charset` +is used to get the default charset and add to the returned content-type. + +```js +mime.contentType('markdown') // 'text/x-markdown; charset=utf-8' +mime.contentType('file.json') // 'application/json; charset=utf-8' +mime.contentType('text/html') // 'text/html; charset=utf-8' +mime.contentType('text/html; charset=iso-8859-1') // 'text/html; charset=iso-8859-1' + +// from a full path +mime.contentType(path.extname('/path/to/file.json')) // 'application/json; charset=utf-8' +``` + +### mime.extension(type) + +Get the default extension for a content-type. + +```js +mime.extension('application/octet-stream') // 'bin' +``` + +### mime.charset(type) + +Lookup the implied default charset of a content-type. + +```js +mime.charset('text/markdown') // 'UTF-8' +``` + +### var type = mime.types[extension] + +A map of content-types by extension. + +### [extensions...] = mime.extensions[type] + +A map of extensions by content-type. 
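+### Example: serving files over HTTP
+
+A minimal sketch tying the helpers above together, assuming Node's built-in
+`http`, `path` and `fs` modules, a local `./public` directory and port 3000:
+
+```js
+var http = require('http')
+var path = require('path')
+var fs = require('fs')
+var mime = require('mime-types')
+
+http.createServer(function (req, res) {
+  // resolve the request against ./public (sketch only; no path sanitizing)
+  var file = path.join(__dirname, 'public', path.normalize(req.url))
+
+  // contentType() returns false for unknown extensions, so fall back
+  var type = mime.contentType(path.extname(file)) || 'application/octet-stream'
+  res.setHeader('Content-Type', type)
+
+  fs.createReadStream(file)
+    .on('error', function () { res.statusCode = 404; res.end() })
+    .pipe(res)
+}).listen(3000)
+```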
+ +## License + +[MIT](LICENSE) + +[ci-image]: https://badgen.net/github/checks/jshttp/mime-types/master?label=ci +[ci-url]: https://github.com/jshttp/mime-types/actions/workflows/ci.yml +[coveralls-image]: https://badgen.net/coveralls/c/github/jshttp/mime-types/master +[coveralls-url]: https://coveralls.io/r/jshttp/mime-types?branch=master +[node-version-image]: https://badgen.net/npm/node/mime-types +[node-version-url]: https://nodejs.org/en/download +[npm-downloads-image]: https://badgen.net/npm/dm/mime-types +[npm-url]: https://npmjs.org/package/mime-types +[npm-version-image]: https://badgen.net/npm/v/mime-types diff --git a/node_modules/mime-types/index.js b/node_modules/mime-types/index.js new file mode 100644 index 0000000..b9f34d5 --- /dev/null +++ b/node_modules/mime-types/index.js @@ -0,0 +1,188 @@ +/*! + * mime-types + * Copyright(c) 2014 Jonathan Ong + * Copyright(c) 2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + * @private + */ + +var db = require('mime-db') +var extname = require('path').extname + +/** + * Module variables. + * @private + */ + +var EXTRACT_TYPE_REGEXP = /^\s*([^;\s]*)(?:;|\s|$)/ +var TEXT_TYPE_REGEXP = /^text\//i + +/** + * Module exports. + * @public + */ + +exports.charset = charset +exports.charsets = { lookup: charset } +exports.contentType = contentType +exports.extension = extension +exports.extensions = Object.create(null) +exports.lookup = lookup +exports.types = Object.create(null) + +// Populate the extensions/types maps +populateMaps(exports.extensions, exports.types) + +/** + * Get the default charset for a MIME type. + * + * @param {string} type + * @return {boolean|string} + */ + +function charset (type) { + if (!type || typeof type !== 'string') { + return false + } + + // TODO: use media-typer + var match = EXTRACT_TYPE_REGEXP.exec(type) + var mime = match && db[match[1].toLowerCase()] + + if (mime && mime.charset) { + return mime.charset + } + + // default text/* to utf-8 + if (match && TEXT_TYPE_REGEXP.test(match[1])) { + return 'UTF-8' + } + + return false +} + +/** + * Create a full Content-Type header given a MIME type or extension. + * + * @param {string} str + * @return {boolean|string} + */ + +function contentType (str) { + // TODO: should this even be in this module? + if (!str || typeof str !== 'string') { + return false + } + + var mime = str.indexOf('/') === -1 + ? exports.lookup(str) + : str + + if (!mime) { + return false + } + + // TODO: use content-type or other module + if (mime.indexOf('charset') === -1) { + var charset = exports.charset(mime) + if (charset) mime += '; charset=' + charset.toLowerCase() + } + + return mime +} + +/** + * Get the default extension for a MIME type. + * + * @param {string} type + * @return {boolean|string} + */ + +function extension (type) { + if (!type || typeof type !== 'string') { + return false + } + + // TODO: use media-typer + var match = EXTRACT_TYPE_REGEXP.exec(type) + + // get extensions + var exts = match && exports.extensions[match[1].toLowerCase()] + + if (!exts || !exts.length) { + return false + } + + return exts[0] +} + +/** + * Lookup the MIME type for a file path/extension. + * + * @param {string} path + * @return {boolean|string} + */ + +function lookup (path) { + if (!path || typeof path !== 'string') { + return false + } + + // get the extension ("ext" or ".ext" or full path) + var extension = extname('x.' 
+ path) + .toLowerCase() + .substr(1) + + if (!extension) { + return false + } + + return exports.types[extension] || false +} + +/** + * Populate the extensions and types maps. + * @private + */ + +function populateMaps (extensions, types) { + // source preference (least -> most) + var preference = ['nginx', 'apache', undefined, 'iana'] + + Object.keys(db).forEach(function forEachMimeType (type) { + var mime = db[type] + var exts = mime.extensions + + if (!exts || !exts.length) { + return + } + + // mime -> extensions + extensions[type] = exts + + // extension -> mime + for (var i = 0; i < exts.length; i++) { + var extension = exts[i] + + if (types[extension]) { + var from = preference.indexOf(db[types[extension]].source) + var to = preference.indexOf(mime.source) + + if (types[extension] !== 'application/octet-stream' && + (from > to || (from === to && types[extension].substr(0, 12) === 'application/'))) { + // skip the remapping + continue + } + } + + // set the extension -> mime + types[extension] = type + } + }) +} diff --git a/node_modules/mime-types/package.json b/node_modules/mime-types/package.json new file mode 100644 index 0000000..bbef696 --- /dev/null +++ b/node_modules/mime-types/package.json @@ -0,0 +1,44 @@ +{ + "name": "mime-types", + "description": "The ultimate javascript content-type utility.", + "version": "2.1.35", + "contributors": [ + "Douglas Christopher Wilson ", + "Jeremiah Senkpiel (https://searchbeam.jit.su)", + "Jonathan Ong (http://jongleberry.com)" + ], + "license": "MIT", + "keywords": [ + "mime", + "types" + ], + "repository": "jshttp/mime-types", + "dependencies": { + "mime-db": "1.52.0" + }, + "devDependencies": { + "eslint": "7.32.0", + "eslint-config-standard": "14.1.1", + "eslint-plugin-import": "2.25.4", + "eslint-plugin-markdown": "2.2.1", + "eslint-plugin-node": "11.1.0", + "eslint-plugin-promise": "5.2.0", + "eslint-plugin-standard": "4.1.0", + "mocha": "9.2.2", + "nyc": "15.1.0" + }, + "files": [ + "HISTORY.md", + "LICENSE", + "index.js" + ], + "engines": { + "node": ">= 0.6" + }, + "scripts": { + "lint": "eslint .", + "test": "mocha --reporter spec test/test.js", + "test-ci": "nyc --reporter=lcov --reporter=text npm test", + "test-cov": "nyc --reporter=html --reporter=text npm test" + } +} diff --git a/node_modules/mime/CHANGELOG.md b/node_modules/mime/CHANGELOG.md new file mode 100644 index 0000000..dd25431 --- /dev/null +++ b/node_modules/mime/CHANGELOG.md @@ -0,0 +1,296 @@ +# Changelog + +All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. 
+ +## [2.6.0](https://github.com/broofa/mime/compare/v2.5.2...v2.6.0) (2021-11-02) + + +### Features + +* mime-db@1.50.0 ([cef0cc4](https://github.com/broofa/mime/commit/cef0cc484ff6d05ff1e12b54ca3e8b856fbc14d8)) + +### [2.5.2](https://github.com/broofa/mime/compare/v2.5.0...v2.5.2) (2021-02-17) + + +### Bug Fixes + +* update to mime-db@1.46.0, fixes [#253](https://github.com/broofa/mime/issues/253) ([f10e6aa](https://github.com/broofa/mime/commit/f10e6aa62e1356de7e2491d7fb4374c8dac65800)) + +## [2.5.0](https://github.com/broofa/mime/compare/v2.4.7...v2.5.0) (2021-01-16) + + +### Features + +* improved CLI ([#244](https://github.com/broofa/mime/issues/244)) ([c8a8356](https://github.com/broofa/mime/commit/c8a8356e3b27f3ef46b64b89b428fdb547b14d5f)) + +### [2.4.7](https://github.com/broofa/mime/compare/v2.4.6...v2.4.7) (2020-12-16) + + +### Bug Fixes + +* update to latest mime-db ([43b09ef](https://github.com/broofa/mime/commit/43b09eff0233eacc449af2b1f99a19ba9e104a44)) + +### [2.4.6](https://github.com/broofa/mime/compare/v2.4.5...v2.4.6) (2020-05-27) + + +### Bug Fixes + +* add cli.js to package.json files ([#237](https://github.com/broofa/mime/issues/237)) ([6c070bc](https://github.com/broofa/mime/commit/6c070bc298fa12a48e2ed126fbb9de641a1e7ebc)) + +### [2.4.5](https://github.com/broofa/mime/compare/v2.4.4...v2.4.5) (2020-05-01) + + +### Bug Fixes + +* fix [#236](https://github.com/broofa/mime/issues/236) ([7f4ecd0](https://github.com/broofa/mime/commit/7f4ecd0d850ed22c9e3bfda2c11fc74e4dde12a7)) +* update to latest mime-db ([c5cb3f2](https://github.com/broofa/mime/commit/c5cb3f2ab8b07642a066efbde1142af1b90c927b)) + +### [2.4.4](https://github.com/broofa/mime/compare/v2.4.3...v2.4.4) (2019-06-07) + + + +### [2.4.3](https://github.com/broofa/mime/compare/v2.4.2...v2.4.3) (2019-05-15) + + + +### [2.4.2](https://github.com/broofa/mime/compare/v2.4.1...v2.4.2) (2019-04-07) + + +### Bug Fixes + +* don't use arrow function introduced in 2.4.1 ([2e00b5c](https://github.com/broofa/mime/commit/2e00b5c)) + + + +### [2.4.1](https://github.com/broofa/mime/compare/v2.4.0...v2.4.1) (2019-04-03) + + +### Bug Fixes + +* update MDN and mime-db types ([3e567a9](https://github.com/broofa/mime/commit/3e567a9)) + + + +# [2.4.0](https://github.com/broofa/mime/compare/v2.3.1...v2.4.0) (2018-11-26) + + +### Features + +* Bind exported methods ([9d2a7b8](https://github.com/broofa/mime/commit/9d2a7b8)) +* update to mime-db@1.37.0 ([49e6e41](https://github.com/broofa/mime/commit/49e6e41)) + + + +### [2.3.1](https://github.com/broofa/mime/compare/v2.3.0...v2.3.1) (2018-04-11) + + +### Bug Fixes + +* fix [#198](https://github.com/broofa/mime/issues/198) ([25ca180](https://github.com/broofa/mime/commit/25ca180)) + + + +# [2.3.0](https://github.com/broofa/mime/compare/v2.2.2...v2.3.0) (2018-04-11) + + +### Bug Fixes + +* fix [#192](https://github.com/broofa/mime/issues/192) ([5c35df6](https://github.com/broofa/mime/commit/5c35df6)) + + +### Features + +* add travis-ci testing ([d64160f](https://github.com/broofa/mime/commit/d64160f)) + + + +### [2.2.2](https://github.com/broofa/mime/compare/v2.2.1...v2.2.2) (2018-03-30) + + +### Bug Fixes + +* update types files to mime-db@1.32.0 ([85aac16](https://github.com/broofa/mime/commit/85aac16)) + + +### [2.2.1](https://github.com/broofa/mime/compare/v2.2.0...v2.2.1) (2018-03-30) + + +### Bug Fixes + +* Retain type->extension mappings for non-default types. 
Fixes [#180](https://github.com/broofa/mime/issues/180) ([b5c83fb](https://github.com/broofa/mime/commit/b5c83fb)) + + + +# [2.2.0](https://github.com/broofa/mime/compare/v2.1.0...v2.2.0) (2018-01-04) + + +### Features + +* Retain type->extension mappings for non-default types. Fixes [#180](https://github.com/broofa/mime/issues/180) ([10f82ac](https://github.com/broofa/mime/commit/10f82ac)) + + + +# [2.1.0](https://github.com/broofa/mime/compare/v2.0.5...v2.1.0) (2017-12-22) + + +### Features + +* Upgrade to mime-db@1.32.0. Fixes [#185](https://github.com/broofa/mime/issues/185) ([3f775ba](https://github.com/broofa/mime/commit/3f775ba)) + + + +### [2.0.5](https://github.com/broofa/mime/compare/v2.0.1...v2.0.5) (2017-12-22) + + +### Bug Fixes + +* ES5 support (back to node v0.4) ([f14ccb6](https://github.com/broofa/mime/commit/f14ccb6)) + + + +# Changelog + +### v2.0.4 (24/11/2017) +- [**closed**] Switch to mime-score module for resolving extension contention issues. [#182](https://github.com/broofa/mime/issues/182) +- [**closed**] Update mime-db to 1.31.0 in v1.x branch [#181](https://github.com/broofa/mime/issues/181) + +--- + +## v1.5.0 (22/11/2017) +- [**closed**] need ES5 version ready in npm package [#179](https://github.com/broofa/mime/issues/179) +- [**closed**] mime-db no trace of iWork - pages / numbers / etc. [#178](https://github.com/broofa/mime/issues/178) +- [**closed**] How it works in brownser ? [#176](https://github.com/broofa/mime/issues/176) +- [**closed**] Missing `./Mime` [#175](https://github.com/broofa/mime/issues/175) +- [**closed**] Vulnerable Regular Expression [#167](https://github.com/broofa/mime/issues/167) + +--- + +### v2.0.3 (25/09/2017) +*No changelog for this release.* + +--- + +### v1.4.1 (25/09/2017) +- [**closed**] Issue when bundling with webpack [#172](https://github.com/broofa/mime/issues/172) + +--- + +### v2.0.2 (15/09/2017) +- [**V2**] fs.readFileSync is not a function [#165](https://github.com/broofa/mime/issues/165) +- [**closed**] The extension for video/quicktime should map to .mov, not .qt [#164](https://github.com/broofa/mime/issues/164) +- [**V2**] [v2 Feedback request] Mime class API [#163](https://github.com/broofa/mime/issues/163) +- [**V2**] [v2 Feedback request] Resolving conflicts over extensions [#162](https://github.com/broofa/mime/issues/162) +- [**V2**] Allow callers to load module with official, full, or no defined types. 
[#161](https://github.com/broofa/mime/issues/161) +- [**V2**] Use "facets" to resolve extension conflicts [#160](https://github.com/broofa/mime/issues/160) +- [**V2**] Remove fs and path dependencies [#152](https://github.com/broofa/mime/issues/152) +- [**V2**] Default content-type should not be application/octet-stream [#139](https://github.com/broofa/mime/issues/139) +- [**V2**] reset mime-types [#124](https://github.com/broofa/mime/issues/124) +- [**V2**] Extensionless paths should return null or false [#113](https://github.com/broofa/mime/issues/113) + +--- + +### v2.0.1 (14/09/2017) +- [**closed**] Changelog for v2.0 does not mention breaking changes [#171](https://github.com/broofa/mime/issues/171) +- [**closed**] MIME breaking with 'class' declaration as it is without 'use strict mode' [#170](https://github.com/broofa/mime/issues/170) + +--- + +## v2.0.0 (12/09/2017) +- [**closed**] woff and woff2 [#168](https://github.com/broofa/mime/issues/168) + +--- + +## v1.4.0 (28/08/2017) +- [**closed**] support for ac3 voc files [#159](https://github.com/broofa/mime/issues/159) +- [**closed**] Help understanding change from application/xml to text/xml [#158](https://github.com/broofa/mime/issues/158) +- [**closed**] no longer able to override mimetype [#157](https://github.com/broofa/mime/issues/157) +- [**closed**] application/vnd.adobe.photoshop [#147](https://github.com/broofa/mime/issues/147) +- [**closed**] Directories should appear as something other than application/octet-stream [#135](https://github.com/broofa/mime/issues/135) +- [**closed**] requested features [#131](https://github.com/broofa/mime/issues/131) +- [**closed**] Make types.json loading optional? [#129](https://github.com/broofa/mime/issues/129) +- [**closed**] Cannot find module './types.json' [#120](https://github.com/broofa/mime/issues/120) +- [**V2**] .wav files show up as "audio/x-wav" instead of "audio/x-wave" [#118](https://github.com/broofa/mime/issues/118) +- [**closed**] Don't be a pain in the ass for node community [#108](https://github.com/broofa/mime/issues/108) +- [**closed**] don't make default_type global [#78](https://github.com/broofa/mime/issues/78) +- [**closed**] mime.extension() fails if the content-type is parameterized [#74](https://github.com/broofa/mime/issues/74) + +--- + +### v1.3.6 (11/05/2017) +- [**closed**] .md should be text/markdown as of March 2016 [#154](https://github.com/broofa/mime/issues/154) +- [**closed**] Error while installing mime [#153](https://github.com/broofa/mime/issues/153) +- [**closed**] application/manifest+json [#149](https://github.com/broofa/mime/issues/149) +- [**closed**] Dynamic adaptive streaming over HTTP (DASH) file extension typo [#141](https://github.com/broofa/mime/issues/141) +- [**closed**] charsets image/png undefined [#140](https://github.com/broofa/mime/issues/140) +- [**closed**] Mime-db dependency out of date [#130](https://github.com/broofa/mime/issues/130) +- [**closed**] how to support plist? [#126](https://github.com/broofa/mime/issues/126) +- [**closed**] how does .types file format look like? 
[#123](https://github.com/broofa/mime/issues/123) +- [**closed**] Feature: support for expanding MIME patterns [#121](https://github.com/broofa/mime/issues/121) +- [**closed**] DEBUG_MIME doesn't work [#117](https://github.com/broofa/mime/issues/117) + +--- + +### v1.3.4 (06/02/2015) +*No changelog for this release.* + +--- + +### v1.3.3 (06/02/2015) +*No changelog for this release.* + +--- + +### v1.3.1 (05/02/2015) +- [**closed**] Consider adding support for Handlebars .hbs file ending [#111](https://github.com/broofa/mime/issues/111) +- [**closed**] Consider adding support for hjson. [#110](https://github.com/broofa/mime/issues/110) +- [**closed**] Add mime type for Opus audio files [#94](https://github.com/broofa/mime/issues/94) +- [**closed**] Consider making the `Requesting New Types` information more visible [#77](https://github.com/broofa/mime/issues/77) + +--- + +## v1.3.0 (05/02/2015) +- [**closed**] Add common name? [#114](https://github.com/broofa/mime/issues/114) +- [**closed**] application/x-yaml [#104](https://github.com/broofa/mime/issues/104) +- [**closed**] Add mime type for WOFF file format 2.0 [#102](https://github.com/broofa/mime/issues/102) +- [**closed**] application/x-msi for .msi [#99](https://github.com/broofa/mime/issues/99) +- [**closed**] Add mimetype for gettext translation files [#98](https://github.com/broofa/mime/issues/98) +- [**closed**] collaborators [#88](https://github.com/broofa/mime/issues/88) +- [**closed**] getting errot in installation of mime module...any1 can help? [#87](https://github.com/broofa/mime/issues/87) +- [**closed**] should application/json's charset be utf8? [#86](https://github.com/broofa/mime/issues/86) +- [**closed**] Add "license" and "licenses" to package.json [#81](https://github.com/broofa/mime/issues/81) +- [**closed**] lookup with extension-less file on Windows returns wrong type [#68](https://github.com/broofa/mime/issues/68) + +--- + +### v1.2.11 (15/08/2013) +- [**closed**] Update mime.types [#65](https://github.com/broofa/mime/issues/65) +- [**closed**] Publish a new version [#63](https://github.com/broofa/mime/issues/63) +- [**closed**] README should state upfront that "application/octet-stream" is default for unknown extension [#55](https://github.com/broofa/mime/issues/55) +- [**closed**] Suggested improvement to the charset API [#52](https://github.com/broofa/mime/issues/52) + +--- + +### v1.2.10 (25/07/2013) +- [**closed**] Mime type for woff files should be application/font-woff and not application/x-font-woff [#62](https://github.com/broofa/mime/issues/62) +- [**closed**] node.types in conflict with mime.types [#51](https://github.com/broofa/mime/issues/51) + +--- + +### v1.2.9 (17/01/2013) +- [**closed**] Please update "mime" NPM [#49](https://github.com/broofa/mime/issues/49) +- [**closed**] Please add semicolon [#46](https://github.com/broofa/mime/issues/46) +- [**closed**] parse full mime types [#43](https://github.com/broofa/mime/issues/43) + +--- + +### v1.2.8 (10/01/2013) +- [**closed**] /js directory mime is application/javascript. Is it correct? [#47](https://github.com/broofa/mime/issues/47) +- [**closed**] Add mime types for lua code. 
[#45](https://github.com/broofa/mime/issues/45) + +--- + +### v1.2.7 (19/10/2012) +- [**closed**] cannot install 1.2.7 via npm [#41](https://github.com/broofa/mime/issues/41) +- [**closed**] Transfer ownership to @broofa [#36](https://github.com/broofa/mime/issues/36) +- [**closed**] it's wrong to set charset to UTF-8 for text [#30](https://github.com/broofa/mime/issues/30) +- [**closed**] Allow multiple instances of MIME types container [#27](https://github.com/broofa/mime/issues/27) diff --git a/node_modules/mime/LICENSE b/node_modules/mime/LICENSE new file mode 100644 index 0000000..d3f46f7 --- /dev/null +++ b/node_modules/mime/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2010 Benjamin Thomas, Robert Kieffer + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/mime/Mime.js b/node_modules/mime/Mime.js new file mode 100644 index 0000000..969a66e --- /dev/null +++ b/node_modules/mime/Mime.js @@ -0,0 +1,97 @@ +'use strict'; + +/** + * @param typeMap [Object] Map of MIME type -> Array[extensions] + * @param ... + */ +function Mime() { + this._types = Object.create(null); + this._extensions = Object.create(null); + + for (let i = 0; i < arguments.length; i++) { + this.define(arguments[i]); + } + + this.define = this.define.bind(this); + this.getType = this.getType.bind(this); + this.getExtension = this.getExtension.bind(this); +} + +/** + * Define mimetype -> extension mappings. Each key is a mime-type that maps + * to an array of extensions associated with the type. The first extension is + * used as the default extension for the type. + * + * e.g. mime.define({'audio/ogg', ['oga', 'ogg', 'spx']}); + * + * If a type declares an extension that has already been defined, an error will + * be thrown. To suppress this error and force the extension to be associated + * with the new type, pass `force`=true. Alternatively, you may prefix the + * extension with "*" to map the type to extension, without mapping the + * extension to the type. + * + * e.g. mime.define({'audio/wav', ['wav']}, {'audio/x-wav', ['*wav']}); + * + * + * @param map (Object) type definitions + * @param force (Boolean) if true, force overriding of existing definitions + */ +Mime.prototype.define = function(typeMap, force) { + for (let type in typeMap) { + let extensions = typeMap[type].map(function(t) { + return t.toLowerCase(); + }); + type = type.toLowerCase(); + + for (let i = 0; i < extensions.length; i++) { + const ext = extensions[i]; + + // '*' prefix = not the preferred type for this extension. 
So fixup the + // extension, and skip it. + if (ext[0] === '*') { + continue; + } + + if (!force && (ext in this._types)) { + throw new Error( + 'Attempt to change mapping for "' + ext + + '" extension from "' + this._types[ext] + '" to "' + type + + '". Pass `force=true` to allow this, otherwise remove "' + ext + + '" from the list of extensions for "' + type + '".' + ); + } + + this._types[ext] = type; + } + + // Use first extension as default + if (force || !this._extensions[type]) { + const ext = extensions[0]; + this._extensions[type] = (ext[0] !== '*') ? ext : ext.substr(1); + } + } +}; + +/** + * Lookup a mime type based on extension + */ +Mime.prototype.getType = function(path) { + path = String(path); + let last = path.replace(/^.*[/\\]/, '').toLowerCase(); + let ext = last.replace(/^.*\./, '').toLowerCase(); + + let hasPath = last.length < path.length; + let hasDot = ext.length < last.length - 1; + + return (hasDot || !hasPath) && this._types[ext] || null; +}; + +/** + * Return file extension associated with a mime type + */ +Mime.prototype.getExtension = function(type) { + type = /^\s*([^;\s]*)/.test(type) && RegExp.$1; + return type && this._extensions[type.toLowerCase()] || null; +}; + +module.exports = Mime; diff --git a/node_modules/mime/README.md b/node_modules/mime/README.md new file mode 100644 index 0000000..b08316f --- /dev/null +++ b/node_modules/mime/README.md @@ -0,0 +1,187 @@ + +# Mime + +A comprehensive, compact MIME type module. + +[![Build Status](https://travis-ci.org/broofa/mime.svg?branch=master)](https://travis-ci.org/broofa/mime) + +## Version 2 Notes + +Version 2 is a breaking change from 1.x as the semver implies. Specifically: + +* `lookup()` renamed to `getType()` +* `extension()` renamed to `getExtension()` +* `charset()` and `load()` methods have been removed + +If you prefer the legacy version of this module please `npm install mime@^1`. Version 1 docs may be found [here](https://github.com/broofa/mime/tree/v1.4.0). + +## Install + +### NPM +``` +npm install mime +``` + +### Browser + +It is recommended that you use a bundler such as +[webpack](https://webpack.github.io/) or [browserify](http://browserify.org/) to +package your code. However, browser-ready versions are available via wzrd.in. +E.g. For the full version: + + + + +Or, for the `mime/lite` version: + + + + +## Quick Start + +For the full version (800+ MIME types, 1,000+ extensions): + +```javascript +const mime = require('mime'); + +mime.getType('txt'); // ⇨ 'text/plain' +mime.getExtension('text/plain'); // ⇨ 'txt' +``` + +See [Mime API](#mime-api) below for API details. + +## Lite Version + +There is also a "lite" version of this module that omits vendor-specific +(`*/vnd.*`) and experimental (`*/x-*`) types. It weighs in at ~2.5KB, compared +to 8KB for the full version. To load the lite version: + +```javascript +const mime = require('mime/lite'); +``` + +## Mime .vs. mime-types .vs. mime-db modules + +For those of you wondering about the difference between these [popular] NPM modules, +here's a brief rundown ... + +[`mime-db`](https://github.com/jshttp/mime-db) is "the source of +truth" for MIME type information. It is not an API. Rather, it is a canonical +dataset of mime type definitions pulled from IANA, Apache, NGINX, and custom mappings +submitted by the Node.js community. + +[`mime-types`](https://github.com/jshttp/mime-types) is a thin +wrapper around mime-db that provides an API drop-in compatible(ish) with `mime @ < v1.3.6` API. 
+ +`mime` is, as of v2, a self-contained module bundled with a pre-optimized version +of the `mime-db` dataset. It provides a simplified API with the following characteristics: + +* Intelligently resolved type conflicts (See [mime-score](https://github.com/broofa/mime-score) for details) +* Method naming consistent with industry best-practices +* Compact footprint. E.g. The minified+compressed sizes of the various modules: + +Module | Size +--- | --- +`mime-db` | 18 KB +`mime-types` | same as mime-db +`mime` | 8 KB +`mime/lite` | 2 KB + +## Mime API + +Both `require('mime')` and `require('mime/lite')` return instances of the MIME +class, documented below. + +Note: Inputs to this API are case-insensitive. Outputs (returned values) will +be lowercase. + +### new Mime(typeMap, ... more maps) + +Most users of this module will not need to create Mime instances directly. +However if you would like to create custom mappings, you may do so as follows +... + +```javascript +// Require Mime class +const Mime = require('mime/Mime'); + +// Define mime type -> extensions map +const typeMap = { + 'text/abc': ['abc', 'alpha', 'bet'], + 'text/def': ['leppard'] +}; + +// Create and use Mime instance +const myMime = new Mime(typeMap); +myMime.getType('abc'); // ⇨ 'text/abc' +myMime.getExtension('text/def'); // ⇨ 'leppard' +``` + +If more than one map argument is provided, each map is `define()`ed (see below), in order. + +### mime.getType(pathOrExtension) + +Get mime type for the given path or extension. E.g. + +```javascript +mime.getType('js'); // ⇨ 'application/javascript' +mime.getType('json'); // ⇨ 'application/json' + +mime.getType('txt'); // ⇨ 'text/plain' +mime.getType('dir/text.txt'); // ⇨ 'text/plain' +mime.getType('dir\\text.txt'); // ⇨ 'text/plain' +mime.getType('.text.txt'); // ⇨ 'text/plain' +mime.getType('.txt'); // ⇨ 'text/plain' +``` + +`null` is returned in cases where an extension is not detected or recognized + +```javascript +mime.getType('foo/txt'); // ⇨ null +mime.getType('bogus_type'); // ⇨ null +``` + +### mime.getExtension(type) +Get extension for the given mime type. Charset options (often included in +Content-Type headers) are ignored. + +```javascript +mime.getExtension('text/plain'); // ⇨ 'txt' +mime.getExtension('application/json'); // ⇨ 'json' +mime.getExtension('text/html; charset=utf8'); // ⇨ 'html' +``` + +### mime.define(typeMap[, force = false]) + +Define [more] type mappings. + +`typeMap` is a map of type -> extensions, as documented in `new Mime`, above. + +By default this method will throw an error if you try to map a type to an +extension that is already assigned to another type. Passing `true` for the +`force` argument will suppress this behavior (overriding any previous mapping). + +```javascript +mime.define({'text/x-abc': ['abc', 'abcd']}); + +mime.getType('abcd'); // ⇨ 'text/x-abc' +mime.getExtension('text/x-abc') // ⇨ 'abc' +``` + +## Command Line + + mime [path_or_extension] + +E.g. 
+ + > mime scripts/jquery.js + application/javascript + +---- +Markdown generated from [src/README_js.md](src/README_js.md) by [![RunMD Logo](http://i.imgur.com/h0FVyzU.png)](https://github.com/broofa/runmd) \ No newline at end of file diff --git a/node_modules/mime/cli.js b/node_modules/mime/cli.js new file mode 100755 index 0000000..ab70a49 --- /dev/null +++ b/node_modules/mime/cli.js @@ -0,0 +1,46 @@ +#!/usr/bin/env node + +'use strict'; + +process.title = 'mime'; +let mime = require('.'); +let pkg = require('./package.json'); +let args = process.argv.splice(2); + +if (args.includes('--version') || args.includes('-v') || args.includes('--v')) { + console.log(pkg.version); + process.exit(0); +} else if (args.includes('--name') || args.includes('-n') || args.includes('--n')) { + console.log(pkg.name); + process.exit(0); +} else if (args.includes('--help') || args.includes('-h') || args.includes('--h')) { + console.log(pkg.name + ' - ' + pkg.description + '\n'); + console.log(`Usage: + + mime [flags] [path_or_extension] + + Flags: + --help, -h Show this message + --version, -v Display the version + --name, -n Print the name of the program + + Note: the command will exit after it executes if a command is specified + The path_or_extension is the path to the file or the extension of the file. + + Examples: + mime --help + mime --version + mime --name + mime -v + mime src/log.js + mime new.py + mime foo.sh + `); + process.exit(0); +} + +let file = args[0]; +let type = mime.getType(file); + +process.stdout.write(type + '\n'); + diff --git a/node_modules/mime/index.js b/node_modules/mime/index.js new file mode 100644 index 0000000..fadcf8d --- /dev/null +++ b/node_modules/mime/index.js @@ -0,0 +1,4 @@ +'use strict'; + +let Mime = require('./Mime'); +module.exports = new Mime(require('./types/standard'), require('./types/other')); diff --git a/node_modules/mime/lite.js b/node_modules/mime/lite.js new file mode 100644 index 0000000..835cffb --- /dev/null +++ b/node_modules/mime/lite.js @@ -0,0 +1,4 @@ +'use strict'; + +let Mime = require('./Mime'); +module.exports = new Mime(require('./types/standard')); diff --git a/node_modules/mime/package.json b/node_modules/mime/package.json new file mode 100644 index 0000000..df7f369 --- /dev/null +++ b/node_modules/mime/package.json @@ -0,0 +1,52 @@ +{ + "author": { + "name": "Robert Kieffer", + "url": "http://github.com/broofa", + "email": "robert@broofa.com" + }, + "engines": { + "node": ">=4.0.0" + }, + "bin": { + "mime": "cli.js" + }, + "contributors": [], + "description": "A comprehensive library for mime-type mapping", + "license": "MIT", + "dependencies": {}, + "devDependencies": { + "benchmark": "*", + "chalk": "4.1.2", + "eslint": "8.1.0", + "mime-db": "1.50.0", + "mime-score": "1.2.0", + "mime-types": "2.1.33", + "mocha": "9.1.3", + "runmd": "*", + "standard-version": "9.3.2" + }, + "files": [ + "index.js", + "lite.js", + "Mime.js", + "cli.js", + "/types" + ], + "scripts": { + "prepare": "node src/build.js && runmd --output README.md src/README_js.md", + "release": "standard-version", + "benchmark": "node src/benchmark.js", + "md": "runmd --watch --output README.md src/README_js.md", + "test": "mocha src/test.js" + }, + "keywords": [ + "util", + "mime" + ], + "name": "mime", + "repository": { + "url": "https://github.com/broofa/mime", + "type": "git" + }, + "version": "2.6.0" +} diff --git a/node_modules/mime/types/other.js b/node_modules/mime/types/other.js new file mode 100644 index 0000000..bb6a035 --- /dev/null +++ 
b/node_modules/mime/types/other.js @@ -0,0 +1 @@ +module.exports = {"application/prs.cww":["cww"],"application/vnd.1000minds.decision-model+xml":["1km"],"application/vnd.3gpp.pic-bw-large":["plb"],"application/vnd.3gpp.pic-bw-small":["psb"],"application/vnd.3gpp.pic-bw-var":["pvb"],"application/vnd.3gpp2.tcap":["tcap"],"application/vnd.3m.post-it-notes":["pwn"],"application/vnd.accpac.simply.aso":["aso"],"application/vnd.accpac.simply.imp":["imp"],"application/vnd.acucobol":["acu"],"application/vnd.acucorp":["atc","acutc"],"application/vnd.adobe.air-application-installer-package+zip":["air"],"application/vnd.adobe.formscentral.fcdt":["fcdt"],"application/vnd.adobe.fxp":["fxp","fxpl"],"application/vnd.adobe.xdp+xml":["xdp"],"application/vnd.adobe.xfdf":["xfdf"],"application/vnd.ahead.space":["ahead"],"application/vnd.airzip.filesecure.azf":["azf"],"application/vnd.airzip.filesecure.azs":["azs"],"application/vnd.amazon.ebook":["azw"],"application/vnd.americandynamics.acc":["acc"],"application/vnd.amiga.ami":["ami"],"application/vnd.android.package-archive":["apk"],"application/vnd.anser-web-certificate-issue-initiation":["cii"],"application/vnd.anser-web-funds-transfer-initiation":["fti"],"application/vnd.antix.game-component":["atx"],"application/vnd.apple.installer+xml":["mpkg"],"application/vnd.apple.keynote":["key"],"application/vnd.apple.mpegurl":["m3u8"],"application/vnd.apple.numbers":["numbers"],"application/vnd.apple.pages":["pages"],"application/vnd.apple.pkpass":["pkpass"],"application/vnd.aristanetworks.swi":["swi"],"application/vnd.astraea-software.iota":["iota"],"application/vnd.audiograph":["aep"],"application/vnd.balsamiq.bmml+xml":["bmml"],"application/vnd.blueice.multipass":["mpm"],"application/vnd.bmi":["bmi"],"application/vnd.businessobjects":["rep"],"application/vnd.chemdraw+xml":["cdxml"],"application/vnd.chipnuts.karaoke-mmd":["mmd"],"application/vnd.cinderella":["cdy"],"application/vnd.citationstyles.style+xml":["csl"],"application/vnd.claymore":["cla"],"application/vnd.cloanto.rp9":["rp9"],"application/vnd.clonk.c4group":["c4g","c4d","c4f","c4p","c4u"],"application/vnd.cluetrust.cartomobile-config":["c11amc"],"application/vnd.cluetrust.cartomobile-config-pkg":["c11amz"],"application/vnd.commonspace":["csp"],"application/vnd.contact.cmsg":["cdbcmsg"],"application/vnd.cosmocaller":["cmc"],"application/vnd.crick.clicker":["clkx"],"application/vnd.crick.clicker.keyboard":["clkk"],"application/vnd.crick.clicker.palette":["clkp"],"application/vnd.crick.clicker.template":["clkt"],"application/vnd.crick.clicker.wordbank":["clkw"],"application/vnd.criticaltools.wbs+xml":["wbs"],"application/vnd.ctc-posml":["pml"],"application/vnd.cups-ppd":["ppd"],"application/vnd.curl.car":["car"],"application/vnd.curl.pcurl":["pcurl"],"application/vnd.dart":["dart"],"application/vnd.data-vision.rdz":["rdz"],"application/vnd.dbf":["dbf"],"application/vnd.dece.data":["uvf","uvvf","uvd","uvvd"],"application/vnd.dece.ttml+xml":["uvt","uvvt"],"application/vnd.dece.unspecified":["uvx","uvvx"],"application/vnd.dece.zip":["uvz","uvvz"],"application/vnd.denovo.fcselayout-link":["fe_launch"],"application/vnd.dna":["dna"],"application/vnd.dolby.mlp":["mlp"],"application/vnd.dpgraph":["dpg"],"application/vnd.dreamfactory":["dfac"],"application/vnd.ds-keypoint":["kpxx"],"application/vnd.dvb.ait":["ait"],"application/vnd.dvb.service":["svc"],"application/vnd.dynageo":["geo"],"application/vnd.ecowin.chart":["mag"],"application/vnd.enliven":["nml"],"application/vnd.epson.esf":["esf"],"application/vnd.epson.
msf":["msf"],"application/vnd.epson.quickanime":["qam"],"application/vnd.epson.salt":["slt"],"application/vnd.epson.ssf":["ssf"],"application/vnd.eszigno3+xml":["es3","et3"],"application/vnd.ezpix-album":["ez2"],"application/vnd.ezpix-package":["ez3"],"application/vnd.fdf":["fdf"],"application/vnd.fdsn.mseed":["mseed"],"application/vnd.fdsn.seed":["seed","dataless"],"application/vnd.flographit":["gph"],"application/vnd.fluxtime.clip":["ftc"],"application/vnd.framemaker":["fm","frame","maker","book"],"application/vnd.frogans.fnc":["fnc"],"application/vnd.frogans.ltf":["ltf"],"application/vnd.fsc.weblaunch":["fsc"],"application/vnd.fujitsu.oasys":["oas"],"application/vnd.fujitsu.oasys2":["oa2"],"application/vnd.fujitsu.oasys3":["oa3"],"application/vnd.fujitsu.oasysgp":["fg5"],"application/vnd.fujitsu.oasysprs":["bh2"],"application/vnd.fujixerox.ddd":["ddd"],"application/vnd.fujixerox.docuworks":["xdw"],"application/vnd.fujixerox.docuworks.binder":["xbd"],"application/vnd.fuzzysheet":["fzs"],"application/vnd.genomatix.tuxedo":["txd"],"application/vnd.geogebra.file":["ggb"],"application/vnd.geogebra.tool":["ggt"],"application/vnd.geometry-explorer":["gex","gre"],"application/vnd.geonext":["gxt"],"application/vnd.geoplan":["g2w"],"application/vnd.geospace":["g3w"],"application/vnd.gmx":["gmx"],"application/vnd.google-apps.document":["gdoc"],"application/vnd.google-apps.presentation":["gslides"],"application/vnd.google-apps.spreadsheet":["gsheet"],"application/vnd.google-earth.kml+xml":["kml"],"application/vnd.google-earth.kmz":["kmz"],"application/vnd.grafeq":["gqf","gqs"],"application/vnd.groove-account":["gac"],"application/vnd.groove-help":["ghf"],"application/vnd.groove-identity-message":["gim"],"application/vnd.groove-injector":["grv"],"application/vnd.groove-tool-message":["gtm"],"application/vnd.groove-tool-template":["tpl"],"application/vnd.groove-vcard":["vcg"],"application/vnd.hal+xml":["hal"],"application/vnd.handheld-entertainment+xml":["zmm"],"application/vnd.hbci":["hbci"],"application/vnd.hhe.lesson-player":["les"],"application/vnd.hp-hpgl":["hpgl"],"application/vnd.hp-hpid":["hpid"],"application/vnd.hp-hps":["hps"],"application/vnd.hp-jlyt":["jlt"],"application/vnd.hp-pcl":["pcl"],"application/vnd.hp-pclxl":["pclxl"],"application/vnd.hydrostatix.sof-data":["sfd-hdstx"],"application/vnd.ibm.minipay":["mpy"],"application/vnd.ibm.modcap":["afp","listafp","list3820"],"application/vnd.ibm.rights-management":["irm"],"application/vnd.ibm.secure-container":["sc"],"application/vnd.iccprofile":["icc","icm"],"application/vnd.igloader":["igl"],"application/vnd.immervision-ivp":["ivp"],"application/vnd.immervision-ivu":["ivu"],"application/vnd.insors.igm":["igm"],"application/vnd.intercon.formnet":["xpw","xpx"],"application/vnd.intergeo":["i2g"],"application/vnd.intu.qbo":["qbo"],"application/vnd.intu.qfx":["qfx"],"application/vnd.ipunplugged.rcprofile":["rcprofile"],"application/vnd.irepository.package+xml":["irp"],"application/vnd.is-xpr":["xpr"],"application/vnd.isac.fcs":["fcs"],"application/vnd.jam":["jam"],"application/vnd.jcp.javame.midlet-rms":["rms"],"application/vnd.jisp":["jisp"],"application/vnd.joost.joda-archive":["joda"],"application/vnd.kahootz":["ktz","ktr"],"application/vnd.kde.karbon":["karbon"],"application/vnd.kde.kchart":["chrt"],"application/vnd.kde.kformula":["kfo"],"application/vnd.kde.kivio":["flw"],"application/vnd.kde.kontour":["kon"],"application/vnd.kde.kpresenter":["kpr","kpt"],"application/vnd.kde.kspread":["ksp"],"application/vnd.kde.kword":["kwd","kwt"],"appl
ication/vnd.kenameaapp":["htke"],"application/vnd.kidspiration":["kia"],"application/vnd.kinar":["kne","knp"],"application/vnd.koan":["skp","skd","skt","skm"],"application/vnd.kodak-descriptor":["sse"],"application/vnd.las.las+xml":["lasxml"],"application/vnd.llamagraphics.life-balance.desktop":["lbd"],"application/vnd.llamagraphics.life-balance.exchange+xml":["lbe"],"application/vnd.lotus-1-2-3":["123"],"application/vnd.lotus-approach":["apr"],"application/vnd.lotus-freelance":["pre"],"application/vnd.lotus-notes":["nsf"],"application/vnd.lotus-organizer":["org"],"application/vnd.lotus-screencam":["scm"],"application/vnd.lotus-wordpro":["lwp"],"application/vnd.macports.portpkg":["portpkg"],"application/vnd.mapbox-vector-tile":["mvt"],"application/vnd.mcd":["mcd"],"application/vnd.medcalcdata":["mc1"],"application/vnd.mediastation.cdkey":["cdkey"],"application/vnd.mfer":["mwf"],"application/vnd.mfmp":["mfm"],"application/vnd.micrografx.flo":["flo"],"application/vnd.micrografx.igx":["igx"],"application/vnd.mif":["mif"],"application/vnd.mobius.daf":["daf"],"application/vnd.mobius.dis":["dis"],"application/vnd.mobius.mbk":["mbk"],"application/vnd.mobius.mqy":["mqy"],"application/vnd.mobius.msl":["msl"],"application/vnd.mobius.plc":["plc"],"application/vnd.mobius.txf":["txf"],"application/vnd.mophun.application":["mpn"],"application/vnd.mophun.certificate":["mpc"],"application/vnd.mozilla.xul+xml":["xul"],"application/vnd.ms-artgalry":["cil"],"application/vnd.ms-cab-compressed":["cab"],"application/vnd.ms-excel":["xls","xlm","xla","xlc","xlt","xlw"],"application/vnd.ms-excel.addin.macroenabled.12":["xlam"],"application/vnd.ms-excel.sheet.binary.macroenabled.12":["xlsb"],"application/vnd.ms-excel.sheet.macroenabled.12":["xlsm"],"application/vnd.ms-excel.template.macroenabled.12":["xltm"],"application/vnd.ms-fontobject":["eot"],"application/vnd.ms-htmlhelp":["chm"],"application/vnd.ms-ims":["ims"],"application/vnd.ms-lrm":["lrm"],"application/vnd.ms-officetheme":["thmx"],"application/vnd.ms-outlook":["msg"],"application/vnd.ms-pki.seccat":["cat"],"application/vnd.ms-pki.stl":["*stl"],"application/vnd.ms-powerpoint":["ppt","pps","pot"],"application/vnd.ms-powerpoint.addin.macroenabled.12":["ppam"],"application/vnd.ms-powerpoint.presentation.macroenabled.12":["pptm"],"application/vnd.ms-powerpoint.slide.macroenabled.12":["sldm"],"application/vnd.ms-powerpoint.slideshow.macroenabled.12":["ppsm"],"application/vnd.ms-powerpoint.template.macroenabled.12":["potm"],"application/vnd.ms-project":["mpp","mpt"],"application/vnd.ms-word.document.macroenabled.12":["docm"],"application/vnd.ms-word.template.macroenabled.12":["dotm"],"application/vnd.ms-works":["wps","wks","wcm","wdb"],"application/vnd.ms-wpl":["wpl"],"application/vnd.ms-xpsdocument":["xps"],"application/vnd.mseq":["mseq"],"application/vnd.musician":["mus"],"application/vnd.muvee.style":["msty"],"application/vnd.mynfc":["taglet"],"application/vnd.neurolanguage.nlu":["nlu"],"application/vnd.nitf":["ntf","nitf"],"application/vnd.noblenet-directory":["nnd"],"application/vnd.noblenet-sealer":["nns"],"application/vnd.noblenet-web":["nnw"],"application/vnd.nokia.n-gage.ac+xml":["*ac"],"application/vnd.nokia.n-gage.data":["ngdat"],"application/vnd.nokia.n-gage.symbian.install":["n-gage"],"application/vnd.nokia.radio-preset":["rpst"],"application/vnd.nokia.radio-presets":["rpss"],"application/vnd.novadigm.edm":["edm"],"application/vnd.novadigm.edx":["edx"],"application/vnd.novadigm.ext":["ext"],"application/vnd.oasis.opendocument.chart":["odc"],"applicat
ion/vnd.oasis.opendocument.chart-template":["otc"],"application/vnd.oasis.opendocument.database":["odb"],"application/vnd.oasis.opendocument.formula":["odf"],"application/vnd.oasis.opendocument.formula-template":["odft"],"application/vnd.oasis.opendocument.graphics":["odg"],"application/vnd.oasis.opendocument.graphics-template":["otg"],"application/vnd.oasis.opendocument.image":["odi"],"application/vnd.oasis.opendocument.image-template":["oti"],"application/vnd.oasis.opendocument.presentation":["odp"],"application/vnd.oasis.opendocument.presentation-template":["otp"],"application/vnd.oasis.opendocument.spreadsheet":["ods"],"application/vnd.oasis.opendocument.spreadsheet-template":["ots"],"application/vnd.oasis.opendocument.text":["odt"],"application/vnd.oasis.opendocument.text-master":["odm"],"application/vnd.oasis.opendocument.text-template":["ott"],"application/vnd.oasis.opendocument.text-web":["oth"],"application/vnd.olpc-sugar":["xo"],"application/vnd.oma.dd2+xml":["dd2"],"application/vnd.openblox.game+xml":["obgx"],"application/vnd.openofficeorg.extension":["oxt"],"application/vnd.openstreetmap.data+xml":["osm"],"application/vnd.openxmlformats-officedocument.presentationml.presentation":["pptx"],"application/vnd.openxmlformats-officedocument.presentationml.slide":["sldx"],"application/vnd.openxmlformats-officedocument.presentationml.slideshow":["ppsx"],"application/vnd.openxmlformats-officedocument.presentationml.template":["potx"],"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet":["xlsx"],"application/vnd.openxmlformats-officedocument.spreadsheetml.template":["xltx"],"application/vnd.openxmlformats-officedocument.wordprocessingml.document":["docx"],"application/vnd.openxmlformats-officedocument.wordprocessingml.template":["dotx"],"application/vnd.osgeo.mapguide.package":["mgp"],"application/vnd.osgi.dp":["dp"],"application/vnd.osgi.subsystem":["esa"],"application/vnd.palm":["pdb","pqa","oprc"],"application/vnd.pawaafile":["paw"],"application/vnd.pg.format":["str"],"application/vnd.pg.osasli":["ei6"],"application/vnd.picsel":["efif"],"application/vnd.pmi.widget":["wg"],"application/vnd.pocketlearn":["plf"],"application/vnd.powerbuilder6":["pbd"],"application/vnd.previewsystems.box":["box"],"application/vnd.proteus.magazine":["mgz"],"application/vnd.publishare-delta-tree":["qps"],"application/vnd.pvi.ptid1":["ptid"],"application/vnd.quark.quarkxpress":["qxd","qxt","qwd","qwt","qxl","qxb"],"application/vnd.rar":["rar"],"application/vnd.realvnc.bed":["bed"],"application/vnd.recordare.musicxml":["mxl"],"application/vnd.recordare.musicxml+xml":["musicxml"],"application/vnd.rig.cryptonote":["cryptonote"],"application/vnd.rim.cod":["cod"],"application/vnd.rn-realmedia":["rm"],"application/vnd.rn-realmedia-vbr":["rmvb"],"application/vnd.route66.link66+xml":["link66"],"application/vnd.sailingtracker.track":["st"],"application/vnd.seemail":["see"],"application/vnd.sema":["sema"],"application/vnd.semd":["semd"],"application/vnd.semf":["semf"],"application/vnd.shana.informed.formdata":["ifm"],"application/vnd.shana.informed.formtemplate":["itp"],"application/vnd.shana.informed.interchange":["iif"],"application/vnd.shana.informed.package":["ipk"],"application/vnd.simtech-mindmapper":["twd","twds"],"application/vnd.smaf":["mmf"],"application/vnd.smart.teacher":["teacher"],"application/vnd.software602.filler.form+xml":["fo"],"application/vnd.solent.sdkm+xml":["sdkm","sdkd"],"application/vnd.spotfire.dxp":["dxp"],"application/vnd.spotfire.sfs":["sfs"],"application/vnd.stardivision.ca
lc":["sdc"],"application/vnd.stardivision.draw":["sda"],"application/vnd.stardivision.impress":["sdd"],"application/vnd.stardivision.math":["smf"],"application/vnd.stardivision.writer":["sdw","vor"],"application/vnd.stardivision.writer-global":["sgl"],"application/vnd.stepmania.package":["smzip"],"application/vnd.stepmania.stepchart":["sm"],"application/vnd.sun.wadl+xml":["wadl"],"application/vnd.sun.xml.calc":["sxc"],"application/vnd.sun.xml.calc.template":["stc"],"application/vnd.sun.xml.draw":["sxd"],"application/vnd.sun.xml.draw.template":["std"],"application/vnd.sun.xml.impress":["sxi"],"application/vnd.sun.xml.impress.template":["sti"],"application/vnd.sun.xml.math":["sxm"],"application/vnd.sun.xml.writer":["sxw"],"application/vnd.sun.xml.writer.global":["sxg"],"application/vnd.sun.xml.writer.template":["stw"],"application/vnd.sus-calendar":["sus","susp"],"application/vnd.svd":["svd"],"application/vnd.symbian.install":["sis","sisx"],"application/vnd.syncml+xml":["xsm"],"application/vnd.syncml.dm+wbxml":["bdm"],"application/vnd.syncml.dm+xml":["xdm"],"application/vnd.syncml.dmddf+xml":["ddf"],"application/vnd.tao.intent-module-archive":["tao"],"application/vnd.tcpdump.pcap":["pcap","cap","dmp"],"application/vnd.tmobile-livetv":["tmo"],"application/vnd.trid.tpt":["tpt"],"application/vnd.triscape.mxs":["mxs"],"application/vnd.trueapp":["tra"],"application/vnd.ufdl":["ufd","ufdl"],"application/vnd.uiq.theme":["utz"],"application/vnd.umajin":["umj"],"application/vnd.unity":["unityweb"],"application/vnd.uoml+xml":["uoml"],"application/vnd.vcx":["vcx"],"application/vnd.visio":["vsd","vst","vss","vsw"],"application/vnd.visionary":["vis"],"application/vnd.vsf":["vsf"],"application/vnd.wap.wbxml":["wbxml"],"application/vnd.wap.wmlc":["wmlc"],"application/vnd.wap.wmlscriptc":["wmlsc"],"application/vnd.webturbo":["wtb"],"application/vnd.wolfram.player":["nbp"],"application/vnd.wordperfect":["wpd"],"application/vnd.wqd":["wqd"],"application/vnd.wt.stf":["stf"],"application/vnd.xara":["xar"],"application/vnd.xfdl":["xfdl"],"application/vnd.yamaha.hv-dic":["hvd"],"application/vnd.yamaha.hv-script":["hvs"],"application/vnd.yamaha.hv-voice":["hvp"],"application/vnd.yamaha.openscoreformat":["osf"],"application/vnd.yamaha.openscoreformat.osfpvg+xml":["osfpvg"],"application/vnd.yamaha.smaf-audio":["saf"],"application/vnd.yamaha.smaf-phrase":["spf"],"application/vnd.yellowriver-custom-menu":["cmp"],"application/vnd.zul":["zir","zirz"],"application/vnd.zzazz.deck+xml":["zaz"],"application/x-7z-compressed":["7z"],"application/x-abiword":["abw"],"application/x-ace-compressed":["ace"],"application/x-apple-diskimage":["*dmg"],"application/x-arj":["arj"],"application/x-authorware-bin":["aab","x32","u32","vox"],"application/x-authorware-map":["aam"],"application/x-authorware-seg":["aas"],"application/x-bcpio":["bcpio"],"application/x-bdoc":["*bdoc"],"application/x-bittorrent":["torrent"],"application/x-blorb":["blb","blorb"],"application/x-bzip":["bz"],"application/x-bzip2":["bz2","boz"],"application/x-cbr":["cbr","cba","cbt","cbz","cb7"],"application/x-cdlink":["vcd"],"application/x-cfs-compressed":["cfs"],"application/x-chat":["chat"],"application/x-chess-pgn":["pgn"],"application/x-chrome-extension":["crx"],"application/x-cocoa":["cco"],"application/x-conference":["nsc"],"application/x-cpio":["cpio"],"application/x-csh":["csh"],"application/x-debian-package":["*deb","udeb"],"application/x-dgc-compressed":["dgc"],"application/x-director":["dir","dcr","dxr","cst","cct","cxt","w3d","fgd","swa"],"application/x-do
om":["wad"],"application/x-dtbncx+xml":["ncx"],"application/x-dtbook+xml":["dtb"],"application/x-dtbresource+xml":["res"],"application/x-dvi":["dvi"],"application/x-envoy":["evy"],"application/x-eva":["eva"],"application/x-font-bdf":["bdf"],"application/x-font-ghostscript":["gsf"],"application/x-font-linux-psf":["psf"],"application/x-font-pcf":["pcf"],"application/x-font-snf":["snf"],"application/x-font-type1":["pfa","pfb","pfm","afm"],"application/x-freearc":["arc"],"application/x-futuresplash":["spl"],"application/x-gca-compressed":["gca"],"application/x-glulx":["ulx"],"application/x-gnumeric":["gnumeric"],"application/x-gramps-xml":["gramps"],"application/x-gtar":["gtar"],"application/x-hdf":["hdf"],"application/x-httpd-php":["php"],"application/x-install-instructions":["install"],"application/x-iso9660-image":["*iso"],"application/x-iwork-keynote-sffkey":["*key"],"application/x-iwork-numbers-sffnumbers":["*numbers"],"application/x-iwork-pages-sffpages":["*pages"],"application/x-java-archive-diff":["jardiff"],"application/x-java-jnlp-file":["jnlp"],"application/x-keepass2":["kdbx"],"application/x-latex":["latex"],"application/x-lua-bytecode":["luac"],"application/x-lzh-compressed":["lzh","lha"],"application/x-makeself":["run"],"application/x-mie":["mie"],"application/x-mobipocket-ebook":["prc","mobi"],"application/x-ms-application":["application"],"application/x-ms-shortcut":["lnk"],"application/x-ms-wmd":["wmd"],"application/x-ms-wmz":["wmz"],"application/x-ms-xbap":["xbap"],"application/x-msaccess":["mdb"],"application/x-msbinder":["obd"],"application/x-mscardfile":["crd"],"application/x-msclip":["clp"],"application/x-msdos-program":["*exe"],"application/x-msdownload":["*exe","*dll","com","bat","*msi"],"application/x-msmediaview":["mvb","m13","m14"],"application/x-msmetafile":["*wmf","*wmz","*emf","emz"],"application/x-msmoney":["mny"],"application/x-mspublisher":["pub"],"application/x-msschedule":["scd"],"application/x-msterminal":["trm"],"application/x-mswrite":["wri"],"application/x-netcdf":["nc","cdf"],"application/x-ns-proxy-autoconfig":["pac"],"application/x-nzb":["nzb"],"application/x-perl":["pl","pm"],"application/x-pilot":["*prc","*pdb"],"application/x-pkcs12":["p12","pfx"],"application/x-pkcs7-certificates":["p7b","spc"],"application/x-pkcs7-certreqresp":["p7r"],"application/x-rar-compressed":["*rar"],"application/x-redhat-package-manager":["rpm"],"application/x-research-info-systems":["ris"],"application/x-sea":["sea"],"application/x-sh":["sh"],"application/x-shar":["shar"],"application/x-shockwave-flash":["swf"],"application/x-silverlight-app":["xap"],"application/x-sql":["sql"],"application/x-stuffit":["sit"],"application/x-stuffitx":["sitx"],"application/x-subrip":["srt"],"application/x-sv4cpio":["sv4cpio"],"application/x-sv4crc":["sv4crc"],"application/x-t3vm-image":["t3"],"application/x-tads":["gam"],"application/x-tar":["tar"],"application/x-tcl":["tcl","tk"],"application/x-tex":["tex"],"application/x-tex-tfm":["tfm"],"application/x-texinfo":["texinfo","texi"],"application/x-tgif":["*obj"],"application/x-ustar":["ustar"],"application/x-virtualbox-hdd":["hdd"],"application/x-virtualbox-ova":["ova"],"application/x-virtualbox-ovf":["ovf"],"application/x-virtualbox-vbox":["vbox"],"application/x-virtualbox-vbox-extpack":["vbox-extpack"],"application/x-virtualbox-vdi":["vdi"],"application/x-virtualbox-vhd":["vhd"],"application/x-virtualbox-vmdk":["vmdk"],"application/x-wais-source":["src"],"application/x-web-app-manifest+json":["webapp"],"application/x-x509-ca-cert":["der","
crt","pem"],"application/x-xfig":["fig"],"application/x-xliff+xml":["*xlf"],"application/x-xpinstall":["xpi"],"application/x-xz":["xz"],"application/x-zmachine":["z1","z2","z3","z4","z5","z6","z7","z8"],"audio/vnd.dece.audio":["uva","uvva"],"audio/vnd.digital-winds":["eol"],"audio/vnd.dra":["dra"],"audio/vnd.dts":["dts"],"audio/vnd.dts.hd":["dtshd"],"audio/vnd.lucent.voice":["lvp"],"audio/vnd.ms-playready.media.pya":["pya"],"audio/vnd.nuera.ecelp4800":["ecelp4800"],"audio/vnd.nuera.ecelp7470":["ecelp7470"],"audio/vnd.nuera.ecelp9600":["ecelp9600"],"audio/vnd.rip":["rip"],"audio/x-aac":["aac"],"audio/x-aiff":["aif","aiff","aifc"],"audio/x-caf":["caf"],"audio/x-flac":["flac"],"audio/x-m4a":["*m4a"],"audio/x-matroska":["mka"],"audio/x-mpegurl":["m3u"],"audio/x-ms-wax":["wax"],"audio/x-ms-wma":["wma"],"audio/x-pn-realaudio":["ram","ra"],"audio/x-pn-realaudio-plugin":["rmp"],"audio/x-realaudio":["*ra"],"audio/x-wav":["*wav"],"chemical/x-cdx":["cdx"],"chemical/x-cif":["cif"],"chemical/x-cmdf":["cmdf"],"chemical/x-cml":["cml"],"chemical/x-csml":["csml"],"chemical/x-xyz":["xyz"],"image/prs.btif":["btif"],"image/prs.pti":["pti"],"image/vnd.adobe.photoshop":["psd"],"image/vnd.airzip.accelerator.azv":["azv"],"image/vnd.dece.graphic":["uvi","uvvi","uvg","uvvg"],"image/vnd.djvu":["djvu","djv"],"image/vnd.dvb.subtitle":["*sub"],"image/vnd.dwg":["dwg"],"image/vnd.dxf":["dxf"],"image/vnd.fastbidsheet":["fbs"],"image/vnd.fpx":["fpx"],"image/vnd.fst":["fst"],"image/vnd.fujixerox.edmics-mmr":["mmr"],"image/vnd.fujixerox.edmics-rlc":["rlc"],"image/vnd.microsoft.icon":["ico"],"image/vnd.ms-dds":["dds"],"image/vnd.ms-modi":["mdi"],"image/vnd.ms-photo":["wdp"],"image/vnd.net-fpx":["npx"],"image/vnd.pco.b16":["b16"],"image/vnd.tencent.tap":["tap"],"image/vnd.valve.source.texture":["vtf"],"image/vnd.wap.wbmp":["wbmp"],"image/vnd.xiff":["xif"],"image/vnd.zbrush.pcx":["pcx"],"image/x-3ds":["3ds"],"image/x-cmu-raster":["ras"],"image/x-cmx":["cmx"],"image/x-freehand":["fh","fhc","fh4","fh5","fh7"],"image/x-icon":["*ico"],"image/x-jng":["jng"],"image/x-mrsid-image":["sid"],"image/x-ms-bmp":["*bmp"],"image/x-pcx":["*pcx"],"image/x-pict":["pic","pct"],"image/x-portable-anymap":["pnm"],"image/x-portable-bitmap":["pbm"],"image/x-portable-graymap":["pgm"],"image/x-portable-pixmap":["ppm"],"image/x-rgb":["rgb"],"image/x-tga":["tga"],"image/x-xbitmap":["xbm"],"image/x-xpixmap":["xpm"],"image/x-xwindowdump":["xwd"],"message/vnd.wfa.wsc":["wsc"],"model/vnd.collada+xml":["dae"],"model/vnd.dwf":["dwf"],"model/vnd.gdl":["gdl"],"model/vnd.gtw":["gtw"],"model/vnd.mts":["mts"],"model/vnd.opengex":["ogex"],"model/vnd.parasolid.transmit.binary":["x_b"],"model/vnd.parasolid.transmit.text":["x_t"],"model/vnd.sap.vds":["vds"],"model/vnd.usdz+zip":["usdz"],"model/vnd.valve.source.compiled-map":["bsp"],"model/vnd.vtu":["vtu"],"text/prs.lines.tag":["dsc"],"text/vnd.curl":["curl"],"text/vnd.curl.dcurl":["dcurl"],"text/vnd.curl.mcurl":["mcurl"],"text/vnd.curl.scurl":["scurl"],"text/vnd.dvb.subtitle":["sub"],"text/vnd.fly":["fly"],"text/vnd.fmi.flexstor":["flx"],"text/vnd.graphviz":["gv"],"text/vnd.in3d.3dml":["3dml"],"text/vnd.in3d.spot":["spot"],"text/vnd.sun.j2me.app-descriptor":["jad"],"text/vnd.wap.wml":["wml"],"text/vnd.wap.wmlscript":["wmls"],"text/x-asm":["s","asm"],"text/x-c":["c","cc","cxx","cpp","h","hh","dic"],"text/x-component":["htc"],"text/x-fortran":["f","for","f77","f90"],"text/x-handlebars-template":["hbs"],"text/x-java-source":["java"],"text/x-lua":["lua"],"text/x-markdown":["mkd"],"text/x-nfo":["nfo"],"text/x-opml":["opml"],"
text/x-org":["*org"],"text/x-pascal":["p","pas"],"text/x-processing":["pde"],"text/x-sass":["sass"],"text/x-scss":["scss"],"text/x-setext":["etx"],"text/x-sfv":["sfv"],"text/x-suse-ymp":["ymp"],"text/x-uuencode":["uu"],"text/x-vcalendar":["vcs"],"text/x-vcard":["vcf"],"video/vnd.dece.hd":["uvh","uvvh"],"video/vnd.dece.mobile":["uvm","uvvm"],"video/vnd.dece.pd":["uvp","uvvp"],"video/vnd.dece.sd":["uvs","uvvs"],"video/vnd.dece.video":["uvv","uvvv"],"video/vnd.dvb.file":["dvb"],"video/vnd.fvt":["fvt"],"video/vnd.mpegurl":["mxu","m4u"],"video/vnd.ms-playready.media.pyv":["pyv"],"video/vnd.uvvu.mp4":["uvu","uvvu"],"video/vnd.vivo":["viv"],"video/x-f4v":["f4v"],"video/x-fli":["fli"],"video/x-flv":["flv"],"video/x-m4v":["m4v"],"video/x-matroska":["mkv","mk3d","mks"],"video/x-mng":["mng"],"video/x-ms-asf":["asf","asx"],"video/x-ms-vob":["vob"],"video/x-ms-wm":["wm"],"video/x-ms-wmv":["wmv"],"video/x-ms-wmx":["wmx"],"video/x-ms-wvx":["wvx"],"video/x-msvideo":["avi"],"video/x-sgi-movie":["movie"],"video/x-smv":["smv"],"x-conference/x-cooltalk":["ice"]}; \ No newline at end of file diff --git a/node_modules/mime/types/standard.js b/node_modules/mime/types/standard.js new file mode 100644 index 0000000..5ee9937 --- /dev/null +++ b/node_modules/mime/types/standard.js @@ -0,0 +1 @@ +module.exports = {"application/andrew-inset":["ez"],"application/applixware":["aw"],"application/atom+xml":["atom"],"application/atomcat+xml":["atomcat"],"application/atomdeleted+xml":["atomdeleted"],"application/atomsvc+xml":["atomsvc"],"application/atsc-dwd+xml":["dwd"],"application/atsc-held+xml":["held"],"application/atsc-rsat+xml":["rsat"],"application/bdoc":["bdoc"],"application/calendar+xml":["xcs"],"application/ccxml+xml":["ccxml"],"application/cdfx+xml":["cdfx"],"application/cdmi-capability":["cdmia"],"application/cdmi-container":["cdmic"],"application/cdmi-domain":["cdmid"],"application/cdmi-object":["cdmio"],"application/cdmi-queue":["cdmiq"],"application/cu-seeme":["cu"],"application/dash+xml":["mpd"],"application/davmount+xml":["davmount"],"application/docbook+xml":["dbk"],"application/dssc+der":["dssc"],"application/dssc+xml":["xdssc"],"application/ecmascript":["es","ecma"],"application/emma+xml":["emma"],"application/emotionml+xml":["emotionml"],"application/epub+zip":["epub"],"application/exi":["exi"],"application/express":["exp"],"application/fdt+xml":["fdt"],"application/font-tdpfr":["pfr"],"application/geo+json":["geojson"],"application/gml+xml":["gml"],"application/gpx+xml":["gpx"],"application/gxf":["gxf"],"application/gzip":["gz"],"application/hjson":["hjson"],"application/hyperstudio":["stk"],"application/inkml+xml":["ink","inkml"],"application/ipfix":["ipfix"],"application/its+xml":["its"],"application/java-archive":["jar","war","ear"],"application/java-serialized-object":["ser"],"application/java-vm":["class"],"application/javascript":["js","mjs"],"application/json":["json","map"],"application/json5":["json5"],"application/jsonml+json":["jsonml"],"application/ld+json":["jsonld"],"application/lgr+xml":["lgr"],"application/lost+xml":["lostxml"],"application/mac-binhex40":["hqx"],"application/mac-compactpro":["cpt"],"application/mads+xml":["mads"],"application/manifest+json":["webmanifest"],"application/marc":["mrc"],"application/marcxml+xml":["mrcx"],"application/mathematica":["ma","nb","mb"],"application/mathml+xml":["mathml"],"application/mbox":["mbox"],"application/mediaservercontrol+xml":["mscml"],"application/metalink+xml":["metalink"],"application/metalink4+xml":["meta4"],"application/mets+xml":["
mets"],"application/mmt-aei+xml":["maei"],"application/mmt-usd+xml":["musd"],"application/mods+xml":["mods"],"application/mp21":["m21","mp21"],"application/mp4":["mp4s","m4p"],"application/msword":["doc","dot"],"application/mxf":["mxf"],"application/n-quads":["nq"],"application/n-triples":["nt"],"application/node":["cjs"],"application/octet-stream":["bin","dms","lrf","mar","so","dist","distz","pkg","bpk","dump","elc","deploy","exe","dll","deb","dmg","iso","img","msi","msp","msm","buffer"],"application/oda":["oda"],"application/oebps-package+xml":["opf"],"application/ogg":["ogx"],"application/omdoc+xml":["omdoc"],"application/onenote":["onetoc","onetoc2","onetmp","onepkg"],"application/oxps":["oxps"],"application/p2p-overlay+xml":["relo"],"application/patch-ops-error+xml":["xer"],"application/pdf":["pdf"],"application/pgp-encrypted":["pgp"],"application/pgp-signature":["asc","sig"],"application/pics-rules":["prf"],"application/pkcs10":["p10"],"application/pkcs7-mime":["p7m","p7c"],"application/pkcs7-signature":["p7s"],"application/pkcs8":["p8"],"application/pkix-attr-cert":["ac"],"application/pkix-cert":["cer"],"application/pkix-crl":["crl"],"application/pkix-pkipath":["pkipath"],"application/pkixcmp":["pki"],"application/pls+xml":["pls"],"application/postscript":["ai","eps","ps"],"application/provenance+xml":["provx"],"application/pskc+xml":["pskcxml"],"application/raml+yaml":["raml"],"application/rdf+xml":["rdf","owl"],"application/reginfo+xml":["rif"],"application/relax-ng-compact-syntax":["rnc"],"application/resource-lists+xml":["rl"],"application/resource-lists-diff+xml":["rld"],"application/rls-services+xml":["rs"],"application/route-apd+xml":["rapd"],"application/route-s-tsid+xml":["sls"],"application/route-usd+xml":["rusd"],"application/rpki-ghostbusters":["gbr"],"application/rpki-manifest":["mft"],"application/rpki-roa":["roa"],"application/rsd+xml":["rsd"],"application/rss+xml":["rss"],"application/rtf":["rtf"],"application/sbml+xml":["sbml"],"application/scvp-cv-request":["scq"],"application/scvp-cv-response":["scs"],"application/scvp-vp-request":["spq"],"application/scvp-vp-response":["spp"],"application/sdp":["sdp"],"application/senml+xml":["senmlx"],"application/sensml+xml":["sensmlx"],"application/set-payment-initiation":["setpay"],"application/set-registration-initiation":["setreg"],"application/shf+xml":["shf"],"application/sieve":["siv","sieve"],"application/smil+xml":["smi","smil"],"application/sparql-query":["rq"],"application/sparql-results+xml":["srx"],"application/srgs":["gram"],"application/srgs+xml":["grxml"],"application/sru+xml":["sru"],"application/ssdl+xml":["ssdl"],"application/ssml+xml":["ssml"],"application/swid+xml":["swidtag"],"application/tei+xml":["tei","teicorpus"],"application/thraud+xml":["tfi"],"application/timestamped-data":["tsd"],"application/toml":["toml"],"application/trig":["trig"],"application/ttml+xml":["ttml"],"application/ubjson":["ubj"],"application/urc-ressheet+xml":["rsheet"],"application/urc-targetdesc+xml":["td"],"application/voicexml+xml":["vxml"],"application/wasm":["wasm"],"application/widget":["wgt"],"application/winhlp":["hlp"],"application/wsdl+xml":["wsdl"],"application/wspolicy+xml":["wspolicy"],"application/xaml+xml":["xaml"],"application/xcap-att+xml":["xav"],"application/xcap-caps+xml":["xca"],"application/xcap-diff+xml":["xdf"],"application/xcap-el+xml":["xel"],"application/xcap-ns+xml":["xns"],"application/xenc+xml":["xenc"],"application/xhtml+xml":["xhtml","xht"],"application/xliff+xml":["xlf"],"application/xml":["xml","xsl
","xsd","rng"],"application/xml-dtd":["dtd"],"application/xop+xml":["xop"],"application/xproc+xml":["xpl"],"application/xslt+xml":["*xsl","xslt"],"application/xspf+xml":["xspf"],"application/xv+xml":["mxml","xhvml","xvml","xvm"],"application/yang":["yang"],"application/yin+xml":["yin"],"application/zip":["zip"],"audio/3gpp":["*3gpp"],"audio/adpcm":["adp"],"audio/amr":["amr"],"audio/basic":["au","snd"],"audio/midi":["mid","midi","kar","rmi"],"audio/mobile-xmf":["mxmf"],"audio/mp3":["*mp3"],"audio/mp4":["m4a","mp4a"],"audio/mpeg":["mpga","mp2","mp2a","mp3","m2a","m3a"],"audio/ogg":["oga","ogg","spx","opus"],"audio/s3m":["s3m"],"audio/silk":["sil"],"audio/wav":["wav"],"audio/wave":["*wav"],"audio/webm":["weba"],"audio/xm":["xm"],"font/collection":["ttc"],"font/otf":["otf"],"font/ttf":["ttf"],"font/woff":["woff"],"font/woff2":["woff2"],"image/aces":["exr"],"image/apng":["apng"],"image/avif":["avif"],"image/bmp":["bmp"],"image/cgm":["cgm"],"image/dicom-rle":["drle"],"image/emf":["emf"],"image/fits":["fits"],"image/g3fax":["g3"],"image/gif":["gif"],"image/heic":["heic"],"image/heic-sequence":["heics"],"image/heif":["heif"],"image/heif-sequence":["heifs"],"image/hej2k":["hej2"],"image/hsj2":["hsj2"],"image/ief":["ief"],"image/jls":["jls"],"image/jp2":["jp2","jpg2"],"image/jpeg":["jpeg","jpg","jpe"],"image/jph":["jph"],"image/jphc":["jhc"],"image/jpm":["jpm"],"image/jpx":["jpx","jpf"],"image/jxr":["jxr"],"image/jxra":["jxra"],"image/jxrs":["jxrs"],"image/jxs":["jxs"],"image/jxsc":["jxsc"],"image/jxsi":["jxsi"],"image/jxss":["jxss"],"image/ktx":["ktx"],"image/ktx2":["ktx2"],"image/png":["png"],"image/sgi":["sgi"],"image/svg+xml":["svg","svgz"],"image/t38":["t38"],"image/tiff":["tif","tiff"],"image/tiff-fx":["tfx"],"image/webp":["webp"],"image/wmf":["wmf"],"message/disposition-notification":["disposition-notification"],"message/global":["u8msg"],"message/global-delivery-status":["u8dsn"],"message/global-disposition-notification":["u8mdn"],"message/global-headers":["u8hdr"],"message/rfc822":["eml","mime"],"model/3mf":["3mf"],"model/gltf+json":["gltf"],"model/gltf-binary":["glb"],"model/iges":["igs","iges"],"model/mesh":["msh","mesh","silo"],"model/mtl":["mtl"],"model/obj":["obj"],"model/step+xml":["stpx"],"model/step+zip":["stpz"],"model/step-xml+zip":["stpxz"],"model/stl":["stl"],"model/vrml":["wrl","vrml"],"model/x3d+binary":["*x3db","x3dbz"],"model/x3d+fastinfoset":["x3db"],"model/x3d+vrml":["*x3dv","x3dvz"],"model/x3d+xml":["x3d","x3dz"],"model/x3d-vrml":["x3dv"],"text/cache-manifest":["appcache","manifest"],"text/calendar":["ics","ifb"],"text/coffeescript":["coffee","litcoffee"],"text/css":["css"],"text/csv":["csv"],"text/html":["html","htm","shtml"],"text/jade":["jade"],"text/jsx":["jsx"],"text/less":["less"],"text/markdown":["markdown","md"],"text/mathml":["mml"],"text/mdx":["mdx"],"text/n3":["n3"],"text/plain":["txt","text","conf","def","list","log","in","ini"],"text/richtext":["rtx"],"text/rtf":["*rtf"],"text/sgml":["sgml","sgm"],"text/shex":["shex"],"text/slim":["slim","slm"],"text/spdx":["spdx"],"text/stylus":["stylus","styl"],"text/tab-separated-values":["tsv"],"text/troff":["t","tr","roff","man","me","ms"],"text/turtle":["ttl"],"text/uri-list":["uri","uris","urls"],"text/vcard":["vcard"],"text/vtt":["vtt"],"text/xml":["*xml"],"text/yaml":["yaml","yml"],"video/3gpp":["3gp","3gpp"],"video/3gpp2":["3g2"],"video/h261":["h261"],"video/h263":["h263"],"video/h264":["h264"],"video/iso.segment":["m4s"],"video/jpeg":["jpgv"],"video/jpm":["*jpm","jpgm"],"video/mj2":["mj2","mjp2"],"video/mp2t":["ts"]
,"video/mp4":["mp4","mp4v","mpg4"],"video/mpeg":["mpeg","mpg","mpe","m1v","m2v"],"video/ogg":["ogv"],"video/quicktime":["qt","mov"],"video/webm":["webm"]}; \ No newline at end of file diff --git a/node_modules/morgan/HISTORY.md b/node_modules/morgan/HISTORY.md new file mode 100644 index 0000000..436db47 --- /dev/null +++ b/node_modules/morgan/HISTORY.md @@ -0,0 +1,215 @@ +1.10.0 / 2020-03-20 +=================== + + * Add `:total-time` token + * Fix trailing space in colored status code for `dev` format + * deps: basic-auth@~2.0.1 + - deps: safe-buffer@5.1.2 + * deps: depd@~2.0.0 + - Replace internal `eval` usage with `Function` constructor + - Use instance methods on `process` to check for listeners + * deps: on-headers@~1.0.2 + - Fix `res.writeHead` patch missing return value + +1.9.1 / 2018-09-10 +================== + + * Fix using special characters in format + * deps: depd@~1.1.2 + - perf: remove argument reassignment + +1.9.0 / 2017-09-26 +================== + + * Use `res.headersSent` when available + * deps: basic-auth@~2.0.0 + - Use `safe-buffer` for improved Buffer API + * deps: debug@2.6.9 + * deps: depd@~1.1.1 + - Remove unnecessary `Buffer` loading + +1.8.2 / 2017-05-23 +================== + + * deps: debug@2.6.8 + - Fix `DEBUG_MAX_ARRAY_LENGTH` + - deps: ms@2.0.0 + +1.8.1 / 2017-02-04 +================== + + * deps: debug@2.6.1 + - Fix deprecation messages in WebStorm and other editors + - Undeprecate `DEBUG_FD` set to `1` or `2` + +1.8.0 / 2017-02-04 +================== + + * Fix sending unnecessary `undefined` argument to token functions + * deps: basic-auth@~1.1.0 + * deps: debug@2.6.0 + - Allow colors in workers + - Deprecated `DEBUG_FD` environment variable + - Fix error when running under React Native + - Use same color for same namespace + - deps: ms@0.7.2 + * perf: enable strict mode in compiled functions + +1.7.0 / 2016-02-18 +================== + + * Add `digits` argument to `response-time` token + * deps: depd@~1.1.0 + - Enable strict mode in more places + - Support web browser loading + * deps: on-headers@~1.0.1 + - perf: enable strict mode + +1.6.1 / 2015-07-03 +================== + + * deps: basic-auth@~1.0.3 + +1.6.0 / 2015-06-12 +================== + + * Add `morgan.compile(format)` export + * Do not color 1xx status codes in `dev` format + * Fix `response-time` token to not include response latency + * Fix `status` token incorrectly displaying before response in `dev` format + * Fix token return values to be `undefined` or a string + * Improve representation of multiple headers in `req` and `res` tokens + * Use `res.getHeader` in `res` token + * deps: basic-auth@~1.0.2 + - perf: enable strict mode + - perf: hoist regular expression + - perf: parse with regular expressions + - perf: remove argument reassignment + * deps: on-finished@~2.3.0 + - Add defined behavior for HTTP `CONNECT` requests + - Add defined behavior for HTTP `Upgrade` requests + - deps: ee-first@1.1.1 + * pref: enable strict mode + * pref: reduce function closure scopes + * pref: remove dynamic compile on every request for `dev` format + * pref: remove an argument reassignment + * pref: skip function call without `skip` option + +1.5.3 / 2015-05-10 +================== + + * deps: basic-auth@~1.0.1 + * deps: debug@~2.2.0 + - deps: ms@0.7.1 + * deps: depd@~1.0.1 + * deps: on-finished@~2.2.1 + - Fix `isFinished(req)` when data buffered + +1.5.2 / 2015-03-15 +================== + + * deps: debug@~2.1.3 + - Fix high intensity foreground color for bold + - deps: ms@0.7.0 + +1.5.1 / 2014-12-31 
+================== + + * deps: debug@~2.1.1 + * deps: on-finished@~2.2.0 + +1.5.0 / 2014-11-06 +================== + + * Add multiple date formats + - `clf` for the common log format + - `iso` for the common ISO 8601 date time format + - `web` for the common RFC 1123 date time format + * Deprecate `buffer` option + * Fix date format in `common` and `combined` formats + * Fix token arguments to accept values with `"` + +1.4.1 / 2014-10-22 +================== + + * deps: on-finished@~2.1.1 + - Fix handling of pipelined requests + +1.4.0 / 2014-10-16 +================== + + * Add `debug` messages + * deps: depd@~1.0.0 + +1.3.2 / 2014-09-27 +================== + + * Fix `req.ip` integration when `immediate: false` + +1.3.1 / 2014-09-14 +================== + + * Remove un-used `bytes` dependency + * deps: depd@0.4.5 + +1.3.0 / 2014-09-01 +================== + + * Assert if `format` is not a function or string + +1.2.3 / 2014-08-16 +================== + + * deps: on-finished@2.1.0 + +1.2.2 / 2014-07-27 +================== + + * deps: depd@0.4.4 + - Work-around v8 generating empty stack traces + +1.2.1 / 2014-07-26 +================== + + * deps: depd@0.4.3 + - Fix exception when global `Error.stackTraceLimit` is too low + +1.2.0 / 2014-07-19 +================== + + * Add `:remote-user` token + * Add `combined` log format + * Add `common` log format + * Add `morgan(format, options)` function signature + * Deprecate `default` format -- use `combined` format instead + * Deprecate not providing a format + * Remove non-standard grey color from `dev` format + +1.1.1 / 2014-05-20 +================== + + * simplify method to get remote address + +1.1.0 / 2014-05-18 +================== + + * "dev" format will use same tokens as other formats + * `:response-time` token is now empty when immediate used + * `:response-time` token is now monotonic + * `:response-time` token has precision to 1 μs + * fix `:status` + immediate output in node.js 0.8 + * improve `buffer` option to prevent indefinite event loop holding + * deps: bytes@1.0.0 + - add negative support + +1.0.1 / 2014-05-04 +================== + + * Make buffer unique per morgan instance + * deps: bytes@0.3.0 + * added terabyte support + +1.0.0 / 2014-02-08 +================== + + * Initial release diff --git a/node_modules/morgan/LICENSE b/node_modules/morgan/LICENSE new file mode 100644 index 0000000..3fefed9 --- /dev/null +++ b/node_modules/morgan/LICENSE @@ -0,0 +1,23 @@ +(The MIT License) + +Copyright (c) 2014 Jonathan Ong +Copyright (c) 2014-2017 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/morgan/README.md b/node_modules/morgan/README.md new file mode 100644 index 0000000..fc1c4b4 --- /dev/null +++ b/node_modules/morgan/README.md @@ -0,0 +1,417 @@ +# morgan + +[![NPM Version][npm-version-image]][npm-url] +[![NPM Downloads][npm-downloads-image]][npm-url] +[![Build Status][travis-image]][travis-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +HTTP request logger middleware for node.js + +> Named after [Dexter](http://en.wikipedia.org/wiki/Dexter_Morgan), a show you should not watch until completion. + +## API + + + +```js +var morgan = require('morgan') +``` + +### morgan(format, options) + +Create a new morgan logger middleware function using the given `format` and `options`. +The `format` argument may be a string of a predefined name (see below for the names), +a string of a format string, or a function that will produce a log entry. + +The `format` function will be called with three arguments `tokens`, `req`, and `res`, +where `tokens` is an object with all defined tokens, `req` is the HTTP request and `res` +is the HTTP response. The function is expected to return a string that will be the log +line, or `undefined` / `null` to skip logging. + +#### Using a predefined format string + + + +```js +morgan('tiny') +``` + +#### Using format string of predefined tokens + + + +```js +morgan(':method :url :status :res[content-length] - :response-time ms') +``` + +#### Using a custom format function + + + +``` js +morgan(function (tokens, req, res) { + return [ + tokens.method(req, res), + tokens.url(req, res), + tokens.status(req, res), + tokens.res(req, res, 'content-length'), '-', + tokens['response-time'](req, res), 'ms' + ].join(' ') +}) +``` + +#### Options + +Morgan accepts these properties in the options object. + +##### immediate + +Write log line on request instead of response. This means that a requests will +be logged even if the server crashes, _but data from the response (like the +response code, content length, etc.) cannot be logged_. + +##### skip + +Function to determine if logging is skipped, defaults to `false`. This function +will be called as `skip(req, res)`. + + + +```js +// EXAMPLE: only log error responses +morgan('combined', { + skip: function (req, res) { return res.statusCode < 400 } +}) +``` + +##### stream + +Output stream for writing log lines, defaults to `process.stdout`. + +#### Predefined Formats + +There are various pre-defined formats provided: + +##### combined + +Standard Apache combined log output. + +``` +:remote-addr - :remote-user [:date[clf]] ":method :url HTTP/:http-version" :status :res[content-length] ":referrer" ":user-agent" +``` + +##### common + +Standard Apache common log output. + +``` +:remote-addr - :remote-user [:date[clf]] ":method :url HTTP/:http-version" :status :res[content-length] +``` + +##### dev + +Concise output colored by response status for development use. The `:status` +token will be colored green for success codes, red for server error codes, +yellow for client error codes, cyan for redirection codes, and uncolored +for information codes. + +``` +:method :url :status :response-time ms - :res[content-length] +``` + +##### short + +Shorter than default, also including response time. 
+ +``` +:remote-addr :remote-user :method :url HTTP/:http-version :status :res[content-length] - :response-time ms +``` + +##### tiny + +The minimal output. + +``` +:method :url :status :res[content-length] - :response-time ms +``` + +#### Tokens + +##### Creating new tokens + +To define a token, simply invoke `morgan.token()` with the name and a callback function. +This callback function is expected to return a string value. The value returned is then +available as ":type" in this case: + + + +```js +morgan.token('type', function (req, res) { return req.headers['content-type'] }) +``` + +Calling `morgan.token()` using the same name as an existing token will overwrite that +token definition. + +The token function is expected to be called with the arguments `req` and `res`, representing +the HTTP request and HTTP response. Additionally, the token can accept further arguments of +it's choosing to customize behavior. + +##### :date[format] + +The current date and time in UTC. The available formats are: + + - `clf` for the common log format (`"10/Oct/2000:13:55:36 +0000"`) + - `iso` for the common ISO 8601 date time format (`2000-10-10T13:55:36.000Z`) + - `web` for the common RFC 1123 date time format (`Tue, 10 Oct 2000 13:55:36 GMT`) + +If no format is given, then the default is `web`. + +##### :http-version + +The HTTP version of the request. + +##### :method + +The HTTP method of the request. + +##### :referrer + +The Referrer header of the request. This will use the standard mis-spelled Referer header if exists, otherwise Referrer. + +##### :remote-addr + +The remote address of the request. This will use `req.ip`, otherwise the standard `req.connection.remoteAddress` value (socket address). + +##### :remote-user + +The user authenticated as part of Basic auth for the request. + +##### :req[header] + +The given `header` of the request. If the header is not present, the +value will be displayed as `"-"` in the log. + +##### :res[header] + +The given `header` of the response. If the header is not present, the +value will be displayed as `"-"` in the log. + +##### :response-time[digits] + +The time between the request coming into `morgan` and when the response +headers are written, in milliseconds. + +The `digits` argument is a number that specifies the number of digits to +include on the number, defaulting to `3`, which provides microsecond precision. + +##### :status + +The status code of the response. + +If the request/response cycle completes before a response was sent to the +client (for example, the TCP socket closed prematurely by a client aborting +the request), then the status will be empty (displayed as `"-"` in the log). + +##### :total-time[digits] + +The time between the request coming into `morgan` and when the response +has finished being written out to the connection, in milliseconds. + +The `digits` argument is a number that specifies the number of digits to +include on the number, defaulting to `3`, which provides microsecond precision. + +##### :url + +The URL of the request. This will use `req.originalUrl` if exists, otherwise `req.url`. + +##### :user-agent + +The contents of the User-Agent header of the request. + +### morgan.compile(format) + +Compile a format string into a `format` function for use by `morgan`. A format string +is a string that represents a single log line and can utilize token syntax. +Tokens are references by `:token-name`. 
If tokens accept arguments, they can +be passed using `[]`, for example: `:token-name[pretty]` would pass the string +`'pretty'` as an argument to the token `token-name`. + +The function returned from `morgan.compile` takes three arguments `tokens`, `req`, and +`res`, where `tokens` is object with all defined tokens, `req` is the HTTP request and +`res` is the HTTP response. The function will return a string that will be the log line, +or `undefined` / `null` to skip logging. + +Normally formats are defined using `morgan.format(name, format)`, but for certain +advanced uses, this compile function is directly available. + +## Examples + +### express/connect + +Simple app that will log all request in the Apache combined format to STDOUT + +```js +var express = require('express') +var morgan = require('morgan') + +var app = express() + +app.use(morgan('combined')) + +app.get('/', function (req, res) { + res.send('hello, world!') +}) +``` + +### vanilla http server + +Simple app that will log all request in the Apache combined format to STDOUT + +```js +var finalhandler = require('finalhandler') +var http = require('http') +var morgan = require('morgan') + +// create "middleware" +var logger = morgan('combined') + +http.createServer(function (req, res) { + var done = finalhandler(req, res) + logger(req, res, function (err) { + if (err) return done(err) + + // respond to request + res.setHeader('content-type', 'text/plain') + res.end('hello, world!') + }) +}) +``` + +### write logs to a file + +#### single file + +Simple app that will log all requests in the Apache combined format to the file +`access.log`. + +```js +var express = require('express') +var fs = require('fs') +var morgan = require('morgan') +var path = require('path') + +var app = express() + +// create a write stream (in append mode) +var accessLogStream = fs.createWriteStream(path.join(__dirname, 'access.log'), { flags: 'a' }) + +// setup the logger +app.use(morgan('combined', { stream: accessLogStream })) + +app.get('/', function (req, res) { + res.send('hello, world!') +}) +``` + +#### log file rotation + +Simple app that will log all requests in the Apache combined format to one log +file per day in the `log/` directory using the +[rotating-file-stream module](https://www.npmjs.com/package/rotating-file-stream). + +```js +var express = require('express') +var morgan = require('morgan') +var path = require('path') +var rfs = require('rotating-file-stream') // version 2.x + +var app = express() + +// create a rotating write stream +var accessLogStream = rfs.createStream('access.log', { + interval: '1d', // rotate daily + path: path.join(__dirname, 'log') +}) + +// setup the logger +app.use(morgan('combined', { stream: accessLogStream })) + +app.get('/', function (req, res) { + res.send('hello, world!') +}) +``` + +### split / dual logging + +The `morgan` middleware can be used as many times as needed, enabling +combinations like: + + * Log entry on request and one on response + * Log all requests to file, but errors to console + * ... and more! 
+ +Sample app that will log all requests to a file using Apache format, but +error responses are logged to the console: + +```js +var express = require('express') +var fs = require('fs') +var morgan = require('morgan') +var path = require('path') + +var app = express() + +// log only 4xx and 5xx responses to console +app.use(morgan('dev', { + skip: function (req, res) { return res.statusCode < 400 } +})) + +// log all requests to access.log +app.use(morgan('common', { + stream: fs.createWriteStream(path.join(__dirname, 'access.log'), { flags: 'a' }) +})) + +app.get('/', function (req, res) { + res.send('hello, world!') +}) +``` + +### use custom token formats + +Sample app that will use custom token formats. This adds an ID to all requests and displays it using the `:id` token. + +```js +var express = require('express') +var morgan = require('morgan') +var uuid = require('node-uuid') + +morgan.token('id', function getId (req) { + return req.id +}) + +var app = express() + +app.use(assignId) +app.use(morgan(':id :method :url :response-time')) + +app.get('/', function (req, res) { + res.send('hello, world!') +}) + +function assignId (req, res, next) { + req.id = uuid.v4() + next() +} +``` + +## License + +[MIT](LICENSE) + +[coveralls-image]: https://badgen.net/coveralls/c/github/expressjs/morgan/master +[coveralls-url]: https://coveralls.io/r/expressjs/morgan?branch=master +[npm-downloads-image]: https://badgen.net/npm/dm/morgan +[npm-url]: https://npmjs.org/package/morgan +[npm-version-image]: https://badgen.net/npm/v/morgan +[travis-image]: https://badgen.net/travis/expressjs/morgan/master +[travis-url]: https://travis-ci.org/expressjs/morgan diff --git a/node_modules/morgan/index.js b/node_modules/morgan/index.js new file mode 100644 index 0000000..b33c4f2 --- /dev/null +++ b/node_modules/morgan/index.js @@ -0,0 +1,544 @@ +/*! + * morgan + * Copyright(c) 2010 Sencha Inc. + * Copyright(c) 2011 TJ Holowaychuk + * Copyright(c) 2014 Jonathan Ong + * Copyright(c) 2014-2017 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module exports. + * @public + */ + +module.exports = morgan +module.exports.compile = compile +module.exports.format = format +module.exports.token = token + +/** + * Module dependencies. + * @private + */ + +var auth = require('basic-auth') +var debug = require('debug')('morgan') +var deprecate = require('depd')('morgan') +var onFinished = require('on-finished') +var onHeaders = require('on-headers') + +/** + * Array of CLF month names. + * @private + */ + +var CLF_MONTH = [ + 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', + 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec' +] + +/** + * Default log buffer duration. + * @private + */ + +var DEFAULT_BUFFER_DURATION = 1000 + +/** + * Create a logger middleware. + * + * @public + * @param {String|Function} format + * @param {Object} [options] + * @return {Function} middleware + */ + +function morgan (format, options) { + var fmt = format + var opts = options || {} + + if (format && typeof format === 'object') { + opts = format + fmt = opts.format || 'default' + + // smart deprecation message + deprecate('morgan(options): use morgan(' + (typeof fmt === 'string' ? 
JSON.stringify(fmt) : 'format') + ', options) instead') + } + + if (fmt === undefined) { + deprecate('undefined format: specify a format') + } + + // output on request instead of response + var immediate = opts.immediate + + // check if log entry should be skipped + var skip = opts.skip || false + + // format function + var formatLine = typeof fmt !== 'function' + ? getFormatFunction(fmt) + : fmt + + // stream + var buffer = opts.buffer + var stream = opts.stream || process.stdout + + // buffering support + if (buffer) { + deprecate('buffer option') + + // flush interval + var interval = typeof buffer !== 'number' + ? DEFAULT_BUFFER_DURATION + : buffer + + // swap the stream + stream = createBufferStream(stream, interval) + } + + return function logger (req, res, next) { + // request data + req._startAt = undefined + req._startTime = undefined + req._remoteAddress = getip(req) + + // response data + res._startAt = undefined + res._startTime = undefined + + // record request start + recordStartTime.call(req) + + function logRequest () { + if (skip !== false && skip(req, res)) { + debug('skip request') + return + } + + var line = formatLine(morgan, req, res) + + if (line == null) { + debug('skip line') + return + } + + debug('log request') + stream.write(line + '\n') + }; + + if (immediate) { + // immediate log + logRequest() + } else { + // record response start + onHeaders(res, recordStartTime) + + // log when response finished + onFinished(res, logRequest) + } + + next() + } +} + +/** + * Apache combined log format. + */ + +morgan.format('combined', ':remote-addr - :remote-user [:date[clf]] ":method :url HTTP/:http-version" :status :res[content-length] ":referrer" ":user-agent"') + +/** + * Apache common log format. + */ + +morgan.format('common', ':remote-addr - :remote-user [:date[clf]] ":method :url HTTP/:http-version" :status :res[content-length]') + +/** + * Default format. + */ + +morgan.format('default', ':remote-addr - :remote-user [:date] ":method :url HTTP/:http-version" :status :res[content-length] ":referrer" ":user-agent"') +deprecate.property(morgan, 'default', 'default format: use combined format') + +/** + * Short format. + */ + +morgan.format('short', ':remote-addr :remote-user :method :url HTTP/:http-version :status :res[content-length] - :response-time ms') + +/** + * Tiny format. + */ + +morgan.format('tiny', ':method :url :status :res[content-length] - :response-time ms') + +/** + * dev (colored) + */ + +morgan.format('dev', function developmentFormatLine (tokens, req, res) { + // get the status code if response written + var status = headersSent(res) + ? res.statusCode + : undefined + + // get status color + var color = status >= 500 ? 31 // red + : status >= 400 ? 33 // yellow + : status >= 300 ? 36 // cyan + : status >= 200 ? 
32 // green + : 0 // no color + + // get colored function + var fn = developmentFormatLine[color] + + if (!fn) { + // compile + fn = developmentFormatLine[color] = compile('\x1b[0m:method :url \x1b[' + + color + 'm:status\x1b[0m :response-time ms - :res[content-length]\x1b[0m') + } + + return fn(tokens, req, res) +}) + +/** + * request url + */ + +morgan.token('url', function getUrlToken (req) { + return req.originalUrl || req.url +}) + +/** + * request method + */ + +morgan.token('method', function getMethodToken (req) { + return req.method +}) + +/** + * response time in milliseconds + */ + +morgan.token('response-time', function getResponseTimeToken (req, res, digits) { + if (!req._startAt || !res._startAt) { + // missing request and/or response start time + return + } + + // calculate diff + var ms = (res._startAt[0] - req._startAt[0]) * 1e3 + + (res._startAt[1] - req._startAt[1]) * 1e-6 + + // return truncated value + return ms.toFixed(digits === undefined ? 3 : digits) +}) + +/** + * total time in milliseconds + */ + +morgan.token('total-time', function getTotalTimeToken (req, res, digits) { + if (!req._startAt || !res._startAt) { + // missing request and/or response start time + return + } + + // time elapsed from request start + var elapsed = process.hrtime(req._startAt) + + // cover to milliseconds + var ms = (elapsed[0] * 1e3) + (elapsed[1] * 1e-6) + + // return truncated value + return ms.toFixed(digits === undefined ? 3 : digits) +}) + +/** + * current date + */ + +morgan.token('date', function getDateToken (req, res, format) { + var date = new Date() + + switch (format || 'web') { + case 'clf': + return clfdate(date) + case 'iso': + return date.toISOString() + case 'web': + return date.toUTCString() + } +}) + +/** + * response status code + */ + +morgan.token('status', function getStatusToken (req, res) { + return headersSent(res) + ? String(res.statusCode) + : undefined +}) + +/** + * normalized referrer + */ + +morgan.token('referrer', function getReferrerToken (req) { + return req.headers.referer || req.headers.referrer +}) + +/** + * remote address + */ + +morgan.token('remote-addr', getip) + +/** + * remote user + */ + +morgan.token('remote-user', function getRemoteUserToken (req) { + // parse basic credentials + var credentials = auth(req) + + // return username + return credentials + ? credentials.name + : undefined +}) + +/** + * HTTP version + */ + +morgan.token('http-version', function getHttpVersionToken (req) { + return req.httpVersionMajor + '.' + req.httpVersionMinor +}) + +/** + * UA string + */ + +morgan.token('user-agent', function getUserAgentToken (req) { + return req.headers['user-agent'] +}) + +/** + * request header + */ + +morgan.token('req', function getRequestToken (req, res, field) { + // get header + var header = req.headers[field.toLowerCase()] + + return Array.isArray(header) + ? header.join(', ') + : header +}) + +/** + * response header + */ + +morgan.token('res', function getResponseHeader (req, res, field) { + if (!headersSent(res)) { + return undefined + } + + // get header + var header = res.getHeader(field) + + return Array.isArray(header) + ? header.join(', ') + : header +}) + +/** + * Format a Date in the common log format. 
+ * + * @private + * @param {Date} dateTime + * @return {string} + */ + +function clfdate (dateTime) { + var date = dateTime.getUTCDate() + var hour = dateTime.getUTCHours() + var mins = dateTime.getUTCMinutes() + var secs = dateTime.getUTCSeconds() + var year = dateTime.getUTCFullYear() + + var month = CLF_MONTH[dateTime.getUTCMonth()] + + return pad2(date) + '/' + month + '/' + year + + ':' + pad2(hour) + ':' + pad2(mins) + ':' + pad2(secs) + + ' +0000' +} + +/** + * Compile a format string into a function. + * + * @param {string} format + * @return {function} + * @public + */ + +function compile (format) { + if (typeof format !== 'string') { + throw new TypeError('argument format must be a string') + } + + var fmt = String(JSON.stringify(format)) + var js = ' "use strict"\n return ' + fmt.replace(/:([-\w]{2,})(?:\[([^\]]+)\])?/g, function (_, name, arg) { + var tokenArguments = 'req, res' + var tokenFunction = 'tokens[' + String(JSON.stringify(name)) + ']' + + if (arg !== undefined) { + tokenArguments += ', ' + String(JSON.stringify(arg)) + } + + return '" +\n (' + tokenFunction + '(' + tokenArguments + ') || "-") + "' + }) + + // eslint-disable-next-line no-new-func + return new Function('tokens, req, res', js) +} + +/** + * Create a basic buffering stream. + * + * @param {object} stream + * @param {number} interval + * @public + */ + +function createBufferStream (stream, interval) { + var buf = [] + var timer = null + + // flush function + function flush () { + timer = null + stream.write(buf.join('')) + buf.length = 0 + } + + // write function + function write (str) { + if (timer === null) { + timer = setTimeout(flush, interval) + } + + buf.push(str) + } + + // return a minimal "stream" + return { write: write } +} + +/** + * Define a format with the given name. + * + * @param {string} name + * @param {string|function} fmt + * @public + */ + +function format (name, fmt) { + morgan[name] = fmt + return this +} + +/** + * Lookup and compile a named format function. + * + * @param {string} name + * @return {function} + * @public + */ + +function getFormatFunction (name) { + // lookup format + var fmt = morgan[name] || name || morgan.default + + // return compiled format + return typeof fmt !== 'function' + ? compile(fmt) + : fmt +} + +/** + * Get request IP address. + * + * @private + * @param {IncomingMessage} req + * @return {string} + */ + +function getip (req) { + return req.ip || + req._remoteAddress || + (req.connection && req.connection.remoteAddress) || + undefined +} + +/** + * Determine if the response headers have been sent. + * + * @param {object} res + * @returns {boolean} + * @private + */ + +function headersSent (res) { + // istanbul ignore next: node.js 0.8 support + return typeof res.headersSent !== 'boolean' + ? Boolean(res._header) + : res.headersSent +} + +/** + * Pad number to two digits. + * + * @private + * @param {number} num + * @return {string} + */ + +function pad2 (num) { + var str = String(num) + + // istanbul ignore next: num is current datetime + return (str.length === 1 ? '0' : '') + str +} + +/** + * Record the start time. + * @private + */ + +function recordStartTime () { + this._startAt = process.hrtime() + this._startTime = new Date() +} + +/** + * Define a token function with the given name, + * and callback fn(req, res). 
+ * + * @param {string} name + * @param {function} fn + * @public + */ + +function token (name, fn) { + morgan[name] = fn + return this +} diff --git a/node_modules/morgan/package.json b/node_modules/morgan/package.json new file mode 100644 index 0000000..b25a647 --- /dev/null +++ b/node_modules/morgan/package.json @@ -0,0 +1,52 @@ +{ + "name": "morgan", + "description": "HTTP request logger middleware for node.js", + "version": "1.10.0", + "contributors": [ + "Douglas Christopher Wilson ", + "Jonathan Ong (http://jongleberry.com)" + ], + "license": "MIT", + "keywords": [ + "express", + "http", + "logger", + "middleware" + ], + "repository": "expressjs/morgan", + "dependencies": { + "basic-auth": "~2.0.1", + "debug": "2.6.9", + "depd": "~2.0.0", + "on-finished": "~2.3.0", + "on-headers": "~1.0.2" + }, + "devDependencies": { + "eslint": "6.8.0", + "eslint-config-standard": "14.1.0", + "eslint-plugin-import": "2.20.1", + "eslint-plugin-markdown": "1.0.2", + "eslint-plugin-node": "9.2.0", + "eslint-plugin-promise": "4.2.1", + "eslint-plugin-standard": "4.0.1", + "mocha": "7.1.1", + "nyc": "15.0.0", + "split": "1.0.1", + "supertest": "4.0.2" + }, + "files": [ + "LICENSE", + "HISTORY.md", + "README.md", + "index.js" + ], + "engines": { + "node": ">= 0.8.0" + }, + "scripts": { + "lint": "eslint --plugin markdown --ext js,md .", + "test": "mocha --check-leaks --reporter spec --bail", + "test-ci": "nyc --reporter=text npm test", + "test-cov": "nyc --reporter=html --reporter=text npm test" + } +} diff --git a/node_modules/ms/index.js b/node_modules/ms/index.js new file mode 100644 index 0000000..6a522b1 --- /dev/null +++ b/node_modules/ms/index.js @@ -0,0 +1,152 @@ +/** + * Helpers. + */ + +var s = 1000; +var m = s * 60; +var h = m * 60; +var d = h * 24; +var y = d * 365.25; + +/** + * Parse or format the given `val`. + * + * Options: + * + * - `long` verbose formatting [false] + * + * @param {String|Number} val + * @param {Object} [options] + * @throws {Error} throw an error if val is not a non-empty string or a number + * @return {String|Number} + * @api public + */ + +module.exports = function(val, options) { + options = options || {}; + var type = typeof val; + if (type === 'string' && val.length > 0) { + return parse(val); + } else if (type === 'number' && isNaN(val) === false) { + return options.long ? fmtLong(val) : fmtShort(val); + } + throw new Error( + 'val is not a non-empty string or a valid number. val=' + + JSON.stringify(val) + ); +}; + +/** + * Parse the given `str` and return milliseconds. 
+ * + * @param {String} str + * @return {Number} + * @api private + */ + +function parse(str) { + str = String(str); + if (str.length > 100) { + return; + } + var match = /^((?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|years?|yrs?|y)?$/i.exec( + str + ); + if (!match) { + return; + } + var n = parseFloat(match[1]); + var type = (match[2] || 'ms').toLowerCase(); + switch (type) { + case 'years': + case 'year': + case 'yrs': + case 'yr': + case 'y': + return n * y; + case 'days': + case 'day': + case 'd': + return n * d; + case 'hours': + case 'hour': + case 'hrs': + case 'hr': + case 'h': + return n * h; + case 'minutes': + case 'minute': + case 'mins': + case 'min': + case 'm': + return n * m; + case 'seconds': + case 'second': + case 'secs': + case 'sec': + case 's': + return n * s; + case 'milliseconds': + case 'millisecond': + case 'msecs': + case 'msec': + case 'ms': + return n; + default: + return undefined; + } +} + +/** + * Short format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private + */ + +function fmtShort(ms) { + if (ms >= d) { + return Math.round(ms / d) + 'd'; + } + if (ms >= h) { + return Math.round(ms / h) + 'h'; + } + if (ms >= m) { + return Math.round(ms / m) + 'm'; + } + if (ms >= s) { + return Math.round(ms / s) + 's'; + } + return ms + 'ms'; +} + +/** + * Long format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private + */ + +function fmtLong(ms) { + return plural(ms, d, 'day') || + plural(ms, h, 'hour') || + plural(ms, m, 'minute') || + plural(ms, s, 'second') || + ms + ' ms'; +} + +/** + * Pluralization helper. + */ + +function plural(ms, n, name) { + if (ms < n) { + return; + } + if (ms < n * 1.5) { + return Math.floor(ms / n) + ' ' + name; + } + return Math.ceil(ms / n) + ' ' + name + 's'; +} diff --git a/node_modules/ms/license.md b/node_modules/ms/license.md new file mode 100644 index 0000000..69b6125 --- /dev/null +++ b/node_modules/ms/license.md @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2016 Zeit, Inc. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/node_modules/ms/package.json b/node_modules/ms/package.json new file mode 100644 index 0000000..6a31c81 --- /dev/null +++ b/node_modules/ms/package.json @@ -0,0 +1,37 @@ +{ + "name": "ms", + "version": "2.0.0", + "description": "Tiny milisecond conversion utility", + "repository": "zeit/ms", + "main": "./index", + "files": [ + "index.js" + ], + "scripts": { + "precommit": "lint-staged", + "lint": "eslint lib/* bin/*", + "test": "mocha tests.js" + }, + "eslintConfig": { + "extends": "eslint:recommended", + "env": { + "node": true, + "es6": true + } + }, + "lint-staged": { + "*.js": [ + "npm run lint", + "prettier --single-quote --write", + "git add" + ] + }, + "license": "MIT", + "devDependencies": { + "eslint": "3.19.0", + "expect.js": "0.3.1", + "husky": "0.13.3", + "lint-staged": "3.4.1", + "mocha": "3.4.1" + } +} diff --git a/node_modules/ms/readme.md b/node_modules/ms/readme.md new file mode 100644 index 0000000..84a9974 --- /dev/null +++ b/node_modules/ms/readme.md @@ -0,0 +1,51 @@ +# ms + +[![Build Status](https://travis-ci.org/zeit/ms.svg?branch=master)](https://travis-ci.org/zeit/ms) +[![Slack Channel](http://zeit-slackin.now.sh/badge.svg)](https://zeit.chat/) + +Use this package to easily convert various time formats to milliseconds. + +## Examples + +```js +ms('2 days') // 172800000 +ms('1d') // 86400000 +ms('10h') // 36000000 +ms('2.5 hrs') // 9000000 +ms('2h') // 7200000 +ms('1m') // 60000 +ms('5s') // 5000 +ms('1y') // 31557600000 +ms('100') // 100 +``` + +### Convert from milliseconds + +```js +ms(60000) // "1m" +ms(2 * 60000) // "2m" +ms(ms('10 hours')) // "10h" +``` + +### Time format written-out + +```js +ms(60000, { long: true }) // "1 minute" +ms(2 * 60000, { long: true }) // "2 minutes" +ms(ms('10 hours'), { long: true }) // "10 hours" +``` + +## Features + +- Works both in [node](https://nodejs.org) and in the browser. +- If a number is supplied to `ms`, a string with a unit is returned. +- If a string that contains the number is supplied, it returns it as a number (e.g.: it returns `100` for `'100'`). +- If you pass a string with a number and a valid unit, the number of equivalent ms is returned. + +## Caught a bug? + +1. [Fork](https://help.github.com/articles/fork-a-repo/) this repository to your own GitHub account and then [clone](https://help.github.com/articles/cloning-a-repository/) it to your local device +2. Link the package to the global module directory: `npm link` +3. Within the module you want to test your local development instance of ms, just link it to the dependencies: `npm link ms`. Instead of the default one from npm, node will now use your clone of ms! 
+ +As always, you can run the tests using: `npm test` diff --git a/node_modules/object-inspect/.eslintrc b/node_modules/object-inspect/.eslintrc new file mode 100644 index 0000000..21f9039 --- /dev/null +++ b/node_modules/object-inspect/.eslintrc @@ -0,0 +1,53 @@ +{ + "root": true, + "extends": "@ljharb", + "rules": { + "complexity": 0, + "func-style": [2, "declaration"], + "indent": [2, 4], + "max-lines": 1, + "max-lines-per-function": 1, + "max-params": [2, 4], + "max-statements": 0, + "max-statements-per-line": [2, { "max": 2 }], + "no-magic-numbers": 0, + "no-param-reassign": 1, + "strict": 0, // TODO + }, + "overrides": [ + { + "files": ["test/**", "test-*", "example/**"], + "extends": "@ljharb/eslint-config/tests", + "rules": { + "id-length": 0, + }, + }, + { + "files": ["example/**"], + "rules": { + "no-console": 0, + }, + }, + { + "files": ["test/browser/**"], + "env": { + "browser": true, + }, + }, + { + "files": ["test/bigint*"], + "rules": { + "new-cap": [2, { "capIsNewExceptions": ["BigInt"] }], + }, + }, + { + "files": "index.js", + "globals": { + "HTMLElement": false, + }, + "rules": { + "no-use-before-define": 1, + }, + }, + ], +} diff --git a/node_modules/object-inspect/.github/FUNDING.yml b/node_modules/object-inspect/.github/FUNDING.yml new file mode 100644 index 0000000..730276b --- /dev/null +++ b/node_modules/object-inspect/.github/FUNDING.yml @@ -0,0 +1,12 @@ +# These are supported funding model platforms + +github: [ljharb] +patreon: # Replace with a single Patreon username +open_collective: # Replace with a single Open Collective username +ko_fi: # Replace with a single Ko-fi username +tidelift: npm/object-inspect +community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry +liberapay: # Replace with a single Liberapay username +issuehunt: # Replace with a single IssueHunt username +otechie: # Replace with a single Otechie username +custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2'] diff --git a/node_modules/object-inspect/.nycrc b/node_modules/object-inspect/.nycrc new file mode 100644 index 0000000..58a5db7 --- /dev/null +++ b/node_modules/object-inspect/.nycrc @@ -0,0 +1,13 @@ +{ + "all": true, + "check-coverage": false, + "instrumentation": false, + "sourceMap": false, + "reporter": ["text-summary", "text", "html", "json"], + "exclude": [ + "coverage", + "example", + "test", + "test-core-js.js" + ] +} diff --git a/node_modules/object-inspect/CHANGELOG.md b/node_modules/object-inspect/CHANGELOG.md new file mode 100644 index 0000000..36d1958 --- /dev/null +++ b/node_modules/object-inspect/CHANGELOG.md @@ -0,0 +1,360 @@ +# Changelog + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
+ +## [v1.12.2](https://github.com/inspect-js/object-inspect/compare/v1.12.1...v1.12.2) - 2022-05-26 + +### Commits + +- [Fix] use `util.inspect` for a custom inspection symbol method [`e243bf2`](https://github.com/inspect-js/object-inspect/commit/e243bf2eda6c4403ac6f1146fddb14d12e9646c1) +- [meta] add support info [`ca20ba3`](https://github.com/inspect-js/object-inspect/commit/ca20ba35713c17068ca912a86c542f5e8acb656c) +- [Fix] ignore `cause` in node v16.9 and v16.10 where it has a bug [`86aa553`](https://github.com/inspect-js/object-inspect/commit/86aa553a4a455562c2c56f1540f0bf857b9d314b) + +## [v1.12.1](https://github.com/inspect-js/object-inspect/compare/v1.12.0...v1.12.1) - 2022-05-21 + +### Commits + +- [Tests] use `mock-property` [`4ec8893`](https://github.com/inspect-js/object-inspect/commit/4ec8893ea9bfd28065ca3638cf6762424bf44352) +- [meta] use `npmignore` to autogenerate an npmignore file [`07f868c`](https://github.com/inspect-js/object-inspect/commit/07f868c10bd25a9d18686528339bb749c211fc9a) +- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `aud`, `auto-changelog`, `tape` [`b05244b`](https://github.com/inspect-js/object-inspect/commit/b05244b4f331e00c43b3151bc498041be77ccc91) +- [Dev Deps] update `@ljharb/eslint-config`, `error-cause`, `es-value-fixtures`, `functions-have-names`, `tape` [`d037398`](https://github.com/inspect-js/object-inspect/commit/d037398dcc5d531532e4c19c4a711ed677f579c1) +- [Fix] properly handle callable regexes in older engines [`848fe48`](https://github.com/inspect-js/object-inspect/commit/848fe48bd6dd0064ba781ee6f3c5e54a94144c37) + +## [v1.12.0](https://github.com/inspect-js/object-inspect/compare/v1.11.1...v1.12.0) - 2021-12-18 + +### Commits + +- [New] add `numericSeparator` boolean option [`2d2d537`](https://github.com/inspect-js/object-inspect/commit/2d2d537f5359a4300ce1c10241369f8024f89e11) +- [Robustness] cache more prototype methods [`191533d`](https://github.com/inspect-js/object-inspect/commit/191533da8aec98a05eadd73a5a6e979c9c8653e8) +- [New] ensure an Error’s `cause` is displayed [`53bc2ce`](https://github.com/inspect-js/object-inspect/commit/53bc2cee4e5a9cc4986f3cafa22c0685f340715e) +- [Dev Deps] update `eslint`, `@ljharb/eslint-config` [`bc164b6`](https://github.com/inspect-js/object-inspect/commit/bc164b6e2e7d36b263970f16f54de63048b84a36) +- [Robustness] cache `RegExp.prototype.test` [`a314ab8`](https://github.com/inspect-js/object-inspect/commit/a314ab8271b905cbabc594c82914d2485a8daf12) +- [meta] fix auto-changelog settings [`5ed0983`](https://github.com/inspect-js/object-inspect/commit/5ed0983be72f73e32e2559997517a95525c7e20d) + +## [v1.11.1](https://github.com/inspect-js/object-inspect/compare/v1.11.0...v1.11.1) - 2021-12-05 + +### Commits + +- [meta] add `auto-changelog` [`7dbdd22`](https://github.com/inspect-js/object-inspect/commit/7dbdd228401d6025d8b7391476d88aee9ea9bbdf) +- [actions] reuse common workflows [`c8823bc`](https://github.com/inspect-js/object-inspect/commit/c8823bc0a8790729680709d45fb6e652432e91aa) +- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `safe-publish-latest`, `tape` [`7532b12`](https://github.com/inspect-js/object-inspect/commit/7532b120598307497b712890f75af8056f6d37a6) +- [Refactor] use `has-tostringtag` to behave correctly in the presence of symbol shams [`94abb5d`](https://github.com/inspect-js/object-inspect/commit/94abb5d4e745bf33253942dea86b3e538d2ff6c6) +- [actions] update codecov uploader [`5ed5102`](https://github.com/inspect-js/object-inspect/commit/5ed51025267a00e53b1341357315490ac4eb0874) 
+- [Dev Deps] update `eslint`, `tape` [`37b2ad2`](https://github.com/inspect-js/object-inspect/commit/37b2ad26c08d94bfd01d5d07069a0b28ef4e2ad7) +- [meta] add `sideEffects` flag [`d341f90`](https://github.com/inspect-js/object-inspect/commit/d341f905ef8bffa6a694cda6ddc5ba343532cd4f) + +## [v1.11.0](https://github.com/inspect-js/object-inspect/compare/v1.10.3...v1.11.0) - 2021-07-12 + +### Commits + +- [New] `customInspect`: add `symbol` option, to mimic modern util.inspect behavior [`e973a6e`](https://github.com/inspect-js/object-inspect/commit/e973a6e21f8140c5837cf25e9d89bdde88dc3120) +- [Dev Deps] update `eslint` [`05f1cb3`](https://github.com/inspect-js/object-inspect/commit/05f1cb3cbcfe1f238e8b51cf9bc294305b7ed793) + +## [v1.10.3](https://github.com/inspect-js/object-inspect/compare/v1.10.2...v1.10.3) - 2021-05-07 + +### Commits + +- [Fix] handle core-js Symbol shams [`4acfc2c`](https://github.com/inspect-js/object-inspect/commit/4acfc2c4b503498759120eb517abad6d51c9c5d6) +- [readme] update badges [`95c323a`](https://github.com/inspect-js/object-inspect/commit/95c323ad909d6cbabb95dd6015c190ba6db9c1f2) +- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `aud` [`cb38f48`](https://github.com/inspect-js/object-inspect/commit/cb38f485de6ec7a95109b5a9bbd0a1deba2f6611) + +## [v1.10.2](https://github.com/inspect-js/object-inspect/compare/v1.10.1...v1.10.2) - 2021-04-17 + +### Commits + +- [Fix] use a robust check for a boxed Symbol [`87f12d6`](https://github.com/inspect-js/object-inspect/commit/87f12d6e69ce530be04659c81a4cd502943acac5) + +## [v1.10.1](https://github.com/inspect-js/object-inspect/compare/v1.10.0...v1.10.1) - 2021-04-17 + +### Commits + +- [Fix] use a robust check for a boxed bigint [`d5ca829`](https://github.com/inspect-js/object-inspect/commit/d5ca8298b6d2e5c7b9334a5b21b96ed95d225c91) + +## [v1.10.0](https://github.com/inspect-js/object-inspect/compare/v1.9.0...v1.10.0) - 2021-04-17 + +### Commits + +- [Tests] increase coverage [`d8abb8a`](https://github.com/inspect-js/object-inspect/commit/d8abb8a62c2f084919df994a433b346e0d87a227) +- [actions] use `node/install` instead of `node/run`; use `codecov` action [`4bfec2e`](https://github.com/inspect-js/object-inspect/commit/4bfec2e30aaef6ddef6cbb1448306f9f8b9520b7) +- [New] respect `Symbol.toStringTag` on objects [`799b58f`](https://github.com/inspect-js/object-inspect/commit/799b58f536a45e4484633a8e9daeb0330835f175) +- [Fix] do not allow Symbol.toStringTag to masquerade as builtins [`d6c5b37`](https://github.com/inspect-js/object-inspect/commit/d6c5b37d7e94427796b82432fb0c8964f033a6ab) +- [New] add `WeakRef` support [`b6d898e`](https://github.com/inspect-js/object-inspect/commit/b6d898ee21868c780a7ee66b28532b5b34ed7f09) +- [meta] do not publish github action workflow files [`918cdfc`](https://github.com/inspect-js/object-inspect/commit/918cdfc4b6fe83f559ff6ef04fe66201e3ff5cbd) +- [meta] create `FUNDING.yml` [`0bb5fc5`](https://github.com/inspect-js/object-inspect/commit/0bb5fc516dbcd2cd728bd89cee0b580acc5ce301) +- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `aud`, `tape` [`22c8dc0`](https://github.com/inspect-js/object-inspect/commit/22c8dc0cac113d70f4781e49a950070923a671be) +- [meta] use `prepublishOnly` script for npm 7+ [`e52ee09`](https://github.com/inspect-js/object-inspect/commit/e52ee09e8050b8dbac94ef57f786675567728223) +- [Dev Deps] update `eslint` [`7c4e6fd`](https://github.com/inspect-js/object-inspect/commit/7c4e6fdedcd27cc980e13c9ad834d05a96f3d40c) + +## 
[v1.9.0](https://github.com/inspect-js/object-inspect/compare/v1.8.0...v1.9.0) - 2020-11-30 + +### Commits + +- [Tests] migrate tests to Github Actions [`d262251`](https://github.com/inspect-js/object-inspect/commit/d262251e13e16d3490b5473672f6b6d6ff86675d) +- [New] add enumerable own Symbols to plain object output [`ee60c03`](https://github.com/inspect-js/object-inspect/commit/ee60c033088cff9d33baa71e59a362a541b48284) +- [Tests] add passing tests [`01ac3e4`](https://github.com/inspect-js/object-inspect/commit/01ac3e4b5a30f97875a63dc9b1416b3bd626afc9) +- [actions] add "Require Allow Edits" action [`c2d7746`](https://github.com/inspect-js/object-inspect/commit/c2d774680cde4ca4af332d84d4121b26f798ba9e) +- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `aud`, `core-js` [`70058de`](https://github.com/inspect-js/object-inspect/commit/70058de1579fc54d1d15ed6c2dbe246637ce70ff) +- [Fix] hex characters in strings should be uppercased, to match node `assert` [`6ab8faa`](https://github.com/inspect-js/object-inspect/commit/6ab8faaa0abc08fe7a8e2afd8b39c6f1f0e00113) +- [Tests] run `nyc` on all tests [`4c47372`](https://github.com/inspect-js/object-inspect/commit/4c473727879ddc8e28b599202551ddaaf07b6210) +- [Tests] node 0.8 has an unpredictable property order; fix `groups` test by removing property [`f192069`](https://github.com/inspect-js/object-inspect/commit/f192069a978a3b60e6f0e0d45ac7df260ab9a778) +- [New] add enumerable properties to Function inspect result, per node’s `assert` [`fd38e1b`](https://github.com/inspect-js/object-inspect/commit/fd38e1bc3e2a1dc82091ce3e021917462eee64fc) +- [Tests] fix tests for node < 10, due to regex match `groups` [`2ac6462`](https://github.com/inspect-js/object-inspect/commit/2ac6462cc4f72eaa0b63a8cfee9aabe3008b2330) +- [Dev Deps] update `eslint`, `@ljharb/eslint-config` [`44b59e2`](https://github.com/inspect-js/object-inspect/commit/44b59e2676a7f825ef530dfd19dafb599e3b9456) +- [Robustness] cache `Symbol.prototype.toString` [`f3c2074`](https://github.com/inspect-js/object-inspect/commit/f3c2074d8f32faf8292587c07c9678ea931703dd) +- [Dev Deps] update `eslint` [`9411294`](https://github.com/inspect-js/object-inspect/commit/94112944b9245e3302e25453277876402d207e7f) +- [meta] `require-allow-edits` no longer requires an explicit github token [`36c0220`](https://github.com/inspect-js/object-inspect/commit/36c02205de3c2b0e84d53777c5c9fd54a36c48ab) +- [actions] update rebase checkout action to v2 [`55a39a6`](https://github.com/inspect-js/object-inspect/commit/55a39a64e944f19c6a7d8efddf3df27700f20d14) +- [actions] switch Automatic Rebase workflow to `pull_request_target` event [`f59fd3c`](https://github.com/inspect-js/object-inspect/commit/f59fd3cf406c3a7c7ece140904a80bbc6bacfcca) +- [Dev Deps] update `eslint` [`a492bec`](https://github.com/inspect-js/object-inspect/commit/a492becec644b0155c9c4bc1caf6f9fac11fb2c7) + +## [v1.8.0](https://github.com/inspect-js/object-inspect/compare/v1.7.0...v1.8.0) - 2020-06-18 + +### Fixed + +- [New] add `indent` option [`#27`](https://github.com/inspect-js/object-inspect/issues/27) + +### Commits + +- [Tests] add codecov [`4324cbb`](https://github.com/inspect-js/object-inspect/commit/4324cbb1a2bd7710822a4151ff373570db22453e) +- [New] add `maxStringLength` option [`b3995cb`](https://github.com/inspect-js/object-inspect/commit/b3995cb71e15b5ee127a3094c43994df9d973502) +- [New] add `customInspect` option, to disable custom inspect methods 
[`28b9179`](https://github.com/inspect-js/object-inspect/commit/28b9179ee802bb3b90810100c11637db90c2fb6d) +- [Tests] add Date and RegExp tests [`3b28eca`](https://github.com/inspect-js/object-inspect/commit/3b28eca57b0367aeadffac604ea09e8bdae7d97b) +- [actions] add automatic rebasing / merge commit blocking [`0d9c6c0`](https://github.com/inspect-js/object-inspect/commit/0d9c6c044e83475ff0bfffb9d35b149834c83a2e) +- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `core-js`, `tape`; add `aud` [`7c204f2`](https://github.com/inspect-js/object-inspect/commit/7c204f22b9e41bc97147f4d32d4cb045b17769a6) +- [readme] fix repo URLs, remove testling [`34ca9a0`](https://github.com/inspect-js/object-inspect/commit/34ca9a0dabfe75bd311f806a326fadad029909a3) +- [Fix] when truncating a deep array, note it as `[Array]` instead of just `[Object]` [`f74c82d`](https://github.com/inspect-js/object-inspect/commit/f74c82dd0b35386445510deb250f34c41be3ec0e) +- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `tape` [`1a8a5ea`](https://github.com/inspect-js/object-inspect/commit/1a8a5ea069ea2bee89d77caedad83ffa23d35711) +- [Fix] do not be fooled by a function’s own `toString` method [`7cb5c65`](https://github.com/inspect-js/object-inspect/commit/7cb5c657a976f94715c19c10556a30f15bb7d5d7) +- [patch] indicate explicitly that anon functions are anonymous, to match node [`81ebdd4`](https://github.com/inspect-js/object-inspect/commit/81ebdd4215005144074bbdff3f6bafa01407910a) +- [Dev Deps] loosen the `core-js` dep [`e7472e8`](https://github.com/inspect-js/object-inspect/commit/e7472e8e242117670560bd995830c2a4d12080f5) +- [Dev Deps] update `tape` [`699827e`](https://github.com/inspect-js/object-inspect/commit/699827e6b37258b5203c33c78c009bf4b0e6a66d) +- [meta] add `safe-publish-latest` [`c5d2868`](https://github.com/inspect-js/object-inspect/commit/c5d2868d6eb33c472f37a20f89ceef2787046088) +- [Dev Deps] update `@ljharb/eslint-config` [`9199501`](https://github.com/inspect-js/object-inspect/commit/919950195d486114ccebacbdf9d74d7f382693b0) + +## [v1.7.0](https://github.com/inspect-js/object-inspect/compare/v1.6.0...v1.7.0) - 2019-11-10 + +### Commits + +- [Tests] use shared travis-ci configs [`19899ed`](https://github.com/inspect-js/object-inspect/commit/19899edbf31f4f8809acf745ce34ad1ce1bfa63b) +- [Tests] add linting [`a00f057`](https://github.com/inspect-js/object-inspect/commit/a00f057d917f66ea26dd37769c6b810ec4af97e8) +- [Tests] lint last file [`2698047`](https://github.com/inspect-js/object-inspect/commit/2698047b58af1e2e88061598ef37a75f228dddf6) +- [Tests] up to `node` `v12.7`, `v11.15`, `v10.16`, `v8.16`, `v6.17` [`589e87a`](https://github.com/inspect-js/object-inspect/commit/589e87a99cadcff4b600e6a303418e9d922836e8) +- [New] add support for `WeakMap` and `WeakSet` [`3ddb3e4`](https://github.com/inspect-js/object-inspect/commit/3ddb3e4e0c8287130c61a12e0ed9c104b1549306) +- [meta] clean up license so github can detect it properly [`27527bb`](https://github.com/inspect-js/object-inspect/commit/27527bb801520c9610c68cc3b55d6f20a2bee56d) +- [Tests] cover `util.inspect.custom` [`36d47b9`](https://github.com/inspect-js/object-inspect/commit/36d47b9c59056a57ef2f1491602c726359561800) +- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `core-js`, `tape` [`b614eaa`](https://github.com/inspect-js/object-inspect/commit/b614eaac901da0e5c69151f534671f990a94cace) +- [Tests] fix coverage thresholds [`7b7b176`](https://github.com/inspect-js/object-inspect/commit/7b7b176e15f8bd6e8b2f261ff5a493c2fe78d9c2) +- [Tests] bigint tests 
now can run on unflagged node [`063af31`](https://github.com/inspect-js/object-inspect/commit/063af31ce9cd13c202e3b67c07ba06dc9b7c0f81) +- [Refactor] add early bailout to `isMap` and `isSet` checks [`fc51047`](https://github.com/inspect-js/object-inspect/commit/fc5104714a3671d37e225813db79470d6335683b) +- [meta] add `funding` field [`7f9953a`](https://github.com/inspect-js/object-inspect/commit/7f9953a113eec7b064a6393cf9f90ba15f1d131b) +- [Tests] Fix invalid strict-mode syntax with hexadecimal [`a8b5425`](https://github.com/inspect-js/object-inspect/commit/a8b542503b4af1599a275209a1a99f5fdedb1ead) +- [Dev Deps] update `@ljharb/eslint-config` [`98df157`](https://github.com/inspect-js/object-inspect/commit/98df1577314d9188a3fc3f17fdcf2fba697ae1bd) +- add copyright to LICENSE [`bb69fd0`](https://github.com/inspect-js/object-inspect/commit/bb69fd017a062d299e44da1f9b2c7dcd67f621e6) +- [Tests] use `npx aud` in `posttest` [`4838353`](https://github.com/inspect-js/object-inspect/commit/4838353593974cf7f905b9ef04c03c094f0cdbe2) +- [Tests] move `0.6` to allowed failures, because it won‘t build on travis [`1bff32a`](https://github.com/inspect-js/object-inspect/commit/1bff32aa52e8aea687f0856b28ba754b3e43ebf7) + +## [v1.6.0](https://github.com/inspect-js/object-inspect/compare/v1.5.0...v1.6.0) - 2018-05-02 + +### Commits + +- [New] add support for boxed BigInt primitives [`356c66a`](https://github.com/inspect-js/object-inspect/commit/356c66a410e7aece7162c8319880a5ef647beaa9) +- [Tests] up to `node` `v10.0`, `v9.11`, `v8.11`, `v6.14`, `v4.9` [`c77b65b`](https://github.com/inspect-js/object-inspect/commit/c77b65bba593811b906b9ec57561c5cba92e2db3) +- [New] Add support for upcoming `BigInt` [`1ac548e`](https://github.com/inspect-js/object-inspect/commit/1ac548e4b27e26466c28c9a5e63e5d4e0591c31f) +- [Tests] run bigint tests in CI with --harmony-bigint flag [`d31b738`](https://github.com/inspect-js/object-inspect/commit/d31b73831880254b5c6cf5691cda9a149fbc5f04) +- [Dev Deps] update `core-js`, `tape` [`ff9eff6`](https://github.com/inspect-js/object-inspect/commit/ff9eff67113341ee1aaf80c1c22d683f43bfbccf) +- [Docs] fix example to use `safer-buffer` [`48cae12`](https://github.com/inspect-js/object-inspect/commit/48cae12a73ec6cacc955175bc56bbe6aee6a211f) + +## [v1.5.0](https://github.com/inspect-js/object-inspect/compare/v1.4.1...v1.5.0) - 2017-12-25 + +### Commits + +- [New] add `quoteStyle` option [`f5a72d2`](https://github.com/inspect-js/object-inspect/commit/f5a72d26edb3959b048f74c056ca7100a6b091e4) +- [Tests] add more test coverage [`30ebe4e`](https://github.com/inspect-js/object-inspect/commit/30ebe4e1fa943b99ecbb85be7614256d536e2759) +- [Tests] require 0.6 to pass [`99a008c`](https://github.com/inspect-js/object-inspect/commit/99a008ccace189a60fd7da18bf00e32c9572b980) + +## [v1.4.1](https://github.com/inspect-js/object-inspect/compare/v1.4.0...v1.4.1) - 2017-12-19 + +### Commits + +- [Tests] up to `node` `v9.3`, `v8.9`, `v6.12` [`6674476`](https://github.com/inspect-js/object-inspect/commit/6674476cc56acaac1bde96c84fed5ef631911906) +- [Fix] `inspect(Object(-0))` should be “Object(-0)”, not “Object(0)” [`d0a031f`](https://github.com/inspect-js/object-inspect/commit/d0a031f1cbb3024ee9982bfe364dd18a7e4d1bd3) + +## [v1.4.0](https://github.com/inspect-js/object-inspect/compare/v1.3.0...v1.4.0) - 2017-10-24 + +### Commits + +- [Tests] add `npm run coverage` [`3b48fb2`](https://github.com/inspect-js/object-inspect/commit/3b48fb25db037235eeb808f0b2830aad7aa36f70) +- [Tests] remove commented-out osx builds 
[`71e24db`](https://github.com/inspect-js/object-inspect/commit/71e24db8ad6ee3b9b381c5300b0475f2ba595a73) +- [New] add support for `util.inspect.custom`, in node only. [`20cca77`](https://github.com/inspect-js/object-inspect/commit/20cca7762d7e17f15b21a90793dff84acce155df) +- [Tests] up to `node` `v8.6`; use `nvm install-latest-npm` to ensure new npm doesn’t break old node [`252952d`](https://github.com/inspect-js/object-inspect/commit/252952d230d8065851dd3d4d5fe8398aae068529) +- [Tests] up to `node` `v8.8` [`4aa868d`](https://github.com/inspect-js/object-inspect/commit/4aa868d3a62914091d489dd6ec6eed194ee67cd3) +- [Dev Deps] update `core-js`, `tape` [`59483d1`](https://github.com/inspect-js/object-inspect/commit/59483d1df418f852f51fa0db7b24aa6b0209a27a) + +## [v1.3.0](https://github.com/inspect-js/object-inspect/compare/v1.2.2...v1.3.0) - 2017-07-31 + +### Fixed + +- [Fix] Map/Set: work around core-js bug < v2.5.0 [`#9`](https://github.com/inspect-js/object-inspect/issues/9) + +### Commits + +- [New] add support for arrays with additional object keys [`0d19937`](https://github.com/inspect-js/object-inspect/commit/0d199374ee37959e51539616666f420ccb29acb9) +- [Tests] up to `node` `v8.2`, `v7.10`, `v6.11`; fix new npm breaking on older nodes [`e24784a`](https://github.com/inspect-js/object-inspect/commit/e24784a90c49117787157a12a63897c49cf89bbb) +- Only apps should have lockfiles [`c6faebc`](https://github.com/inspect-js/object-inspect/commit/c6faebcb2ee486a889a4a1c4d78c0776c7576185) +- [Dev Deps] update `tape` [`7345a0a`](https://github.com/inspect-js/object-inspect/commit/7345a0aeba7e91b888a079c10004d17696a7f586) + +## [v1.2.2](https://github.com/inspect-js/object-inspect/compare/v1.2.1...v1.2.2) - 2017-03-24 + +### Commits + +- [Tests] up to `node` `v7.7`, `v6.10`, `v4.8`; improve test matrix [`a2ddc15`](https://github.com/inspect-js/object-inspect/commit/a2ddc15a1f2c65af18076eea1c0eb9cbceb478a0) +- [Tests] up to `node` `v7.0`, `v6.9`, `v5.12`, `v4.6`, `io.js` `v3.3`; improve test matrix [`a48949f`](https://github.com/inspect-js/object-inspect/commit/a48949f6b574b2d4d2298109d8e8d0eb3e7a83e7) +- [Performance] check for primitive types as early as possible. [`3b8092a`](https://github.com/inspect-js/object-inspect/commit/3b8092a2a4deffd0575f94334f00194e2d48dad3) +- [Refactor] remove unneeded `else`s. [`7255034`](https://github.com/inspect-js/object-inspect/commit/725503402e08de4f96f6bf2d8edef44ac36f26b6) +- [Refactor] avoid recreating `lowbyte` function every time. [`81edd34`](https://github.com/inspect-js/object-inspect/commit/81edd3475bd15bdd18e84de7472033dcf5004aaa) +- [Fix] differentiate -0 from 0 [`521d345`](https://github.com/inspect-js/object-inspect/commit/521d3456b009da7bf1c5785c8a9df5a9f8718264) +- [Refactor] move object key gathering into separate function [`aca6265`](https://github.com/inspect-js/object-inspect/commit/aca626536eaeef697196c6e9db3e90e7e0355b6a) +- [Refactor] consolidate wrapping logic for boxed primitives into a function. [`4e440cd`](https://github.com/inspect-js/object-inspect/commit/4e440cd9065df04802a2a1dead03f48c353ca301) +- [Robustness] use `typeof` instead of comparing to literal `undefined` [`5ca6f60`](https://github.com/inspect-js/object-inspect/commit/5ca6f601937506daff8ed2fcf686363b55807b69) +- [Refactor] consolidate Map/Set notations. [`4e576e5`](https://github.com/inspect-js/object-inspect/commit/4e576e5d7ed2f9ec3fb7f37a0d16732eb10758a9) +- [Tests] ensure that this function remains anonymous, despite ES6 name inference. 
[`7540ae5`](https://github.com/inspect-js/object-inspect/commit/7540ae591278756db614fa4def55ca413150e1a3) +- [Refactor] explicitly coerce Error objects to strings. [`7f4ca84`](https://github.com/inspect-js/object-inspect/commit/7f4ca8424ee8dc2c0ca5a422d94f7fac40327261) +- [Refactor] split up `var` declarations for debuggability [`6f2c11e`](https://github.com/inspect-js/object-inspect/commit/6f2c11e6a85418586a00292dcec5e97683f89bc3) +- [Robustness] cache `Object.prototype.toString` [`df44a20`](https://github.com/inspect-js/object-inspect/commit/df44a20adfccf31529d60d1df2079bfc3c836e27) +- [Dev Deps] update `tape` [`3ec714e`](https://github.com/inspect-js/object-inspect/commit/3ec714eba57bc3f58a6eb4fca1376f49e70d300a) +- [Dev Deps] update `tape` [`beb72d9`](https://github.com/inspect-js/object-inspect/commit/beb72d969653747d7cde300393c28755375329b0) + +## [v1.2.1](https://github.com/inspect-js/object-inspect/compare/v1.2.0...v1.2.1) - 2016-04-09 + +### Fixed + +- [Fix] fix Boolean `false` object inspection. [`#7`](https://github.com/substack/object-inspect/pull/7) + +## [v1.2.0](https://github.com/inspect-js/object-inspect/compare/v1.1.0...v1.2.0) - 2016-04-09 + +### Fixed + +- [New] add support for inspecting String/Number/Boolean objects. [`#6`](https://github.com/inspect-js/object-inspect/issues/6) + +### Commits + +- [Dev Deps] update `tape` [`742caa2`](https://github.com/inspect-js/object-inspect/commit/742caa262cf7af4c815d4821c8bd0129c1446432) + +## [v1.1.0](https://github.com/inspect-js/object-inspect/compare/1.0.2...v1.1.0) - 2015-12-14 + +### Merged + +- [New] add ES6 Map/Set support. [`#4`](https://github.com/inspect-js/object-inspect/pull/4) + +### Fixed + +- [New] add ES6 Map/Set support. [`#3`](https://github.com/inspect-js/object-inspect/issues/3) + +### Commits + +- Update `travis.yml` to test on bunches of `iojs` and `node` versions. [`4c1fd65`](https://github.com/inspect-js/object-inspect/commit/4c1fd65cc3bd95307e854d114b90478324287fd2) +- [Dev Deps] update `tape` [`88a907e`](https://github.com/inspect-js/object-inspect/commit/88a907e33afbe408e4b5d6e4e42a33143f88848c) + +## [1.0.2](https://github.com/inspect-js/object-inspect/compare/1.0.1...1.0.2) - 2015-08-07 + +### Commits + +- [Fix] Cache `Object.prototype.hasOwnProperty` in case it's deleted later. [`1d0075d`](https://github.com/inspect-js/object-inspect/commit/1d0075d3091dc82246feeb1f9871cb2b8ed227b3) +- [Dev Deps] Update `tape` [`ca8d5d7`](https://github.com/inspect-js/object-inspect/commit/ca8d5d75635ddbf76f944e628267581e04958457) +- gitignore node_modules since this is a reusable modules. [`ed41407`](https://github.com/inspect-js/object-inspect/commit/ed41407811743ca530cdeb28f982beb96026af82) + +## [1.0.1](https://github.com/inspect-js/object-inspect/compare/1.0.0...1.0.1) - 2015-07-19 + +### Commits + +- Make `inspect` work with symbol primitives and objects, including in node 0.11 and 0.12. 
[`ddf1b94`](https://github.com/inspect-js/object-inspect/commit/ddf1b94475ab951f1e3bccdc0a48e9073cfbfef4) +- bump tape [`103d674`](https://github.com/inspect-js/object-inspect/commit/103d67496b504bdcfdd765d303a773f87ec106e2) +- use newer travis config [`d497276`](https://github.com/inspect-js/object-inspect/commit/d497276c1da14234bb5098a59cf20de75fbc316a) + +## [1.0.0](https://github.com/inspect-js/object-inspect/compare/0.4.0...1.0.0) - 2014-08-05 + +### Commits + +- error inspect works properly [`260a22d`](https://github.com/inspect-js/object-inspect/commit/260a22d134d3a8a482c67d52091c6040c34f4299) +- seen coverage [`57269e8`](https://github.com/inspect-js/object-inspect/commit/57269e8baa992a7439047f47325111fdcbcb8417) +- htmlelement instance coverage [`397ffe1`](https://github.com/inspect-js/object-inspect/commit/397ffe10a1980350868043ef9de65686d438979f) +- more element coverage [`6905cc2`](https://github.com/inspect-js/object-inspect/commit/6905cc2f7df35600177e613b0642b4df5efd3eca) +- failing test for type errors [`385b615`](https://github.com/inspect-js/object-inspect/commit/385b6152e49b51b68449a662f410b084ed7c601a) +- fn name coverage [`edc906d`](https://github.com/inspect-js/object-inspect/commit/edc906d40fca6b9194d304062c037ee8e398c4c2) +- server-side element test [`362d1d3`](https://github.com/inspect-js/object-inspect/commit/362d1d3e86f187651c29feeb8478110afada385b) +- custom inspect fn [`e89b0f6`](https://github.com/inspect-js/object-inspect/commit/e89b0f6fe6d5e03681282af83732a509160435a6) +- fixed browser test [`b530882`](https://github.com/inspect-js/object-inspect/commit/b5308824a1c8471c5617e394766a03a6977102a9) +- depth test, matches node [`1cfd9e0`](https://github.com/inspect-js/object-inspect/commit/1cfd9e0285a4ae1dff44101ad482915d9bf47e48) +- exercise hasOwnProperty path [`8d753fb`](https://github.com/inspect-js/object-inspect/commit/8d753fb362a534fa1106e4d80f2ee9bea06a66d9) +- more cases covered for errors [`c5c46a5`](https://github.com/inspect-js/object-inspect/commit/c5c46a569ec4606583497e8550f0d8c7ad39a4a4) +- \W obj key test case [`b0eceee`](https://github.com/inspect-js/object-inspect/commit/b0eceeea6e0eb94d686c1046e99b9e25e5005f75) +- coverage for explicit depth param [`e12b91c`](https://github.com/inspect-js/object-inspect/commit/e12b91cd59683362f3a0e80f46481a0211e26c15) + +## [0.4.0](https://github.com/inspect-js/object-inspect/compare/0.3.1...0.4.0) - 2014-03-21 + +### Commits + +- passing lowbyte interpolation test [`b847511`](https://github.com/inspect-js/object-inspect/commit/b8475114f5def7e7961c5353d48d3d8d9a520985) +- lowbyte test [`4a2b0e1`](https://github.com/inspect-js/object-inspect/commit/4a2b0e142667fc933f195472759385ac08f3946c) + +## [0.3.1](https://github.com/inspect-js/object-inspect/compare/0.3.0...0.3.1) - 2014-03-04 + +### Commits + +- sort keys [`a07b19c`](https://github.com/inspect-js/object-inspect/commit/a07b19cc3b1521a82d4fafb6368b7a9775428a05) + +## [0.3.0](https://github.com/inspect-js/object-inspect/compare/0.2.0...0.3.0) - 2014-03-04 + +### Commits + +- [] and {} instead of [ ] and { } [`654c44b`](https://github.com/inspect-js/object-inspect/commit/654c44b2865811f3519e57bb8526e0821caf5c6b) + +## [0.2.0](https://github.com/inspect-js/object-inspect/compare/0.1.3...0.2.0) - 2014-03-04 + +### Commits + +- failing holes test [`99cdfad`](https://github.com/inspect-js/object-inspect/commit/99cdfad03c6474740275a75636fe6ca86c77737a) +- regex already work 
[`e324033`](https://github.com/inspect-js/object-inspect/commit/e324033267025995ec97d32ed0a65737c99477a6) +- failing undef/null test [`1f88a00`](https://github.com/inspect-js/object-inspect/commit/1f88a00265d3209719dda8117b7e6360b4c20943) +- holes in the all example [`7d345f3`](https://github.com/inspect-js/object-inspect/commit/7d345f3676dcbe980cff89a4f6c243269ebbb709) +- check for .inspect(), fixes Buffer use-case [`c3f7546`](https://github.com/inspect-js/object-inspect/commit/c3f75466dbca125347d49847c05262c292f12b79) +- fixes for holes [`ce25f73`](https://github.com/inspect-js/object-inspect/commit/ce25f736683de4b92ff27dc5471218415e2d78d8) +- weird null behavior [`405c1ea`](https://github.com/inspect-js/object-inspect/commit/405c1ea72cd5a8cf3b498c3eaa903d01b9fbcab5) +- tape is actually a devDependency, upgrade [`703b0ce`](https://github.com/inspect-js/object-inspect/commit/703b0ce6c5817b4245a082564bccd877e0bb6990) +- put date in the example [`a342219`](https://github.com/inspect-js/object-inspect/commit/a3422190eeaa013215f46df2d0d37b48595ac058) +- passing the null test [`4ab737e`](https://github.com/inspect-js/object-inspect/commit/4ab737ebf862a75d247ebe51e79307a34d6380d4) + +## [0.1.3](https://github.com/inspect-js/object-inspect/compare/0.1.1...0.1.3) - 2013-07-26 + +### Commits + +- special isElement() check [`882768a`](https://github.com/inspect-js/object-inspect/commit/882768a54035d30747be9de1baf14e5aa0daa128) +- oh right old IEs don't have indexOf either [`36d1275`](https://github.com/inspect-js/object-inspect/commit/36d12756c38b08a74370b0bb696c809e529913a5) + +## [0.1.1](https://github.com/inspect-js/object-inspect/compare/0.1.0...0.1.1) - 2013-07-26 + +### Commits + +- tests! [`4422fd9`](https://github.com/inspect-js/object-inspect/commit/4422fd95532c2745aa6c4f786f35f1090be29998) +- fix for ie<9, doesn't have hasOwnProperty [`6b7d611`](https://github.com/inspect-js/object-inspect/commit/6b7d61183050f6da801ea04473211da226482613) +- fix for all IEs: no f.name [`4e0c2f6`](https://github.com/inspect-js/object-inspect/commit/4e0c2f6dfd01c306d067d7163319acc97c94ee50) +- badges [`5ed0d88`](https://github.com/inspect-js/object-inspect/commit/5ed0d88e4e407f9cb327fa4a146c17921f9680f3) + +## [0.1.0](https://github.com/inspect-js/object-inspect/compare/0.0.0...0.1.0) - 2013-07-26 + +### Commits + +- [Function] for functions [`ad5c485`](https://github.com/inspect-js/object-inspect/commit/ad5c485098fc83352cb540a60b2548ca56820e0b) + +## 0.0.0 - 2013-07-26 + +### Commits + +- working browser example [`34be6b6`](https://github.com/inspect-js/object-inspect/commit/34be6b6548f9ce92bdc3c27572857ba0c4a1218d) +- package.json etc [`cad51f2`](https://github.com/inspect-js/object-inspect/commit/cad51f23fc6bcf1a456ed6abe16088256c2f632f) +- docs complete [`b80cce2`](https://github.com/inspect-js/object-inspect/commit/b80cce2490c4e7183a9ee11ea89071f0abec4446) +- circular example [`4b4a7b9`](https://github.com/inspect-js/object-inspect/commit/4b4a7b92209e4e6b4630976cb6bcd17d14165a59) +- string rep [`7afb479`](https://github.com/inspect-js/object-inspect/commit/7afb479baa798d27f09e0a178b72ea327f60f5c8) diff --git a/node_modules/object-inspect/LICENSE b/node_modules/object-inspect/LICENSE new file mode 100644 index 0000000..ca64cc1 --- /dev/null +++ b/node_modules/object-inspect/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2013 James Halliday + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal 
+in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/object-inspect/example/all.js b/node_modules/object-inspect/example/all.js new file mode 100644 index 0000000..2f3355c --- /dev/null +++ b/node_modules/object-inspect/example/all.js @@ -0,0 +1,23 @@ +'use strict'; + +var inspect = require('../'); +var Buffer = require('safer-buffer').Buffer; + +var holes = ['a', 'b']; +holes[4] = 'e'; +holes[6] = 'g'; + +var obj = { + a: 1, + b: [3, 4, undefined, null], + c: undefined, + d: null, + e: { + regex: /^x/i, + buf: Buffer.from('abc'), + holes: holes + }, + now: new Date() +}; +obj.self = obj; +console.log(inspect(obj)); diff --git a/node_modules/object-inspect/example/circular.js b/node_modules/object-inspect/example/circular.js new file mode 100644 index 0000000..487a7c1 --- /dev/null +++ b/node_modules/object-inspect/example/circular.js @@ -0,0 +1,6 @@ +'use strict'; + +var inspect = require('../'); +var obj = { a: 1, b: [3, 4] }; +obj.c = obj; +console.log(inspect(obj)); diff --git a/node_modules/object-inspect/example/fn.js b/node_modules/object-inspect/example/fn.js new file mode 100644 index 0000000..9b5db8d --- /dev/null +++ b/node_modules/object-inspect/example/fn.js @@ -0,0 +1,5 @@ +'use strict'; + +var inspect = require('../'); +var obj = [1, 2, function f(n) { return n + 5; }, 4]; +console.log(inspect(obj)); diff --git a/node_modules/object-inspect/example/inspect.js b/node_modules/object-inspect/example/inspect.js new file mode 100644 index 0000000..e2df7c9 --- /dev/null +++ b/node_modules/object-inspect/example/inspect.js @@ -0,0 +1,10 @@ +'use strict'; + +/* eslint-env browser */ +var inspect = require('../'); + +var d = document.createElement('div'); +d.setAttribute('id', 'beep'); +d.innerHTML = 'woooiiiii'; + +console.log(inspect([d, { a: 3, b: 4, c: [5, 6, [7, [8, [9]]]] }])); diff --git a/node_modules/object-inspect/index.js b/node_modules/object-inspect/index.js new file mode 100644 index 0000000..7ab98a6 --- /dev/null +++ b/node_modules/object-inspect/index.js @@ -0,0 +1,512 @@ +var hasMap = typeof Map === 'function' && Map.prototype; +var mapSizeDescriptor = Object.getOwnPropertyDescriptor && hasMap ? Object.getOwnPropertyDescriptor(Map.prototype, 'size') : null; +var mapSize = hasMap && mapSizeDescriptor && typeof mapSizeDescriptor.get === 'function' ? mapSizeDescriptor.get : null; +var mapForEach = hasMap && Map.prototype.forEach; +var hasSet = typeof Set === 'function' && Set.prototype; +var setSizeDescriptor = Object.getOwnPropertyDescriptor && hasSet ? Object.getOwnPropertyDescriptor(Set.prototype, 'size') : null; +var setSize = hasSet && setSizeDescriptor && typeof setSizeDescriptor.get === 'function' ? 
setSizeDescriptor.get : null; +var setForEach = hasSet && Set.prototype.forEach; +var hasWeakMap = typeof WeakMap === 'function' && WeakMap.prototype; +var weakMapHas = hasWeakMap ? WeakMap.prototype.has : null; +var hasWeakSet = typeof WeakSet === 'function' && WeakSet.prototype; +var weakSetHas = hasWeakSet ? WeakSet.prototype.has : null; +var hasWeakRef = typeof WeakRef === 'function' && WeakRef.prototype; +var weakRefDeref = hasWeakRef ? WeakRef.prototype.deref : null; +var booleanValueOf = Boolean.prototype.valueOf; +var objectToString = Object.prototype.toString; +var functionToString = Function.prototype.toString; +var $match = String.prototype.match; +var $slice = String.prototype.slice; +var $replace = String.prototype.replace; +var $toUpperCase = String.prototype.toUpperCase; +var $toLowerCase = String.prototype.toLowerCase; +var $test = RegExp.prototype.test; +var $concat = Array.prototype.concat; +var $join = Array.prototype.join; +var $arrSlice = Array.prototype.slice; +var $floor = Math.floor; +var bigIntValueOf = typeof BigInt === 'function' ? BigInt.prototype.valueOf : null; +var gOPS = Object.getOwnPropertySymbols; +var symToString = typeof Symbol === 'function' && typeof Symbol.iterator === 'symbol' ? Symbol.prototype.toString : null; +var hasShammedSymbols = typeof Symbol === 'function' && typeof Symbol.iterator === 'object'; +// ie, `has-tostringtag/shams +var toStringTag = typeof Symbol === 'function' && Symbol.toStringTag && (typeof Symbol.toStringTag === hasShammedSymbols ? 'object' : 'symbol') + ? Symbol.toStringTag + : null; +var isEnumerable = Object.prototype.propertyIsEnumerable; + +var gPO = (typeof Reflect === 'function' ? Reflect.getPrototypeOf : Object.getPrototypeOf) || ( + [].__proto__ === Array.prototype // eslint-disable-line no-proto + ? function (O) { + return O.__proto__; // eslint-disable-line no-proto + } + : null +); + +function addNumericSeparator(num, str) { + if ( + num === Infinity + || num === -Infinity + || num !== num + || (num && num > -1000 && num < 1000) + || $test.call(/e/, str) + ) { + return str; + } + var sepRegex = /[0-9](?=(?:[0-9]{3})+(?![0-9]))/g; + if (typeof num === 'number') { + var int = num < 0 ? -$floor(-num) : $floor(num); // trunc(num) + if (int !== num) { + var intStr = String(int); + var dec = $slice.call(str, intStr.length + 1); + return $replace.call(intStr, sepRegex, '$&_') + '.' + $replace.call($replace.call(dec, /([0-9]{3})/g, '$&_'), /_$/, ''); + } + } + return $replace.call(str, sepRegex, '$&_'); +} + +var utilInspect = require('./util.inspect'); +var inspectCustom = utilInspect.custom; +var inspectSymbol = isSymbol(inspectCustom) ? inspectCustom : null; + +module.exports = function inspect_(obj, options, depth, seen) { + var opts = options || {}; + + if (has(opts, 'quoteStyle') && (opts.quoteStyle !== 'single' && opts.quoteStyle !== 'double')) { + throw new TypeError('option "quoteStyle" must be "single" or "double"'); + } + if ( + has(opts, 'maxStringLength') && (typeof opts.maxStringLength === 'number' + ? opts.maxStringLength < 0 && opts.maxStringLength !== Infinity + : opts.maxStringLength !== null + ) + ) { + throw new TypeError('option "maxStringLength", if provided, must be a positive integer, Infinity, or `null`'); + } + var customInspect = has(opts, 'customInspect') ? 
opts.customInspect : true; + if (typeof customInspect !== 'boolean' && customInspect !== 'symbol') { + throw new TypeError('option "customInspect", if provided, must be `true`, `false`, or `\'symbol\'`'); + } + + if ( + has(opts, 'indent') + && opts.indent !== null + && opts.indent !== '\t' + && !(parseInt(opts.indent, 10) === opts.indent && opts.indent > 0) + ) { + throw new TypeError('option "indent" must be "\\t", an integer > 0, or `null`'); + } + if (has(opts, 'numericSeparator') && typeof opts.numericSeparator !== 'boolean') { + throw new TypeError('option "numericSeparator", if provided, must be `true` or `false`'); + } + var numericSeparator = opts.numericSeparator; + + if (typeof obj === 'undefined') { + return 'undefined'; + } + if (obj === null) { + return 'null'; + } + if (typeof obj === 'boolean') { + return obj ? 'true' : 'false'; + } + + if (typeof obj === 'string') { + return inspectString(obj, opts); + } + if (typeof obj === 'number') { + if (obj === 0) { + return Infinity / obj > 0 ? '0' : '-0'; + } + var str = String(obj); + return numericSeparator ? addNumericSeparator(obj, str) : str; + } + if (typeof obj === 'bigint') { + var bigIntStr = String(obj) + 'n'; + return numericSeparator ? addNumericSeparator(obj, bigIntStr) : bigIntStr; + } + + var maxDepth = typeof opts.depth === 'undefined' ? 5 : opts.depth; + if (typeof depth === 'undefined') { depth = 0; } + if (depth >= maxDepth && maxDepth > 0 && typeof obj === 'object') { + return isArray(obj) ? '[Array]' : '[Object]'; + } + + var indent = getIndent(opts, depth); + + if (typeof seen === 'undefined') { + seen = []; + } else if (indexOf(seen, obj) >= 0) { + return '[Circular]'; + } + + function inspect(value, from, noIndent) { + if (from) { + seen = $arrSlice.call(seen); + seen.push(from); + } + if (noIndent) { + var newOpts = { + depth: opts.depth + }; + if (has(opts, 'quoteStyle')) { + newOpts.quoteStyle = opts.quoteStyle; + } + return inspect_(value, newOpts, depth + 1, seen); + } + return inspect_(value, opts, depth + 1, seen); + } + + if (typeof obj === 'function' && !isRegExp(obj)) { // in older engines, regexes are callable + var name = nameOf(obj); + var keys = arrObjKeys(obj, inspect); + return '[Function' + (name ? ': ' + name : ' (anonymous)') + ']' + (keys.length > 0 ? ' { ' + $join.call(keys, ', ') + ' }' : ''); + } + if (isSymbol(obj)) { + var symString = hasShammedSymbols ? $replace.call(String(obj), /^(Symbol\(.*\))_[^)]*$/, '$1') : symToString.call(obj); + return typeof obj === 'object' && !hasShammedSymbols ? 
markBoxed(symString) : symString; + } + if (isElement(obj)) { + var s = '<' + $toLowerCase.call(String(obj.nodeName)); + var attrs = obj.attributes || []; + for (var i = 0; i < attrs.length; i++) { + s += ' ' + attrs[i].name + '=' + wrapQuotes(quote(attrs[i].value), 'double', opts); + } + s += '>'; + if (obj.childNodes && obj.childNodes.length) { s += '...'; } + s += ''; + return s; + } + if (isArray(obj)) { + if (obj.length === 0) { return '[]'; } + var xs = arrObjKeys(obj, inspect); + if (indent && !singleLineValues(xs)) { + return '[' + indentedJoin(xs, indent) + ']'; + } + return '[ ' + $join.call(xs, ', ') + ' ]'; + } + if (isError(obj)) { + var parts = arrObjKeys(obj, inspect); + if (!('cause' in Error.prototype) && 'cause' in obj && !isEnumerable.call(obj, 'cause')) { + return '{ [' + String(obj) + '] ' + $join.call($concat.call('[cause]: ' + inspect(obj.cause), parts), ', ') + ' }'; + } + if (parts.length === 0) { return '[' + String(obj) + ']'; } + return '{ [' + String(obj) + '] ' + $join.call(parts, ', ') + ' }'; + } + if (typeof obj === 'object' && customInspect) { + if (inspectSymbol && typeof obj[inspectSymbol] === 'function' && utilInspect) { + return utilInspect(obj, { depth: maxDepth - depth }); + } else if (customInspect !== 'symbol' && typeof obj.inspect === 'function') { + return obj.inspect(); + } + } + if (isMap(obj)) { + var mapParts = []; + mapForEach.call(obj, function (value, key) { + mapParts.push(inspect(key, obj, true) + ' => ' + inspect(value, obj)); + }); + return collectionOf('Map', mapSize.call(obj), mapParts, indent); + } + if (isSet(obj)) { + var setParts = []; + setForEach.call(obj, function (value) { + setParts.push(inspect(value, obj)); + }); + return collectionOf('Set', setSize.call(obj), setParts, indent); + } + if (isWeakMap(obj)) { + return weakCollectionOf('WeakMap'); + } + if (isWeakSet(obj)) { + return weakCollectionOf('WeakSet'); + } + if (isWeakRef(obj)) { + return weakCollectionOf('WeakRef'); + } + if (isNumber(obj)) { + return markBoxed(inspect(Number(obj))); + } + if (isBigInt(obj)) { + return markBoxed(inspect(bigIntValueOf.call(obj))); + } + if (isBoolean(obj)) { + return markBoxed(booleanValueOf.call(obj)); + } + if (isString(obj)) { + return markBoxed(inspect(String(obj))); + } + if (!isDate(obj) && !isRegExp(obj)) { + var ys = arrObjKeys(obj, inspect); + var isPlainObject = gPO ? gPO(obj) === Object.prototype : obj instanceof Object || obj.constructor === Object; + var protoTag = obj instanceof Object ? '' : 'null prototype'; + var stringTag = !isPlainObject && toStringTag && Object(obj) === obj && toStringTag in obj ? $slice.call(toStr(obj), 8, -1) : protoTag ? 'Object' : ''; + var constructorTag = isPlainObject || typeof obj.constructor !== 'function' ? '' : obj.constructor.name ? obj.constructor.name + ' ' : ''; + var tag = constructorTag + (stringTag || protoTag ? '[' + $join.call($concat.call([], stringTag || [], protoTag || []), ': ') + '] ' : ''); + if (ys.length === 0) { return tag + '{}'; } + if (indent) { + return tag + '{' + indentedJoin(ys, indent) + '}'; + } + return tag + '{ ' + $join.call(ys, ', ') + ' }'; + } + return String(obj); +}; + +function wrapQuotes(s, defaultStyle, opts) { + var quoteChar = (opts.quoteStyle || defaultStyle) === 'double' ? 
'"' : "'"; + return quoteChar + s + quoteChar; +} + +function quote(s) { + return $replace.call(String(s), /"/g, '"'); +} + +function isArray(obj) { return toStr(obj) === '[object Array]' && (!toStringTag || !(typeof obj === 'object' && toStringTag in obj)); } +function isDate(obj) { return toStr(obj) === '[object Date]' && (!toStringTag || !(typeof obj === 'object' && toStringTag in obj)); } +function isRegExp(obj) { return toStr(obj) === '[object RegExp]' && (!toStringTag || !(typeof obj === 'object' && toStringTag in obj)); } +function isError(obj) { return toStr(obj) === '[object Error]' && (!toStringTag || !(typeof obj === 'object' && toStringTag in obj)); } +function isString(obj) { return toStr(obj) === '[object String]' && (!toStringTag || !(typeof obj === 'object' && toStringTag in obj)); } +function isNumber(obj) { return toStr(obj) === '[object Number]' && (!toStringTag || !(typeof obj === 'object' && toStringTag in obj)); } +function isBoolean(obj) { return toStr(obj) === '[object Boolean]' && (!toStringTag || !(typeof obj === 'object' && toStringTag in obj)); } + +// Symbol and BigInt do have Symbol.toStringTag by spec, so that can't be used to eliminate false positives +function isSymbol(obj) { + if (hasShammedSymbols) { + return obj && typeof obj === 'object' && obj instanceof Symbol; + } + if (typeof obj === 'symbol') { + return true; + } + if (!obj || typeof obj !== 'object' || !symToString) { + return false; + } + try { + symToString.call(obj); + return true; + } catch (e) {} + return false; +} + +function isBigInt(obj) { + if (!obj || typeof obj !== 'object' || !bigIntValueOf) { + return false; + } + try { + bigIntValueOf.call(obj); + return true; + } catch (e) {} + return false; +} + +var hasOwn = Object.prototype.hasOwnProperty || function (key) { return key in this; }; +function has(obj, key) { + return hasOwn.call(obj, key); +} + +function toStr(obj) { + return objectToString.call(obj); +} + +function nameOf(f) { + if (f.name) { return f.name; } + var m = $match.call(functionToString.call(f), /^function\s*([\w$]+)/); + if (m) { return m[1]; } + return null; +} + +function indexOf(xs, x) { + if (xs.indexOf) { return xs.indexOf(x); } + for (var i = 0, l = xs.length; i < l; i++) { + if (xs[i] === x) { return i; } + } + return -1; +} + +function isMap(x) { + if (!mapSize || !x || typeof x !== 'object') { + return false; + } + try { + mapSize.call(x); + try { + setSize.call(x); + } catch (s) { + return true; + } + return x instanceof Map; // core-js workaround, pre-v2.5.0 + } catch (e) {} + return false; +} + +function isWeakMap(x) { + if (!weakMapHas || !x || typeof x !== 'object') { + return false; + } + try { + weakMapHas.call(x, weakMapHas); + try { + weakSetHas.call(x, weakSetHas); + } catch (s) { + return true; + } + return x instanceof WeakMap; // core-js workaround, pre-v2.5.0 + } catch (e) {} + return false; +} + +function isWeakRef(x) { + if (!weakRefDeref || !x || typeof x !== 'object') { + return false; + } + try { + weakRefDeref.call(x); + return true; + } catch (e) {} + return false; +} + +function isSet(x) { + if (!setSize || !x || typeof x !== 'object') { + return false; + } + try { + setSize.call(x); + try { + mapSize.call(x); + } catch (m) { + return true; + } + return x instanceof Set; // core-js workaround, pre-v2.5.0 + } catch (e) {} + return false; +} + +function isWeakSet(x) { + if (!weakSetHas || !x || typeof x !== 'object') { + return false; + } + try { + weakSetHas.call(x, weakSetHas); + try { + weakMapHas.call(x, weakMapHas); + } catch (s) { + 
return true; + } + return x instanceof WeakSet; // core-js workaround, pre-v2.5.0 + } catch (e) {} + return false; +} + +function isElement(x) { + if (!x || typeof x !== 'object') { return false; } + if (typeof HTMLElement !== 'undefined' && x instanceof HTMLElement) { + return true; + } + return typeof x.nodeName === 'string' && typeof x.getAttribute === 'function'; +} + +function inspectString(str, opts) { + if (str.length > opts.maxStringLength) { + var remaining = str.length - opts.maxStringLength; + var trailer = '... ' + remaining + ' more character' + (remaining > 1 ? 's' : ''); + return inspectString($slice.call(str, 0, opts.maxStringLength), opts) + trailer; + } + // eslint-disable-next-line no-control-regex + var s = $replace.call($replace.call(str, /(['\\])/g, '\\$1'), /[\x00-\x1f]/g, lowbyte); + return wrapQuotes(s, 'single', opts); +} + +function lowbyte(c) { + var n = c.charCodeAt(0); + var x = { + 8: 'b', + 9: 't', + 10: 'n', + 12: 'f', + 13: 'r' + }[n]; + if (x) { return '\\' + x; } + return '\\x' + (n < 0x10 ? '0' : '') + $toUpperCase.call(n.toString(16)); +} + +function markBoxed(str) { + return 'Object(' + str + ')'; +} + +function weakCollectionOf(type) { + return type + ' { ? }'; +} + +function collectionOf(type, size, entries, indent) { + var joinedEntries = indent ? indentedJoin(entries, indent) : $join.call(entries, ', '); + return type + ' (' + size + ') {' + joinedEntries + '}'; +} + +function singleLineValues(xs) { + for (var i = 0; i < xs.length; i++) { + if (indexOf(xs[i], '\n') >= 0) { + return false; + } + } + return true; +} + +function getIndent(opts, depth) { + var baseIndent; + if (opts.indent === '\t') { + baseIndent = '\t'; + } else if (typeof opts.indent === 'number' && opts.indent > 0) { + baseIndent = $join.call(Array(opts.indent + 1), ' '); + } else { + return null; + } + return { + base: baseIndent, + prev: $join.call(Array(depth + 1), baseIndent) + }; +} + +function indentedJoin(xs, indent) { + if (xs.length === 0) { return ''; } + var lineJoiner = '\n' + indent.prev + indent.base; + return lineJoiner + $join.call(xs, ',' + lineJoiner) + '\n' + indent.prev; +} + +function arrObjKeys(obj, inspect) { + var isArr = isArray(obj); + var xs = []; + if (isArr) { + xs.length = obj.length; + for (var i = 0; i < obj.length; i++) { + xs[i] = has(obj, i) ? inspect(obj[i], obj) : ''; + } + } + var syms = typeof gOPS === 'function' ? 
gOPS(obj) : []; + var symMap; + if (hasShammedSymbols) { + symMap = {}; + for (var k = 0; k < syms.length; k++) { + symMap['$' + syms[k]] = syms[k]; + } + } + + for (var key in obj) { // eslint-disable-line no-restricted-syntax + if (!has(obj, key)) { continue; } // eslint-disable-line no-restricted-syntax, no-continue + if (isArr && String(Number(key)) === key && key < obj.length) { continue; } // eslint-disable-line no-restricted-syntax, no-continue + if (hasShammedSymbols && symMap['$' + key] instanceof Symbol) { + // this is to prevent shammed Symbols, which are stored as strings, from being included in the string key section + continue; // eslint-disable-line no-restricted-syntax, no-continue + } else if ($test.call(/[^\w$]/, key)) { + xs.push(inspect(key, obj) + ': ' + inspect(obj[key], obj)); + } else { + xs.push(key + ': ' + inspect(obj[key], obj)); + } + } + if (typeof gOPS === 'function') { + for (var j = 0; j < syms.length; j++) { + if (isEnumerable.call(obj, syms[j])) { + xs.push('[' + inspect(syms[j]) + ']: ' + inspect(obj[syms[j]], obj)); + } + } + } + return xs; +} diff --git a/node_modules/object-inspect/package-support.json b/node_modules/object-inspect/package-support.json new file mode 100644 index 0000000..5cc12d0 --- /dev/null +++ b/node_modules/object-inspect/package-support.json @@ -0,0 +1,20 @@ +{ + "versions": [ + { + "version": "*", + "target": { + "node": "all" + }, + "response": { + "type": "time-permitting" + }, + "backing": { + "npm-funding": true, + "donations": [ + "https://github.com/ljharb", + "https://tidelift.com/funding/github/npm/object-inspect" + ] + } + } + ] +} diff --git a/node_modules/object-inspect/package.json b/node_modules/object-inspect/package.json new file mode 100644 index 0000000..7e0b87c --- /dev/null +++ b/node_modules/object-inspect/package.json @@ -0,0 +1,94 @@ +{ + "name": "object-inspect", + "version": "1.12.2", + "description": "string representations of objects in node and the browser", + "main": "index.js", + "sideEffects": false, + "devDependencies": { + "@ljharb/eslint-config": "^21.0.0", + "aud": "^2.0.0", + "auto-changelog": "^2.4.0", + "core-js": "^2.6.12", + "error-cause": "^1.0.4", + "es-value-fixtures": "^1.4.1", + "eslint": "=8.8.0", + "for-each": "^0.3.3", + "functions-have-names": "^1.2.3", + "has-tostringtag": "^1.0.0", + "make-arrow-function": "^1.2.0", + "mock-property": "^1.0.0", + "npmignore": "^0.3.0", + "nyc": "^10.3.2", + "safe-publish-latest": "^2.0.0", + "string.prototype.repeat": "^1.0.0", + "tape": "^5.5.3" + }, + "scripts": { + "prepack": "npmignore --auto --commentLines=autogenerated", + "prepublish": "not-in-publish || npm run prepublishOnly", + "prepublishOnly": "safe-publish-latest", + "pretest": "npm run lint", + "lint": "eslint .", + "test": "npm run tests-only && npm run test:corejs", + "tests-only": "nyc tape 'test/*.js'", + "test:corejs": "nyc tape test-core-js.js 'test/*.js'", + "posttest": "npx aud --production", + "version": "auto-changelog && git add CHANGELOG.md", + "postversion": "auto-changelog && git add CHANGELOG.md && git commit --no-edit --amend && git tag -f \"v$(node -e \"console.log(require('./package.json').version)\")\"" + }, + "testling": { + "files": [ + "test/*.js", + "test/browser/*.js" + ], + "browsers": [ + "ie/6..latest", + "chrome/latest", + "firefox/latest", + "safari/latest", + "opera/latest", + "iphone/latest", + "ipad/latest", + "android/latest" + ] + }, + "repository": { + "type": "git", + "url": "git://github.com/inspect-js/object-inspect.git" + }, + "homepage": 
"https://github.com/inspect-js/object-inspect", + "keywords": [ + "inspect", + "util.inspect", + "object", + "stringify", + "pretty" + ], + "author": { + "name": "James Halliday", + "email": "mail@substack.net", + "url": "http://substack.net" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + }, + "license": "MIT", + "browser": { + "./util.inspect.js": false + }, + "auto-changelog": { + "output": "CHANGELOG.md", + "template": "keepachangelog", + "unreleased": false, + "commitLimit": false, + "backfillLimit": false, + "hideCredit": true + }, + "publishConfig": { + "ignore": [ + ".github/workflows", + "./test-core-js.js" + ] + }, + "support": true +} diff --git a/node_modules/object-inspect/readme.markdown b/node_modules/object-inspect/readme.markdown new file mode 100644 index 0000000..9ff6bec --- /dev/null +++ b/node_modules/object-inspect/readme.markdown @@ -0,0 +1,86 @@ +# object-inspect [![Version Badge][2]][1] + +string representations of objects in node and the browser + +[![github actions][actions-image]][actions-url] +[![coverage][codecov-image]][codecov-url] +[![dependency status][5]][6] +[![dev dependency status][7]][8] +[![License][license-image]][license-url] +[![Downloads][downloads-image]][downloads-url] + +[![npm badge][11]][1] + +# example + +## circular + +``` js +var inspect = require('object-inspect'); +var obj = { a: 1, b: [3,4] }; +obj.c = obj; +console.log(inspect(obj)); +``` + +## dom element + +``` js +var inspect = require('object-inspect'); + +var d = document.createElement('div'); +d.setAttribute('id', 'beep'); +d.innerHTML = 'woooiiiii'; + +console.log(inspect([ d, { a: 3, b : 4, c: [5,6,[7,[8,[9]]]] } ])); +``` + +output: + +``` +[
<div id="beep">...</div>
, { a: 3, b: 4, c: [ 5, 6, [ 7, [ 8, [ ... ] ] ] ] } ] +``` + +# methods + +``` js +var inspect = require('object-inspect') +``` + +## var s = inspect(obj, opts={}) + +Return a string `s` with the string representation of `obj` up to a depth of `opts.depth`. + +Additional options: + - `quoteStyle`: must be "single" or "double", if present. Default `'single'` for strings, `'double'` for HTML elements. + - `maxStringLength`: must be `0`, a positive integer, `Infinity`, or `null`, if present. Default `Infinity`. + - `customInspect`: When `true`, a custom inspect method function will be invoked (either undere the `util.inspect.custom` symbol, or the `inspect` property). When the string `'symbol'`, only the symbol method will be invoked. Default `true`. + - `indent`: must be "\t", `null`, or a positive integer. Default `null`. + - `numericSeparator`: must be a boolean, if present. Default `false`. If `true`, all numbers will be printed with numeric separators (eg, `1234.5678` will be printed as `'1_234.567_8'`) + +# install + +With [npm](https://npmjs.org) do: + +``` +npm install object-inspect +``` + +# license + +MIT + +[1]: https://npmjs.org/package/object-inspect +[2]: https://versionbadg.es/inspect-js/object-inspect.svg +[5]: https://david-dm.org/inspect-js/object-inspect.svg +[6]: https://david-dm.org/inspect-js/object-inspect +[7]: https://david-dm.org/inspect-js/object-inspect/dev-status.svg +[8]: https://david-dm.org/inspect-js/object-inspect#info=devDependencies +[11]: https://nodei.co/npm/object-inspect.png?downloads=true&stars=true +[license-image]: https://img.shields.io/npm/l/object-inspect.svg +[license-url]: LICENSE +[downloads-image]: https://img.shields.io/npm/dm/object-inspect.svg +[downloads-url]: https://npm-stat.com/charts.html?package=object-inspect +[codecov-image]: https://codecov.io/gh/inspect-js/object-inspect/branch/main/graphs/badge.svg +[codecov-url]: https://app.codecov.io/gh/inspect-js/object-inspect/ +[actions-image]: https://img.shields.io/endpoint?url=https://github-actions-badge-u3jn4tfpocch.runkit.sh/inspect-js/object-inspect +[actions-url]: https://github.com/inspect-js/object-inspect/actions diff --git a/node_modules/object-inspect/test-core-js.js b/node_modules/object-inspect/test-core-js.js new file mode 100644 index 0000000..e53c400 --- /dev/null +++ b/node_modules/object-inspect/test-core-js.js @@ -0,0 +1,26 @@ +'use strict'; + +require('core-js'); + +var inspect = require('./'); +var test = require('tape'); + +test('Maps', function (t) { + t.equal(inspect(new Map([[1, 2]])), 'Map (1) {1 => 2}'); + t.end(); +}); + +test('WeakMaps', function (t) { + t.equal(inspect(new WeakMap([[{}, 2]])), 'WeakMap { ? }'); + t.end(); +}); + +test('Sets', function (t) { + t.equal(inspect(new Set([[1, 2]])), 'Set (1) {[ 1, 2 ]}'); + t.end(); +}); + +test('WeakSets', function (t) { + t.equal(inspect(new WeakSet([[1, 2]])), 'WeakSet { ? 
}'); + t.end(); +}); diff --git a/node_modules/object-inspect/test/bigint.js b/node_modules/object-inspect/test/bigint.js new file mode 100644 index 0000000..4ecc31d --- /dev/null +++ b/node_modules/object-inspect/test/bigint.js @@ -0,0 +1,58 @@ +'use strict'; + +var inspect = require('../'); +var test = require('tape'); +var hasToStringTag = require('has-tostringtag/shams')(); + +test('bigint', { skip: typeof BigInt === 'undefined' }, function (t) { + t.test('primitives', function (st) { + st.plan(3); + + st.equal(inspect(BigInt(-256)), '-256n'); + st.equal(inspect(BigInt(0)), '0n'); + st.equal(inspect(BigInt(256)), '256n'); + }); + + t.test('objects', function (st) { + st.plan(3); + + st.equal(inspect(Object(BigInt(-256))), 'Object(-256n)'); + st.equal(inspect(Object(BigInt(0))), 'Object(0n)'); + st.equal(inspect(Object(BigInt(256))), 'Object(256n)'); + }); + + t.test('syntactic primitives', function (st) { + st.plan(3); + + /* eslint-disable no-new-func */ + st.equal(inspect(Function('return -256n')()), '-256n'); + st.equal(inspect(Function('return 0n')()), '0n'); + st.equal(inspect(Function('return 256n')()), '256n'); + }); + + t.test('toStringTag', { skip: !hasToStringTag }, function (st) { + st.plan(1); + + var faker = {}; + faker[Symbol.toStringTag] = 'BigInt'; + st.equal( + inspect(faker), + '{ [Symbol(Symbol.toStringTag)]: \'BigInt\' }', + 'object lying about being a BigInt inspects as an object' + ); + }); + + t.test('numericSeparator', function (st) { + st.equal(inspect(BigInt(0), { numericSeparator: false }), '0n', '0n, numericSeparator false'); + st.equal(inspect(BigInt(0), { numericSeparator: true }), '0n', '0n, numericSeparator true'); + + st.equal(inspect(BigInt(1234), { numericSeparator: false }), '1234n', '1234n, numericSeparator false'); + st.equal(inspect(BigInt(1234), { numericSeparator: true }), '1_234n', '1234n, numericSeparator true'); + st.equal(inspect(BigInt(-1234), { numericSeparator: false }), '-1234n', '1234n, numericSeparator false'); + st.equal(inspect(BigInt(-1234), { numericSeparator: true }), '-1_234n', '1234n, numericSeparator true'); + + st.end(); + }); + + t.end(); +}); diff --git a/node_modules/object-inspect/test/browser/dom.js b/node_modules/object-inspect/test/browser/dom.js new file mode 100644 index 0000000..210c0b2 --- /dev/null +++ b/node_modules/object-inspect/test/browser/dom.js @@ -0,0 +1,15 @@ +var inspect = require('../../'); +var test = require('tape'); + +test('dom element', function (t) { + t.plan(1); + + var d = document.createElement('div'); + d.setAttribute('id', 'beep'); + d.innerHTML = 'woooiiiii'; + + t.equal( + inspect([d, { a: 3, b: 4, c: [5, 6, [7, [8, [9]]]] }]), + '[
<div id="beep">...</div>
, { a: 3, b: 4, c: [ 5, 6, [ 7, [ 8, [Object] ] ] ] } ]' + ); +}); diff --git a/node_modules/object-inspect/test/circular.js b/node_modules/object-inspect/test/circular.js new file mode 100644 index 0000000..5df4233 --- /dev/null +++ b/node_modules/object-inspect/test/circular.js @@ -0,0 +1,16 @@ +var inspect = require('../'); +var test = require('tape'); + +test('circular', function (t) { + t.plan(2); + var obj = { a: 1, b: [3, 4] }; + obj.c = obj; + t.equal(inspect(obj), '{ a: 1, b: [ 3, 4 ], c: [Circular] }'); + + var double = {}; + double.a = [double]; + double.b = {}; + double.b.inner = double.b; + double.b.obj = double; + t.equal(inspect(double), '{ a: [ [Circular] ], b: { inner: [Circular], obj: [Circular] } }'); +}); diff --git a/node_modules/object-inspect/test/deep.js b/node_modules/object-inspect/test/deep.js new file mode 100644 index 0000000..99ce32a --- /dev/null +++ b/node_modules/object-inspect/test/deep.js @@ -0,0 +1,12 @@ +var inspect = require('../'); +var test = require('tape'); + +test('deep', function (t) { + t.plan(4); + var obj = [[[[[[500]]]]]]; + t.equal(inspect(obj), '[ [ [ [ [ [Array] ] ] ] ] ]'); + t.equal(inspect(obj, { depth: 4 }), '[ [ [ [ [Array] ] ] ] ]'); + t.equal(inspect(obj, { depth: 2 }), '[ [ [Array] ] ]'); + + t.equal(inspect([[[{ a: 1 }]]], { depth: 3 }), '[ [ [ [Object] ] ] ]'); +}); diff --git a/node_modules/object-inspect/test/element.js b/node_modules/object-inspect/test/element.js new file mode 100644 index 0000000..47fa9e2 --- /dev/null +++ b/node_modules/object-inspect/test/element.js @@ -0,0 +1,53 @@ +var inspect = require('../'); +var test = require('tape'); + +test('element', function (t) { + t.plan(3); + var elem = { + nodeName: 'div', + attributes: [{ name: 'class', value: 'row' }], + getAttribute: function (key) { return key; }, + childNodes: [] + }; + var obj = [1, elem, 3]; + t.deepEqual(inspect(obj), '[ 1,
<div class="row">, 3 ]'); + t.deepEqual(inspect(obj, { quoteStyle: 'single' }), "[ 1,
<div class='row'>, 3 ]"); + t.deepEqual(inspect(obj, { quoteStyle: 'double' }), '[ 1,
<div class="row">, 3 ]'); +}); + +test('element no attr', function (t) { + t.plan(1); + var elem = { + nodeName: 'div', + getAttribute: function (key) { return key; }, + childNodes: [] + }; + var obj = [1, elem, 3]; + t.deepEqual(inspect(obj), '[ 1,
<div>, 3 ]'); +}); + +test('element with contents', function (t) { + t.plan(1); + var elem = { + nodeName: 'div', + getAttribute: function (key) { return key; }, + childNodes: [{ nodeName: 'b' }] + }; + var obj = [1, elem, 3]; + t.deepEqual(inspect(obj), '[ 1,
<div>...</div>
, 3 ]'); +}); + +test('element instance', function (t) { + t.plan(1); + var h = global.HTMLElement; + global.HTMLElement = function (name, attr) { + this.nodeName = name; + this.attributes = attr; + }; + global.HTMLElement.prototype.getAttribute = function () {}; + + var elem = new global.HTMLElement('div', []); + var obj = [1, elem, 3]; + t.deepEqual(inspect(obj), '[ 1,
, 3 ]'); + global.HTMLElement = h; +}); diff --git a/node_modules/object-inspect/test/err.js b/node_modules/object-inspect/test/err.js new file mode 100644 index 0000000..cc1d884 --- /dev/null +++ b/node_modules/object-inspect/test/err.js @@ -0,0 +1,48 @@ +var test = require('tape'); +var ErrorWithCause = require('error-cause/Error'); + +var inspect = require('../'); + +test('type error', function (t) { + t.plan(1); + var aerr = new TypeError(); + aerr.foo = 555; + aerr.bar = [1, 2, 3]; + + var berr = new TypeError('tuv'); + berr.baz = 555; + + var cerr = new SyntaxError(); + cerr.message = 'whoa'; + cerr['a-b'] = 5; + + var withCause = new ErrorWithCause('foo', { cause: 'bar' }); + var withCausePlus = new ErrorWithCause('foo', { cause: 'bar' }); + withCausePlus.foo = 'bar'; + var withUndefinedCause = new ErrorWithCause('foo', { cause: undefined }); + var withEnumerableCause = new Error('foo'); + withEnumerableCause.cause = 'bar'; + + var obj = [ + new TypeError(), + new TypeError('xxx'), + aerr, + berr, + cerr, + withCause, + withCausePlus, + withUndefinedCause, + withEnumerableCause + ]; + t.equal(inspect(obj), '[ ' + [ + '[TypeError]', + '[TypeError: xxx]', + '{ [TypeError] foo: 555, bar: [ 1, 2, 3 ] }', + '{ [TypeError: tuv] baz: 555 }', + '{ [SyntaxError: whoa] message: \'whoa\', \'a-b\': 5 }', + 'cause' in Error.prototype ? '[Error: foo]' : '{ [Error: foo] [cause]: \'bar\' }', + '{ [Error: foo] ' + ('cause' in Error.prototype ? '' : '[cause]: \'bar\', ') + 'foo: \'bar\' }', + 'cause' in Error.prototype ? '[Error: foo]' : '{ [Error: foo] [cause]: undefined }', + '{ [Error: foo] cause: \'bar\' }' + ].join(', ') + ' ]'); +}); diff --git a/node_modules/object-inspect/test/fakes.js b/node_modules/object-inspect/test/fakes.js new file mode 100644 index 0000000..a65c08c --- /dev/null +++ b/node_modules/object-inspect/test/fakes.js @@ -0,0 +1,29 @@ +'use strict'; + +var inspect = require('../'); +var test = require('tape'); +var hasToStringTag = require('has-tostringtag/shams')(); +var forEach = require('for-each'); + +test('fakes', { skip: !hasToStringTag }, function (t) { + forEach([ + 'Array', + 'Boolean', + 'Date', + 'Error', + 'Number', + 'RegExp', + 'String' + ], function (expected) { + var faker = {}; + faker[Symbol.toStringTag] = expected; + + t.equal( + inspect(faker), + '{ [Symbol(Symbol.toStringTag)]: \'' + expected + '\' }', + 'faker masquerading as ' + expected + ' is not shown as one' + ); + }); + + t.end(); +}); diff --git a/node_modules/object-inspect/test/fn.js b/node_modules/object-inspect/test/fn.js new file mode 100644 index 0000000..de3ca62 --- /dev/null +++ b/node_modules/object-inspect/test/fn.js @@ -0,0 +1,76 @@ +var inspect = require('../'); +var test = require('tape'); +var arrow = require('make-arrow-function')(); +var functionsHaveConfigurableNames = require('functions-have-names').functionsHaveConfigurableNames(); + +test('function', function (t) { + t.plan(1); + var obj = [1, 2, function f(n) { return n; }, 4]; + t.equal(inspect(obj), '[ 1, 2, [Function: f], 4 ]'); +}); + +test('function name', function (t) { + t.plan(1); + var f = (function () { + return function () {}; + }()); + f.toString = function toStr() { return 'function xxx () {}'; }; + var obj = [1, 2, f, 4]; + t.equal(inspect(obj), '[ 1, 2, [Function (anonymous)] { toString: [Function: toStr] }, 4 ]'); +}); + +test('anon function', function (t) { + var f = (function () { + return function () {}; + }()); + var obj = [1, 2, f, 4]; + t.equal(inspect(obj), '[ 1, 2, [Function (anonymous)], 4 ]'); + + 
t.end(); +}); + +test('arrow function', { skip: !arrow }, function (t) { + t.equal(inspect(arrow), '[Function (anonymous)]'); + + t.end(); +}); + +test('truly nameless function', { skip: !arrow || !functionsHaveConfigurableNames }, function (t) { + function f() {} + Object.defineProperty(f, 'name', { value: false }); + t.equal(f.name, false); + t.equal( + inspect(f), + '[Function: f]', + 'named function with falsy `.name` does not hide its original name' + ); + + function g() {} + Object.defineProperty(g, 'name', { value: true }); + t.equal(g.name, true); + t.equal( + inspect(g), + '[Function: true]', + 'named function with truthy `.name` hides its original name' + ); + + var anon = function () {}; // eslint-disable-line func-style + Object.defineProperty(anon, 'name', { value: null }); + t.equal(anon.name, null); + t.equal( + inspect(anon), + '[Function (anonymous)]', + 'anon function with falsy `.name` does not hide its anonymity' + ); + + var anon2 = function () {}; // eslint-disable-line func-style + Object.defineProperty(anon2, 'name', { value: 1 }); + t.equal(anon2.name, 1); + t.equal( + inspect(anon2), + '[Function: 1]', + 'anon function with truthy `.name` hides its anonymity' + ); + + t.end(); +}); diff --git a/node_modules/object-inspect/test/has.js b/node_modules/object-inspect/test/has.js new file mode 100644 index 0000000..01800de --- /dev/null +++ b/node_modules/object-inspect/test/has.js @@ -0,0 +1,15 @@ +'use strict'; + +var inspect = require('../'); +var test = require('tape'); +var mockProperty = require('mock-property'); + +test('when Object#hasOwnProperty is deleted', function (t) { + t.plan(1); + var arr = [1, , 3]; // eslint-disable-line no-sparse-arrays + + t.teardown(mockProperty(Array.prototype, 1, { value: 2 })); // this is needed to account for "in" vs "hasOwnProperty" + t.teardown(mockProperty(Object.prototype, 'hasOwnProperty', { 'delete': true })); + + t.equal(inspect(arr), '[ 1, , 3 ]'); +}); diff --git a/node_modules/object-inspect/test/holes.js b/node_modules/object-inspect/test/holes.js new file mode 100644 index 0000000..87fc8c8 --- /dev/null +++ b/node_modules/object-inspect/test/holes.js @@ -0,0 +1,15 @@ +var test = require('tape'); +var inspect = require('../'); + +var xs = ['a', 'b']; +xs[5] = 'f'; +xs[7] = 'j'; +xs[8] = 'k'; + +test('holes', function (t) { + t.plan(1); + t.equal( + inspect(xs), + "[ 'a', 'b', , , , 'f', , 'j', 'k' ]" + ); +}); diff --git a/node_modules/object-inspect/test/indent-option.js b/node_modules/object-inspect/test/indent-option.js new file mode 100644 index 0000000..89d8fce --- /dev/null +++ b/node_modules/object-inspect/test/indent-option.js @@ -0,0 +1,271 @@ +var test = require('tape'); +var forEach = require('for-each'); + +var inspect = require('../'); + +test('bad indent options', function (t) { + forEach([ + undefined, + true, + false, + -1, + 1.2, + Infinity, + -Infinity, + NaN + ], function (indent) { + t['throws']( + function () { inspect('', { indent: indent }); }, + TypeError, + inspect(indent) + ' is invalid' + ); + }); + + t.end(); +}); + +test('simple object with indent', function (t) { + t.plan(2); + + var obj = { a: 1, b: 2 }; + + var expectedSpaces = [ + '{', + ' a: 1,', + ' b: 2', + '}' + ].join('\n'); + var expectedTabs = [ + '{', + ' a: 1,', + ' b: 2', + '}' + ].join('\n'); + + t.equal(inspect(obj, { indent: 2 }), expectedSpaces, 'two'); + t.equal(inspect(obj, { indent: '\t' }), expectedTabs, 'tabs'); +}); + +test('two deep object with indent', function (t) { + t.plan(2); + + var obj = { a: 1, b: { c: 3, 
d: 4 } }; + + var expectedSpaces = [ + '{', + ' a: 1,', + ' b: {', + ' c: 3,', + ' d: 4', + ' }', + '}' + ].join('\n'); + var expectedTabs = [ + '{', + ' a: 1,', + ' b: {', + ' c: 3,', + ' d: 4', + ' }', + '}' + ].join('\n'); + + t.equal(inspect(obj, { indent: 2 }), expectedSpaces, 'two'); + t.equal(inspect(obj, { indent: '\t' }), expectedTabs, 'tabs'); +}); + +test('simple array with all single line elements', function (t) { + t.plan(2); + + var obj = [1, 2, 3, 'asdf\nsdf']; + + var expected = '[ 1, 2, 3, \'asdf\\nsdf\' ]'; + + t.equal(inspect(obj, { indent: 2 }), expected, 'two'); + t.equal(inspect(obj, { indent: '\t' }), expected, 'tabs'); +}); + +test('array with complex elements', function (t) { + t.plan(2); + + var obj = [1, { a: 1, b: { c: 1 } }, 'asdf\nsdf']; + + var expectedSpaces = [ + '[', + ' 1,', + ' {', + ' a: 1,', + ' b: {', + ' c: 1', + ' }', + ' },', + ' \'asdf\\nsdf\'', + ']' + ].join('\n'); + var expectedTabs = [ + '[', + ' 1,', + ' {', + ' a: 1,', + ' b: {', + ' c: 1', + ' }', + ' },', + ' \'asdf\\nsdf\'', + ']' + ].join('\n'); + + t.equal(inspect(obj, { indent: 2 }), expectedSpaces, 'two'); + t.equal(inspect(obj, { indent: '\t' }), expectedTabs, 'tabs'); +}); + +test('values', function (t) { + t.plan(2); + var obj = [{}, [], { 'a-b': 5 }]; + + var expectedSpaces = [ + '[', + ' {},', + ' [],', + ' {', + ' \'a-b\': 5', + ' }', + ']' + ].join('\n'); + var expectedTabs = [ + '[', + ' {},', + ' [],', + ' {', + ' \'a-b\': 5', + ' }', + ']' + ].join('\n'); + + t.equal(inspect(obj, { indent: 2 }), expectedSpaces, 'two'); + t.equal(inspect(obj, { indent: '\t' }), expectedTabs, 'tabs'); +}); + +test('Map', { skip: typeof Map !== 'function' }, function (t) { + var map = new Map(); + map.set({ a: 1 }, ['b']); + map.set(3, NaN); + + var expectedStringSpaces = [ + 'Map (2) {', + ' { a: 1 } => [ \'b\' ],', + ' 3 => NaN', + '}' + ].join('\n'); + var expectedStringTabs = [ + 'Map (2) {', + ' { a: 1 } => [ \'b\' ],', + ' 3 => NaN', + '}' + ].join('\n'); + var expectedStringTabsDoubleQuotes = [ + 'Map (2) {', + ' { a: 1 } => [ "b" ],', + ' 3 => NaN', + '}' + ].join('\n'); + + t.equal( + inspect(map, { indent: 2 }), + expectedStringSpaces, + 'Map keys are not indented (two)' + ); + t.equal( + inspect(map, { indent: '\t' }), + expectedStringTabs, + 'Map keys are not indented (tabs)' + ); + t.equal( + inspect(map, { indent: '\t', quoteStyle: 'double' }), + expectedStringTabsDoubleQuotes, + 'Map keys are not indented (tabs + double quotes)' + ); + + t.equal(inspect(new Map(), { indent: 2 }), 'Map (0) {}', 'empty Map should show as empty (two)'); + t.equal(inspect(new Map(), { indent: '\t' }), 'Map (0) {}', 'empty Map should show as empty (tabs)'); + + var nestedMap = new Map(); + nestedMap.set(nestedMap, map); + var expectedNestedSpaces = [ + 'Map (1) {', + ' [Circular] => Map (2) {', + ' { a: 1 } => [ \'b\' ],', + ' 3 => NaN', + ' }', + '}' + ].join('\n'); + var expectedNestedTabs = [ + 'Map (1) {', + ' [Circular] => Map (2) {', + ' { a: 1 } => [ \'b\' ],', + ' 3 => NaN', + ' }', + '}' + ].join('\n'); + t.equal(inspect(nestedMap, { indent: 2 }), expectedNestedSpaces, 'Map containing a Map should work (two)'); + t.equal(inspect(nestedMap, { indent: '\t' }), expectedNestedTabs, 'Map containing a Map should work (tabs)'); + + t.end(); +}); + +test('Set', { skip: typeof Set !== 'function' }, function (t) { + var set = new Set(); + set.add({ a: 1 }); + set.add(['b']); + var expectedStringSpaces = [ + 'Set (2) {', + ' {', + ' a: 1', + ' },', + ' [ \'b\' ]', + '}' + ].join('\n'); + var 
expectedStringTabs = [ + 'Set (2) {', + ' {', + ' a: 1', + ' },', + ' [ \'b\' ]', + '}' + ].join('\n'); + t.equal(inspect(set, { indent: 2 }), expectedStringSpaces, 'new Set([{ a: 1 }, ["b"]]) should show size and contents (two)'); + t.equal(inspect(set, { indent: '\t' }), expectedStringTabs, 'new Set([{ a: 1 }, ["b"]]) should show size and contents (tabs)'); + + t.equal(inspect(new Set(), { indent: 2 }), 'Set (0) {}', 'empty Set should show as empty (two)'); + t.equal(inspect(new Set(), { indent: '\t' }), 'Set (0) {}', 'empty Set should show as empty (tabs)'); + + var nestedSet = new Set(); + nestedSet.add(set); + nestedSet.add(nestedSet); + var expectedNestedSpaces = [ + 'Set (2) {', + ' Set (2) {', + ' {', + ' a: 1', + ' },', + ' [ \'b\' ]', + ' },', + ' [Circular]', + '}' + ].join('\n'); + var expectedNestedTabs = [ + 'Set (2) {', + ' Set (2) {', + ' {', + ' a: 1', + ' },', + ' [ \'b\' ]', + ' },', + ' [Circular]', + '}' + ].join('\n'); + t.equal(inspect(nestedSet, { indent: 2 }), expectedNestedSpaces, 'Set containing a Set should work (two)'); + t.equal(inspect(nestedSet, { indent: '\t' }), expectedNestedTabs, 'Set containing a Set should work (tabs)'); + + t.end(); +}); diff --git a/node_modules/object-inspect/test/inspect.js b/node_modules/object-inspect/test/inspect.js new file mode 100644 index 0000000..1abf81b --- /dev/null +++ b/node_modules/object-inspect/test/inspect.js @@ -0,0 +1,139 @@ +var test = require('tape'); +var hasSymbols = require('has-symbols/shams')(); +var utilInspect = require('../util.inspect'); +var repeat = require('string.prototype.repeat'); + +var inspect = require('..'); + +test('inspect', function (t) { + t.plan(5); + + var obj = [{ inspect: function xyzInspect() { return '!XYZ¡'; } }, []]; + var stringResult = '[ !XYZ¡, [] ]'; + var falseResult = '[ { inspect: [Function: xyzInspect] }, [] ]'; + + t.equal(inspect(obj), stringResult); + t.equal(inspect(obj, { customInspect: true }), stringResult); + t.equal(inspect(obj, { customInspect: 'symbol' }), falseResult); + t.equal(inspect(obj, { customInspect: false }), falseResult); + t['throws']( + function () { inspect(obj, { customInspect: 'not a boolean or "symbol"' }); }, + TypeError, + '`customInspect` must be a boolean or the string "symbol"' + ); +}); + +test('inspect custom symbol', { skip: !hasSymbols || !utilInspect || !utilInspect.custom }, function (t) { + t.plan(4); + + var obj = { inspect: function stringInspect() { return 'string'; } }; + obj[utilInspect.custom] = function custom() { return 'symbol'; }; + + var symbolResult = '[ symbol, [] ]'; + var stringResult = '[ string, [] ]'; + var falseResult = '[ { inspect: [Function: stringInspect]' + (utilInspect.custom ? ', [' + inspect(utilInspect.custom) + ']: [Function: custom]' : '') + ' }, [] ]'; + + var symbolStringFallback = utilInspect.custom ? symbolResult : stringResult; + var symbolFalseFallback = utilInspect.custom ? 
symbolResult : falseResult; + + t.equal(inspect([obj, []]), symbolStringFallback); + t.equal(inspect([obj, []], { customInspect: true }), symbolStringFallback); + t.equal(inspect([obj, []], { customInspect: 'symbol' }), symbolFalseFallback); + t.equal(inspect([obj, []], { customInspect: false }), falseResult); +}); + +test('symbols', { skip: !hasSymbols }, function (t) { + t.plan(2); + + var obj = { a: 1 }; + obj[Symbol('test')] = 2; + obj[Symbol.iterator] = 3; + Object.defineProperty(obj, Symbol('non-enum'), { + enumerable: false, + value: 4 + }); + + if (typeof Symbol.iterator === 'symbol') { + t.equal(inspect(obj), '{ a: 1, [Symbol(test)]: 2, [Symbol(Symbol.iterator)]: 3 }', 'object with symbols'); + t.equal(inspect([obj, []]), '[ { a: 1, [Symbol(test)]: 2, [Symbol(Symbol.iterator)]: 3 }, [] ]', 'object with symbols in array'); + } else { + // symbol sham key ordering is unreliable + t.match( + inspect(obj), + /^(?:{ a: 1, \[Symbol\(test\)\]: 2, \[Symbol\(Symbol.iterator\)\]: 3 }|{ a: 1, \[Symbol\(Symbol.iterator\)\]: 3, \[Symbol\(test\)\]: 2 })$/, + 'object with symbols (nondeterministic symbol sham key ordering)' + ); + t.match( + inspect([obj, []]), + /^\[ (?:{ a: 1, \[Symbol\(test\)\]: 2, \[Symbol\(Symbol.iterator\)\]: 3 }|{ a: 1, \[Symbol\(Symbol.iterator\)\]: 3, \[Symbol\(test\)\]: 2 }), \[\] \]$/, + 'object with symbols in array (nondeterministic symbol sham key ordering)' + ); + } +}); + +test('maxStringLength', function (t) { + t['throws']( + function () { inspect('', { maxStringLength: -1 }); }, + TypeError, + 'maxStringLength must be >= 0, or Infinity, not negative' + ); + + var str = repeat('a', 1e8); + + t.equal( + inspect([str], { maxStringLength: 10 }), + '[ \'aaaaaaaaaa\'... 99999990 more characters ]', + 'maxStringLength option limits output' + ); + + t.equal( + inspect(['f'], { maxStringLength: null }), + '[ \'\'... 1 more character ]', + 'maxStringLength option accepts `null`' + ); + + t.equal( + inspect([str], { maxStringLength: Infinity }), + '[ \'' + str + '\' ]', + 'maxStringLength option accepts ∞' + ); + + t.end(); +}); + +test('inspect options', { skip: !utilInspect.custom }, function (t) { + var obj = {}; + obj[utilInspect.custom] = function () { + return JSON.stringify(arguments); + }; + t.equal( + inspect(obj), + utilInspect(obj, { depth: 5 }), + 'custom symbols will use node\'s inspect' + ); + t.equal( + inspect(obj, { depth: 2 }), + utilInspect(obj, { depth: 2 }), + 'a reduced depth will be passed to node\'s inspect' + ); + t.equal( + inspect({ d1: obj }, { depth: 3 }), + '{ d1: ' + utilInspect(obj, { depth: 2 }) + ' }', + 'deep objects will receive a reduced depth' + ); + t.equal( + inspect({ d1: obj }, { depth: 1 }), + '{ d1: [Object] }', + 'unlike nodejs inspect, customInspect will not be used once the depth is exceeded.' + ); + t.end(); +}); + +test('inspect URL', { skip: typeof URL === 'undefined' }, function (t) { + t.match( + inspect(new URL('https://nodejs.org')), + /nodejs\.org/, // Different environments stringify it differently + 'url can be inspected' + ); + t.end(); +}); diff --git a/node_modules/object-inspect/test/lowbyte.js b/node_modules/object-inspect/test/lowbyte.js new file mode 100644 index 0000000..68a345d --- /dev/null +++ b/node_modules/object-inspect/test/lowbyte.js @@ -0,0 +1,12 @@ +var test = require('tape'); +var inspect = require('../'); + +var obj = { x: 'a\r\nb', y: '\x05! \x1f \x12' }; + +test('interpolate low bytes', function (t) { + t.plan(1); + t.equal( + inspect(obj), + "{ x: 'a\\r\\nb', y: '\\x05! 
\\x1F \\x12' }" + ); +}); diff --git a/node_modules/object-inspect/test/number.js b/node_modules/object-inspect/test/number.js new file mode 100644 index 0000000..8f287e8 --- /dev/null +++ b/node_modules/object-inspect/test/number.js @@ -0,0 +1,58 @@ +var test = require('tape'); +var v = require('es-value-fixtures'); +var forEach = require('for-each'); + +var inspect = require('../'); + +test('negative zero', function (t) { + t.equal(inspect(0), '0', 'inspect(0) === "0"'); + t.equal(inspect(Object(0)), 'Object(0)', 'inspect(Object(0)) === "Object(0)"'); + + t.equal(inspect(-0), '-0', 'inspect(-0) === "-0"'); + t.equal(inspect(Object(-0)), 'Object(-0)', 'inspect(Object(-0)) === "Object(-0)"'); + + t.end(); +}); + +test('numericSeparator', function (t) { + forEach(v.nonBooleans, function (nonBoolean) { + t['throws']( + function () { inspect(true, { numericSeparator: nonBoolean }); }, + TypeError, + inspect(nonBoolean) + ' is not a boolean' + ); + }); + + t.test('3 digit numbers', function (st) { + var failed = false; + for (var i = -999; i < 1000; i += 1) { + var actual = inspect(i); + var actualSepNo = inspect(i, { numericSeparator: false }); + var actualSepYes = inspect(i, { numericSeparator: true }); + var expected = String(i); + if (actual !== expected || actualSepNo !== expected || actualSepYes !== expected) { + failed = true; + t.equal(actual, expected); + t.equal(actualSepNo, expected); + t.equal(actualSepYes, expected); + } + } + + st.notOk(failed, 'all 3 digit numbers passed'); + + st.end(); + }); + + t.equal(inspect(1e3), '1000', '1000'); + t.equal(inspect(1e3, { numericSeparator: false }), '1000', '1000, numericSeparator false'); + t.equal(inspect(1e3, { numericSeparator: true }), '1_000', '1000, numericSeparator true'); + t.equal(inspect(-1e3), '-1000', '-1000'); + t.equal(inspect(-1e3, { numericSeparator: false }), '-1000', '-1000, numericSeparator false'); + t.equal(inspect(-1e3, { numericSeparator: true }), '-1_000', '-1000, numericSeparator true'); + + t.equal(inspect(1234.5678, { numericSeparator: true }), '1_234.567_8', 'fractional numbers get separators'); + t.equal(inspect(1234.56789, { numericSeparator: true }), '1_234.567_89', 'fractional numbers get separators'); + t.equal(inspect(1234.567891, { numericSeparator: true }), '1_234.567_891', 'fractional numbers get separators'); + + t.end(); +}); diff --git a/node_modules/object-inspect/test/quoteStyle.js b/node_modules/object-inspect/test/quoteStyle.js new file mode 100644 index 0000000..ae4d734 --- /dev/null +++ b/node_modules/object-inspect/test/quoteStyle.js @@ -0,0 +1,17 @@ +'use strict'; + +var inspect = require('../'); +var test = require('tape'); + +test('quoteStyle option', function (t) { + t['throws'](function () { inspect(null, { quoteStyle: false }); }, 'false is not a valid value'); + t['throws'](function () { inspect(null, { quoteStyle: true }); }, 'true is not a valid value'); + t['throws'](function () { inspect(null, { quoteStyle: '' }); }, '"" is not a valid value'); + t['throws'](function () { inspect(null, { quoteStyle: {} }); }, '{} is not a valid value'); + t['throws'](function () { inspect(null, { quoteStyle: [] }); }, '[] is not a valid value'); + t['throws'](function () { inspect(null, { quoteStyle: 42 }); }, '42 is not a valid value'); + t['throws'](function () { inspect(null, { quoteStyle: NaN }); }, 'NaN is not a valid value'); + t['throws'](function () { inspect(null, { quoteStyle: function () {} }); }, 'a function is not a valid value'); + + t.end(); +}); diff --git 
a/node_modules/object-inspect/test/toStringTag.js b/node_modules/object-inspect/test/toStringTag.js new file mode 100644 index 0000000..95f8270 --- /dev/null +++ b/node_modules/object-inspect/test/toStringTag.js @@ -0,0 +1,40 @@ +'use strict'; + +var test = require('tape'); +var hasToStringTag = require('has-tostringtag/shams')(); + +var inspect = require('../'); + +test('Symbol.toStringTag', { skip: !hasToStringTag }, function (t) { + t.plan(4); + + var obj = { a: 1 }; + t.equal(inspect(obj), '{ a: 1 }', 'object, no Symbol.toStringTag'); + + obj[Symbol.toStringTag] = 'foo'; + t.equal(inspect(obj), '{ a: 1, [Symbol(Symbol.toStringTag)]: \'foo\' }', 'object with Symbol.toStringTag'); + + t.test('null objects', { skip: 'toString' in { __proto__: null } }, function (st) { + st.plan(2); + + var dict = { __proto__: null, a: 1 }; + st.equal(inspect(dict), '[Object: null prototype] { a: 1 }', 'null object with Symbol.toStringTag'); + + dict[Symbol.toStringTag] = 'Dict'; + st.equal(inspect(dict), '[Dict: null prototype] { a: 1, [Symbol(Symbol.toStringTag)]: \'Dict\' }', 'null object with Symbol.toStringTag'); + }); + + t.test('instances', function (st) { + st.plan(4); + + function C() { + this.a = 1; + } + st.equal(Object.prototype.toString.call(new C()), '[object Object]', 'instance, no toStringTag, Object.prototype.toString'); + st.equal(inspect(new C()), 'C { a: 1 }', 'instance, no toStringTag'); + + C.prototype[Symbol.toStringTag] = 'Class!'; + st.equal(Object.prototype.toString.call(new C()), '[object Class!]', 'instance, with toStringTag, Object.prototype.toString'); + st.equal(inspect(new C()), 'C [Class!] { a: 1 }', 'instance, with toStringTag'); + }); +}); diff --git a/node_modules/object-inspect/test/undef.js b/node_modules/object-inspect/test/undef.js new file mode 100644 index 0000000..e3f4961 --- /dev/null +++ b/node_modules/object-inspect/test/undef.js @@ -0,0 +1,12 @@ +var test = require('tape'); +var inspect = require('../'); + +var obj = { a: 1, b: [3, 4, undefined, null], c: undefined, d: null }; + +test('undef and null', function (t) { + t.plan(1); + t.equal( + inspect(obj), + '{ a: 1, b: [ 3, 4, undefined, null ], c: undefined, d: null }' + ); +}); diff --git a/node_modules/object-inspect/test/values.js b/node_modules/object-inspect/test/values.js new file mode 100644 index 0000000..4832b9f --- /dev/null +++ b/node_modules/object-inspect/test/values.js @@ -0,0 +1,211 @@ +'use strict'; + +var inspect = require('../'); +var test = require('tape'); +var mockProperty = require('mock-property'); +var hasSymbols = require('has-symbols/shams')(); +var hasToStringTag = require('has-tostringtag/shams')(); + +test('values', function (t) { + t.plan(1); + var obj = [{}, [], { 'a-b': 5 }]; + t.equal(inspect(obj), '[ {}, [], { \'a-b\': 5 } ]'); +}); + +test('arrays with properties', function (t) { + t.plan(1); + var arr = [3]; + arr.foo = 'bar'; + var obj = [1, 2, arr]; + obj.baz = 'quux'; + obj.index = -1; + t.equal(inspect(obj), '[ 1, 2, [ 3, foo: \'bar\' ], baz: \'quux\', index: -1 ]'); +}); + +test('has', function (t) { + t.plan(1); + t.teardown(mockProperty(Object.prototype, 'hasOwnProperty', { 'delete': true })); + + t.equal(inspect({ a: 1, b: 2 }), '{ a: 1, b: 2 }'); +}); + +test('indexOf seen', function (t) { + t.plan(1); + var xs = [1, 2, 3, {}]; + xs.push(xs); + + var seen = []; + seen.indexOf = undefined; + + t.equal( + inspect(xs, {}, 0, seen), + '[ 1, 2, 3, {}, [Circular] ]' + ); +}); + +test('seen seen', function (t) { + t.plan(1); + var xs = [1, 2, 3]; + + var seen = [xs]; + 
seen.indexOf = undefined; + + t.equal( + inspect(xs, {}, 0, seen), + '[Circular]' + ); +}); + +test('seen seen seen', function (t) { + t.plan(1); + var xs = [1, 2, 3]; + + var seen = [5, xs]; + seen.indexOf = undefined; + + t.equal( + inspect(xs, {}, 0, seen), + '[Circular]' + ); +}); + +test('symbols', { skip: !hasSymbols }, function (t) { + var sym = Symbol('foo'); + t.equal(inspect(sym), 'Symbol(foo)', 'Symbol("foo") should be "Symbol(foo)"'); + if (typeof sym === 'symbol') { + // Symbol shams are incapable of differentiating boxed from unboxed symbols + t.equal(inspect(Object(sym)), 'Object(Symbol(foo))', 'Object(Symbol("foo")) should be "Object(Symbol(foo))"'); + } + + t.test('toStringTag', { skip: !hasToStringTag }, function (st) { + st.plan(1); + + var faker = {}; + faker[Symbol.toStringTag] = 'Symbol'; + st.equal( + inspect(faker), + '{ [Symbol(Symbol.toStringTag)]: \'Symbol\' }', + 'object lying about being a Symbol inspects as an object' + ); + }); + + t.end(); +}); + +test('Map', { skip: typeof Map !== 'function' }, function (t) { + var map = new Map(); + map.set({ a: 1 }, ['b']); + map.set(3, NaN); + var expectedString = 'Map (2) {' + inspect({ a: 1 }) + ' => ' + inspect(['b']) + ', 3 => NaN}'; + t.equal(inspect(map), expectedString, 'new Map([[{ a: 1 }, ["b"]], [3, NaN]]) should show size and contents'); + t.equal(inspect(new Map()), 'Map (0) {}', 'empty Map should show as empty'); + + var nestedMap = new Map(); + nestedMap.set(nestedMap, map); + t.equal(inspect(nestedMap), 'Map (1) {[Circular] => ' + expectedString + '}', 'Map containing a Map should work'); + + t.end(); +}); + +test('WeakMap', { skip: typeof WeakMap !== 'function' }, function (t) { + var map = new WeakMap(); + map.set({ a: 1 }, ['b']); + var expectedString = 'WeakMap { ? }'; + t.equal(inspect(map), expectedString, 'new WeakMap([[{ a: 1 }, ["b"]]]) should not show size or contents'); + t.equal(inspect(new WeakMap()), 'WeakMap { ? }', 'empty WeakMap should not show as empty'); + + t.end(); +}); + +test('Set', { skip: typeof Set !== 'function' }, function (t) { + var set = new Set(); + set.add({ a: 1 }); + set.add(['b']); + var expectedString = 'Set (2) {' + inspect({ a: 1 }) + ', ' + inspect(['b']) + '}'; + t.equal(inspect(set), expectedString, 'new Set([{ a: 1 }, ["b"]]) should show size and contents'); + t.equal(inspect(new Set()), 'Set (0) {}', 'empty Set should show as empty'); + + var nestedSet = new Set(); + nestedSet.add(set); + nestedSet.add(nestedSet); + t.equal(inspect(nestedSet), 'Set (2) {' + expectedString + ', [Circular]}', 'Set containing a Set should work'); + + t.end(); +}); + +test('WeakSet', { skip: typeof WeakSet !== 'function' }, function (t) { + var map = new WeakSet(); + map.add({ a: 1 }); + var expectedString = 'WeakSet { ? }'; + t.equal(inspect(map), expectedString, 'new WeakSet([{ a: 1 }]) should not show size or contents'); + t.equal(inspect(new WeakSet()), 'WeakSet { ? }', 'empty WeakSet should not show as empty'); + + t.end(); +}); + +test('WeakRef', { skip: typeof WeakRef !== 'function' }, function (t) { + var ref = new WeakRef({ a: 1 }); + var expectedString = 'WeakRef { ? 
}'; + t.equal(inspect(ref), expectedString, 'new WeakRef({ a: 1 }) should not show contents'); + + t.end(); +}); + +test('FinalizationRegistry', { skip: typeof FinalizationRegistry !== 'function' }, function (t) { + var registry = new FinalizationRegistry(function () {}); + var expectedString = 'FinalizationRegistry [FinalizationRegistry] {}'; + t.equal(inspect(registry), expectedString, 'new FinalizationRegistry(function () {}) should work normallys'); + + t.end(); +}); + +test('Strings', function (t) { + var str = 'abc'; + + t.equal(inspect(str), "'" + str + "'", 'primitive string shows as such'); + t.equal(inspect(str, { quoteStyle: 'single' }), "'" + str + "'", 'primitive string shows as such, single quoted'); + t.equal(inspect(str, { quoteStyle: 'double' }), '"' + str + '"', 'primitive string shows as such, double quoted'); + t.equal(inspect(Object(str)), 'Object(' + inspect(str) + ')', 'String object shows as such'); + t.equal(inspect(Object(str), { quoteStyle: 'single' }), 'Object(' + inspect(str, { quoteStyle: 'single' }) + ')', 'String object shows as such, single quoted'); + t.equal(inspect(Object(str), { quoteStyle: 'double' }), 'Object(' + inspect(str, { quoteStyle: 'double' }) + ')', 'String object shows as such, double quoted'); + + t.end(); +}); + +test('Numbers', function (t) { + var num = 42; + + t.equal(inspect(num), String(num), 'primitive number shows as such'); + t.equal(inspect(Object(num)), 'Object(' + inspect(num) + ')', 'Number object shows as such'); + + t.end(); +}); + +test('Booleans', function (t) { + t.equal(inspect(true), String(true), 'primitive true shows as such'); + t.equal(inspect(Object(true)), 'Object(' + inspect(true) + ')', 'Boolean object true shows as such'); + + t.equal(inspect(false), String(false), 'primitive false shows as such'); + t.equal(inspect(Object(false)), 'Object(' + inspect(false) + ')', 'Boolean false object shows as such'); + + t.end(); +}); + +test('Date', function (t) { + var now = new Date(); + t.equal(inspect(now), String(now), 'Date shows properly'); + t.equal(inspect(new Date(NaN)), 'Invalid Date', 'Invalid Date shows properly'); + + t.end(); +}); + +test('RegExps', function (t) { + t.equal(inspect(/a/g), '/a/g', 'regex shows properly'); + t.equal(inspect(new RegExp('abc', 'i')), '/abc/i', 'new RegExp shows properly'); + + var match = 'abc abc'.match(/[ab]+/); + delete match.groups; // for node < 10 + t.equal(inspect(match), '[ \'ab\', index: 0, input: \'abc abc\' ]', 'RegExp match object shows properly'); + + t.end(); +}); diff --git a/node_modules/object-inspect/util.inspect.js b/node_modules/object-inspect/util.inspect.js new file mode 100644 index 0000000..7784fab --- /dev/null +++ b/node_modules/object-inspect/util.inspect.js @@ -0,0 +1 @@ +module.exports = require('util').inspect; diff --git a/node_modules/on-finished/HISTORY.md b/node_modules/on-finished/HISTORY.md new file mode 100644 index 0000000..98ff0e9 --- /dev/null +++ b/node_modules/on-finished/HISTORY.md @@ -0,0 +1,88 @@ +2.3.0 / 2015-05-26 +================== + + * Add defined behavior for HTTP `CONNECT` requests + * Add defined behavior for HTTP `Upgrade` requests + * deps: ee-first@1.1.1 + +2.2.1 / 2015-04-22 +================== + + * Fix `isFinished(req)` when data buffered + +2.2.0 / 2014-12-22 +================== + + * Add message object to callback arguments + +2.1.1 / 2014-10-22 +================== + + * Fix handling of pipelined requests + +2.1.0 / 2014-08-16 +================== + + * Check if `socket` is detached + * Return `undefined` for 
`isFinished` if state unknown + +2.0.0 / 2014-08-16 +================== + + * Add `isFinished` function + * Move to `jshttp` organization + * Remove support for plain socket argument + * Rename to `on-finished` + * Support both `req` and `res` as arguments + * deps: ee-first@1.0.5 + +1.2.2 / 2014-06-10 +================== + + * Reduce listeners added to emitters + - avoids "event emitter leak" warnings when used multiple times on same request + +1.2.1 / 2014-06-08 +================== + + * Fix returned value when already finished + +1.2.0 / 2014-06-05 +================== + + * Call callback when called on already-finished socket + +1.1.4 / 2014-05-27 +================== + + * Support node.js 0.8 + +1.1.3 / 2014-04-30 +================== + + * Make sure errors passed as instanceof `Error` + +1.1.2 / 2014-04-18 +================== + + * Default the `socket` to passed-in object + +1.1.1 / 2014-01-16 +================== + + * Rename module to `finished` + +1.1.0 / 2013-12-25 +================== + + * Call callback when called on already-errored socket + +1.0.1 / 2013-12-20 +================== + + * Actually pass the error to the callback + +1.0.0 / 2013-12-20 +================== + + * Initial release diff --git a/node_modules/on-finished/LICENSE b/node_modules/on-finished/LICENSE new file mode 100644 index 0000000..5931fd2 --- /dev/null +++ b/node_modules/on-finished/LICENSE @@ -0,0 +1,23 @@ +(The MIT License) + +Copyright (c) 2013 Jonathan Ong +Copyright (c) 2014 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/on-finished/README.md b/node_modules/on-finished/README.md new file mode 100644 index 0000000..a0e1157 --- /dev/null +++ b/node_modules/on-finished/README.md @@ -0,0 +1,154 @@ +# on-finished + +[![NPM Version][npm-image]][npm-url] +[![NPM Downloads][downloads-image]][downloads-url] +[![Node.js Version][node-version-image]][node-version-url] +[![Build Status][travis-image]][travis-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +Execute a callback when a HTTP request closes, finishes, or errors. + +## Install + +```sh +$ npm install on-finished +``` + +## API + +```js +var onFinished = require('on-finished') +``` + +### onFinished(res, listener) + +Attach a listener to listen for the response to finish. The listener will +be invoked only once when the response finished. If the response finished +to an error, the first argument will contain the error. If the response +has already finished, the listener will be invoked. 
+ +Listening to the end of a response would be used to close things associated +with the response, like open files. + +Listener is invoked as `listener(err, res)`. + +```js +onFinished(res, function (err, res) { + // clean up open fds, etc. + // err contains the error is request error'd +}) +``` + +### onFinished(req, listener) + +Attach a listener to listen for the request to finish. The listener will +be invoked only once when the request finished. If the request finished +to an error, the first argument will contain the error. If the request +has already finished, the listener will be invoked. + +Listening to the end of a request would be used to know when to continue +after reading the data. + +Listener is invoked as `listener(err, req)`. + +```js +var data = '' + +req.setEncoding('utf8') +res.on('data', function (str) { + data += str +}) + +onFinished(req, function (err, req) { + // data is read unless there is err +}) +``` + +### onFinished.isFinished(res) + +Determine if `res` is already finished. This would be useful to check and +not even start certain operations if the response has already finished. + +### onFinished.isFinished(req) + +Determine if `req` is already finished. This would be useful to check and +not even start certain operations if the request has already finished. + +## Special Node.js requests + +### HTTP CONNECT method + +The meaning of the `CONNECT` method from RFC 7231, section 4.3.6: + +> The CONNECT method requests that the recipient establish a tunnel to +> the destination origin server identified by the request-target and, +> if successful, thereafter restrict its behavior to blind forwarding +> of packets, in both directions, until the tunnel is closed. Tunnels +> are commonly used to create an end-to-end virtual connection, through +> one or more proxies, which can then be secured using TLS (Transport +> Layer Security, [RFC5246]). + +In Node.js, these request objects come from the `'connect'` event on +the HTTP server. + +When this module is used on a HTTP `CONNECT` request, the request is +considered "finished" immediately, **due to limitations in the Node.js +interface**. This means if the `CONNECT` request contains a request entity, +the request will be considered "finished" even before it has been read. + +There is no such thing as a response object to a `CONNECT` request in +Node.js, so there is no support for for one. + +### HTTP Upgrade request + +The meaning of the `Upgrade` header from RFC 7230, section 6.1: + +> The "Upgrade" header field is intended to provide a simple mechanism +> for transitioning from HTTP/1.1 to some other protocol on the same +> connection. + +In Node.js, these request objects come from the `'upgrade'` event on +the HTTP server. + +When this module is used on a HTTP request with an `Upgrade` header, the +request is considered "finished" immediately, **due to limitations in the +Node.js interface**. This means if the `Upgrade` request contains a request +entity, the request will be considered "finished" even before it has been +read. + +There is no such thing as a response object to a `Upgrade` request in +Node.js, so there is no support for for one. + +## Example + +The following code ensures that file descriptors are always closed +once the response finishes. 
+ +```js +var destroy = require('destroy') +var http = require('http') +var onFinished = require('on-finished') + +http.createServer(function onRequest(req, res) { + var stream = fs.createReadStream('package.json') + stream.pipe(res) + onFinished(res, function (err) { + destroy(stream) + }) +}) +``` + +## License + +[MIT](LICENSE) + +[npm-image]: https://img.shields.io/npm/v/on-finished.svg +[npm-url]: https://npmjs.org/package/on-finished +[node-version-image]: https://img.shields.io/node/v/on-finished.svg +[node-version-url]: http://nodejs.org/download/ +[travis-image]: https://img.shields.io/travis/jshttp/on-finished/master.svg +[travis-url]: https://travis-ci.org/jshttp/on-finished +[coveralls-image]: https://img.shields.io/coveralls/jshttp/on-finished/master.svg +[coveralls-url]: https://coveralls.io/r/jshttp/on-finished?branch=master +[downloads-image]: https://img.shields.io/npm/dm/on-finished.svg +[downloads-url]: https://npmjs.org/package/on-finished diff --git a/node_modules/on-finished/index.js b/node_modules/on-finished/index.js new file mode 100644 index 0000000..9abd98f --- /dev/null +++ b/node_modules/on-finished/index.js @@ -0,0 +1,196 @@ +/*! + * on-finished + * Copyright(c) 2013 Jonathan Ong + * Copyright(c) 2014 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module exports. + * @public + */ + +module.exports = onFinished +module.exports.isFinished = isFinished + +/** + * Module dependencies. + * @private + */ + +var first = require('ee-first') + +/** + * Variables. + * @private + */ + +/* istanbul ignore next */ +var defer = typeof setImmediate === 'function' + ? setImmediate + : function(fn){ process.nextTick(fn.bind.apply(fn, arguments)) } + +/** + * Invoke callback when the response has finished, useful for + * cleaning up resources afterwards. + * + * @param {object} msg + * @param {function} listener + * @return {object} + * @public + */ + +function onFinished(msg, listener) { + if (isFinished(msg) !== false) { + defer(listener, null, msg) + return msg + } + + // attach the listener to the message + attachListener(msg, listener) + + return msg +} + +/** + * Determine if message is already finished. + * + * @param {object} msg + * @return {boolean} + * @public + */ + +function isFinished(msg) { + var socket = msg.socket + + if (typeof msg.finished === 'boolean') { + // OutgoingMessage + return Boolean(msg.finished || (socket && !socket.writable)) + } + + if (typeof msg.complete === 'boolean') { + // IncomingMessage + return Boolean(msg.upgrade || !socket || !socket.readable || (msg.complete && !msg.readable)) + } + + // don't know + return undefined +} + +/** + * Attach a finished listener to the message. 
+ * + * @param {object} msg + * @param {function} callback + * @private + */ + +function attachFinishedListener(msg, callback) { + var eeMsg + var eeSocket + var finished = false + + function onFinish(error) { + eeMsg.cancel() + eeSocket.cancel() + + finished = true + callback(error) + } + + // finished on first message event + eeMsg = eeSocket = first([[msg, 'end', 'finish']], onFinish) + + function onSocket(socket) { + // remove listener + msg.removeListener('socket', onSocket) + + if (finished) return + if (eeMsg !== eeSocket) return + + // finished on first socket event + eeSocket = first([[socket, 'error', 'close']], onFinish) + } + + if (msg.socket) { + // socket already assigned + onSocket(msg.socket) + return + } + + // wait for socket to be assigned + msg.on('socket', onSocket) + + if (msg.socket === undefined) { + // node.js 0.8 patch + patchAssignSocket(msg, onSocket) + } +} + +/** + * Attach the listener to the message. + * + * @param {object} msg + * @return {function} + * @private + */ + +function attachListener(msg, listener) { + var attached = msg.__onFinished + + // create a private single listener with queue + if (!attached || !attached.queue) { + attached = msg.__onFinished = createListener(msg) + attachFinishedListener(msg, attached) + } + + attached.queue.push(listener) +} + +/** + * Create listener on message. + * + * @param {object} msg + * @return {function} + * @private + */ + +function createListener(msg) { + function listener(err) { + if (msg.__onFinished === listener) msg.__onFinished = null + if (!listener.queue) return + + var queue = listener.queue + listener.queue = null + + for (var i = 0; i < queue.length; i++) { + queue[i](err, msg) + } + } + + listener.queue = [] + + return listener +} + +/** + * Patch ServerResponse.prototype.assignSocket for node.js 0.8. 
+ * + * @param {ServerResponse} res + * @param {function} callback + * @private + */ + +function patchAssignSocket(res, callback) { + var assignSocket = res.assignSocket + + if (typeof assignSocket !== 'function') return + + // res.on('socket', callback) is broken in 0.8 + res.assignSocket = function _assignSocket(socket) { + assignSocket.call(this, socket) + callback(socket) + } +} diff --git a/node_modules/on-finished/package.json b/node_modules/on-finished/package.json new file mode 100644 index 0000000..b9df1bd --- /dev/null +++ b/node_modules/on-finished/package.json @@ -0,0 +1,31 @@ +{ + "name": "on-finished", + "description": "Execute a callback when a request closes, finishes, or errors", + "version": "2.3.0", + "contributors": [ + "Douglas Christopher Wilson ", + "Jonathan Ong (http://jongleberry.com)" + ], + "license": "MIT", + "repository": "jshttp/on-finished", + "dependencies": { + "ee-first": "1.1.1" + }, + "devDependencies": { + "istanbul": "0.3.9", + "mocha": "2.2.5" + }, + "engines": { + "node": ">= 0.8" + }, + "files": [ + "HISTORY.md", + "LICENSE", + "index.js" + ], + "scripts": { + "test": "mocha --reporter spec --bail --check-leaks test/", + "test-cov": "istanbul cover node_modules/mocha/bin/_mocha -- --reporter dot --check-leaks test/", + "test-travis": "istanbul cover node_modules/mocha/bin/_mocha --report lcovonly -- --reporter spec --check-leaks test/" + } +} diff --git a/node_modules/on-headers/HISTORY.md b/node_modules/on-headers/HISTORY.md new file mode 100644 index 0000000..090598d --- /dev/null +++ b/node_modules/on-headers/HISTORY.md @@ -0,0 +1,21 @@ +1.0.2 / 2019-02-21 +================== + + * Fix `res.writeHead` patch missing return value + +1.0.1 / 2015-09-29 +================== + + * perf: enable strict mode + +1.0.0 / 2014-08-10 +================== + + * Honor `res.statusCode` change in `listener` + * Move to `jshttp` organization + * Prevent `arguments`-related de-opt + +0.0.0 / 2014-05-13 +================== + + * Initial implementation diff --git a/node_modules/on-headers/LICENSE b/node_modules/on-headers/LICENSE new file mode 100644 index 0000000..b7dce6c --- /dev/null +++ b/node_modules/on-headers/LICENSE @@ -0,0 +1,22 @@ +(The MIT License) + +Copyright (c) 2014 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/on-headers/README.md b/node_modules/on-headers/README.md new file mode 100644 index 0000000..ae84282 --- /dev/null +++ b/node_modules/on-headers/README.md @@ -0,0 +1,81 @@ +# on-headers + +[![NPM Version][npm-version-image]][npm-url] +[![NPM Downloads][npm-downloads-image]][npm-url] +[![Node.js Version][node-version-image]][node-version-url] +[![Build Status][travis-image]][travis-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +Execute a listener when a response is about to write headers. + +## Installation + +This is a [Node.js](https://nodejs.org/en/) module available through the +[npm registry](https://www.npmjs.com/). Installation is done using the +[`npm install` command](https://docs.npmjs.com/getting-started/installing-npm-packages-locally): + +```sh +$ npm install on-headers +``` + +## API + + + +```js +var onHeaders = require('on-headers') +``` + +### onHeaders(res, listener) + +This will add the listener `listener` to fire when headers are emitted for `res`. +The listener is passed the `response` object as it's context (`this`). Headers are +considered to be emitted only once, right before they are sent to the client. + +When this is called multiple times on the same `res`, the `listener`s are fired +in the reverse order they were added. + +## Examples + +```js +var http = require('http') +var onHeaders = require('on-headers') + +http + .createServer(onRequest) + .listen(3000) + +function addPoweredBy () { + // set if not set by end of request + if (!this.getHeader('X-Powered-By')) { + this.setHeader('X-Powered-By', 'Node.js') + } +} + +function onRequest (req, res) { + onHeaders(res, addPoweredBy) + + res.setHeader('Content-Type', 'text/plain') + res.end('hello!') +} +``` + +## Testing + +```sh +$ npm test +``` + +## License + +[MIT](LICENSE) + +[coveralls-image]: https://badgen.net/coveralls/c/github/jshttp/on-headers/master +[coveralls-url]: https://coveralls.io/r/jshttp/on-headers?branch=master +[node-version-image]: https://badgen.net/npm/node/on-headers +[node-version-url]: https://nodejs.org/en/download +[npm-downloads-image]: https://badgen.net/npm/dm/on-headers +[npm-url]: https://npmjs.org/package/on-headers +[npm-version-image]: https://badgen.net/npm/v/on-headers +[travis-image]: https://badgen.net/travis/jshttp/on-headers/master +[travis-url]: https://travis-ci.org/jshttp/on-headers diff --git a/node_modules/on-headers/index.js b/node_modules/on-headers/index.js new file mode 100644 index 0000000..7db6375 --- /dev/null +++ b/node_modules/on-headers/index.js @@ -0,0 +1,132 @@ +/*! + * on-headers + * Copyright(c) 2014 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module exports. + * @public + */ + +module.exports = onHeaders + +/** + * Create a replacement writeHead method. + * + * @param {function} prevWriteHead + * @param {function} listener + * @private + */ + +function createWriteHead (prevWriteHead, listener) { + var fired = false + + // return function with core name and argument list + return function writeHead (statusCode) { + // set headers from arguments + var args = setWriteHeadHeaders.apply(this, arguments) + + // fire listener + if (!fired) { + fired = true + listener.call(this) + + // pass-along an updated status code + if (typeof args[0] === 'number' && this.statusCode !== args[0]) { + args[0] = this.statusCode + args.length = 1 + } + } + + return prevWriteHead.apply(this, args) + } +} + +/** + * Execute a listener when a response is about to write headers. 
+ * + * @param {object} res + * @return {function} listener + * @public + */ + +function onHeaders (res, listener) { + if (!res) { + throw new TypeError('argument res is required') + } + + if (typeof listener !== 'function') { + throw new TypeError('argument listener must be a function') + } + + res.writeHead = createWriteHead(res.writeHead, listener) +} + +/** + * Set headers contained in array on the response object. + * + * @param {object} res + * @param {array} headers + * @private + */ + +function setHeadersFromArray (res, headers) { + for (var i = 0; i < headers.length; i++) { + res.setHeader(headers[i][0], headers[i][1]) + } +} + +/** + * Set headers contained in object on the response object. + * + * @param {object} res + * @param {object} headers + * @private + */ + +function setHeadersFromObject (res, headers) { + var keys = Object.keys(headers) + for (var i = 0; i < keys.length; i++) { + var k = keys[i] + if (k) res.setHeader(k, headers[k]) + } +} + +/** + * Set headers and other properties on the response object. + * + * @param {number} statusCode + * @private + */ + +function setWriteHeadHeaders (statusCode) { + var length = arguments.length + var headerIndex = length > 1 && typeof arguments[1] === 'string' + ? 2 + : 1 + + var headers = length >= headerIndex + 1 + ? arguments[headerIndex] + : undefined + + this.statusCode = statusCode + + if (Array.isArray(headers)) { + // handle array case + setHeadersFromArray(this, headers) + } else if (headers) { + // handle object case + setHeadersFromObject(this, headers) + } + + // copy leading arguments + var args = new Array(Math.min(length, headerIndex)) + for (var i = 0; i < args.length; i++) { + args[i] = arguments[i] + } + + return args +} diff --git a/node_modules/on-headers/package.json b/node_modules/on-headers/package.json new file mode 100644 index 0000000..1e9bf9e --- /dev/null +++ b/node_modules/on-headers/package.json @@ -0,0 +1,42 @@ +{ + "name": "on-headers", + "description": "Execute a listener when a response is about to write headers", + "version": "1.0.2", + "author": "Douglas Christopher Wilson ", + "license": "MIT", + "keywords": [ + "event", + "headers", + "http", + "onheaders" + ], + "repository": "jshttp/on-headers", + "devDependencies": { + "eslint": "5.14.1", + "eslint-config-standard": "12.0.0", + "eslint-plugin-import": "2.16.0", + "eslint-plugin-markdown": "1.0.0", + "eslint-plugin-node": "8.0.1", + "eslint-plugin-promise": "4.0.1", + "eslint-plugin-standard": "4.0.0", + "istanbul": "0.4.5", + "mocha": "6.0.1", + "supertest": "3.4.2" + }, + "files": [ + "LICENSE", + "HISTORY.md", + "README.md", + "index.js" + ], + "engines": { + "node": ">= 0.8" + }, + "scripts": { + "lint": "eslint --plugin markdown --ext js,md .", + "test": "mocha --reporter spec --bail --check-leaks test/", + "test-cov": "istanbul cover node_modules/mocha/bin/_mocha -- --reporter dot --check-leaks test/", + "test-travis": "istanbul cover node_modules/mocha/bin/_mocha --report lcovonly -- --reporter spec --check-leaks test/", + "version": "node scripts/version-history.js && git add HISTORY.md" + } +} diff --git a/node_modules/once/LICENSE b/node_modules/once/LICENSE new file mode 100644 index 0000000..19129e3 --- /dev/null +++ b/node_modules/once/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. 
Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/once/README.md b/node_modules/once/README.md new file mode 100644 index 0000000..1f1ffca --- /dev/null +++ b/node_modules/once/README.md @@ -0,0 +1,79 @@ +# once + +Only call a function once. + +## usage + +```javascript +var once = require('once') + +function load (file, cb) { + cb = once(cb) + loader.load('file') + loader.once('load', cb) + loader.once('error', cb) +} +``` + +Or add to the Function.prototype in a responsible way: + +```javascript +// only has to be done once +require('once').proto() + +function load (file, cb) { + cb = cb.once() + loader.load('file') + loader.once('load', cb) + loader.once('error', cb) +} +``` + +Ironically, the prototype feature makes this module twice as +complicated as necessary. + +To check whether you function has been called, use `fn.called`. Once the +function is called for the first time the return value of the original +function is saved in `fn.value` and subsequent calls will continue to +return this value. + +```javascript +var once = require('once') + +function load (cb) { + cb = once(cb) + var stream = createStream() + stream.once('data', cb) + stream.once('end', function () { + if (!cb.called) cb(new Error('not found')) + }) +} +``` + +## `once.strict(func)` + +Throw an error if the function is called twice. + +Some functions are expected to be called only once. Using `once` for them would +potentially hide logical errors. 
+ +In the example below, the `greet` function has to call the callback only once: + +```javascript +function greet (name, cb) { + // return is missing from the if statement + // when no name is passed, the callback is called twice + if (!name) cb('Hello anonymous') + cb('Hello ' + name) +} + +function log (msg) { + console.log(msg) +} + +// this will print 'Hello anonymous' but the logical error will be missed +greet(null, once(log)) + +// once.strict will print 'Hello anonymous' and throw an error when the callback will be called the second time +greet(null, once.strict(log)) +``` diff --git a/node_modules/once/once.js b/node_modules/once/once.js new file mode 100644 index 0000000..2354067 --- /dev/null +++ b/node_modules/once/once.js @@ -0,0 +1,42 @@ +var wrappy = require('wrappy') +module.exports = wrappy(once) +module.exports.strict = wrappy(onceStrict) + +once.proto = once(function () { + Object.defineProperty(Function.prototype, 'once', { + value: function () { + return once(this) + }, + configurable: true + }) + + Object.defineProperty(Function.prototype, 'onceStrict', { + value: function () { + return onceStrict(this) + }, + configurable: true + }) +}) + +function once (fn) { + var f = function () { + if (f.called) return f.value + f.called = true + return f.value = fn.apply(this, arguments) + } + f.called = false + return f +} + +function onceStrict (fn) { + var f = function () { + if (f.called) + throw new Error(f.onceError) + f.called = true + return f.value = fn.apply(this, arguments) + } + var name = fn.name || 'Function wrapped with `once`' + f.onceError = name + " shouldn't be called more than once" + f.called = false + return f +} diff --git a/node_modules/once/package.json b/node_modules/once/package.json new file mode 100644 index 0000000..16815b2 --- /dev/null +++ b/node_modules/once/package.json @@ -0,0 +1,33 @@ +{ + "name": "once", + "version": "1.4.0", + "description": "Run a function exactly one time", + "main": "once.js", + "directories": { + "test": "test" + }, + "dependencies": { + "wrappy": "1" + }, + "devDependencies": { + "tap": "^7.0.1" + }, + "scripts": { + "test": "tap test/*.js" + }, + "files": [ + "once.js" + ], + "repository": { + "type": "git", + "url": "git://github.com/isaacs/once" + }, + "keywords": [ + "once", + "function", + "one", + "single" + ], + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "license": "ISC" +} diff --git a/node_modules/prefix/.npmignore b/node_modules/prefix/.npmignore new file mode 100644 index 0000000..665aa21 --- /dev/null +++ b/node_modules/prefix/.npmignore @@ -0,0 +1,3 @@ +components +build +node_modules diff --git a/node_modules/prefix/LICENSE b/node_modules/prefix/LICENSE new file mode 100644 index 0000000..f7a2c5e --- /dev/null +++ b/node_modules/prefix/LICENSE @@ -0,0 +1,23 @@ + +The MIT License + +Copyright (c) 2013 - present Jake Rosoman + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software.
+ +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/prefix/Makefile b/node_modules/prefix/Makefile new file mode 100644 index 0000000..d29d1a8 --- /dev/null +++ b/node_modules/prefix/Makefile @@ -0,0 +1,10 @@ +serve: node_modules + @$ WebkitTransform +prefix('color') // => color +``` + +### dash(key) + + create a dasherize version of a vendor prefix + +```js +prefix.dash('transform') // => -webkit-transform +prefix.dash('color') // => color +``` + +## Running the tests + +Just run `make` and navigate your browser to the test directory. diff --git a/node_modules/prefix/component.json b/node_modules/prefix/component.json new file mode 100644 index 0000000..0402cea --- /dev/null +++ b/node_modules/prefix/component.json @@ -0,0 +1,9 @@ +{ + "name": "prefix", + "repo": "jkroso/prefix", + "description": "Prefix CSS attributes", + "keywords": ["dom", "css", "prefix"], + "dependencies": {}, + "license": "MIT", + "scripts": ["index.js"] +} diff --git a/node_modules/prefix/index.js b/node_modules/prefix/index.js new file mode 100644 index 0000000..26517c2 --- /dev/null +++ b/node_modules/prefix/index.js @@ -0,0 +1,69 @@ +// check document first so it doesn't error in node.js +var style = typeof document != 'undefined' + ? document.createElement('p').style + : {} + +var prefixes = ['O', 'ms', 'Moz', 'Webkit'] +var upper = /([A-Z])/g +var memo = {} + +/** + * prefix `key` + * + * prefix('transform') // => WebkitTransform + * + * @param {String} key + * @return {String} + * @api public + */ +function prefix(key){ + // Camel case + key = key.replace(/-([a-z])/g, function(_, char){ + return char.toUpperCase() + }) + + // Without prefix + if (style[key] !== undefined) return key + + // With prefix + var Key = key.charAt(0).toUpperCase() + key.slice(1) + var i = prefixes.length + while (i--) { + var name = prefixes[i] + Key + if (style[name] !== undefined) return name + } + + return key +} + +/** + * Memoized version of `prefix` + * + * @param {String} key + * @return {String} + * @api public + */ +function prefixMemozied(key){ + return key in memo + ? 
memo[key] + : memo[key] = prefix(key) +} + +/** + * Create a dashed prefix + * + * @param {String} key + * @return {String} + * @api public + */ +function prefixDashed(key){ + key = prefix(key) + if (upper.test(key)) { + key = '-' + key.replace(upper, '-$1') + upper.lastIndex = 0 + } + return key.toLowerCase() +} + +module.exports = prefixMemozied +module.exports.dash = prefixDashed diff --git a/node_modules/prefix/package.json b/node_modules/prefix/package.json new file mode 100644 index 0000000..cb3c27c --- /dev/null +++ b/node_modules/prefix/package.json @@ -0,0 +1,15 @@ +{ + "name": "prefix", + "version": "1.0.0", + "description": "Prefix CSS attributes", + "dependencies": {}, + "devDependencies": { + "mocha": "^2.4.5", + "serve": "jkroso/serve#1.5.1" + }, + "repository": "git://github.com/jkroso/prefix.git", + "main": "index.js", + "keywords": ["prefix"], + "author": "Jake Rosoman", + "license": "MIT" +} diff --git a/node_modules/prefix/test/index.html b/node_modules/prefix/test/index.html new file mode 100644 index 0000000..f0309fb --- /dev/null +++ b/node_modules/prefix/test/index.html @@ -0,0 +1,23 @@ + + + + + prefix tests + + + + +
+ + + diff --git a/node_modules/prefix/test/prefix.spec.js b/node_modules/prefix/test/prefix.spec.js new file mode 100644 index 0000000..df51230 --- /dev/null +++ b/node_modules/prefix/test/prefix.spec.js @@ -0,0 +1,59 @@ +var assert = require('assert') +var prefix = require('..') + +describe('prefix', function(){ + it ('should not prefix things which don\'t need prefixes', function(){ + assert(prefix('border') == 'border') + }) + + it('should format dashed properties', function(){ + assert(prefix('background-color') == 'backgroundColor') + }) + + it ('may return a prefixed dom style for css3 style like transform', function(){ + assert(prefix('transform') in possibilities('transform')) + }) + + it('should memoize results', function(){ + assert(prefix('border') == 'border') + assert(prefix('border') == 'border') + assert(prefix('transform') in possibilities('transform')) + assert(prefix('transform') in possibilities('transform')) + }) + + it ('should return key as is if it can\'t find a correct key', function(){ + assert(prefix('some invalid key') == 'some invalid key') + }) +}) + +describe('dash', function(){ + var transformPossibilites = { + '-webkit-transform': null, + '-moz-transform': null, + '-ms-transform': null, + '-o-transform': null, + 'transform': null + } + + it('should create a dasherized string', function(){ + assert(prefix.dash('transform') in transformPossibilites) + }) + + it('should work if invoked many times', function(){ + assert(prefix.dash('transform') in transformPossibilites) + assert(prefix.dash('transform') == prefix.dash('transform')) + }) +}) + +function possibilities(key){ + return 'Moz O ms Webkit'.split(' ').map(function(pre){ + return pre + capitalize(key) + }).concat(key).reduce(function(o, k){ + o[k] = true + return o + }, {}) +} + +function capitalize(str){ + return str.charAt(0).toUpperCase() + str.slice(1) +} diff --git a/node_modules/qs/.editorconfig b/node_modules/qs/.editorconfig new file mode 100644 index 0000000..2f08444 --- /dev/null +++ b/node_modules/qs/.editorconfig @@ -0,0 +1,43 @@ +root = true + +[*] +indent_style = space +indent_size = 4 +end_of_line = lf +charset = utf-8 +trim_trailing_whitespace = true +insert_final_newline = true +max_line_length = 160 +quote_type = single + +[test/*] +max_line_length = off + +[LICENSE.md] +indent_size = off + +[*.md] +max_line_length = off + +[*.json] +max_line_length = off + +[Makefile] +max_line_length = off + +[CHANGELOG.md] +indent_style = space +indent_size = 2 + +[LICENSE] +indent_size = 2 +max_line_length = off + +[coverage/**/*] +indent_size = off +indent_style = off +indent = off +max_line_length = off + +[.nycrc] +indent_style = tab diff --git a/node_modules/qs/.eslintrc b/node_modules/qs/.eslintrc new file mode 100644 index 0000000..35220cd --- /dev/null +++ b/node_modules/qs/.eslintrc @@ -0,0 +1,38 @@ +{ + "root": true, + + "extends": "@ljharb", + + "ignorePatterns": [ + "dist/", + ], + + "rules": { + "complexity": 0, + "consistent-return": 1, + "func-name-matching": 0, + "id-length": [2, { "min": 1, "max": 25, "properties": "never" }], + "indent": [2, 4], + "max-lines-per-function": [2, { "max": 150 }], + "max-params": [2, 16], + "max-statements": [2, 53], + "multiline-comment-style": 0, + "no-continue": 1, + "no-magic-numbers": 0, + "no-restricted-syntax": [2, "BreakStatement", "DebuggerStatement", "ForInStatement", "LabeledStatement", "WithStatement"], + }, + + "overrides": [ + { + "files": "test/**", + "rules": { + "function-paren-newline": 0, + "max-lines-per-function": 0, + 
"max-statements": 0, + "no-buffer-constructor": 0, + "no-extend-native": 0, + "no-throw-literal": 0, + }, + }, + ], +} diff --git a/node_modules/qs/.github/FUNDING.yml b/node_modules/qs/.github/FUNDING.yml new file mode 100644 index 0000000..0355f4f --- /dev/null +++ b/node_modules/qs/.github/FUNDING.yml @@ -0,0 +1,12 @@ +# These are supported funding model platforms + +github: [ljharb] +patreon: # Replace with a single Patreon username +open_collective: # Replace with a single Open Collective username +ko_fi: # Replace with a single Ko-fi username +tidelift: npm/qs +community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry +liberapay: # Replace with a single Liberapay username +issuehunt: # Replace with a single IssueHunt username +otechie: # Replace with a single Otechie username +custom: # Replace with a single custom sponsorship URL diff --git a/node_modules/qs/.nycrc b/node_modules/qs/.nycrc new file mode 100644 index 0000000..1d57cab --- /dev/null +++ b/node_modules/qs/.nycrc @@ -0,0 +1,13 @@ +{ + "all": true, + "check-coverage": false, + "reporter": ["text-summary", "text", "html", "json"], + "lines": 86, + "statements": 85.93, + "functions": 82.43, + "branches": 76.06, + "exclude": [ + "coverage", + "dist" + ] +} diff --git a/node_modules/qs/CHANGELOG.md b/node_modules/qs/CHANGELOG.md new file mode 100644 index 0000000..37b1d3f --- /dev/null +++ b/node_modules/qs/CHANGELOG.md @@ -0,0 +1,546 @@ +## **6.11.0 +- [New] [Fix] `stringify`: revert 0e903c0; add `commaRoundTrip` option (#442) +- [readme] fix version badge + +## **6.10.5** +- [Fix] `stringify`: with `arrayFormat: comma`, properly include an explicit `[]` on a single-item array (#434) + +## **6.10.4** +- [Fix] `stringify`: with `arrayFormat: comma`, include an explicit `[]` on a single-item array (#441) +- [meta] use `npmignore` to autogenerate an npmignore file +- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `aud`, `has-symbol`, `object-inspect`, `tape` + +## **6.10.3** +- [Fix] `parse`: ignore `__proto__` keys (#428) +- [Robustness] `stringify`: avoid relying on a global `undefined` (#427) +- [actions] reuse common workflows +- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `object-inspect`, `tape` + +## **6.10.2** +- [Fix] `stringify`: actually fix cyclic references (#426) +- [Fix] `stringify`: avoid encoding arrayformat comma when `encodeValuesOnly = true` (#424) +- [readme] remove travis badge; add github actions/codecov badges; update URLs +- [Docs] add note and links for coercing primitive values (#408) +- [actions] update codecov uploader +- [actions] update workflows +- [Tests] clean up stringify tests slightly +- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `aud`, `object-inspect`, `safe-publish-latest`, `tape` + +## **6.10.1** +- [Fix] `stringify`: avoid exception on repeated object values (#402) + +## **6.10.0** +- [New] `stringify`: throw on cycles, instead of an infinite loop (#395, #394, #393) +- [New] `parse`: add `allowSparse` option for collapsing arrays with missing indices (#312) +- [meta] fix README.md (#399) +- [meta] only run `npm run dist` in publish, not install +- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `aud`, `has-symbols`, `tape` +- [Tests] fix tests on node v0.6 +- [Tests] use `ljharb/actions/node/install` instead of `ljharb/actions/node/run` +- [Tests] Revert "[meta] ignore eclint transitive audit warning" + +## **6.9.7** +- [Fix] `parse`: ignore `__proto__` keys (#428) +- [Fix] `stringify`: avoid encoding arrayformat comma when 
`encodeValuesOnly = true` (#424) +- [Robustness] `stringify`: avoid relying on a global `undefined` (#427) +- [readme] remove travis badge; add github actions/codecov badges; update URLs +- [Docs] add note and links for coercing primitive values (#408) +- [Tests] clean up stringify tests slightly +- [meta] fix README.md (#399) +- Revert "[meta] ignore eclint transitive audit warning" +- [actions] backport actions from main +- [Dev Deps] backport updates from main + +## **6.9.6** +- [Fix] restore `dist` dir; mistakenly removed in d4f6c32 + +## **6.9.5** +- [Fix] `stringify`: do not encode parens for RFC1738 +- [Fix] `stringify`: fix arrayFormat comma with empty array/objects (#350) +- [Refactor] `format`: remove `util.assign` call +- [meta] add "Allow Edits" workflow; update rebase workflow +- [actions] switch Automatic Rebase workflow to `pull_request_target` event +- [Tests] `stringify`: add tests for #378 +- [Tests] migrate tests to Github Actions +- [Tests] run `nyc` on all tests; use `tape` runner +- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `browserify`, `mkdirp`, `object-inspect`, `tape`; add `aud` + +## **6.9.4** +- [Fix] `stringify`: when `arrayFormat` is `comma`, respect `serializeDate` (#364) +- [Refactor] `stringify`: reduce branching (part of #350) +- [Refactor] move `maybeMap` to `utils` +- [Dev Deps] update `browserify`, `tape` + +## **6.9.3** +- [Fix] proper comma parsing of URL-encoded commas (#361) +- [Fix] parses comma delimited array while having percent-encoded comma treated as normal text (#336) + +## **6.9.2** +- [Fix] `parse`: Fix parsing array from object with `comma` true (#359) +- [Fix] `parse`: throw a TypeError instead of an Error for bad charset (#349) +- [meta] ignore eclint transitive audit warning +- [meta] fix indentation in package.json +- [meta] add tidelift marketing copy +- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `object-inspect`, `has-symbols`, `tape`, `mkdirp`, `iconv-lite` +- [actions] add automatic rebasing / merge commit blocking + +## **6.9.1** +- [Fix] `parse`: with comma true, handle field that holds an array of arrays (#335) +- [Fix] `parse`: with comma true, do not split non-string values (#334) +- [meta] add `funding` field +- [Dev Deps] update `eslint`, `@ljharb/eslint-config` +- [Tests] use shared travis-ci config + +## **6.9.0** +- [New] `parse`/`stringify`: Pass extra key/value argument to `decoder` (#333) +- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `evalmd` +- [Tests] `parse`: add passing `arrayFormat` tests +- [Tests] add `posttest` using `npx aud` to run `npm audit` without a lockfile +- [Tests] up to `node` `v12.10`, `v11.15`, `v10.16`, `v8.16` +- [Tests] `Buffer.from` in node v5.0-v5.9 and v4.0-v4.4 requires a TypedArray + +## **6.8.3** +- [Fix] `parse`: ignore `__proto__` keys (#428) +- [Robustness] `stringify`: avoid relying on a global `undefined` (#427) +- [Fix] `stringify`: avoid encoding arrayformat comma when `encodeValuesOnly = true` (#424) +- [readme] remove travis badge; add github actions/codecov badges; update URLs +- [Tests] clean up stringify tests slightly +- [Docs] add note and links for coercing primitive values (#408) +- [meta] fix README.md (#399) +- [actions] backport actions from main +- [Dev Deps] backport updates from main +- [Refactor] `stringify`: reduce branching +- [meta] do not publish workflow files + +## **6.8.2** +- [Fix] proper comma parsing of URL-encoded commas (#361) +- [Fix] parses comma delimited array while having percent-encoded comma treated as normal text 
(#336) + +## **6.8.1** +- [Fix] `parse`: Fix parsing array from object with `comma` true (#359) +- [Fix] `parse`: throw a TypeError instead of an Error for bad charset (#349) +- [Fix] `parse`: with comma true, handle field that holds an array of arrays (#335) +- [fix] `parse`: with comma true, do not split non-string values (#334) +- [meta] add tidelift marketing copy +- [meta] add `funding` field +- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `tape`, `safe-publish-latest`, `evalmd`, `has-symbols`, `iconv-lite`, `mkdirp`, `object-inspect` +- [Tests] `parse`: add passing `arrayFormat` tests +- [Tests] use shared travis-ci configs +- [Tests] `Buffer.from` in node v5.0-v5.9 and v4.0-v4.4 requires a TypedArray +- [actions] add automatic rebasing / merge commit blocking + +## **6.8.0** +- [New] add `depth=false` to preserve the original key; [Fix] `depth=0` should preserve the original key (#326) +- [New] [Fix] stringify symbols and bigints +- [Fix] ensure node 0.12 can stringify Symbols +- [Fix] fix for an impossible situation: when the formatter is called with a non-string value +- [Refactor] `formats`: tiny bit of cleanup. +- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `browserify`, `safe-publish-latest`, `iconv-lite`, `tape` +- [Tests] add tests for `depth=0` and `depth=false` behavior, both current and intuitive/intended (#326) +- [Tests] use `eclint` instead of `editorconfig-tools` +- [docs] readme: add security note +- [meta] add github sponsorship +- [meta] add FUNDING.yml +- [meta] Clean up license text so it’s properly detected as BSD-3-Clause + +## **6.7.3** +- [Fix] `parse`: ignore `__proto__` keys (#428) +- [Fix] `stringify`: avoid encoding arrayformat comma when `encodeValuesOnly = true` (#424) +- [Robustness] `stringify`: avoid relying on a global `undefined` (#427) +- [readme] remove travis badge; add github actions/codecov badges; update URLs +- [Docs] add note and links for coercing primitive values (#408) +- [meta] fix README.md (#399) +- [meta] do not publish workflow files +- [actions] backport actions from main +- [Dev Deps] backport updates from main +- [Tests] use `nyc` for coverage +- [Tests] clean up stringify tests slightly + +## **6.7.2** +- [Fix] proper comma parsing of URL-encoded commas (#361) +- [Fix] parses comma delimited array while having percent-encoded comma treated as normal text (#336) + +## **6.7.1** +- [Fix] `parse`: Fix parsing array from object with `comma` true (#359) +- [Fix] `parse`: with comma true, handle field that holds an array of arrays (#335) +- [fix] `parse`: with comma true, do not split non-string values (#334) +- [Fix] `parse`: throw a TypeError instead of an Error for bad charset (#349) +- [Fix] fix for an impossible situation: when the formatter is called with a non-string value +- [Refactor] `formats`: tiny bit of cleanup. 
+- readme: add security note +- [meta] add tidelift marketing copy +- [meta] add `funding` field +- [meta] add FUNDING.yml +- [meta] Clean up license text so it’s properly detected as BSD-3-Clause +- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `tape`, `safe-publish-latest`, `evalmd`, `iconv-lite`, `mkdirp`, `object-inspect`, `browserify` +- [Tests] `parse`: add passing `arrayFormat` tests +- [Tests] use shared travis-ci configs +- [Tests] `Buffer.from` in node v5.0-v5.9 and v4.0-v4.4 requires a TypedArray +- [Tests] add tests for `depth=0` and `depth=false` behavior, both current and intuitive/intended +- [Tests] use `eclint` instead of `editorconfig-tools` +- [actions] add automatic rebasing / merge commit blocking + +## **6.7.0** +- [New] `stringify`/`parse`: add `comma` as an `arrayFormat` option (#276, #219) +- [Fix] correctly parse nested arrays (#212) +- [Fix] `utils.merge`: avoid a crash with a null target and a truthy non-array source, also with an array source +- [Robustness] `stringify`: cache `Object.prototype.hasOwnProperty` +- [Refactor] `utils`: `isBuffer`: small tweak; add tests +- [Refactor] use cached `Array.isArray` +- [Refactor] `parse`/`stringify`: make a function to normalize the options +- [Refactor] `utils`: reduce observable [[Get]]s +- [Refactor] `stringify`/`utils`: cache `Array.isArray` +- [Tests] always use `String(x)` over `x.toString()` +- [Tests] fix Buffer tests to work in node < 4.5 and node < 5.10 +- [Tests] temporarily allow coverage to fail + +## **6.6.1** +- [Fix] `parse`: ignore `__proto__` keys (#428) +- [Fix] fix for an impossible situation: when the formatter is called with a non-string value +- [Fix] `utils.merge`: avoid a crash with a null target and an array source +- [Fix] `utils.merge`: avoid a crash with a null target and a truthy non-array source +- [Fix] correctly parse nested arrays +- [Robustness] `stringify`: avoid relying on a global `undefined` (#427) +- [Robustness] `stringify`: cache `Object.prototype.hasOwnProperty` +- [Refactor] `formats`: tiny bit of cleanup. 
+- [Refactor] `utils`: `isBuffer`: small tweak; add tests +- [Refactor]: `stringify`/`utils`: cache `Array.isArray` +- [Refactor] `utils`: reduce observable [[Get]]s +- [Refactor] use cached `Array.isArray` +- [Refactor] `parse`/`stringify`: make a function to normalize the options +- [readme] remove travis badge; add github actions/codecov badges; update URLs +- [Docs] Clarify the need for "arrayLimit" option +- [meta] fix README.md (#399) +- [meta] do not publish workflow files +- [meta] Clean up license text so it’s properly detected as BSD-3-Clause +- [meta] add FUNDING.yml +- [meta] Fixes typo in CHANGELOG.md +- [actions] backport actions from main +- [Tests] fix Buffer tests to work in node < 4.5 and node < 5.10 +- [Tests] always use `String(x)` over `x.toString()` +- [Dev Deps] backport from main + +## **6.6.0** +- [New] Add support for iso-8859-1, utf8 "sentinel" and numeric entities (#268) +- [New] move two-value combine to a `utils` function (#189) +- [Fix] `stringify`: fix a crash with `strictNullHandling` and a custom `filter`/`serializeDate` (#279) +- [Fix] when `parseArrays` is false, properly handle keys ending in `[]` (#260) +- [Fix] `stringify`: do not crash in an obscure combo of `interpretNumericEntities`, a bad custom `decoder`, & `iso-8859-1` +- [Fix] `utils`: `merge`: fix crash when `source` is a truthy primitive & no options are provided +- [refactor] `stringify`: Avoid arr = arr.concat(...), push to the existing instance (#269) +- [Refactor] `parse`: only need to reassign the var once +- [Refactor] `parse`/`stringify`: clean up `charset` options checking; fix defaults +- [Refactor] add missing defaults +- [Refactor] `parse`: one less `concat` call +- [Refactor] `utils`: `compactQueue`: make it explicitly side-effecting +- [Dev Deps] update `browserify`, `eslint`, `@ljharb/eslint-config`, `iconv-lite`, `safe-publish-latest`, `tape` +- [Tests] up to `node` `v10.10`, `v9.11`, `v8.12`, `v6.14`, `v4.9`; pin included builds to LTS + +## **6.5.3** +- [Fix] `parse`: ignore `__proto__` keys (#428) +- [Fix]` `utils.merge`: avoid a crash with a null target and a truthy non-array source +- [Fix] correctly parse nested arrays +- [Fix] `stringify`: fix a crash with `strictNullHandling` and a custom `filter`/`serializeDate` (#279) +- [Fix] `utils`: `merge`: fix crash when `source` is a truthy primitive & no options are provided +- [Fix] when `parseArrays` is false, properly handle keys ending in `[]` +- [Fix] fix for an impossible situation: when the formatter is called with a non-string value +- [Fix] `utils.merge`: avoid a crash with a null target and an array source +- [Refactor] `utils`: reduce observable [[Get]]s +- [Refactor] use cached `Array.isArray` +- [Refactor] `stringify`: Avoid arr = arr.concat(...), push to the existing instance (#269) +- [Refactor] `parse`: only need to reassign the var once +- [Robustness] `stringify`: avoid relying on a global `undefined` (#427) +- [readme] remove travis badge; add github actions/codecov badges; update URLs +- [Docs] Clean up license text so it’s properly detected as BSD-3-Clause +- [Docs] Clarify the need for "arrayLimit" option +- [meta] fix README.md (#399) +- [meta] add FUNDING.yml +- [actions] backport actions from main +- [Tests] always use `String(x)` over `x.toString()` +- [Tests] remove nonexistent tape option +- [Dev Deps] backport from main + +## **6.5.2** +- [Fix] use `safer-buffer` instead of `Buffer` constructor +- [Refactor] utils: `module.exports` one thing, instead of mutating `exports` (#230) +- [Dev Deps] update 
`browserify`, `eslint`, `iconv-lite`, `safer-buffer`, `tape`, `browserify` + +## **6.5.1** +- [Fix] Fix parsing & compacting very deep objects (#224) +- [Refactor] name utils functions +- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `tape` +- [Tests] up to `node` `v8.4`; use `nvm install-latest-npm` so newer npm doesn’t break older node +- [Tests] Use precise dist for Node.js 0.6 runtime (#225) +- [Tests] make 0.6 required, now that it’s passing +- [Tests] on `node` `v8.2`; fix npm on node 0.6 + +## **6.5.0** +- [New] add `utils.assign` +- [New] pass default encoder/decoder to custom encoder/decoder functions (#206) +- [New] `parse`/`stringify`: add `ignoreQueryPrefix`/`addQueryPrefix` options, respectively (#213) +- [Fix] Handle stringifying empty objects with addQueryPrefix (#217) +- [Fix] do not mutate `options` argument (#207) +- [Refactor] `parse`: cache index to reuse in else statement (#182) +- [Docs] add various badges to readme (#208) +- [Dev Deps] update `eslint`, `browserify`, `iconv-lite`, `tape` +- [Tests] up to `node` `v8.1`, `v7.10`, `v6.11`; npm v4.6 breaks on node < v1; npm v5+ breaks on node < v4 +- [Tests] add `editorconfig-tools` + +## **6.4.1** +- [Fix] `parse`: ignore `__proto__` keys (#428) +- [Fix] fix for an impossible situation: when the formatter is called with a non-string value +- [Fix] use `safer-buffer` instead of `Buffer` constructor +- [Fix] `utils.merge`: avoid a crash with a null target and an array source +- [Fix]` `utils.merge`: avoid a crash with a null target and a truthy non-array source +- [Fix] `stringify`: fix a crash with `strictNullHandling` and a custom `filter`/`serializeDate` (#279) +- [Fix] `utils`: `merge`: fix crash when `source` is a truthy primitive & no options are provided +- [Fix] when `parseArrays` is false, properly handle keys ending in `[]` +- [Robustness] `stringify`: avoid relying on a global `undefined` (#427) +- [Refactor] use cached `Array.isArray` +- [Refactor] `stringify`: Avoid arr = arr.concat(...), push to the existing instance (#269) +- [readme] remove travis badge; add github actions/codecov badges; update URLs +- [Docs] Clarify the need for "arrayLimit" option +- [meta] fix README.md (#399) +- [meta] Clean up license text so it’s properly detected as BSD-3-Clause +- [meta] add FUNDING.yml +- [actions] backport actions from main +- [Tests] remove nonexistent tape option +- [Dev Deps] backport from main + +## **6.4.0** +- [New] `qs.stringify`: add `encodeValuesOnly` option +- [Fix] follow `allowPrototypes` option during merge (#201, #201) +- [Fix] support keys starting with brackets (#202, #200) +- [Fix] chmod a-x +- [Dev Deps] update `eslint` +- [Tests] up to `node` `v7.7`, `v6.10`,` v4.8`; disable osx builds since they block linux builds +- [eslint] reduce warnings + +## **6.3.3** +- [Fix] `parse`: ignore `__proto__` keys (#428) +- [Fix] fix for an impossible situation: when the formatter is called with a non-string value +- [Fix] `utils.merge`: avoid a crash with a null target and an array source +- [Fix]` `utils.merge`: avoid a crash with a null target and a truthy non-array source +- [Fix] `stringify`: fix a crash with `strictNullHandling` and a custom `filter`/`serializeDate` (#279) +- [Fix] `utils`: `merge`: fix crash when `source` is a truthy primitive & no options are provided +- [Fix] when `parseArrays` is false, properly handle keys ending in `[]` +- [Robustness] `stringify`: avoid relying on a global `undefined` (#427) +- [Refactor] use cached `Array.isArray` +- [Refactor] `stringify`: Avoid arr = 
arr.concat(...), push to the existing instance (#269) +- [Docs] Clarify the need for "arrayLimit" option +- [meta] fix README.md (#399) +- [meta] Clean up license text so it’s properly detected as BSD-3-Clause +- [meta] add FUNDING.yml +- [actions] backport actions from main +- [Tests] use `safer-buffer` instead of `Buffer` constructor +- [Tests] remove nonexistent tape option +- [Dev Deps] backport from main + +## **6.3.2** +- [Fix] follow `allowPrototypes` option during merge (#201, #200) +- [Dev Deps] update `eslint` +- [Fix] chmod a-x +- [Fix] support keys starting with brackets (#202, #200) +- [Tests] up to `node` `v7.7`, `v6.10`,` v4.8`; disable osx builds since they block linux builds + +## **6.3.1** +- [Fix] ensure that `allowPrototypes: false` does not ever shadow Object.prototype properties (thanks, @snyk!) +- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `browserify`, `iconv-lite`, `qs-iconv`, `tape` +- [Tests] on all node minors; improve test matrix +- [Docs] document stringify option `allowDots` (#195) +- [Docs] add empty object and array values example (#195) +- [Docs] Fix minor inconsistency/typo (#192) +- [Docs] document stringify option `sort` (#191) +- [Refactor] `stringify`: throw faster with an invalid encoder +- [Refactor] remove unnecessary escapes (#184) +- Remove contributing.md, since `qs` is no longer part of `hapi` (#183) + +## **6.3.0** +- [New] Add support for RFC 1738 (#174, #173) +- [New] `stringify`: Add `serializeDate` option to customize Date serialization (#159) +- [Fix] ensure `utils.merge` handles merging two arrays +- [Refactor] only constructors should be capitalized +- [Refactor] capitalized var names are for constructors only +- [Refactor] avoid using a sparse array +- [Robustness] `formats`: cache `String#replace` +- [Dev Deps] update `browserify`, `eslint`, `@ljharb/eslint-config`; add `safe-publish-latest` +- [Tests] up to `node` `v6.8`, `v4.6`; improve test matrix +- [Tests] flesh out arrayLimit/arrayFormat tests (#107) +- [Tests] skip Object.create tests when null objects are not available +- [Tests] Turn on eslint for test files (#175) + +## **6.2.4** +- [Fix] `parse`: ignore `__proto__` keys (#428) +- [Fix] `utils.merge`: avoid a crash with a null target and an array source +- [Fix] `utils.merge`: avoid a crash with a null target and a truthy non-array source +- [Fix] `utils`: `merge`: fix crash when `source` is a truthy primitive & no options are provided +- [Fix] when `parseArrays` is false, properly handle keys ending in `[]` +- [Robustness] `stringify`: avoid relying on a global `undefined` (#427) +- [Refactor] use cached `Array.isArray` +- [Docs] Clarify the need for "arrayLimit" option +- [meta] fix README.md (#399) +- [meta] Clean up license text so it’s properly detected as BSD-3-Clause +- [meta] add FUNDING.yml +- [actions] backport actions from main +- [Tests] use `safer-buffer` instead of `Buffer` constructor +- [Tests] remove nonexistent tape option +- [Dev Deps] backport from main + +## **6.2.3** +- [Fix] follow `allowPrototypes` option during merge (#201, #200) +- [Fix] chmod a-x +- [Fix] support keys starting with brackets (#202, #200) +- [Tests] up to `node` `v7.7`, `v6.10`,` v4.8`; disable osx builds since they block linux builds + +## **6.2.2** +- [Fix] ensure that `allowPrototypes: false` does not ever shadow Object.prototype properties + +## **6.2.1** +- [Fix] ensure `key[]=x&key[]&key[]=y` results in 3, not 2, values +- [Refactor] Be explicit and use `Object.prototype.hasOwnProperty.call` +- [Tests] remove 
`parallelshell` since it does not reliably report failures +- [Tests] up to `node` `v6.3`, `v5.12` +- [Dev Deps] update `tape`, `eslint`, `@ljharb/eslint-config`, `qs-iconv` + +## [**6.2.0**](https://github.com/ljharb/qs/issues?milestone=36&state=closed) +- [New] pass Buffers to the encoder/decoder directly (#161) +- [New] add "encoder" and "decoder" options, for custom param encoding/decoding (#160) +- [Fix] fix compacting of nested sparse arrays (#150) + +## **6.1.2 +- [Fix] follow `allowPrototypes` option during merge (#201, #200) +- [Fix] chmod a-x +- [Fix] support keys starting with brackets (#202, #200) +- [Tests] up to `node` `v7.7`, `v6.10`,` v4.8`; disable osx builds since they block linux builds + +## **6.1.1** +- [Fix] ensure that `allowPrototypes: false` does not ever shadow Object.prototype properties + +## [**6.1.0**](https://github.com/ljharb/qs/issues?milestone=35&state=closed) +- [New] allowDots option for `stringify` (#151) +- [Fix] "sort" option should work at a depth of 3 or more (#151) +- [Fix] Restore `dist` directory; will be removed in v7 (#148) + +## **6.0.4** +- [Fix] follow `allowPrototypes` option during merge (#201, #200) +- [Fix] chmod a-x +- [Fix] support keys starting with brackets (#202, #200) +- [Tests] up to `node` `v7.7`, `v6.10`,` v4.8`; disable osx builds since they block linux builds + +## **6.0.3** +- [Fix] ensure that `allowPrototypes: false` does not ever shadow Object.prototype properties +- [Fix] Restore `dist` directory; will be removed in v7 (#148) + +## [**6.0.2**](https://github.com/ljharb/qs/issues?milestone=33&state=closed) +- Revert ES6 requirement and restore support for node down to v0.8. + +## [**6.0.1**](https://github.com/ljharb/qs/issues?milestone=32&state=closed) +- [**#127**](https://github.com/ljharb/qs/pull/127) Fix engines definition in package.json + +## [**6.0.0**](https://github.com/ljharb/qs/issues?milestone=31&state=closed) +- [**#124**](https://github.com/ljharb/qs/issues/124) Use ES6 and drop support for node < v4 + +## **5.2.1** +- [Fix] ensure `key[]=x&key[]&key[]=y` results in 3, not 2, values + +## [**5.2.0**](https://github.com/ljharb/qs/issues?milestone=30&state=closed) +- [**#64**](https://github.com/ljharb/qs/issues/64) Add option to sort object keys in the query string + +## [**5.1.0**](https://github.com/ljharb/qs/issues?milestone=29&state=closed) +- [**#117**](https://github.com/ljharb/qs/issues/117) make URI encoding stringified results optional +- [**#106**](https://github.com/ljharb/qs/issues/106) Add flag `skipNulls` to optionally skip null values in stringify + +## [**5.0.0**](https://github.com/ljharb/qs/issues?milestone=28&state=closed) +- [**#114**](https://github.com/ljharb/qs/issues/114) default allowDots to false +- [**#100**](https://github.com/ljharb/qs/issues/100) include dist to npm + +## [**4.0.0**](https://github.com/ljharb/qs/issues?milestone=26&state=closed) +- [**#98**](https://github.com/ljharb/qs/issues/98) make returning plain objects and allowing prototype overwriting properties optional + +## [**3.1.0**](https://github.com/ljharb/qs/issues?milestone=24&state=closed) +- [**#89**](https://github.com/ljharb/qs/issues/89) Add option to disable "Transform dot notation to bracket notation" + +## [**3.0.0**](https://github.com/ljharb/qs/issues?milestone=23&state=closed) +- [**#80**](https://github.com/ljharb/qs/issues/80) qs.parse silently drops properties +- [**#77**](https://github.com/ljharb/qs/issues/77) Perf boost +- [**#60**](https://github.com/ljharb/qs/issues/60) Add explicit option to 
disable array parsing +- [**#74**](https://github.com/ljharb/qs/issues/74) Bad parse when turning array into object +- [**#81**](https://github.com/ljharb/qs/issues/81) Add a `filter` option +- [**#68**](https://github.com/ljharb/qs/issues/68) Fixed issue with recursion and passing strings into objects. +- [**#66**](https://github.com/ljharb/qs/issues/66) Add mixed array and object dot notation support Closes: #47 +- [**#76**](https://github.com/ljharb/qs/issues/76) RFC 3986 +- [**#85**](https://github.com/ljharb/qs/issues/85) No equal sign +- [**#84**](https://github.com/ljharb/qs/issues/84) update license attribute + +## [**2.4.1**](https://github.com/ljharb/qs/issues?milestone=20&state=closed) +- [**#73**](https://github.com/ljharb/qs/issues/73) Property 'hasOwnProperty' of object # is not a function + +## [**2.4.0**](https://github.com/ljharb/qs/issues?milestone=19&state=closed) +- [**#70**](https://github.com/ljharb/qs/issues/70) Add arrayFormat option + +## [**2.3.3**](https://github.com/ljharb/qs/issues?milestone=18&state=closed) +- [**#59**](https://github.com/ljharb/qs/issues/59) make sure array indexes are >= 0, closes #57 +- [**#58**](https://github.com/ljharb/qs/issues/58) make qs usable for browser loader + +## [**2.3.2**](https://github.com/ljharb/qs/issues?milestone=17&state=closed) +- [**#55**](https://github.com/ljharb/qs/issues/55) allow merging a string into an object + +## [**2.3.1**](https://github.com/ljharb/qs/issues?milestone=16&state=closed) +- [**#52**](https://github.com/ljharb/qs/issues/52) Return "undefined" and "false" instead of throwing "TypeError". + +## [**2.3.0**](https://github.com/ljharb/qs/issues?milestone=15&state=closed) +- [**#50**](https://github.com/ljharb/qs/issues/50) add option to omit array indices, closes #46 + +## [**2.2.5**](https://github.com/ljharb/qs/issues?milestone=14&state=closed) +- [**#39**](https://github.com/ljharb/qs/issues/39) Is there an alternative to Buffer.isBuffer? +- [**#49**](https://github.com/ljharb/qs/issues/49) refactor utils.merge, fixes #45 +- [**#41**](https://github.com/ljharb/qs/issues/41) avoid browserifying Buffer, for #39 + +## [**2.2.4**](https://github.com/ljharb/qs/issues?milestone=13&state=closed) +- [**#38**](https://github.com/ljharb/qs/issues/38) how to handle object keys beginning with a number + +## [**2.2.3**](https://github.com/ljharb/qs/issues?milestone=12&state=closed) +- [**#37**](https://github.com/ljharb/qs/issues/37) parser discards first empty value in array +- [**#36**](https://github.com/ljharb/qs/issues/36) Update to lab 4.x + +## [**2.2.2**](https://github.com/ljharb/qs/issues?milestone=11&state=closed) +- [**#33**](https://github.com/ljharb/qs/issues/33) Error when plain object in a value +- [**#34**](https://github.com/ljharb/qs/issues/34) use Object.prototype.hasOwnProperty.call instead of obj.hasOwnProperty +- [**#24**](https://github.com/ljharb/qs/issues/24) Changelog? Semver? 
+ +## [**2.2.1**](https://github.com/ljharb/qs/issues?milestone=10&state=closed) +- [**#32**](https://github.com/ljharb/qs/issues/32) account for circular references properly, closes #31 +- [**#31**](https://github.com/ljharb/qs/issues/31) qs.parse stackoverflow on circular objects + +## [**2.2.0**](https://github.com/ljharb/qs/issues?milestone=9&state=closed) +- [**#26**](https://github.com/ljharb/qs/issues/26) Don't use Buffer global if it's not present +- [**#30**](https://github.com/ljharb/qs/issues/30) Bug when merging non-object values into arrays +- [**#29**](https://github.com/ljharb/qs/issues/29) Don't call Utils.clone at the top of Utils.merge +- [**#23**](https://github.com/ljharb/qs/issues/23) Ability to not limit parameters? + +## [**2.1.0**](https://github.com/ljharb/qs/issues?milestone=8&state=closed) +- [**#22**](https://github.com/ljharb/qs/issues/22) Enable using a RegExp as delimiter + +## [**2.0.0**](https://github.com/ljharb/qs/issues?milestone=7&state=closed) +- [**#18**](https://github.com/ljharb/qs/issues/18) Why is there arrayLimit? +- [**#20**](https://github.com/ljharb/qs/issues/20) Configurable parametersLimit +- [**#21**](https://github.com/ljharb/qs/issues/21) make all limits optional, for #18, for #20 + +## [**1.2.2**](https://github.com/ljharb/qs/issues?milestone=6&state=closed) +- [**#19**](https://github.com/ljharb/qs/issues/19) Don't overwrite null values + +## [**1.2.1**](https://github.com/ljharb/qs/issues?milestone=5&state=closed) +- [**#16**](https://github.com/ljharb/qs/issues/16) ignore non-string delimiters +- [**#15**](https://github.com/ljharb/qs/issues/15) Close code block + +## [**1.2.0**](https://github.com/ljharb/qs/issues?milestone=4&state=closed) +- [**#12**](https://github.com/ljharb/qs/issues/12) Add optional delim argument +- [**#13**](https://github.com/ljharb/qs/issues/13) fix #11: flattened keys in array are now correctly parsed + +## [**1.1.0**](https://github.com/ljharb/qs/issues?milestone=3&state=closed) +- [**#7**](https://github.com/ljharb/qs/issues/7) Empty values of a POST array disappear after being submitted +- [**#9**](https://github.com/ljharb/qs/issues/9) Should not omit equals signs (=) when value is null +- [**#6**](https://github.com/ljharb/qs/issues/6) Minor grammar fix in README + +## [**1.0.2**](https://github.com/ljharb/qs/issues?milestone=2&state=closed) +- [**#5**](https://github.com/ljharb/qs/issues/5) array holes incorrectly copied into object on large index diff --git a/node_modules/qs/LICENSE.md b/node_modules/qs/LICENSE.md new file mode 100644 index 0000000..fecf6b6 --- /dev/null +++ b/node_modules/qs/LICENSE.md @@ -0,0 +1,29 @@ +BSD 3-Clause License + +Copyright (c) 2014, Nathan LaFreniere and other [contributors](https://github.com/ljharb/qs/graphs/contributors) +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/node_modules/qs/README.md b/node_modules/qs/README.md new file mode 100644 index 0000000..11be853 --- /dev/null +++ b/node_modules/qs/README.md @@ -0,0 +1,625 @@ +# qs [![Version Badge][npm-version-svg]][package-url] + +[![github actions][actions-image]][actions-url] +[![coverage][codecov-image]][codecov-url] +[![dependency status][deps-svg]][deps-url] +[![dev dependency status][dev-deps-svg]][dev-deps-url] +[![License][license-image]][license-url] +[![Downloads][downloads-image]][downloads-url] + +[![npm badge][npm-badge-png]][package-url] + +A querystring parsing and stringifying library with some added security. + +Lead Maintainer: [Jordan Harband](https://github.com/ljharb) + +The **qs** module was originally created and maintained by [TJ Holowaychuk](https://github.com/visionmedia/node-querystring). + +## Usage + +```javascript +var qs = require('qs'); +var assert = require('assert'); + +var obj = qs.parse('a=c'); +assert.deepEqual(obj, { a: 'c' }); + +var str = qs.stringify(obj); +assert.equal(str, 'a=c'); +``` + +### Parsing Objects + +[](#preventEval) +```javascript +qs.parse(string, [options]); +``` + +**qs** allows you to create nested objects within your query strings, by surrounding the name of sub-keys with square brackets `[]`. +For example, the string `'foo[bar]=baz'` converts to: + +```javascript +assert.deepEqual(qs.parse('foo[bar]=baz'), { + foo: { + bar: 'baz' + } +}); +``` + +When using the `plainObjects` option the parsed value is returned as a null object, created via `Object.create(null)` and as such you should be aware that prototype methods will not exist on it and a user may set those names to whatever value they like: + +```javascript +var nullObject = qs.parse('a[hasOwnProperty]=b', { plainObjects: true }); +assert.deepEqual(nullObject, { a: { hasOwnProperty: 'b' } }); +``` + +By default parameters that would overwrite properties on the object prototype are ignored, if you wish to keep the data from those fields either use `plainObjects` as mentioned above, or set `allowPrototypes` to `true` which will allow user input to overwrite those properties. *WARNING* It is generally a bad idea to enable this option as it can cause problems when attempting to use the properties that have been overwritten. Always be careful with this option. 
+ +```javascript +var protoObject = qs.parse('a[hasOwnProperty]=b', { allowPrototypes: true }); +assert.deepEqual(protoObject, { a: { hasOwnProperty: 'b' } }); +``` + +URI encoded strings work too: + +```javascript +assert.deepEqual(qs.parse('a%5Bb%5D=c'), { + a: { b: 'c' } +}); +``` + +You can also nest your objects, like `'foo[bar][baz]=foobarbaz'`: + +```javascript +assert.deepEqual(qs.parse('foo[bar][baz]=foobarbaz'), { + foo: { + bar: { + baz: 'foobarbaz' + } + } +}); +``` + +By default, when nesting objects **qs** will only parse up to 5 children deep. This means if you attempt to parse a string like +`'a[b][c][d][e][f][g][h][i]=j'` your resulting object will be: + +```javascript +var expected = { + a: { + b: { + c: { + d: { + e: { + f: { + '[g][h][i]': 'j' + } + } + } + } + } + } +}; +var string = 'a[b][c][d][e][f][g][h][i]=j'; +assert.deepEqual(qs.parse(string), expected); +``` + +This depth can be overridden by passing a `depth` option to `qs.parse(string, [options])`: + +```javascript +var deep = qs.parse('a[b][c][d][e][f][g][h][i]=j', { depth: 1 }); +assert.deepEqual(deep, { a: { b: { '[c][d][e][f][g][h][i]': 'j' } } }); +``` + +The depth limit helps mitigate abuse when **qs** is used to parse user input, and it is recommended to keep it a reasonably small number. + +For similar reasons, by default **qs** will only parse up to 1000 parameters. This can be overridden by passing a `parameterLimit` option: + +```javascript +var limited = qs.parse('a=b&c=d', { parameterLimit: 1 }); +assert.deepEqual(limited, { a: 'b' }); +``` + +To bypass the leading question mark, use `ignoreQueryPrefix`: + +```javascript +var prefixed = qs.parse('?a=b&c=d', { ignoreQueryPrefix: true }); +assert.deepEqual(prefixed, { a: 'b', c: 'd' }); +``` + +An optional delimiter can also be passed: + +```javascript +var delimited = qs.parse('a=b;c=d', { delimiter: ';' }); +assert.deepEqual(delimited, { a: 'b', c: 'd' }); +``` + +Delimiters can be a regular expression too: + +```javascript +var regexed = qs.parse('a=b;c=d,e=f', { delimiter: /[;,]/ }); +assert.deepEqual(regexed, { a: 'b', c: 'd', e: 'f' }); +``` + +Option `allowDots` can be used to enable dot notation: + +```javascript +var withDots = qs.parse('a.b=c', { allowDots: true }); +assert.deepEqual(withDots, { a: { b: 'c' } }); +``` + +If you have to deal with legacy browsers or services, there's +also support for decoding percent-encoded octets as iso-8859-1: + +```javascript +var oldCharset = qs.parse('a=%A7', { charset: 'iso-8859-1' }); +assert.deepEqual(oldCharset, { a: '§' }); +``` + +Some services add an initial `utf8=✓` value to forms so that old +Internet Explorer versions are more likely to submit the form as +utf-8. Additionally, the server can check the value against wrong +encodings of the checkmark character and detect that a query string +or `application/x-www-form-urlencoded` body was *not* sent as +utf-8, eg. if the form had an `accept-charset` parameter or the +containing page had a different character set. + +**qs** supports this mechanism via the `charsetSentinel` option. +If specified, the `utf8` parameter will be omitted from the +returned object. It will be used to switch to `iso-8859-1`/`utf-8` +mode depending on how the checkmark is encoded. + +**Important**: When you specify both the `charset` option and the +`charsetSentinel` option, the `charset` will be overridden when +the request contains a `utf8` parameter from which the actual +charset can be deduced. 
In that sense the `charset` will behave +as the default charset rather than the authoritative charset. + +```javascript +var detectedAsUtf8 = qs.parse('utf8=%E2%9C%93&a=%C3%B8', { + charset: 'iso-8859-1', + charsetSentinel: true +}); +assert.deepEqual(detectedAsUtf8, { a: 'ø' }); + +// Browsers encode the checkmark as ✓ when submitting as iso-8859-1: +var detectedAsIso8859_1 = qs.parse('utf8=%26%2310003%3B&a=%F8', { + charset: 'utf-8', + charsetSentinel: true +}); +assert.deepEqual(detectedAsIso8859_1, { a: 'ø' }); +``` + +If you want to decode the `&#...;` syntax to the actual character, +you can specify the `interpretNumericEntities` option as well: + +```javascript +var detectedAsIso8859_1 = qs.parse('a=%26%239786%3B', { + charset: 'iso-8859-1', + interpretNumericEntities: true +}); +assert.deepEqual(detectedAsIso8859_1, { a: '☺' }); +``` + +It also works when the charset has been detected in `charsetSentinel` +mode. + +### Parsing Arrays + +**qs** can also parse arrays using a similar `[]` notation: + +```javascript +var withArray = qs.parse('a[]=b&a[]=c'); +assert.deepEqual(withArray, { a: ['b', 'c'] }); +``` + +You may specify an index as well: + +```javascript +var withIndexes = qs.parse('a[1]=c&a[0]=b'); +assert.deepEqual(withIndexes, { a: ['b', 'c'] }); +``` + +Note that the only difference between an index in an array and a key in an object is that the value between the brackets must be a number +to create an array. When creating arrays with specific indices, **qs** will compact a sparse array to only the existing values preserving +their order: + +```javascript +var noSparse = qs.parse('a[1]=b&a[15]=c'); +assert.deepEqual(noSparse, { a: ['b', 'c'] }); +``` + +You may also use `allowSparse` option to parse sparse arrays: + +```javascript +var sparseArray = qs.parse('a[1]=2&a[3]=5', { allowSparse: true }); +assert.deepEqual(sparseArray, { a: [, '2', , '5'] }); +``` + +Note that an empty string is also a value, and will be preserved: + +```javascript +var withEmptyString = qs.parse('a[]=&a[]=b'); +assert.deepEqual(withEmptyString, { a: ['', 'b'] }); + +var withIndexedEmptyString = qs.parse('a[0]=b&a[1]=&a[2]=c'); +assert.deepEqual(withIndexedEmptyString, { a: ['b', '', 'c'] }); +``` + +**qs** will also limit specifying indices in an array to a maximum index of `20`. Any array members with an index of greater than `20` will +instead be converted to an object with the index as the key. This is needed to handle cases when someone sent, for example, `a[999999999]` and it will take significant time to iterate over this huge array. + +```javascript +var withMaxIndex = qs.parse('a[100]=b'); +assert.deepEqual(withMaxIndex, { a: { '100': 'b' } }); +``` + +This limit can be overridden by passing an `arrayLimit` option: + +```javascript +var withArrayLimit = qs.parse('a[1]=b', { arrayLimit: 0 }); +assert.deepEqual(withArrayLimit, { a: { '1': 'b' } }); +``` + +To disable array parsing entirely, set `parseArrays` to `false`. 
+ +```javascript +var noParsingArrays = qs.parse('a[]=b', { parseArrays: false }); +assert.deepEqual(noParsingArrays, { a: { '0': 'b' } }); +``` + +If you mix notations, **qs** will merge the two items into an object: + +```javascript +var mixedNotation = qs.parse('a[0]=b&a[b]=c'); +assert.deepEqual(mixedNotation, { a: { '0': 'b', b: 'c' } }); +``` + +You can also create arrays of objects: + +```javascript +var arraysOfObjects = qs.parse('a[][b]=c'); +assert.deepEqual(arraysOfObjects, { a: [{ b: 'c' }] }); +``` + +Some people use comma to join array, **qs** can parse it: +```javascript +var arraysOfObjects = qs.parse('a=b,c', { comma: true }) +assert.deepEqual(arraysOfObjects, { a: ['b', 'c'] }) +``` +(_this cannot convert nested objects, such as `a={b:1},{c:d}`_) + +### Parsing primitive/scalar values (numbers, booleans, null, etc) + +By default, all values are parsed as strings. This behavior will not change and is explained in [issue #91](https://github.com/ljharb/qs/issues/91). + +```javascript +var primitiveValues = qs.parse('a=15&b=true&c=null'); +assert.deepEqual(primitiveValues, { a: '15', b: 'true', c: 'null' }); +``` + +If you wish to auto-convert values which look like numbers, booleans, and other values into their primitive counterparts, you can use the [query-types Express JS middleware](https://github.com/xpepermint/query-types) which will auto-convert all request query parameters. + +### Stringifying + +[](#preventEval) +```javascript +qs.stringify(object, [options]); +``` + +When stringifying, **qs** by default URI encodes output. Objects are stringified as you would expect: + +```javascript +assert.equal(qs.stringify({ a: 'b' }), 'a=b'); +assert.equal(qs.stringify({ a: { b: 'c' } }), 'a%5Bb%5D=c'); +``` + +This encoding can be disabled by setting the `encode` option to `false`: + +```javascript +var unencoded = qs.stringify({ a: { b: 'c' } }, { encode: false }); +assert.equal(unencoded, 'a[b]=c'); +``` + +Encoding can be disabled for keys by setting the `encodeValuesOnly` option to `true`: +```javascript +var encodedValues = qs.stringify( + { a: 'b', c: ['d', 'e=f'], f: [['g'], ['h']] }, + { encodeValuesOnly: true } +); +assert.equal(encodedValues,'a=b&c[0]=d&c[1]=e%3Df&f[0][0]=g&f[1][0]=h'); +``` + +This encoding can also be replaced by a custom encoding method set as `encoder` option: + +```javascript +var encoded = qs.stringify({ a: { b: 'c' } }, { encoder: function (str) { + // Passed in values `a`, `b`, `c` + return // Return encoded string +}}) +``` + +_(Note: the `encoder` option does not apply if `encode` is `false`)_ + +Analogue to the `encoder` there is a `decoder` option for `parse` to override decoding of properties and values: + +```javascript +var decoded = qs.parse('x=z', { decoder: function (str) { + // Passed in values `x`, `z` + return // Return decoded string +}}) +``` + +You can encode keys and values using different logic by using the type argument provided to the encoder: + +```javascript +var encoded = qs.stringify({ a: { b: 'c' } }, { encoder: function (str, defaultEncoder, charset, type) { + if (type === 'key') { + return // Encoded key + } else if (type === 'value') { + return // Encoded value + } +}}) +``` + +The type argument is also provided to the decoder: + +```javascript +var decoded = qs.parse('x=z', { decoder: function (str, defaultDecoder, charset, type) { + if (type === 'key') { + return // Decoded key + } else if (type === 'value') { + return // Decoded value + } +}}) +``` + +Examples beyond this point will be shown as though the output 
is not URI encoded for clarity. Please note that the return values in these cases *will* be URI encoded during real usage. + +When arrays are stringified, by default they are given explicit indices: + +```javascript +qs.stringify({ a: ['b', 'c', 'd'] }); +// 'a[0]=b&a[1]=c&a[2]=d' +``` + +You may override this by setting the `indices` option to `false`: + +```javascript +qs.stringify({ a: ['b', 'c', 'd'] }, { indices: false }); +// 'a=b&a=c&a=d' +``` + +You may use the `arrayFormat` option to specify the format of the output array: + +```javascript +qs.stringify({ a: ['b', 'c'] }, { arrayFormat: 'indices' }) +// 'a[0]=b&a[1]=c' +qs.stringify({ a: ['b', 'c'] }, { arrayFormat: 'brackets' }) +// 'a[]=b&a[]=c' +qs.stringify({ a: ['b', 'c'] }, { arrayFormat: 'repeat' }) +// 'a=b&a=c' +qs.stringify({ a: ['b', 'c'] }, { arrayFormat: 'comma' }) +// 'a=b,c' +``` + +Note: when using `arrayFormat` set to `'comma'`, you can also pass the `commaRoundTrip` option set to `true` or `false`, to append `[]` on single-item arrays, so that they can round trip through a parse. + +When objects are stringified, by default they use bracket notation: + +```javascript +qs.stringify({ a: { b: { c: 'd', e: 'f' } } }); +// 'a[b][c]=d&a[b][e]=f' +``` + +You may override this to use dot notation by setting the `allowDots` option to `true`: + +```javascript +qs.stringify({ a: { b: { c: 'd', e: 'f' } } }, { allowDots: true }); +// 'a.b.c=d&a.b.e=f' +``` + +Empty strings and null values will omit the value, but the equals sign (=) remains in place: + +```javascript +assert.equal(qs.stringify({ a: '' }), 'a='); +``` + +Key with no values (such as an empty object or array) will return nothing: + +```javascript +assert.equal(qs.stringify({ a: [] }), ''); +assert.equal(qs.stringify({ a: {} }), ''); +assert.equal(qs.stringify({ a: [{}] }), ''); +assert.equal(qs.stringify({ a: { b: []} }), ''); +assert.equal(qs.stringify({ a: { b: {}} }), ''); +``` + +Properties that are set to `undefined` will be omitted entirely: + +```javascript +assert.equal(qs.stringify({ a: null, b: undefined }), 'a='); +``` + +The query string may optionally be prepended with a question mark: + +```javascript +assert.equal(qs.stringify({ a: 'b', c: 'd' }, { addQueryPrefix: true }), '?a=b&c=d'); +``` + +The delimiter may be overridden with stringify as well: + +```javascript +assert.equal(qs.stringify({ a: 'b', c: 'd' }, { delimiter: ';' }), 'a=b;c=d'); +``` + +If you only want to override the serialization of `Date` objects, you can provide a `serializeDate` option: + +```javascript +var date = new Date(7); +assert.equal(qs.stringify({ a: date }), 'a=1970-01-01T00:00:00.007Z'.replace(/:/g, '%3A')); +assert.equal( + qs.stringify({ a: date }, { serializeDate: function (d) { return d.getTime(); } }), + 'a=7' +); +``` + +You may use the `sort` option to affect the order of parameter keys: + +```javascript +function alphabeticalSort(a, b) { + return a.localeCompare(b); +} +assert.equal(qs.stringify({ a: 'c', z: 'y', b : 'f' }, { sort: alphabeticalSort }), 'a=c&b=f&z=y'); +``` + +Finally, you can use the `filter` option to restrict which keys will be included in the stringified output. +If you pass a function, it will be called for each key to obtain the replacement value. Otherwise, if you +pass an array, it will be used to select properties and array indices for stringification: + +```javascript +function filterFunc(prefix, value) { + if (prefix == 'b') { + // Return an `undefined` value to omit a property. 
+ return; + } + if (prefix == 'e[f]') { + return value.getTime(); + } + if (prefix == 'e[g][0]') { + return value * 2; + } + return value; +} +qs.stringify({ a: 'b', c: 'd', e: { f: new Date(123), g: [2] } }, { filter: filterFunc }); +// 'a=b&c=d&e[f]=123&e[g][0]=4' +qs.stringify({ a: 'b', c: 'd', e: 'f' }, { filter: ['a', 'e'] }); +// 'a=b&e=f' +qs.stringify({ a: ['b', 'c', 'd'], e: 'f' }, { filter: ['a', 0, 2] }); +// 'a[0]=b&a[2]=d' +``` + +### Handling of `null` values + +By default, `null` values are treated like empty strings: + +```javascript +var withNull = qs.stringify({ a: null, b: '' }); +assert.equal(withNull, 'a=&b='); +``` + +Parsing does not distinguish between parameters with and without equal signs. Both are converted to empty strings. + +```javascript +var equalsInsensitive = qs.parse('a&b='); +assert.deepEqual(equalsInsensitive, { a: '', b: '' }); +``` + +To distinguish between `null` values and empty strings use the `strictNullHandling` flag. In the result string the `null` +values have no `=` sign: + +```javascript +var strictNull = qs.stringify({ a: null, b: '' }, { strictNullHandling: true }); +assert.equal(strictNull, 'a&b='); +``` + +To parse values without `=` back to `null` use the `strictNullHandling` flag: + +```javascript +var parsedStrictNull = qs.parse('a&b=', { strictNullHandling: true }); +assert.deepEqual(parsedStrictNull, { a: null, b: '' }); +``` + +To completely skip rendering keys with `null` values, use the `skipNulls` flag: + +```javascript +var nullsSkipped = qs.stringify({ a: 'b', c: null}, { skipNulls: true }); +assert.equal(nullsSkipped, 'a=b'); +``` + +If you're communicating with legacy systems, you can switch to `iso-8859-1` +using the `charset` option: + +```javascript +var iso = qs.stringify({ æ: 'æ' }, { charset: 'iso-8859-1' }); +assert.equal(iso, '%E6=%E6'); +``` + +Characters that don't exist in `iso-8859-1` will be converted to numeric +entities, similar to what browsers do: + +```javascript +var numeric = qs.stringify({ a: '☺' }, { charset: 'iso-8859-1' }); +assert.equal(numeric, 'a=%26%239786%3B'); +``` + +You can use the `charsetSentinel` option to announce the character by +including an `utf8=✓` parameter with the proper encoding if the checkmark, +similar to what Ruby on Rails and others do when submitting forms. + +```javascript +var sentinel = qs.stringify({ a: '☺' }, { charsetSentinel: true }); +assert.equal(sentinel, 'utf8=%E2%9C%93&a=%E2%98%BA'); + +var isoSentinel = qs.stringify({ a: 'æ' }, { charsetSentinel: true, charset: 'iso-8859-1' }); +assert.equal(isoSentinel, 'utf8=%26%2310003%3B&a=%E6'); +``` + +### Dealing with special character sets + +By default the encoding and decoding of characters is done in `utf-8`, +and `iso-8859-1` support is also built in via the `charset` parameter. + +If you wish to encode querystrings to a different character set (i.e. +[Shift JIS](https://en.wikipedia.org/wiki/Shift_JIS)) you can use the +[`qs-iconv`](https://github.com/martinheidegger/qs-iconv) library: + +```javascript +var encoder = require('qs-iconv/encoder')('shift_jis'); +var shiftJISEncoded = qs.stringify({ a: 'こんにちは!' }, { encoder: encoder }); +assert.equal(shiftJISEncoded, 'a=%82%B1%82%F1%82%C9%82%BF%82%CD%81I'); +``` + +This also works for decoding of query strings: + +```javascript +var decoder = require('qs-iconv/decoder')('shift_jis'); +var obj = qs.parse('a=%82%B1%82%F1%82%C9%82%BF%82%CD%81I', { decoder: decoder }); +assert.deepEqual(obj, { a: 'こんにちは!' 
}); +``` + +### RFC 3986 and RFC 1738 space encoding + +RFC3986 used as default option and encodes ' ' to *%20* which is backward compatible. +In the same time, output can be stringified as per RFC1738 with ' ' equal to '+'. + +``` +assert.equal(qs.stringify({ a: 'b c' }), 'a=b%20c'); +assert.equal(qs.stringify({ a: 'b c' }, { format : 'RFC3986' }), 'a=b%20c'); +assert.equal(qs.stringify({ a: 'b c' }, { format : 'RFC1738' }), 'a=b+c'); +``` + +## Security + +Please email [@ljharb](https://github.com/ljharb) or see https://tidelift.com/security if you have a potential security vulnerability to report. + +## qs for enterprise + +Available as part of the Tidelift Subscription + +The maintainers of qs and thousands of other packages are working with Tidelift to deliver commercial support and maintenance for the open source dependencies you use to build your applications. Save time, reduce risk, and improve code health, while paying the maintainers of the exact dependencies you use. [Learn more.](https://tidelift.com/subscription/pkg/npm-qs?utm_source=npm-qs&utm_medium=referral&utm_campaign=enterprise&utm_term=repo) + +[package-url]: https://npmjs.org/package/qs +[npm-version-svg]: https://versionbadg.es/ljharb/qs.svg +[deps-svg]: https://david-dm.org/ljharb/qs.svg +[deps-url]: https://david-dm.org/ljharb/qs +[dev-deps-svg]: https://david-dm.org/ljharb/qs/dev-status.svg +[dev-deps-url]: https://david-dm.org/ljharb/qs#info=devDependencies +[npm-badge-png]: https://nodei.co/npm/qs.png?downloads=true&stars=true +[license-image]: https://img.shields.io/npm/l/qs.svg +[license-url]: LICENSE +[downloads-image]: https://img.shields.io/npm/dm/qs.svg +[downloads-url]: https://npm-stat.com/charts.html?package=qs +[codecov-image]: https://codecov.io/gh/ljharb/qs/branch/main/graphs/badge.svg +[codecov-url]: https://app.codecov.io/gh/ljharb/qs/ +[actions-image]: https://img.shields.io/endpoint?url=https://github-actions-badge-u3jn4tfpocch.runkit.sh/ljharb/qs +[actions-url]: https://github.com/ljharb/qs/actions diff --git a/node_modules/qs/dist/qs.js b/node_modules/qs/dist/qs.js new file mode 100644 index 0000000..1c620a4 --- /dev/null +++ b/node_modules/qs/dist/qs.js @@ -0,0 +1,2054 @@ +(function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}g.Qs = f()}})(function(){var define,module,exports;return (function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c="function"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error("Cannot find module '"+i+"'");throw a.code="MODULE_NOT_FOUND",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u="function"==typeof require&&require,i=0;i -1) { + return val.split(','); + } + + return val; +}; + +// This is what browsers will submit when the ✓ character occurs in an +// application/x-www-form-urlencoded body and the encoding of the page containing +// the form is iso-8859-1, or when the submitted form has an accept-charset +// attribute of iso-8859-1. Presumably also with other charsets that do not contain +// the ✓ character, such as us-ascii. 
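+// Note: the value below is the URL-encoded HTML numeric entity '&#10003;' (%26 = '&', %23 = '#', %3B = ';'),
+// i.e. what a non-UTF-8 form submits for the checkmark — not the UTF-8 percent-encoding of ✓ itself,
+// which is the charsetSentinel defined just after it.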
+var isoSentinel = 'utf8=%26%2310003%3B'; // encodeURIComponent('✓') + +// These are the percent-encoded utf-8 octets representing a checkmark, indicating that the request actually is utf-8 encoded. +var charsetSentinel = 'utf8=%E2%9C%93'; // encodeURIComponent('✓') + +var parseValues = function parseQueryStringValues(str, options) { + var obj = {}; + var cleanStr = options.ignoreQueryPrefix ? str.replace(/^\?/, '') : str; + var limit = options.parameterLimit === Infinity ? undefined : options.parameterLimit; + var parts = cleanStr.split(options.delimiter, limit); + var skipIndex = -1; // Keep track of where the utf8 sentinel was found + var i; + + var charset = options.charset; + if (options.charsetSentinel) { + for (i = 0; i < parts.length; ++i) { + if (parts[i].indexOf('utf8=') === 0) { + if (parts[i] === charsetSentinel) { + charset = 'utf-8'; + } else if (parts[i] === isoSentinel) { + charset = 'iso-8859-1'; + } + skipIndex = i; + i = parts.length; // The eslint settings do not allow break; + } + } + } + + for (i = 0; i < parts.length; ++i) { + if (i === skipIndex) { + continue; + } + var part = parts[i]; + + var bracketEqualsPos = part.indexOf(']='); + var pos = bracketEqualsPos === -1 ? part.indexOf('=') : bracketEqualsPos + 1; + + var key, val; + if (pos === -1) { + key = options.decoder(part, defaults.decoder, charset, 'key'); + val = options.strictNullHandling ? null : ''; + } else { + key = options.decoder(part.slice(0, pos), defaults.decoder, charset, 'key'); + val = utils.maybeMap( + parseArrayValue(part.slice(pos + 1), options), + function (encodedVal) { + return options.decoder(encodedVal, defaults.decoder, charset, 'value'); + } + ); + } + + if (val && options.interpretNumericEntities && charset === 'iso-8859-1') { + val = interpretNumericEntities(val); + } + + if (part.indexOf('[]=') > -1) { + val = isArray(val) ? [val] : val; + } + + if (has.call(obj, key)) { + obj[key] = utils.combine(obj[key], val); + } else { + obj[key] = val; + } + } + + return obj; +}; + +var parseObject = function (chain, val, options, valuesParsed) { + var leaf = valuesParsed ? val : parseArrayValue(val, options); + + for (var i = chain.length - 1; i >= 0; --i) { + var obj; + var root = chain[i]; + + if (root === '[]' && options.parseArrays) { + obj = [].concat(leaf); + } else { + obj = options.plainObjects ? Object.create(null) : {}; + var cleanRoot = root.charAt(0) === '[' && root.charAt(root.length - 1) === ']' ? root.slice(1, -1) : root; + var index = parseInt(cleanRoot, 10); + if (!options.parseArrays && cleanRoot === '') { + obj = { 0: leaf }; + } else if ( + !isNaN(index) + && root !== cleanRoot + && String(index) === cleanRoot + && index >= 0 + && (options.parseArrays && index <= options.arrayLimit) + ) { + obj = []; + obj[index] = leaf; + } else if (cleanRoot !== '__proto__') { + obj[cleanRoot] = leaf; + } + } + + leaf = obj; + } + + return leaf; +}; + +var parseKeys = function parseQueryStringKeys(givenKey, val, options, valuesParsed) { + if (!givenKey) { + return; + } + + // Transform dot notation to bracket notation + var key = options.allowDots ? givenKey.replace(/\.([^.[]+)/g, '[$1]') : givenKey; + + // The regex chunks + + var brackets = /(\[[^[\]]*])/; + var child = /(\[[^[\]]*])/g; + + // Get the parent + + var segment = options.depth > 0 && brackets.exec(key); + var parent = segment ? 
key.slice(0, segment.index) : key; + + // Stash the parent if it exists + + var keys = []; + if (parent) { + // If we aren't using plain objects, optionally prefix keys that would overwrite object prototype properties + if (!options.plainObjects && has.call(Object.prototype, parent)) { + if (!options.allowPrototypes) { + return; + } + } + + keys.push(parent); + } + + // Loop through children appending to the array until we hit depth + + var i = 0; + while (options.depth > 0 && (segment = child.exec(key)) !== null && i < options.depth) { + i += 1; + if (!options.plainObjects && has.call(Object.prototype, segment[1].slice(1, -1))) { + if (!options.allowPrototypes) { + return; + } + } + keys.push(segment[1]); + } + + // If there's a remainder, just add whatever is left + + if (segment) { + keys.push('[' + key.slice(segment.index) + ']'); + } + + return parseObject(keys, val, options, valuesParsed); +}; + +var normalizeParseOptions = function normalizeParseOptions(opts) { + if (!opts) { + return defaults; + } + + if (opts.decoder !== null && opts.decoder !== undefined && typeof opts.decoder !== 'function') { + throw new TypeError('Decoder has to be a function.'); + } + + if (typeof opts.charset !== 'undefined' && opts.charset !== 'utf-8' && opts.charset !== 'iso-8859-1') { + throw new TypeError('The charset option must be either utf-8, iso-8859-1, or undefined'); + } + var charset = typeof opts.charset === 'undefined' ? defaults.charset : opts.charset; + + return { + allowDots: typeof opts.allowDots === 'undefined' ? defaults.allowDots : !!opts.allowDots, + allowPrototypes: typeof opts.allowPrototypes === 'boolean' ? opts.allowPrototypes : defaults.allowPrototypes, + allowSparse: typeof opts.allowSparse === 'boolean' ? opts.allowSparse : defaults.allowSparse, + arrayLimit: typeof opts.arrayLimit === 'number' ? opts.arrayLimit : defaults.arrayLimit, + charset: charset, + charsetSentinel: typeof opts.charsetSentinel === 'boolean' ? opts.charsetSentinel : defaults.charsetSentinel, + comma: typeof opts.comma === 'boolean' ? opts.comma : defaults.comma, + decoder: typeof opts.decoder === 'function' ? opts.decoder : defaults.decoder, + delimiter: typeof opts.delimiter === 'string' || utils.isRegExp(opts.delimiter) ? opts.delimiter : defaults.delimiter, + // eslint-disable-next-line no-implicit-coercion, no-extra-parens + depth: (typeof opts.depth === 'number' || opts.depth === false) ? +opts.depth : defaults.depth, + ignoreQueryPrefix: opts.ignoreQueryPrefix === true, + interpretNumericEntities: typeof opts.interpretNumericEntities === 'boolean' ? opts.interpretNumericEntities : defaults.interpretNumericEntities, + parameterLimit: typeof opts.parameterLimit === 'number' ? opts.parameterLimit : defaults.parameterLimit, + parseArrays: opts.parseArrays !== false, + plainObjects: typeof opts.plainObjects === 'boolean' ? opts.plainObjects : defaults.plainObjects, + strictNullHandling: typeof opts.strictNullHandling === 'boolean' ? opts.strictNullHandling : defaults.strictNullHandling + }; +}; + +module.exports = function (str, opts) { + var options = normalizeParseOptions(opts); + + if (str === '' || str === null || typeof str === 'undefined') { + return options.plainObjects ? Object.create(null) : {}; + } + + var tempObj = typeof str === 'string' ? parseValues(str, options) : str; + var obj = options.plainObjects ? 
Object.create(null) : {}; + + // Iterate over the keys and setup the new object + + var keys = Object.keys(tempObj); + for (var i = 0; i < keys.length; ++i) { + var key = keys[i]; + var newObj = parseKeys(key, tempObj[key], options, typeof str === 'string'); + obj = utils.merge(obj, newObj, options); + } + + if (options.allowSparse === true) { + return obj; + } + + return utils.compact(obj); +}; + +},{"./utils":5}],4:[function(require,module,exports){ +'use strict'; + +var getSideChannel = require('side-channel'); +var utils = require('./utils'); +var formats = require('./formats'); +var has = Object.prototype.hasOwnProperty; + +var arrayPrefixGenerators = { + brackets: function brackets(prefix) { + return prefix + '[]'; + }, + comma: 'comma', + indices: function indices(prefix, key) { + return prefix + '[' + key + ']'; + }, + repeat: function repeat(prefix) { + return prefix; + } +}; + +var isArray = Array.isArray; +var split = String.prototype.split; +var push = Array.prototype.push; +var pushToArray = function (arr, valueOrArray) { + push.apply(arr, isArray(valueOrArray) ? valueOrArray : [valueOrArray]); +}; + +var toISO = Date.prototype.toISOString; + +var defaultFormat = formats['default']; +var defaults = { + addQueryPrefix: false, + allowDots: false, + charset: 'utf-8', + charsetSentinel: false, + delimiter: '&', + encode: true, + encoder: utils.encode, + encodeValuesOnly: false, + format: defaultFormat, + formatter: formats.formatters[defaultFormat], + // deprecated + indices: false, + serializeDate: function serializeDate(date) { + return toISO.call(date); + }, + skipNulls: false, + strictNullHandling: false +}; + +var isNonNullishPrimitive = function isNonNullishPrimitive(v) { + return typeof v === 'string' + || typeof v === 'number' + || typeof v === 'boolean' + || typeof v === 'symbol' + || typeof v === 'bigint'; +}; + +var sentinel = {}; + +var stringify = function stringify( + object, + prefix, + generateArrayPrefix, + commaRoundTrip, + strictNullHandling, + skipNulls, + encoder, + filter, + sort, + allowDots, + serializeDate, + format, + formatter, + encodeValuesOnly, + charset, + sideChannel +) { + var obj = object; + + var tmpSc = sideChannel; + var step = 0; + var findFlag = false; + while ((tmpSc = tmpSc.get(sentinel)) !== void undefined && !findFlag) { + // Where object last appeared in the ref tree + var pos = tmpSc.get(object); + step += 1; + if (typeof pos !== 'undefined') { + if (pos === step) { + throw new RangeError('Cyclic object value'); + } else { + findFlag = true; // Break while + } + } + if (typeof tmpSc.get(sentinel) === 'undefined') { + step = 0; + } + } + + if (typeof filter === 'function') { + obj = filter(prefix, obj); + } else if (obj instanceof Date) { + obj = serializeDate(obj); + } else if (generateArrayPrefix === 'comma' && isArray(obj)) { + obj = utils.maybeMap(obj, function (value) { + if (value instanceof Date) { + return serializeDate(value); + } + return value; + }); + } + + if (obj === null) { + if (strictNullHandling) { + return encoder && !encodeValuesOnly ? encoder(prefix, defaults.encoder, charset, 'key', format) : prefix; + } + + obj = ''; + } + + if (isNonNullishPrimitive(obj) || utils.isBuffer(obj)) { + if (encoder) { + var keyValue = encodeValuesOnly ? prefix : encoder(prefix, defaults.encoder, charset, 'key', format); + if (generateArrayPrefix === 'comma' && encodeValuesOnly) { + var valuesArray = split.call(String(obj), ','); + var valuesJoined = ''; + for (var i = 0; i < valuesArray.length; ++i) { + valuesJoined += (i === 0 ? 
'' : ',') + formatter(encoder(valuesArray[i], defaults.encoder, charset, 'value', format)); + } + return [formatter(keyValue) + (commaRoundTrip && isArray(obj) && valuesArray.length === 1 ? '[]' : '') + '=' + valuesJoined]; + } + return [formatter(keyValue) + '=' + formatter(encoder(obj, defaults.encoder, charset, 'value', format))]; + } + return [formatter(prefix) + '=' + formatter(String(obj))]; + } + + var values = []; + + if (typeof obj === 'undefined') { + return values; + } + + var objKeys; + if (generateArrayPrefix === 'comma' && isArray(obj)) { + // we need to join elements in + objKeys = [{ value: obj.length > 0 ? obj.join(',') || null : void undefined }]; + } else if (isArray(filter)) { + objKeys = filter; + } else { + var keys = Object.keys(obj); + objKeys = sort ? keys.sort(sort) : keys; + } + + var adjustedPrefix = commaRoundTrip && isArray(obj) && obj.length === 1 ? prefix + '[]' : prefix; + + for (var j = 0; j < objKeys.length; ++j) { + var key = objKeys[j]; + var value = typeof key === 'object' && typeof key.value !== 'undefined' ? key.value : obj[key]; + + if (skipNulls && value === null) { + continue; + } + + var keyPrefix = isArray(obj) + ? typeof generateArrayPrefix === 'function' ? generateArrayPrefix(adjustedPrefix, key) : adjustedPrefix + : adjustedPrefix + (allowDots ? '.' + key : '[' + key + ']'); + + sideChannel.set(object, step); + var valueSideChannel = getSideChannel(); + valueSideChannel.set(sentinel, sideChannel); + pushToArray(values, stringify( + value, + keyPrefix, + generateArrayPrefix, + commaRoundTrip, + strictNullHandling, + skipNulls, + encoder, + filter, + sort, + allowDots, + serializeDate, + format, + formatter, + encodeValuesOnly, + charset, + valueSideChannel + )); + } + + return values; +}; + +var normalizeStringifyOptions = function normalizeStringifyOptions(opts) { + if (!opts) { + return defaults; + } + + if (opts.encoder !== null && typeof opts.encoder !== 'undefined' && typeof opts.encoder !== 'function') { + throw new TypeError('Encoder has to be a function.'); + } + + var charset = opts.charset || defaults.charset; + if (typeof opts.charset !== 'undefined' && opts.charset !== 'utf-8' && opts.charset !== 'iso-8859-1') { + throw new TypeError('The charset option must be either utf-8, iso-8859-1, or undefined'); + } + + var format = formats['default']; + if (typeof opts.format !== 'undefined') { + if (!has.call(formats.formatters, opts.format)) { + throw new TypeError('Unknown format option provided.'); + } + format = opts.format; + } + var formatter = formats.formatters[format]; + + var filter = defaults.filter; + if (typeof opts.filter === 'function' || isArray(opts.filter)) { + filter = opts.filter; + } + + return { + addQueryPrefix: typeof opts.addQueryPrefix === 'boolean' ? opts.addQueryPrefix : defaults.addQueryPrefix, + allowDots: typeof opts.allowDots === 'undefined' ? defaults.allowDots : !!opts.allowDots, + charset: charset, + charsetSentinel: typeof opts.charsetSentinel === 'boolean' ? opts.charsetSentinel : defaults.charsetSentinel, + delimiter: typeof opts.delimiter === 'undefined' ? defaults.delimiter : opts.delimiter, + encode: typeof opts.encode === 'boolean' ? opts.encode : defaults.encode, + encoder: typeof opts.encoder === 'function' ? opts.encoder : defaults.encoder, + encodeValuesOnly: typeof opts.encodeValuesOnly === 'boolean' ? opts.encodeValuesOnly : defaults.encodeValuesOnly, + filter: filter, + format: format, + formatter: formatter, + serializeDate: typeof opts.serializeDate === 'function' ? 
opts.serializeDate : defaults.serializeDate, + skipNulls: typeof opts.skipNulls === 'boolean' ? opts.skipNulls : defaults.skipNulls, + sort: typeof opts.sort === 'function' ? opts.sort : null, + strictNullHandling: typeof opts.strictNullHandling === 'boolean' ? opts.strictNullHandling : defaults.strictNullHandling + }; +}; + +module.exports = function (object, opts) { + var obj = object; + var options = normalizeStringifyOptions(opts); + + var objKeys; + var filter; + + if (typeof options.filter === 'function') { + filter = options.filter; + obj = filter('', obj); + } else if (isArray(options.filter)) { + filter = options.filter; + objKeys = filter; + } + + var keys = []; + + if (typeof obj !== 'object' || obj === null) { + return ''; + } + + var arrayFormat; + if (opts && opts.arrayFormat in arrayPrefixGenerators) { + arrayFormat = opts.arrayFormat; + } else if (opts && 'indices' in opts) { + arrayFormat = opts.indices ? 'indices' : 'repeat'; + } else { + arrayFormat = 'indices'; + } + + var generateArrayPrefix = arrayPrefixGenerators[arrayFormat]; + if (opts && 'commaRoundTrip' in opts && typeof opts.commaRoundTrip !== 'boolean') { + throw new TypeError('`commaRoundTrip` must be a boolean, or absent'); + } + var commaRoundTrip = generateArrayPrefix === 'comma' && opts && opts.commaRoundTrip; + + if (!objKeys) { + objKeys = Object.keys(obj); + } + + if (options.sort) { + objKeys.sort(options.sort); + } + + var sideChannel = getSideChannel(); + for (var i = 0; i < objKeys.length; ++i) { + var key = objKeys[i]; + + if (options.skipNulls && obj[key] === null) { + continue; + } + pushToArray(keys, stringify( + obj[key], + key, + generateArrayPrefix, + commaRoundTrip, + options.strictNullHandling, + options.skipNulls, + options.encode ? options.encoder : null, + options.filter, + options.sort, + options.allowDots, + options.serializeDate, + options.format, + options.formatter, + options.encodeValuesOnly, + options.charset, + sideChannel + )); + } + + var joined = keys.join(options.delimiter); + var prefix = options.addQueryPrefix === true ? '?' : ''; + + if (options.charsetSentinel) { + if (options.charset === 'iso-8859-1') { + // encodeURIComponent('✓'), the "numeric entity" representation of a checkmark + prefix += 'utf8=%26%2310003%3B&'; + } else { + // encodeURIComponent('✓') + prefix += 'utf8=%E2%9C%93&'; + } + } + + return joined.length > 0 ? prefix + joined : ''; +}; + +},{"./formats":1,"./utils":5,"side-channel":16}],5:[function(require,module,exports){ +'use strict'; + +var formats = require('./formats'); + +var has = Object.prototype.hasOwnProperty; +var isArray = Array.isArray; + +var hexTable = (function () { + var array = []; + for (var i = 0; i < 256; ++i) { + array.push('%' + ((i < 16 ? '0' : '') + i.toString(16)).toUpperCase()); + } + + return array; +}()); + +var compactQueue = function compactQueue(queue) { + while (queue.length > 1) { + var item = queue.pop(); + var obj = item.obj[item.prop]; + + if (isArray(obj)) { + var compacted = []; + + for (var j = 0; j < obj.length; ++j) { + if (typeof obj[j] !== 'undefined') { + compacted.push(obj[j]); + } + } + + item.obj[item.prop] = compacted; + } + } +}; + +var arrayToObject = function arrayToObject(source, options) { + var obj = options && options.plainObjects ? 
Object.create(null) : {}; + for (var i = 0; i < source.length; ++i) { + if (typeof source[i] !== 'undefined') { + obj[i] = source[i]; + } + } + + return obj; +}; + +var merge = function merge(target, source, options) { + /* eslint no-param-reassign: 0 */ + if (!source) { + return target; + } + + if (typeof source !== 'object') { + if (isArray(target)) { + target.push(source); + } else if (target && typeof target === 'object') { + if ((options && (options.plainObjects || options.allowPrototypes)) || !has.call(Object.prototype, source)) { + target[source] = true; + } + } else { + return [target, source]; + } + + return target; + } + + if (!target || typeof target !== 'object') { + return [target].concat(source); + } + + var mergeTarget = target; + if (isArray(target) && !isArray(source)) { + mergeTarget = arrayToObject(target, options); + } + + if (isArray(target) && isArray(source)) { + source.forEach(function (item, i) { + if (has.call(target, i)) { + var targetItem = target[i]; + if (targetItem && typeof targetItem === 'object' && item && typeof item === 'object') { + target[i] = merge(targetItem, item, options); + } else { + target.push(item); + } + } else { + target[i] = item; + } + }); + return target; + } + + return Object.keys(source).reduce(function (acc, key) { + var value = source[key]; + + if (has.call(acc, key)) { + acc[key] = merge(acc[key], value, options); + } else { + acc[key] = value; + } + return acc; + }, mergeTarget); +}; + +var assign = function assignSingleSource(target, source) { + return Object.keys(source).reduce(function (acc, key) { + acc[key] = source[key]; + return acc; + }, target); +}; + +var decode = function (str, decoder, charset) { + var strWithoutPlus = str.replace(/\+/g, ' '); + if (charset === 'iso-8859-1') { + // unescape never throws, no try...catch needed: + return strWithoutPlus.replace(/%[0-9a-f]{2}/gi, unescape); + } + // utf-8 + try { + return decodeURIComponent(strWithoutPlus); + } catch (e) { + return strWithoutPlus; + } +}; + +var encode = function encode(str, defaultEncoder, charset, kind, format) { + // This code was originally written by Brian White (mscdex) for the io.js core querystring library. + // It has been adapted here for stricter adherence to RFC 3986 + if (str.length === 0) { + return str; + } + + var string = str; + if (typeof str === 'symbol') { + string = Symbol.prototype.toString.call(str); + } else if (typeof str !== 'string') { + string = String(str); + } + + if (charset === 'iso-8859-1') { + return escape(string).replace(/%u[0-9a-f]{4}/gi, function ($0) { + return '%26%23' + parseInt($0.slice(2), 16) + '%3B'; + }); + } + + var out = ''; + for (var i = 0; i < string.length; ++i) { + var c = string.charCodeAt(i); + + if ( + c === 0x2D // - + || c === 0x2E // . 
+ || c === 0x5F // _ + || c === 0x7E // ~ + || (c >= 0x30 && c <= 0x39) // 0-9 + || (c >= 0x41 && c <= 0x5A) // a-z + || (c >= 0x61 && c <= 0x7A) // A-Z + || (format === formats.RFC1738 && (c === 0x28 || c === 0x29)) // ( ) + ) { + out += string.charAt(i); + continue; + } + + if (c < 0x80) { + out = out + hexTable[c]; + continue; + } + + if (c < 0x800) { + out = out + (hexTable[0xC0 | (c >> 6)] + hexTable[0x80 | (c & 0x3F)]); + continue; + } + + if (c < 0xD800 || c >= 0xE000) { + out = out + (hexTable[0xE0 | (c >> 12)] + hexTable[0x80 | ((c >> 6) & 0x3F)] + hexTable[0x80 | (c & 0x3F)]); + continue; + } + + i += 1; + c = 0x10000 + (((c & 0x3FF) << 10) | (string.charCodeAt(i) & 0x3FF)); + /* eslint operator-linebreak: [2, "before"] */ + out += hexTable[0xF0 | (c >> 18)] + + hexTable[0x80 | ((c >> 12) & 0x3F)] + + hexTable[0x80 | ((c >> 6) & 0x3F)] + + hexTable[0x80 | (c & 0x3F)]; + } + + return out; +}; + +var compact = function compact(value) { + var queue = [{ obj: { o: value }, prop: 'o' }]; + var refs = []; + + for (var i = 0; i < queue.length; ++i) { + var item = queue[i]; + var obj = item.obj[item.prop]; + + var keys = Object.keys(obj); + for (var j = 0; j < keys.length; ++j) { + var key = keys[j]; + var val = obj[key]; + if (typeof val === 'object' && val !== null && refs.indexOf(val) === -1) { + queue.push({ obj: obj, prop: key }); + refs.push(val); + } + } + } + + compactQueue(queue); + + return value; +}; + +var isRegExp = function isRegExp(obj) { + return Object.prototype.toString.call(obj) === '[object RegExp]'; +}; + +var isBuffer = function isBuffer(obj) { + if (!obj || typeof obj !== 'object') { + return false; + } + + return !!(obj.constructor && obj.constructor.isBuffer && obj.constructor.isBuffer(obj)); +}; + +var combine = function combine(a, b) { + return [].concat(a, b); +}; + +var maybeMap = function maybeMap(val, fn) { + if (isArray(val)) { + var mapped = []; + for (var i = 0; i < val.length; i += 1) { + mapped.push(fn(val[i])); + } + return mapped; + } + return fn(val); +}; + +module.exports = { + arrayToObject: arrayToObject, + assign: assign, + combine: combine, + compact: compact, + decode: decode, + encode: encode, + isBuffer: isBuffer, + isRegExp: isRegExp, + maybeMap: maybeMap, + merge: merge +}; + +},{"./formats":1}],6:[function(require,module,exports){ + +},{}],7:[function(require,module,exports){ +'use strict'; + +var GetIntrinsic = require('get-intrinsic'); + +var callBind = require('./'); + +var $indexOf = callBind(GetIntrinsic('String.prototype.indexOf')); + +module.exports = function callBoundIntrinsic(name, allowMissing) { + var intrinsic = GetIntrinsic(name, !!allowMissing); + if (typeof intrinsic === 'function' && $indexOf(name, '.prototype.') > -1) { + return callBind(intrinsic); + } + return intrinsic; +}; + +},{"./":8,"get-intrinsic":11}],8:[function(require,module,exports){ +'use strict'; + +var bind = require('function-bind'); +var GetIntrinsic = require('get-intrinsic'); + +var $apply = GetIntrinsic('%Function.prototype.apply%'); +var $call = GetIntrinsic('%Function.prototype.call%'); +var $reflectApply = GetIntrinsic('%Reflect.apply%', true) || bind.call($call, $apply); + +var $gOPD = GetIntrinsic('%Object.getOwnPropertyDescriptor%', true); +var $defineProperty = GetIntrinsic('%Object.defineProperty%', true); +var $max = GetIntrinsic('%Math.max%'); + +if ($defineProperty) { + try { + $defineProperty({}, 'a', { value: 1 }); + } catch (e) { + // IE 8 has a broken defineProperty + $defineProperty = null; + } +} + +module.exports = function 
callBind(originalFunction) { + var func = $reflectApply(bind, $call, arguments); + if ($gOPD && $defineProperty) { + var desc = $gOPD(func, 'length'); + if (desc.configurable) { + // original length, plus the receiver, minus any additional arguments (after the receiver) + $defineProperty( + func, + 'length', + { value: 1 + $max(0, originalFunction.length - (arguments.length - 1)) } + ); + } + } + return func; +}; + +var applyBind = function applyBind() { + return $reflectApply(bind, $apply, arguments); +}; + +if ($defineProperty) { + $defineProperty(module.exports, 'apply', { value: applyBind }); +} else { + module.exports.apply = applyBind; +} + +},{"function-bind":10,"get-intrinsic":11}],9:[function(require,module,exports){ +'use strict'; + +/* eslint no-invalid-this: 1 */ + +var ERROR_MESSAGE = 'Function.prototype.bind called on incompatible '; +var slice = Array.prototype.slice; +var toStr = Object.prototype.toString; +var funcType = '[object Function]'; + +module.exports = function bind(that) { + var target = this; + if (typeof target !== 'function' || toStr.call(target) !== funcType) { + throw new TypeError(ERROR_MESSAGE + target); + } + var args = slice.call(arguments, 1); + + var bound; + var binder = function () { + if (this instanceof bound) { + var result = target.apply( + this, + args.concat(slice.call(arguments)) + ); + if (Object(result) === result) { + return result; + } + return this; + } else { + return target.apply( + that, + args.concat(slice.call(arguments)) + ); + } + }; + + var boundLength = Math.max(0, target.length - args.length); + var boundArgs = []; + for (var i = 0; i < boundLength; i++) { + boundArgs.push('$' + i); + } + + bound = Function('binder', 'return function (' + boundArgs.join(',') + '){ return binder.apply(this,arguments); }')(binder); + + if (target.prototype) { + var Empty = function Empty() {}; + Empty.prototype = target.prototype; + bound.prototype = new Empty(); + Empty.prototype = null; + } + + return bound; +}; + +},{}],10:[function(require,module,exports){ +'use strict'; + +var implementation = require('./implementation'); + +module.exports = Function.prototype.bind || implementation; + +},{"./implementation":9}],11:[function(require,module,exports){ +'use strict'; + +var undefined; + +var $SyntaxError = SyntaxError; +var $Function = Function; +var $TypeError = TypeError; + +// eslint-disable-next-line consistent-return +var getEvalledConstructor = function (expressionSyntax) { + try { + return $Function('"use strict"; return (' + expressionSyntax + ').constructor;')(); + } catch (e) {} +}; + +var $gOPD = Object.getOwnPropertyDescriptor; +if ($gOPD) { + try { + $gOPD({}, ''); + } catch (e) { + $gOPD = null; // this is IE 8, which has a broken gOPD + } +} + +var throwTypeError = function () { + throw new $TypeError(); +}; +var ThrowTypeError = $gOPD + ? (function () { + try { + // eslint-disable-next-line no-unused-expressions, no-caller, no-restricted-properties + arguments.callee; // IE 8 does not throw here + return throwTypeError; + } catch (calleeThrows) { + try { + // IE 8 throws on Object.getOwnPropertyDescriptor(arguments, '') + return $gOPD(arguments, 'callee').get; + } catch (gOPDthrows) { + return throwTypeError; + } + } + }()) + : throwTypeError; + +var hasSymbols = require('has-symbols')(); + +var getProto = Object.getPrototypeOf || function (x) { return x.__proto__; }; // eslint-disable-line no-proto + +var needsEval = {}; + +var TypedArray = typeof Uint8Array === 'undefined' ? 
undefined : getProto(Uint8Array); + +var INTRINSICS = { + '%AggregateError%': typeof AggregateError === 'undefined' ? undefined : AggregateError, + '%Array%': Array, + '%ArrayBuffer%': typeof ArrayBuffer === 'undefined' ? undefined : ArrayBuffer, + '%ArrayIteratorPrototype%': hasSymbols ? getProto([][Symbol.iterator]()) : undefined, + '%AsyncFromSyncIteratorPrototype%': undefined, + '%AsyncFunction%': needsEval, + '%AsyncGenerator%': needsEval, + '%AsyncGeneratorFunction%': needsEval, + '%AsyncIteratorPrototype%': needsEval, + '%Atomics%': typeof Atomics === 'undefined' ? undefined : Atomics, + '%BigInt%': typeof BigInt === 'undefined' ? undefined : BigInt, + '%Boolean%': Boolean, + '%DataView%': typeof DataView === 'undefined' ? undefined : DataView, + '%Date%': Date, + '%decodeURI%': decodeURI, + '%decodeURIComponent%': decodeURIComponent, + '%encodeURI%': encodeURI, + '%encodeURIComponent%': encodeURIComponent, + '%Error%': Error, + '%eval%': eval, // eslint-disable-line no-eval + '%EvalError%': EvalError, + '%Float32Array%': typeof Float32Array === 'undefined' ? undefined : Float32Array, + '%Float64Array%': typeof Float64Array === 'undefined' ? undefined : Float64Array, + '%FinalizationRegistry%': typeof FinalizationRegistry === 'undefined' ? undefined : FinalizationRegistry, + '%Function%': $Function, + '%GeneratorFunction%': needsEval, + '%Int8Array%': typeof Int8Array === 'undefined' ? undefined : Int8Array, + '%Int16Array%': typeof Int16Array === 'undefined' ? undefined : Int16Array, + '%Int32Array%': typeof Int32Array === 'undefined' ? undefined : Int32Array, + '%isFinite%': isFinite, + '%isNaN%': isNaN, + '%IteratorPrototype%': hasSymbols ? getProto(getProto([][Symbol.iterator]())) : undefined, + '%JSON%': typeof JSON === 'object' ? JSON : undefined, + '%Map%': typeof Map === 'undefined' ? undefined : Map, + '%MapIteratorPrototype%': typeof Map === 'undefined' || !hasSymbols ? undefined : getProto(new Map()[Symbol.iterator]()), + '%Math%': Math, + '%Number%': Number, + '%Object%': Object, + '%parseFloat%': parseFloat, + '%parseInt%': parseInt, + '%Promise%': typeof Promise === 'undefined' ? undefined : Promise, + '%Proxy%': typeof Proxy === 'undefined' ? undefined : Proxy, + '%RangeError%': RangeError, + '%ReferenceError%': ReferenceError, + '%Reflect%': typeof Reflect === 'undefined' ? undefined : Reflect, + '%RegExp%': RegExp, + '%Set%': typeof Set === 'undefined' ? undefined : Set, + '%SetIteratorPrototype%': typeof Set === 'undefined' || !hasSymbols ? undefined : getProto(new Set()[Symbol.iterator]()), + '%SharedArrayBuffer%': typeof SharedArrayBuffer === 'undefined' ? undefined : SharedArrayBuffer, + '%String%': String, + '%StringIteratorPrototype%': hasSymbols ? getProto(''[Symbol.iterator]()) : undefined, + '%Symbol%': hasSymbols ? Symbol : undefined, + '%SyntaxError%': $SyntaxError, + '%ThrowTypeError%': ThrowTypeError, + '%TypedArray%': TypedArray, + '%TypeError%': $TypeError, + '%Uint8Array%': typeof Uint8Array === 'undefined' ? undefined : Uint8Array, + '%Uint8ClampedArray%': typeof Uint8ClampedArray === 'undefined' ? undefined : Uint8ClampedArray, + '%Uint16Array%': typeof Uint16Array === 'undefined' ? undefined : Uint16Array, + '%Uint32Array%': typeof Uint32Array === 'undefined' ? undefined : Uint32Array, + '%URIError%': URIError, + '%WeakMap%': typeof WeakMap === 'undefined' ? undefined : WeakMap, + '%WeakRef%': typeof WeakRef === 'undefined' ? undefined : WeakRef, + '%WeakSet%': typeof WeakSet === 'undefined' ? 
undefined : WeakSet +}; + +var doEval = function doEval(name) { + var value; + if (name === '%AsyncFunction%') { + value = getEvalledConstructor('async function () {}'); + } else if (name === '%GeneratorFunction%') { + value = getEvalledConstructor('function* () {}'); + } else if (name === '%AsyncGeneratorFunction%') { + value = getEvalledConstructor('async function* () {}'); + } else if (name === '%AsyncGenerator%') { + var fn = doEval('%AsyncGeneratorFunction%'); + if (fn) { + value = fn.prototype; + } + } else if (name === '%AsyncIteratorPrototype%') { + var gen = doEval('%AsyncGenerator%'); + if (gen) { + value = getProto(gen.prototype); + } + } + + INTRINSICS[name] = value; + + return value; +}; + +var LEGACY_ALIASES = { + '%ArrayBufferPrototype%': ['ArrayBuffer', 'prototype'], + '%ArrayPrototype%': ['Array', 'prototype'], + '%ArrayProto_entries%': ['Array', 'prototype', 'entries'], + '%ArrayProto_forEach%': ['Array', 'prototype', 'forEach'], + '%ArrayProto_keys%': ['Array', 'prototype', 'keys'], + '%ArrayProto_values%': ['Array', 'prototype', 'values'], + '%AsyncFunctionPrototype%': ['AsyncFunction', 'prototype'], + '%AsyncGenerator%': ['AsyncGeneratorFunction', 'prototype'], + '%AsyncGeneratorPrototype%': ['AsyncGeneratorFunction', 'prototype', 'prototype'], + '%BooleanPrototype%': ['Boolean', 'prototype'], + '%DataViewPrototype%': ['DataView', 'prototype'], + '%DatePrototype%': ['Date', 'prototype'], + '%ErrorPrototype%': ['Error', 'prototype'], + '%EvalErrorPrototype%': ['EvalError', 'prototype'], + '%Float32ArrayPrototype%': ['Float32Array', 'prototype'], + '%Float64ArrayPrototype%': ['Float64Array', 'prototype'], + '%FunctionPrototype%': ['Function', 'prototype'], + '%Generator%': ['GeneratorFunction', 'prototype'], + '%GeneratorPrototype%': ['GeneratorFunction', 'prototype', 'prototype'], + '%Int8ArrayPrototype%': ['Int8Array', 'prototype'], + '%Int16ArrayPrototype%': ['Int16Array', 'prototype'], + '%Int32ArrayPrototype%': ['Int32Array', 'prototype'], + '%JSONParse%': ['JSON', 'parse'], + '%JSONStringify%': ['JSON', 'stringify'], + '%MapPrototype%': ['Map', 'prototype'], + '%NumberPrototype%': ['Number', 'prototype'], + '%ObjectPrototype%': ['Object', 'prototype'], + '%ObjProto_toString%': ['Object', 'prototype', 'toString'], + '%ObjProto_valueOf%': ['Object', 'prototype', 'valueOf'], + '%PromisePrototype%': ['Promise', 'prototype'], + '%PromiseProto_then%': ['Promise', 'prototype', 'then'], + '%Promise_all%': ['Promise', 'all'], + '%Promise_reject%': ['Promise', 'reject'], + '%Promise_resolve%': ['Promise', 'resolve'], + '%RangeErrorPrototype%': ['RangeError', 'prototype'], + '%ReferenceErrorPrototype%': ['ReferenceError', 'prototype'], + '%RegExpPrototype%': ['RegExp', 'prototype'], + '%SetPrototype%': ['Set', 'prototype'], + '%SharedArrayBufferPrototype%': ['SharedArrayBuffer', 'prototype'], + '%StringPrototype%': ['String', 'prototype'], + '%SymbolPrototype%': ['Symbol', 'prototype'], + '%SyntaxErrorPrototype%': ['SyntaxError', 'prototype'], + '%TypedArrayPrototype%': ['TypedArray', 'prototype'], + '%TypeErrorPrototype%': ['TypeError', 'prototype'], + '%Uint8ArrayPrototype%': ['Uint8Array', 'prototype'], + '%Uint8ClampedArrayPrototype%': ['Uint8ClampedArray', 'prototype'], + '%Uint16ArrayPrototype%': ['Uint16Array', 'prototype'], + '%Uint32ArrayPrototype%': ['Uint32Array', 'prototype'], + '%URIErrorPrototype%': ['URIError', 'prototype'], + '%WeakMapPrototype%': ['WeakMap', 'prototype'], + '%WeakSetPrototype%': ['WeakSet', 'prototype'] +}; + +var bind = 
require('function-bind'); +var hasOwn = require('has'); +var $concat = bind.call(Function.call, Array.prototype.concat); +var $spliceApply = bind.call(Function.apply, Array.prototype.splice); +var $replace = bind.call(Function.call, String.prototype.replace); +var $strSlice = bind.call(Function.call, String.prototype.slice); + +/* adapted from https://github.com/lodash/lodash/blob/4.17.15/dist/lodash.js#L6735-L6744 */ +var rePropName = /[^%.[\]]+|\[(?:(-?\d+(?:\.\d+)?)|(["'])((?:(?!\2)[^\\]|\\.)*?)\2)\]|(?=(?:\.|\[\])(?:\.|\[\]|%$))/g; +var reEscapeChar = /\\(\\)?/g; /** Used to match backslashes in property paths. */ +var stringToPath = function stringToPath(string) { + var first = $strSlice(string, 0, 1); + var last = $strSlice(string, -1); + if (first === '%' && last !== '%') { + throw new $SyntaxError('invalid intrinsic syntax, expected closing `%`'); + } else if (last === '%' && first !== '%') { + throw new $SyntaxError('invalid intrinsic syntax, expected opening `%`'); + } + var result = []; + $replace(string, rePropName, function (match, number, quote, subString) { + result[result.length] = quote ? $replace(subString, reEscapeChar, '$1') : number || match; + }); + return result; +}; +/* end adaptation */ + +var getBaseIntrinsic = function getBaseIntrinsic(name, allowMissing) { + var intrinsicName = name; + var alias; + if (hasOwn(LEGACY_ALIASES, intrinsicName)) { + alias = LEGACY_ALIASES[intrinsicName]; + intrinsicName = '%' + alias[0] + '%'; + } + + if (hasOwn(INTRINSICS, intrinsicName)) { + var value = INTRINSICS[intrinsicName]; + if (value === needsEval) { + value = doEval(intrinsicName); + } + if (typeof value === 'undefined' && !allowMissing) { + throw new $TypeError('intrinsic ' + name + ' exists, but is not available. Please file an issue!'); + } + + return { + alias: alias, + name: intrinsicName, + value: value + }; + } + + throw new $SyntaxError('intrinsic ' + name + ' does not exist!'); +}; + +module.exports = function GetIntrinsic(name, allowMissing) { + if (typeof name !== 'string' || name.length === 0) { + throw new $TypeError('intrinsic name must be a non-empty string'); + } + if (arguments.length > 1 && typeof allowMissing !== 'boolean') { + throw new $TypeError('"allowMissing" argument must be a boolean'); + } + + var parts = stringToPath(name); + var intrinsicBaseName = parts.length > 0 ? parts[0] : ''; + + var intrinsic = getBaseIntrinsic('%' + intrinsicBaseName + '%', allowMissing); + var intrinsicRealName = intrinsic.name; + var value = intrinsic.value; + var skipFurtherCaching = false; + + var alias = intrinsic.alias; + if (alias) { + intrinsicBaseName = alias[0]; + $spliceApply(parts, $concat([0, 1], alias)); + } + + for (var i = 1, isOwn = true; i < parts.length; i += 1) { + var part = parts[i]; + var first = $strSlice(part, 0, 1); + var last = $strSlice(part, -1); + if ( + ( + (first === '"' || first === "'" || first === '`') + || (last === '"' || last === "'" || last === '`') + ) + && first !== last + ) { + throw new $SyntaxError('property names with quotes must have matching quotes'); + } + if (part === 'constructor' || !isOwn) { + skipFurtherCaching = true; + } + + intrinsicBaseName += '.' 
+ part; + intrinsicRealName = '%' + intrinsicBaseName + '%'; + + if (hasOwn(INTRINSICS, intrinsicRealName)) { + value = INTRINSICS[intrinsicRealName]; + } else if (value != null) { + if (!(part in value)) { + if (!allowMissing) { + throw new $TypeError('base intrinsic for ' + name + ' exists, but the property is not available.'); + } + return void undefined; + } + if ($gOPD && (i + 1) >= parts.length) { + var desc = $gOPD(value, part); + isOwn = !!desc; + + // By convention, when a data property is converted to an accessor + // property to emulate a data property that does not suffer from + // the override mistake, that accessor's getter is marked with + // an `originalValue` property. Here, when we detect this, we + // uphold the illusion by pretending to see that original data + // property, i.e., returning the value rather than the getter + // itself. + if (isOwn && 'get' in desc && !('originalValue' in desc.get)) { + value = desc.get; + } else { + value = value[part]; + } + } else { + isOwn = hasOwn(value, part); + value = value[part]; + } + + if (isOwn && !skipFurtherCaching) { + INTRINSICS[intrinsicRealName] = value; + } + } + } + return value; +}; + +},{"function-bind":10,"has":14,"has-symbols":12}],12:[function(require,module,exports){ +'use strict'; + +var origSymbol = typeof Symbol !== 'undefined' && Symbol; +var hasSymbolSham = require('./shams'); + +module.exports = function hasNativeSymbols() { + if (typeof origSymbol !== 'function') { return false; } + if (typeof Symbol !== 'function') { return false; } + if (typeof origSymbol('foo') !== 'symbol') { return false; } + if (typeof Symbol('bar') !== 'symbol') { return false; } + + return hasSymbolSham(); +}; + +},{"./shams":13}],13:[function(require,module,exports){ +'use strict'; + +/* eslint complexity: [2, 18], max-statements: [2, 33] */ +module.exports = function hasSymbols() { + if (typeof Symbol !== 'function' || typeof Object.getOwnPropertySymbols !== 'function') { return false; } + if (typeof Symbol.iterator === 'symbol') { return true; } + + var obj = {}; + var sym = Symbol('test'); + var symObj = Object(sym); + if (typeof sym === 'string') { return false; } + + if (Object.prototype.toString.call(sym) !== '[object Symbol]') { return false; } + if (Object.prototype.toString.call(symObj) !== '[object Symbol]') { return false; } + + // temp disabled per https://github.com/ljharb/object.assign/issues/17 + // if (sym instanceof Symbol) { return false; } + // temp disabled per https://github.com/WebReflection/get-own-property-symbols/issues/4 + // if (!(symObj instanceof Symbol)) { return false; } + + // if (typeof Symbol.prototype.toString !== 'function') { return false; } + // if (String(sym) !== Symbol.prototype.toString.call(sym)) { return false; } + + var symVal = 42; + obj[sym] = symVal; + for (sym in obj) { return false; } // eslint-disable-line no-restricted-syntax, no-unreachable-loop + if (typeof Object.keys === 'function' && Object.keys(obj).length !== 0) { return false; } + + if (typeof Object.getOwnPropertyNames === 'function' && Object.getOwnPropertyNames(obj).length !== 0) { return false; } + + var syms = Object.getOwnPropertySymbols(obj); + if (syms.length !== 1 || syms[0] !== sym) { return false; } + + if (!Object.prototype.propertyIsEnumerable.call(obj, sym)) { return false; } + + if (typeof Object.getOwnPropertyDescriptor === 'function') { + var descriptor = Object.getOwnPropertyDescriptor(obj, sym); + if (descriptor.value !== symVal || descriptor.enumerable !== true) { return false; } + } + + return true; 
+}; + +},{}],14:[function(require,module,exports){ +'use strict'; + +var bind = require('function-bind'); + +module.exports = bind.call(Function.call, Object.prototype.hasOwnProperty); + +},{"function-bind":10}],15:[function(require,module,exports){ +var hasMap = typeof Map === 'function' && Map.prototype; +var mapSizeDescriptor = Object.getOwnPropertyDescriptor && hasMap ? Object.getOwnPropertyDescriptor(Map.prototype, 'size') : null; +var mapSize = hasMap && mapSizeDescriptor && typeof mapSizeDescriptor.get === 'function' ? mapSizeDescriptor.get : null; +var mapForEach = hasMap && Map.prototype.forEach; +var hasSet = typeof Set === 'function' && Set.prototype; +var setSizeDescriptor = Object.getOwnPropertyDescriptor && hasSet ? Object.getOwnPropertyDescriptor(Set.prototype, 'size') : null; +var setSize = hasSet && setSizeDescriptor && typeof setSizeDescriptor.get === 'function' ? setSizeDescriptor.get : null; +var setForEach = hasSet && Set.prototype.forEach; +var hasWeakMap = typeof WeakMap === 'function' && WeakMap.prototype; +var weakMapHas = hasWeakMap ? WeakMap.prototype.has : null; +var hasWeakSet = typeof WeakSet === 'function' && WeakSet.prototype; +var weakSetHas = hasWeakSet ? WeakSet.prototype.has : null; +var hasWeakRef = typeof WeakRef === 'function' && WeakRef.prototype; +var weakRefDeref = hasWeakRef ? WeakRef.prototype.deref : null; +var booleanValueOf = Boolean.prototype.valueOf; +var objectToString = Object.prototype.toString; +var functionToString = Function.prototype.toString; +var $match = String.prototype.match; +var $slice = String.prototype.slice; +var $replace = String.prototype.replace; +var $toUpperCase = String.prototype.toUpperCase; +var $toLowerCase = String.prototype.toLowerCase; +var $test = RegExp.prototype.test; +var $concat = Array.prototype.concat; +var $join = Array.prototype.join; +var $arrSlice = Array.prototype.slice; +var $floor = Math.floor; +var bigIntValueOf = typeof BigInt === 'function' ? BigInt.prototype.valueOf : null; +var gOPS = Object.getOwnPropertySymbols; +var symToString = typeof Symbol === 'function' && typeof Symbol.iterator === 'symbol' ? Symbol.prototype.toString : null; +var hasShammedSymbols = typeof Symbol === 'function' && typeof Symbol.iterator === 'object'; +// ie, `has-tostringtag/shams +var toStringTag = typeof Symbol === 'function' && Symbol.toStringTag && (typeof Symbol.toStringTag === hasShammedSymbols ? 'object' : 'symbol') + ? Symbol.toStringTag + : null; +var isEnumerable = Object.prototype.propertyIsEnumerable; + +var gPO = (typeof Reflect === 'function' ? Reflect.getPrototypeOf : Object.getPrototypeOf) || ( + [].__proto__ === Array.prototype // eslint-disable-line no-proto + ? function (O) { + return O.__proto__; // eslint-disable-line no-proto + } + : null +); + +function addNumericSeparator(num, str) { + if ( + num === Infinity + || num === -Infinity + || num !== num + || (num && num > -1000 && num < 1000) + || $test.call(/e/, str) + ) { + return str; + } + var sepRegex = /[0-9](?=(?:[0-9]{3})+(?![0-9]))/g; + if (typeof num === 'number') { + var int = num < 0 ? -$floor(-num) : $floor(num); // trunc(num) + if (int !== num) { + var intStr = String(int); + var dec = $slice.call(str, intStr.length + 1); + return $replace.call(intStr, sepRegex, '$&_') + '.' + $replace.call($replace.call(dec, /([0-9]{3})/g, '$&_'), /_$/, ''); + } + } + return $replace.call(str, sepRegex, '$&_'); +} + +var utilInspect = require('./util.inspect'); +var inspectCustom = utilInspect.custom; +var inspectSymbol = isSymbol(inspectCustom) ? 
inspectCustom : null; + +module.exports = function inspect_(obj, options, depth, seen) { + var opts = options || {}; + + if (has(opts, 'quoteStyle') && (opts.quoteStyle !== 'single' && opts.quoteStyle !== 'double')) { + throw new TypeError('option "quoteStyle" must be "single" or "double"'); + } + if ( + has(opts, 'maxStringLength') && (typeof opts.maxStringLength === 'number' + ? opts.maxStringLength < 0 && opts.maxStringLength !== Infinity + : opts.maxStringLength !== null + ) + ) { + throw new TypeError('option "maxStringLength", if provided, must be a positive integer, Infinity, or `null`'); + } + var customInspect = has(opts, 'customInspect') ? opts.customInspect : true; + if (typeof customInspect !== 'boolean' && customInspect !== 'symbol') { + throw new TypeError('option "customInspect", if provided, must be `true`, `false`, or `\'symbol\'`'); + } + + if ( + has(opts, 'indent') + && opts.indent !== null + && opts.indent !== '\t' + && !(parseInt(opts.indent, 10) === opts.indent && opts.indent > 0) + ) { + throw new TypeError('option "indent" must be "\\t", an integer > 0, or `null`'); + } + if (has(opts, 'numericSeparator') && typeof opts.numericSeparator !== 'boolean') { + throw new TypeError('option "numericSeparator", if provided, must be `true` or `false`'); + } + var numericSeparator = opts.numericSeparator; + + if (typeof obj === 'undefined') { + return 'undefined'; + } + if (obj === null) { + return 'null'; + } + if (typeof obj === 'boolean') { + return obj ? 'true' : 'false'; + } + + if (typeof obj === 'string') { + return inspectString(obj, opts); + } + if (typeof obj === 'number') { + if (obj === 0) { + return Infinity / obj > 0 ? '0' : '-0'; + } + var str = String(obj); + return numericSeparator ? addNumericSeparator(obj, str) : str; + } + if (typeof obj === 'bigint') { + var bigIntStr = String(obj) + 'n'; + return numericSeparator ? addNumericSeparator(obj, bigIntStr) : bigIntStr; + } + + var maxDepth = typeof opts.depth === 'undefined' ? 5 : opts.depth; + if (typeof depth === 'undefined') { depth = 0; } + if (depth >= maxDepth && maxDepth > 0 && typeof obj === 'object') { + return isArray(obj) ? '[Array]' : '[Object]'; + } + + var indent = getIndent(opts, depth); + + if (typeof seen === 'undefined') { + seen = []; + } else if (indexOf(seen, obj) >= 0) { + return '[Circular]'; + } + + function inspect(value, from, noIndent) { + if (from) { + seen = $arrSlice.call(seen); + seen.push(from); + } + if (noIndent) { + var newOpts = { + depth: opts.depth + }; + if (has(opts, 'quoteStyle')) { + newOpts.quoteStyle = opts.quoteStyle; + } + return inspect_(value, newOpts, depth + 1, seen); + } + return inspect_(value, opts, depth + 1, seen); + } + + if (typeof obj === 'function' && !isRegExp(obj)) { // in older engines, regexes are callable + var name = nameOf(obj); + var keys = arrObjKeys(obj, inspect); + return '[Function' + (name ? ': ' + name : ' (anonymous)') + ']' + (keys.length > 0 ? ' { ' + $join.call(keys, ', ') + ' }' : ''); + } + if (isSymbol(obj)) { + var symString = hasShammedSymbols ? $replace.call(String(obj), /^(Symbol\(.*\))_[^)]*$/, '$1') : symToString.call(obj); + return typeof obj === 'object' && !hasShammedSymbols ? 
markBoxed(symString) : symString;
+    }
+    if (isElement(obj)) {
+        var s = '<' + $toLowerCase.call(String(obj.nodeName));
+        var attrs = obj.attributes || [];
+        for (var i = 0; i < attrs.length; i++) {
+            s += ' ' + attrs[i].name + '=' + wrapQuotes(quote(attrs[i].value), 'double', opts);
+        }
+        s += '>';
+        if (obj.childNodes && obj.childNodes.length) { s += '...'; }
+        s += '</' + $toLowerCase.call(String(obj.nodeName)) + '>';
+        return s;
+    }
+    if (isArray(obj)) {
+        if (obj.length === 0) { return '[]'; }
+        var xs = arrObjKeys(obj, inspect);
+        if (indent && !singleLineValues(xs)) {
+            return '[' + indentedJoin(xs, indent) + ']';
+        }
+        return '[ ' + $join.call(xs, ', ') + ' ]';
+    }
+    if (isError(obj)) {
+        var parts = arrObjKeys(obj, inspect);
+        if (!('cause' in Error.prototype) && 'cause' in obj && !isEnumerable.call(obj, 'cause')) {
+            return '{ [' + String(obj) + '] ' + $join.call($concat.call('[cause]: ' + inspect(obj.cause), parts), ', ') + ' }';
+        }
+        if (parts.length === 0) { return '[' + String(obj) + ']'; }
+        return '{ [' + String(obj) + '] ' + $join.call(parts, ', ') + ' }';
+    }
+    if (typeof obj === 'object' && customInspect) {
+        if (inspectSymbol && typeof obj[inspectSymbol] === 'function' && utilInspect) {
+            return utilInspect(obj, { depth: maxDepth - depth });
+        } else if (customInspect !== 'symbol' && typeof obj.inspect === 'function') {
+            return obj.inspect();
+        }
+    }
+    if (isMap(obj)) {
+        var mapParts = [];
+        mapForEach.call(obj, function (value, key) {
+            mapParts.push(inspect(key, obj, true) + ' => ' + inspect(value, obj));
+        });
+        return collectionOf('Map', mapSize.call(obj), mapParts, indent);
+    }
+    if (isSet(obj)) {
+        var setParts = [];
+        setForEach.call(obj, function (value) {
+            setParts.push(inspect(value, obj));
+        });
+        return collectionOf('Set', setSize.call(obj), setParts, indent);
+    }
+    if (isWeakMap(obj)) {
+        return weakCollectionOf('WeakMap');
+    }
+    if (isWeakSet(obj)) {
+        return weakCollectionOf('WeakSet');
+    }
+    if (isWeakRef(obj)) {
+        return weakCollectionOf('WeakRef');
+    }
+    if (isNumber(obj)) {
+        return markBoxed(inspect(Number(obj)));
+    }
+    if (isBigInt(obj)) {
+        return markBoxed(inspect(bigIntValueOf.call(obj)));
+    }
+    if (isBoolean(obj)) {
+        return markBoxed(booleanValueOf.call(obj));
+    }
+    if (isString(obj)) {
+        return markBoxed(inspect(String(obj)));
+    }
+    if (!isDate(obj) && !isRegExp(obj)) {
+        var ys = arrObjKeys(obj, inspect);
+        var isPlainObject = gPO ? gPO(obj) === Object.prototype : obj instanceof Object || obj.constructor === Object;
+        var protoTag = obj instanceof Object ? '' : 'null prototype';
+        var stringTag = !isPlainObject && toStringTag && Object(obj) === obj && toStringTag in obj ? $slice.call(toStr(obj), 8, -1) : protoTag ? 'Object' : '';
+        var constructorTag = isPlainObject || typeof obj.constructor !== 'function' ? '' : obj.constructor.name ? obj.constructor.name + ' ' : '';
+        var tag = constructorTag + (stringTag || protoTag ? '[' + $join.call($concat.call([], stringTag || [], protoTag || []), ': ') + '] ' : '');
+        if (ys.length === 0) { return tag + '{}'; }
+        if (indent) {
+            return tag + '{' + indentedJoin(ys, indent) + '}';
+        }
+        return tag + '{ ' + $join.call(ys, ', ') + ' }';
+    }
+    return String(obj);
+};
+
+function wrapQuotes(s, defaultStyle, opts) {
+    var quoteChar = (opts.quoteStyle || defaultStyle) === 'double' ?
'"' : "'"; + return quoteChar + s + quoteChar; +} + +function quote(s) { + return $replace.call(String(s), /"/g, '"'); +} + +function isArray(obj) { return toStr(obj) === '[object Array]' && (!toStringTag || !(typeof obj === 'object' && toStringTag in obj)); } +function isDate(obj) { return toStr(obj) === '[object Date]' && (!toStringTag || !(typeof obj === 'object' && toStringTag in obj)); } +function isRegExp(obj) { return toStr(obj) === '[object RegExp]' && (!toStringTag || !(typeof obj === 'object' && toStringTag in obj)); } +function isError(obj) { return toStr(obj) === '[object Error]' && (!toStringTag || !(typeof obj === 'object' && toStringTag in obj)); } +function isString(obj) { return toStr(obj) === '[object String]' && (!toStringTag || !(typeof obj === 'object' && toStringTag in obj)); } +function isNumber(obj) { return toStr(obj) === '[object Number]' && (!toStringTag || !(typeof obj === 'object' && toStringTag in obj)); } +function isBoolean(obj) { return toStr(obj) === '[object Boolean]' && (!toStringTag || !(typeof obj === 'object' && toStringTag in obj)); } + +// Symbol and BigInt do have Symbol.toStringTag by spec, so that can't be used to eliminate false positives +function isSymbol(obj) { + if (hasShammedSymbols) { + return obj && typeof obj === 'object' && obj instanceof Symbol; + } + if (typeof obj === 'symbol') { + return true; + } + if (!obj || typeof obj !== 'object' || !symToString) { + return false; + } + try { + symToString.call(obj); + return true; + } catch (e) {} + return false; +} + +function isBigInt(obj) { + if (!obj || typeof obj !== 'object' || !bigIntValueOf) { + return false; + } + try { + bigIntValueOf.call(obj); + return true; + } catch (e) {} + return false; +} + +var hasOwn = Object.prototype.hasOwnProperty || function (key) { return key in this; }; +function has(obj, key) { + return hasOwn.call(obj, key); +} + +function toStr(obj) { + return objectToString.call(obj); +} + +function nameOf(f) { + if (f.name) { return f.name; } + var m = $match.call(functionToString.call(f), /^function\s*([\w$]+)/); + if (m) { return m[1]; } + return null; +} + +function indexOf(xs, x) { + if (xs.indexOf) { return xs.indexOf(x); } + for (var i = 0, l = xs.length; i < l; i++) { + if (xs[i] === x) { return i; } + } + return -1; +} + +function isMap(x) { + if (!mapSize || !x || typeof x !== 'object') { + return false; + } + try { + mapSize.call(x); + try { + setSize.call(x); + } catch (s) { + return true; + } + return x instanceof Map; // core-js workaround, pre-v2.5.0 + } catch (e) {} + return false; +} + +function isWeakMap(x) { + if (!weakMapHas || !x || typeof x !== 'object') { + return false; + } + try { + weakMapHas.call(x, weakMapHas); + try { + weakSetHas.call(x, weakSetHas); + } catch (s) { + return true; + } + return x instanceof WeakMap; // core-js workaround, pre-v2.5.0 + } catch (e) {} + return false; +} + +function isWeakRef(x) { + if (!weakRefDeref || !x || typeof x !== 'object') { + return false; + } + try { + weakRefDeref.call(x); + return true; + } catch (e) {} + return false; +} + +function isSet(x) { + if (!setSize || !x || typeof x !== 'object') { + return false; + } + try { + setSize.call(x); + try { + mapSize.call(x); + } catch (m) { + return true; + } + return x instanceof Set; // core-js workaround, pre-v2.5.0 + } catch (e) {} + return false; +} + +function isWeakSet(x) { + if (!weakSetHas || !x || typeof x !== 'object') { + return false; + } + try { + weakSetHas.call(x, weakSetHas); + try { + weakMapHas.call(x, weakMapHas); + } catch (s) { + 
return true; + } + return x instanceof WeakSet; // core-js workaround, pre-v2.5.0 + } catch (e) {} + return false; +} + +function isElement(x) { + if (!x || typeof x !== 'object') { return false; } + if (typeof HTMLElement !== 'undefined' && x instanceof HTMLElement) { + return true; + } + return typeof x.nodeName === 'string' && typeof x.getAttribute === 'function'; +} + +function inspectString(str, opts) { + if (str.length > opts.maxStringLength) { + var remaining = str.length - opts.maxStringLength; + var trailer = '... ' + remaining + ' more character' + (remaining > 1 ? 's' : ''); + return inspectString($slice.call(str, 0, opts.maxStringLength), opts) + trailer; + } + // eslint-disable-next-line no-control-regex + var s = $replace.call($replace.call(str, /(['\\])/g, '\\$1'), /[\x00-\x1f]/g, lowbyte); + return wrapQuotes(s, 'single', opts); +} + +function lowbyte(c) { + var n = c.charCodeAt(0); + var x = { + 8: 'b', + 9: 't', + 10: 'n', + 12: 'f', + 13: 'r' + }[n]; + if (x) { return '\\' + x; } + return '\\x' + (n < 0x10 ? '0' : '') + $toUpperCase.call(n.toString(16)); +} + +function markBoxed(str) { + return 'Object(' + str + ')'; +} + +function weakCollectionOf(type) { + return type + ' { ? }'; +} + +function collectionOf(type, size, entries, indent) { + var joinedEntries = indent ? indentedJoin(entries, indent) : $join.call(entries, ', '); + return type + ' (' + size + ') {' + joinedEntries + '}'; +} + +function singleLineValues(xs) { + for (var i = 0; i < xs.length; i++) { + if (indexOf(xs[i], '\n') >= 0) { + return false; + } + } + return true; +} + +function getIndent(opts, depth) { + var baseIndent; + if (opts.indent === '\t') { + baseIndent = '\t'; + } else if (typeof opts.indent === 'number' && opts.indent > 0) { + baseIndent = $join.call(Array(opts.indent + 1), ' '); + } else { + return null; + } + return { + base: baseIndent, + prev: $join.call(Array(depth + 1), baseIndent) + }; +} + +function indentedJoin(xs, indent) { + if (xs.length === 0) { return ''; } + var lineJoiner = '\n' + indent.prev + indent.base; + return lineJoiner + $join.call(xs, ',' + lineJoiner) + '\n' + indent.prev; +} + +function arrObjKeys(obj, inspect) { + var isArr = isArray(obj); + var xs = []; + if (isArr) { + xs.length = obj.length; + for (var i = 0; i < obj.length; i++) { + xs[i] = has(obj, i) ? inspect(obj[i], obj) : ''; + } + } + var syms = typeof gOPS === 'function' ? 
gOPS(obj) : []; + var symMap; + if (hasShammedSymbols) { + symMap = {}; + for (var k = 0; k < syms.length; k++) { + symMap['$' + syms[k]] = syms[k]; + } + } + + for (var key in obj) { // eslint-disable-line no-restricted-syntax + if (!has(obj, key)) { continue; } // eslint-disable-line no-restricted-syntax, no-continue + if (isArr && String(Number(key)) === key && key < obj.length) { continue; } // eslint-disable-line no-restricted-syntax, no-continue + if (hasShammedSymbols && symMap['$' + key] instanceof Symbol) { + // this is to prevent shammed Symbols, which are stored as strings, from being included in the string key section + continue; // eslint-disable-line no-restricted-syntax, no-continue + } else if ($test.call(/[^\w$]/, key)) { + xs.push(inspect(key, obj) + ': ' + inspect(obj[key], obj)); + } else { + xs.push(key + ': ' + inspect(obj[key], obj)); + } + } + if (typeof gOPS === 'function') { + for (var j = 0; j < syms.length; j++) { + if (isEnumerable.call(obj, syms[j])) { + xs.push('[' + inspect(syms[j]) + ']: ' + inspect(obj[syms[j]], obj)); + } + } + } + return xs; +} + +},{"./util.inspect":6}],16:[function(require,module,exports){ +'use strict'; + +var GetIntrinsic = require('get-intrinsic'); +var callBound = require('call-bind/callBound'); +var inspect = require('object-inspect'); + +var $TypeError = GetIntrinsic('%TypeError%'); +var $WeakMap = GetIntrinsic('%WeakMap%', true); +var $Map = GetIntrinsic('%Map%', true); + +var $weakMapGet = callBound('WeakMap.prototype.get', true); +var $weakMapSet = callBound('WeakMap.prototype.set', true); +var $weakMapHas = callBound('WeakMap.prototype.has', true); +var $mapGet = callBound('Map.prototype.get', true); +var $mapSet = callBound('Map.prototype.set', true); +var $mapHas = callBound('Map.prototype.has', true); + +/* + * This function traverses the list returning the node corresponding to the + * given key. + * + * That node is also moved to the head of the list, so that if it's accessed + * again we don't need to traverse the whole list. By doing so, all the recently + * used nodes can be accessed relatively quickly. 
+ */ +var listGetNode = function (list, key) { // eslint-disable-line consistent-return + for (var prev = list, curr; (curr = prev.next) !== null; prev = curr) { + if (curr.key === key) { + prev.next = curr.next; + curr.next = list.next; + list.next = curr; // eslint-disable-line no-param-reassign + return curr; + } + } +}; + +var listGet = function (objects, key) { + var node = listGetNode(objects, key); + return node && node.value; +}; +var listSet = function (objects, key, value) { + var node = listGetNode(objects, key); + if (node) { + node.value = value; + } else { + // Prepend the new node to the beginning of the list + objects.next = { // eslint-disable-line no-param-reassign + key: key, + next: objects.next, + value: value + }; + } +}; +var listHas = function (objects, key) { + return !!listGetNode(objects, key); +}; + +module.exports = function getSideChannel() { + var $wm; + var $m; + var $o; + var channel = { + assert: function (key) { + if (!channel.has(key)) { + throw new $TypeError('Side channel does not contain ' + inspect(key)); + } + }, + get: function (key) { // eslint-disable-line consistent-return + if ($WeakMap && key && (typeof key === 'object' || typeof key === 'function')) { + if ($wm) { + return $weakMapGet($wm, key); + } + } else if ($Map) { + if ($m) { + return $mapGet($m, key); + } + } else { + if ($o) { // eslint-disable-line no-lonely-if + return listGet($o, key); + } + } + }, + has: function (key) { + if ($WeakMap && key && (typeof key === 'object' || typeof key === 'function')) { + if ($wm) { + return $weakMapHas($wm, key); + } + } else if ($Map) { + if ($m) { + return $mapHas($m, key); + } + } else { + if ($o) { // eslint-disable-line no-lonely-if + return listHas($o, key); + } + } + return false; + }, + set: function (key, value) { + if ($WeakMap && key && (typeof key === 'object' || typeof key === 'function')) { + if (!$wm) { + $wm = new $WeakMap(); + } + $weakMapSet($wm, key, value); + } else if ($Map) { + if (!$m) { + $m = new $Map(); + } + $mapSet($m, key, value); + } else { + if (!$o) { + /* + * Initialize the linked list as an empty node, so that we don't have + * to special-case handling of the first node: we can always refer to + * it as (previous node).next, instead of something like (list).head + */ + $o = { key: {}, next: null }; + } + listSet($o, key, value); + } + } + }; + return channel; +}; + +},{"call-bind/callBound":7,"get-intrinsic":11,"object-inspect":15}]},{},[2])(2) +}); diff --git a/node_modules/qs/lib/formats.js b/node_modules/qs/lib/formats.js new file mode 100644 index 0000000..f36cf20 --- /dev/null +++ b/node_modules/qs/lib/formats.js @@ -0,0 +1,23 @@ +'use strict'; + +var replace = String.prototype.replace; +var percentTwenties = /%20/g; + +var Format = { + RFC1738: 'RFC1738', + RFC3986: 'RFC3986' +}; + +module.exports = { + 'default': Format.RFC3986, + formatters: { + RFC1738: function (value) { + return replace.call(value, percentTwenties, '+'); + }, + RFC3986: function (value) { + return String(value); + } + }, + RFC1738: Format.RFC1738, + RFC3986: Format.RFC3986 +}; diff --git a/node_modules/qs/lib/index.js b/node_modules/qs/lib/index.js new file mode 100644 index 0000000..0d6a97d --- /dev/null +++ b/node_modules/qs/lib/index.js @@ -0,0 +1,11 @@ +'use strict'; + +var stringify = require('./stringify'); +var parse = require('./parse'); +var formats = require('./formats'); + +module.exports = { + formats: formats, + parse: parse, + stringify: stringify +}; diff --git a/node_modules/qs/lib/parse.js b/node_modules/qs/lib/parse.js 
new file mode 100644 index 0000000..a4ac4fa --- /dev/null +++ b/node_modules/qs/lib/parse.js @@ -0,0 +1,263 @@ +'use strict'; + +var utils = require('./utils'); + +var has = Object.prototype.hasOwnProperty; +var isArray = Array.isArray; + +var defaults = { + allowDots: false, + allowPrototypes: false, + allowSparse: false, + arrayLimit: 20, + charset: 'utf-8', + charsetSentinel: false, + comma: false, + decoder: utils.decode, + delimiter: '&', + depth: 5, + ignoreQueryPrefix: false, + interpretNumericEntities: false, + parameterLimit: 1000, + parseArrays: true, + plainObjects: false, + strictNullHandling: false +}; + +var interpretNumericEntities = function (str) { + return str.replace(/&#(\d+);/g, function ($0, numberStr) { + return String.fromCharCode(parseInt(numberStr, 10)); + }); +}; + +var parseArrayValue = function (val, options) { + if (val && typeof val === 'string' && options.comma && val.indexOf(',') > -1) { + return val.split(','); + } + + return val; +}; + +// This is what browsers will submit when the ✓ character occurs in an +// application/x-www-form-urlencoded body and the encoding of the page containing +// the form is iso-8859-1, or when the submitted form has an accept-charset +// attribute of iso-8859-1. Presumably also with other charsets that do not contain +// the ✓ character, such as us-ascii. +var isoSentinel = 'utf8=%26%2310003%3B'; // encodeURIComponent('✓') + +// These are the percent-encoded utf-8 octets representing a checkmark, indicating that the request actually is utf-8 encoded. +var charsetSentinel = 'utf8=%E2%9C%93'; // encodeURIComponent('✓') + +var parseValues = function parseQueryStringValues(str, options) { + var obj = {}; + var cleanStr = options.ignoreQueryPrefix ? str.replace(/^\?/, '') : str; + var limit = options.parameterLimit === Infinity ? undefined : options.parameterLimit; + var parts = cleanStr.split(options.delimiter, limit); + var skipIndex = -1; // Keep track of where the utf8 sentinel was found + var i; + + var charset = options.charset; + if (options.charsetSentinel) { + for (i = 0; i < parts.length; ++i) { + if (parts[i].indexOf('utf8=') === 0) { + if (parts[i] === charsetSentinel) { + charset = 'utf-8'; + } else if (parts[i] === isoSentinel) { + charset = 'iso-8859-1'; + } + skipIndex = i; + i = parts.length; // The eslint settings do not allow break; + } + } + } + + for (i = 0; i < parts.length; ++i) { + if (i === skipIndex) { + continue; + } + var part = parts[i]; + + var bracketEqualsPos = part.indexOf(']='); + var pos = bracketEqualsPos === -1 ? part.indexOf('=') : bracketEqualsPos + 1; + + var key, val; + if (pos === -1) { + key = options.decoder(part, defaults.decoder, charset, 'key'); + val = options.strictNullHandling ? null : ''; + } else { + key = options.decoder(part.slice(0, pos), defaults.decoder, charset, 'key'); + val = utils.maybeMap( + parseArrayValue(part.slice(pos + 1), options), + function (encodedVal) { + return options.decoder(encodedVal, defaults.decoder, charset, 'value'); + } + ); + } + + if (val && options.interpretNumericEntities && charset === 'iso-8859-1') { + val = interpretNumericEntities(val); + } + + if (part.indexOf('[]=') > -1) { + val = isArray(val) ? [val] : val; + } + + if (has.call(obj, key)) { + obj[key] = utils.combine(obj[key], val); + } else { + obj[key] = val; + } + } + + return obj; +}; + +var parseObject = function (chain, val, options, valuesParsed) { + var leaf = valuesParsed ? 
val : parseArrayValue(val, options); + + for (var i = chain.length - 1; i >= 0; --i) { + var obj; + var root = chain[i]; + + if (root === '[]' && options.parseArrays) { + obj = [].concat(leaf); + } else { + obj = options.plainObjects ? Object.create(null) : {}; + var cleanRoot = root.charAt(0) === '[' && root.charAt(root.length - 1) === ']' ? root.slice(1, -1) : root; + var index = parseInt(cleanRoot, 10); + if (!options.parseArrays && cleanRoot === '') { + obj = { 0: leaf }; + } else if ( + !isNaN(index) + && root !== cleanRoot + && String(index) === cleanRoot + && index >= 0 + && (options.parseArrays && index <= options.arrayLimit) + ) { + obj = []; + obj[index] = leaf; + } else if (cleanRoot !== '__proto__') { + obj[cleanRoot] = leaf; + } + } + + leaf = obj; + } + + return leaf; +}; + +var parseKeys = function parseQueryStringKeys(givenKey, val, options, valuesParsed) { + if (!givenKey) { + return; + } + + // Transform dot notation to bracket notation + var key = options.allowDots ? givenKey.replace(/\.([^.[]+)/g, '[$1]') : givenKey; + + // The regex chunks + + var brackets = /(\[[^[\]]*])/; + var child = /(\[[^[\]]*])/g; + + // Get the parent + + var segment = options.depth > 0 && brackets.exec(key); + var parent = segment ? key.slice(0, segment.index) : key; + + // Stash the parent if it exists + + var keys = []; + if (parent) { + // If we aren't using plain objects, optionally prefix keys that would overwrite object prototype properties + if (!options.plainObjects && has.call(Object.prototype, parent)) { + if (!options.allowPrototypes) { + return; + } + } + + keys.push(parent); + } + + // Loop through children appending to the array until we hit depth + + var i = 0; + while (options.depth > 0 && (segment = child.exec(key)) !== null && i < options.depth) { + i += 1; + if (!options.plainObjects && has.call(Object.prototype, segment[1].slice(1, -1))) { + if (!options.allowPrototypes) { + return; + } + } + keys.push(segment[1]); + } + + // If there's a remainder, just add whatever is left + + if (segment) { + keys.push('[' + key.slice(segment.index) + ']'); + } + + return parseObject(keys, val, options, valuesParsed); +}; + +var normalizeParseOptions = function normalizeParseOptions(opts) { + if (!opts) { + return defaults; + } + + if (opts.decoder !== null && opts.decoder !== undefined && typeof opts.decoder !== 'function') { + throw new TypeError('Decoder has to be a function.'); + } + + if (typeof opts.charset !== 'undefined' && opts.charset !== 'utf-8' && opts.charset !== 'iso-8859-1') { + throw new TypeError('The charset option must be either utf-8, iso-8859-1, or undefined'); + } + var charset = typeof opts.charset === 'undefined' ? defaults.charset : opts.charset; + + return { + allowDots: typeof opts.allowDots === 'undefined' ? defaults.allowDots : !!opts.allowDots, + allowPrototypes: typeof opts.allowPrototypes === 'boolean' ? opts.allowPrototypes : defaults.allowPrototypes, + allowSparse: typeof opts.allowSparse === 'boolean' ? opts.allowSparse : defaults.allowSparse, + arrayLimit: typeof opts.arrayLimit === 'number' ? opts.arrayLimit : defaults.arrayLimit, + charset: charset, + charsetSentinel: typeof opts.charsetSentinel === 'boolean' ? opts.charsetSentinel : defaults.charsetSentinel, + comma: typeof opts.comma === 'boolean' ? opts.comma : defaults.comma, + decoder: typeof opts.decoder === 'function' ? opts.decoder : defaults.decoder, + delimiter: typeof opts.delimiter === 'string' || utils.isRegExp(opts.delimiter) ? 
opts.delimiter : defaults.delimiter, + // eslint-disable-next-line no-implicit-coercion, no-extra-parens + depth: (typeof opts.depth === 'number' || opts.depth === false) ? +opts.depth : defaults.depth, + ignoreQueryPrefix: opts.ignoreQueryPrefix === true, + interpretNumericEntities: typeof opts.interpretNumericEntities === 'boolean' ? opts.interpretNumericEntities : defaults.interpretNumericEntities, + parameterLimit: typeof opts.parameterLimit === 'number' ? opts.parameterLimit : defaults.parameterLimit, + parseArrays: opts.parseArrays !== false, + plainObjects: typeof opts.plainObjects === 'boolean' ? opts.plainObjects : defaults.plainObjects, + strictNullHandling: typeof opts.strictNullHandling === 'boolean' ? opts.strictNullHandling : defaults.strictNullHandling + }; +}; + +module.exports = function (str, opts) { + var options = normalizeParseOptions(opts); + + if (str === '' || str === null || typeof str === 'undefined') { + return options.plainObjects ? Object.create(null) : {}; + } + + var tempObj = typeof str === 'string' ? parseValues(str, options) : str; + var obj = options.plainObjects ? Object.create(null) : {}; + + // Iterate over the keys and setup the new object + + var keys = Object.keys(tempObj); + for (var i = 0; i < keys.length; ++i) { + var key = keys[i]; + var newObj = parseKeys(key, tempObj[key], options, typeof str === 'string'); + obj = utils.merge(obj, newObj, options); + } + + if (options.allowSparse === true) { + return obj; + } + + return utils.compact(obj); +}; diff --git a/node_modules/qs/lib/stringify.js b/node_modules/qs/lib/stringify.js new file mode 100644 index 0000000..48ec030 --- /dev/null +++ b/node_modules/qs/lib/stringify.js @@ -0,0 +1,326 @@ +'use strict'; + +var getSideChannel = require('side-channel'); +var utils = require('./utils'); +var formats = require('./formats'); +var has = Object.prototype.hasOwnProperty; + +var arrayPrefixGenerators = { + brackets: function brackets(prefix) { + return prefix + '[]'; + }, + comma: 'comma', + indices: function indices(prefix, key) { + return prefix + '[' + key + ']'; + }, + repeat: function repeat(prefix) { + return prefix; + } +}; + +var isArray = Array.isArray; +var split = String.prototype.split; +var push = Array.prototype.push; +var pushToArray = function (arr, valueOrArray) { + push.apply(arr, isArray(valueOrArray) ? 
valueOrArray : [valueOrArray]); +}; + +var toISO = Date.prototype.toISOString; + +var defaultFormat = formats['default']; +var defaults = { + addQueryPrefix: false, + allowDots: false, + charset: 'utf-8', + charsetSentinel: false, + delimiter: '&', + encode: true, + encoder: utils.encode, + encodeValuesOnly: false, + format: defaultFormat, + formatter: formats.formatters[defaultFormat], + // deprecated + indices: false, + serializeDate: function serializeDate(date) { + return toISO.call(date); + }, + skipNulls: false, + strictNullHandling: false +}; + +var isNonNullishPrimitive = function isNonNullishPrimitive(v) { + return typeof v === 'string' + || typeof v === 'number' + || typeof v === 'boolean' + || typeof v === 'symbol' + || typeof v === 'bigint'; +}; + +var sentinel = {}; + +var stringify = function stringify( + object, + prefix, + generateArrayPrefix, + commaRoundTrip, + strictNullHandling, + skipNulls, + encoder, + filter, + sort, + allowDots, + serializeDate, + format, + formatter, + encodeValuesOnly, + charset, + sideChannel +) { + var obj = object; + + var tmpSc = sideChannel; + var step = 0; + var findFlag = false; + while ((tmpSc = tmpSc.get(sentinel)) !== void undefined && !findFlag) { + // Where object last appeared in the ref tree + var pos = tmpSc.get(object); + step += 1; + if (typeof pos !== 'undefined') { + if (pos === step) { + throw new RangeError('Cyclic object value'); + } else { + findFlag = true; // Break while + } + } + if (typeof tmpSc.get(sentinel) === 'undefined') { + step = 0; + } + } + + if (typeof filter === 'function') { + obj = filter(prefix, obj); + } else if (obj instanceof Date) { + obj = serializeDate(obj); + } else if (generateArrayPrefix === 'comma' && isArray(obj)) { + obj = utils.maybeMap(obj, function (value) { + if (value instanceof Date) { + return serializeDate(value); + } + return value; + }); + } + + if (obj === null) { + if (strictNullHandling) { + return encoder && !encodeValuesOnly ? encoder(prefix, defaults.encoder, charset, 'key', format) : prefix; + } + + obj = ''; + } + + if (isNonNullishPrimitive(obj) || utils.isBuffer(obj)) { + if (encoder) { + var keyValue = encodeValuesOnly ? prefix : encoder(prefix, defaults.encoder, charset, 'key', format); + if (generateArrayPrefix === 'comma' && encodeValuesOnly) { + var valuesArray = split.call(String(obj), ','); + var valuesJoined = ''; + for (var i = 0; i < valuesArray.length; ++i) { + valuesJoined += (i === 0 ? '' : ',') + formatter(encoder(valuesArray[i], defaults.encoder, charset, 'value', format)); + } + return [formatter(keyValue) + (commaRoundTrip && isArray(obj) && valuesArray.length === 1 ? '[]' : '') + '=' + valuesJoined]; + } + return [formatter(keyValue) + '=' + formatter(encoder(obj, defaults.encoder, charset, 'value', format))]; + } + return [formatter(prefix) + '=' + formatter(String(obj))]; + } + + var values = []; + + if (typeof obj === 'undefined') { + return values; + } + + var objKeys; + if (generateArrayPrefix === 'comma' && isArray(obj)) { + // we need to join elements in + objKeys = [{ value: obj.length > 0 ? obj.join(',') || null : void undefined }]; + } else if (isArray(filter)) { + objKeys = filter; + } else { + var keys = Object.keys(obj); + objKeys = sort ? keys.sort(sort) : keys; + } + + var adjustedPrefix = commaRoundTrip && isArray(obj) && obj.length === 1 ? prefix + '[]' : prefix; + + for (var j = 0; j < objKeys.length; ++j) { + var key = objKeys[j]; + var value = typeof key === 'object' && typeof key.value !== 'undefined' ? 
key.value : obj[key]; + + if (skipNulls && value === null) { + continue; + } + + var keyPrefix = isArray(obj) + ? typeof generateArrayPrefix === 'function' ? generateArrayPrefix(adjustedPrefix, key) : adjustedPrefix + : adjustedPrefix + (allowDots ? '.' + key : '[' + key + ']'); + + sideChannel.set(object, step); + var valueSideChannel = getSideChannel(); + valueSideChannel.set(sentinel, sideChannel); + pushToArray(values, stringify( + value, + keyPrefix, + generateArrayPrefix, + commaRoundTrip, + strictNullHandling, + skipNulls, + encoder, + filter, + sort, + allowDots, + serializeDate, + format, + formatter, + encodeValuesOnly, + charset, + valueSideChannel + )); + } + + return values; +}; + +var normalizeStringifyOptions = function normalizeStringifyOptions(opts) { + if (!opts) { + return defaults; + } + + if (opts.encoder !== null && typeof opts.encoder !== 'undefined' && typeof opts.encoder !== 'function') { + throw new TypeError('Encoder has to be a function.'); + } + + var charset = opts.charset || defaults.charset; + if (typeof opts.charset !== 'undefined' && opts.charset !== 'utf-8' && opts.charset !== 'iso-8859-1') { + throw new TypeError('The charset option must be either utf-8, iso-8859-1, or undefined'); + } + + var format = formats['default']; + if (typeof opts.format !== 'undefined') { + if (!has.call(formats.formatters, opts.format)) { + throw new TypeError('Unknown format option provided.'); + } + format = opts.format; + } + var formatter = formats.formatters[format]; + + var filter = defaults.filter; + if (typeof opts.filter === 'function' || isArray(opts.filter)) { + filter = opts.filter; + } + + return { + addQueryPrefix: typeof opts.addQueryPrefix === 'boolean' ? opts.addQueryPrefix : defaults.addQueryPrefix, + allowDots: typeof opts.allowDots === 'undefined' ? defaults.allowDots : !!opts.allowDots, + charset: charset, + charsetSentinel: typeof opts.charsetSentinel === 'boolean' ? opts.charsetSentinel : defaults.charsetSentinel, + delimiter: typeof opts.delimiter === 'undefined' ? defaults.delimiter : opts.delimiter, + encode: typeof opts.encode === 'boolean' ? opts.encode : defaults.encode, + encoder: typeof opts.encoder === 'function' ? opts.encoder : defaults.encoder, + encodeValuesOnly: typeof opts.encodeValuesOnly === 'boolean' ? opts.encodeValuesOnly : defaults.encodeValuesOnly, + filter: filter, + format: format, + formatter: formatter, + serializeDate: typeof opts.serializeDate === 'function' ? opts.serializeDate : defaults.serializeDate, + skipNulls: typeof opts.skipNulls === 'boolean' ? opts.skipNulls : defaults.skipNulls, + sort: typeof opts.sort === 'function' ? opts.sort : null, + strictNullHandling: typeof opts.strictNullHandling === 'boolean' ? opts.strictNullHandling : defaults.strictNullHandling + }; +}; + +module.exports = function (object, opts) { + var obj = object; + var options = normalizeStringifyOptions(opts); + + var objKeys; + var filter; + + if (typeof options.filter === 'function') { + filter = options.filter; + obj = filter('', obj); + } else if (isArray(options.filter)) { + filter = options.filter; + objKeys = filter; + } + + var keys = []; + + if (typeof obj !== 'object' || obj === null) { + return ''; + } + + var arrayFormat; + if (opts && opts.arrayFormat in arrayPrefixGenerators) { + arrayFormat = opts.arrayFormat; + } else if (opts && 'indices' in opts) { + arrayFormat = opts.indices ? 
'indices' : 'repeat'; + } else { + arrayFormat = 'indices'; + } + + var generateArrayPrefix = arrayPrefixGenerators[arrayFormat]; + if (opts && 'commaRoundTrip' in opts && typeof opts.commaRoundTrip !== 'boolean') { + throw new TypeError('`commaRoundTrip` must be a boolean, or absent'); + } + var commaRoundTrip = generateArrayPrefix === 'comma' && opts && opts.commaRoundTrip; + + if (!objKeys) { + objKeys = Object.keys(obj); + } + + if (options.sort) { + objKeys.sort(options.sort); + } + + var sideChannel = getSideChannel(); + for (var i = 0; i < objKeys.length; ++i) { + var key = objKeys[i]; + + if (options.skipNulls && obj[key] === null) { + continue; + } + pushToArray(keys, stringify( + obj[key], + key, + generateArrayPrefix, + commaRoundTrip, + options.strictNullHandling, + options.skipNulls, + options.encode ? options.encoder : null, + options.filter, + options.sort, + options.allowDots, + options.serializeDate, + options.format, + options.formatter, + options.encodeValuesOnly, + options.charset, + sideChannel + )); + } + + var joined = keys.join(options.delimiter); + var prefix = options.addQueryPrefix === true ? '?' : ''; + + if (options.charsetSentinel) { + if (options.charset === 'iso-8859-1') { + // encodeURIComponent('✓'), the "numeric entity" representation of a checkmark + prefix += 'utf8=%26%2310003%3B&'; + } else { + // encodeURIComponent('✓') + prefix += 'utf8=%E2%9C%93&'; + } + } + + return joined.length > 0 ? prefix + joined : ''; +}; diff --git a/node_modules/qs/lib/utils.js b/node_modules/qs/lib/utils.js new file mode 100644 index 0000000..1e54538 --- /dev/null +++ b/node_modules/qs/lib/utils.js @@ -0,0 +1,252 @@ +'use strict'; + +var formats = require('./formats'); + +var has = Object.prototype.hasOwnProperty; +var isArray = Array.isArray; + +var hexTable = (function () { + var array = []; + for (var i = 0; i < 256; ++i) { + array.push('%' + ((i < 16 ? '0' : '') + i.toString(16)).toUpperCase()); + } + + return array; +}()); + +var compactQueue = function compactQueue(queue) { + while (queue.length > 1) { + var item = queue.pop(); + var obj = item.obj[item.prop]; + + if (isArray(obj)) { + var compacted = []; + + for (var j = 0; j < obj.length; ++j) { + if (typeof obj[j] !== 'undefined') { + compacted.push(obj[j]); + } + } + + item.obj[item.prop] = compacted; + } + } +}; + +var arrayToObject = function arrayToObject(source, options) { + var obj = options && options.plainObjects ? 
Object.create(null) : {}; + for (var i = 0; i < source.length; ++i) { + if (typeof source[i] !== 'undefined') { + obj[i] = source[i]; + } + } + + return obj; +}; + +var merge = function merge(target, source, options) { + /* eslint no-param-reassign: 0 */ + if (!source) { + return target; + } + + if (typeof source !== 'object') { + if (isArray(target)) { + target.push(source); + } else if (target && typeof target === 'object') { + if ((options && (options.plainObjects || options.allowPrototypes)) || !has.call(Object.prototype, source)) { + target[source] = true; + } + } else { + return [target, source]; + } + + return target; + } + + if (!target || typeof target !== 'object') { + return [target].concat(source); + } + + var mergeTarget = target; + if (isArray(target) && !isArray(source)) { + mergeTarget = arrayToObject(target, options); + } + + if (isArray(target) && isArray(source)) { + source.forEach(function (item, i) { + if (has.call(target, i)) { + var targetItem = target[i]; + if (targetItem && typeof targetItem === 'object' && item && typeof item === 'object') { + target[i] = merge(targetItem, item, options); + } else { + target.push(item); + } + } else { + target[i] = item; + } + }); + return target; + } + + return Object.keys(source).reduce(function (acc, key) { + var value = source[key]; + + if (has.call(acc, key)) { + acc[key] = merge(acc[key], value, options); + } else { + acc[key] = value; + } + return acc; + }, mergeTarget); +}; + +var assign = function assignSingleSource(target, source) { + return Object.keys(source).reduce(function (acc, key) { + acc[key] = source[key]; + return acc; + }, target); +}; + +var decode = function (str, decoder, charset) { + var strWithoutPlus = str.replace(/\+/g, ' '); + if (charset === 'iso-8859-1') { + // unescape never throws, no try...catch needed: + return strWithoutPlus.replace(/%[0-9a-f]{2}/gi, unescape); + } + // utf-8 + try { + return decodeURIComponent(strWithoutPlus); + } catch (e) { + return strWithoutPlus; + } +}; + +var encode = function encode(str, defaultEncoder, charset, kind, format) { + // This code was originally written by Brian White (mscdex) for the io.js core querystring library. + // It has been adapted here for stricter adherence to RFC 3986 + if (str.length === 0) { + return str; + } + + var string = str; + if (typeof str === 'symbol') { + string = Symbol.prototype.toString.call(str); + } else if (typeof str !== 'string') { + string = String(str); + } + + if (charset === 'iso-8859-1') { + return escape(string).replace(/%u[0-9a-f]{4}/gi, function ($0) { + return '%26%23' + parseInt($0.slice(2), 16) + '%3B'; + }); + } + + var out = ''; + for (var i = 0; i < string.length; ++i) { + var c = string.charCodeAt(i); + + if ( + c === 0x2D // - + || c === 0x2E // . 
+ || c === 0x5F // _ + || c === 0x7E // ~ + || (c >= 0x30 && c <= 0x39) // 0-9 + || (c >= 0x41 && c <= 0x5A) // a-z + || (c >= 0x61 && c <= 0x7A) // A-Z + || (format === formats.RFC1738 && (c === 0x28 || c === 0x29)) // ( ) + ) { + out += string.charAt(i); + continue; + } + + if (c < 0x80) { + out = out + hexTable[c]; + continue; + } + + if (c < 0x800) { + out = out + (hexTable[0xC0 | (c >> 6)] + hexTable[0x80 | (c & 0x3F)]); + continue; + } + + if (c < 0xD800 || c >= 0xE000) { + out = out + (hexTable[0xE0 | (c >> 12)] + hexTable[0x80 | ((c >> 6) & 0x3F)] + hexTable[0x80 | (c & 0x3F)]); + continue; + } + + i += 1; + c = 0x10000 + (((c & 0x3FF) << 10) | (string.charCodeAt(i) & 0x3FF)); + /* eslint operator-linebreak: [2, "before"] */ + out += hexTable[0xF0 | (c >> 18)] + + hexTable[0x80 | ((c >> 12) & 0x3F)] + + hexTable[0x80 | ((c >> 6) & 0x3F)] + + hexTable[0x80 | (c & 0x3F)]; + } + + return out; +}; + +var compact = function compact(value) { + var queue = [{ obj: { o: value }, prop: 'o' }]; + var refs = []; + + for (var i = 0; i < queue.length; ++i) { + var item = queue[i]; + var obj = item.obj[item.prop]; + + var keys = Object.keys(obj); + for (var j = 0; j < keys.length; ++j) { + var key = keys[j]; + var val = obj[key]; + if (typeof val === 'object' && val !== null && refs.indexOf(val) === -1) { + queue.push({ obj: obj, prop: key }); + refs.push(val); + } + } + } + + compactQueue(queue); + + return value; +}; + +var isRegExp = function isRegExp(obj) { + return Object.prototype.toString.call(obj) === '[object RegExp]'; +}; + +var isBuffer = function isBuffer(obj) { + if (!obj || typeof obj !== 'object') { + return false; + } + + return !!(obj.constructor && obj.constructor.isBuffer && obj.constructor.isBuffer(obj)); +}; + +var combine = function combine(a, b) { + return [].concat(a, b); +}; + +var maybeMap = function maybeMap(val, fn) { + if (isArray(val)) { + var mapped = []; + for (var i = 0; i < val.length; i += 1) { + mapped.push(fn(val[i])); + } + return mapped; + } + return fn(val); +}; + +module.exports = { + arrayToObject: arrayToObject, + assign: assign, + combine: combine, + compact: compact, + decode: decode, + encode: encode, + isBuffer: isBuffer, + isRegExp: isRegExp, + maybeMap: maybeMap, + merge: merge +}; diff --git a/node_modules/qs/package.json b/node_modules/qs/package.json new file mode 100644 index 0000000..2ff42f3 --- /dev/null +++ b/node_modules/qs/package.json @@ -0,0 +1,77 @@ +{ + "name": "qs", + "description": "A querystring parser that supports nesting and arrays, with a depth limit", + "homepage": "https://github.com/ljharb/qs", + "version": "6.11.0", + "repository": { + "type": "git", + "url": "https://github.com/ljharb/qs.git" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + }, + "main": "lib/index.js", + "contributors": [ + { + "name": "Jordan Harband", + "email": "ljharb@gmail.com", + "url": "http://ljharb.codes" + } + ], + "keywords": [ + "querystring", + "qs", + "query", + "url", + "parse", + "stringify" + ], + "engines": { + "node": ">=0.6" + }, + "dependencies": { + "side-channel": "^1.0.4" + }, + "devDependencies": { + "@ljharb/eslint-config": "^21.0.0", + "aud": "^2.0.0", + "browserify": "^16.5.2", + "eclint": "^2.8.1", + "eslint": "=8.8.0", + "evalmd": "^0.0.19", + "for-each": "^0.3.3", + "has-symbols": "^1.0.3", + "iconv-lite": "^0.5.1", + "in-publish": "^2.0.1", + "mkdirp": "^0.5.5", + "npmignore": "^0.3.0", + "nyc": "^10.3.2", + "object-inspect": "^1.12.2", + "qs-iconv": "^1.0.4", + "safe-publish-latest": "^2.0.0", + 
"safer-buffer": "^2.1.2", + "tape": "^5.5.3" + }, + "scripts": { + "prepack": "npmignore --auto --commentLines=autogenerated", + "prepublishOnly": "safe-publish-latest && npm run dist", + "prepublish": "not-in-publish || npm run prepublishOnly", + "pretest": "npm run --silent readme && npm run --silent lint", + "test": "npm run tests-only", + "tests-only": "nyc tape 'test/**/*.js'", + "posttest": "aud --production", + "readme": "evalmd README.md", + "postlint": "eclint check $(git ls-files | xargs find 2> /dev/null | grep -vE 'node_modules|\\.git' | grep -v dist/)", + "lint": "eslint --ext=js,mjs .", + "dist": "mkdirp dist && browserify --standalone Qs lib/index.js > dist/qs.js" + }, + "license": "BSD-3-Clause", + "publishConfig": { + "ignore": [ + "!dist/*", + "bower.json", + "component.json", + ".github/workflows" + ] + } +} diff --git a/node_modules/qs/test/parse.js b/node_modules/qs/test/parse.js new file mode 100644 index 0000000..7d7b4dd --- /dev/null +++ b/node_modules/qs/test/parse.js @@ -0,0 +1,855 @@ +'use strict'; + +var test = require('tape'); +var qs = require('../'); +var utils = require('../lib/utils'); +var iconv = require('iconv-lite'); +var SaferBuffer = require('safer-buffer').Buffer; + +test('parse()', function (t) { + t.test('parses a simple string', function (st) { + st.deepEqual(qs.parse('0=foo'), { 0: 'foo' }); + st.deepEqual(qs.parse('foo=c++'), { foo: 'c ' }); + st.deepEqual(qs.parse('a[>=]=23'), { a: { '>=': '23' } }); + st.deepEqual(qs.parse('a[<=>]==23'), { a: { '<=>': '=23' } }); + st.deepEqual(qs.parse('a[==]=23'), { a: { '==': '23' } }); + st.deepEqual(qs.parse('foo', { strictNullHandling: true }), { foo: null }); + st.deepEqual(qs.parse('foo'), { foo: '' }); + st.deepEqual(qs.parse('foo='), { foo: '' }); + st.deepEqual(qs.parse('foo=bar'), { foo: 'bar' }); + st.deepEqual(qs.parse(' foo = bar = baz '), { ' foo ': ' bar = baz ' }); + st.deepEqual(qs.parse('foo=bar=baz'), { foo: 'bar=baz' }); + st.deepEqual(qs.parse('foo=bar&bar=baz'), { foo: 'bar', bar: 'baz' }); + st.deepEqual(qs.parse('foo2=bar2&baz2='), { foo2: 'bar2', baz2: '' }); + st.deepEqual(qs.parse('foo=bar&baz', { strictNullHandling: true }), { foo: 'bar', baz: null }); + st.deepEqual(qs.parse('foo=bar&baz'), { foo: 'bar', baz: '' }); + st.deepEqual(qs.parse('cht=p3&chd=t:60,40&chs=250x100&chl=Hello|World'), { + cht: 'p3', + chd: 't:60,40', + chs: '250x100', + chl: 'Hello|World' + }); + st.end(); + }); + + t.test('arrayFormat: brackets allows only explicit arrays', function (st) { + st.deepEqual(qs.parse('a[]=b&a[]=c', { arrayFormat: 'brackets' }), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a[0]=b&a[1]=c', { arrayFormat: 'brackets' }), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a=b,c', { arrayFormat: 'brackets' }), { a: 'b,c' }); + st.deepEqual(qs.parse('a=b&a=c', { arrayFormat: 'brackets' }), { a: ['b', 'c'] }); + st.end(); + }); + + t.test('arrayFormat: indices allows only indexed arrays', function (st) { + st.deepEqual(qs.parse('a[]=b&a[]=c', { arrayFormat: 'indices' }), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a[0]=b&a[1]=c', { arrayFormat: 'indices' }), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a=b,c', { arrayFormat: 'indices' }), { a: 'b,c' }); + st.deepEqual(qs.parse('a=b&a=c', { arrayFormat: 'indices' }), { a: ['b', 'c'] }); + st.end(); + }); + + t.test('arrayFormat: comma allows only comma-separated arrays', function (st) { + st.deepEqual(qs.parse('a[]=b&a[]=c', { arrayFormat: 'comma' }), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a[0]=b&a[1]=c', { arrayFormat: 'comma' }), { 
a: ['b', 'c'] }); + st.deepEqual(qs.parse('a=b,c', { arrayFormat: 'comma' }), { a: 'b,c' }); + st.deepEqual(qs.parse('a=b&a=c', { arrayFormat: 'comma' }), { a: ['b', 'c'] }); + st.end(); + }); + + t.test('arrayFormat: repeat allows only repeated values', function (st) { + st.deepEqual(qs.parse('a[]=b&a[]=c', { arrayFormat: 'repeat' }), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a[0]=b&a[1]=c', { arrayFormat: 'repeat' }), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a=b,c', { arrayFormat: 'repeat' }), { a: 'b,c' }); + st.deepEqual(qs.parse('a=b&a=c', { arrayFormat: 'repeat' }), { a: ['b', 'c'] }); + st.end(); + }); + + t.test('allows enabling dot notation', function (st) { + st.deepEqual(qs.parse('a.b=c'), { 'a.b': 'c' }); + st.deepEqual(qs.parse('a.b=c', { allowDots: true }), { a: { b: 'c' } }); + st.end(); + }); + + t.deepEqual(qs.parse('a[b]=c'), { a: { b: 'c' } }, 'parses a single nested string'); + t.deepEqual(qs.parse('a[b][c]=d'), { a: { b: { c: 'd' } } }, 'parses a double nested string'); + t.deepEqual( + qs.parse('a[b][c][d][e][f][g][h]=i'), + { a: { b: { c: { d: { e: { f: { '[g][h]': 'i' } } } } } } }, + 'defaults to a depth of 5' + ); + + t.test('only parses one level when depth = 1', function (st) { + st.deepEqual(qs.parse('a[b][c]=d', { depth: 1 }), { a: { b: { '[c]': 'd' } } }); + st.deepEqual(qs.parse('a[b][c][d]=e', { depth: 1 }), { a: { b: { '[c][d]': 'e' } } }); + st.end(); + }); + + t.test('uses original key when depth = 0', function (st) { + st.deepEqual(qs.parse('a[0]=b&a[1]=c', { depth: 0 }), { 'a[0]': 'b', 'a[1]': 'c' }); + st.deepEqual(qs.parse('a[0][0]=b&a[0][1]=c&a[1]=d&e=2', { depth: 0 }), { 'a[0][0]': 'b', 'a[0][1]': 'c', 'a[1]': 'd', e: '2' }); + st.end(); + }); + + t.test('uses original key when depth = false', function (st) { + st.deepEqual(qs.parse('a[0]=b&a[1]=c', { depth: false }), { 'a[0]': 'b', 'a[1]': 'c' }); + st.deepEqual(qs.parse('a[0][0]=b&a[0][1]=c&a[1]=d&e=2', { depth: false }), { 'a[0][0]': 'b', 'a[0][1]': 'c', 'a[1]': 'd', e: '2' }); + st.end(); + }); + + t.deepEqual(qs.parse('a=b&a=c'), { a: ['b', 'c'] }, 'parses a simple array'); + + t.test('parses an explicit array', function (st) { + st.deepEqual(qs.parse('a[]=b'), { a: ['b'] }); + st.deepEqual(qs.parse('a[]=b&a[]=c'), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a[]=b&a[]=c&a[]=d'), { a: ['b', 'c', 'd'] }); + st.end(); + }); + + t.test('parses a mix of simple and explicit arrays', function (st) { + st.deepEqual(qs.parse('a=b&a[]=c'), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a[]=b&a=c'), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a[0]=b&a=c'), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a=b&a[0]=c'), { a: ['b', 'c'] }); + + st.deepEqual(qs.parse('a[1]=b&a=c', { arrayLimit: 20 }), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a[]=b&a=c', { arrayLimit: 0 }), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a[]=b&a=c'), { a: ['b', 'c'] }); + + st.deepEqual(qs.parse('a=b&a[1]=c', { arrayLimit: 20 }), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a=b&a[]=c', { arrayLimit: 0 }), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a=b&a[]=c'), { a: ['b', 'c'] }); + + st.end(); + }); + + t.test('parses a nested array', function (st) { + st.deepEqual(qs.parse('a[b][]=c&a[b][]=d'), { a: { b: ['c', 'd'] } }); + st.deepEqual(qs.parse('a[>=]=25'), { a: { '>=': '25' } }); + st.end(); + }); + + t.test('allows to specify array indices', function (st) { + st.deepEqual(qs.parse('a[1]=c&a[0]=b&a[2]=d'), { a: ['b', 'c', 'd'] }); + st.deepEqual(qs.parse('a[1]=c&a[0]=b'), { a: ['b', 'c'] }); + 
st.deepEqual(qs.parse('a[1]=c', { arrayLimit: 20 }), { a: ['c'] }); + st.deepEqual(qs.parse('a[1]=c', { arrayLimit: 0 }), { a: { 1: 'c' } }); + st.deepEqual(qs.parse('a[1]=c'), { a: ['c'] }); + st.end(); + }); + + t.test('limits specific array indices to arrayLimit', function (st) { + st.deepEqual(qs.parse('a[20]=a', { arrayLimit: 20 }), { a: ['a'] }); + st.deepEqual(qs.parse('a[21]=a', { arrayLimit: 20 }), { a: { 21: 'a' } }); + + st.deepEqual(qs.parse('a[20]=a'), { a: ['a'] }); + st.deepEqual(qs.parse('a[21]=a'), { a: { 21: 'a' } }); + st.end(); + }); + + t.deepEqual(qs.parse('a[12b]=c'), { a: { '12b': 'c' } }, 'supports keys that begin with a number'); + + t.test('supports encoded = signs', function (st) { + st.deepEqual(qs.parse('he%3Dllo=th%3Dere'), { 'he=llo': 'th=ere' }); + st.end(); + }); + + t.test('is ok with url encoded strings', function (st) { + st.deepEqual(qs.parse('a[b%20c]=d'), { a: { 'b c': 'd' } }); + st.deepEqual(qs.parse('a[b]=c%20d'), { a: { b: 'c d' } }); + st.end(); + }); + + t.test('allows brackets in the value', function (st) { + st.deepEqual(qs.parse('pets=["tobi"]'), { pets: '["tobi"]' }); + st.deepEqual(qs.parse('operators=[">=", "<="]'), { operators: '[">=", "<="]' }); + st.end(); + }); + + t.test('allows empty values', function (st) { + st.deepEqual(qs.parse(''), {}); + st.deepEqual(qs.parse(null), {}); + st.deepEqual(qs.parse(undefined), {}); + st.end(); + }); + + t.test('transforms arrays to objects', function (st) { + st.deepEqual(qs.parse('foo[0]=bar&foo[bad]=baz'), { foo: { 0: 'bar', bad: 'baz' } }); + st.deepEqual(qs.parse('foo[bad]=baz&foo[0]=bar'), { foo: { bad: 'baz', 0: 'bar' } }); + st.deepEqual(qs.parse('foo[bad]=baz&foo[]=bar'), { foo: { bad: 'baz', 0: 'bar' } }); + st.deepEqual(qs.parse('foo[]=bar&foo[bad]=baz'), { foo: { 0: 'bar', bad: 'baz' } }); + st.deepEqual(qs.parse('foo[bad]=baz&foo[]=bar&foo[]=foo'), { foo: { bad: 'baz', 0: 'bar', 1: 'foo' } }); + st.deepEqual(qs.parse('foo[0][a]=a&foo[0][b]=b&foo[1][a]=aa&foo[1][b]=bb'), { foo: [{ a: 'a', b: 'b' }, { a: 'aa', b: 'bb' }] }); + + st.deepEqual(qs.parse('a[]=b&a[t]=u&a[hasOwnProperty]=c', { allowPrototypes: false }), { a: { 0: 'b', t: 'u' } }); + st.deepEqual(qs.parse('a[]=b&a[t]=u&a[hasOwnProperty]=c', { allowPrototypes: true }), { a: { 0: 'b', t: 'u', hasOwnProperty: 'c' } }); + st.deepEqual(qs.parse('a[]=b&a[hasOwnProperty]=c&a[x]=y', { allowPrototypes: false }), { a: { 0: 'b', x: 'y' } }); + st.deepEqual(qs.parse('a[]=b&a[hasOwnProperty]=c&a[x]=y', { allowPrototypes: true }), { a: { 0: 'b', hasOwnProperty: 'c', x: 'y' } }); + st.end(); + }); + + t.test('transforms arrays to objects (dot notation)', function (st) { + st.deepEqual(qs.parse('foo[0].baz=bar&fool.bad=baz', { allowDots: true }), { foo: [{ baz: 'bar' }], fool: { bad: 'baz' } }); + st.deepEqual(qs.parse('foo[0].baz=bar&fool.bad.boo=baz', { allowDots: true }), { foo: [{ baz: 'bar' }], fool: { bad: { boo: 'baz' } } }); + st.deepEqual(qs.parse('foo[0][0].baz=bar&fool.bad=baz', { allowDots: true }), { foo: [[{ baz: 'bar' }]], fool: { bad: 'baz' } }); + st.deepEqual(qs.parse('foo[0].baz[0]=15&foo[0].bar=2', { allowDots: true }), { foo: [{ baz: ['15'], bar: '2' }] }); + st.deepEqual(qs.parse('foo[0].baz[0]=15&foo[0].baz[1]=16&foo[0].bar=2', { allowDots: true }), { foo: [{ baz: ['15', '16'], bar: '2' }] }); + st.deepEqual(qs.parse('foo.bad=baz&foo[0]=bar', { allowDots: true }), { foo: { bad: 'baz', 0: 'bar' } }); + st.deepEqual(qs.parse('foo.bad=baz&foo[]=bar', { allowDots: true }), { foo: { bad: 'baz', 0: 'bar' } }); + 
st.deepEqual(qs.parse('foo[]=bar&foo.bad=baz', { allowDots: true }), { foo: { 0: 'bar', bad: 'baz' } }); + st.deepEqual(qs.parse('foo.bad=baz&foo[]=bar&foo[]=foo', { allowDots: true }), { foo: { bad: 'baz', 0: 'bar', 1: 'foo' } }); + st.deepEqual(qs.parse('foo[0].a=a&foo[0].b=b&foo[1].a=aa&foo[1].b=bb', { allowDots: true }), { foo: [{ a: 'a', b: 'b' }, { a: 'aa', b: 'bb' }] }); + st.end(); + }); + + t.test('correctly prunes undefined values when converting an array to an object', function (st) { + st.deepEqual(qs.parse('a[2]=b&a[99999999]=c'), { a: { 2: 'b', 99999999: 'c' } }); + st.end(); + }); + + t.test('supports malformed uri characters', function (st) { + st.deepEqual(qs.parse('{%:%}', { strictNullHandling: true }), { '{%:%}': null }); + st.deepEqual(qs.parse('{%:%}='), { '{%:%}': '' }); + st.deepEqual(qs.parse('foo=%:%}'), { foo: '%:%}' }); + st.end(); + }); + + t.test('doesn\'t produce empty keys', function (st) { + st.deepEqual(qs.parse('_r=1&'), { _r: '1' }); + st.end(); + }); + + t.test('cannot access Object prototype', function (st) { + qs.parse('constructor[prototype][bad]=bad'); + qs.parse('bad[constructor][prototype][bad]=bad'); + st.equal(typeof Object.prototype.bad, 'undefined'); + st.end(); + }); + + t.test('parses arrays of objects', function (st) { + st.deepEqual(qs.parse('a[][b]=c'), { a: [{ b: 'c' }] }); + st.deepEqual(qs.parse('a[0][b]=c'), { a: [{ b: 'c' }] }); + st.end(); + }); + + t.test('allows for empty strings in arrays', function (st) { + st.deepEqual(qs.parse('a[]=b&a[]=&a[]=c'), { a: ['b', '', 'c'] }); + + st.deepEqual( + qs.parse('a[0]=b&a[1]&a[2]=c&a[19]=', { strictNullHandling: true, arrayLimit: 20 }), + { a: ['b', null, 'c', ''] }, + 'with arrayLimit 20 + array indices: null then empty string works' + ); + st.deepEqual( + qs.parse('a[]=b&a[]&a[]=c&a[]=', { strictNullHandling: true, arrayLimit: 0 }), + { a: ['b', null, 'c', ''] }, + 'with arrayLimit 0 + array brackets: null then empty string works' + ); + + st.deepEqual( + qs.parse('a[0]=b&a[1]=&a[2]=c&a[19]', { strictNullHandling: true, arrayLimit: 20 }), + { a: ['b', '', 'c', null] }, + 'with arrayLimit 20 + array indices: empty string then null works' + ); + st.deepEqual( + qs.parse('a[]=b&a[]=&a[]=c&a[]', { strictNullHandling: true, arrayLimit: 0 }), + { a: ['b', '', 'c', null] }, + 'with arrayLimit 0 + array brackets: empty string then null works' + ); + + st.deepEqual( + qs.parse('a[]=&a[]=b&a[]=c'), + { a: ['', 'b', 'c'] }, + 'array brackets: empty strings work' + ); + st.end(); + }); + + t.test('compacts sparse arrays', function (st) { + st.deepEqual(qs.parse('a[10]=1&a[2]=2', { arrayLimit: 20 }), { a: ['2', '1'] }); + st.deepEqual(qs.parse('a[1][b][2][c]=1', { arrayLimit: 20 }), { a: [{ b: [{ c: '1' }] }] }); + st.deepEqual(qs.parse('a[1][2][3][c]=1', { arrayLimit: 20 }), { a: [[[{ c: '1' }]]] }); + st.deepEqual(qs.parse('a[1][2][3][c][1]=1', { arrayLimit: 20 }), { a: [[[{ c: ['1'] }]]] }); + st.end(); + }); + + t.test('parses sparse arrays', function (st) { + /* eslint no-sparse-arrays: 0 */ + st.deepEqual(qs.parse('a[4]=1&a[1]=2', { allowSparse: true }), { a: [, '2', , , '1'] }); + st.deepEqual(qs.parse('a[1][b][2][c]=1', { allowSparse: true }), { a: [, { b: [, , { c: '1' }] }] }); + st.deepEqual(qs.parse('a[1][2][3][c]=1', { allowSparse: true }), { a: [, [, , [, , , { c: '1' }]]] }); + st.deepEqual(qs.parse('a[1][2][3][c][1]=1', { allowSparse: true }), { a: [, [, , [, , , { c: [, '1'] }]]] }); + st.end(); + }); + + t.test('parses semi-parsed strings', function (st) { + st.deepEqual(qs.parse({ 
'a[b]': 'c' }), { a: { b: 'c' } }); + st.deepEqual(qs.parse({ 'a[b]': 'c', 'a[d]': 'e' }), { a: { b: 'c', d: 'e' } }); + st.end(); + }); + + t.test('parses buffers correctly', function (st) { + var b = SaferBuffer.from('test'); + st.deepEqual(qs.parse({ a: b }), { a: b }); + st.end(); + }); + + t.test('parses jquery-param strings', function (st) { + // readable = 'filter[0][]=int1&filter[0][]==&filter[0][]=77&filter[]=and&filter[2][]=int2&filter[2][]==&filter[2][]=8' + var encoded = 'filter%5B0%5D%5B%5D=int1&filter%5B0%5D%5B%5D=%3D&filter%5B0%5D%5B%5D=77&filter%5B%5D=and&filter%5B2%5D%5B%5D=int2&filter%5B2%5D%5B%5D=%3D&filter%5B2%5D%5B%5D=8'; + var expected = { filter: [['int1', '=', '77'], 'and', ['int2', '=', '8']] }; + st.deepEqual(qs.parse(encoded), expected); + st.end(); + }); + + t.test('continues parsing when no parent is found', function (st) { + st.deepEqual(qs.parse('[]=&a=b'), { 0: '', a: 'b' }); + st.deepEqual(qs.parse('[]&a=b', { strictNullHandling: true }), { 0: null, a: 'b' }); + st.deepEqual(qs.parse('[foo]=bar'), { foo: 'bar' }); + st.end(); + }); + + t.test('does not error when parsing a very long array', function (st) { + var str = 'a[]=a'; + while (Buffer.byteLength(str) < 128 * 1024) { + str = str + '&' + str; + } + + st.doesNotThrow(function () { + qs.parse(str); + }); + + st.end(); + }); + + t.test('should not throw when a native prototype has an enumerable property', function (st) { + Object.prototype.crash = ''; + Array.prototype.crash = ''; + st.doesNotThrow(qs.parse.bind(null, 'a=b')); + st.deepEqual(qs.parse('a=b'), { a: 'b' }); + st.doesNotThrow(qs.parse.bind(null, 'a[][b]=c')); + st.deepEqual(qs.parse('a[][b]=c'), { a: [{ b: 'c' }] }); + delete Object.prototype.crash; + delete Array.prototype.crash; + st.end(); + }); + + t.test('parses a string with an alternative string delimiter', function (st) { + st.deepEqual(qs.parse('a=b;c=d', { delimiter: ';' }), { a: 'b', c: 'd' }); + st.end(); + }); + + t.test('parses a string with an alternative RegExp delimiter', function (st) { + st.deepEqual(qs.parse('a=b; c=d', { delimiter: /[;,] */ }), { a: 'b', c: 'd' }); + st.end(); + }); + + t.test('does not use non-splittable objects as delimiters', function (st) { + st.deepEqual(qs.parse('a=b&c=d', { delimiter: true }), { a: 'b', c: 'd' }); + st.end(); + }); + + t.test('allows overriding parameter limit', function (st) { + st.deepEqual(qs.parse('a=b&c=d', { parameterLimit: 1 }), { a: 'b' }); + st.end(); + }); + + t.test('allows setting the parameter limit to Infinity', function (st) { + st.deepEqual(qs.parse('a=b&c=d', { parameterLimit: Infinity }), { a: 'b', c: 'd' }); + st.end(); + }); + + t.test('allows overriding array limit', function (st) { + st.deepEqual(qs.parse('a[0]=b', { arrayLimit: -1 }), { a: { 0: 'b' } }); + st.deepEqual(qs.parse('a[-1]=b', { arrayLimit: -1 }), { a: { '-1': 'b' } }); + st.deepEqual(qs.parse('a[0]=b&a[1]=c', { arrayLimit: 0 }), { a: { 0: 'b', 1: 'c' } }); + st.end(); + }); + + t.test('allows disabling array parsing', function (st) { + var indices = qs.parse('a[0]=b&a[1]=c', { parseArrays: false }); + st.deepEqual(indices, { a: { 0: 'b', 1: 'c' } }); + st.equal(Array.isArray(indices.a), false, 'parseArrays:false, indices case is not an array'); + + var emptyBrackets = qs.parse('a[]=b', { parseArrays: false }); + st.deepEqual(emptyBrackets, { a: { 0: 'b' } }); + st.equal(Array.isArray(emptyBrackets.a), false, 'parseArrays:false, empty brackets case is not an array'); + + st.end(); + }); + + t.test('allows for query string prefix', function (st) { 
+ st.deepEqual(qs.parse('?foo=bar', { ignoreQueryPrefix: true }), { foo: 'bar' }); + st.deepEqual(qs.parse('foo=bar', { ignoreQueryPrefix: true }), { foo: 'bar' }); + st.deepEqual(qs.parse('?foo=bar', { ignoreQueryPrefix: false }), { '?foo': 'bar' }); + + st.end(); + }); + + t.test('parses an object', function (st) { + var input = { + 'user[name]': { 'pop[bob]': 3 }, + 'user[email]': null + }; + + var expected = { + user: { + name: { 'pop[bob]': 3 }, + email: null + } + }; + + var result = qs.parse(input); + + st.deepEqual(result, expected); + st.end(); + }); + + t.test('parses string with comma as array divider', function (st) { + st.deepEqual(qs.parse('foo=bar,tee', { comma: true }), { foo: ['bar', 'tee'] }); + st.deepEqual(qs.parse('foo[bar]=coffee,tee', { comma: true }), { foo: { bar: ['coffee', 'tee'] } }); + st.deepEqual(qs.parse('foo=', { comma: true }), { foo: '' }); + st.deepEqual(qs.parse('foo', { comma: true }), { foo: '' }); + st.deepEqual(qs.parse('foo', { comma: true, strictNullHandling: true }), { foo: null }); + + // test cases inversed from from stringify tests + st.deepEqual(qs.parse('a[0]=c'), { a: ['c'] }); + st.deepEqual(qs.parse('a[]=c'), { a: ['c'] }); + st.deepEqual(qs.parse('a[]=c', { comma: true }), { a: ['c'] }); + + st.deepEqual(qs.parse('a[0]=c&a[1]=d'), { a: ['c', 'd'] }); + st.deepEqual(qs.parse('a[]=c&a[]=d'), { a: ['c', 'd'] }); + st.deepEqual(qs.parse('a=c,d', { comma: true }), { a: ['c', 'd'] }); + + st.end(); + }); + + t.test('parses values with comma as array divider', function (st) { + st.deepEqual(qs.parse({ foo: 'bar,tee' }, { comma: false }), { foo: 'bar,tee' }); + st.deepEqual(qs.parse({ foo: 'bar,tee' }, { comma: true }), { foo: ['bar', 'tee'] }); + st.end(); + }); + + t.test('use number decoder, parses string that has one number with comma option enabled', function (st) { + var decoder = function (str, defaultDecoder, charset, type) { + if (!isNaN(Number(str))) { + return parseFloat(str); + } + return defaultDecoder(str, defaultDecoder, charset, type); + }; + + st.deepEqual(qs.parse('foo=1', { comma: true, decoder: decoder }), { foo: 1 }); + st.deepEqual(qs.parse('foo=0', { comma: true, decoder: decoder }), { foo: 0 }); + + st.end(); + }); + + t.test('parses brackets holds array of arrays when having two parts of strings with comma as array divider', function (st) { + st.deepEqual(qs.parse('foo[]=1,2,3&foo[]=4,5,6', { comma: true }), { foo: [['1', '2', '3'], ['4', '5', '6']] }); + st.deepEqual(qs.parse('foo[]=1,2,3&foo[]=', { comma: true }), { foo: [['1', '2', '3'], ''] }); + st.deepEqual(qs.parse('foo[]=1,2,3&foo[]=,', { comma: true }), { foo: [['1', '2', '3'], ['', '']] }); + st.deepEqual(qs.parse('foo[]=1,2,3&foo[]=a', { comma: true }), { foo: [['1', '2', '3'], 'a'] }); + + st.end(); + }); + + t.test('parses comma delimited array while having percent-encoded comma treated as normal text', function (st) { + st.deepEqual(qs.parse('foo=a%2Cb', { comma: true }), { foo: 'a,b' }); + st.deepEqual(qs.parse('foo=a%2C%20b,d', { comma: true }), { foo: ['a, b', 'd'] }); + st.deepEqual(qs.parse('foo=a%2C%20b,c%2C%20d', { comma: true }), { foo: ['a, b', 'c, d'] }); + + st.end(); + }); + + t.test('parses an object in dot notation', function (st) { + var input = { + 'user.name': { 'pop[bob]': 3 }, + 'user.email.': null + }; + + var expected = { + user: { + name: { 'pop[bob]': 3 }, + email: null + } + }; + + var result = qs.parse(input, { allowDots: true }); + + st.deepEqual(result, expected); + st.end(); + }); + + t.test('parses an object and not child 
values', function (st) { + var input = { + 'user[name]': { 'pop[bob]': { test: 3 } }, + 'user[email]': null + }; + + var expected = { + user: { + name: { 'pop[bob]': { test: 3 } }, + email: null + } + }; + + var result = qs.parse(input); + + st.deepEqual(result, expected); + st.end(); + }); + + t.test('does not blow up when Buffer global is missing', function (st) { + var tempBuffer = global.Buffer; + delete global.Buffer; + var result = qs.parse('a=b&c=d'); + global.Buffer = tempBuffer; + st.deepEqual(result, { a: 'b', c: 'd' }); + st.end(); + }); + + t.test('does not crash when parsing circular references', function (st) { + var a = {}; + a.b = a; + + var parsed; + + st.doesNotThrow(function () { + parsed = qs.parse({ 'foo[bar]': 'baz', 'foo[baz]': a }); + }); + + st.equal('foo' in parsed, true, 'parsed has "foo" property'); + st.equal('bar' in parsed.foo, true); + st.equal('baz' in parsed.foo, true); + st.equal(parsed.foo.bar, 'baz'); + st.deepEqual(parsed.foo.baz, a); + st.end(); + }); + + t.test('does not crash when parsing deep objects', function (st) { + var parsed; + var str = 'foo'; + + for (var i = 0; i < 5000; i++) { + str += '[p]'; + } + + str += '=bar'; + + st.doesNotThrow(function () { + parsed = qs.parse(str, { depth: 5000 }); + }); + + st.equal('foo' in parsed, true, 'parsed has "foo" property'); + + var depth = 0; + var ref = parsed.foo; + while ((ref = ref.p)) { + depth += 1; + } + + st.equal(depth, 5000, 'parsed is 5000 properties deep'); + + st.end(); + }); + + t.test('parses null objects correctly', { skip: !Object.create }, function (st) { + var a = Object.create(null); + a.b = 'c'; + + st.deepEqual(qs.parse(a), { b: 'c' }); + var result = qs.parse({ a: a }); + st.equal('a' in result, true, 'result has "a" property'); + st.deepEqual(result.a, a); + st.end(); + }); + + t.test('parses dates correctly', function (st) { + var now = new Date(); + st.deepEqual(qs.parse({ a: now }), { a: now }); + st.end(); + }); + + t.test('parses regular expressions correctly', function (st) { + var re = /^test$/; + st.deepEqual(qs.parse({ a: re }), { a: re }); + st.end(); + }); + + t.test('does not allow overwriting prototype properties', function (st) { + st.deepEqual(qs.parse('a[hasOwnProperty]=b', { allowPrototypes: false }), {}); + st.deepEqual(qs.parse('hasOwnProperty=b', { allowPrototypes: false }), {}); + + st.deepEqual( + qs.parse('toString', { allowPrototypes: false }), + {}, + 'bare "toString" results in {}' + ); + + st.end(); + }); + + t.test('can allow overwriting prototype properties', function (st) { + st.deepEqual(qs.parse('a[hasOwnProperty]=b', { allowPrototypes: true }), { a: { hasOwnProperty: 'b' } }); + st.deepEqual(qs.parse('hasOwnProperty=b', { allowPrototypes: true }), { hasOwnProperty: 'b' }); + + st.deepEqual( + qs.parse('toString', { allowPrototypes: true }), + { toString: '' }, + 'bare "toString" results in { toString: "" }' + ); + + st.end(); + }); + + t.test('params starting with a closing bracket', function (st) { + st.deepEqual(qs.parse(']=toString'), { ']': 'toString' }); + st.deepEqual(qs.parse(']]=toString'), { ']]': 'toString' }); + st.deepEqual(qs.parse(']hello]=toString'), { ']hello]': 'toString' }); + st.end(); + }); + + t.test('params starting with a starting bracket', function (st) { + st.deepEqual(qs.parse('[=toString'), { '[': 'toString' }); + st.deepEqual(qs.parse('[[=toString'), { '[[': 'toString' }); + st.deepEqual(qs.parse('[hello[=toString'), { '[hello[': 'toString' }); + st.end(); + }); + + t.test('add keys to objects', function (st) { + 
st.deepEqual( + qs.parse('a[b]=c&a=d'), + { a: { b: 'c', d: true } }, + 'can add keys to objects' + ); + + st.deepEqual( + qs.parse('a[b]=c&a=toString'), + { a: { b: 'c' } }, + 'can not overwrite prototype' + ); + + st.deepEqual( + qs.parse('a[b]=c&a=toString', { allowPrototypes: true }), + { a: { b: 'c', toString: true } }, + 'can overwrite prototype with allowPrototypes true' + ); + + st.deepEqual( + qs.parse('a[b]=c&a=toString', { plainObjects: true }), + { __proto__: null, a: { __proto__: null, b: 'c', toString: true } }, + 'can overwrite prototype with plainObjects true' + ); + + st.end(); + }); + + t.test('dunder proto is ignored', function (st) { + var payload = 'categories[__proto__]=login&categories[__proto__]&categories[length]=42'; + var result = qs.parse(payload, { allowPrototypes: true }); + + st.deepEqual( + result, + { + categories: { + length: '42' + } + }, + 'silent [[Prototype]] payload' + ); + + var plainResult = qs.parse(payload, { allowPrototypes: true, plainObjects: true }); + + st.deepEqual( + plainResult, + { + __proto__: null, + categories: { + __proto__: null, + length: '42' + } + }, + 'silent [[Prototype]] payload: plain objects' + ); + + var query = qs.parse('categories[__proto__]=cats&categories[__proto__]=dogs&categories[some][json]=toInject', { allowPrototypes: true }); + + st.notOk(Array.isArray(query.categories), 'is not an array'); + st.notOk(query.categories instanceof Array, 'is not instanceof an array'); + st.deepEqual(query.categories, { some: { json: 'toInject' } }); + st.equal(JSON.stringify(query.categories), '{"some":{"json":"toInject"}}', 'stringifies as a non-array'); + + st.deepEqual( + qs.parse('foo[__proto__][hidden]=value&foo[bar]=stuffs', { allowPrototypes: true }), + { + foo: { + bar: 'stuffs' + } + }, + 'hidden values' + ); + + st.deepEqual( + qs.parse('foo[__proto__][hidden]=value&foo[bar]=stuffs', { allowPrototypes: true, plainObjects: true }), + { + __proto__: null, + foo: { + __proto__: null, + bar: 'stuffs' + } + }, + 'hidden values: plain objects' + ); + + st.end(); + }); + + t.test('can return null objects', { skip: !Object.create }, function (st) { + var expected = Object.create(null); + expected.a = Object.create(null); + expected.a.b = 'c'; + expected.a.hasOwnProperty = 'd'; + st.deepEqual(qs.parse('a[b]=c&a[hasOwnProperty]=d', { plainObjects: true }), expected); + st.deepEqual(qs.parse(null, { plainObjects: true }), Object.create(null)); + var expectedArray = Object.create(null); + expectedArray.a = Object.create(null); + expectedArray.a[0] = 'b'; + expectedArray.a.c = 'd'; + st.deepEqual(qs.parse('a[]=b&a[c]=d', { plainObjects: true }), expectedArray); + st.end(); + }); + + t.test('can parse with custom encoding', function (st) { + st.deepEqual(qs.parse('%8c%a7=%91%e5%8d%e3%95%7b', { + decoder: function (str) { + var reg = /%([0-9A-F]{2})/ig; + var result = []; + var parts = reg.exec(str); + while (parts) { + result.push(parseInt(parts[1], 16)); + parts = reg.exec(str); + } + return String(iconv.decode(SaferBuffer.from(result), 'shift_jis')); + } + }), { 県: '大阪府' }); + st.end(); + }); + + t.test('receives the default decoder as a second argument', function (st) { + st.plan(1); + qs.parse('a', { + decoder: function (str, defaultDecoder) { + st.equal(defaultDecoder, utils.decode); + } + }); + st.end(); + }); + + t.test('throws error with wrong decoder', function (st) { + st['throws'](function () { + qs.parse({}, { decoder: 'string' }); + }, new TypeError('Decoder has to be a function.')); + st.end(); + }); + + t.test('does not 
mutate the options argument', function (st) { + var options = {}; + qs.parse('a[b]=true', options); + st.deepEqual(options, {}); + st.end(); + }); + + t.test('throws if an invalid charset is specified', function (st) { + st['throws'](function () { + qs.parse('a=b', { charset: 'foobar' }); + }, new TypeError('The charset option must be either utf-8, iso-8859-1, or undefined')); + st.end(); + }); + + t.test('parses an iso-8859-1 string if asked to', function (st) { + st.deepEqual(qs.parse('%A2=%BD', { charset: 'iso-8859-1' }), { '¢': '½' }); + st.end(); + }); + + var urlEncodedCheckmarkInUtf8 = '%E2%9C%93'; + var urlEncodedOSlashInUtf8 = '%C3%B8'; + var urlEncodedNumCheckmark = '%26%2310003%3B'; + var urlEncodedNumSmiley = '%26%239786%3B'; + + t.test('prefers an utf-8 charset specified by the utf8 sentinel to a default charset of iso-8859-1', function (st) { + st.deepEqual(qs.parse('utf8=' + urlEncodedCheckmarkInUtf8 + '&' + urlEncodedOSlashInUtf8 + '=' + urlEncodedOSlashInUtf8, { charsetSentinel: true, charset: 'iso-8859-1' }), { ø: 'ø' }); + st.end(); + }); + + t.test('prefers an iso-8859-1 charset specified by the utf8 sentinel to a default charset of utf-8', function (st) { + st.deepEqual(qs.parse('utf8=' + urlEncodedNumCheckmark + '&' + urlEncodedOSlashInUtf8 + '=' + urlEncodedOSlashInUtf8, { charsetSentinel: true, charset: 'utf-8' }), { 'ø': 'ø' }); + st.end(); + }); + + t.test('does not require the utf8 sentinel to be defined before the parameters whose decoding it affects', function (st) { + st.deepEqual(qs.parse('a=' + urlEncodedOSlashInUtf8 + '&utf8=' + urlEncodedNumCheckmark, { charsetSentinel: true, charset: 'utf-8' }), { a: 'ø' }); + st.end(); + }); + + t.test('should ignore an utf8 sentinel with an unknown value', function (st) { + st.deepEqual(qs.parse('utf8=foo&' + urlEncodedOSlashInUtf8 + '=' + urlEncodedOSlashInUtf8, { charsetSentinel: true, charset: 'utf-8' }), { ø: 'ø' }); + st.end(); + }); + + t.test('uses the utf8 sentinel to switch to utf-8 when no default charset is given', function (st) { + st.deepEqual(qs.parse('utf8=' + urlEncodedCheckmarkInUtf8 + '&' + urlEncodedOSlashInUtf8 + '=' + urlEncodedOSlashInUtf8, { charsetSentinel: true }), { ø: 'ø' }); + st.end(); + }); + + t.test('uses the utf8 sentinel to switch to iso-8859-1 when no default charset is given', function (st) { + st.deepEqual(qs.parse('utf8=' + urlEncodedNumCheckmark + '&' + urlEncodedOSlashInUtf8 + '=' + urlEncodedOSlashInUtf8, { charsetSentinel: true }), { 'ø': 'ø' }); + st.end(); + }); + + t.test('interprets numeric entities in iso-8859-1 when `interpretNumericEntities`', function (st) { + st.deepEqual(qs.parse('foo=' + urlEncodedNumSmiley, { charset: 'iso-8859-1', interpretNumericEntities: true }), { foo: '☺' }); + st.end(); + }); + + t.test('handles a custom decoder returning `null`, in the `iso-8859-1` charset, when `interpretNumericEntities`', function (st) { + st.deepEqual(qs.parse('foo=&bar=' + urlEncodedNumSmiley, { + charset: 'iso-8859-1', + decoder: function (str, defaultDecoder, charset) { + return str ? 
defaultDecoder(str, defaultDecoder, charset) : null; + }, + interpretNumericEntities: true + }), { foo: null, bar: '☺' }); + st.end(); + }); + + t.test('does not interpret numeric entities in iso-8859-1 when `interpretNumericEntities` is absent', function (st) { + st.deepEqual(qs.parse('foo=' + urlEncodedNumSmiley, { charset: 'iso-8859-1' }), { foo: '☺' }); + st.end(); + }); + + t.test('does not interpret numeric entities when the charset is utf-8, even when `interpretNumericEntities`', function (st) { + st.deepEqual(qs.parse('foo=' + urlEncodedNumSmiley, { charset: 'utf-8', interpretNumericEntities: true }), { foo: '☺' }); + st.end(); + }); + + t.test('does not interpret %uXXXX syntax in iso-8859-1 mode', function (st) { + st.deepEqual(qs.parse('%u263A=%u263A', { charset: 'iso-8859-1' }), { '%u263A': '%u263A' }); + st.end(); + }); + + t.test('allows for decoding keys and values differently', function (st) { + var decoder = function (str, defaultDecoder, charset, type) { + if (type === 'key') { + return defaultDecoder(str, defaultDecoder, charset, type).toLowerCase(); + } + if (type === 'value') { + return defaultDecoder(str, defaultDecoder, charset, type).toUpperCase(); + } + throw 'this should never happen! type: ' + type; + }; + + st.deepEqual(qs.parse('KeY=vAlUe', { decoder: decoder }), { key: 'VALUE' }); + st.end(); + }); + + t.end(); +}); diff --git a/node_modules/qs/test/stringify.js b/node_modules/qs/test/stringify.js new file mode 100644 index 0000000..f0cdfef --- /dev/null +++ b/node_modules/qs/test/stringify.js @@ -0,0 +1,909 @@ +'use strict'; + +var test = require('tape'); +var qs = require('../'); +var utils = require('../lib/utils'); +var iconv = require('iconv-lite'); +var SaferBuffer = require('safer-buffer').Buffer; +var hasSymbols = require('has-symbols'); +var hasBigInt = typeof BigInt === 'function'; + +test('stringify()', function (t) { + t.test('stringifies a querystring object', function (st) { + st.equal(qs.stringify({ a: 'b' }), 'a=b'); + st.equal(qs.stringify({ a: 1 }), 'a=1'); + st.equal(qs.stringify({ a: 1, b: 2 }), 'a=1&b=2'); + st.equal(qs.stringify({ a: 'A_Z' }), 'a=A_Z'); + st.equal(qs.stringify({ a: '€' }), 'a=%E2%82%AC'); + st.equal(qs.stringify({ a: '' }), 'a=%EE%80%80'); + st.equal(qs.stringify({ a: 'א' }), 'a=%D7%90'); + st.equal(qs.stringify({ a: '𐐷' }), 'a=%F0%90%90%B7'); + st.end(); + }); + + t.test('stringifies falsy values', function (st) { + st.equal(qs.stringify(undefined), ''); + st.equal(qs.stringify(null), ''); + st.equal(qs.stringify(null, { strictNullHandling: true }), ''); + st.equal(qs.stringify(false), ''); + st.equal(qs.stringify(0), ''); + st.end(); + }); + + t.test('stringifies symbols', { skip: !hasSymbols() }, function (st) { + st.equal(qs.stringify(Symbol.iterator), ''); + st.equal(qs.stringify([Symbol.iterator]), '0=Symbol%28Symbol.iterator%29'); + st.equal(qs.stringify({ a: Symbol.iterator }), 'a=Symbol%28Symbol.iterator%29'); + st.equal( + qs.stringify({ a: [Symbol.iterator] }, { encodeValuesOnly: true, arrayFormat: 'brackets' }), + 'a[]=Symbol%28Symbol.iterator%29' + ); + st.end(); + }); + + t.test('stringifies bigints', { skip: !hasBigInt }, function (st) { + var three = BigInt(3); + var encodeWithN = function (value, defaultEncoder, charset) { + var result = defaultEncoder(value, defaultEncoder, charset); + return typeof value === 'bigint' ? 
result + 'n' : result; + }; + st.equal(qs.stringify(three), ''); + st.equal(qs.stringify([three]), '0=3'); + st.equal(qs.stringify([three], { encoder: encodeWithN }), '0=3n'); + st.equal(qs.stringify({ a: three }), 'a=3'); + st.equal(qs.stringify({ a: three }, { encoder: encodeWithN }), 'a=3n'); + st.equal( + qs.stringify({ a: [three] }, { encodeValuesOnly: true, arrayFormat: 'brackets' }), + 'a[]=3' + ); + st.equal( + qs.stringify({ a: [three] }, { encodeValuesOnly: true, encoder: encodeWithN, arrayFormat: 'brackets' }), + 'a[]=3n' + ); + st.end(); + }); + + t.test('adds query prefix', function (st) { + st.equal(qs.stringify({ a: 'b' }, { addQueryPrefix: true }), '?a=b'); + st.end(); + }); + + t.test('with query prefix, outputs blank string given an empty object', function (st) { + st.equal(qs.stringify({}, { addQueryPrefix: true }), ''); + st.end(); + }); + + t.test('stringifies nested falsy values', function (st) { + st.equal(qs.stringify({ a: { b: { c: null } } }), 'a%5Bb%5D%5Bc%5D='); + st.equal(qs.stringify({ a: { b: { c: null } } }, { strictNullHandling: true }), 'a%5Bb%5D%5Bc%5D'); + st.equal(qs.stringify({ a: { b: { c: false } } }), 'a%5Bb%5D%5Bc%5D=false'); + st.end(); + }); + + t.test('stringifies a nested object', function (st) { + st.equal(qs.stringify({ a: { b: 'c' } }), 'a%5Bb%5D=c'); + st.equal(qs.stringify({ a: { b: { c: { d: 'e' } } } }), 'a%5Bb%5D%5Bc%5D%5Bd%5D=e'); + st.end(); + }); + + t.test('stringifies a nested object with dots notation', function (st) { + st.equal(qs.stringify({ a: { b: 'c' } }, { allowDots: true }), 'a.b=c'); + st.equal(qs.stringify({ a: { b: { c: { d: 'e' } } } }, { allowDots: true }), 'a.b.c.d=e'); + st.end(); + }); + + t.test('stringifies an array value', function (st) { + st.equal( + qs.stringify({ a: ['b', 'c', 'd'] }, { arrayFormat: 'indices' }), + 'a%5B0%5D=b&a%5B1%5D=c&a%5B2%5D=d', + 'indices => indices' + ); + st.equal( + qs.stringify({ a: ['b', 'c', 'd'] }, { arrayFormat: 'brackets' }), + 'a%5B%5D=b&a%5B%5D=c&a%5B%5D=d', + 'brackets => brackets' + ); + st.equal( + qs.stringify({ a: ['b', 'c', 'd'] }, { arrayFormat: 'comma' }), + 'a=b%2Cc%2Cd', + 'comma => comma' + ); + st.equal( + qs.stringify({ a: ['b', 'c', 'd'] }), + 'a%5B0%5D=b&a%5B1%5D=c&a%5B2%5D=d', + 'default => indices' + ); + st.end(); + }); + + t.test('omits nulls when asked', function (st) { + st.equal(qs.stringify({ a: 'b', c: null }, { skipNulls: true }), 'a=b'); + st.end(); + }); + + t.test('omits nested nulls when asked', function (st) { + st.equal(qs.stringify({ a: { b: 'c', d: null } }, { skipNulls: true }), 'a%5Bb%5D=c'); + st.end(); + }); + + t.test('omits array indices when asked', function (st) { + st.equal(qs.stringify({ a: ['b', 'c', 'd'] }, { indices: false }), 'a=b&a=c&a=d'); + st.end(); + }); + + t.test('stringifies an array value with one item vs multiple items', function (st) { + st.test('non-array item', function (s2t) { + s2t.equal(qs.stringify({ a: 'c' }, { encodeValuesOnly: true, arrayFormat: 'indices' }), 'a=c'); + s2t.equal(qs.stringify({ a: 'c' }, { encodeValuesOnly: true, arrayFormat: 'brackets' }), 'a=c'); + s2t.equal(qs.stringify({ a: 'c' }, { encodeValuesOnly: true, arrayFormat: 'comma' }), 'a=c'); + s2t.equal(qs.stringify({ a: 'c' }, { encodeValuesOnly: true }), 'a=c'); + + s2t.end(); + }); + + st.test('array with a single item', function (s2t) { + s2t.equal(qs.stringify({ a: ['c'] }, { encodeValuesOnly: true, arrayFormat: 'indices' }), 'a[0]=c'); + s2t.equal(qs.stringify({ a: ['c'] }, { encodeValuesOnly: true, arrayFormat: 'brackets' }), 'a[]=c'); 
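        // A sketch of why commaRoundTrip matters for single-element arrays, using the
        // same calls asserted below plus the matching parse-side test (same qs module assumed):
        //   qs.stringify({ a: ['c'] }, { encodeValuesOnly: true, arrayFormat: 'comma' })                        // 'a=c'   -> parses back as a plain string
        //   qs.stringify({ a: ['c'] }, { encodeValuesOnly: true, arrayFormat: 'comma', commaRoundTrip: true })  // 'a[]=c' -> qs.parse('a[]=c', { comma: true }) recovers { a: ['c'] }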
+ s2t.equal(qs.stringify({ a: ['c'] }, { encodeValuesOnly: true, arrayFormat: 'comma' }), 'a=c'); + s2t.equal(qs.stringify({ a: ['c'] }, { encodeValuesOnly: true, arrayFormat: 'comma', commaRoundTrip: true }), 'a[]=c'); // so it parses back as an array + s2t.equal(qs.stringify({ a: ['c'] }, { encodeValuesOnly: true }), 'a[0]=c'); + + s2t.end(); + }); + + st.test('array with multiple items', function (s2t) { + s2t.equal(qs.stringify({ a: ['c', 'd'] }, { encodeValuesOnly: true, arrayFormat: 'indices' }), 'a[0]=c&a[1]=d'); + s2t.equal(qs.stringify({ a: ['c', 'd'] }, { encodeValuesOnly: true, arrayFormat: 'brackets' }), 'a[]=c&a[]=d'); + s2t.equal(qs.stringify({ a: ['c', 'd'] }, { encodeValuesOnly: true, arrayFormat: 'comma' }), 'a=c,d'); + s2t.equal(qs.stringify({ a: ['c', 'd'] }, { encodeValuesOnly: true }), 'a[0]=c&a[1]=d'); + + s2t.end(); + }); + + st.end(); + }); + + t.test('stringifies a nested array value', function (st) { + st.equal(qs.stringify({ a: { b: ['c', 'd'] } }, { encodeValuesOnly: true, arrayFormat: 'indices' }), 'a[b][0]=c&a[b][1]=d'); + st.equal(qs.stringify({ a: { b: ['c', 'd'] } }, { encodeValuesOnly: true, arrayFormat: 'brackets' }), 'a[b][]=c&a[b][]=d'); + st.equal(qs.stringify({ a: { b: ['c', 'd'] } }, { encodeValuesOnly: true, arrayFormat: 'comma' }), 'a[b]=c,d'); + st.equal(qs.stringify({ a: { b: ['c', 'd'] } }, { encodeValuesOnly: true }), 'a[b][0]=c&a[b][1]=d'); + st.end(); + }); + + t.test('stringifies a nested array value with dots notation', function (st) { + st.equal( + qs.stringify( + { a: { b: ['c', 'd'] } }, + { allowDots: true, encodeValuesOnly: true, arrayFormat: 'indices' } + ), + 'a.b[0]=c&a.b[1]=d', + 'indices: stringifies with dots + indices' + ); + st.equal( + qs.stringify( + { a: { b: ['c', 'd'] } }, + { allowDots: true, encodeValuesOnly: true, arrayFormat: 'brackets' } + ), + 'a.b[]=c&a.b[]=d', + 'brackets: stringifies with dots + brackets' + ); + st.equal( + qs.stringify( + { a: { b: ['c', 'd'] } }, + { allowDots: true, encodeValuesOnly: true, arrayFormat: 'comma' } + ), + 'a.b=c,d', + 'comma: stringifies with dots + comma' + ); + st.equal( + qs.stringify( + { a: { b: ['c', 'd'] } }, + { allowDots: true, encodeValuesOnly: true } + ), + 'a.b[0]=c&a.b[1]=d', + 'default: stringifies with dots + indices' + ); + st.end(); + }); + + t.test('stringifies an object inside an array', function (st) { + st.equal( + qs.stringify({ a: [{ b: 'c' }] }, { arrayFormat: 'indices' }), + 'a%5B0%5D%5Bb%5D=c', // a[0][b]=c + 'indices => brackets' + ); + st.equal( + qs.stringify({ a: [{ b: 'c' }] }, { arrayFormat: 'brackets' }), + 'a%5B%5D%5Bb%5D=c', // a[][b]=c + 'brackets => brackets' + ); + st.equal( + qs.stringify({ a: [{ b: 'c' }] }), + 'a%5B0%5D%5Bb%5D=c', + 'default => indices' + ); + + st.equal( + qs.stringify({ a: [{ b: { c: [1] } }] }, { arrayFormat: 'indices' }), + 'a%5B0%5D%5Bb%5D%5Bc%5D%5B0%5D=1', + 'indices => indices' + ); + + st.equal( + qs.stringify({ a: [{ b: { c: [1] } }] }, { arrayFormat: 'brackets' }), + 'a%5B%5D%5Bb%5D%5Bc%5D%5B%5D=1', + 'brackets => brackets' + ); + + st.equal( + qs.stringify({ a: [{ b: { c: [1] } }] }), + 'a%5B0%5D%5Bb%5D%5Bc%5D%5B0%5D=1', + 'default => indices' + ); + + st.end(); + }); + + t.test('stringifies an array with mixed objects and primitives', function (st) { + st.equal( + qs.stringify({ a: [{ b: 1 }, 2, 3] }, { encodeValuesOnly: true, arrayFormat: 'indices' }), + 'a[0][b]=1&a[1]=2&a[2]=3', + 'indices => indices' + ); + st.equal( + qs.stringify({ a: [{ b: 1 }, 2, 3] }, { encodeValuesOnly: true, arrayFormat: 'brackets' 
}), + 'a[][b]=1&a[]=2&a[]=3', + 'brackets => brackets' + ); + st.equal( + qs.stringify({ a: [{ b: 1 }, 2, 3] }, { encodeValuesOnly: true, arrayFormat: 'comma' }), + '???', + 'brackets => brackets', + { skip: 'TODO: figure out what this should do' } + ); + st.equal( + qs.stringify({ a: [{ b: 1 }, 2, 3] }, { encodeValuesOnly: true }), + 'a[0][b]=1&a[1]=2&a[2]=3', + 'default => indices' + ); + + st.end(); + }); + + t.test('stringifies an object inside an array with dots notation', function (st) { + st.equal( + qs.stringify( + { a: [{ b: 'c' }] }, + { allowDots: true, encode: false, arrayFormat: 'indices' } + ), + 'a[0].b=c', + 'indices => indices' + ); + st.equal( + qs.stringify( + { a: [{ b: 'c' }] }, + { allowDots: true, encode: false, arrayFormat: 'brackets' } + ), + 'a[].b=c', + 'brackets => brackets' + ); + st.equal( + qs.stringify( + { a: [{ b: 'c' }] }, + { allowDots: true, encode: false } + ), + 'a[0].b=c', + 'default => indices' + ); + + st.equal( + qs.stringify( + { a: [{ b: { c: [1] } }] }, + { allowDots: true, encode: false, arrayFormat: 'indices' } + ), + 'a[0].b.c[0]=1', + 'indices => indices' + ); + st.equal( + qs.stringify( + { a: [{ b: { c: [1] } }] }, + { allowDots: true, encode: false, arrayFormat: 'brackets' } + ), + 'a[].b.c[]=1', + 'brackets => brackets' + ); + st.equal( + qs.stringify( + { a: [{ b: { c: [1] } }] }, + { allowDots: true, encode: false } + ), + 'a[0].b.c[0]=1', + 'default => indices' + ); + + st.end(); + }); + + t.test('does not omit object keys when indices = false', function (st) { + st.equal(qs.stringify({ a: [{ b: 'c' }] }, { indices: false }), 'a%5Bb%5D=c'); + st.end(); + }); + + t.test('uses indices notation for arrays when indices=true', function (st) { + st.equal(qs.stringify({ a: ['b', 'c'] }, { indices: true }), 'a%5B0%5D=b&a%5B1%5D=c'); + st.end(); + }); + + t.test('uses indices notation for arrays when no arrayFormat is specified', function (st) { + st.equal(qs.stringify({ a: ['b', 'c'] }), 'a%5B0%5D=b&a%5B1%5D=c'); + st.end(); + }); + + t.test('uses indices notation for arrays when no arrayFormat=indices', function (st) { + st.equal(qs.stringify({ a: ['b', 'c'] }, { arrayFormat: 'indices' }), 'a%5B0%5D=b&a%5B1%5D=c'); + st.end(); + }); + + t.test('uses repeat notation for arrays when no arrayFormat=repeat', function (st) { + st.equal(qs.stringify({ a: ['b', 'c'] }, { arrayFormat: 'repeat' }), 'a=b&a=c'); + st.end(); + }); + + t.test('uses brackets notation for arrays when no arrayFormat=brackets', function (st) { + st.equal(qs.stringify({ a: ['b', 'c'] }, { arrayFormat: 'brackets' }), 'a%5B%5D=b&a%5B%5D=c'); + st.end(); + }); + + t.test('stringifies a complicated object', function (st) { + st.equal(qs.stringify({ a: { b: 'c', d: 'e' } }), 'a%5Bb%5D=c&a%5Bd%5D=e'); + st.end(); + }); + + t.test('stringifies an empty value', function (st) { + st.equal(qs.stringify({ a: '' }), 'a='); + st.equal(qs.stringify({ a: null }, { strictNullHandling: true }), 'a'); + + st.equal(qs.stringify({ a: '', b: '' }), 'a=&b='); + st.equal(qs.stringify({ a: null, b: '' }, { strictNullHandling: true }), 'a&b='); + + st.equal(qs.stringify({ a: { b: '' } }), 'a%5Bb%5D='); + st.equal(qs.stringify({ a: { b: null } }, { strictNullHandling: true }), 'a%5Bb%5D'); + st.equal(qs.stringify({ a: { b: null } }, { strictNullHandling: false }), 'a%5Bb%5D='); + + st.end(); + }); + + t.test('stringifies an empty array in different arrayFormat', function (st) { + st.equal(qs.stringify({ a: [], b: [null], c: 'c' }, { encode: false }), 'b[0]=&c=c'); + // arrayFormat default + 
st.equal(qs.stringify({ a: [], b: [null], c: 'c' }, { encode: false, arrayFormat: 'indices' }), 'b[0]=&c=c'); + st.equal(qs.stringify({ a: [], b: [null], c: 'c' }, { encode: false, arrayFormat: 'brackets' }), 'b[]=&c=c'); + st.equal(qs.stringify({ a: [], b: [null], c: 'c' }, { encode: false, arrayFormat: 'repeat' }), 'b=&c=c'); + st.equal(qs.stringify({ a: [], b: [null], c: 'c' }, { encode: false, arrayFormat: 'comma' }), 'b=&c=c'); + st.equal(qs.stringify({ a: [], b: [null], c: 'c' }, { encode: false, arrayFormat: 'comma', commaRoundTrip: true }), 'b[]=&c=c'); + // with strictNullHandling + st.equal(qs.stringify({ a: [], b: [null], c: 'c' }, { encode: false, arrayFormat: 'indices', strictNullHandling: true }), 'b[0]&c=c'); + st.equal(qs.stringify({ a: [], b: [null], c: 'c' }, { encode: false, arrayFormat: 'brackets', strictNullHandling: true }), 'b[]&c=c'); + st.equal(qs.stringify({ a: [], b: [null], c: 'c' }, { encode: false, arrayFormat: 'repeat', strictNullHandling: true }), 'b&c=c'); + st.equal(qs.stringify({ a: [], b: [null], c: 'c' }, { encode: false, arrayFormat: 'comma', strictNullHandling: true }), 'b&c=c'); + st.equal(qs.stringify({ a: [], b: [null], c: 'c' }, { encode: false, arrayFormat: 'comma', strictNullHandling: true, commaRoundTrip: true }), 'b[]&c=c'); + // with skipNulls + st.equal(qs.stringify({ a: [], b: [null], c: 'c' }, { encode: false, arrayFormat: 'indices', skipNulls: true }), 'c=c'); + st.equal(qs.stringify({ a: [], b: [null], c: 'c' }, { encode: false, arrayFormat: 'brackets', skipNulls: true }), 'c=c'); + st.equal(qs.stringify({ a: [], b: [null], c: 'c' }, { encode: false, arrayFormat: 'repeat', skipNulls: true }), 'c=c'); + st.equal(qs.stringify({ a: [], b: [null], c: 'c' }, { encode: false, arrayFormat: 'comma', skipNulls: true }), 'c=c'); + + st.end(); + }); + + t.test('stringifies a null object', { skip: !Object.create }, function (st) { + var obj = Object.create(null); + obj.a = 'b'; + st.equal(qs.stringify(obj), 'a=b'); + st.end(); + }); + + t.test('returns an empty string for invalid input', function (st) { + st.equal(qs.stringify(undefined), ''); + st.equal(qs.stringify(false), ''); + st.equal(qs.stringify(null), ''); + st.equal(qs.stringify(''), ''); + st.end(); + }); + + t.test('stringifies an object with a null object as a child', { skip: !Object.create }, function (st) { + var obj = { a: Object.create(null) }; + + obj.a.b = 'c'; + st.equal(qs.stringify(obj), 'a%5Bb%5D=c'); + st.end(); + }); + + t.test('drops keys with a value of undefined', function (st) { + st.equal(qs.stringify({ a: undefined }), ''); + + st.equal(qs.stringify({ a: { b: undefined, c: null } }, { strictNullHandling: true }), 'a%5Bc%5D'); + st.equal(qs.stringify({ a: { b: undefined, c: null } }, { strictNullHandling: false }), 'a%5Bc%5D='); + st.equal(qs.stringify({ a: { b: undefined, c: '' } }), 'a%5Bc%5D='); + st.end(); + }); + + t.test('url encodes values', function (st) { + st.equal(qs.stringify({ a: 'b c' }), 'a=b%20c'); + st.end(); + }); + + t.test('stringifies a date', function (st) { + var now = new Date(); + var str = 'a=' + encodeURIComponent(now.toISOString()); + st.equal(qs.stringify({ a: now }), str); + st.end(); + }); + + t.test('stringifies the weird object from qs', function (st) { + st.equal(qs.stringify({ 'my weird field': '~q1!2"\'w$5&7/z8)?' 
}), 'my%20weird%20field=~q1%212%22%27w%245%267%2Fz8%29%3F'); + st.end(); + }); + + t.test('skips properties that are part of the object prototype', function (st) { + Object.prototype.crash = 'test'; + st.equal(qs.stringify({ a: 'b' }), 'a=b'); + st.equal(qs.stringify({ a: { b: 'c' } }), 'a%5Bb%5D=c'); + delete Object.prototype.crash; + st.end(); + }); + + t.test('stringifies boolean values', function (st) { + st.equal(qs.stringify({ a: true }), 'a=true'); + st.equal(qs.stringify({ a: { b: true } }), 'a%5Bb%5D=true'); + st.equal(qs.stringify({ b: false }), 'b=false'); + st.equal(qs.stringify({ b: { c: false } }), 'b%5Bc%5D=false'); + st.end(); + }); + + t.test('stringifies buffer values', function (st) { + st.equal(qs.stringify({ a: SaferBuffer.from('test') }), 'a=test'); + st.equal(qs.stringify({ a: { b: SaferBuffer.from('test') } }), 'a%5Bb%5D=test'); + st.end(); + }); + + t.test('stringifies an object using an alternative delimiter', function (st) { + st.equal(qs.stringify({ a: 'b', c: 'd' }, { delimiter: ';' }), 'a=b;c=d'); + st.end(); + }); + + t.test('does not blow up when Buffer global is missing', function (st) { + var tempBuffer = global.Buffer; + delete global.Buffer; + var result = qs.stringify({ a: 'b', c: 'd' }); + global.Buffer = tempBuffer; + st.equal(result, 'a=b&c=d'); + st.end(); + }); + + t.test('does not crash when parsing circular references', function (st) { + var a = {}; + a.b = a; + + st['throws']( + function () { qs.stringify({ 'foo[bar]': 'baz', 'foo[baz]': a }); }, + /RangeError: Cyclic object value/, + 'cyclic values throw' + ); + + var circular = { + a: 'value' + }; + circular.a = circular; + st['throws']( + function () { qs.stringify(circular); }, + /RangeError: Cyclic object value/, + 'cyclic values throw' + ); + + var arr = ['a']; + st.doesNotThrow( + function () { qs.stringify({ x: arr, y: arr }); }, + 'non-cyclic values do not throw' + ); + + st.end(); + }); + + t.test('non-circular duplicated references can still work', function (st) { + var hourOfDay = { + 'function': 'hour_of_day' + }; + + var p1 = { + 'function': 'gte', + arguments: [hourOfDay, 0] + }; + var p2 = { + 'function': 'lte', + arguments: [hourOfDay, 23] + }; + + st.equal( + qs.stringify({ filters: { $and: [p1, p2] } }, { encodeValuesOnly: true }), + 'filters[$and][0][function]=gte&filters[$and][0][arguments][0][function]=hour_of_day&filters[$and][0][arguments][1]=0&filters[$and][1][function]=lte&filters[$and][1][arguments][0][function]=hour_of_day&filters[$and][1][arguments][1]=23' + ); + + st.end(); + }); + + t.test('selects properties when filter=array', function (st) { + st.equal(qs.stringify({ a: 'b' }, { filter: ['a'] }), 'a=b'); + st.equal(qs.stringify({ a: 1 }, { filter: [] }), ''); + + st.equal( + qs.stringify( + { a: { b: [1, 2, 3, 4], c: 'd' }, c: 'f' }, + { filter: ['a', 'b', 0, 2], arrayFormat: 'indices' } + ), + 'a%5Bb%5D%5B0%5D=1&a%5Bb%5D%5B2%5D=3', + 'indices => indices' + ); + st.equal( + qs.stringify( + { a: { b: [1, 2, 3, 4], c: 'd' }, c: 'f' }, + { filter: ['a', 'b', 0, 2], arrayFormat: 'brackets' } + ), + 'a%5Bb%5D%5B%5D=1&a%5Bb%5D%5B%5D=3', + 'brackets => brackets' + ); + st.equal( + qs.stringify( + { a: { b: [1, 2, 3, 4], c: 'd' }, c: 'f' }, + { filter: ['a', 'b', 0, 2] } + ), + 'a%5Bb%5D%5B0%5D=1&a%5Bb%5D%5B2%5D=3', + 'default => indices' + ); + + st.end(); + }); + + t.test('supports custom representations when filter=function', function (st) { + var calls = 0; + var obj = { a: 'b', c: 'd', e: { f: new Date(1257894000000) } }; + var filterFunc = function (prefix, value) 
{ + calls += 1; + if (calls === 1) { + st.equal(prefix, '', 'prefix is empty'); + st.equal(value, obj); + } else if (prefix === 'c') { + return void 0; + } else if (value instanceof Date) { + st.equal(prefix, 'e[f]'); + return value.getTime(); + } + return value; + }; + + st.equal(qs.stringify(obj, { filter: filterFunc }), 'a=b&e%5Bf%5D=1257894000000'); + st.equal(calls, 5); + st.end(); + }); + + t.test('can disable uri encoding', function (st) { + st.equal(qs.stringify({ a: 'b' }, { encode: false }), 'a=b'); + st.equal(qs.stringify({ a: { b: 'c' } }, { encode: false }), 'a[b]=c'); + st.equal(qs.stringify({ a: 'b', c: null }, { strictNullHandling: true, encode: false }), 'a=b&c'); + st.end(); + }); + + t.test('can sort the keys', function (st) { + var sort = function (a, b) { + return a.localeCompare(b); + }; + st.equal(qs.stringify({ a: 'c', z: 'y', b: 'f' }, { sort: sort }), 'a=c&b=f&z=y'); + st.equal(qs.stringify({ a: 'c', z: { j: 'a', i: 'b' }, b: 'f' }, { sort: sort }), 'a=c&b=f&z%5Bi%5D=b&z%5Bj%5D=a'); + st.end(); + }); + + t.test('can sort the keys at depth 3 or more too', function (st) { + var sort = function (a, b) { + return a.localeCompare(b); + }; + st.equal( + qs.stringify( + { a: 'a', z: { zj: { zjb: 'zjb', zja: 'zja' }, zi: { zib: 'zib', zia: 'zia' } }, b: 'b' }, + { sort: sort, encode: false } + ), + 'a=a&b=b&z[zi][zia]=zia&z[zi][zib]=zib&z[zj][zja]=zja&z[zj][zjb]=zjb' + ); + st.equal( + qs.stringify( + { a: 'a', z: { zj: { zjb: 'zjb', zja: 'zja' }, zi: { zib: 'zib', zia: 'zia' } }, b: 'b' }, + { sort: null, encode: false } + ), + 'a=a&z[zj][zjb]=zjb&z[zj][zja]=zja&z[zi][zib]=zib&z[zi][zia]=zia&b=b' + ); + st.end(); + }); + + t.test('can stringify with custom encoding', function (st) { + st.equal(qs.stringify({ 県: '大阪府', '': '' }, { + encoder: function (str) { + if (str.length === 0) { + return ''; + } + var buf = iconv.encode(str, 'shiftjis'); + var result = []; + for (var i = 0; i < buf.length; ++i) { + result.push(buf.readUInt8(i).toString(16)); + } + return '%' + result.join('%'); + } + }), '%8c%a7=%91%e5%8d%e3%95%7b&='); + st.end(); + }); + + t.test('receives the default encoder as a second argument', function (st) { + st.plan(2); + qs.stringify({ a: 1 }, { + encoder: function (str, defaultEncoder) { + st.equal(defaultEncoder, utils.encode); + } + }); + st.end(); + }); + + t.test('throws error with wrong encoder', function (st) { + st['throws'](function () { + qs.stringify({}, { encoder: 'string' }); + }, new TypeError('Encoder has to be a function.')); + st.end(); + }); + + t.test('can use custom encoder for a buffer object', { skip: typeof Buffer === 'undefined' }, function (st) { + st.equal(qs.stringify({ a: SaferBuffer.from([1]) }, { + encoder: function (buffer) { + if (typeof buffer === 'string') { + return buffer; + } + return String.fromCharCode(buffer.readUInt8(0) + 97); + } + }), 'a=b'); + + st.equal(qs.stringify({ a: SaferBuffer.from('a b') }, { + encoder: function (buffer) { + return buffer; + } + }), 'a=a b'); + st.end(); + }); + + t.test('serializeDate option', function (st) { + var date = new Date(); + st.equal( + qs.stringify({ a: date }), + 'a=' + date.toISOString().replace(/:/g, '%3A'), + 'default is toISOString' + ); + + var mutatedDate = new Date(); + mutatedDate.toISOString = function () { + throw new SyntaxError(); + }; + st['throws'](function () { + mutatedDate.toISOString(); + }, SyntaxError); + st.equal( + qs.stringify({ a: mutatedDate }), + 'a=' + Date.prototype.toISOString.call(mutatedDate).replace(/:/g, '%3A'), + 'toISOString works even when 
method is not locally present' + ); + + var specificDate = new Date(6); + st.equal( + qs.stringify( + { a: specificDate }, + { serializeDate: function (d) { return d.getTime() * 7; } } + ), + 'a=42', + 'custom serializeDate function called' + ); + + st.equal( + qs.stringify( + { a: [date] }, + { + serializeDate: function (d) { return d.getTime(); }, + arrayFormat: 'comma' + } + ), + 'a=' + date.getTime(), + 'works with arrayFormat comma' + ); + st.equal( + qs.stringify( + { a: [date] }, + { + serializeDate: function (d) { return d.getTime(); }, + arrayFormat: 'comma', + commaRoundTrip: true + } + ), + 'a%5B%5D=' + date.getTime(), + 'works with arrayFormat comma' + ); + + st.end(); + }); + + t.test('RFC 1738 serialization', function (st) { + st.equal(qs.stringify({ a: 'b c' }, { format: qs.formats.RFC1738 }), 'a=b+c'); + st.equal(qs.stringify({ 'a b': 'c d' }, { format: qs.formats.RFC1738 }), 'a+b=c+d'); + st.equal(qs.stringify({ 'a b': SaferBuffer.from('a b') }, { format: qs.formats.RFC1738 }), 'a+b=a+b'); + + st.equal(qs.stringify({ 'foo(ref)': 'bar' }, { format: qs.formats.RFC1738 }), 'foo(ref)=bar'); + + st.end(); + }); + + t.test('RFC 3986 spaces serialization', function (st) { + st.equal(qs.stringify({ a: 'b c' }, { format: qs.formats.RFC3986 }), 'a=b%20c'); + st.equal(qs.stringify({ 'a b': 'c d' }, { format: qs.formats.RFC3986 }), 'a%20b=c%20d'); + st.equal(qs.stringify({ 'a b': SaferBuffer.from('a b') }, { format: qs.formats.RFC3986 }), 'a%20b=a%20b'); + + st.end(); + }); + + t.test('Backward compatibility to RFC 3986', function (st) { + st.equal(qs.stringify({ a: 'b c' }), 'a=b%20c'); + st.equal(qs.stringify({ 'a b': SaferBuffer.from('a b') }), 'a%20b=a%20b'); + + st.end(); + }); + + t.test('Edge cases and unknown formats', function (st) { + ['UFO1234', false, 1234, null, {}, []].forEach(function (format) { + st['throws']( + function () { + qs.stringify({ a: 'b c' }, { format: format }); + }, + new TypeError('Unknown format option provided.') + ); + }); + st.end(); + }); + + t.test('encodeValuesOnly', function (st) { + st.equal( + qs.stringify( + { a: 'b', c: ['d', 'e=f'], f: [['g'], ['h']] }, + { encodeValuesOnly: true } + ), + 'a=b&c[0]=d&c[1]=e%3Df&f[0][0]=g&f[1][0]=h' + ); + st.equal( + qs.stringify( + { a: 'b', c: ['d', 'e'], f: [['g'], ['h']] } + ), + 'a=b&c%5B0%5D=d&c%5B1%5D=e&f%5B0%5D%5B0%5D=g&f%5B1%5D%5B0%5D=h' + ); + st.end(); + }); + + t.test('encodeValuesOnly - strictNullHandling', function (st) { + st.equal( + qs.stringify( + { a: { b: null } }, + { encodeValuesOnly: true, strictNullHandling: true } + ), + 'a[b]' + ); + st.end(); + }); + + t.test('throws if an invalid charset is specified', function (st) { + st['throws'](function () { + qs.stringify({ a: 'b' }, { charset: 'foobar' }); + }, new TypeError('The charset option must be either utf-8, iso-8859-1, or undefined')); + st.end(); + }); + + t.test('respects a charset of iso-8859-1', function (st) { + st.equal(qs.stringify({ æ: 'æ' }, { charset: 'iso-8859-1' }), '%E6=%E6'); + st.end(); + }); + + t.test('encodes unrepresentable chars as numeric entities in iso-8859-1 mode', function (st) { + st.equal(qs.stringify({ a: '☺' }, { charset: 'iso-8859-1' }), 'a=%26%239786%3B'); + st.end(); + }); + + t.test('respects an explicit charset of utf-8 (the default)', function (st) { + st.equal(qs.stringify({ a: 'æ' }, { charset: 'utf-8' }), 'a=%C3%A6'); + st.end(); + }); + + t.test('adds the right sentinel when instructed to and the charset is utf-8', function (st) { + st.equal(qs.stringify({ a: 'æ' }, { charsetSentinel: true, 
charset: 'utf-8' }), 'utf8=%E2%9C%93&a=%C3%A6'); + st.end(); + }); + + t.test('adds the right sentinel when instructed to and the charset is iso-8859-1', function (st) { + st.equal(qs.stringify({ a: 'æ' }, { charsetSentinel: true, charset: 'iso-8859-1' }), 'utf8=%26%2310003%3B&a=%E6'); + st.end(); + }); + + t.test('does not mutate the options argument', function (st) { + var options = {}; + qs.stringify({}, options); + st.deepEqual(options, {}); + st.end(); + }); + + t.test('strictNullHandling works with custom filter', function (st) { + var filter = function (prefix, value) { + return value; + }; + + var options = { strictNullHandling: true, filter: filter }; + st.equal(qs.stringify({ key: null }, options), 'key'); + st.end(); + }); + + t.test('strictNullHandling works with null serializeDate', function (st) { + var serializeDate = function () { + return null; + }; + var options = { strictNullHandling: true, serializeDate: serializeDate }; + var date = new Date(); + st.equal(qs.stringify({ key: date }, options), 'key'); + st.end(); + }); + + t.test('allows for encoding keys and values differently', function (st) { + var encoder = function (str, defaultEncoder, charset, type) { + if (type === 'key') { + return defaultEncoder(str, defaultEncoder, charset, type).toLowerCase(); + } + if (type === 'value') { + return defaultEncoder(str, defaultEncoder, charset, type).toUpperCase(); + } + throw 'this should never happen! type: ' + type; + }; + + st.deepEqual(qs.stringify({ KeY: 'vAlUe' }, { encoder: encoder }), 'key=VALUE'); + st.end(); + }); + + t.test('objects inside arrays', function (st) { + var obj = { a: { b: { c: 'd', e: 'f' } } }; + var withArray = { a: { b: [{ c: 'd', e: 'f' }] } }; + + st.equal(qs.stringify(obj, { encode: false }), 'a[b][c]=d&a[b][e]=f', 'no array, no arrayFormat'); + st.equal(qs.stringify(obj, { encode: false, arrayFormat: 'bracket' }), 'a[b][c]=d&a[b][e]=f', 'no array, bracket'); + st.equal(qs.stringify(obj, { encode: false, arrayFormat: 'indices' }), 'a[b][c]=d&a[b][e]=f', 'no array, indices'); + st.equal(qs.stringify(obj, { encode: false, arrayFormat: 'comma' }), 'a[b][c]=d&a[b][e]=f', 'no array, comma'); + + st.equal(qs.stringify(withArray, { encode: false }), 'a[b][0][c]=d&a[b][0][e]=f', 'array, no arrayFormat'); + st.equal(qs.stringify(withArray, { encode: false, arrayFormat: 'bracket' }), 'a[b][0][c]=d&a[b][0][e]=f', 'array, bracket'); + st.equal(qs.stringify(withArray, { encode: false, arrayFormat: 'indices' }), 'a[b][0][c]=d&a[b][0][e]=f', 'array, indices'); + st.equal( + qs.stringify(withArray, { encode: false, arrayFormat: 'comma' }), + '???', + 'array, comma', + { skip: 'TODO: figure out what this should do' } + ); + + st.end(); + }); + + t.test('stringifies sparse arrays', function (st) { + /* eslint no-sparse-arrays: 0 */ + st.equal(qs.stringify({ a: [, '2', , , '1'] }, { encodeValuesOnly: true }), 'a[1]=2&a[4]=1'); + st.equal(qs.stringify({ a: [, { b: [, , { c: '1' }] }] }, { encodeValuesOnly: true }), 'a[1][b][2][c]=1'); + st.equal(qs.stringify({ a: [, [, , [, , , { c: '1' }]]] }, { encodeValuesOnly: true }), 'a[1][2][3][c]=1'); + st.equal(qs.stringify({ a: [, [, , [, , , { c: [, '1'] }]]] }, { encodeValuesOnly: true }), 'a[1][2][3][c][1]=1'); + + st.end(); + }); + + t.end(); +}); diff --git a/node_modules/qs/test/utils.js b/node_modules/qs/test/utils.js new file mode 100644 index 0000000..aa84dfd --- /dev/null +++ b/node_modules/qs/test/utils.js @@ -0,0 +1,136 @@ +'use strict'; + +var test = require('tape'); +var inspect = require('object-inspect'); 
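// The suite below targets qs's internal lib/utils helpers. A rough sketch of the
// merge()/combine() behaviour it asserts (assuming the module layout required here):
//   utils.merge({ a: 'b' }, { a: 'c' })   // => { a: ['b', 'c'] }  (same key collapses into an array)
//   utils.merge(null, true)               // => [null, true]
//   utils.combine([1], [2])               // => [1, 2]             (neither input array is mutated)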
+var SaferBuffer = require('safer-buffer').Buffer; +var forEach = require('for-each'); +var utils = require('../lib/utils'); + +test('merge()', function (t) { + t.deepEqual(utils.merge(null, true), [null, true], 'merges true into null'); + + t.deepEqual(utils.merge(null, [42]), [null, 42], 'merges null into an array'); + + t.deepEqual(utils.merge({ a: 'b' }, { a: 'c' }), { a: ['b', 'c'] }, 'merges two objects with the same key'); + + var oneMerged = utils.merge({ foo: 'bar' }, { foo: { first: '123' } }); + t.deepEqual(oneMerged, { foo: ['bar', { first: '123' }] }, 'merges a standalone and an object into an array'); + + var twoMerged = utils.merge({ foo: ['bar', { first: '123' }] }, { foo: { second: '456' } }); + t.deepEqual(twoMerged, { foo: { 0: 'bar', 1: { first: '123' }, second: '456' } }, 'merges a standalone and two objects into an array'); + + var sandwiched = utils.merge({ foo: ['bar', { first: '123', second: '456' }] }, { foo: 'baz' }); + t.deepEqual(sandwiched, { foo: ['bar', { first: '123', second: '456' }, 'baz'] }, 'merges an object sandwiched by two standalones into an array'); + + var nestedArrays = utils.merge({ foo: ['baz'] }, { foo: ['bar', 'xyzzy'] }); + t.deepEqual(nestedArrays, { foo: ['baz', 'bar', 'xyzzy'] }); + + var noOptionsNonObjectSource = utils.merge({ foo: 'baz' }, 'bar'); + t.deepEqual(noOptionsNonObjectSource, { foo: 'baz', bar: true }); + + t.test( + 'avoids invoking array setters unnecessarily', + { skip: typeof Object.defineProperty !== 'function' }, + function (st) { + var setCount = 0; + var getCount = 0; + var observed = []; + Object.defineProperty(observed, 0, { + get: function () { + getCount += 1; + return { bar: 'baz' }; + }, + set: function () { setCount += 1; } + }); + utils.merge(observed, [null]); + st.equal(setCount, 0); + st.equal(getCount, 1); + observed[0] = observed[0]; // eslint-disable-line no-self-assign + st.equal(setCount, 1); + st.equal(getCount, 2); + st.end(); + } + ); + + t.end(); +}); + +test('assign()', function (t) { + var target = { a: 1, b: 2 }; + var source = { b: 3, c: 4 }; + var result = utils.assign(target, source); + + t.equal(result, target, 'returns the target'); + t.deepEqual(target, { a: 1, b: 3, c: 4 }, 'target and source are merged'); + t.deepEqual(source, { b: 3, c: 4 }, 'source is untouched'); + + t.end(); +}); + +test('combine()', function (t) { + t.test('both arrays', function (st) { + var a = [1]; + var b = [2]; + var combined = utils.combine(a, b); + + st.deepEqual(a, [1], 'a is not mutated'); + st.deepEqual(b, [2], 'b is not mutated'); + st.notEqual(a, combined, 'a !== combined'); + st.notEqual(b, combined, 'b !== combined'); + st.deepEqual(combined, [1, 2], 'combined is a + b'); + + st.end(); + }); + + t.test('one array, one non-array', function (st) { + var aN = 1; + var a = [aN]; + var bN = 2; + var b = [bN]; + + var combinedAnB = utils.combine(aN, b); + st.deepEqual(b, [bN], 'b is not mutated'); + st.notEqual(aN, combinedAnB, 'aN + b !== aN'); + st.notEqual(a, combinedAnB, 'aN + b !== a'); + st.notEqual(bN, combinedAnB, 'aN + b !== bN'); + st.notEqual(b, combinedAnB, 'aN + b !== b'); + st.deepEqual([1, 2], combinedAnB, 'first argument is array-wrapped when not an array'); + + var combinedABn = utils.combine(a, bN); + st.deepEqual(a, [aN], 'a is not mutated'); + st.notEqual(aN, combinedABn, 'a + bN !== aN'); + st.notEqual(a, combinedABn, 'a + bN !== a'); + st.notEqual(bN, combinedABn, 'a + bN !== bN'); + st.notEqual(b, combinedABn, 'a + bN !== b'); + st.deepEqual([1, 2], combinedABn, 'second argument is 
array-wrapped when not an array'); + + st.end(); + }); + + t.test('neither is an array', function (st) { + var combined = utils.combine(1, 2); + st.notEqual(1, combined, '1 + 2 !== 1'); + st.notEqual(2, combined, '1 + 2 !== 2'); + st.deepEqual([1, 2], combined, 'both arguments are array-wrapped when not an array'); + + st.end(); + }); + + t.end(); +}); + +test('isBuffer()', function (t) { + forEach([null, undefined, true, false, '', 'abc', 42, 0, NaN, {}, [], function () {}, /a/g], function (x) { + t.equal(utils.isBuffer(x), false, inspect(x) + ' is not a buffer'); + }); + + var fakeBuffer = { constructor: Buffer }; + t.equal(utils.isBuffer(fakeBuffer), false, 'fake buffer is not a buffer'); + + var saferBuffer = SaferBuffer.from('abc'); + t.equal(utils.isBuffer(saferBuffer), true, 'SaferBuffer instance is a buffer'); + + var buffer = Buffer.from && Buffer.alloc ? Buffer.from('abc') : new Buffer('abc'); + t.equal(utils.isBuffer(buffer), true, 'real Buffer instance is a buffer'); + t.end(); +}); diff --git a/node_modules/readable-stream/CONTRIBUTING.md b/node_modules/readable-stream/CONTRIBUTING.md new file mode 100644 index 0000000..f478d58 --- /dev/null +++ b/node_modules/readable-stream/CONTRIBUTING.md @@ -0,0 +1,38 @@ +# Developer's Certificate of Origin 1.1 + +By making a contribution to this project, I certify that: + +* (a) The contribution was created in whole or in part by me and I + have the right to submit it under the open source license + indicated in the file; or + +* (b) The contribution is based upon previous work that, to the best + of my knowledge, is covered under an appropriate open source + license and I have the right under that license to submit that + work with modifications, whether created in whole or in part + by me, under the same open source license (unless I am + permitted to submit under a different license), as indicated + in the file; or + +* (c) The contribution was provided directly to me by some other + person who certified (a), (b) or (c) and I have not modified + it. + +* (d) I understand and agree that this project and the contribution + are public and that a record of the contribution (including all + personal information I submit with it, including my sign-off) is + maintained indefinitely and may be redistributed consistent with + this project or the open source license(s) involved. + +## Moderation Policy + +The [Node.js Moderation Policy] applies to this WG. + +## Code of Conduct + +The [Node.js Code of Conduct][] applies to this WG. + +[Node.js Code of Conduct]: +https://github.com/nodejs/node/blob/master/CODE_OF_CONDUCT.md +[Node.js Moderation Policy]: +https://github.com/nodejs/TSC/blob/master/Moderation-Policy.md diff --git a/node_modules/readable-stream/GOVERNANCE.md b/node_modules/readable-stream/GOVERNANCE.md new file mode 100644 index 0000000..16ffb93 --- /dev/null +++ b/node_modules/readable-stream/GOVERNANCE.md @@ -0,0 +1,136 @@ +### Streams Working Group + +The Node.js Streams is jointly governed by a Working Group +(WG) +that is responsible for high-level guidance of the project. + +The WG has final authority over this project including: + +* Technical direction +* Project governance and process (including this policy) +* Contribution policy +* GitHub repository hosting +* Conduct guidelines +* Maintaining the list of additional Collaborators + +For the current list of WG members, see the project +[README.md](./README.md#current-project-team-members). 
+ +### Collaborators + +The readable-stream GitHub repository is +maintained by the WG and additional Collaborators who are added by the +WG on an ongoing basis. + +Individuals making significant and valuable contributions are made +Collaborators and given commit-access to the project. These +individuals are identified by the WG and their addition as +Collaborators is discussed during the WG meeting. + +_Note:_ If you make a significant contribution and are not considered +for commit-access log an issue or contact a WG member directly and it +will be brought up in the next WG meeting. + +Modifications of the contents of the readable-stream repository are +made on +a collaborative basis. Anybody with a GitHub account may propose a +modification via pull request and it will be considered by the project +Collaborators. All pull requests must be reviewed and accepted by a +Collaborator with sufficient expertise who is able to take full +responsibility for the change. In the case of pull requests proposed +by an existing Collaborator, an additional Collaborator is required +for sign-off. Consensus should be sought if additional Collaborators +participate and there is disagreement around a particular +modification. See _Consensus Seeking Process_ below for further detail +on the consensus model used for governance. + +Collaborators may opt to elevate significant or controversial +modifications, or modifications that have not found consensus to the +WG for discussion by assigning the ***WG-agenda*** tag to a pull +request or issue. The WG should serve as the final arbiter where +required. + +For the current list of Collaborators, see the project +[README.md](./README.md#members). + +### WG Membership + +WG seats are not time-limited. There is no fixed size of the WG. +However, the expected target is between 6 and 12, to ensure adequate +coverage of important areas of expertise, balanced with the ability to +make decisions efficiently. + +There is no specific set of requirements or qualifications for WG +membership beyond these rules. + +The WG may add additional members to the WG by unanimous consensus. + +A WG member may be removed from the WG by voluntary resignation, or by +unanimous consensus of all other WG members. + +Changes to WG membership should be posted in the agenda, and may be +suggested as any other agenda item (see "WG Meetings" below). + +If an addition or removal is proposed during a meeting, and the full +WG is not in attendance to participate, then the addition or removal +is added to the agenda for the subsequent meeting. This is to ensure +that all members are given the opportunity to participate in all +membership decisions. If a WG member is unable to attend a meeting +where a planned membership decision is being made, then their consent +is assumed. + +No more than 1/3 of the WG members may be affiliated with the same +employer. If removal or resignation of a WG member, or a change of +employment by a WG member, creates a situation where more than 1/3 of +the WG membership shares an employer, then the situation must be +immediately remedied by the resignation or removal of one or more WG +members affiliated with the over-represented employer(s). + +### WG Meetings + +The WG meets occasionally on a Google Hangout On Air. A designated moderator +approved by the WG runs the meeting. Each meeting should be +published to YouTube. 
+ +Items are added to the WG agenda that are considered contentious or +are modifications of governance, contribution policy, WG membership, +or release process. + +The intention of the agenda is not to approve or review all patches; +that should happen continuously on GitHub and be handled by the larger +group of Collaborators. + +Any community member or contributor can ask that something be added to +the next meeting's agenda by logging a GitHub Issue. Any Collaborator, +WG member or the moderator can add the item to the agenda by adding +the ***WG-agenda*** tag to the issue. + +Prior to each WG meeting the moderator will share the Agenda with +members of the WG. WG members can add any items they like to the +agenda at the beginning of each meeting. The moderator and the WG +cannot veto or remove items. + +The WG may invite persons or representatives from certain projects to +participate in a non-voting capacity. + +The moderator is responsible for summarizing the discussion of each +agenda item and sends it as a pull request after the meeting. + +### Consensus Seeking Process + +The WG follows a +[Consensus +Seeking](http://en.wikipedia.org/wiki/Consensus-seeking_decision-making) +decision-making model. + +When an agenda item has appeared to reach a consensus the moderator +will ask "Does anyone object?" as a final call for dissent from the +consensus. + +If an agenda item cannot reach a consensus a WG member can call for +either a closing vote or a vote to table the issue to the next +meeting. The call for a vote must be seconded by a majority of the WG +or else the discussion will continue. Simple majority wins. + +Note that changes to WG membership require a majority consensus. See +"WG Membership" above. diff --git a/node_modules/readable-stream/LICENSE b/node_modules/readable-stream/LICENSE new file mode 100644 index 0000000..2873b3b --- /dev/null +++ b/node_modules/readable-stream/LICENSE @@ -0,0 +1,47 @@ +Node.js is licensed for use as follows: + +""" +Copyright Node.js contributors. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + +This license applies to parts of Node.js originating from the +https://github.com/joyent/node repository: + +""" +Copyright Joyent, Inc. and other Node contributors. All rights reserved. 
+Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" diff --git a/node_modules/readable-stream/README.md b/node_modules/readable-stream/README.md new file mode 100644 index 0000000..6f035ab --- /dev/null +++ b/node_modules/readable-stream/README.md @@ -0,0 +1,106 @@ +# readable-stream + +***Node.js core streams for userland*** [![Build Status](https://travis-ci.com/nodejs/readable-stream.svg?branch=master)](https://travis-ci.com/nodejs/readable-stream) + + +[![NPM](https://nodei.co/npm/readable-stream.png?downloads=true&downloadRank=true)](https://nodei.co/npm/readable-stream/) +[![NPM](https://nodei.co/npm-dl/readable-stream.png?&months=6&height=3)](https://nodei.co/npm/readable-stream/) + + +[![Sauce Test Status](https://saucelabs.com/browser-matrix/readabe-stream.svg)](https://saucelabs.com/u/readabe-stream) + +```bash +npm install --save readable-stream +``` + +This package is a mirror of the streams implementations in Node.js. + +Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v10.19.0/docs/api/stream.html). + +If you want to guarantee a stable streams base, regardless of what version of +Node you, or the users of your libraries are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core, for background see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html). + +As of version 2.0.0 **readable-stream** uses semantic versioning. + +## Version 3.x.x + +v3.x.x of `readable-stream` is a cut from Node 10. This version supports Node 6, 8, and 10, as well as evergreen browsers, IE 11 and latest Safari. The breaking changes introduced by v3 are composed by the combined breaking changes in [Node v9](https://nodejs.org/en/blog/release/v9.0.0/) and [Node v10](https://nodejs.org/en/blog/release/v10.0.0/), as follows: + +1. Error codes: https://github.com/nodejs/node/pull/13310, + https://github.com/nodejs/node/pull/13291, + https://github.com/nodejs/node/pull/16589, + https://github.com/nodejs/node/pull/15042, + https://github.com/nodejs/node/pull/15665, + https://github.com/nodejs/readable-stream/pull/344 +2. 'readable' have precedence over flowing + https://github.com/nodejs/node/pull/18994 +3. make virtual methods errors consistent + https://github.com/nodejs/node/pull/18813 +4. updated streams error handling + https://github.com/nodejs/node/pull/18438 +5. writable.end should return this. + https://github.com/nodejs/node/pull/18780 +6. readable continues to read when push('') + https://github.com/nodejs/node/pull/18211 +7. 
add custom inspect to BufferList + https://github.com/nodejs/node/pull/17907 +8. always defer 'readable' with nextTick + https://github.com/nodejs/node/pull/17979 + +## Version 2.x.x +v2.x.x of `readable-stream` is a cut of the stream module from Node 8 (there have been no semver-major changes from Node 4 to 8). This version supports all Node.js versions from 0.8, as well as evergreen browsers and IE 10 & 11. + +### Big Thanks + +Cross-browser Testing Platform and Open Source <3 Provided by [Sauce Labs][sauce] + +# Usage + +You can swap your `require('stream')` with `require('readable-stream')` +without any changes, if you are just using one of the main classes and +functions. + +```js +const { + Readable, + Writable, + Transform, + Duplex, + pipeline, + finished +} = require('readable-stream') +```` + +Note that `require('stream')` will return `Stream`, while +`require('readable-stream')` will return `Readable`. We discourage using +whatever is exported directly, but rather use one of the properties as +shown in the example above. + +# Streams Working Group + +`readable-stream` is maintained by the Streams Working Group, which +oversees the development and maintenance of the Streams API within +Node.js. The responsibilities of the Streams Working Group include: + +* Addressing stream issues on the Node.js issue tracker. +* Authoring and editing stream documentation within the Node.js project. +* Reviewing changes to stream subclasses within the Node.js project. +* Redirecting changes to streams from the Node.js project to this + project. +* Assisting in the implementation of stream providers within Node.js. +* Recommending versions of `readable-stream` to be included in Node.js. +* Messaging about the future of streams to give the community advance + notice of changes. 
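
The Usage section above lists the classes and helpers that `readable-stream` exports. As a minimal illustrative sketch (an editorial addition, not part of the vendored README), the snippet below wires those exports together with `pipeline`, which forwards errors and tears the streams down on failure. Only the documented exports are used; the chunk contents and variable names are made up for the example:

```js
const { Readable, Writable, Transform, pipeline } = require('readable-stream');

// A source that emits two chunks and then signals end-of-stream.
const source = new Readable({
  read() {
    this.push('hello ');
    this.push('world');
    this.push(null); // null means "no more data"
  }
});

// A transform that upper-cases every chunk passing through it.
const upper = new Transform({
  transform(chunk, encoding, callback) {
    callback(null, chunk.toString().toUpperCase());
  }
});

// A sink that just logs what it receives.
const sink = new Writable({
  write(chunk, encoding, callback) {
    console.log('received:', chunk.toString());
    callback();
  }
});

// pipeline connects the streams and reports success or failure exactly once.
pipeline(source, upper, sink, (err) => {
  if (err) console.error('pipeline failed:', err);
  else console.log('pipeline finished');
});
```

Because the package mirrors the streams implementation in Node core, the same code should run unchanged against the built-in `stream` module; depending on `readable-stream` simply pins the streams behaviour regardless of the Node version in use.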
+ + +## Team Members + +* **Calvin Metcalf** ([@calvinmetcalf](https://github.com/calvinmetcalf)) <calvin.metcalf@gmail.com> + - Release GPG key: F3EF5F62A87FC27A22E643F714CE4FF5015AA242 +* **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) <mathiasbuus@gmail.com> +* **Matteo Collina** ([@mcollina](https://github.com/mcollina)) <matteo.collina@gmail.com> + - Release GPG key: 3ABC01543F22DD2239285CDD818674489FBC127E +* **Irina Shestak** ([@lrlna](https://github.com/lrlna)) <shestak.irina@gmail.com> +* **Yoshua Wyuts** ([@yoshuawuyts](https://github.com/yoshuawuyts)) <yoshuawuyts@gmail.com> + +[sauce]: https://saucelabs.com diff --git a/node_modules/readable-stream/errors-browser.js b/node_modules/readable-stream/errors-browser.js new file mode 100644 index 0000000..fb8e73e --- /dev/null +++ b/node_modules/readable-stream/errors-browser.js @@ -0,0 +1,127 @@ +'use strict'; + +function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; subClass.__proto__ = superClass; } + +var codes = {}; + +function createErrorType(code, message, Base) { + if (!Base) { + Base = Error; + } + + function getMessage(arg1, arg2, arg3) { + if (typeof message === 'string') { + return message; + } else { + return message(arg1, arg2, arg3); + } + } + + var NodeError = + /*#__PURE__*/ + function (_Base) { + _inheritsLoose(NodeError, _Base); + + function NodeError(arg1, arg2, arg3) { + return _Base.call(this, getMessage(arg1, arg2, arg3)) || this; + } + + return NodeError; + }(Base); + + NodeError.prototype.name = Base.name; + NodeError.prototype.code = code; + codes[code] = NodeError; +} // https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js + + +function oneOf(expected, thing) { + if (Array.isArray(expected)) { + var len = expected.length; + expected = expected.map(function (i) { + return String(i); + }); + + if (len > 2) { + return "one of ".concat(thing, " ").concat(expected.slice(0, len - 1).join(', '), ", or ") + expected[len - 1]; + } else if (len === 2) { + return "one of ".concat(thing, " ").concat(expected[0], " or ").concat(expected[1]); + } else { + return "of ".concat(thing, " ").concat(expected[0]); + } + } else { + return "of ".concat(thing, " ").concat(String(expected)); + } +} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith + + +function startsWith(str, search, pos) { + return str.substr(!pos || pos < 0 ? 
0 : +pos, search.length) === search; +} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith + + +function endsWith(str, search, this_len) { + if (this_len === undefined || this_len > str.length) { + this_len = str.length; + } + + return str.substring(this_len - search.length, this_len) === search; +} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes + + +function includes(str, search, start) { + if (typeof start !== 'number') { + start = 0; + } + + if (start + search.length > str.length) { + return false; + } else { + return str.indexOf(search, start) !== -1; + } +} + +createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) { + return 'The value "' + value + '" is invalid for option "' + name + '"'; +}, TypeError); +createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) { + // determiner: 'must be' or 'must not be' + var determiner; + + if (typeof expected === 'string' && startsWith(expected, 'not ')) { + determiner = 'must not be'; + expected = expected.replace(/^not /, ''); + } else { + determiner = 'must be'; + } + + var msg; + + if (endsWith(name, ' argument')) { + // For cases like 'first argument' + msg = "The ".concat(name, " ").concat(determiner, " ").concat(oneOf(expected, 'type')); + } else { + var type = includes(name, '.') ? 'property' : 'argument'; + msg = "The \"".concat(name, "\" ").concat(type, " ").concat(determiner, " ").concat(oneOf(expected, 'type')); + } + + msg += ". Received type ".concat(typeof actual); + return msg; +}, TypeError); +createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF'); +createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) { + return 'The ' + name + ' method is not implemented'; +}); +createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close'); +createErrorType('ERR_STREAM_DESTROYED', function (name) { + return 'Cannot call ' + name + ' after a stream was destroyed'; +}); +createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times'); +createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable'); +createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end'); +createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError); +createErrorType('ERR_UNKNOWN_ENCODING', function (arg) { + return 'Unknown encoding: ' + arg; +}, TypeError); +createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event'); +module.exports.codes = codes; diff --git a/node_modules/readable-stream/errors.js b/node_modules/readable-stream/errors.js new file mode 100644 index 0000000..8471526 --- /dev/null +++ b/node_modules/readable-stream/errors.js @@ -0,0 +1,116 @@ +'use strict'; + +const codes = {}; + +function createErrorType(code, message, Base) { + if (!Base) { + Base = Error + } + + function getMessage (arg1, arg2, arg3) { + if (typeof message === 'string') { + return message + } else { + return message(arg1, arg2, arg3) + } + } + + class NodeError extends Base { + constructor (arg1, arg2, arg3) { + super(getMessage(arg1, arg2, arg3)); + } + } + + NodeError.prototype.name = Base.name; + NodeError.prototype.code = code; + + codes[code] = NodeError; +} + +// https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js +function oneOf(expected, thing) { + if (Array.isArray(expected)) { + const len = expected.length; + expected = expected.map((i) => String(i)); + if (len > 2) { + return `one of ${thing} ${expected.slice(0, len - 
1).join(', ')}, or ` + + expected[len - 1]; + } else if (len === 2) { + return `one of ${thing} ${expected[0]} or ${expected[1]}`; + } else { + return `of ${thing} ${expected[0]}`; + } + } else { + return `of ${thing} ${String(expected)}`; + } +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith +function startsWith(str, search, pos) { + return str.substr(!pos || pos < 0 ? 0 : +pos, search.length) === search; +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith +function endsWith(str, search, this_len) { + if (this_len === undefined || this_len > str.length) { + this_len = str.length; + } + return str.substring(this_len - search.length, this_len) === search; +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes +function includes(str, search, start) { + if (typeof start !== 'number') { + start = 0; + } + + if (start + search.length > str.length) { + return false; + } else { + return str.indexOf(search, start) !== -1; + } +} + +createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) { + return 'The value "' + value + '" is invalid for option "' + name + '"' +}, TypeError); +createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) { + // determiner: 'must be' or 'must not be' + let determiner; + if (typeof expected === 'string' && startsWith(expected, 'not ')) { + determiner = 'must not be'; + expected = expected.replace(/^not /, ''); + } else { + determiner = 'must be'; + } + + let msg; + if (endsWith(name, ' argument')) { + // For cases like 'first argument' + msg = `The ${name} ${determiner} ${oneOf(expected, 'type')}`; + } else { + const type = includes(name, '.') ? 'property' : 'argument'; + msg = `The "${name}" ${type} ${determiner} ${oneOf(expected, 'type')}`; + } + + msg += `. Received type ${typeof actual}`; + return msg; +}, TypeError); +createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF'); +createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) { + return 'The ' + name + ' method is not implemented' +}); +createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close'); +createErrorType('ERR_STREAM_DESTROYED', function (name) { + return 'Cannot call ' + name + ' after a stream was destroyed'; +}); +createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times'); +createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable'); +createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end'); +createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError); +createErrorType('ERR_UNKNOWN_ENCODING', function (arg) { + return 'Unknown encoding: ' + arg +}, TypeError); +createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event'); + +module.exports.codes = codes; diff --git a/node_modules/readable-stream/experimentalWarning.js b/node_modules/readable-stream/experimentalWarning.js new file mode 100644 index 0000000..78e8414 --- /dev/null +++ b/node_modules/readable-stream/experimentalWarning.js @@ -0,0 +1,17 @@ +'use strict' + +var experimentalWarnings = new Set(); + +function emitExperimentalWarning(feature) { + if (experimentalWarnings.has(feature)) return; + var msg = feature + ' is an experimental feature. 
This feature could ' + + 'change at any time'; + experimentalWarnings.add(feature); + process.emitWarning(msg, 'ExperimentalWarning'); +} + +function noop() {} + +module.exports.emitExperimentalWarning = process.emitWarning + ? emitExperimentalWarning + : noop; diff --git a/node_modules/readable-stream/lib/_stream_duplex.js b/node_modules/readable-stream/lib/_stream_duplex.js new file mode 100644 index 0000000..6752519 --- /dev/null +++ b/node_modules/readable-stream/lib/_stream_duplex.js @@ -0,0 +1,139 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. +// a duplex stream is just a stream that is both readable and writable. +// Since JS doesn't have multiple prototypal inheritance, this class +// prototypally inherits from Readable, and then parasitically from +// Writable. +'use strict'; +/**/ + +var objectKeys = Object.keys || function (obj) { + var keys = []; + + for (var key in obj) { + keys.push(key); + } + + return keys; +}; +/**/ + + +module.exports = Duplex; + +var Readable = require('./_stream_readable'); + +var Writable = require('./_stream_writable'); + +require('inherits')(Duplex, Readable); + +{ + // Allow the keys array to be GC'ed. 
+ var keys = objectKeys(Writable.prototype); + + for (var v = 0; v < keys.length; v++) { + var method = keys[v]; + if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; + } +} + +function Duplex(options) { + if (!(this instanceof Duplex)) return new Duplex(options); + Readable.call(this, options); + Writable.call(this, options); + this.allowHalfOpen = true; + + if (options) { + if (options.readable === false) this.readable = false; + if (options.writable === false) this.writable = false; + + if (options.allowHalfOpen === false) { + this.allowHalfOpen = false; + this.once('end', onend); + } + } +} + +Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.highWaterMark; + } +}); +Object.defineProperty(Duplex.prototype, 'writableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState && this._writableState.getBuffer(); + } +}); +Object.defineProperty(Duplex.prototype, 'writableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.length; + } +}); // the no-half-open enforcer + +function onend() { + // If the writable side ended, then we're ok. + if (this._writableState.ended) return; // no more data can be written. + // But allow more writes to happen in this tick. + + process.nextTick(onEndNT, this); +} + +function onEndNT(self) { + self.end(); +} + +Object.defineProperty(Duplex.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + if (this._readableState === undefined || this._writableState === undefined) { + return false; + } + + return this._readableState.destroyed && this._writableState.destroyed; + }, + set: function set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (this._readableState === undefined || this._writableState === undefined) { + return; + } // backward compatibility, the user is explicitly + // managing destroyed + + + this._readableState.destroyed = value; + this._writableState.destroyed = value; + } +}); \ No newline at end of file diff --git a/node_modules/readable-stream/lib/_stream_passthrough.js b/node_modules/readable-stream/lib/_stream_passthrough.js new file mode 100644 index 0000000..32e7414 --- /dev/null +++ b/node_modules/readable-stream/lib/_stream_passthrough.js @@ -0,0 +1,39 @@ +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. +// a passthrough stream. +// basically just the most minimal sort of Transform stream. +// Every written chunk gets output as-is. +'use strict'; + +module.exports = PassThrough; + +var Transform = require('./_stream_transform'); + +require('inherits')(PassThrough, Transform); + +function PassThrough(options) { + if (!(this instanceof PassThrough)) return new PassThrough(options); + Transform.call(this, options); +} + +PassThrough.prototype._transform = function (chunk, encoding, cb) { + cb(null, chunk); +}; \ No newline at end of file diff --git a/node_modules/readable-stream/lib/_stream_readable.js b/node_modules/readable-stream/lib/_stream_readable.js new file mode 100644 index 0000000..192d451 --- /dev/null +++ b/node_modules/readable-stream/lib/_stream_readable.js @@ -0,0 +1,1124 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
+'use strict'; + +module.exports = Readable; +/**/ + +var Duplex; +/**/ + +Readable.ReadableState = ReadableState; +/**/ + +var EE = require('events').EventEmitter; + +var EElistenerCount = function EElistenerCount(emitter, type) { + return emitter.listeners(type).length; +}; +/**/ + +/**/ + + +var Stream = require('./internal/streams/stream'); +/**/ + + +var Buffer = require('buffer').Buffer; + +var OurUint8Array = global.Uint8Array || function () {}; + +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} + +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} +/**/ + + +var debugUtil = require('util'); + +var debug; + +if (debugUtil && debugUtil.debuglog) { + debug = debugUtil.debuglog('stream'); +} else { + debug = function debug() {}; +} +/**/ + + +var BufferList = require('./internal/streams/buffer_list'); + +var destroyImpl = require('./internal/streams/destroy'); + +var _require = require('./internal/streams/state'), + getHighWaterMark = _require.getHighWaterMark; + +var _require$codes = require('../errors').codes, + ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, + ERR_STREAM_PUSH_AFTER_EOF = _require$codes.ERR_STREAM_PUSH_AFTER_EOF, + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_STREAM_UNSHIFT_AFTER_END_EVENT = _require$codes.ERR_STREAM_UNSHIFT_AFTER_END_EVENT; // Lazy loaded to improve the startup performance. + + +var StringDecoder; +var createReadableStreamAsyncIterator; +var from; + +require('inherits')(Readable, Stream); + +var errorOrDestroy = destroyImpl.errorOrDestroy; +var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; + +function prependListener(emitter, event, fn) { + // Sadly this is not cacheable as some libraries bundle their own + // event emitter implementation with them. + if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); // This is a hack to make sure that our error handler is attached before any + // userland ones. NEVER DO THIS. This is here only because this code needs + // to continue to work with older versions of Node.js that do not include + // the prependListener() method. The goal is to eventually remove this hack. + + if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (Array.isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; +} + +function ReadableState(options, stream, isDuplex) { + Duplex = Duplex || require('./_stream_duplex'); + options = options || {}; // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream. + // These options can be provided separately as readableXXX and writableXXX. + + if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; // object stream flag. 
Used to make read(n) ignore n and to + // make all the buffer merging and length checks go away + + this.objectMode = !!options.objectMode; + if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; // the point at which it stops calling _read() to fill the buffer + // Note: 0 is a valid value, means "don't call _read preemptively ever" + + this.highWaterMark = getHighWaterMark(this, options, 'readableHighWaterMark', isDuplex); // A linked list is used to store data chunks instead of an array because the + // linked list can remove elements from the beginning faster than + // array.shift() + + this.buffer = new BufferList(); + this.length = 0; + this.pipes = null; + this.pipesCount = 0; + this.flowing = null; + this.ended = false; + this.endEmitted = false; + this.reading = false; // a flag to be able to tell if the event 'readable'/'data' is emitted + // immediately, or on a later tick. We set this to true at first, because + // any actions that shouldn't happen until "later" should generally also + // not happen before the first read call. + + this.sync = true; // whenever we return null, then we set a flag to say + // that we're awaiting a 'readable' event emission. + + this.needReadable = false; + this.emittedReadable = false; + this.readableListening = false; + this.resumeScheduled = false; + this.paused = true; // Should close be emitted on destroy. Defaults to true. + + this.emitClose = options.emitClose !== false; // Should .destroy() be called after 'end' (and potentially 'finish') + + this.autoDestroy = !!options.autoDestroy; // has it been destroyed + + this.destroyed = false; // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. 
+ + this.defaultEncoding = options.defaultEncoding || 'utf8'; // the number of writers that are awaiting a drain event in .pipe()s + + this.awaitDrain = 0; // if true, a maybeReadMore has been scheduled + + this.readingMore = false; + this.decoder = null; + this.encoding = null; + + if (options.encoding) { + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; + this.decoder = new StringDecoder(options.encoding); + this.encoding = options.encoding; + } +} + +function Readable(options) { + Duplex = Duplex || require('./_stream_duplex'); + if (!(this instanceof Readable)) return new Readable(options); // Checking for a Stream.Duplex instance is faster here instead of inside + // the ReadableState constructor, at least with V8 6.5 + + var isDuplex = this instanceof Duplex; + this._readableState = new ReadableState(options, this, isDuplex); // legacy + + this.readable = true; + + if (options) { + if (typeof options.read === 'function') this._read = options.read; + if (typeof options.destroy === 'function') this._destroy = options.destroy; + } + + Stream.call(this); +} + +Object.defineProperty(Readable.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + if (this._readableState === undefined) { + return false; + } + + return this._readableState.destroyed; + }, + set: function set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._readableState) { + return; + } // backward compatibility, the user is explicitly + // managing destroyed + + + this._readableState.destroyed = value; + } +}); +Readable.prototype.destroy = destroyImpl.destroy; +Readable.prototype._undestroy = destroyImpl.undestroy; + +Readable.prototype._destroy = function (err, cb) { + cb(err); +}; // Manually shove something into the read() buffer. +// This returns true if the highWaterMark has not been hit yet, +// similar to how Writable.write() returns true if you should +// write() some more. 
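// [Editorial note, not part of the vendored file] A short sketch of the
// return-value contract described in the comment above, assuming a Readable
// instance `r` and a hypothetical synchronous `produceChunk()` source:
//
//   let more = true;
//   while (more) {
//     const chunk = produceChunk();                 // hypothetical producer
//     if (chunk === null) { r.push(null); break; }  // end of data
//     more = r.push(chunk);                         // false once the highWaterMark is hit
//   }
//   // once push() returns false, stop producing and wait for _read() to be called again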
+ + +Readable.prototype.push = function (chunk, encoding) { + var state = this._readableState; + var skipChunkCheck; + + if (!state.objectMode) { + if (typeof chunk === 'string') { + encoding = encoding || state.defaultEncoding; + + if (encoding !== state.encoding) { + chunk = Buffer.from(chunk, encoding); + encoding = ''; + } + + skipChunkCheck = true; + } + } else { + skipChunkCheck = true; + } + + return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); +}; // Unshift should *always* be something directly out of read() + + +Readable.prototype.unshift = function (chunk) { + return readableAddChunk(this, chunk, null, true, false); +}; + +function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { + debug('readableAddChunk', chunk); + var state = stream._readableState; + + if (chunk === null) { + state.reading = false; + onEofChunk(stream, state); + } else { + var er; + if (!skipChunkCheck) er = chunkInvalid(state, chunk); + + if (er) { + errorOrDestroy(stream, er); + } else if (state.objectMode || chunk && chunk.length > 0) { + if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) { + chunk = _uint8ArrayToBuffer(chunk); + } + + if (addToFront) { + if (state.endEmitted) errorOrDestroy(stream, new ERR_STREAM_UNSHIFT_AFTER_END_EVENT());else addChunk(stream, state, chunk, true); + } else if (state.ended) { + errorOrDestroy(stream, new ERR_STREAM_PUSH_AFTER_EOF()); + } else if (state.destroyed) { + return false; + } else { + state.reading = false; + + if (state.decoder && !encoding) { + chunk = state.decoder.write(chunk); + if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state); + } else { + addChunk(stream, state, chunk, false); + } + } + } else if (!addToFront) { + state.reading = false; + maybeReadMore(stream, state); + } + } // We can push more data if we are below the highWaterMark. + // Also, if we have no data yet, we can stand some more bytes. + // This is to work around cases where hwm=0, such as the repl. + + + return !state.ended && (state.length < state.highWaterMark || state.length === 0); +} + +function addChunk(stream, state, chunk, addToFront) { + if (state.flowing && state.length === 0 && !state.sync) { + state.awaitDrain = 0; + stream.emit('data', chunk); + } else { + // update the buffer info. + state.length += state.objectMode ? 1 : chunk.length; + if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); + if (state.needReadable) emitReadable(stream); + } + + maybeReadMore(stream, state); +} + +function chunkInvalid(state, chunk) { + var er; + + if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk); + } + + return er; +} + +Readable.prototype.isPaused = function () { + return this._readableState.flowing === false; +}; // backwards compatibility. 
+ + +Readable.prototype.setEncoding = function (enc) { + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; + var decoder = new StringDecoder(enc); + this._readableState.decoder = decoder; // If setEncoding(null), decoder.encoding equals utf8 + + this._readableState.encoding = this._readableState.decoder.encoding; // Iterate over current buffer to convert already stored Buffers: + + var p = this._readableState.buffer.head; + var content = ''; + + while (p !== null) { + content += decoder.write(p.data); + p = p.next; + } + + this._readableState.buffer.clear(); + + if (content !== '') this._readableState.buffer.push(content); + this._readableState.length = content.length; + return this; +}; // Don't raise the hwm > 1GB + + +var MAX_HWM = 0x40000000; + +function computeNewHighWaterMark(n) { + if (n >= MAX_HWM) { + // TODO(ronag): Throw ERR_VALUE_OUT_OF_RANGE. + n = MAX_HWM; + } else { + // Get the next highest power of 2 to prevent increasing hwm excessively in + // tiny amounts + n--; + n |= n >>> 1; + n |= n >>> 2; + n |= n >>> 4; + n |= n >>> 8; + n |= n >>> 16; + n++; + } + + return n; +} // This function is designed to be inlinable, so please take care when making +// changes to the function body. + + +function howMuchToRead(n, state) { + if (n <= 0 || state.length === 0 && state.ended) return 0; + if (state.objectMode) return 1; + + if (n !== n) { + // Only flow one buffer at a time + if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; + } // If we're asking for more than the current hwm, then raise the hwm. + + + if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); + if (n <= state.length) return n; // Don't have enough + + if (!state.ended) { + state.needReadable = true; + return 0; + } + + return state.length; +} // you can override either this method, or the async _read(n) below. + + +Readable.prototype.read = function (n) { + debug('read', n); + n = parseInt(n, 10); + var state = this._readableState; + var nOrig = n; + if (n !== 0) state.emittedReadable = false; // if we're doing read(0) to trigger a readable event, but we + // already have a bunch of data in the buffer, then just trigger + // the 'readable' event and move on. + + if (n === 0 && state.needReadable && ((state.highWaterMark !== 0 ? state.length >= state.highWaterMark : state.length > 0) || state.ended)) { + debug('read: emitReadable', state.length, state.ended); + if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); + return null; + } + + n = howMuchToRead(n, state); // if we've ended, and we're now clear, then finish it up. + + if (n === 0 && state.ended) { + if (state.length === 0) endReadable(this); + return null; + } // All the actual chunk generation logic needs to be + // *below* the call to _read. The reason is that in certain + // synthetic stream cases, such as passthrough streams, _read + // may be a completely synchronous operation which may change + // the state of the read buffer, providing enough data when + // before there was *not* enough. + // + // So, the steps are: + // 1. Figure out what the state of things will be after we do + // a read from the buffer. + // + // 2. If that resulting state will trigger a _read, then call _read. + // Note that this may be asynchronous, or synchronous. 
Yes, it is + // deeply ugly to write APIs this way, but that still doesn't mean + // that the Readable class should behave improperly, as streams are + // designed to be sync/async agnostic. + // Take note if the _read call is sync or async (ie, if the read call + // has returned yet), so that we know whether or not it's safe to emit + // 'readable' etc. + // + // 3. Actually pull the requested chunks out of the buffer and return. + // if we need a readable event, then we need to do some reading. + + + var doRead = state.needReadable; + debug('need readable', doRead); // if we currently have less than the highWaterMark, then also read some + + if (state.length === 0 || state.length - n < state.highWaterMark) { + doRead = true; + debug('length less than watermark', doRead); + } // however, if we've ended, then there's no point, and if we're already + // reading, then it's unnecessary. + + + if (state.ended || state.reading) { + doRead = false; + debug('reading or ended', doRead); + } else if (doRead) { + debug('do read'); + state.reading = true; + state.sync = true; // if the length is currently zero, then we *need* a readable event. + + if (state.length === 0) state.needReadable = true; // call internal read method + + this._read(state.highWaterMark); + + state.sync = false; // If _read pushed data synchronously, then `reading` will be false, + // and we need to re-evaluate how much data we can return to the user. + + if (!state.reading) n = howMuchToRead(nOrig, state); + } + + var ret; + if (n > 0) ret = fromList(n, state);else ret = null; + + if (ret === null) { + state.needReadable = state.length <= state.highWaterMark; + n = 0; + } else { + state.length -= n; + state.awaitDrain = 0; + } + + if (state.length === 0) { + // If we have nothing in the buffer, then we want to know + // as soon as we *do* get something into the buffer. + if (!state.ended) state.needReadable = true; // If we tried to read() past the EOF, then emit end on the next tick. + + if (nOrig !== n && state.ended) endReadable(this); + } + + if (ret !== null) this.emit('data', ret); + return ret; +}; + +function onEofChunk(stream, state) { + debug('onEofChunk'); + if (state.ended) return; + + if (state.decoder) { + var chunk = state.decoder.end(); + + if (chunk && chunk.length) { + state.buffer.push(chunk); + state.length += state.objectMode ? 1 : chunk.length; + } + } + + state.ended = true; + + if (state.sync) { + // if we are sync, wait until next tick to emit the data. + // Otherwise we risk emitting data in the flow() + // the readable code triggers during a read() call + emitReadable(stream); + } else { + // emit 'readable' now to make sure it gets picked up. + state.needReadable = false; + + if (!state.emittedReadable) { + state.emittedReadable = true; + emitReadable_(stream); + } + } +} // Don't emit readable right away in sync mode, because this can trigger +// another read() call => stack overflow. This way, it might trigger +// a nextTick recursion warning, but that's not so bad. 
+ + +function emitReadable(stream) { + var state = stream._readableState; + debug('emitReadable', state.needReadable, state.emittedReadable); + state.needReadable = false; + + if (!state.emittedReadable) { + debug('emitReadable', state.flowing); + state.emittedReadable = true; + process.nextTick(emitReadable_, stream); + } +} + +function emitReadable_(stream) { + var state = stream._readableState; + debug('emitReadable_', state.destroyed, state.length, state.ended); + + if (!state.destroyed && (state.length || state.ended)) { + stream.emit('readable'); + state.emittedReadable = false; + } // The stream needs another readable event if + // 1. It is not flowing, as the flow mechanism will take + // care of it. + // 2. It is not ended. + // 3. It is below the highWaterMark, so we can schedule + // another readable later. + + + state.needReadable = !state.flowing && !state.ended && state.length <= state.highWaterMark; + flow(stream); +} // at this point, the user has presumably seen the 'readable' event, +// and called read() to consume some data. that may have triggered +// in turn another _read(n) call, in which case reading = true if +// it's in progress. +// However, if we're not ended, or reading, and the length < hwm, +// then go ahead and try to read some more preemptively. + + +function maybeReadMore(stream, state) { + if (!state.readingMore) { + state.readingMore = true; + process.nextTick(maybeReadMore_, stream, state); + } +} + +function maybeReadMore_(stream, state) { + // Attempt to read more data if we should. + // + // The conditions for reading more data are (one of): + // - Not enough data buffered (state.length < state.highWaterMark). The loop + // is responsible for filling the buffer with enough data if such data + // is available. If highWaterMark is 0 and we are not in the flowing mode + // we should _not_ attempt to buffer any extra data. We'll get more data + // when the stream consumer calls read() instead. + // - No data in the buffer, and the stream is in flowing mode. In this mode + // the loop below is responsible for ensuring read() is called. Failing to + // call read here would abort the flow and there's no other mechanism for + // continuing the flow if the stream consumer has just subscribed to the + // 'data' event. + // + // In addition to the above conditions to keep reading data, the following + // conditions prevent the data from being read: + // - The stream has ended (state.ended). + // - There is already a pending 'read' operation (state.reading). This is a + // case where the the stream has called the implementation defined _read() + // method, but they are processing the call asynchronously and have _not_ + // called push() with new data. In this case we skip performing more + // read()s. The execution ends in this method again after the _read() ends + // up calling push() with more data. + while (!state.reading && !state.ended && (state.length < state.highWaterMark || state.flowing && state.length === 0)) { + var len = state.length; + debug('maybeReadMore read 0'); + stream.read(0); + if (len === state.length) // didn't get any data, stop spinning. + break; + } + + state.readingMore = false; +} // abstract method. to be overridden in specific implementation classes. +// call cb(er, data) where data is <= n in length. +// for virtual (non-string, non-buffer) streams, "length" is somewhat +// arbitrary, and perhaps not very meaningful. 
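// [Editorial note, not part of the vendored file] A minimal sketch of the
// override described in the comment above, using only the public API; the
// class name and the counting logic are invented for illustration:
//
//   const { Readable } = require('readable-stream');
//
//   class Counter extends Readable {
//     constructor(limit) {
//       super();
//       this.current = 0;
//       this.limit = limit;
//     }
//     _read() {
//       if (this.current < this.limit) {
//         this.push(String(this.current++)); // supply one chunk per call
//       } else {
//         this.push(null);                   // signal end-of-stream
//       }
//     }
//   }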
+ + +Readable.prototype._read = function (n) { + errorOrDestroy(this, new ERR_METHOD_NOT_IMPLEMENTED('_read()')); +}; + +Readable.prototype.pipe = function (dest, pipeOpts) { + var src = this; + var state = this._readableState; + + switch (state.pipesCount) { + case 0: + state.pipes = dest; + break; + + case 1: + state.pipes = [state.pipes, dest]; + break; + + default: + state.pipes.push(dest); + break; + } + + state.pipesCount += 1; + debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); + var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; + var endFn = doEnd ? onend : unpipe; + if (state.endEmitted) process.nextTick(endFn);else src.once('end', endFn); + dest.on('unpipe', onunpipe); + + function onunpipe(readable, unpipeInfo) { + debug('onunpipe'); + + if (readable === src) { + if (unpipeInfo && unpipeInfo.hasUnpiped === false) { + unpipeInfo.hasUnpiped = true; + cleanup(); + } + } + } + + function onend() { + debug('onend'); + dest.end(); + } // when the dest drains, it reduces the awaitDrain counter + // on the source. This would be more elegant with a .once() + // handler in flow(), but adding and removing repeatedly is + // too slow. + + + var ondrain = pipeOnDrain(src); + dest.on('drain', ondrain); + var cleanedUp = false; + + function cleanup() { + debug('cleanup'); // cleanup event handlers once the pipe is broken + + dest.removeListener('close', onclose); + dest.removeListener('finish', onfinish); + dest.removeListener('drain', ondrain); + dest.removeListener('error', onerror); + dest.removeListener('unpipe', onunpipe); + src.removeListener('end', onend); + src.removeListener('end', unpipe); + src.removeListener('data', ondata); + cleanedUp = true; // if the reader is waiting for a drain event from this + // specific writer, then it would cause it to never start + // flowing again. + // So, if this is awaiting a drain, then we just call it now. + // If we don't know, then assume that we are waiting for one. + + if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); + } + + src.on('data', ondata); + + function ondata(chunk) { + debug('ondata'); + var ret = dest.write(chunk); + debug('dest.write', ret); + + if (ret === false) { + // If the user unpiped during `dest.write()`, it is possible + // to get stuck in a permanently paused state if that write + // also returned false. + // => Check whether `dest` is still a piping destination. + if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { + debug('false write response, pause', state.awaitDrain); + state.awaitDrain++; + } + + src.pause(); + } + } // if the dest has an error, then stop piping into it. + // however, don't suppress the throwing behavior for this. + + + function onerror(er) { + debug('onerror', er); + unpipe(); + dest.removeListener('error', onerror); + if (EElistenerCount(dest, 'error') === 0) errorOrDestroy(dest, er); + } // Make sure our error handler is attached before userland ones. + + + prependListener(dest, 'error', onerror); // Both close and finish should trigger unpipe, but only once. 
+ + function onclose() { + dest.removeListener('finish', onfinish); + unpipe(); + } + + dest.once('close', onclose); + + function onfinish() { + debug('onfinish'); + dest.removeListener('close', onclose); + unpipe(); + } + + dest.once('finish', onfinish); + + function unpipe() { + debug('unpipe'); + src.unpipe(dest); + } // tell the dest that it's being piped to + + + dest.emit('pipe', src); // start the flow if it hasn't been started already. + + if (!state.flowing) { + debug('pipe resume'); + src.resume(); + } + + return dest; +}; + +function pipeOnDrain(src) { + return function pipeOnDrainFunctionResult() { + var state = src._readableState; + debug('pipeOnDrain', state.awaitDrain); + if (state.awaitDrain) state.awaitDrain--; + + if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { + state.flowing = true; + flow(src); + } + }; +} + +Readable.prototype.unpipe = function (dest) { + var state = this._readableState; + var unpipeInfo = { + hasUnpiped: false + }; // if we're not piping anywhere, then do nothing. + + if (state.pipesCount === 0) return this; // just one destination. most common case. + + if (state.pipesCount === 1) { + // passed in one, but it's not the right one. + if (dest && dest !== state.pipes) return this; + if (!dest) dest = state.pipes; // got a match. + + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + if (dest) dest.emit('unpipe', this, unpipeInfo); + return this; + } // slow case. multiple pipe destinations. + + + if (!dest) { + // remove all. + var dests = state.pipes; + var len = state.pipesCount; + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + + for (var i = 0; i < len; i++) { + dests[i].emit('unpipe', this, { + hasUnpiped: false + }); + } + + return this; + } // try to find the right one. + + + var index = indexOf(state.pipes, dest); + if (index === -1) return this; + state.pipes.splice(index, 1); + state.pipesCount -= 1; + if (state.pipesCount === 1) state.pipes = state.pipes[0]; + dest.emit('unpipe', this, unpipeInfo); + return this; +}; // set up data events if they are asked for +// Ensure readable listeners eventually get something + + +Readable.prototype.on = function (ev, fn) { + var res = Stream.prototype.on.call(this, ev, fn); + var state = this._readableState; + + if (ev === 'data') { + // update readableListening so that resume() may be a no-op + // a few lines down. This is needed to support once('readable'). + state.readableListening = this.listenerCount('readable') > 0; // Try start flowing on next tick if stream isn't explicitly paused + + if (state.flowing !== false) this.resume(); + } else if (ev === 'readable') { + if (!state.endEmitted && !state.readableListening) { + state.readableListening = state.needReadable = true; + state.flowing = false; + state.emittedReadable = false; + debug('on readable', state.length, state.reading); + + if (state.length) { + emitReadable(this); + } else if (!state.reading) { + process.nextTick(nReadingNextTick, this); + } + } + } + + return res; +}; + +Readable.prototype.addListener = Readable.prototype.on; + +Readable.prototype.removeListener = function (ev, fn) { + var res = Stream.prototype.removeListener.call(this, ev, fn); + + if (ev === 'readable') { + // We need to check if there is someone still listening to + // readable and reset the state. However this needs to happen + // after readable has been emitted but before I/O (nextTick) to + // support once('readable', fn) cycles. 
This means that calling + // resume within the same tick will have no + // effect. + process.nextTick(updateReadableListening, this); + } + + return res; +}; + +Readable.prototype.removeAllListeners = function (ev) { + var res = Stream.prototype.removeAllListeners.apply(this, arguments); + + if (ev === 'readable' || ev === undefined) { + // We need to check if there is someone still listening to + // readable and reset the state. However this needs to happen + // after readable has been emitted but before I/O (nextTick) to + // support once('readable', fn) cycles. This means that calling + // resume within the same tick will have no + // effect. + process.nextTick(updateReadableListening, this); + } + + return res; +}; + +function updateReadableListening(self) { + var state = self._readableState; + state.readableListening = self.listenerCount('readable') > 0; + + if (state.resumeScheduled && !state.paused) { + // flowing needs to be set to true now, otherwise + // the upcoming resume will not flow. + state.flowing = true; // crude way to check if we should resume + } else if (self.listenerCount('data') > 0) { + self.resume(); + } +} + +function nReadingNextTick(self) { + debug('readable nexttick read 0'); + self.read(0); +} // pause() and resume() are remnants of the legacy readable stream API +// If the user uses them, then switch into old mode. + + +Readable.prototype.resume = function () { + var state = this._readableState; + + if (!state.flowing) { + debug('resume'); // we flow only if there is no one listening + // for readable, but we still have to call + // resume() + + state.flowing = !state.readableListening; + resume(this, state); + } + + state.paused = false; + return this; +}; + +function resume(stream, state) { + if (!state.resumeScheduled) { + state.resumeScheduled = true; + process.nextTick(resume_, stream, state); + } +} + +function resume_(stream, state) { + debug('resume', state.reading); + + if (!state.reading) { + stream.read(0); + } + + state.resumeScheduled = false; + stream.emit('resume'); + flow(stream); + if (state.flowing && !state.reading) stream.read(0); +} + +Readable.prototype.pause = function () { + debug('call pause flowing=%j', this._readableState.flowing); + + if (this._readableState.flowing !== false) { + debug('pause'); + this._readableState.flowing = false; + this.emit('pause'); + } + + this._readableState.paused = true; + return this; +}; + +function flow(stream) { + var state = stream._readableState; + debug('flow', state.flowing); + + while (state.flowing && stream.read() !== null) { + ; + } +} // wrap an old-style stream as the async data source. +// This is *not* part of the readable stream interface. +// It is an ugly unfortunate mess of history. + + +Readable.prototype.wrap = function (stream) { + var _this = this; + + var state = this._readableState; + var paused = false; + stream.on('end', function () { + debug('wrapped end'); + + if (state.decoder && !state.ended) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) _this.push(chunk); + } + + _this.push(null); + }); + stream.on('data', function (chunk) { + debug('wrapped data'); + if (state.decoder) chunk = state.decoder.write(chunk); // don't skip over falsy values in objectMode + + if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; + + var ret = _this.push(chunk); + + if (!ret) { + paused = true; + stream.pause(); + } + }); // proxy all the other methods. 
+ // important when wrapping filters and duplexes. + + for (var i in stream) { + if (this[i] === undefined && typeof stream[i] === 'function') { + this[i] = function methodWrap(method) { + return function methodWrapReturnFunction() { + return stream[method].apply(stream, arguments); + }; + }(i); + } + } // proxy certain important events. + + + for (var n = 0; n < kProxyEvents.length; n++) { + stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n])); + } // when we try to consume some more bytes, simply unpause the + // underlying stream. + + + this._read = function (n) { + debug('wrapped _read', n); + + if (paused) { + paused = false; + stream.resume(); + } + }; + + return this; +}; + +if (typeof Symbol === 'function') { + Readable.prototype[Symbol.asyncIterator] = function () { + if (createReadableStreamAsyncIterator === undefined) { + createReadableStreamAsyncIterator = require('./internal/streams/async_iterator'); + } + + return createReadableStreamAsyncIterator(this); + }; +} + +Object.defineProperty(Readable.prototype, 'readableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.highWaterMark; + } +}); +Object.defineProperty(Readable.prototype, 'readableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState && this._readableState.buffer; + } +}); +Object.defineProperty(Readable.prototype, 'readableFlowing', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.flowing; + }, + set: function set(state) { + if (this._readableState) { + this._readableState.flowing = state; + } + } +}); // exposed for testing purposes only. + +Readable._fromList = fromList; +Object.defineProperty(Readable.prototype, 'readableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.length; + } +}); // Pluck off n bytes from an array of buffers. +// Length is the combined lengths of all the buffers in the list. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. + +function fromList(n, state) { + // nothing buffered + if (state.length === 0) return null; + var ret; + if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) { + // read it all, truncate the list + if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.first();else ret = state.buffer.concat(state.length); + state.buffer.clear(); + } else { + // read part of list + ret = state.buffer.consume(n, state.decoder); + } + return ret; +} + +function endReadable(stream) { + var state = stream._readableState; + debug('endReadable', state.endEmitted); + + if (!state.endEmitted) { + state.ended = true; + process.nextTick(endReadableNT, state, stream); + } +} + +function endReadableNT(state, stream) { + debug('endReadableNT', state.endEmitted, state.length); // Check that we didn't get one last unshift. 
+ + if (!state.endEmitted && state.length === 0) { + state.endEmitted = true; + stream.readable = false; + stream.emit('end'); + + if (state.autoDestroy) { + // In case of duplex streams we need a way to detect + // if the writable side is ready for autoDestroy as well + var wState = stream._writableState; + + if (!wState || wState.autoDestroy && wState.finished) { + stream.destroy(); + } + } + } +} + +if (typeof Symbol === 'function') { + Readable.from = function (iterable, opts) { + if (from === undefined) { + from = require('./internal/streams/from'); + } + + return from(Readable, iterable, opts); + }; +} + +function indexOf(xs, x) { + for (var i = 0, l = xs.length; i < l; i++) { + if (xs[i] === x) return i; + } + + return -1; +} \ No newline at end of file diff --git a/node_modules/readable-stream/lib/_stream_transform.js b/node_modules/readable-stream/lib/_stream_transform.js new file mode 100644 index 0000000..41a738c --- /dev/null +++ b/node_modules/readable-stream/lib/_stream_transform.js @@ -0,0 +1,201 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. +// a transform stream is a readable/writable stream where you do +// something with the data. Sometimes it's called a "filter", +// but that's not a great name for it, since that implies a thing where +// some bits pass through, and others are simply ignored. (That would +// be a valid example of a transform, of course.) +// +// While the output is causally related to the input, it's not a +// necessarily symmetric or synchronous transformation. For example, +// a zlib stream might take multiple plain-text writes(), and then +// emit a single compressed chunk some time in the future. +// +// Here's how this works: +// +// The Transform stream has all the aspects of the readable and writable +// stream classes. When you write(chunk), that calls _write(chunk,cb) +// internally, and returns false if there's a lot of pending writes +// buffered up. When you call read(), that calls _read(n) until +// there's enough pending readable data buffered up. +// +// In a transform stream, the written data is placed in a buffer. When +// _read(n) is called, it transforms the queued up data, calling the +// buffered _write cb's as it consumes chunks. 
If consuming a single +// written chunk would result in multiple output chunks, then the first +// outputted bit calls the readcb, and subsequent chunks just go into +// the read buffer, and will cause it to emit 'readable' if necessary. +// +// This way, back-pressure is actually determined by the reading side, +// since _read has to be called to start processing a new chunk. However, +// a pathological inflate type of transform can cause excessive buffering +// here. For example, imagine a stream where every byte of input is +// interpreted as an integer from 0-255, and then results in that many +// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in +// 1kb of data being output. In this case, you could write a very small +// amount of input, and end up with a very large amount of output. In +// such a pathological inflating mechanism, there'd be no way to tell +// the system to stop doing the transform. A single 4MB write could +// cause the system to run out of memory. +// +// However, even in such a pathological case, only a single written chunk +// would be consumed, and then the rest would wait (un-transformed) until +// the results of the previous transformed chunk were consumed. +'use strict'; + +module.exports = Transform; + +var _require$codes = require('../errors').codes, + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, + ERR_TRANSFORM_ALREADY_TRANSFORMING = _require$codes.ERR_TRANSFORM_ALREADY_TRANSFORMING, + ERR_TRANSFORM_WITH_LENGTH_0 = _require$codes.ERR_TRANSFORM_WITH_LENGTH_0; + +var Duplex = require('./_stream_duplex'); + +require('inherits')(Transform, Duplex); + +function afterTransform(er, data) { + var ts = this._transformState; + ts.transforming = false; + var cb = ts.writecb; + + if (cb === null) { + return this.emit('error', new ERR_MULTIPLE_CALLBACK()); + } + + ts.writechunk = null; + ts.writecb = null; + if (data != null) // single equals check for both `null` and `undefined` + this.push(data); + cb(er); + var rs = this._readableState; + rs.reading = false; + + if (rs.needReadable || rs.length < rs.highWaterMark) { + this._read(rs.highWaterMark); + } +} + +function Transform(options) { + if (!(this instanceof Transform)) return new Transform(options); + Duplex.call(this, options); + this._transformState = { + afterTransform: afterTransform.bind(this), + needTransform: false, + transforming: false, + writecb: null, + writechunk: null, + writeencoding: null + }; // start out asking for a readable event once data is transformed. + + this._readableState.needReadable = true; // we have implemented the _read method, and done the other things + // that Readable wants before the first _read call, so unset the + // sync guard flag. + + this._readableState.sync = false; + + if (options) { + if (typeof options.transform === 'function') this._transform = options.transform; + if (typeof options.flush === 'function') this._flush = options.flush; + } // When the writable side finishes, then flush out anything remaining. 
+ + + this.on('prefinish', prefinish); +} + +function prefinish() { + var _this = this; + + if (typeof this._flush === 'function' && !this._readableState.destroyed) { + this._flush(function (er, data) { + done(_this, er, data); + }); + } else { + done(this, null, null); + } +} + +Transform.prototype.push = function (chunk, encoding) { + this._transformState.needTransform = false; + return Duplex.prototype.push.call(this, chunk, encoding); +}; // This is the part where you do stuff! +// override this function in implementation classes. +// 'chunk' is an input chunk. +// +// Call `push(newChunk)` to pass along transformed output +// to the readable side. You may call 'push' zero or more times. +// +// Call `cb(err)` when you are done with this chunk. If you pass +// an error, then that'll put the hurt on the whole operation. If you +// never call cb(), then you'll never get another chunk. + + +Transform.prototype._transform = function (chunk, encoding, cb) { + cb(new ERR_METHOD_NOT_IMPLEMENTED('_transform()')); +}; + +Transform.prototype._write = function (chunk, encoding, cb) { + var ts = this._transformState; + ts.writecb = cb; + ts.writechunk = chunk; + ts.writeencoding = encoding; + + if (!ts.transforming) { + var rs = this._readableState; + if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); + } +}; // Doesn't matter what the args are here. +// _transform does all the work. +// That we got here means that the readable side wants more data. + + +Transform.prototype._read = function (n) { + var ts = this._transformState; + + if (ts.writechunk !== null && !ts.transforming) { + ts.transforming = true; + + this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); + } else { + // mark that we need a transform, so that any data that comes in + // will get processed, now that we've asked for it. + ts.needTransform = true; + } +}; + +Transform.prototype._destroy = function (err, cb) { + Duplex.prototype._destroy.call(this, err, function (err2) { + cb(err2); + }); +}; + +function done(stream, er, data) { + if (er) return stream.emit('error', er); + if (data != null) // single equals check for both `null` and `undefined` + stream.push(data); // TODO(BridgeAR): Write a test for these two error cases + // if there's nothing in the write buffer, then that means + // that nothing more will ever be provided + + if (stream._writableState.length) throw new ERR_TRANSFORM_WITH_LENGTH_0(); + if (stream._transformState.transforming) throw new ERR_TRANSFORM_ALREADY_TRANSFORMING(); + return stream.push(null); +} \ No newline at end of file diff --git a/node_modules/readable-stream/lib/_stream_writable.js b/node_modules/readable-stream/lib/_stream_writable.js new file mode 100644 index 0000000..a2634d7 --- /dev/null +++ b/node_modules/readable-stream/lib/_stream_writable.js @@ -0,0 +1,697 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. 
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. +// A bit simpler than readable streams. +// Implement an async ._write(chunk, encoding, cb), and it'll handle all +// the drain event emission and buffering. +'use strict'; + +module.exports = Writable; +/* */ + +function WriteReq(chunk, encoding, cb) { + this.chunk = chunk; + this.encoding = encoding; + this.callback = cb; + this.next = null; +} // It seems a linked list but it is not +// there will be only 2 of these for each stream + + +function CorkedRequest(state) { + var _this = this; + + this.next = null; + this.entry = null; + + this.finish = function () { + onCorkedFinish(_this, state); + }; +} +/* */ + +/**/ + + +var Duplex; +/**/ + +Writable.WritableState = WritableState; +/**/ + +var internalUtil = { + deprecate: require('util-deprecate') +}; +/**/ + +/**/ + +var Stream = require('./internal/streams/stream'); +/**/ + + +var Buffer = require('buffer').Buffer; + +var OurUint8Array = global.Uint8Array || function () {}; + +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} + +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} + +var destroyImpl = require('./internal/streams/destroy'); + +var _require = require('./internal/streams/state'), + getHighWaterMark = _require.getHighWaterMark; + +var _require$codes = require('../errors').codes, + ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, + ERR_STREAM_CANNOT_PIPE = _require$codes.ERR_STREAM_CANNOT_PIPE, + ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED, + ERR_STREAM_NULL_VALUES = _require$codes.ERR_STREAM_NULL_VALUES, + ERR_STREAM_WRITE_AFTER_END = _require$codes.ERR_STREAM_WRITE_AFTER_END, + ERR_UNKNOWN_ENCODING = _require$codes.ERR_UNKNOWN_ENCODING; + +var errorOrDestroy = destroyImpl.errorOrDestroy; + +require('inherits')(Writable, Stream); + +function nop() {} + +function WritableState(options, stream, isDuplex) { + Duplex = Duplex || require('./_stream_duplex'); + options = options || {}; // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream, + // e.g. options.readableObjectMode vs. options.writableObjectMode, etc. + + if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; // object stream flag to indicate whether or not this stream + // contains buffers or objects. + + this.objectMode = !!options.objectMode; + if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; // the point at which write() starts returning false + // Note: 0 is a valid value, means that we always return false if + // the entire buffer is not flushed immediately on write() + + this.highWaterMark = getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex); // if _final has been called + + this.finalCalled = false; // drain event flag. 
+ + this.needDrain = false; // at the start of calling end() + + this.ending = false; // when end() has been called, and returned + + this.ended = false; // when 'finish' is emitted + + this.finished = false; // has it been destroyed + + this.destroyed = false; // should we decode strings into buffers before passing to _write? + // this is here so that some node-core streams can optimize string + // handling at a lower level. + + var noDecode = options.decodeStrings === false; + this.decodeStrings = !noDecode; // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + + this.defaultEncoding = options.defaultEncoding || 'utf8'; // not an actual buffer we keep track of, but a measurement + // of how much we're waiting to get pushed to some underlying + // socket or file. + + this.length = 0; // a flag to see when we're in the middle of a write. + + this.writing = false; // when true all writes will be buffered until .uncork() call + + this.corked = 0; // a flag to be able to tell if the onwrite cb is called immediately, + // or on a later tick. We set this to true at first, because any + // actions that shouldn't happen until "later" should generally also + // not happen before the first write call. + + this.sync = true; // a flag to know if we're processing previously buffered items, which + // may call the _write() callback in the same tick, so that we don't + // end up in an overlapped onwrite situation. + + this.bufferProcessing = false; // the callback that's passed to _write(chunk,cb) + + this.onwrite = function (er) { + onwrite(stream, er); + }; // the callback that the user supplies to write(chunk,encoding,cb) + + + this.writecb = null; // the amount that is being written when _write is called. + + this.writelen = 0; + this.bufferedRequest = null; + this.lastBufferedRequest = null; // number of pending user-supplied write callbacks + // this must be 0 before 'finish' can be emitted + + this.pendingcb = 0; // emit prefinish if the only thing we're waiting for is _write cbs + // This is relevant for synchronous Transform streams + + this.prefinished = false; // True if the error was already emitted and should not be thrown again + + this.errorEmitted = false; // Should close be emitted on destroy. Defaults to true. + + this.emitClose = options.emitClose !== false; // Should .destroy() be called after 'finish' (and potentially 'end') + + this.autoDestroy = !!options.autoDestroy; // count buffered requests + + this.bufferedRequestCount = 0; // allocate the first CorkedRequest, there is always + // one allocated and free to use, and we maintain at most two + + this.corkedRequestsFree = new CorkedRequest(this); +} + +WritableState.prototype.getBuffer = function getBuffer() { + var current = this.bufferedRequest; + var out = []; + + while (current) { + out.push(current); + current = current.next; + } + + return out; +}; + +(function () { + try { + Object.defineProperty(WritableState.prototype, 'buffer', { + get: internalUtil.deprecate(function writableStateBufferGetter() { + return this.getBuffer(); + }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003') + }); + } catch (_) {} +})(); // Test _writableState for inheritance to account for Duplex streams, +// whose prototype chain only points to Readable. 
+ + +var realHasInstance; + +if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') { + realHasInstance = Function.prototype[Symbol.hasInstance]; + Object.defineProperty(Writable, Symbol.hasInstance, { + value: function value(object) { + if (realHasInstance.call(this, object)) return true; + if (this !== Writable) return false; + return object && object._writableState instanceof WritableState; + } + }); +} else { + realHasInstance = function realHasInstance(object) { + return object instanceof this; + }; +} + +function Writable(options) { + Duplex = Duplex || require('./_stream_duplex'); // Writable ctor is applied to Duplexes, too. + // `realHasInstance` is necessary because using plain `instanceof` + // would return false, as no `_writableState` property is attached. + // Trying to use the custom `instanceof` for Writable here will also break the + // Node.js LazyTransform implementation, which has a non-trivial getter for + // `_writableState` that would lead to infinite recursion. + // Checking for a Stream.Duplex instance is faster here instead of inside + // the WritableState constructor, at least with V8 6.5 + + var isDuplex = this instanceof Duplex; + if (!isDuplex && !realHasInstance.call(Writable, this)) return new Writable(options); + this._writableState = new WritableState(options, this, isDuplex); // legacy. + + this.writable = true; + + if (options) { + if (typeof options.write === 'function') this._write = options.write; + if (typeof options.writev === 'function') this._writev = options.writev; + if (typeof options.destroy === 'function') this._destroy = options.destroy; + if (typeof options.final === 'function') this._final = options.final; + } + + Stream.call(this); +} // Otherwise people can pipe Writable streams, which is just wrong. + + +Writable.prototype.pipe = function () { + errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE()); +}; + +function writeAfterEnd(stream, cb) { + var er = new ERR_STREAM_WRITE_AFTER_END(); // TODO: defer error events consistently everywhere, not just the cb + + errorOrDestroy(stream, er); + process.nextTick(cb, er); +} // Checks that a user-supplied chunk is valid, especially for the particular +// mode the stream is in. Currently this means that `null` is never accepted +// and undefined/non-string values are only allowed in object mode. 
+ + +function validChunk(stream, state, chunk, cb) { + var er; + + if (chunk === null) { + er = new ERR_STREAM_NULL_VALUES(); + } else if (typeof chunk !== 'string' && !state.objectMode) { + er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer'], chunk); + } + + if (er) { + errorOrDestroy(stream, er); + process.nextTick(cb, er); + return false; + } + + return true; +} + +Writable.prototype.write = function (chunk, encoding, cb) { + var state = this._writableState; + var ret = false; + + var isBuf = !state.objectMode && _isUint8Array(chunk); + + if (isBuf && !Buffer.isBuffer(chunk)) { + chunk = _uint8ArrayToBuffer(chunk); + } + + if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + + if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; + if (typeof cb !== 'function') cb = nop; + if (state.ending) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) { + state.pendingcb++; + ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb); + } + return ret; +}; + +Writable.prototype.cork = function () { + this._writableState.corked++; +}; + +Writable.prototype.uncork = function () { + var state = this._writableState; + + if (state.corked) { + state.corked--; + if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); + } +}; + +Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { + // node::ParseEncoding() requires lower case. + if (typeof encoding === 'string') encoding = encoding.toLowerCase(); + if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new ERR_UNKNOWN_ENCODING(encoding); + this._writableState.defaultEncoding = encoding; + return this; +}; + +Object.defineProperty(Writable.prototype, 'writableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState && this._writableState.getBuffer(); + } +}); + +function decodeChunk(state, chunk, encoding) { + if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { + chunk = Buffer.from(chunk, encoding); + } + + return chunk; +} + +Object.defineProperty(Writable.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.highWaterMark; + } +}); // if we're already writing something, then just put this +// in the queue, and wait our turn. Otherwise, call _write +// If we return false, then we need a drain event, so set that flag. + +function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { + if (!isBuf) { + var newChunk = decodeChunk(state, chunk, encoding); + + if (chunk !== newChunk) { + isBuf = true; + encoding = 'buffer'; + chunk = newChunk; + } + } + + var len = state.objectMode ? 1 : chunk.length; + state.length += len; + var ret = state.length < state.highWaterMark; // we must ensure that previous needDrain will not be reset to false. 
+ + if (!ret) state.needDrain = true; + + if (state.writing || state.corked) { + var last = state.lastBufferedRequest; + state.lastBufferedRequest = { + chunk: chunk, + encoding: encoding, + isBuf: isBuf, + callback: cb, + next: null + }; + + if (last) { + last.next = state.lastBufferedRequest; + } else { + state.bufferedRequest = state.lastBufferedRequest; + } + + state.bufferedRequestCount += 1; + } else { + doWrite(stream, state, false, len, chunk, encoding, cb); + } + + return ret; +} + +function doWrite(stream, state, writev, len, chunk, encoding, cb) { + state.writelen = len; + state.writecb = cb; + state.writing = true; + state.sync = true; + if (state.destroyed) state.onwrite(new ERR_STREAM_DESTROYED('write'));else if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); + state.sync = false; +} + +function onwriteError(stream, state, sync, er, cb) { + --state.pendingcb; + + if (sync) { + // defer the callback if we are being called synchronously + // to avoid piling up things on the stack + process.nextTick(cb, er); // this can emit finish, and it will always happen + // after error + + process.nextTick(finishMaybe, stream, state); + stream._writableState.errorEmitted = true; + errorOrDestroy(stream, er); + } else { + // the caller expect this to happen before if + // it is async + cb(er); + stream._writableState.errorEmitted = true; + errorOrDestroy(stream, er); // this can emit finish, but finish must + // always follow error + + finishMaybe(stream, state); + } +} + +function onwriteStateUpdate(state) { + state.writing = false; + state.writecb = null; + state.length -= state.writelen; + state.writelen = 0; +} + +function onwrite(stream, er) { + var state = stream._writableState; + var sync = state.sync; + var cb = state.writecb; + if (typeof cb !== 'function') throw new ERR_MULTIPLE_CALLBACK(); + onwriteStateUpdate(state); + if (er) onwriteError(stream, state, sync, er, cb);else { + // Check if we're actually ready to finish, but don't emit yet + var finished = needFinish(state) || stream.destroyed; + + if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { + clearBuffer(stream, state); + } + + if (sync) { + process.nextTick(afterWrite, stream, state, finished, cb); + } else { + afterWrite(stream, state, finished, cb); + } + } +} + +function afterWrite(stream, state, finished, cb) { + if (!finished) onwriteDrain(stream, state); + state.pendingcb--; + cb(); + finishMaybe(stream, state); +} // Must force callback to be called on nextTick, so that we don't +// emit 'drain' before the write() consumer gets the 'false' return +// value, and has a chance to attach a 'drain' listener. 
+ + +function onwriteDrain(stream, state) { + if (state.length === 0 && state.needDrain) { + state.needDrain = false; + stream.emit('drain'); + } +} // if there's something in the buffer waiting, then process it + + +function clearBuffer(stream, state) { + state.bufferProcessing = true; + var entry = state.bufferedRequest; + + if (stream._writev && entry && entry.next) { + // Fast case, write everything using _writev() + var l = state.bufferedRequestCount; + var buffer = new Array(l); + var holder = state.corkedRequestsFree; + holder.entry = entry; + var count = 0; + var allBuffers = true; + + while (entry) { + buffer[count] = entry; + if (!entry.isBuf) allBuffers = false; + entry = entry.next; + count += 1; + } + + buffer.allBuffers = allBuffers; + doWrite(stream, state, true, state.length, buffer, '', holder.finish); // doWrite is almost always async, defer these to save a bit of time + // as the hot path ends with doWrite + + state.pendingcb++; + state.lastBufferedRequest = null; + + if (holder.next) { + state.corkedRequestsFree = holder.next; + holder.next = null; + } else { + state.corkedRequestsFree = new CorkedRequest(state); + } + + state.bufferedRequestCount = 0; + } else { + // Slow case, write chunks one-by-one + while (entry) { + var chunk = entry.chunk; + var encoding = entry.encoding; + var cb = entry.callback; + var len = state.objectMode ? 1 : chunk.length; + doWrite(stream, state, false, len, chunk, encoding, cb); + entry = entry.next; + state.bufferedRequestCount--; // if we didn't call the onwrite immediately, then + // it means that we need to wait until it does. + // also, that means that the chunk and cb are currently + // being processed, so move the buffer counter past them. + + if (state.writing) { + break; + } + } + + if (entry === null) state.lastBufferedRequest = null; + } + + state.bufferedRequest = entry; + state.bufferProcessing = false; +} + +Writable.prototype._write = function (chunk, encoding, cb) { + cb(new ERR_METHOD_NOT_IMPLEMENTED('_write()')); +}; + +Writable.prototype._writev = null; + +Writable.prototype.end = function (chunk, encoding, cb) { + var state = this._writableState; + + if (typeof chunk === 'function') { + cb = chunk; + chunk = null; + encoding = null; + } else if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + + if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); // .end() fully uncorks + + if (state.corked) { + state.corked = 1; + this.uncork(); + } // ignore unnecessary end() calls. 
+ + + if (!state.ending) endWritable(this, state, cb); + return this; +}; + +Object.defineProperty(Writable.prototype, 'writableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.length; + } +}); + +function needFinish(state) { + return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; +} + +function callFinal(stream, state) { + stream._final(function (err) { + state.pendingcb--; + + if (err) { + errorOrDestroy(stream, err); + } + + state.prefinished = true; + stream.emit('prefinish'); + finishMaybe(stream, state); + }); +} + +function prefinish(stream, state) { + if (!state.prefinished && !state.finalCalled) { + if (typeof stream._final === 'function' && !state.destroyed) { + state.pendingcb++; + state.finalCalled = true; + process.nextTick(callFinal, stream, state); + } else { + state.prefinished = true; + stream.emit('prefinish'); + } + } +} + +function finishMaybe(stream, state) { + var need = needFinish(state); + + if (need) { + prefinish(stream, state); + + if (state.pendingcb === 0) { + state.finished = true; + stream.emit('finish'); + + if (state.autoDestroy) { + // In case of duplex streams we need a way to detect + // if the readable side is ready for autoDestroy as well + var rState = stream._readableState; + + if (!rState || rState.autoDestroy && rState.endEmitted) { + stream.destroy(); + } + } + } + } + + return need; +} + +function endWritable(stream, state, cb) { + state.ending = true; + finishMaybe(stream, state); + + if (cb) { + if (state.finished) process.nextTick(cb);else stream.once('finish', cb); + } + + state.ended = true; + stream.writable = false; +} + +function onCorkedFinish(corkReq, state, err) { + var entry = corkReq.entry; + corkReq.entry = null; + + while (entry) { + var cb = entry.callback; + state.pendingcb--; + cb(err); + entry = entry.next; + } // reuse the free corkReq. 
+ + + state.corkedRequestsFree.next = corkReq; +} + +Object.defineProperty(Writable.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + if (this._writableState === undefined) { + return false; + } + + return this._writableState.destroyed; + }, + set: function set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._writableState) { + return; + } // backward compatibility, the user is explicitly + // managing destroyed + + + this._writableState.destroyed = value; + } +}); +Writable.prototype.destroy = destroyImpl.destroy; +Writable.prototype._undestroy = destroyImpl.undestroy; + +Writable.prototype._destroy = function (err, cb) { + cb(err); +}; \ No newline at end of file diff --git a/node_modules/readable-stream/lib/internal/streams/async_iterator.js b/node_modules/readable-stream/lib/internal/streams/async_iterator.js new file mode 100644 index 0000000..9fb615a --- /dev/null +++ b/node_modules/readable-stream/lib/internal/streams/async_iterator.js @@ -0,0 +1,207 @@ +'use strict'; + +var _Object$setPrototypeO; + +function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } + +var finished = require('./end-of-stream'); + +var kLastResolve = Symbol('lastResolve'); +var kLastReject = Symbol('lastReject'); +var kError = Symbol('error'); +var kEnded = Symbol('ended'); +var kLastPromise = Symbol('lastPromise'); +var kHandlePromise = Symbol('handlePromise'); +var kStream = Symbol('stream'); + +function createIterResult(value, done) { + return { + value: value, + done: done + }; +} + +function readAndResolve(iter) { + var resolve = iter[kLastResolve]; + + if (resolve !== null) { + var data = iter[kStream].read(); // we defer if data is null + // we can be expecting either 'end' or + // 'error' + + if (data !== null) { + iter[kLastPromise] = null; + iter[kLastResolve] = null; + iter[kLastReject] = null; + resolve(createIterResult(data, false)); + } + } +} + +function onReadable(iter) { + // we wait for the next tick, because it might + // emit an error with process.nextTick + process.nextTick(readAndResolve, iter); +} + +function wrapForNext(lastPromise, iter) { + return function (resolve, reject) { + lastPromise.then(function () { + if (iter[kEnded]) { + resolve(createIterResult(undefined, true)); + return; + } + + iter[kHandlePromise](resolve, reject); + }, reject); + }; +} + +var AsyncIteratorPrototype = Object.getPrototypeOf(function () {}); +var ReadableStreamAsyncIteratorPrototype = Object.setPrototypeOf((_Object$setPrototypeO = { + get stream() { + return this[kStream]; + }, + + next: function next() { + var _this = this; + + // if we have detected an error in the meanwhile + // reject straight away + var error = this[kError]; + + if (error !== null) { + return Promise.reject(error); + } + + if (this[kEnded]) { + return Promise.resolve(createIterResult(undefined, true)); + } + + if (this[kStream].destroyed) { + // We need to defer via nextTick because if .destroy(err) is + // called, the error will be emitted via nextTick, and + // we cannot guarantee that there is no error lingering around + // waiting to be emitted. 
+ return new Promise(function (resolve, reject) { + process.nextTick(function () { + if (_this[kError]) { + reject(_this[kError]); + } else { + resolve(createIterResult(undefined, true)); + } + }); + }); + } // if we have multiple next() calls + // we will wait for the previous Promise to finish + // this logic is optimized to support for await loops, + // where next() is only called once at a time + + + var lastPromise = this[kLastPromise]; + var promise; + + if (lastPromise) { + promise = new Promise(wrapForNext(lastPromise, this)); + } else { + // fast path needed to support multiple this.push() + // without triggering the next() queue + var data = this[kStream].read(); + + if (data !== null) { + return Promise.resolve(createIterResult(data, false)); + } + + promise = new Promise(this[kHandlePromise]); + } + + this[kLastPromise] = promise; + return promise; + } +}, _defineProperty(_Object$setPrototypeO, Symbol.asyncIterator, function () { + return this; +}), _defineProperty(_Object$setPrototypeO, "return", function _return() { + var _this2 = this; + + // destroy(err, cb) is a private API + // we can guarantee we have that here, because we control the + // Readable class this is attached to + return new Promise(function (resolve, reject) { + _this2[kStream].destroy(null, function (err) { + if (err) { + reject(err); + return; + } + + resolve(createIterResult(undefined, true)); + }); + }); +}), _Object$setPrototypeO), AsyncIteratorPrototype); + +var createReadableStreamAsyncIterator = function createReadableStreamAsyncIterator(stream) { + var _Object$create; + + var iterator = Object.create(ReadableStreamAsyncIteratorPrototype, (_Object$create = {}, _defineProperty(_Object$create, kStream, { + value: stream, + writable: true + }), _defineProperty(_Object$create, kLastResolve, { + value: null, + writable: true + }), _defineProperty(_Object$create, kLastReject, { + value: null, + writable: true + }), _defineProperty(_Object$create, kError, { + value: null, + writable: true + }), _defineProperty(_Object$create, kEnded, { + value: stream._readableState.endEmitted, + writable: true + }), _defineProperty(_Object$create, kHandlePromise, { + value: function value(resolve, reject) { + var data = iterator[kStream].read(); + + if (data) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + resolve(createIterResult(data, false)); + } else { + iterator[kLastResolve] = resolve; + iterator[kLastReject] = reject; + } + }, + writable: true + }), _Object$create)); + iterator[kLastPromise] = null; + finished(stream, function (err) { + if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') { + var reject = iterator[kLastReject]; // reject if we are waiting for data in the Promise + // returned by next() and store the error + + if (reject !== null) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + reject(err); + } + + iterator[kError] = err; + return; + } + + var resolve = iterator[kLastResolve]; + + if (resolve !== null) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + resolve(createIterResult(undefined, true)); + } + + iterator[kEnded] = true; + }); + stream.on('readable', onReadable.bind(null, iterator)); + return iterator; +}; + +module.exports = createReadableStreamAsyncIterator; \ No newline at end of file diff --git a/node_modules/readable-stream/lib/internal/streams/buffer_list.js b/node_modules/readable-stream/lib/internal/streams/buffer_list.js new 
file mode 100644 index 0000000..cdea425 --- /dev/null +++ b/node_modules/readable-stream/lib/internal/streams/buffer_list.js @@ -0,0 +1,210 @@ +'use strict'; + +function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); keys.push.apply(keys, symbols); } return keys; } + +function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { _defineProperty(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; } + +function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + +function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } + +function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; } + +var _require = require('buffer'), + Buffer = _require.Buffer; + +var _require2 = require('util'), + inspect = _require2.inspect; + +var custom = inspect && inspect.custom || 'inspect'; + +function copyBuffer(src, target, offset) { + Buffer.prototype.copy.call(src, target, offset); +} + +module.exports = +/*#__PURE__*/ +function () { + function BufferList() { + _classCallCheck(this, BufferList); + + this.head = null; + this.tail = null; + this.length = 0; + } + + _createClass(BufferList, [{ + key: "push", + value: function push(v) { + var entry = { + data: v, + next: null + }; + if (this.length > 0) this.tail.next = entry;else this.head = entry; + this.tail = entry; + ++this.length; + } + }, { + key: "unshift", + value: function unshift(v) { + var entry = { + data: v, + next: this.head + }; + if (this.length === 0) this.tail = entry; + this.head = entry; + ++this.length; + } + }, { + key: "shift", + value: function shift() { + if (this.length === 0) return; + var ret = this.head.data; + if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next; + --this.length; + return ret; + } + }, { + key: "clear", + value: function clear() { + this.head = this.tail = null; + this.length = 0; + } + }, { + key: "join", + value: function join(s) { + if (this.length === 0) return ''; + var p = this.head; + var ret = '' + p.data; + + while (p = p.next) { + ret += s + p.data; + } + + return ret; + } + }, { + key: "concat", + value: function concat(n) { + if (this.length === 0) return Buffer.alloc(0); + var ret = Buffer.allocUnsafe(n >>> 0); + var p = this.head; + var 
i = 0; + + while (p) { + copyBuffer(p.data, ret, i); + i += p.data.length; + p = p.next; + } + + return ret; + } // Consumes a specified amount of bytes or characters from the buffered data. + + }, { + key: "consume", + value: function consume(n, hasStrings) { + var ret; + + if (n < this.head.data.length) { + // `slice` is the same for buffers and strings. + ret = this.head.data.slice(0, n); + this.head.data = this.head.data.slice(n); + } else if (n === this.head.data.length) { + // First chunk is a perfect match. + ret = this.shift(); + } else { + // Result spans more than one buffer. + ret = hasStrings ? this._getString(n) : this._getBuffer(n); + } + + return ret; + } + }, { + key: "first", + value: function first() { + return this.head.data; + } // Consumes a specified amount of characters from the buffered data. + + }, { + key: "_getString", + value: function _getString(n) { + var p = this.head; + var c = 1; + var ret = p.data; + n -= ret.length; + + while (p = p.next) { + var str = p.data; + var nb = n > str.length ? str.length : n; + if (nb === str.length) ret += str;else ret += str.slice(0, n); + n -= nb; + + if (n === 0) { + if (nb === str.length) { + ++c; + if (p.next) this.head = p.next;else this.head = this.tail = null; + } else { + this.head = p; + p.data = str.slice(nb); + } + + break; + } + + ++c; + } + + this.length -= c; + return ret; + } // Consumes a specified amount of bytes from the buffered data. + + }, { + key: "_getBuffer", + value: function _getBuffer(n) { + var ret = Buffer.allocUnsafe(n); + var p = this.head; + var c = 1; + p.data.copy(ret); + n -= p.data.length; + + while (p = p.next) { + var buf = p.data; + var nb = n > buf.length ? buf.length : n; + buf.copy(ret, ret.length - n, 0, nb); + n -= nb; + + if (n === 0) { + if (nb === buf.length) { + ++c; + if (p.next) this.head = p.next;else this.head = this.tail = null; + } else { + this.head = p; + p.data = buf.slice(nb); + } + + break; + } + + ++c; + } + + this.length -= c; + return ret; + } // Make sure the linked list only shows the minimal necessary information. + + }, { + key: custom, + value: function value(_, options) { + return inspect(this, _objectSpread({}, options, { + // Only inspect one level. + depth: 0, + // It should not recurse. 
+ customInspect: false + })); + } + }]); + + return BufferList; +}(); \ No newline at end of file diff --git a/node_modules/readable-stream/lib/internal/streams/destroy.js b/node_modules/readable-stream/lib/internal/streams/destroy.js new file mode 100644 index 0000000..3268a16 --- /dev/null +++ b/node_modules/readable-stream/lib/internal/streams/destroy.js @@ -0,0 +1,105 @@ +'use strict'; // undocumented cb() API, needed for core, not for public API + +function destroy(err, cb) { + var _this = this; + + var readableDestroyed = this._readableState && this._readableState.destroyed; + var writableDestroyed = this._writableState && this._writableState.destroyed; + + if (readableDestroyed || writableDestroyed) { + if (cb) { + cb(err); + } else if (err) { + if (!this._writableState) { + process.nextTick(emitErrorNT, this, err); + } else if (!this._writableState.errorEmitted) { + this._writableState.errorEmitted = true; + process.nextTick(emitErrorNT, this, err); + } + } + + return this; + } // we set destroyed to true before firing error callbacks in order + // to make it re-entrance safe in case destroy() is called within callbacks + + + if (this._readableState) { + this._readableState.destroyed = true; + } // if this is a duplex stream mark the writable part as destroyed as well + + + if (this._writableState) { + this._writableState.destroyed = true; + } + + this._destroy(err || null, function (err) { + if (!cb && err) { + if (!_this._writableState) { + process.nextTick(emitErrorAndCloseNT, _this, err); + } else if (!_this._writableState.errorEmitted) { + _this._writableState.errorEmitted = true; + process.nextTick(emitErrorAndCloseNT, _this, err); + } else { + process.nextTick(emitCloseNT, _this); + } + } else if (cb) { + process.nextTick(emitCloseNT, _this); + cb(err); + } else { + process.nextTick(emitCloseNT, _this); + } + }); + + return this; +} + +function emitErrorAndCloseNT(self, err) { + emitErrorNT(self, err); + emitCloseNT(self); +} + +function emitCloseNT(self) { + if (self._writableState && !self._writableState.emitClose) return; + if (self._readableState && !self._readableState.emitClose) return; + self.emit('close'); +} + +function undestroy() { + if (this._readableState) { + this._readableState.destroyed = false; + this._readableState.reading = false; + this._readableState.ended = false; + this._readableState.endEmitted = false; + } + + if (this._writableState) { + this._writableState.destroyed = false; + this._writableState.ended = false; + this._writableState.ending = false; + this._writableState.finalCalled = false; + this._writableState.prefinished = false; + this._writableState.finished = false; + this._writableState.errorEmitted = false; + } +} + +function emitErrorNT(self, err) { + self.emit('error', err); +} + +function errorOrDestroy(stream, err) { + // We have tests that rely on errors being emitted + // in the same tick, so changing this is semver major. + // For now when you opt-in to autoDestroy we allow + // the error to be emitted nextTick. In a future + // semver major update we should change the default to this. 
+ var rState = stream._readableState; + var wState = stream._writableState; + if (rState && rState.autoDestroy || wState && wState.autoDestroy) stream.destroy(err);else stream.emit('error', err); +} + +module.exports = { + destroy: destroy, + undestroy: undestroy, + errorOrDestroy: errorOrDestroy +}; \ No newline at end of file diff --git a/node_modules/readable-stream/lib/internal/streams/end-of-stream.js b/node_modules/readable-stream/lib/internal/streams/end-of-stream.js new file mode 100644 index 0000000..831f286 --- /dev/null +++ b/node_modules/readable-stream/lib/internal/streams/end-of-stream.js @@ -0,0 +1,104 @@ +// Ported from https://github.com/mafintosh/end-of-stream with +// permission from the author, Mathias Buus (@mafintosh). +'use strict'; + +var ERR_STREAM_PREMATURE_CLOSE = require('../../../errors').codes.ERR_STREAM_PREMATURE_CLOSE; + +function once(callback) { + var called = false; + return function () { + if (called) return; + called = true; + + for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) { + args[_key] = arguments[_key]; + } + + callback.apply(this, args); + }; +} + +function noop() {} + +function isRequest(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +} + +function eos(stream, opts, callback) { + if (typeof opts === 'function') return eos(stream, null, opts); + if (!opts) opts = {}; + callback = once(callback || noop); + var readable = opts.readable || opts.readable !== false && stream.readable; + var writable = opts.writable || opts.writable !== false && stream.writable; + + var onlegacyfinish = function onlegacyfinish() { + if (!stream.writable) onfinish(); + }; + + var writableEnded = stream._writableState && stream._writableState.finished; + + var onfinish = function onfinish() { + writable = false; + writableEnded = true; + if (!readable) callback.call(stream); + }; + + var readableEnded = stream._readableState && stream._readableState.endEmitted; + + var onend = function onend() { + readable = false; + readableEnded = true; + if (!writable) callback.call(stream); + }; + + var onerror = function onerror(err) { + callback.call(stream, err); + }; + + var onclose = function onclose() { + var err; + + if (readable && !readableEnded) { + if (!stream._readableState || !stream._readableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); + return callback.call(stream, err); + } + + if (writable && !writableEnded) { + if (!stream._writableState || !stream._writableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); + return callback.call(stream, err); + } + }; + + var onrequest = function onrequest() { + stream.req.on('finish', onfinish); + }; + + if (isRequest(stream)) { + stream.on('complete', onfinish); + stream.on('abort', onclose); + if (stream.req) onrequest();else stream.on('request', onrequest); + } else if (writable && !stream._writableState) { + // legacy streams + stream.on('end', onlegacyfinish); + stream.on('close', onlegacyfinish); + } + + stream.on('end', onend); + stream.on('finish', onfinish); + if (opts.error !== false) stream.on('error', onerror); + stream.on('close', onclose); + return function () { + stream.removeListener('complete', onfinish); + stream.removeListener('abort', onclose); + stream.removeListener('request', onrequest); + if (stream.req) stream.req.removeListener('finish', onfinish); + stream.removeListener('end', onlegacyfinish); + stream.removeListener('close', onlegacyfinish); + stream.removeListener('finish', onfinish); + stream.removeListener('end', onend); 
+ stream.removeListener('error', onerror); + stream.removeListener('close', onclose); + }; +} + +module.exports = eos; \ No newline at end of file diff --git a/node_modules/readable-stream/lib/internal/streams/from-browser.js b/node_modules/readable-stream/lib/internal/streams/from-browser.js new file mode 100644 index 0000000..a4ce56f --- /dev/null +++ b/node_modules/readable-stream/lib/internal/streams/from-browser.js @@ -0,0 +1,3 @@ +module.exports = function () { + throw new Error('Readable.from is not available in the browser') +}; diff --git a/node_modules/readable-stream/lib/internal/streams/from.js b/node_modules/readable-stream/lib/internal/streams/from.js new file mode 100644 index 0000000..6c41284 --- /dev/null +++ b/node_modules/readable-stream/lib/internal/streams/from.js @@ -0,0 +1,64 @@ +'use strict'; + +function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } } + +function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; } + +function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); keys.push.apply(keys, symbols); } return keys; } + +function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { _defineProperty(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; } + +function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } + +var ERR_INVALID_ARG_TYPE = require('../../../errors').codes.ERR_INVALID_ARG_TYPE; + +function from(Readable, iterable, opts) { + var iterator; + + if (iterable && typeof iterable.next === 'function') { + iterator = iterable; + } else if (iterable && iterable[Symbol.asyncIterator]) iterator = iterable[Symbol.asyncIterator]();else if (iterable && iterable[Symbol.iterator]) iterator = iterable[Symbol.iterator]();else throw new ERR_INVALID_ARG_TYPE('iterable', ['Iterable'], iterable); + + var readable = new Readable(_objectSpread({ + objectMode: true + }, opts)); // Reading boolean to protect against _read + // being called before last iteration completion. 
+ + var reading = false; + + readable._read = function () { + if (!reading) { + reading = true; + next(); + } + }; + + function next() { + return _next2.apply(this, arguments); + } + + function _next2() { + _next2 = _asyncToGenerator(function* () { + try { + var _ref = yield iterator.next(), + value = _ref.value, + done = _ref.done; + + if (done) { + readable.push(null); + } else if (readable.push((yield value))) { + next(); + } else { + reading = false; + } + } catch (err) { + readable.destroy(err); + } + }); + return _next2.apply(this, arguments); + } + + return readable; +} + +module.exports = from; \ No newline at end of file diff --git a/node_modules/readable-stream/lib/internal/streams/pipeline.js b/node_modules/readable-stream/lib/internal/streams/pipeline.js new file mode 100644 index 0000000..6589909 --- /dev/null +++ b/node_modules/readable-stream/lib/internal/streams/pipeline.js @@ -0,0 +1,97 @@ +// Ported from https://github.com/mafintosh/pump with +// permission from the author, Mathias Buus (@mafintosh). +'use strict'; + +var eos; + +function once(callback) { + var called = false; + return function () { + if (called) return; + called = true; + callback.apply(void 0, arguments); + }; +} + +var _require$codes = require('../../../errors').codes, + ERR_MISSING_ARGS = _require$codes.ERR_MISSING_ARGS, + ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED; + +function noop(err) { + // Rethrow the error if it exists to avoid swallowing it + if (err) throw err; +} + +function isRequest(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +} + +function destroyer(stream, reading, writing, callback) { + callback = once(callback); + var closed = false; + stream.on('close', function () { + closed = true; + }); + if (eos === undefined) eos = require('./end-of-stream'); + eos(stream, { + readable: reading, + writable: writing + }, function (err) { + if (err) return callback(err); + closed = true; + callback(); + }); + var destroyed = false; + return function (err) { + if (closed) return; + if (destroyed) return; + destroyed = true; // request.destroy just do .end - .abort is what we want + + if (isRequest(stream)) return stream.abort(); + if (typeof stream.destroy === 'function') return stream.destroy(); + callback(err || new ERR_STREAM_DESTROYED('pipe')); + }; +} + +function call(fn) { + fn(); +} + +function pipe(from, to) { + return from.pipe(to); +} + +function popCallback(streams) { + if (!streams.length) return noop; + if (typeof streams[streams.length - 1] !== 'function') return noop; + return streams.pop(); +} + +function pipeline() { + for (var _len = arguments.length, streams = new Array(_len), _key = 0; _key < _len; _key++) { + streams[_key] = arguments[_key]; + } + + var callback = popCallback(streams); + if (Array.isArray(streams[0])) streams = streams[0]; + + if (streams.length < 2) { + throw new ERR_MISSING_ARGS('streams'); + } + + var error; + var destroys = streams.map(function (stream, i) { + var reading = i < streams.length - 1; + var writing = i > 0; + return destroyer(stream, reading, writing, function (err) { + if (!error) error = err; + if (err) destroys.forEach(call); + if (reading) return; + destroys.forEach(call); + callback(error); + }); + }); + return streams.reduce(pipe); +} + +module.exports = pipeline; \ No newline at end of file diff --git a/node_modules/readable-stream/lib/internal/streams/state.js b/node_modules/readable-stream/lib/internal/streams/state.js new file mode 100644 index 0000000..19887eb --- /dev/null +++ 
b/node_modules/readable-stream/lib/internal/streams/state.js @@ -0,0 +1,27 @@ +'use strict'; + +var ERR_INVALID_OPT_VALUE = require('../../../errors').codes.ERR_INVALID_OPT_VALUE; + +function highWaterMarkFrom(options, isDuplex, duplexKey) { + return options.highWaterMark != null ? options.highWaterMark : isDuplex ? options[duplexKey] : null; +} + +function getHighWaterMark(state, options, duplexKey, isDuplex) { + var hwm = highWaterMarkFrom(options, isDuplex, duplexKey); + + if (hwm != null) { + if (!(isFinite(hwm) && Math.floor(hwm) === hwm) || hwm < 0) { + var name = isDuplex ? duplexKey : 'highWaterMark'; + throw new ERR_INVALID_OPT_VALUE(name, hwm); + } + + return Math.floor(hwm); + } // Default value + + + return state.objectMode ? 16 : 16 * 1024; +} + +module.exports = { + getHighWaterMark: getHighWaterMark +}; \ No newline at end of file diff --git a/node_modules/readable-stream/lib/internal/streams/stream-browser.js b/node_modules/readable-stream/lib/internal/streams/stream-browser.js new file mode 100644 index 0000000..9332a3f --- /dev/null +++ b/node_modules/readable-stream/lib/internal/streams/stream-browser.js @@ -0,0 +1 @@ +module.exports = require('events').EventEmitter; diff --git a/node_modules/readable-stream/lib/internal/streams/stream.js b/node_modules/readable-stream/lib/internal/streams/stream.js new file mode 100644 index 0000000..ce2ad5b --- /dev/null +++ b/node_modules/readable-stream/lib/internal/streams/stream.js @@ -0,0 +1 @@ +module.exports = require('stream'); diff --git a/node_modules/readable-stream/package.json b/node_modules/readable-stream/package.json new file mode 100644 index 0000000..0b0c4bd --- /dev/null +++ b/node_modules/readable-stream/package.json @@ -0,0 +1,68 @@ +{ + "name": "readable-stream", + "version": "3.6.0", + "description": "Streams3, a user-land copy of the stream library from Node.js", + "main": "readable.js", + "engines": { + "node": ">= 6" + }, + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "devDependencies": { + "@babel/cli": "^7.2.0", + "@babel/core": "^7.2.0", + "@babel/polyfill": "^7.0.0", + "@babel/preset-env": "^7.2.0", + "airtap": "0.0.9", + "assert": "^1.4.0", + "bl": "^2.0.0", + "deep-strict-equal": "^0.2.0", + "events.once": "^2.0.2", + "glob": "^7.1.2", + "gunzip-maybe": "^1.4.1", + "hyperquest": "^2.1.3", + "lolex": "^2.6.0", + "nyc": "^11.0.0", + "pump": "^3.0.0", + "rimraf": "^2.6.2", + "tap": "^12.0.0", + "tape": "^4.9.0", + "tar-fs": "^1.16.2", + "util-promisify": "^2.1.0" + }, + "scripts": { + "test": "tap -J --no-esm test/parallel/*.js test/ours/*.js", + "ci": "TAP=1 tap --no-esm test/parallel/*.js test/ours/*.js | tee test.tap", + "test-browsers": "airtap --sauce-connect --loopback airtap.local -- test/browser.js", + "test-browser-local": "airtap --open --local -- test/browser.js", + "cover": "nyc npm test", + "report": "nyc report --reporter=lcov", + "update-browser-errors": "babel -o errors-browser.js errors.js" + }, + "repository": { + "type": "git", + "url": "git://github.com/nodejs/readable-stream" + }, + "keywords": [ + "readable", + "stream", + "pipe" + ], + "browser": { + "util": false, + "worker_threads": false, + "./errors": "./errors-browser.js", + "./readable.js": "./readable-browser.js", + "./lib/internal/streams/from.js": "./lib/internal/streams/from-browser.js", + "./lib/internal/streams/stream.js": "./lib/internal/streams/stream-browser.js" + }, + "nyc": { + "include": [ + "lib/**.js" + ] + }, + "license": "MIT" +} diff --git 
a/node_modules/readable-stream/readable-browser.js b/node_modules/readable-stream/readable-browser.js new file mode 100644 index 0000000..adbf60d --- /dev/null +++ b/node_modules/readable-stream/readable-browser.js @@ -0,0 +1,9 @@ +exports = module.exports = require('./lib/_stream_readable.js'); +exports.Stream = exports; +exports.Readable = exports; +exports.Writable = require('./lib/_stream_writable.js'); +exports.Duplex = require('./lib/_stream_duplex.js'); +exports.Transform = require('./lib/_stream_transform.js'); +exports.PassThrough = require('./lib/_stream_passthrough.js'); +exports.finished = require('./lib/internal/streams/end-of-stream.js'); +exports.pipeline = require('./lib/internal/streams/pipeline.js'); diff --git a/node_modules/readable-stream/readable.js b/node_modules/readable-stream/readable.js new file mode 100644 index 0000000..9e0ca12 --- /dev/null +++ b/node_modules/readable-stream/readable.js @@ -0,0 +1,16 @@ +var Stream = require('stream'); +if (process.env.READABLE_STREAM === 'disable' && Stream) { + module.exports = Stream.Readable; + Object.assign(module.exports, Stream); + module.exports.Stream = Stream; +} else { + exports = module.exports = require('./lib/_stream_readable.js'); + exports.Stream = Stream || exports; + exports.Readable = exports; + exports.Writable = require('./lib/_stream_writable.js'); + exports.Duplex = require('./lib/_stream_duplex.js'); + exports.Transform = require('./lib/_stream_transform.js'); + exports.PassThrough = require('./lib/_stream_passthrough.js'); + exports.finished = require('./lib/internal/streams/end-of-stream.js'); + exports.pipeline = require('./lib/internal/streams/pipeline.js'); +} diff --git a/node_modules/safe-buffer/LICENSE b/node_modules/safe-buffer/LICENSE new file mode 100644 index 0000000..0c068ce --- /dev/null +++ b/node_modules/safe-buffer/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Feross Aboukhadijeh + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/safe-buffer/README.md b/node_modules/safe-buffer/README.md new file mode 100644 index 0000000..e9a81af --- /dev/null +++ b/node_modules/safe-buffer/README.md @@ -0,0 +1,584 @@ +# safe-buffer [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url] + +[travis-image]: https://img.shields.io/travis/feross/safe-buffer/master.svg +[travis-url]: https://travis-ci.org/feross/safe-buffer +[npm-image]: https://img.shields.io/npm/v/safe-buffer.svg +[npm-url]: https://npmjs.org/package/safe-buffer +[downloads-image]: https://img.shields.io/npm/dm/safe-buffer.svg +[downloads-url]: https://npmjs.org/package/safe-buffer +[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg +[standard-url]: https://standardjs.com + +#### Safer Node.js Buffer API + +**Use the new Node.js Buffer APIs (`Buffer.from`, `Buffer.alloc`, +`Buffer.allocUnsafe`, `Buffer.allocUnsafeSlow`) in all versions of Node.js.** + +**Uses the built-in implementation when available.** + +## install + +``` +npm install safe-buffer +``` + +## usage + +The goal of this package is to provide a safe replacement for the node.js `Buffer`. + +It's a drop-in replacement for `Buffer`. You can use it by adding one `require` line to +the top of your node.js modules: + +```js +var Buffer = require('safe-buffer').Buffer + +// Existing buffer code will continue to work without issues: + +new Buffer('hey', 'utf8') +new Buffer([1, 2, 3], 'utf8') +new Buffer(obj) +new Buffer(16) // create an uninitialized buffer (potentially unsafe) + +// But you can use these new explicit APIs to make clear what you want: + +Buffer.from('hey', 'utf8') // convert from many types to a Buffer +Buffer.alloc(16) // create a zero-filled buffer (safe) +Buffer.allocUnsafe(16) // create an uninitialized buffer (potentially unsafe) +``` + +## api + +### Class Method: Buffer.from(array) + + +* `array` {Array} + +Allocates a new `Buffer` using an `array` of octets. + +```js +const buf = Buffer.from([0x62,0x75,0x66,0x66,0x65,0x72]); + // creates a new Buffer containing ASCII bytes + // ['b','u','f','f','e','r'] +``` + +A `TypeError` will be thrown if `array` is not an `Array`. + +### Class Method: Buffer.from(arrayBuffer[, byteOffset[, length]]) + + +* `arrayBuffer` {ArrayBuffer} The `.buffer` property of a `TypedArray` or + a `new ArrayBuffer()` +* `byteOffset` {Number} Default: `0` +* `length` {Number} Default: `arrayBuffer.length - byteOffset` + +When passed a reference to the `.buffer` property of a `TypedArray` instance, +the newly created `Buffer` will share the same allocated memory as the +TypedArray. + +```js +const arr = new Uint16Array(2); +arr[0] = 5000; +arr[1] = 4000; + +const buf = Buffer.from(arr.buffer); // shares the memory with arr; + +console.log(buf); + // Prints: + +// changing the TypedArray changes the Buffer also +arr[1] = 6000; + +console.log(buf); + // Prints: +``` + +The optional `byteOffset` and `length` arguments specify a memory range within +the `arrayBuffer` that will be shared by the `Buffer`. + +```js +const ab = new ArrayBuffer(10); +const buf = Buffer.from(ab, 0, 2); +console.log(buf.length); + // Prints: 2 +``` + +A `TypeError` will be thrown if `arrayBuffer` is not an `ArrayBuffer`. + +### Class Method: Buffer.from(buffer) + + +* `buffer` {Buffer} + +Copies the passed `buffer` data onto a new `Buffer` instance. 
+ +```js +const buf1 = Buffer.from('buffer'); +const buf2 = Buffer.from(buf1); + +buf1[0] = 0x61; +console.log(buf1.toString()); + // 'auffer' +console.log(buf2.toString()); + // 'buffer' (copy is not changed) +``` + +A `TypeError` will be thrown if `buffer` is not a `Buffer`. + +### Class Method: Buffer.from(str[, encoding]) + + +* `str` {String} String to encode. +* `encoding` {String} Encoding to use, Default: `'utf8'` + +Creates a new `Buffer` containing the given JavaScript string `str`. If +provided, the `encoding` parameter identifies the character encoding. +If not provided, `encoding` defaults to `'utf8'`. + +```js +const buf1 = Buffer.from('this is a tést'); +console.log(buf1.toString()); + // prints: this is a tést +console.log(buf1.toString('ascii')); + // prints: this is a tC)st + +const buf2 = Buffer.from('7468697320697320612074c3a97374', 'hex'); +console.log(buf2.toString()); + // prints: this is a tést +``` + +A `TypeError` will be thrown if `str` is not a string. + +### Class Method: Buffer.alloc(size[, fill[, encoding]]) + + +* `size` {Number} +* `fill` {Value} Default: `undefined` +* `encoding` {String} Default: `utf8` + +Allocates a new `Buffer` of `size` bytes. If `fill` is `undefined`, the +`Buffer` will be *zero-filled*. + +```js +const buf = Buffer.alloc(5); +console.log(buf); + // +``` + +The `size` must be less than or equal to the value of +`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is +`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will +be created if a `size` less than or equal to 0 is specified. + +If `fill` is specified, the allocated `Buffer` will be initialized by calling +`buf.fill(fill)`. See [`buf.fill()`][] for more information. + +```js +const buf = Buffer.alloc(5, 'a'); +console.log(buf); + // +``` + +If both `fill` and `encoding` are specified, the allocated `Buffer` will be +initialized by calling `buf.fill(fill, encoding)`. For example: + +```js +const buf = Buffer.alloc(11, 'aGVsbG8gd29ybGQ=', 'base64'); +console.log(buf); + // +``` + +Calling `Buffer.alloc(size)` can be significantly slower than the alternative +`Buffer.allocUnsafe(size)` but ensures that the newly created `Buffer` instance +contents will *never contain sensitive data*. + +A `TypeError` will be thrown if `size` is not a number. + +### Class Method: Buffer.allocUnsafe(size) + + +* `size` {Number} + +Allocates a new *non-zero-filled* `Buffer` of `size` bytes. The `size` must +be less than or equal to the value of `require('buffer').kMaxLength` (on 64-bit +architectures, `kMaxLength` is `(2^31)-1`). Otherwise, a [`RangeError`][] is +thrown. A zero-length Buffer will be created if a `size` less than or equal to +0 is specified. + +The underlying memory for `Buffer` instances created in this way is *not +initialized*. The contents of the newly created `Buffer` are unknown and +*may contain sensitive data*. Use [`buf.fill(0)`][] to initialize such +`Buffer` instances to zeroes. + +```js +const buf = Buffer.allocUnsafe(5); +console.log(buf); + // + // (octets will be different, every time) +buf.fill(0); +console.log(buf); + // +``` + +A `TypeError` will be thrown if `size` is not a number. 
+ +Note that the `Buffer` module pre-allocates an internal `Buffer` instance of +size `Buffer.poolSize` that is used as a pool for the fast allocation of new +`Buffer` instances created using `Buffer.allocUnsafe(size)` (and the deprecated +`new Buffer(size)` constructor) only when `size` is less than or equal to +`Buffer.poolSize >> 1` (floor of `Buffer.poolSize` divided by two). The default +value of `Buffer.poolSize` is `8192` but can be modified. + +Use of this pre-allocated internal memory pool is a key difference between +calling `Buffer.alloc(size, fill)` vs. `Buffer.allocUnsafe(size).fill(fill)`. +Specifically, `Buffer.alloc(size, fill)` will *never* use the internal Buffer +pool, while `Buffer.allocUnsafe(size).fill(fill)` *will* use the internal +Buffer pool if `size` is less than or equal to half `Buffer.poolSize`. The +difference is subtle but can be important when an application requires the +additional performance that `Buffer.allocUnsafe(size)` provides. + +### Class Method: Buffer.allocUnsafeSlow(size) + + +* `size` {Number} + +Allocates a new *non-zero-filled* and non-pooled `Buffer` of `size` bytes. The +`size` must be less than or equal to the value of +`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is +`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will +be created if a `size` less than or equal to 0 is specified. + +The underlying memory for `Buffer` instances created in this way is *not +initialized*. The contents of the newly created `Buffer` are unknown and +*may contain sensitive data*. Use [`buf.fill(0)`][] to initialize such +`Buffer` instances to zeroes. + +When using `Buffer.allocUnsafe()` to allocate new `Buffer` instances, +allocations under 4KB are, by default, sliced from a single pre-allocated +`Buffer`. This allows applications to avoid the garbage collection overhead of +creating many individually allocated Buffers. This approach improves both +performance and memory usage by eliminating the need to track and cleanup as +many `Persistent` objects. + +However, in the case where a developer may need to retain a small chunk of +memory from a pool for an indeterminate amount of time, it may be appropriate +to create an un-pooled Buffer instance using `Buffer.allocUnsafeSlow()` then +copy out the relevant bits. + +```js +// need to keep around a few small chunks of memory +const store = []; + +socket.on('readable', () => { + const data = socket.read(); + // allocate for retained data + const sb = Buffer.allocUnsafeSlow(10); + // copy the data into the new allocation + data.copy(sb, 0, 0, 10); + store.push(sb); +}); +``` + +Use of `Buffer.allocUnsafeSlow()` should be used only as a last resort *after* +a developer has observed undue memory retention in their applications. + +A `TypeError` will be thrown if `size` is not a number. + +### All the Rest + +The rest of the `Buffer` API is exactly the same as in node.js. +[See the docs](https://nodejs.org/api/buffer.html). + + +## Related links + +- [Node.js issue: Buffer(number) is unsafe](https://github.com/nodejs/node/issues/4660) +- [Node.js Enhancement Proposal: Buffer.from/Buffer.alloc/Buffer.zalloc/Buffer() soft-deprecate](https://github.com/nodejs/node-eps/pull/4) + +## Why is `Buffer` unsafe? + +Today, the node.js `Buffer` constructor is overloaded to handle many different argument +types like `String`, `Array`, `Object`, `TypedArrayView` (`Uint8Array`, etc.), +`ArrayBuffer`, and also `Number`. 
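The difference in behavior is easiest to see side by side; the lines below restate the usage examples from earlier in this README:

```js
// One constructor, very different results depending on the argument type:
new Buffer('abc')        // 3 bytes: the UTF-8 encoding of 'abc'
new Buffer([1, 2, 3])    // 3 bytes: 0x01 0x02 0x03
new Buffer(16)           // 16 bytes of *uninitialized* memory
```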
+ +The API is optimized for convenience: you can throw any type at it, and it will try to do +what you want. + +Because the Buffer constructor is so powerful, you often see code like this: + +```js +// Convert UTF-8 strings to hex +function toHex (str) { + return new Buffer(str).toString('hex') +} +``` + +***But what happens if `toHex` is called with a `Number` argument?*** + +### Remote Memory Disclosure + +If an attacker can make your program call the `Buffer` constructor with a `Number` +argument, then they can make it allocate uninitialized memory from the node.js process. +This could potentially disclose TLS private keys, user data, or database passwords. + +When the `Buffer` constructor is passed a `Number` argument, it returns an +**UNINITIALIZED** block of memory of the specified `size`. When you create a `Buffer` like +this, you **MUST** overwrite the contents before returning it to the user. + +From the [node.js docs](https://nodejs.org/api/buffer.html#buffer_new_buffer_size): + +> `new Buffer(size)` +> +> - `size` Number +> +> The underlying memory for `Buffer` instances created in this way is not initialized. +> **The contents of a newly created `Buffer` are unknown and could contain sensitive +> data.** Use `buf.fill(0)` to initialize a Buffer to zeroes. + +(Emphasis our own.) + +Whenever the programmer intended to create an uninitialized `Buffer` you often see code +like this: + +```js +var buf = new Buffer(16) + +// Immediately overwrite the uninitialized buffer with data from another buffer +for (var i = 0; i < buf.length; i++) { + buf[i] = otherBuf[i] +} +``` + + +### Would this ever be a problem in real code? + +Yes. It's surprisingly common to forget to check the type of your variables in a +dynamically-typed language like JavaScript. + +Usually the consequences of assuming the wrong type is that your program crashes with an +uncaught exception. But the failure mode for forgetting to check the type of arguments to +the `Buffer` constructor is more catastrophic. + +Here's an example of a vulnerable service that takes a JSON payload and converts it to +hex: + +```js +// Take a JSON payload {str: "some string"} and convert it to hex +var server = http.createServer(function (req, res) { + var data = '' + req.setEncoding('utf8') + req.on('data', function (chunk) { + data += chunk + }) + req.on('end', function () { + var body = JSON.parse(data) + res.end(new Buffer(body.str).toString('hex')) + }) +}) + +server.listen(8080) +``` + +In this example, an http client just has to send: + +```json +{ + "str": 1000 +} +``` + +and it will get back 1,000 bytes of uninitialized memory from the server. + +This is a very serious bug. It's similar in severity to the +[the Heartbleed bug](http://heartbleed.com/) that allowed disclosure of OpenSSL process +memory by remote attackers. + + +### Which real-world packages were vulnerable? + +#### [`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht) + +[Mathias Buus](https://github.com/mafintosh) and I +([Feross Aboukhadijeh](http://feross.org/)) found this issue in one of our own packages, +[`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht). The bug would allow +anyone on the internet to send a series of messages to a user of `bittorrent-dht` and get +them to reveal 20 bytes at a time of uninitialized memory from the node.js process. + +Here's +[the commit](https://github.com/feross/bittorrent-dht/commit/6c7da04025d5633699800a99ec3fbadf70ad35b8) +that fixed it. 
We released a new fixed version, created a +[Node Security Project disclosure](https://nodesecurity.io/advisories/68), and deprecated all +vulnerable versions on npm so users will get a warning to upgrade to a newer version. + +#### [`ws`](https://www.npmjs.com/package/ws) + +That got us wondering if there were other vulnerable packages. Sure enough, within a short +period of time, we found the same issue in [`ws`](https://www.npmjs.com/package/ws), the +most popular WebSocket implementation in node.js. + +If certain APIs were called with `Number` parameters instead of `String` or `Buffer` as +expected, then uninitialized server memory would be disclosed to the remote peer. + +These were the vulnerable methods: + +```js +socket.send(number) +socket.ping(number) +socket.pong(number) +``` + +Here's a vulnerable socket server with some echo functionality: + +```js +server.on('connection', function (socket) { + socket.on('message', function (message) { + message = JSON.parse(message) + if (message.type === 'echo') { + socket.send(message.data) // send back the user's message + } + }) +}) +``` + +`socket.send(number)` called on the server, will disclose server memory. + +Here's [the release](https://github.com/websockets/ws/releases/tag/1.0.1) where the issue +was fixed, with a more detailed explanation. Props to +[Arnout Kazemier](https://github.com/3rd-Eden) for the quick fix. Here's the +[Node Security Project disclosure](https://nodesecurity.io/advisories/67). + + +### What's the solution? + +It's important that node.js offers a fast way to get memory otherwise performance-critical +applications would needlessly get a lot slower. + +But we need a better way to *signal our intent* as programmers. **When we want +uninitialized memory, we should request it explicitly.** + +Sensitive functionality should not be packed into a developer-friendly API that loosely +accepts many different types. This type of API encourages the lazy practice of passing +variables in without checking the type very carefully. + +#### A new API: `Buffer.allocUnsafe(number)` + +The functionality of creating buffers with uninitialized memory should be part of another +API. We propose `Buffer.allocUnsafe(number)`. This way, it's not part of an API that +frequently gets user input of all sorts of different types passed into it. + +```js +var buf = Buffer.allocUnsafe(16) // careful, uninitialized memory! + +// Immediately overwrite the uninitialized buffer with data from another buffer +for (var i = 0; i < buf.length; i++) { + buf[i] = otherBuf[i] +} +``` + + +### How do we fix node.js core? + +We sent [a PR to node.js core](https://github.com/nodejs/node/pull/4514) (merged as +`semver-major`) which defends against one case: + +```js +var str = 16 +new Buffer(str, 'utf8') +``` + +In this situation, it's implied that the programmer intended the first argument to be a +string, since they passed an encoding as a second argument. Today, node.js will allocate +uninitialized memory in the case of `new Buffer(number, encoding)`, which is probably not +what the programmer intended. + +But this is only a partial solution, since if the programmer does `new Buffer(variable)` +(without an `encoding` parameter) there's no way to know what they intended. If `variable` +is sometimes a number, then uninitialized memory will sometimes be returned. + +### What's the real long-term fix? + +We could deprecate and remove `new Buffer(number)` and use `Buffer.allocUnsafe(number)` when +we need uninitialized memory. 
But that would break 1000s of packages. + +~~We believe the best solution is to:~~ + +~~1. Change `new Buffer(number)` to return safe, zeroed-out memory~~ + +~~2. Create a new API for creating uninitialized Buffers. We propose: `Buffer.allocUnsafe(number)`~~ + +#### Update + +We now support adding three new APIs: + +- `Buffer.from(value)` - convert from any type to a buffer +- `Buffer.alloc(size)` - create a zero-filled buffer +- `Buffer.allocUnsafe(size)` - create an uninitialized buffer with given size + +This solves the core problem that affected `ws` and `bittorrent-dht` which is +`Buffer(variable)` getting tricked into taking a number argument. + +This way, existing code continues working and the impact on the npm ecosystem will be +minimal. Over time, npm maintainers can migrate performance-critical code to use +`Buffer.allocUnsafe(number)` instead of `new Buffer(number)`. + + +### Conclusion + +We think there's a serious design issue with the `Buffer` API as it exists today. It +promotes insecure software by putting high-risk functionality into a convenient API +with friendly "developer ergonomics". + +This wasn't merely a theoretical exercise because we found the issue in some of the +most popular npm packages. + +Fortunately, there's an easy fix that can be applied today. Use `safe-buffer` in place of +`buffer`. + +```js +var Buffer = require('safe-buffer').Buffer +``` + +Eventually, we hope that node.js core can switch to this new, safer behavior. We believe +the impact on the ecosystem would be minimal since it's not a breaking change. +Well-maintained, popular packages would be updated to use `Buffer.alloc` quickly, while +older, insecure packages would magically become safe from this attack vector. + + +## links + +- [Node.js PR: buffer: throw if both length and enc are passed](https://github.com/nodejs/node/pull/4514) +- [Node Security Project disclosure for `ws`](https://nodesecurity.io/advisories/67) +- [Node Security Project disclosure for`bittorrent-dht`](https://nodesecurity.io/advisories/68) + + +## credit + +The original issues in `bittorrent-dht` +([disclosure](https://nodesecurity.io/advisories/68)) and +`ws` ([disclosure](https://nodesecurity.io/advisories/67)) were discovered by +[Mathias Buus](https://github.com/mafintosh) and +[Feross Aboukhadijeh](http://feross.org/). + +Thanks to [Adam Baldwin](https://github.com/evilpacket) for helping disclose these issues +and for his work running the [Node Security Project](https://nodesecurity.io/). + +Thanks to [John Hiesey](https://github.com/jhiesey) for proofreading this README and +auditing the code. + + +## license + +MIT. 
Copyright (C) [Feross Aboukhadijeh](http://feross.org) diff --git a/node_modules/safe-buffer/index.d.ts b/node_modules/safe-buffer/index.d.ts new file mode 100644 index 0000000..e9fed80 --- /dev/null +++ b/node_modules/safe-buffer/index.d.ts @@ -0,0 +1,187 @@ +declare module "safe-buffer" { + export class Buffer { + length: number + write(string: string, offset?: number, length?: number, encoding?: string): number; + toString(encoding?: string, start?: number, end?: number): string; + toJSON(): { type: 'Buffer', data: any[] }; + equals(otherBuffer: Buffer): boolean; + compare(otherBuffer: Buffer, targetStart?: number, targetEnd?: number, sourceStart?: number, sourceEnd?: number): number; + copy(targetBuffer: Buffer, targetStart?: number, sourceStart?: number, sourceEnd?: number): number; + slice(start?: number, end?: number): Buffer; + writeUIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeUIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + readUIntLE(offset: number, byteLength: number, noAssert?: boolean): number; + readUIntBE(offset: number, byteLength: number, noAssert?: boolean): number; + readIntLE(offset: number, byteLength: number, noAssert?: boolean): number; + readIntBE(offset: number, byteLength: number, noAssert?: boolean): number; + readUInt8(offset: number, noAssert?: boolean): number; + readUInt16LE(offset: number, noAssert?: boolean): number; + readUInt16BE(offset: number, noAssert?: boolean): number; + readUInt32LE(offset: number, noAssert?: boolean): number; + readUInt32BE(offset: number, noAssert?: boolean): number; + readInt8(offset: number, noAssert?: boolean): number; + readInt16LE(offset: number, noAssert?: boolean): number; + readInt16BE(offset: number, noAssert?: boolean): number; + readInt32LE(offset: number, noAssert?: boolean): number; + readInt32BE(offset: number, noAssert?: boolean): number; + readFloatLE(offset: number, noAssert?: boolean): number; + readFloatBE(offset: number, noAssert?: boolean): number; + readDoubleLE(offset: number, noAssert?: boolean): number; + readDoubleBE(offset: number, noAssert?: boolean): number; + swap16(): Buffer; + swap32(): Buffer; + swap64(): Buffer; + writeUInt8(value: number, offset: number, noAssert?: boolean): number; + writeUInt16LE(value: number, offset: number, noAssert?: boolean): number; + writeUInt16BE(value: number, offset: number, noAssert?: boolean): number; + writeUInt32LE(value: number, offset: number, noAssert?: boolean): number; + writeUInt32BE(value: number, offset: number, noAssert?: boolean): number; + writeInt8(value: number, offset: number, noAssert?: boolean): number; + writeInt16LE(value: number, offset: number, noAssert?: boolean): number; + writeInt16BE(value: number, offset: number, noAssert?: boolean): number; + writeInt32LE(value: number, offset: number, noAssert?: boolean): number; + writeInt32BE(value: number, offset: number, noAssert?: boolean): number; + writeFloatLE(value: number, offset: number, noAssert?: boolean): number; + writeFloatBE(value: number, offset: number, noAssert?: boolean): number; + writeDoubleLE(value: number, offset: number, noAssert?: boolean): number; + writeDoubleBE(value: number, offset: number, noAssert?: boolean): number; + fill(value: any, offset?: number, end?: number): this; + indexOf(value: string | 
number | Buffer, byteOffset?: number, encoding?: string): number; + lastIndexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number; + includes(value: string | number | Buffer, byteOffset?: number, encoding?: string): boolean; + + /** + * Allocates a new buffer containing the given {str}. + * + * @param str String to store in buffer. + * @param encoding encoding to use, optional. Default is 'utf8' + */ + constructor (str: string, encoding?: string); + /** + * Allocates a new buffer of {size} octets. + * + * @param size count of octets to allocate. + */ + constructor (size: number); + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + */ + constructor (array: Uint8Array); + /** + * Produces a Buffer backed by the same allocated memory as + * the given {ArrayBuffer}. + * + * + * @param arrayBuffer The ArrayBuffer with which to share memory. + */ + constructor (arrayBuffer: ArrayBuffer); + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + */ + constructor (array: any[]); + /** + * Copies the passed {buffer} data onto a new {Buffer} instance. + * + * @param buffer The buffer to copy. + */ + constructor (buffer: Buffer); + prototype: Buffer; + /** + * Allocates a new Buffer using an {array} of octets. + * + * @param array + */ + static from(array: any[]): Buffer; + /** + * When passed a reference to the .buffer property of a TypedArray instance, + * the newly created Buffer will share the same allocated memory as the TypedArray. + * The optional {byteOffset} and {length} arguments specify a memory range + * within the {arrayBuffer} that will be shared by the Buffer. + * + * @param arrayBuffer The .buffer property of a TypedArray or a new ArrayBuffer() + * @param byteOffset + * @param length + */ + static from(arrayBuffer: ArrayBuffer, byteOffset?: number, length?: number): Buffer; + /** + * Copies the passed {buffer} data onto a new Buffer instance. + * + * @param buffer + */ + static from(buffer: Buffer): Buffer; + /** + * Creates a new Buffer containing the given JavaScript string {str}. + * If provided, the {encoding} parameter identifies the character encoding. + * If not provided, {encoding} defaults to 'utf8'. + * + * @param str + */ + static from(str: string, encoding?: string): Buffer; + /** + * Returns true if {obj} is a Buffer + * + * @param obj object to test. + */ + static isBuffer(obj: any): obj is Buffer; + /** + * Returns true if {encoding} is a valid encoding argument. + * Valid string encodings in Node 0.12: 'ascii'|'utf8'|'utf16le'|'ucs2'(alias of 'utf16le')|'base64'|'binary'(deprecated)|'hex' + * + * @param encoding string to test. + */ + static isEncoding(encoding: string): boolean; + /** + * Gives the actual byte length of a string. encoding defaults to 'utf8'. + * This is not the same as String.prototype.length since that returns the number of characters in a string. + * + * @param string string to test. + * @param encoding encoding used to evaluate (defaults to 'utf8') + */ + static byteLength(string: string, encoding?: string): number; + /** + * Returns a buffer which is the result of concatenating all the buffers in the list together. + * + * If the list has no items, or if the totalLength is 0, then it returns a zero-length buffer. + * If the list has exactly one item, then the first item of the list is returned. + * If the list has more than one item, then a new Buffer is created. 
+ * + * @param list An array of Buffer objects to concatenate + * @param totalLength Total length of the buffers when concatenated. + * If totalLength is not provided, it is read from the buffers in the list. However, this adds an additional loop to the function, so it is faster to provide the length explicitly. + */ + static concat(list: Buffer[], totalLength?: number): Buffer; + /** + * The same as buf1.compare(buf2). + */ + static compare(buf1: Buffer, buf2: Buffer): number; + /** + * Allocates a new buffer of {size} octets. + * + * @param size count of octets to allocate. + * @param fill if specified, buffer will be initialized by calling buf.fill(fill). + * If parameter is omitted, buffer will be filled with zeros. + * @param encoding encoding used for call to buf.fill while initalizing + */ + static alloc(size: number, fill?: string | Buffer | number, encoding?: string): Buffer; + /** + * Allocates a new buffer of {size} octets, leaving memory not initialized, so the contents + * of the newly created Buffer are unknown and may contain sensitive data. + * + * @param size count of octets to allocate + */ + static allocUnsafe(size: number): Buffer; + /** + * Allocates a new non-pooled buffer of {size} octets, leaving memory not initialized, so the contents + * of the newly created Buffer are unknown and may contain sensitive data. + * + * @param size count of octets to allocate + */ + static allocUnsafeSlow(size: number): Buffer; + } +} \ No newline at end of file diff --git a/node_modules/safe-buffer/index.js b/node_modules/safe-buffer/index.js new file mode 100644 index 0000000..22438da --- /dev/null +++ b/node_modules/safe-buffer/index.js @@ -0,0 +1,62 @@ +/* eslint-disable node/no-deprecated-api */ +var buffer = require('buffer') +var Buffer = buffer.Buffer + +// alternative to using Object.keys for old browsers +function copyProps (src, dst) { + for (var key in src) { + dst[key] = src[key] + } +} +if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) { + module.exports = buffer +} else { + // Copy properties from require('buffer') + copyProps(buffer, exports) + exports.Buffer = SafeBuffer +} + +function SafeBuffer (arg, encodingOrOffset, length) { + return Buffer(arg, encodingOrOffset, length) +} + +// Copy static methods from Buffer +copyProps(Buffer, SafeBuffer) + +SafeBuffer.from = function (arg, encodingOrOffset, length) { + if (typeof arg === 'number') { + throw new TypeError('Argument must not be a number') + } + return Buffer(arg, encodingOrOffset, length) +} + +SafeBuffer.alloc = function (size, fill, encoding) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + var buf = Buffer(size) + if (fill !== undefined) { + if (typeof encoding === 'string') { + buf.fill(fill, encoding) + } else { + buf.fill(fill) + } + } else { + buf.fill(0) + } + return buf +} + +SafeBuffer.allocUnsafe = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return Buffer(size) +} + +SafeBuffer.allocUnsafeSlow = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return buffer.SlowBuffer(size) +} diff --git a/node_modules/safe-buffer/package.json b/node_modules/safe-buffer/package.json new file mode 100644 index 0000000..623fbc3 --- /dev/null +++ b/node_modules/safe-buffer/package.json @@ -0,0 +1,37 @@ +{ + "name": "safe-buffer", + "description": "Safer Node.js Buffer API", + "version": "5.1.2", + "author": { + "name": 
"Feross Aboukhadijeh", + "email": "feross@feross.org", + "url": "http://feross.org" + }, + "bugs": { + "url": "https://github.com/feross/safe-buffer/issues" + }, + "devDependencies": { + "standard": "*", + "tape": "^4.0.0" + }, + "homepage": "https://github.com/feross/safe-buffer", + "keywords": [ + "buffer", + "buffer allocate", + "node security", + "safe", + "safe-buffer", + "security", + "uninitialized" + ], + "license": "MIT", + "main": "index.js", + "types": "index.d.ts", + "repository": { + "type": "git", + "url": "git://github.com/feross/safe-buffer.git" + }, + "scripts": { + "test": "standard && tape test/*.js" + } +} diff --git a/node_modules/semver/LICENSE b/node_modules/semver/LICENSE new file mode 100644 index 0000000..19129e3 --- /dev/null +++ b/node_modules/semver/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/semver/README.md b/node_modules/semver/README.md new file mode 100644 index 0000000..df54e7a --- /dev/null +++ b/node_modules/semver/README.md @@ -0,0 +1,568 @@ +semver(1) -- The semantic versioner for npm +=========================================== + +## Install + +```bash +npm install semver +```` + +## Usage + +As a node module: + +```js +const semver = require('semver') + +semver.valid('1.2.3') // '1.2.3' +semver.valid('a.b.c') // null +semver.clean(' =v1.2.3 ') // '1.2.3' +semver.satisfies('1.2.3', '1.x || >=2.5.0 || 5.0.0 - 7.2.3') // true +semver.gt('1.2.3', '9.8.7') // false +semver.lt('1.2.3', '9.8.7') // true +semver.minVersion('>=1.0.0') // '1.0.0' +semver.valid(semver.coerce('v2')) // '2.0.0' +semver.valid(semver.coerce('42.6.7.9.3-alpha')) // '42.6.7' +``` + +You can also just load the module for the function that you care about, if +you'd like to minimize your footprint. 
+ +```js +// load the whole API at once in a single object +const semver = require('semver') + +// or just load the bits you need +// all of them listed here, just pick and choose what you want + +// classes +const SemVer = require('semver/classes/semver') +const Comparator = require('semver/classes/comparator') +const Range = require('semver/classes/range') + +// functions for working with versions +const semverParse = require('semver/functions/parse') +const semverValid = require('semver/functions/valid') +const semverClean = require('semver/functions/clean') +const semverInc = require('semver/functions/inc') +const semverDiff = require('semver/functions/diff') +const semverMajor = require('semver/functions/major') +const semverMinor = require('semver/functions/minor') +const semverPatch = require('semver/functions/patch') +const semverPrerelease = require('semver/functions/prerelease') +const semverCompare = require('semver/functions/compare') +const semverRcompare = require('semver/functions/rcompare') +const semverCompareLoose = require('semver/functions/compare-loose') +const semverCompareBuild = require('semver/functions/compare-build') +const semverSort = require('semver/functions/sort') +const semverRsort = require('semver/functions/rsort') + +// low-level comparators between versions +const semverGt = require('semver/functions/gt') +const semverLt = require('semver/functions/lt') +const semverEq = require('semver/functions/eq') +const semverNeq = require('semver/functions/neq') +const semverGte = require('semver/functions/gte') +const semverLte = require('semver/functions/lte') +const semverCmp = require('semver/functions/cmp') +const semverCoerce = require('semver/functions/coerce') + +// working with ranges +const semverSatisfies = require('semver/functions/satisfies') +const semverMaxSatisfying = require('semver/ranges/max-satisfying') +const semverMinSatisfying = require('semver/ranges/min-satisfying') +const semverToComparators = require('semver/ranges/to-comparators') +const semverMinVersion = require('semver/ranges/min-version') +const semverValidRange = require('semver/ranges/valid') +const semverOutside = require('semver/ranges/outside') +const semverGtr = require('semver/ranges/gtr') +const semverLtr = require('semver/ranges/ltr') +const semverIntersects = require('semver/ranges/intersects') +const simplifyRange = require('semver/ranges/simplify') +const rangeSubset = require('semver/ranges/subset') +``` + +As a command-line utility: + +``` +$ semver -h + +A JavaScript implementation of the https://semver.org/ specification +Copyright Isaac Z. Schlueter + +Usage: semver [options] [ [...]] +Prints valid versions sorted by SemVer precedence + +Options: +-r --range + Print versions that match the specified range. + +-i --increment [] + Increment a version by the specified level. Level can + be one of: major, minor, patch, premajor, preminor, + prepatch, or prerelease. Default level is 'patch'. + Only one version may be specified. + +--preid + Identifier to be used to prefix premajor, preminor, + prepatch or prerelease version increments. 
+ +-l --loose + Interpret versions and ranges loosely + +-p --include-prerelease + Always include prerelease versions in range matching + +-c --coerce + Coerce a string into SemVer if possible + (does not imply --loose) + +--rtl + Coerce version strings right to left + +--ltr + Coerce version strings left to right (default) + +Program exits successfully if any valid version satisfies +all supplied ranges, and prints all satisfying versions. + +If no satisfying versions are found, then exits failure. + +Versions are printed in ascending order, so supplying +multiple versions to the utility will just sort them. +``` + +## Versions + +A "version" is described by the `v2.0.0` specification found at +. + +A leading `"="` or `"v"` character is stripped off and ignored. + +## Ranges + +A `version range` is a set of `comparators` which specify versions +that satisfy the range. + +A `comparator` is composed of an `operator` and a `version`. The set +of primitive `operators` is: + +* `<` Less than +* `<=` Less than or equal to +* `>` Greater than +* `>=` Greater than or equal to +* `=` Equal. If no operator is specified, then equality is assumed, + so this operator is optional, but MAY be included. + +For example, the comparator `>=1.2.7` would match the versions +`1.2.7`, `1.2.8`, `2.5.3`, and `1.3.9`, but not the versions `1.2.6` +or `1.1.0`. + +Comparators can be joined by whitespace to form a `comparator set`, +which is satisfied by the **intersection** of all of the comparators +it includes. + +A range is composed of one or more comparator sets, joined by `||`. A +version matches a range if and only if every comparator in at least +one of the `||`-separated comparator sets is satisfied by the version. + +For example, the range `>=1.2.7 <1.3.0` would match the versions +`1.2.7`, `1.2.8`, and `1.2.99`, but not the versions `1.2.6`, `1.3.0`, +or `1.1.0`. + +The range `1.2.7 || >=1.2.9 <2.0.0` would match the versions `1.2.7`, +`1.2.9`, and `1.4.6`, but not the versions `1.2.8` or `2.0.0`. + +### Prerelease Tags + +If a version has a prerelease tag (for example, `1.2.3-alpha.3`) then +it will only be allowed to satisfy comparator sets if at least one +comparator with the same `[major, minor, patch]` tuple also has a +prerelease tag. + +For example, the range `>1.2.3-alpha.3` would be allowed to match the +version `1.2.3-alpha.7`, but it would *not* be satisfied by +`3.4.5-alpha.9`, even though `3.4.5-alpha.9` is technically "greater +than" `1.2.3-alpha.3` according to the SemVer sort rules. The version +range only accepts prerelease tags on the `1.2.3` version. The +version `3.4.5` *would* satisfy the range, because it does not have a +prerelease flag, and `3.4.5` is greater than `1.2.3-alpha.7`. + +The purpose for this behavior is twofold. First, prerelease versions +frequently are updated very quickly, and contain many breaking changes +that are (by the author's design) not yet fit for public consumption. +Therefore, by default, they are excluded from range matching +semantics. + +Second, a user who has opted into using a prerelease version has +clearly indicated the intent to use *that specific* set of +alpha/beta/rc versions. By including a prerelease tag in the range, +the user is indicating that they are aware of the risk. However, it +is still not appropriate to assume that they have opted into taking a +similar risk on the *next* set of prerelease versions. 
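A short sketch of those rules with `semver.satisfies`, using default options:

```js
const semver = require('semver')

semver.satisfies('1.2.3-alpha.7', '>1.2.3-alpha.3') // true: prerelease on the same [major, minor, patch] tuple
semver.satisfies('3.4.5-alpha.9', '>1.2.3-alpha.3') // false: prerelease of a different tuple
semver.satisfies('3.4.5', '>1.2.3-alpha.3')         // true: not a prerelease, and greater than 1.2.3-alpha.3
```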
+ +Note that this behavior can be suppressed (treating all prerelease +versions as if they were normal versions, for the purpose of range +matching) by setting the `includePrerelease` flag on the options +object to any +[functions](https://github.com/npm/node-semver#functions) that do +range matching. + +#### Prerelease Identifiers + +The method `.inc` takes an additional `identifier` string argument that +will append the value of the string as a prerelease identifier: + +```javascript +semver.inc('1.2.3', 'prerelease', 'beta') +// '1.2.4-beta.0' +``` + +command-line example: + +```bash +$ semver 1.2.3 -i prerelease --preid beta +1.2.4-beta.0 +``` + +Which then can be used to increment further: + +```bash +$ semver 1.2.4-beta.0 -i prerelease +1.2.4-beta.1 +``` + +### Advanced Range Syntax + +Advanced range syntax desugars to primitive comparators in +deterministic ways. + +Advanced ranges may be combined in the same way as primitive +comparators using white space or `||`. + +#### Hyphen Ranges `X.Y.Z - A.B.C` + +Specifies an inclusive set. + +* `1.2.3 - 2.3.4` := `>=1.2.3 <=2.3.4` + +If a partial version is provided as the first version in the inclusive +range, then the missing pieces are replaced with zeroes. + +* `1.2 - 2.3.4` := `>=1.2.0 <=2.3.4` + +If a partial version is provided as the second version in the +inclusive range, then all versions that start with the supplied parts +of the tuple are accepted, but nothing that would be greater than the +provided tuple parts. + +* `1.2.3 - 2.3` := `>=1.2.3 <2.4.0-0` +* `1.2.3 - 2` := `>=1.2.3 <3.0.0-0` + +#### X-Ranges `1.2.x` `1.X` `1.2.*` `*` + +Any of `X`, `x`, or `*` may be used to "stand in" for one of the +numeric values in the `[major, minor, patch]` tuple. + +* `*` := `>=0.0.0` (Any non-prerelease version satisfies, unless + `includePrerelease` is specified, in which case any version at all + satisfies) +* `1.x` := `>=1.0.0 <2.0.0-0` (Matching major version) +* `1.2.x` := `>=1.2.0 <1.3.0-0` (Matching major and minor versions) + +A partial version range is treated as an X-Range, so the special +character is in fact optional. + +* `""` (empty string) := `*` := `>=0.0.0` +* `1` := `1.x.x` := `>=1.0.0 <2.0.0-0` +* `1.2` := `1.2.x` := `>=1.2.0 <1.3.0-0` + +#### Tilde Ranges `~1.2.3` `~1.2` `~1` + +Allows patch-level changes if a minor version is specified on the +comparator. Allows minor-level changes if not. + +* `~1.2.3` := `>=1.2.3 <1.(2+1).0` := `>=1.2.3 <1.3.0-0` +* `~1.2` := `>=1.2.0 <1.(2+1).0` := `>=1.2.0 <1.3.0-0` (Same as `1.2.x`) +* `~1` := `>=1.0.0 <(1+1).0.0` := `>=1.0.0 <2.0.0-0` (Same as `1.x`) +* `~0.2.3` := `>=0.2.3 <0.(2+1).0` := `>=0.2.3 <0.3.0-0` +* `~0.2` := `>=0.2.0 <0.(2+1).0` := `>=0.2.0 <0.3.0-0` (Same as `0.2.x`) +* `~0` := `>=0.0.0 <(0+1).0.0` := `>=0.0.0 <1.0.0-0` (Same as `0.x`) +* `~1.2.3-beta.2` := `>=1.2.3-beta.2 <1.3.0-0` Note that prereleases in + the `1.2.3` version will be allowed, if they are greater than or + equal to `beta.2`. So, `1.2.3-beta.4` would be allowed, but + `1.2.4-beta.2` would not, because it is a prerelease of a + different `[major, minor, patch]` tuple. + +#### Caret Ranges `^1.2.3` `^0.2.5` `^0.0.4` + +Allows changes that do not modify the left-most non-zero element in the +`[major, minor, patch]` tuple. In other words, this allows patch and +minor updates for versions `1.0.0` and above, patch updates for +versions `0.X >=0.1.0`, and *no* updates for versions `0.0.X`. + +Many authors treat a `0.x` version as if the `x` were the major +"breaking-change" indicator. 
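One way to check how any of these advanced forms desugar is to ask the library itself: `validRange` returns the normalized comparator set (output shown as formatted by semver 7.x):

```js
const semver = require('semver')

semver.validRange('1.2 - 2.3.4') // '>=1.2.0 <=2.3.4'
semver.validRange('1.2.x')       // '>=1.2.0 <1.3.0-0'
semver.validRange('~1.2.3')      // '>=1.2.3 <1.3.0-0'
semver.validRange('^0.2.3')      // '>=0.2.3 <0.3.0-0'
```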
+ +Caret ranges are ideal when an author may make breaking changes +between `0.2.4` and `0.3.0` releases, which is a common practice. +However, it presumes that there will *not* be breaking changes between +`0.2.4` and `0.2.5`. It allows for changes that are presumed to be +additive (but non-breaking), according to commonly observed practices. + +* `^1.2.3` := `>=1.2.3 <2.0.0-0` +* `^0.2.3` := `>=0.2.3 <0.3.0-0` +* `^0.0.3` := `>=0.0.3 <0.0.4-0` +* `^1.2.3-beta.2` := `>=1.2.3-beta.2 <2.0.0-0` Note that prereleases in + the `1.2.3` version will be allowed, if they are greater than or + equal to `beta.2`. So, `1.2.3-beta.4` would be allowed, but + `1.2.4-beta.2` would not, because it is a prerelease of a + different `[major, minor, patch]` tuple. +* `^0.0.3-beta` := `>=0.0.3-beta <0.0.4-0` Note that prereleases in the + `0.0.3` version *only* will be allowed, if they are greater than or + equal to `beta`. So, `0.0.3-pr.2` would be allowed. + +When parsing caret ranges, a missing `patch` value desugars to the +number `0`, but will allow flexibility within that value, even if the +major and minor versions are both `0`. + +* `^1.2.x` := `>=1.2.0 <2.0.0-0` +* `^0.0.x` := `>=0.0.0 <0.1.0-0` +* `^0.0` := `>=0.0.0 <0.1.0-0` + +A missing `minor` and `patch` values will desugar to zero, but also +allow flexibility within those values, even if the major version is +zero. + +* `^1.x` := `>=1.0.0 <2.0.0-0` +* `^0.x` := `>=0.0.0 <1.0.0-0` + +### Range Grammar + +Putting all this together, here is a Backus-Naur grammar for ranges, +for the benefit of parser authors: + +```bnf +range-set ::= range ( logical-or range ) * +logical-or ::= ( ' ' ) * '||' ( ' ' ) * +range ::= hyphen | simple ( ' ' simple ) * | '' +hyphen ::= partial ' - ' partial +simple ::= primitive | partial | tilde | caret +primitive ::= ( '<' | '>' | '>=' | '<=' | '=' ) partial +partial ::= xr ( '.' xr ( '.' xr qualifier ? )? )? +xr ::= 'x' | 'X' | '*' | nr +nr ::= '0' | ['1'-'9'] ( ['0'-'9'] ) * +tilde ::= '~' partial +caret ::= '^' partial +qualifier ::= ( '-' pre )? ( '+' build )? +pre ::= parts +build ::= parts +parts ::= part ( '.' part ) * +part ::= nr | [-0-9A-Za-z]+ +``` + +## Functions + +All methods and classes take a final `options` object argument. All +options in this object are `false` by default. The options supported +are: + +- `loose` Be more forgiving about not-quite-valid semver strings. + (Any resulting output will always be 100% strict compliant, of + course.) For backwards compatibility reasons, if the `options` + argument is a boolean value instead of an object, it is interpreted + to be the `loose` param. +- `includePrerelease` Set to suppress the [default + behavior](https://github.com/npm/node-semver#prerelease-tags) of + excluding prerelease tagged versions from ranges unless they are + explicitly opted into. + +Strict-mode Comparators and Ranges will be strict about the SemVer +strings that they parse. + +* `valid(v)`: Return the parsed version, or null if it's not valid. +* `inc(v, release)`: Return the version incremented by the release + type (`major`, `premajor`, `minor`, `preminor`, `patch`, + `prepatch`, or `prerelease`), or null if it's not valid + * `premajor` in one call will bump the version up to the next major + version and down to a prerelease of that major version. + `preminor`, and `prepatch` work the same way. + * If called from a non-prerelease version, the `prerelease` will work the + same as `prepatch`. It increments the patch version, then makes a + prerelease. 
If the input version is already a prerelease it simply + increments it. +* `prerelease(v)`: Returns an array of prerelease components, or null + if none exist. Example: `prerelease('1.2.3-alpha.1') -> ['alpha', 1]` +* `major(v)`: Return the major version number. +* `minor(v)`: Return the minor version number. +* `patch(v)`: Return the patch version number. +* `intersects(r1, r2, loose)`: Return true if the two supplied ranges + or comparators intersect. +* `parse(v)`: Attempt to parse a string as a semantic version, returning either + a `SemVer` object or `null`. + +### Comparison + +* `gt(v1, v2)`: `v1 > v2` +* `gte(v1, v2)`: `v1 >= v2` +* `lt(v1, v2)`: `v1 < v2` +* `lte(v1, v2)`: `v1 <= v2` +* `eq(v1, v2)`: `v1 == v2` This is true if they're logically equivalent, + even if they're not the exact same string. You already know how to + compare strings. +* `neq(v1, v2)`: `v1 != v2` The opposite of `eq`. +* `cmp(v1, comparator, v2)`: Pass in a comparison string, and it'll call + the corresponding function above. `"==="` and `"!=="` do simple + string comparison, but are included for completeness. Throws if an + invalid comparison string is provided. +* `compare(v1, v2)`: Return `0` if `v1 == v2`, or `1` if `v1` is greater, or `-1` if + `v2` is greater. Sorts in ascending order if passed to `Array.sort()`. +* `rcompare(v1, v2)`: The reverse of compare. Sorts an array of versions + in descending order when passed to `Array.sort()`. +* `compareBuild(v1, v2)`: The same as `compare` but considers `build` when two versions + are equal. Sorts in ascending order if passed to `Array.sort()`. + `v2` is greater. Sorts in ascending order if passed to `Array.sort()`. +* `diff(v1, v2)`: Returns difference between two versions by the release type + (`major`, `premajor`, `minor`, `preminor`, `patch`, `prepatch`, or `prerelease`), + or null if the versions are the same. + +### Comparators + +* `intersects(comparator)`: Return true if the comparators intersect + +### Ranges + +* `validRange(range)`: Return the valid range or null if it's not valid +* `satisfies(version, range)`: Return true if the version satisfies the + range. +* `maxSatisfying(versions, range)`: Return the highest version in the list + that satisfies the range, or `null` if none of them do. +* `minSatisfying(versions, range)`: Return the lowest version in the list + that satisfies the range, or `null` if none of them do. +* `minVersion(range)`: Return the lowest version that can possibly match + the given range. +* `gtr(version, range)`: Return `true` if version is greater than all the + versions possible in the range. +* `ltr(version, range)`: Return `true` if version is less than all the + versions possible in the range. +* `outside(version, range, hilo)`: Return true if the version is outside + the bounds of the range in either the high or low direction. The + `hilo` argument must be either the string `'>'` or `'<'`. (This is + the function called by `gtr` and `ltr`.) +* `intersects(range)`: Return true if any of the ranges comparators intersect +* `simplifyRange(versions, range)`: Return a "simplified" range that + matches the same items in `versions` list as the range specified. Note + that it does *not* guarantee that it would match the same versions in all + cases, only for the set of versions provided. This is useful when + generating ranges by joining together multiple versions with `||` + programmatically, to provide the user with something a bit more + ergonomic. 
If the provided range is shorter in string-length than the + generated range, then that is returned. +* `subset(subRange, superRange)`: Return `true` if the `subRange` range is + entirely contained by the `superRange` range. + +Note that, since ranges may be non-contiguous, a version might not be +greater than a range, less than a range, *or* satisfy a range! For +example, the range `1.2 <1.2.9 || >2.0.0` would have a hole from `1.2.9` +until `2.0.0`, so the version `1.2.10` would not be greater than the +range (because `2.0.1` satisfies, which is higher), nor less than the +range (since `1.2.8` satisfies, which is lower), and it also does not +satisfy the range. + +If you want to know if a version satisfies or does not satisfy a +range, use the `satisfies(version, range)` function. + +### Coercion + +* `coerce(version, options)`: Coerces a string to semver if possible + +This aims to provide a very forgiving translation of a non-semver string to +semver. It looks for the first digit in a string, and consumes all +remaining characters which satisfy at least a partial semver (e.g., `1`, +`1.2`, `1.2.3`) up to the max permitted length (256 characters). Longer +versions are simply truncated (`4.6.3.9.2-alpha2` becomes `4.6.3`). All +surrounding text is simply ignored (`v3.4 replaces v3.3.1` becomes +`3.4.0`). Only text which lacks digits will fail coercion (`version one` +is not valid). The maximum length for any semver component considered for +coercion is 16 characters; longer components will be ignored +(`10000000000000000.4.7.4` becomes `4.7.4`). The maximum value for any +semver component is `Number.MAX_SAFE_INTEGER || (2**53 - 1)`; higher value +components are invalid (`9999999999999999.4.7.4` is likely invalid). + +If the `options.rtl` flag is set, then `coerce` will return the right-most +coercible tuple that does not share an ending index with a longer coercible +tuple. For example, `1.2.3.4` will return `2.3.4` in rtl mode, not +`4.0.0`. `1.2.3/4` will return `4.0.0`, because the `4` is not a part of +any other overlapping SemVer tuple. + +### Clean + +* `clean(version)`: Clean a string to be a valid semver if possible + +This will return a cleaned and trimmed semver version. If the provided +version is not valid a null will be returned. This does not work for +ranges. + +ex. +* `s.clean(' = v 2.1.5foo')`: `null` +* `s.clean(' = v 2.1.5foo', { loose: true })`: `'2.1.5-foo'` +* `s.clean(' = v 2.1.5-foo')`: `null` +* `s.clean(' = v 2.1.5-foo', { loose: true })`: `'2.1.5-foo'` +* `s.clean('=v2.1.5')`: `'2.1.5'` +* `s.clean(' =v2.1.5')`: `2.1.5` +* `s.clean(' 2.1.5 ')`: `'2.1.5'` +* `s.clean('~1.0.0')`: `null` + +## Exported Modules + + + +You may pull in just the part of this semver utility that you need, if you +are sensitive to packing and tree-shaking concerns. The main +`require('semver')` export uses getter functions to lazily load the parts +of the API that are used. 
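For example, a script that only needs coercion and range checking can load just those two functions; the input strings here are illustrative:

```js
// Pull in only the pieces that are needed instead of the whole API surface.
const coerce = require('semver/functions/coerce')
const satisfies = require('semver/functions/satisfies')

const version = coerce('v3.4 replaces v3.3.1').version // '3.4.0'
satisfies(version, '^3.4.0')                           // true
```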
+ +The following modules are available: + +* `require('semver')` +* `require('semver/classes')` +* `require('semver/classes/comparator')` +* `require('semver/classes/range')` +* `require('semver/classes/semver')` +* `require('semver/functions/clean')` +* `require('semver/functions/cmp')` +* `require('semver/functions/coerce')` +* `require('semver/functions/compare')` +* `require('semver/functions/compare-build')` +* `require('semver/functions/compare-loose')` +* `require('semver/functions/diff')` +* `require('semver/functions/eq')` +* `require('semver/functions/gt')` +* `require('semver/functions/gte')` +* `require('semver/functions/inc')` +* `require('semver/functions/lt')` +* `require('semver/functions/lte')` +* `require('semver/functions/major')` +* `require('semver/functions/minor')` +* `require('semver/functions/neq')` +* `require('semver/functions/parse')` +* `require('semver/functions/patch')` +* `require('semver/functions/prerelease')` +* `require('semver/functions/rcompare')` +* `require('semver/functions/rsort')` +* `require('semver/functions/satisfies')` +* `require('semver/functions/sort')` +* `require('semver/functions/valid')` +* `require('semver/ranges/gtr')` +* `require('semver/ranges/intersects')` +* `require('semver/ranges/ltr')` +* `require('semver/ranges/max-satisfying')` +* `require('semver/ranges/min-satisfying')` +* `require('semver/ranges/min-version')` +* `require('semver/ranges/outside')` +* `require('semver/ranges/to-comparators')` +* `require('semver/ranges/valid')` diff --git a/node_modules/semver/bin/semver.js b/node_modules/semver/bin/semver.js new file mode 100755 index 0000000..8d1b557 --- /dev/null +++ b/node_modules/semver/bin/semver.js @@ -0,0 +1,183 @@ +#!/usr/bin/env node +// Standalone semver comparison program. +// Exits successfully and prints matching version(s) if +// any supplied version is valid and passes all tests. 
+ +const argv = process.argv.slice(2) + +let versions = [] + +const range = [] + +let inc = null + +const version = require('../package.json').version + +let loose = false + +let includePrerelease = false + +let coerce = false + +let rtl = false + +let identifier + +const semver = require('../') + +let reverse = false + +let options = {} + +const main = () => { + if (!argv.length) { + return help() + } + while (argv.length) { + let a = argv.shift() + const indexOfEqualSign = a.indexOf('=') + if (indexOfEqualSign !== -1) { + const value = a.slice(indexOfEqualSign + 1) + a = a.slice(0, indexOfEqualSign) + argv.unshift(value) + } + switch (a) { + case '-rv': case '-rev': case '--rev': case '--reverse': + reverse = true + break + case '-l': case '--loose': + loose = true + break + case '-p': case '--include-prerelease': + includePrerelease = true + break + case '-v': case '--version': + versions.push(argv.shift()) + break + case '-i': case '--inc': case '--increment': + switch (argv[0]) { + case 'major': case 'minor': case 'patch': case 'prerelease': + case 'premajor': case 'preminor': case 'prepatch': + inc = argv.shift() + break + default: + inc = 'patch' + break + } + break + case '--preid': + identifier = argv.shift() + break + case '-r': case '--range': + range.push(argv.shift()) + break + case '-c': case '--coerce': + coerce = true + break + case '--rtl': + rtl = true + break + case '--ltr': + rtl = false + break + case '-h': case '--help': case '-?': + return help() + default: + versions.push(a) + break + } + } + + options = { loose: loose, includePrerelease: includePrerelease, rtl: rtl } + + versions = versions.map((v) => { + return coerce ? (semver.coerce(v, options) || { version: v }).version : v + }).filter((v) => { + return semver.valid(v) + }) + if (!versions.length) { + return fail() + } + if (inc && (versions.length !== 1 || range.length)) { + return failInc() + } + + for (let i = 0, l = range.length; i < l; i++) { + versions = versions.filter((v) => { + return semver.satisfies(v, range[i], options) + }) + if (!versions.length) { + return fail() + } + } + return success(versions) +} + +const failInc = () => { + console.error('--inc can only be used on a single version with no range') + fail() +} + +const fail = () => process.exit(1) + +const success = () => { + const compare = reverse ? 'rcompare' : 'compare' + versions.sort((a, b) => { + return semver[compare](a, b, options) + }).map((v) => { + return semver.clean(v, options) + }).map((v) => { + return inc ? semver.inc(v, inc, options, identifier) : v + }).forEach((v, i, _) => { + console.log(v) + }) +} + +const help = () => console.log( +`SemVer ${version} + +A JavaScript implementation of the https://semver.org/ specification +Copyright Isaac Z. Schlueter + +Usage: semver [options] [ [...]] +Prints valid versions sorted by SemVer precedence + +Options: +-r --range + Print versions that match the specified range. + +-i --increment [] + Increment a version by the specified level. Level can + be one of: major, minor, patch, premajor, preminor, + prepatch, or prerelease. Default level is 'patch'. + Only one version may be specified. + +--preid + Identifier to be used to prefix premajor, preminor, + prepatch or prerelease version increments. 
+ +-l --loose + Interpret versions and ranges loosely + +-p --include-prerelease + Always include prerelease versions in range matching + +-c --coerce + Coerce a string into SemVer if possible + (does not imply --loose) + +--rtl + Coerce version strings right to left + +--ltr + Coerce version strings left to right (default) + +Program exits successfully if any valid version satisfies +all supplied ranges, and prints all satisfying versions. + +If no satisfying versions are found, then exits failure. + +Versions are printed in ascending order, so supplying +multiple versions to the utility will just sort them.`) + +main() diff --git a/node_modules/semver/classes/comparator.js b/node_modules/semver/classes/comparator.js new file mode 100644 index 0000000..62cd204 --- /dev/null +++ b/node_modules/semver/classes/comparator.js @@ -0,0 +1,136 @@ +const ANY = Symbol('SemVer ANY') +// hoisted class for cyclic dependency +class Comparator { + static get ANY () { + return ANY + } + + constructor (comp, options) { + options = parseOptions(options) + + if (comp instanceof Comparator) { + if (comp.loose === !!options.loose) { + return comp + } else { + comp = comp.value + } + } + + debug('comparator', comp, options) + this.options = options + this.loose = !!options.loose + this.parse(comp) + + if (this.semver === ANY) { + this.value = '' + } else { + this.value = this.operator + this.semver.version + } + + debug('comp', this) + } + + parse (comp) { + const r = this.options.loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR] + const m = comp.match(r) + + if (!m) { + throw new TypeError(`Invalid comparator: ${comp}`) + } + + this.operator = m[1] !== undefined ? m[1] : '' + if (this.operator === '=') { + this.operator = '' + } + + // if it literally is just '>' or '' then allow anything. 
+ if (!m[2]) { + this.semver = ANY + } else { + this.semver = new SemVer(m[2], this.options.loose) + } + } + + toString () { + return this.value + } + + test (version) { + debug('Comparator.test', version, this.options.loose) + + if (this.semver === ANY || version === ANY) { + return true + } + + if (typeof version === 'string') { + try { + version = new SemVer(version, this.options) + } catch (er) { + return false + } + } + + return cmp(version, this.operator, this.semver, this.options) + } + + intersects (comp, options) { + if (!(comp instanceof Comparator)) { + throw new TypeError('a Comparator is required') + } + + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false, + } + } + + if (this.operator === '') { + if (this.value === '') { + return true + } + return new Range(comp.value, options).test(this.value) + } else if (comp.operator === '') { + if (comp.value === '') { + return true + } + return new Range(this.value, options).test(comp.semver) + } + + const sameDirectionIncreasing = + (this.operator === '>=' || this.operator === '>') && + (comp.operator === '>=' || comp.operator === '>') + const sameDirectionDecreasing = + (this.operator === '<=' || this.operator === '<') && + (comp.operator === '<=' || comp.operator === '<') + const sameSemVer = this.semver.version === comp.semver.version + const differentDirectionsInclusive = + (this.operator === '>=' || this.operator === '<=') && + (comp.operator === '>=' || comp.operator === '<=') + const oppositeDirectionsLessThan = + cmp(this.semver, '<', comp.semver, options) && + (this.operator === '>=' || this.operator === '>') && + (comp.operator === '<=' || comp.operator === '<') + const oppositeDirectionsGreaterThan = + cmp(this.semver, '>', comp.semver, options) && + (this.operator === '<=' || this.operator === '<') && + (comp.operator === '>=' || comp.operator === '>') + + return ( + sameDirectionIncreasing || + sameDirectionDecreasing || + (sameSemVer && differentDirectionsInclusive) || + oppositeDirectionsLessThan || + oppositeDirectionsGreaterThan + ) + } +} + +module.exports = Comparator + +const parseOptions = require('../internal/parse-options') +const { re, t } = require('../internal/re') +const cmp = require('../functions/cmp') +const debug = require('../internal/debug') +const SemVer = require('./semver') +const Range = require('./range') diff --git a/node_modules/semver/classes/index.js b/node_modules/semver/classes/index.js new file mode 100644 index 0000000..5e3f5c9 --- /dev/null +++ b/node_modules/semver/classes/index.js @@ -0,0 +1,5 @@ +module.exports = { + SemVer: require('./semver.js'), + Range: require('./range.js'), + Comparator: require('./comparator.js'), +} diff --git a/node_modules/semver/classes/range.js b/node_modules/semver/classes/range.js new file mode 100644 index 0000000..a791d91 --- /dev/null +++ b/node_modules/semver/classes/range.js @@ -0,0 +1,522 @@ +// hoisted class for cyclic dependency +class Range { + constructor (range, options) { + options = parseOptions(options) + + if (range instanceof Range) { + if ( + range.loose === !!options.loose && + range.includePrerelease === !!options.includePrerelease + ) { + return range + } else { + return new Range(range.raw, options) + } + } + + if (range instanceof Comparator) { + // just put it in the set and return + this.raw = range.value + this.set = [[range]] + this.format() + return this + } + + this.options = options + this.loose = !!options.loose + this.includePrerelease = !!options.includePrerelease + + 
// First, split based on boolean or || + this.raw = range + this.set = range + .split('||') + // map the range to a 2d array of comparators + .map(r => this.parseRange(r.trim())) + // throw out any comparator lists that are empty + // this generally means that it was not a valid range, which is allowed + // in loose mode, but will still throw if the WHOLE range is invalid. + .filter(c => c.length) + + if (!this.set.length) { + throw new TypeError(`Invalid SemVer Range: ${range}`) + } + + // if we have any that are not the null set, throw out null sets. + if (this.set.length > 1) { + // keep the first one, in case they're all null sets + const first = this.set[0] + this.set = this.set.filter(c => !isNullSet(c[0])) + if (this.set.length === 0) { + this.set = [first] + } else if (this.set.length > 1) { + // if we have any that are *, then the range is just * + for (const c of this.set) { + if (c.length === 1 && isAny(c[0])) { + this.set = [c] + break + } + } + } + } + + this.format() + } + + format () { + this.range = this.set + .map((comps) => { + return comps.join(' ').trim() + }) + .join('||') + .trim() + return this.range + } + + toString () { + return this.range + } + + parseRange (range) { + range = range.trim() + + // memoize range parsing for performance. + // this is a very hot path, and fully deterministic. + const memoOpts = Object.keys(this.options).join(',') + const memoKey = `parseRange:${memoOpts}:${range}` + const cached = cache.get(memoKey) + if (cached) { + return cached + } + + const loose = this.options.loose + // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` + const hr = loose ? re[t.HYPHENRANGELOOSE] : re[t.HYPHENRANGE] + range = range.replace(hr, hyphenReplace(this.options.includePrerelease)) + debug('hyphen replace', range) + // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` + range = range.replace(re[t.COMPARATORTRIM], comparatorTrimReplace) + debug('comparator trim', range) + + // `~ 1.2.3` => `~1.2.3` + range = range.replace(re[t.TILDETRIM], tildeTrimReplace) + + // `^ 1.2.3` => `^1.2.3` + range = range.replace(re[t.CARETTRIM], caretTrimReplace) + + // normalize spaces + range = range.split(/\s+/).join(' ') + + // At this point, the range is completely trimmed and + // ready to be split into comparators. 
+ + let rangeList = range + .split(' ') + .map(comp => parseComparator(comp, this.options)) + .join(' ') + .split(/\s+/) + // >=0.0.0 is equivalent to * + .map(comp => replaceGTE0(comp, this.options)) + + if (loose) { + // in loose mode, throw out any that are not valid comparators + rangeList = rangeList.filter(comp => { + debug('loose invalid filter', comp, this.options) + return !!comp.match(re[t.COMPARATORLOOSE]) + }) + } + debug('range list', rangeList) + + // if any comparators are the null set, then replace with JUST null set + // if more than one comparator, remove any * comparators + // also, don't include the same comparator more than once + const rangeMap = new Map() + const comparators = rangeList.map(comp => new Comparator(comp, this.options)) + for (const comp of comparators) { + if (isNullSet(comp)) { + return [comp] + } + rangeMap.set(comp.value, comp) + } + if (rangeMap.size > 1 && rangeMap.has('')) { + rangeMap.delete('') + } + + const result = [...rangeMap.values()] + cache.set(memoKey, result) + return result + } + + intersects (range, options) { + if (!(range instanceof Range)) { + throw new TypeError('a Range is required') + } + + return this.set.some((thisComparators) => { + return ( + isSatisfiable(thisComparators, options) && + range.set.some((rangeComparators) => { + return ( + isSatisfiable(rangeComparators, options) && + thisComparators.every((thisComparator) => { + return rangeComparators.every((rangeComparator) => { + return thisComparator.intersects(rangeComparator, options) + }) + }) + ) + }) + ) + }) + } + + // if ANY of the sets match ALL of its comparators, then pass + test (version) { + if (!version) { + return false + } + + if (typeof version === 'string') { + try { + version = new SemVer(version, this.options) + } catch (er) { + return false + } + } + + for (let i = 0; i < this.set.length; i++) { + if (testSet(this.set[i], version, this.options)) { + return true + } + } + return false + } +} +module.exports = Range + +const LRU = require('lru-cache') +const cache = new LRU({ max: 1000 }) + +const parseOptions = require('../internal/parse-options') +const Comparator = require('./comparator') +const debug = require('../internal/debug') +const SemVer = require('./semver') +const { + re, + t, + comparatorTrimReplace, + tildeTrimReplace, + caretTrimReplace, +} = require('../internal/re') + +const isNullSet = c => c.value === '<0.0.0-0' +const isAny = c => c.value === '' + +// take a set of comparators and determine whether there +// exists a version which can satisfy it +const isSatisfiable = (comparators, options) => { + let result = true + const remainingComparators = comparators.slice() + let testComparator = remainingComparators.pop() + + while (result && remainingComparators.length) { + result = remainingComparators.every((otherComparator) => { + return testComparator.intersects(otherComparator, options) + }) + + testComparator = remainingComparators.pop() + } + + return result +} + +// comprised of xranges, tildes, stars, and gtlt's at this point. +// already replaced the hyphen ranges +// turn into a set of JUST comparators. 
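+
+// Worked example (editorial illustration, derived from the rewrite rules
+// below):
+//   new Range('~1.2.3 || ^0.2.0').range
+//   // => '>=1.2.3 <1.3.0-0||>=0.2.0 <0.3.0-0'
+// i.e. every shorthand comparator is desugared into plain >=/< comparators.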
+const parseComparator = (comp, options) => { + debug('comp', comp, options) + comp = replaceCarets(comp, options) + debug('caret', comp) + comp = replaceTildes(comp, options) + debug('tildes', comp) + comp = replaceXRanges(comp, options) + debug('xrange', comp) + comp = replaceStars(comp, options) + debug('stars', comp) + return comp +} + +const isX = id => !id || id.toLowerCase() === 'x' || id === '*' + +// ~, ~> --> * (any, kinda silly) +// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0-0 +// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0-0 +// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0-0 +// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0-0 +// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0-0 +// ~0.0.1 --> >=0.0.1 <0.1.0-0 +const replaceTildes = (comp, options) => + comp.trim().split(/\s+/).map((c) => { + return replaceTilde(c, options) + }).join(' ') + +const replaceTilde = (comp, options) => { + const r = options.loose ? re[t.TILDELOOSE] : re[t.TILDE] + return comp.replace(r, (_, M, m, p, pr) => { + debug('tilde', comp, _, M, m, p, pr) + let ret + + if (isX(M)) { + ret = '' + } else if (isX(m)) { + ret = `>=${M}.0.0 <${+M + 1}.0.0-0` + } else if (isX(p)) { + // ~1.2 == >=1.2.0 <1.3.0-0 + ret = `>=${M}.${m}.0 <${M}.${+m + 1}.0-0` + } else if (pr) { + debug('replaceTilde pr', pr) + ret = `>=${M}.${m}.${p}-${pr + } <${M}.${+m + 1}.0-0` + } else { + // ~1.2.3 == >=1.2.3 <1.3.0-0 + ret = `>=${M}.${m}.${p + } <${M}.${+m + 1}.0-0` + } + + debug('tilde return', ret) + return ret + }) +} + +// ^ --> * (any, kinda silly) +// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0-0 +// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0-0 +// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0-0 +// ^1.2.3 --> >=1.2.3 <2.0.0-0 +// ^1.2.0 --> >=1.2.0 <2.0.0-0 +// ^0.0.1 --> >=0.0.1 <0.0.2-0 +// ^0.1.0 --> >=0.1.0 <0.2.0-0 +const replaceCarets = (comp, options) => + comp.trim().split(/\s+/).map((c) => { + return replaceCaret(c, options) + }).join(' ') + +const replaceCaret = (comp, options) => { + debug('caret', comp, options) + const r = options.loose ? re[t.CARETLOOSE] : re[t.CARET] + const z = options.includePrerelease ? '-0' : '' + return comp.replace(r, (_, M, m, p, pr) => { + debug('caret', comp, _, M, m, p, pr) + let ret + + if (isX(M)) { + ret = '' + } else if (isX(m)) { + ret = `>=${M}.0.0${z} <${+M + 1}.0.0-0` + } else if (isX(p)) { + if (M === '0') { + ret = `>=${M}.${m}.0${z} <${M}.${+m + 1}.0-0` + } else { + ret = `>=${M}.${m}.0${z} <${+M + 1}.0.0-0` + } + } else if (pr) { + debug('replaceCaret pr', pr) + if (M === '0') { + if (m === '0') { + ret = `>=${M}.${m}.${p}-${pr + } <${M}.${m}.${+p + 1}-0` + } else { + ret = `>=${M}.${m}.${p}-${pr + } <${M}.${+m + 1}.0-0` + } + } else { + ret = `>=${M}.${m}.${p}-${pr + } <${+M + 1}.0.0-0` + } + } else { + debug('no pr') + if (M === '0') { + if (m === '0') { + ret = `>=${M}.${m}.${p + }${z} <${M}.${m}.${+p + 1}-0` + } else { + ret = `>=${M}.${m}.${p + }${z} <${M}.${+m + 1}.0-0` + } + } else { + ret = `>=${M}.${m}.${p + } <${+M + 1}.0.0-0` + } + } + + debug('caret return', ret) + return ret + }) +} + +const replaceXRanges = (comp, options) => { + debug('replaceXRanges', comp, options) + return comp.split(/\s+/).map((c) => { + return replaceXRange(c, options) + }).join(' ') +} + +const replaceXRange = (comp, options) => { + comp = comp.trim() + const r = options.loose ? 
re[t.XRANGELOOSE] : re[t.XRANGE] + return comp.replace(r, (ret, gtlt, M, m, p, pr) => { + debug('xRange', comp, ret, gtlt, M, m, p, pr) + const xM = isX(M) + const xm = xM || isX(m) + const xp = xm || isX(p) + const anyX = xp + + if (gtlt === '=' && anyX) { + gtlt = '' + } + + // if we're including prereleases in the match, then we need + // to fix this to -0, the lowest possible prerelease value + pr = options.includePrerelease ? '-0' : '' + + if (xM) { + if (gtlt === '>' || gtlt === '<') { + // nothing is allowed + ret = '<0.0.0-0' + } else { + // nothing is forbidden + ret = '*' + } + } else if (gtlt && anyX) { + // we know patch is an x, because we have any x at all. + // replace X with 0 + if (xm) { + m = 0 + } + p = 0 + + if (gtlt === '>') { + // >1 => >=2.0.0 + // >1.2 => >=1.3.0 + gtlt = '>=' + if (xm) { + M = +M + 1 + m = 0 + p = 0 + } else { + m = +m + 1 + p = 0 + } + } else if (gtlt === '<=') { + // <=0.7.x is actually <0.8.0, since any 0.7.x should + // pass. Similarly, <=7.x is actually <8.0.0, etc. + gtlt = '<' + if (xm) { + M = +M + 1 + } else { + m = +m + 1 + } + } + + if (gtlt === '<') { + pr = '-0' + } + + ret = `${gtlt + M}.${m}.${p}${pr}` + } else if (xm) { + ret = `>=${M}.0.0${pr} <${+M + 1}.0.0-0` + } else if (xp) { + ret = `>=${M}.${m}.0${pr + } <${M}.${+m + 1}.0-0` + } + + debug('xRange return', ret) + + return ret + }) +} + +// Because * is AND-ed with everything else in the comparator, +// and '' means "any version", just remove the *s entirely. +const replaceStars = (comp, options) => { + debug('replaceStars', comp, options) + // Looseness is ignored here. star is always as loose as it gets! + return comp.trim().replace(re[t.STAR], '') +} + +const replaceGTE0 = (comp, options) => { + debug('replaceGTE0', comp, options) + return comp.trim() + .replace(re[options.includePrerelease ? t.GTE0PRE : t.GTE0], '') +} + +// This function is passed to string.replace(re[t.HYPHENRANGE]) +// M, m, patch, prerelease, build +// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5 +// 1.2.3 - 3.4 => >=1.2.0 <3.5.0-0 Any 3.4.x will do +// 1.2 - 3.4 => >=1.2.0 <3.5.0-0 +const hyphenReplace = incPr => ($0, + from, fM, fm, fp, fpr, fb, + to, tM, tm, tp, tpr, tb) => { + if (isX(fM)) { + from = '' + } else if (isX(fm)) { + from = `>=${fM}.0.0${incPr ? '-0' : ''}` + } else if (isX(fp)) { + from = `>=${fM}.${fm}.0${incPr ? '-0' : ''}` + } else if (fpr) { + from = `>=${from}` + } else { + from = `>=${from}${incPr ? '-0' : ''}` + } + + if (isX(tM)) { + to = '' + } else if (isX(tm)) { + to = `<${+tM + 1}.0.0-0` + } else if (isX(tp)) { + to = `<${tM}.${+tm + 1}.0-0` + } else if (tpr) { + to = `<=${tM}.${tm}.${tp}-${tpr}` + } else if (incPr) { + to = `<${tM}.${tm}.${+tp + 1}-0` + } else { + to = `<=${to}` + } + + return (`${from} ${to}`).trim() +} + +const testSet = (set, version, options) => { + for (let i = 0; i < set.length; i++) { + if (!set[i].test(version)) { + return false + } + } + + if (version.prerelease.length && !options.includePrerelease) { + // Find the set of versions that are allowed to have prereleases + // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0 + // That should allow `1.2.3-pr.2` to pass. + // However, `1.2.4-alpha.notready` should NOT be allowed, + // even though it's within the range set by the comparators. 
+ for (let i = 0; i < set.length; i++) { + debug(set[i].semver) + if (set[i].semver === Comparator.ANY) { + continue + } + + if (set[i].semver.prerelease.length > 0) { + const allowed = set[i].semver + if (allowed.major === version.major && + allowed.minor === version.minor && + allowed.patch === version.patch) { + return true + } + } + } + + // Version has a -pre, but it's not one of the ones we like. + return false + } + + return true +} diff --git a/node_modules/semver/classes/semver.js b/node_modules/semver/classes/semver.js new file mode 100644 index 0000000..af62955 --- /dev/null +++ b/node_modules/semver/classes/semver.js @@ -0,0 +1,287 @@ +const debug = require('../internal/debug') +const { MAX_LENGTH, MAX_SAFE_INTEGER } = require('../internal/constants') +const { re, t } = require('../internal/re') + +const parseOptions = require('../internal/parse-options') +const { compareIdentifiers } = require('../internal/identifiers') +class SemVer { + constructor (version, options) { + options = parseOptions(options) + + if (version instanceof SemVer) { + if (version.loose === !!options.loose && + version.includePrerelease === !!options.includePrerelease) { + return version + } else { + version = version.version + } + } else if (typeof version !== 'string') { + throw new TypeError(`Invalid Version: ${version}`) + } + + if (version.length > MAX_LENGTH) { + throw new TypeError( + `version is longer than ${MAX_LENGTH} characters` + ) + } + + debug('SemVer', version, options) + this.options = options + this.loose = !!options.loose + // this isn't actually relevant for versions, but keep it so that we + // don't run into trouble passing this.options around. + this.includePrerelease = !!options.includePrerelease + + const m = version.trim().match(options.loose ? re[t.LOOSE] : re[t.FULL]) + + if (!m) { + throw new TypeError(`Invalid Version: ${version}`) + } + + this.raw = version + + // these are actually numbers + this.major = +m[1] + this.minor = +m[2] + this.patch = +m[3] + + if (this.major > MAX_SAFE_INTEGER || this.major < 0) { + throw new TypeError('Invalid major version') + } + + if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) { + throw new TypeError('Invalid minor version') + } + + if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) { + throw new TypeError('Invalid patch version') + } + + // numberify any prerelease numeric ids + if (!m[4]) { + this.prerelease = [] + } else { + this.prerelease = m[4].split('.').map((id) => { + if (/^[0-9]+$/.test(id)) { + const num = +id + if (num >= 0 && num < MAX_SAFE_INTEGER) { + return num + } + } + return id + }) + } + + this.build = m[5] ? 
m[5].split('.') : [] + this.format() + } + + format () { + this.version = `${this.major}.${this.minor}.${this.patch}` + if (this.prerelease.length) { + this.version += `-${this.prerelease.join('.')}` + } + return this.version + } + + toString () { + return this.version + } + + compare (other) { + debug('SemVer.compare', this.version, this.options, other) + if (!(other instanceof SemVer)) { + if (typeof other === 'string' && other === this.version) { + return 0 + } + other = new SemVer(other, this.options) + } + + if (other.version === this.version) { + return 0 + } + + return this.compareMain(other) || this.comparePre(other) + } + + compareMain (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + return ( + compareIdentifiers(this.major, other.major) || + compareIdentifiers(this.minor, other.minor) || + compareIdentifiers(this.patch, other.patch) + ) + } + + comparePre (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + // NOT having a prerelease is > having one + if (this.prerelease.length && !other.prerelease.length) { + return -1 + } else if (!this.prerelease.length && other.prerelease.length) { + return 1 + } else if (!this.prerelease.length && !other.prerelease.length) { + return 0 + } + + let i = 0 + do { + const a = this.prerelease[i] + const b = other.prerelease[i] + debug('prerelease compare', i, a, b) + if (a === undefined && b === undefined) { + return 0 + } else if (b === undefined) { + return 1 + } else if (a === undefined) { + return -1 + } else if (a === b) { + continue + } else { + return compareIdentifiers(a, b) + } + } while (++i) + } + + compareBuild (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + let i = 0 + do { + const a = this.build[i] + const b = other.build[i] + debug('prerelease compare', i, a, b) + if (a === undefined && b === undefined) { + return 0 + } else if (b === undefined) { + return 1 + } else if (a === undefined) { + return -1 + } else if (a === b) { + continue + } else { + return compareIdentifiers(a, b) + } + } while (++i) + } + + // preminor will bump the version up to the next minor release, and immediately + // down to pre-release. premajor and prepatch work the same way. + inc (release, identifier) { + switch (release) { + case 'premajor': + this.prerelease.length = 0 + this.patch = 0 + this.minor = 0 + this.major++ + this.inc('pre', identifier) + break + case 'preminor': + this.prerelease.length = 0 + this.patch = 0 + this.minor++ + this.inc('pre', identifier) + break + case 'prepatch': + // If this is already a prerelease, it will bump to the next version + // drop any prereleases that might already exist, since they are not + // relevant at this point. + this.prerelease.length = 0 + this.inc('patch', identifier) + this.inc('pre', identifier) + break + // If the input is a non-prerelease version, this acts the same as + // prepatch. + case 'prerelease': + if (this.prerelease.length === 0) { + this.inc('patch', identifier) + } + this.inc('pre', identifier) + break + + case 'major': + // If this is a pre-major version, bump up to the same major version. + // Otherwise increment major. + // 1.0.0-5 bumps to 1.0.0 + // 1.1.0 bumps to 2.0.0 + if ( + this.minor !== 0 || + this.patch !== 0 || + this.prerelease.length === 0 + ) { + this.major++ + } + this.minor = 0 + this.patch = 0 + this.prerelease = [] + break + case 'minor': + // If this is a pre-minor version, bump up to the same minor version. 
+ // Otherwise increment minor. + // 1.2.0-5 bumps to 1.2.0 + // 1.2.1 bumps to 1.3.0 + if (this.patch !== 0 || this.prerelease.length === 0) { + this.minor++ + } + this.patch = 0 + this.prerelease = [] + break + case 'patch': + // If this is not a pre-release version, it will increment the patch. + // If it is a pre-release it will bump up to the same patch version. + // 1.2.0-5 patches to 1.2.0 + // 1.2.0 patches to 1.2.1 + if (this.prerelease.length === 0) { + this.patch++ + } + this.prerelease = [] + break + // This probably shouldn't be used publicly. + // 1.0.0 'pre' would become 1.0.0-0 which is the wrong direction. + case 'pre': + if (this.prerelease.length === 0) { + this.prerelease = [0] + } else { + let i = this.prerelease.length + while (--i >= 0) { + if (typeof this.prerelease[i] === 'number') { + this.prerelease[i]++ + i = -2 + } + } + if (i === -1) { + // didn't increment anything + this.prerelease.push(0) + } + } + if (identifier) { + // 1.2.0-beta.1 bumps to 1.2.0-beta.2, + // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0 + if (compareIdentifiers(this.prerelease[0], identifier) === 0) { + if (isNaN(this.prerelease[1])) { + this.prerelease = [identifier, 0] + } + } else { + this.prerelease = [identifier, 0] + } + } + break + + default: + throw new Error(`invalid increment argument: ${release}`) + } + this.format() + this.raw = this.version + return this + } +} + +module.exports = SemVer diff --git a/node_modules/semver/functions/clean.js b/node_modules/semver/functions/clean.js new file mode 100644 index 0000000..811fe6b --- /dev/null +++ b/node_modules/semver/functions/clean.js @@ -0,0 +1,6 @@ +const parse = require('./parse') +const clean = (version, options) => { + const s = parse(version.trim().replace(/^[=v]+/, ''), options) + return s ? 
s.version : null +} +module.exports = clean diff --git a/node_modules/semver/functions/cmp.js b/node_modules/semver/functions/cmp.js new file mode 100644 index 0000000..4011909 --- /dev/null +++ b/node_modules/semver/functions/cmp.js @@ -0,0 +1,52 @@ +const eq = require('./eq') +const neq = require('./neq') +const gt = require('./gt') +const gte = require('./gte') +const lt = require('./lt') +const lte = require('./lte') + +const cmp = (a, op, b, loose) => { + switch (op) { + case '===': + if (typeof a === 'object') { + a = a.version + } + if (typeof b === 'object') { + b = b.version + } + return a === b + + case '!==': + if (typeof a === 'object') { + a = a.version + } + if (typeof b === 'object') { + b = b.version + } + return a !== b + + case '': + case '=': + case '==': + return eq(a, b, loose) + + case '!=': + return neq(a, b, loose) + + case '>': + return gt(a, b, loose) + + case '>=': + return gte(a, b, loose) + + case '<': + return lt(a, b, loose) + + case '<=': + return lte(a, b, loose) + + default: + throw new TypeError(`Invalid operator: ${op}`) + } +} +module.exports = cmp diff --git a/node_modules/semver/functions/coerce.js b/node_modules/semver/functions/coerce.js new file mode 100644 index 0000000..2e01452 --- /dev/null +++ b/node_modules/semver/functions/coerce.js @@ -0,0 +1,52 @@ +const SemVer = require('../classes/semver') +const parse = require('./parse') +const { re, t } = require('../internal/re') + +const coerce = (version, options) => { + if (version instanceof SemVer) { + return version + } + + if (typeof version === 'number') { + version = String(version) + } + + if (typeof version !== 'string') { + return null + } + + options = options || {} + + let match = null + if (!options.rtl) { + match = version.match(re[t.COERCE]) + } else { + // Find the right-most coercible string that does not share + // a terminus with a more left-ward coercible string. + // Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4' + // + // Walk through the string checking with a /g regexp + // Manually set the index so as to pick up overlapping matches. + // Stop when we get a match that ends at the string end, since no + // coercible string can be more right-ward without the same terminus. 
+ let next + while ((next = re[t.COERCERTL].exec(version)) && + (!match || match.index + match[0].length !== version.length) + ) { + if (!match || + next.index + next[0].length !== match.index + match[0].length) { + match = next + } + re[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length + } + // leave it in a clean state + re[t.COERCERTL].lastIndex = -1 + } + + if (match === null) { + return null + } + + return parse(`${match[2]}.${match[3] || '0'}.${match[4] || '0'}`, options) +} +module.exports = coerce diff --git a/node_modules/semver/functions/compare-build.js b/node_modules/semver/functions/compare-build.js new file mode 100644 index 0000000..9eb881b --- /dev/null +++ b/node_modules/semver/functions/compare-build.js @@ -0,0 +1,7 @@ +const SemVer = require('../classes/semver') +const compareBuild = (a, b, loose) => { + const versionA = new SemVer(a, loose) + const versionB = new SemVer(b, loose) + return versionA.compare(versionB) || versionA.compareBuild(versionB) +} +module.exports = compareBuild diff --git a/node_modules/semver/functions/compare-loose.js b/node_modules/semver/functions/compare-loose.js new file mode 100644 index 0000000..4881fbe --- /dev/null +++ b/node_modules/semver/functions/compare-loose.js @@ -0,0 +1,3 @@ +const compare = require('./compare') +const compareLoose = (a, b) => compare(a, b, true) +module.exports = compareLoose diff --git a/node_modules/semver/functions/compare.js b/node_modules/semver/functions/compare.js new file mode 100644 index 0000000..748b7af --- /dev/null +++ b/node_modules/semver/functions/compare.js @@ -0,0 +1,5 @@ +const SemVer = require('../classes/semver') +const compare = (a, b, loose) => + new SemVer(a, loose).compare(new SemVer(b, loose)) + +module.exports = compare diff --git a/node_modules/semver/functions/diff.js b/node_modules/semver/functions/diff.js new file mode 100644 index 0000000..87200ef --- /dev/null +++ b/node_modules/semver/functions/diff.js @@ -0,0 +1,23 @@ +const parse = require('./parse') +const eq = require('./eq') + +const diff = (version1, version2) => { + if (eq(version1, version2)) { + return null + } else { + const v1 = parse(version1) + const v2 = parse(version2) + const hasPre = v1.prerelease.length || v2.prerelease.length + const prefix = hasPre ? 'pre' : '' + const defaultResult = hasPre ? 
'prerelease' : '' + for (const key in v1) { + if (key === 'major' || key === 'minor' || key === 'patch') { + if (v1[key] !== v2[key]) { + return prefix + key + } + } + } + return defaultResult // may be undefined + } +} +module.exports = diff diff --git a/node_modules/semver/functions/eq.js b/node_modules/semver/functions/eq.js new file mode 100644 index 0000000..271fed9 --- /dev/null +++ b/node_modules/semver/functions/eq.js @@ -0,0 +1,3 @@ +const compare = require('./compare') +const eq = (a, b, loose) => compare(a, b, loose) === 0 +module.exports = eq diff --git a/node_modules/semver/functions/gt.js b/node_modules/semver/functions/gt.js new file mode 100644 index 0000000..d9b2156 --- /dev/null +++ b/node_modules/semver/functions/gt.js @@ -0,0 +1,3 @@ +const compare = require('./compare') +const gt = (a, b, loose) => compare(a, b, loose) > 0 +module.exports = gt diff --git a/node_modules/semver/functions/gte.js b/node_modules/semver/functions/gte.js new file mode 100644 index 0000000..5aeaa63 --- /dev/null +++ b/node_modules/semver/functions/gte.js @@ -0,0 +1,3 @@ +const compare = require('./compare') +const gte = (a, b, loose) => compare(a, b, loose) >= 0 +module.exports = gte diff --git a/node_modules/semver/functions/inc.js b/node_modules/semver/functions/inc.js new file mode 100644 index 0000000..62d1da2 --- /dev/null +++ b/node_modules/semver/functions/inc.js @@ -0,0 +1,18 @@ +const SemVer = require('../classes/semver') + +const inc = (version, release, options, identifier) => { + if (typeof (options) === 'string') { + identifier = options + options = undefined + } + + try { + return new SemVer( + version instanceof SemVer ? version.version : version, + options + ).inc(release, identifier).version + } catch (er) { + return null + } +} +module.exports = inc diff --git a/node_modules/semver/functions/lt.js b/node_modules/semver/functions/lt.js new file mode 100644 index 0000000..b440ab7 --- /dev/null +++ b/node_modules/semver/functions/lt.js @@ -0,0 +1,3 @@ +const compare = require('./compare') +const lt = (a, b, loose) => compare(a, b, loose) < 0 +module.exports = lt diff --git a/node_modules/semver/functions/lte.js b/node_modules/semver/functions/lte.js new file mode 100644 index 0000000..6dcc956 --- /dev/null +++ b/node_modules/semver/functions/lte.js @@ -0,0 +1,3 @@ +const compare = require('./compare') +const lte = (a, b, loose) => compare(a, b, loose) <= 0 +module.exports = lte diff --git a/node_modules/semver/functions/major.js b/node_modules/semver/functions/major.js new file mode 100644 index 0000000..4283165 --- /dev/null +++ b/node_modules/semver/functions/major.js @@ -0,0 +1,3 @@ +const SemVer = require('../classes/semver') +const major = (a, loose) => new SemVer(a, loose).major +module.exports = major diff --git a/node_modules/semver/functions/minor.js b/node_modules/semver/functions/minor.js new file mode 100644 index 0000000..57b3455 --- /dev/null +++ b/node_modules/semver/functions/minor.js @@ -0,0 +1,3 @@ +const SemVer = require('../classes/semver') +const minor = (a, loose) => new SemVer(a, loose).minor +module.exports = minor diff --git a/node_modules/semver/functions/neq.js b/node_modules/semver/functions/neq.js new file mode 100644 index 0000000..f944c01 --- /dev/null +++ b/node_modules/semver/functions/neq.js @@ -0,0 +1,3 @@ +const compare = require('./compare') +const neq = (a, b, loose) => compare(a, b, loose) !== 0 +module.exports = neq diff --git a/node_modules/semver/functions/parse.js b/node_modules/semver/functions/parse.js new file mode 100644 index 
0000000..a66663a --- /dev/null +++ b/node_modules/semver/functions/parse.js @@ -0,0 +1,33 @@ +const { MAX_LENGTH } = require('../internal/constants') +const { re, t } = require('../internal/re') +const SemVer = require('../classes/semver') + +const parseOptions = require('../internal/parse-options') +const parse = (version, options) => { + options = parseOptions(options) + + if (version instanceof SemVer) { + return version + } + + if (typeof version !== 'string') { + return null + } + + if (version.length > MAX_LENGTH) { + return null + } + + const r = options.loose ? re[t.LOOSE] : re[t.FULL] + if (!r.test(version)) { + return null + } + + try { + return new SemVer(version, options) + } catch (er) { + return null + } +} + +module.exports = parse diff --git a/node_modules/semver/functions/patch.js b/node_modules/semver/functions/patch.js new file mode 100644 index 0000000..63afca2 --- /dev/null +++ b/node_modules/semver/functions/patch.js @@ -0,0 +1,3 @@ +const SemVer = require('../classes/semver') +const patch = (a, loose) => new SemVer(a, loose).patch +module.exports = patch diff --git a/node_modules/semver/functions/prerelease.js b/node_modules/semver/functions/prerelease.js new file mode 100644 index 0000000..06aa132 --- /dev/null +++ b/node_modules/semver/functions/prerelease.js @@ -0,0 +1,6 @@ +const parse = require('./parse') +const prerelease = (version, options) => { + const parsed = parse(version, options) + return (parsed && parsed.prerelease.length) ? parsed.prerelease : null +} +module.exports = prerelease diff --git a/node_modules/semver/functions/rcompare.js b/node_modules/semver/functions/rcompare.js new file mode 100644 index 0000000..0ac509e --- /dev/null +++ b/node_modules/semver/functions/rcompare.js @@ -0,0 +1,3 @@ +const compare = require('./compare') +const rcompare = (a, b, loose) => compare(b, a, loose) +module.exports = rcompare diff --git a/node_modules/semver/functions/rsort.js b/node_modules/semver/functions/rsort.js new file mode 100644 index 0000000..82404c5 --- /dev/null +++ b/node_modules/semver/functions/rsort.js @@ -0,0 +1,3 @@ +const compareBuild = require('./compare-build') +const rsort = (list, loose) => list.sort((a, b) => compareBuild(b, a, loose)) +module.exports = rsort diff --git a/node_modules/semver/functions/satisfies.js b/node_modules/semver/functions/satisfies.js new file mode 100644 index 0000000..50af1c1 --- /dev/null +++ b/node_modules/semver/functions/satisfies.js @@ -0,0 +1,10 @@ +const Range = require('../classes/range') +const satisfies = (version, range, options) => { + try { + range = new Range(range, options) + } catch (er) { + return false + } + return range.test(version) +} +module.exports = satisfies diff --git a/node_modules/semver/functions/sort.js b/node_modules/semver/functions/sort.js new file mode 100644 index 0000000..4d10917 --- /dev/null +++ b/node_modules/semver/functions/sort.js @@ -0,0 +1,3 @@ +const compareBuild = require('./compare-build') +const sort = (list, loose) => list.sort((a, b) => compareBuild(a, b, loose)) +module.exports = sort diff --git a/node_modules/semver/functions/valid.js b/node_modules/semver/functions/valid.js new file mode 100644 index 0000000..f27bae1 --- /dev/null +++ b/node_modules/semver/functions/valid.js @@ -0,0 +1,6 @@ +const parse = require('./parse') +const valid = (version, options) => { + const v = parse(version, options) + return v ? 
v.version : null +} +module.exports = valid diff --git a/node_modules/semver/index.js b/node_modules/semver/index.js new file mode 100644 index 0000000..4a342c6 --- /dev/null +++ b/node_modules/semver/index.js @@ -0,0 +1,88 @@ +// just pre-load all the stuff that index.js lazily exports +const internalRe = require('./internal/re') +const constants = require('./internal/constants') +const SemVer = require('./classes/semver') +const identifiers = require('./internal/identifiers') +const parse = require('./functions/parse') +const valid = require('./functions/valid') +const clean = require('./functions/clean') +const inc = require('./functions/inc') +const diff = require('./functions/diff') +const major = require('./functions/major') +const minor = require('./functions/minor') +const patch = require('./functions/patch') +const prerelease = require('./functions/prerelease') +const compare = require('./functions/compare') +const rcompare = require('./functions/rcompare') +const compareLoose = require('./functions/compare-loose') +const compareBuild = require('./functions/compare-build') +const sort = require('./functions/sort') +const rsort = require('./functions/rsort') +const gt = require('./functions/gt') +const lt = require('./functions/lt') +const eq = require('./functions/eq') +const neq = require('./functions/neq') +const gte = require('./functions/gte') +const lte = require('./functions/lte') +const cmp = require('./functions/cmp') +const coerce = require('./functions/coerce') +const Comparator = require('./classes/comparator') +const Range = require('./classes/range') +const satisfies = require('./functions/satisfies') +const toComparators = require('./ranges/to-comparators') +const maxSatisfying = require('./ranges/max-satisfying') +const minSatisfying = require('./ranges/min-satisfying') +const minVersion = require('./ranges/min-version') +const validRange = require('./ranges/valid') +const outside = require('./ranges/outside') +const gtr = require('./ranges/gtr') +const ltr = require('./ranges/ltr') +const intersects = require('./ranges/intersects') +const simplifyRange = require('./ranges/simplify') +const subset = require('./ranges/subset') +module.exports = { + parse, + valid, + clean, + inc, + diff, + major, + minor, + patch, + prerelease, + compare, + rcompare, + compareLoose, + compareBuild, + sort, + rsort, + gt, + lt, + eq, + neq, + gte, + lte, + cmp, + coerce, + Comparator, + Range, + satisfies, + toComparators, + maxSatisfying, + minSatisfying, + minVersion, + validRange, + outside, + gtr, + ltr, + intersects, + simplifyRange, + subset, + SemVer, + re: internalRe.re, + src: internalRe.src, + tokens: internalRe.t, + SEMVER_SPEC_VERSION: constants.SEMVER_SPEC_VERSION, + compareIdentifiers: identifiers.compareIdentifiers, + rcompareIdentifiers: identifiers.rcompareIdentifiers, +} diff --git a/node_modules/semver/internal/constants.js b/node_modules/semver/internal/constants.js new file mode 100644 index 0000000..4f0de59 --- /dev/null +++ b/node_modules/semver/internal/constants.js @@ -0,0 +1,17 @@ +// Note: this is the semver.org version of the spec that it implements +// Not necessarily the package version of this code. +const SEMVER_SPEC_VERSION = '2.0.0' + +const MAX_LENGTH = 256 +const MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || +/* istanbul ignore next */ 9007199254740991 + +// Max safe segment length for coercion. 
+const MAX_SAFE_COMPONENT_LENGTH = 16 + +module.exports = { + SEMVER_SPEC_VERSION, + MAX_LENGTH, + MAX_SAFE_INTEGER, + MAX_SAFE_COMPONENT_LENGTH, +} diff --git a/node_modules/semver/internal/debug.js b/node_modules/semver/internal/debug.js new file mode 100644 index 0000000..1c00e13 --- /dev/null +++ b/node_modules/semver/internal/debug.js @@ -0,0 +1,9 @@ +const debug = ( + typeof process === 'object' && + process.env && + process.env.NODE_DEBUG && + /\bsemver\b/i.test(process.env.NODE_DEBUG) +) ? (...args) => console.error('SEMVER', ...args) + : () => {} + +module.exports = debug diff --git a/node_modules/semver/internal/identifiers.js b/node_modules/semver/internal/identifiers.js new file mode 100644 index 0000000..e612d0a --- /dev/null +++ b/node_modules/semver/internal/identifiers.js @@ -0,0 +1,23 @@ +const numeric = /^[0-9]+$/ +const compareIdentifiers = (a, b) => { + const anum = numeric.test(a) + const bnum = numeric.test(b) + + if (anum && bnum) { + a = +a + b = +b + } + + return a === b ? 0 + : (anum && !bnum) ? -1 + : (bnum && !anum) ? 1 + : a < b ? -1 + : 1 +} + +const rcompareIdentifiers = (a, b) => compareIdentifiers(b, a) + +module.exports = { + compareIdentifiers, + rcompareIdentifiers, +} diff --git a/node_modules/semver/internal/parse-options.js b/node_modules/semver/internal/parse-options.js new file mode 100644 index 0000000..bbd9ec7 --- /dev/null +++ b/node_modules/semver/internal/parse-options.js @@ -0,0 +1,11 @@ +// parse out just the options we care about so we always get a consistent +// obj with keys in a consistent order. +const opts = ['includePrerelease', 'loose', 'rtl'] +const parseOptions = options => + !options ? {} + : typeof options !== 'object' ? { loose: true } + : opts.filter(k => options[k]).reduce((o, k) => { + o[k] = true + return o + }, {}) +module.exports = parseOptions diff --git a/node_modules/semver/internal/re.js b/node_modules/semver/internal/re.js new file mode 100644 index 0000000..ed88398 --- /dev/null +++ b/node_modules/semver/internal/re.js @@ -0,0 +1,182 @@ +const { MAX_SAFE_COMPONENT_LENGTH } = require('./constants') +const debug = require('./debug') +exports = module.exports = {} + +// The actual regexps go on exports.re +const re = exports.re = [] +const src = exports.src = [] +const t = exports.t = {} +let R = 0 + +const createToken = (name, value, isGlobal) => { + const index = R++ + debug(name, index, value) + t[name] = index + src[index] = value + re[index] = new RegExp(value, isGlobal ? 'g' : undefined) +} + +// The following Regular Expressions can be used for tokenizing, +// validating, and parsing SemVer version strings. + +// ## Numeric Identifier +// A single `0`, or a non-zero digit followed by zero or more digits. + +createToken('NUMERICIDENTIFIER', '0|[1-9]\\d*') +createToken('NUMERICIDENTIFIERLOOSE', '[0-9]+') + +// ## Non-numeric Identifier +// Zero or more digits, followed by a letter or hyphen, and then zero or +// more letters, digits, or hyphens. + +createToken('NONNUMERICIDENTIFIER', '\\d*[a-zA-Z-][a-zA-Z0-9-]*') + +// ## Main Version +// Three dot-separated numeric identifiers. + +createToken('MAINVERSION', `(${src[t.NUMERICIDENTIFIER]})\\.` + + `(${src[t.NUMERICIDENTIFIER]})\\.` + + `(${src[t.NUMERICIDENTIFIER]})`) + +createToken('MAINVERSIONLOOSE', `(${src[t.NUMERICIDENTIFIERLOOSE]})\\.` + + `(${src[t.NUMERICIDENTIFIERLOOSE]})\\.` + + `(${src[t.NUMERICIDENTIFIERLOOSE]})`) + +// ## Pre-release Version Identifier +// A numeric identifier, or a non-numeric identifier. 
+ +createToken('PRERELEASEIDENTIFIER', `(?:${src[t.NUMERICIDENTIFIER] +}|${src[t.NONNUMERICIDENTIFIER]})`) + +createToken('PRERELEASEIDENTIFIERLOOSE', `(?:${src[t.NUMERICIDENTIFIERLOOSE] +}|${src[t.NONNUMERICIDENTIFIER]})`) + +// ## Pre-release Version +// Hyphen, followed by one or more dot-separated pre-release version +// identifiers. + +createToken('PRERELEASE', `(?:-(${src[t.PRERELEASEIDENTIFIER] +}(?:\\.${src[t.PRERELEASEIDENTIFIER]})*))`) + +createToken('PRERELEASELOOSE', `(?:-?(${src[t.PRERELEASEIDENTIFIERLOOSE] +}(?:\\.${src[t.PRERELEASEIDENTIFIERLOOSE]})*))`) + +// ## Build Metadata Identifier +// Any combination of digits, letters, or hyphens. + +createToken('BUILDIDENTIFIER', '[0-9A-Za-z-]+') + +// ## Build Metadata +// Plus sign, followed by one or more period-separated build metadata +// identifiers. + +createToken('BUILD', `(?:\\+(${src[t.BUILDIDENTIFIER] +}(?:\\.${src[t.BUILDIDENTIFIER]})*))`) + +// ## Full Version String +// A main version, followed optionally by a pre-release version and +// build metadata. + +// Note that the only major, minor, patch, and pre-release sections of +// the version string are capturing groups. The build metadata is not a +// capturing group, because it should not ever be used in version +// comparison. + +createToken('FULLPLAIN', `v?${src[t.MAINVERSION] +}${src[t.PRERELEASE]}?${ + src[t.BUILD]}?`) + +createToken('FULL', `^${src[t.FULLPLAIN]}$`) + +// like full, but allows v1.2.3 and =1.2.3, which people do sometimes. +// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty +// common in the npm registry. +createToken('LOOSEPLAIN', `[v=\\s]*${src[t.MAINVERSIONLOOSE] +}${src[t.PRERELEASELOOSE]}?${ + src[t.BUILD]}?`) + +createToken('LOOSE', `^${src[t.LOOSEPLAIN]}$`) + +createToken('GTLT', '((?:<|>)?=?)') + +// Something like "2.*" or "1.2.x". +// Note that "x.x" is a valid xRange identifer, meaning "any version" +// Only the first item is strictly required. +createToken('XRANGEIDENTIFIERLOOSE', `${src[t.NUMERICIDENTIFIERLOOSE]}|x|X|\\*`) +createToken('XRANGEIDENTIFIER', `${src[t.NUMERICIDENTIFIER]}|x|X|\\*`) + +createToken('XRANGEPLAIN', `[v=\\s]*(${src[t.XRANGEIDENTIFIER]})` + + `(?:\\.(${src[t.XRANGEIDENTIFIER]})` + + `(?:\\.(${src[t.XRANGEIDENTIFIER]})` + + `(?:${src[t.PRERELEASE]})?${ + src[t.BUILD]}?` + + `)?)?`) + +createToken('XRANGEPLAINLOOSE', `[v=\\s]*(${src[t.XRANGEIDENTIFIERLOOSE]})` + + `(?:\\.(${src[t.XRANGEIDENTIFIERLOOSE]})` + + `(?:\\.(${src[t.XRANGEIDENTIFIERLOOSE]})` + + `(?:${src[t.PRERELEASELOOSE]})?${ + src[t.BUILD]}?` + + `)?)?`) + +createToken('XRANGE', `^${src[t.GTLT]}\\s*${src[t.XRANGEPLAIN]}$`) +createToken('XRANGELOOSE', `^${src[t.GTLT]}\\s*${src[t.XRANGEPLAINLOOSE]}$`) + +// Coercion. +// Extract anything that could conceivably be a part of a valid semver +createToken('COERCE', `${'(^|[^\\d])' + + '(\\d{1,'}${MAX_SAFE_COMPONENT_LENGTH}})` + + `(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?` + + `(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?` + + `(?:$|[^\\d])`) +createToken('COERCERTL', src[t.COERCE], true) + +// Tilde ranges. +// Meaning is "reasonably at or greater than" +createToken('LONETILDE', '(?:~>?)') + +createToken('TILDETRIM', `(\\s*)${src[t.LONETILDE]}\\s+`, true) +exports.tildeTrimReplace = '$1~' + +createToken('TILDE', `^${src[t.LONETILDE]}${src[t.XRANGEPLAIN]}$`) +createToken('TILDELOOSE', `^${src[t.LONETILDE]}${src[t.XRANGEPLAINLOOSE]}$`) + +// Caret ranges. 
+// Meaning is "at least and backwards compatible with" +createToken('LONECARET', '(?:\\^)') + +createToken('CARETTRIM', `(\\s*)${src[t.LONECARET]}\\s+`, true) +exports.caretTrimReplace = '$1^' + +createToken('CARET', `^${src[t.LONECARET]}${src[t.XRANGEPLAIN]}$`) +createToken('CARETLOOSE', `^${src[t.LONECARET]}${src[t.XRANGEPLAINLOOSE]}$`) + +// A simple gt/lt/eq thing, or just "" to indicate "any version" +createToken('COMPARATORLOOSE', `^${src[t.GTLT]}\\s*(${src[t.LOOSEPLAIN]})$|^$`) +createToken('COMPARATOR', `^${src[t.GTLT]}\\s*(${src[t.FULLPLAIN]})$|^$`) + +// An expression to strip any whitespace between the gtlt and the thing +// it modifies, so that `> 1.2.3` ==> `>1.2.3` +createToken('COMPARATORTRIM', `(\\s*)${src[t.GTLT] +}\\s*(${src[t.LOOSEPLAIN]}|${src[t.XRANGEPLAIN]})`, true) +exports.comparatorTrimReplace = '$1$2$3' + +// Something like `1.2.3 - 1.2.4` +// Note that these all use the loose form, because they'll be +// checked against either the strict or loose comparator form +// later. +createToken('HYPHENRANGE', `^\\s*(${src[t.XRANGEPLAIN]})` + + `\\s+-\\s+` + + `(${src[t.XRANGEPLAIN]})` + + `\\s*$`) + +createToken('HYPHENRANGELOOSE', `^\\s*(${src[t.XRANGEPLAINLOOSE]})` + + `\\s+-\\s+` + + `(${src[t.XRANGEPLAINLOOSE]})` + + `\\s*$`) + +// Star ranges basically just allow anything at all. +createToken('STAR', '(<|>)?=?\\s*\\*') +// >=0.0.0 is like a star +createToken('GTE0', '^\\s*>=\\s*0\\.0\\.0\\s*$') +createToken('GTE0PRE', '^\\s*>=\\s*0\\.0\\.0-0\\s*$') diff --git a/node_modules/semver/package.json b/node_modules/semver/package.json new file mode 100644 index 0000000..72d3f66 --- /dev/null +++ b/node_modules/semver/package.json @@ -0,0 +1,86 @@ +{ + "name": "semver", + "version": "7.3.8", + "description": "The semantic version parser used by npm.", + "main": "index.js", + "scripts": { + "test": "tap", + "snap": "tap", + "lint": "eslint \"**/*.js\"", + "postlint": "template-oss-check", + "lintfix": "npm run lint -- --fix", + "posttest": "npm run lint", + "template-oss-apply": "template-oss-apply --force" + }, + "devDependencies": { + "@npmcli/eslint-config": "^3.0.1", + "@npmcli/template-oss": "4.4.4", + "tap": "^16.0.0" + }, + "license": "ISC", + "repository": { + "type": "git", + "url": "https://github.com/npm/node-semver.git" + }, + "bin": { + "semver": "bin/semver.js" + }, + "files": [ + "bin/", + "lib/", + "classes/", + "functions/", + "internal/", + "ranges/", + "index.js", + "preload.js", + "range.bnf" + ], + "tap": { + "check-coverage": true, + "coverage-map": "map.js", + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "engines": { + "node": ">=10" + }, + "dependencies": { + "lru-cache": "^6.0.0" + }, + "author": "GitHub Inc.", + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", + "version": "4.4.4", + "engines": ">=10", + "content": "./scripts", + "ciVersions": [ + "10.0.0", + "10.x", + "12.x", + "14.x", + "16.x", + "18.x" + ], + "distPaths": [ + "classes/", + "functions/", + "internal/", + "ranges/", + "index.js", + "preload.js", + "range.bnf" + ], + "allowPaths": [ + "/classes/", + "/functions/", + "/internal/", + "/ranges/", + "/index.js", + "/preload.js", + "/range.bnf" + ] + } +} diff --git a/node_modules/semver/preload.js b/node_modules/semver/preload.js new file mode 100644 index 0000000..947cd4f --- /dev/null +++ b/node_modules/semver/preload.js @@ -0,0 +1,2 @@ +// XXX remove in v8 or beyond +module.exports = require('./index.js') diff --git a/node_modules/semver/range.bnf b/node_modules/semver/range.bnf new file mode 100644 index 0000000..d4c6ae0 --- /dev/null +++ b/node_modules/semver/range.bnf @@ -0,0 +1,16 @@ +range-set ::= range ( logical-or range ) * +logical-or ::= ( ' ' ) * '||' ( ' ' ) * +range ::= hyphen | simple ( ' ' simple ) * | '' +hyphen ::= partial ' - ' partial +simple ::= primitive | partial | tilde | caret +primitive ::= ( '<' | '>' | '>=' | '<=' | '=' ) partial +partial ::= xr ( '.' xr ( '.' xr qualifier ? )? )? +xr ::= 'x' | 'X' | '*' | nr +nr ::= '0' | [1-9] ( [0-9] ) * +tilde ::= '~' partial +caret ::= '^' partial +qualifier ::= ( '-' pre )? ( '+' build )? +pre ::= parts +build ::= parts +parts ::= part ( '.' part ) * +part ::= nr | [-0-9A-Za-z]+ diff --git a/node_modules/semver/ranges/gtr.js b/node_modules/semver/ranges/gtr.js new file mode 100644 index 0000000..db7e355 --- /dev/null +++ b/node_modules/semver/ranges/gtr.js @@ -0,0 +1,4 @@ +// Determine if version is greater than all the versions possible in the range. +const outside = require('./outside') +const gtr = (version, range, options) => outside(version, range, '>', options) +module.exports = gtr diff --git a/node_modules/semver/ranges/intersects.js b/node_modules/semver/ranges/intersects.js new file mode 100644 index 0000000..3d1a6f3 --- /dev/null +++ b/node_modules/semver/ranges/intersects.js @@ -0,0 +1,7 @@ +const Range = require('../classes/range') +const intersects = (r1, r2, options) => { + r1 = new Range(r1, options) + r2 = new Range(r2, options) + return r1.intersects(r2) +} +module.exports = intersects diff --git a/node_modules/semver/ranges/ltr.js b/node_modules/semver/ranges/ltr.js new file mode 100644 index 0000000..528a885 --- /dev/null +++ b/node_modules/semver/ranges/ltr.js @@ -0,0 +1,4 @@ +const outside = require('./outside') +// Determine if version is less than all the versions possible in the range +const ltr = (version, range, options) => outside(version, range, '<', options) +module.exports = ltr diff --git a/node_modules/semver/ranges/max-satisfying.js b/node_modules/semver/ranges/max-satisfying.js new file mode 100644 index 0000000..6e3d993 --- /dev/null +++ b/node_modules/semver/ranges/max-satisfying.js @@ -0,0 +1,25 @@ +const SemVer = require('../classes/semver') +const Range = require('../classes/range') + +const maxSatisfying = (versions, range, options) => { + let max = null + let maxSV = null + let rangeObj = null + try { + rangeObj = new Range(range, options) + } catch (er) { + return null + } + versions.forEach((v) => { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!max || maxSV.compare(v) === -1) { + // compare(max, v, true) + max = v + maxSV = new SemVer(max, options) + } + } + }) + return max +} +module.exports = maxSatisfying diff --git a/node_modules/semver/ranges/min-satisfying.js 
b/node_modules/semver/ranges/min-satisfying.js new file mode 100644 index 0000000..9b60974 --- /dev/null +++ b/node_modules/semver/ranges/min-satisfying.js @@ -0,0 +1,24 @@ +const SemVer = require('../classes/semver') +const Range = require('../classes/range') +const minSatisfying = (versions, range, options) => { + let min = null + let minSV = null + let rangeObj = null + try { + rangeObj = new Range(range, options) + } catch (er) { + return null + } + versions.forEach((v) => { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!min || minSV.compare(v) === 1) { + // compare(min, v, true) + min = v + minSV = new SemVer(min, options) + } + } + }) + return min +} +module.exports = minSatisfying diff --git a/node_modules/semver/ranges/min-version.js b/node_modules/semver/ranges/min-version.js new file mode 100644 index 0000000..350e1f7 --- /dev/null +++ b/node_modules/semver/ranges/min-version.js @@ -0,0 +1,61 @@ +const SemVer = require('../classes/semver') +const Range = require('../classes/range') +const gt = require('../functions/gt') + +const minVersion = (range, loose) => { + range = new Range(range, loose) + + let minver = new SemVer('0.0.0') + if (range.test(minver)) { + return minver + } + + minver = new SemVer('0.0.0-0') + if (range.test(minver)) { + return minver + } + + minver = null + for (let i = 0; i < range.set.length; ++i) { + const comparators = range.set[i] + + let setMin = null + comparators.forEach((comparator) => { + // Clone to avoid manipulating the comparator's semver object. + const compver = new SemVer(comparator.semver.version) + switch (comparator.operator) { + case '>': + if (compver.prerelease.length === 0) { + compver.patch++ + } else { + compver.prerelease.push(0) + } + compver.raw = compver.format() + /* fallthrough */ + case '': + case '>=': + if (!setMin || gt(compver, setMin)) { + setMin = compver + } + break + case '<': + case '<=': + /* Ignore maximum versions */ + break + /* istanbul ignore next */ + default: + throw new Error(`Unexpected operation: ${comparator.operator}`) + } + }) + if (setMin && (!minver || gt(minver, setMin))) { + minver = setMin + } + } + + if (minver && range.test(minver)) { + return minver + } + + return null +} +module.exports = minVersion diff --git a/node_modules/semver/ranges/outside.js b/node_modules/semver/ranges/outside.js new file mode 100644 index 0000000..ae99b10 --- /dev/null +++ b/node_modules/semver/ranges/outside.js @@ -0,0 +1,80 @@ +const SemVer = require('../classes/semver') +const Comparator = require('../classes/comparator') +const { ANY } = Comparator +const Range = require('../classes/range') +const satisfies = require('../functions/satisfies') +const gt = require('../functions/gt') +const lt = require('../functions/lt') +const lte = require('../functions/lte') +const gte = require('../functions/gte') + +const outside = (version, range, hilo, options) => { + version = new SemVer(version, options) + range = new Range(range, options) + + let gtfn, ltefn, ltfn, comp, ecomp + switch (hilo) { + case '>': + gtfn = gt + ltefn = lte + ltfn = lt + comp = '>' + ecomp = '>=' + break + case '<': + gtfn = lt + ltefn = gte + ltfn = gt + comp = '<' + ecomp = '<=' + break + default: + throw new TypeError('Must provide a hilo val of "<" or ">"') + } + + // If it satisfies the range it is not outside + if (satisfies(version, range, options)) { + return false + } + + // From now on, variable terms are as if we're in "gtr" mode. + // but note that everything is flipped for the "ltr" function. 
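+  // Editorial illustration (assumed values): with hilo '>' this loop makes
+  // gtr('2.0.0', '^1.2.3') true, since 2.0.0 sits above >=1.2.3 <2.0.0-0,
+  // while the flipped '<' path makes ltr('1.0.0', '^1.2.3') true.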
+ + for (let i = 0; i < range.set.length; ++i) { + const comparators = range.set[i] + + let high = null + let low = null + + comparators.forEach((comparator) => { + if (comparator.semver === ANY) { + comparator = new Comparator('>=0.0.0') + } + high = high || comparator + low = low || comparator + if (gtfn(comparator.semver, high.semver, options)) { + high = comparator + } else if (ltfn(comparator.semver, low.semver, options)) { + low = comparator + } + }) + + // If the edge version comparator has a operator then our version + // isn't outside it + if (high.operator === comp || high.operator === ecomp) { + return false + } + + // If the lowest version comparator has an operator and our version + // is less than it then it isn't higher than the range + if ((!low.operator || low.operator === comp) && + ltefn(version, low.semver)) { + return false + } else if (low.operator === ecomp && ltfn(version, low.semver)) { + return false + } + } + return true +} + +module.exports = outside diff --git a/node_modules/semver/ranges/simplify.js b/node_modules/semver/ranges/simplify.js new file mode 100644 index 0000000..618d5b6 --- /dev/null +++ b/node_modules/semver/ranges/simplify.js @@ -0,0 +1,47 @@ +// given a set of versions and a range, create a "simplified" range +// that includes the same versions that the original range does +// If the original range is shorter than the simplified one, return that. +const satisfies = require('../functions/satisfies.js') +const compare = require('../functions/compare.js') +module.exports = (versions, range, options) => { + const set = [] + let first = null + let prev = null + const v = versions.sort((a, b) => compare(a, b, options)) + for (const version of v) { + const included = satisfies(version, range, options) + if (included) { + prev = version + if (!first) { + first = version + } + } else { + if (prev) { + set.push([first, prev]) + } + prev = null + first = null + } + } + if (first) { + set.push([first, null]) + } + + const ranges = [] + for (const [min, max] of set) { + if (min === max) { + ranges.push(min) + } else if (!max && min === v[0]) { + ranges.push('*') + } else if (!max) { + ranges.push(`>=${min}`) + } else if (min === v[0]) { + ranges.push(`<=${max}`) + } else { + ranges.push(`${min} - ${max}`) + } + } + const simplified = ranges.join(' || ') + const original = typeof range.raw === 'string' ? range.raw : String(range) + return simplified.length < original.length ? 
simplified : range +} diff --git a/node_modules/semver/ranges/subset.js b/node_modules/semver/ranges/subset.js new file mode 100644 index 0000000..e0dea43 --- /dev/null +++ b/node_modules/semver/ranges/subset.js @@ -0,0 +1,244 @@ +const Range = require('../classes/range.js') +const Comparator = require('../classes/comparator.js') +const { ANY } = Comparator +const satisfies = require('../functions/satisfies.js') +const compare = require('../functions/compare.js') + +// Complex range `r1 || r2 || ...` is a subset of `R1 || R2 || ...` iff: +// - Every simple range `r1, r2, ...` is a null set, OR +// - Every simple range `r1, r2, ...` which is not a null set is a subset of +// some `R1, R2, ...` +// +// Simple range `c1 c2 ...` is a subset of simple range `C1 C2 ...` iff: +// - If c is only the ANY comparator +// - If C is only the ANY comparator, return true +// - Else if in prerelease mode, return false +// - else replace c with `[>=0.0.0]` +// - If C is only the ANY comparator +// - if in prerelease mode, return true +// - else replace C with `[>=0.0.0]` +// - Let EQ be the set of = comparators in c +// - If EQ is more than one, return true (null set) +// - Let GT be the highest > or >= comparator in c +// - Let LT be the lowest < or <= comparator in c +// - If GT and LT, and GT.semver > LT.semver, return true (null set) +// - If any C is a = range, and GT or LT are set, return false +// - If EQ +// - If GT, and EQ does not satisfy GT, return true (null set) +// - If LT, and EQ does not satisfy LT, return true (null set) +// - If EQ satisfies every C, return true +// - Else return false +// - If GT +// - If GT.semver is lower than any > or >= comp in C, return false +// - If GT is >=, and GT.semver does not satisfy every C, return false +// - If GT.semver has a prerelease, and not in prerelease mode +// - If no C has a prerelease and the GT.semver tuple, return false +// - If LT +// - If LT.semver is greater than any < or <= comp in C, return false +// - If LT is <=, and LT.semver does not satisfy every C, return false +// - If GT.semver has a prerelease, and not in prerelease mode +// - If no C has a prerelease and the LT.semver tuple, return false +// - Else return true + +const subset = (sub, dom, options = {}) => { + if (sub === dom) { + return true + } + + sub = new Range(sub, options) + dom = new Range(dom, options) + let sawNonNull = false + + OUTER: for (const simpleSub of sub.set) { + for (const simpleDom of dom.set) { + const isSub = simpleSubset(simpleSub, simpleDom, options) + sawNonNull = sawNonNull || isSub !== null + if (isSub) { + continue OUTER + } + } + // the null set is a subset of everything, but null simple ranges in + // a complex range should be ignored. so if we saw a non-null range, + // then we know this isn't a subset, but if EVERY simple range was null, + // then it is a subset. 
+ if (sawNonNull) { + return false + } + } + return true +} + +const simpleSubset = (sub, dom, options) => { + if (sub === dom) { + return true + } + + if (sub.length === 1 && sub[0].semver === ANY) { + if (dom.length === 1 && dom[0].semver === ANY) { + return true + } else if (options.includePrerelease) { + sub = [new Comparator('>=0.0.0-0')] + } else { + sub = [new Comparator('>=0.0.0')] + } + } + + if (dom.length === 1 && dom[0].semver === ANY) { + if (options.includePrerelease) { + return true + } else { + dom = [new Comparator('>=0.0.0')] + } + } + + const eqSet = new Set() + let gt, lt + for (const c of sub) { + if (c.operator === '>' || c.operator === '>=') { + gt = higherGT(gt, c, options) + } else if (c.operator === '<' || c.operator === '<=') { + lt = lowerLT(lt, c, options) + } else { + eqSet.add(c.semver) + } + } + + if (eqSet.size > 1) { + return null + } + + let gtltComp + if (gt && lt) { + gtltComp = compare(gt.semver, lt.semver, options) + if (gtltComp > 0) { + return null + } else if (gtltComp === 0 && (gt.operator !== '>=' || lt.operator !== '<=')) { + return null + } + } + + // will iterate one or zero times + for (const eq of eqSet) { + if (gt && !satisfies(eq, String(gt), options)) { + return null + } + + if (lt && !satisfies(eq, String(lt), options)) { + return null + } + + for (const c of dom) { + if (!satisfies(eq, String(c), options)) { + return false + } + } + + return true + } + + let higher, lower + let hasDomLT, hasDomGT + // if the subset has a prerelease, we need a comparator in the superset + // with the same tuple and a prerelease, or it's not a subset + let needDomLTPre = lt && + !options.includePrerelease && + lt.semver.prerelease.length ? lt.semver : false + let needDomGTPre = gt && + !options.includePrerelease && + gt.semver.prerelease.length ? gt.semver : false + // exception: <1.2.3-0 is the same as <1.2.3 + if (needDomLTPre && needDomLTPre.prerelease.length === 1 && + lt.operator === '<' && needDomLTPre.prerelease[0] === 0) { + needDomLTPre = false + } + + for (const c of dom) { + hasDomGT = hasDomGT || c.operator === '>' || c.operator === '>=' + hasDomLT = hasDomLT || c.operator === '<' || c.operator === '<=' + if (gt) { + if (needDomGTPre) { + if (c.semver.prerelease && c.semver.prerelease.length && + c.semver.major === needDomGTPre.major && + c.semver.minor === needDomGTPre.minor && + c.semver.patch === needDomGTPre.patch) { + needDomGTPre = false + } + } + if (c.operator === '>' || c.operator === '>=') { + higher = higherGT(gt, c, options) + if (higher === c && higher !== gt) { + return false + } + } else if (gt.operator === '>=' && !satisfies(gt.semver, String(c), options)) { + return false + } + } + if (lt) { + if (needDomLTPre) { + if (c.semver.prerelease && c.semver.prerelease.length && + c.semver.major === needDomLTPre.major && + c.semver.minor === needDomLTPre.minor && + c.semver.patch === needDomLTPre.patch) { + needDomLTPre = false + } + } + if (c.operator === '<' || c.operator === '<=') { + lower = lowerLT(lt, c, options) + if (lower === c && lower !== lt) { + return false + } + } else if (lt.operator === '<=' && !satisfies(lt.semver, String(c), options)) { + return false + } + } + if (!c.operator && (lt || gt) && gtltComp !== 0) { + return false + } + } + + // if there was a < or >, and nothing in the dom, then must be false + // UNLESS it was limited by another range in the other direction. 
+ // Eg, >1.0.0 <1.0.1 is still a subset of <2.0.0 + if (gt && hasDomLT && !lt && gtltComp !== 0) { + return false + } + + if (lt && hasDomGT && !gt && gtltComp !== 0) { + return false + } + + // we needed a prerelease range in a specific tuple, but didn't get one + // then this isn't a subset. eg >=1.2.3-pre is not a subset of >=1.0.0, + // because it includes prereleases in the 1.2.3 tuple + if (needDomGTPre || needDomLTPre) { + return false + } + + return true +} + +// >=1.2.3 is lower than >1.2.3 +const higherGT = (a, b, options) => { + if (!a) { + return b + } + const comp = compare(a.semver, b.semver, options) + return comp > 0 ? a + : comp < 0 ? b + : b.operator === '>' && a.operator === '>=' ? b + : a +} + +// <=1.2.3 is higher than <1.2.3 +const lowerLT = (a, b, options) => { + if (!a) { + return b + } + const comp = compare(a.semver, b.semver, options) + return comp < 0 ? a + : comp > 0 ? b + : b.operator === '<' && a.operator === '<=' ? b + : a +} + +module.exports = subset diff --git a/node_modules/semver/ranges/to-comparators.js b/node_modules/semver/ranges/to-comparators.js new file mode 100644 index 0000000..6c8bc7e --- /dev/null +++ b/node_modules/semver/ranges/to-comparators.js @@ -0,0 +1,8 @@ +const Range = require('../classes/range') + +// Mostly just for testing and legacy API reasons +const toComparators = (range, options) => + new Range(range, options).set + .map(comp => comp.map(c => c.value).join(' ').trim().split(' ')) + +module.exports = toComparators diff --git a/node_modules/semver/ranges/valid.js b/node_modules/semver/ranges/valid.js new file mode 100644 index 0000000..365f356 --- /dev/null +++ b/node_modules/semver/ranges/valid.js @@ -0,0 +1,11 @@ +const Range = require('../classes/range') +const validRange = (range, options) => { + try { + // Return '*' instead of '' so that truthiness works. 
+ // This will throw if it's invalid anyway + return new Range(range, options).range || '*' + } catch (er) { + return null + } +} +module.exports = validRange diff --git a/node_modules/side-channel/.eslintignore b/node_modules/side-channel/.eslintignore new file mode 100644 index 0000000..404abb2 --- /dev/null +++ b/node_modules/side-channel/.eslintignore @@ -0,0 +1 @@ +coverage/ diff --git a/node_modules/side-channel/.eslintrc b/node_modules/side-channel/.eslintrc new file mode 100644 index 0000000..850ac1f --- /dev/null +++ b/node_modules/side-channel/.eslintrc @@ -0,0 +1,11 @@ +{ + "root": true, + + "extends": "@ljharb", + + "rules": { + "max-lines-per-function": 0, + "max-params": 0, + "new-cap": [2, { "capIsNewExceptions": ["GetIntrinsic"] }], + }, +} diff --git a/node_modules/side-channel/.github/FUNDING.yml b/node_modules/side-channel/.github/FUNDING.yml new file mode 100644 index 0000000..2a94840 --- /dev/null +++ b/node_modules/side-channel/.github/FUNDING.yml @@ -0,0 +1,12 @@ +# These are supported funding model platforms + +github: [ljharb] +patreon: # Replace with a single Patreon username +open_collective: # Replace with a single Open Collective username +ko_fi: # Replace with a single Ko-fi username +tidelift: npm/side-channel +community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry +liberapay: # Replace with a single Liberapay username +issuehunt: # Replace with a single IssueHunt username +otechie: # Replace with a single Otechie username +custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2'] diff --git a/node_modules/side-channel/.nycrc b/node_modules/side-channel/.nycrc new file mode 100644 index 0000000..1826526 --- /dev/null +++ b/node_modules/side-channel/.nycrc @@ -0,0 +1,13 @@ +{ + "all": true, + "check-coverage": false, + "reporter": ["text-summary", "text", "html", "json"], + "lines": 86, + "statements": 85.93, + "functions": 82.43, + "branches": 76.06, + "exclude": [ + "coverage", + "test" + ] +} diff --git a/node_modules/side-channel/CHANGELOG.md b/node_modules/side-channel/CHANGELOG.md new file mode 100644 index 0000000..a3d161f --- /dev/null +++ b/node_modules/side-channel/CHANGELOG.md @@ -0,0 +1,65 @@ +# Changelog + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
+ +## [v1.0.4](https://github.com/ljharb/side-channel/compare/v1.0.3...v1.0.4) - 2020-12-29 + +### Commits + +- [Tests] migrate tests to Github Actions [`10909cb`](https://github.com/ljharb/side-channel/commit/10909cbf8ce9c0bf96f604cf13d7ffd5a22c2d40) +- [Refactor] Use a linked list rather than an array, and move accessed nodes to the beginning [`195613f`](https://github.com/ljharb/side-channel/commit/195613f28b5c1e6072ef0b61b5beebaf2b6a304e) +- [meta] do not publish github action workflow files [`290ec29`](https://github.com/ljharb/side-channel/commit/290ec29cd21a60585145b4a7237ec55228c52c27) +- [Tests] run `nyc` on all tests; use `tape` runner [`ea6d030`](https://github.com/ljharb/side-channel/commit/ea6d030ff3fe6be2eca39e859d644c51ecd88869) +- [actions] add "Allow Edits" workflow [`d464d8f`](https://github.com/ljharb/side-channel/commit/d464d8fe52b5eddf1504a0ed97f0941a90f32c15) +- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `aud`, `auto-changelog` [`02daca8`](https://github.com/ljharb/side-channel/commit/02daca87c6809821c97be468d1afa2f5ef447383) +- [Refactor] use `call-bind` and `get-intrinsic` instead of `es-abstract` [`e09d481`](https://github.com/ljharb/side-channel/commit/e09d481528452ebafa5cdeae1af665c35aa2deee) +- [Deps] update `object.assign` [`ee83aa8`](https://github.com/ljharb/side-channel/commit/ee83aa81df313b5e46319a63adb05cf0c179079a) +- [actions] update rebase action to use checkout v2 [`7726b0b`](https://github.com/ljharb/side-channel/commit/7726b0b058b632fccea709f58960871defaaa9d7) + +## [v1.0.3](https://github.com/ljharb/side-channel/compare/v1.0.2...v1.0.3) - 2020-08-23 + +### Commits + +- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `aud`, `auto-changelog`, `tape` [`1f10561`](https://github.com/ljharb/side-channel/commit/1f105611ef3acf32dec8032ae5c0baa5e56bb868) +- [Deps] update `es-abstract`, `object-inspect` [`bc20159`](https://github.com/ljharb/side-channel/commit/bc201597949a505e37cef9eaf24c7010831e6f03) +- [Dev Deps] update `@ljharb/eslint-config`, `tape` [`b9b2b22`](https://github.com/ljharb/side-channel/commit/b9b2b225f9e0ea72a6ec2b89348f0bd690bc9ed1) +- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `tape` [`7055ab4`](https://github.com/ljharb/side-channel/commit/7055ab4de0860606efd2003674a74f1fe6ebc07e) +- [Dev Deps] update `auto-changelog`; add `aud` [`d278c37`](https://github.com/ljharb/side-channel/commit/d278c37d08227be4f84aa769fcd919e73feeba40) +- [actions] switch Automatic Rebase workflow to `pull_request_target` event [`3bcf982`](https://github.com/ljharb/side-channel/commit/3bcf982faa122745b39c33ce83d32fdf003741c6) +- [Tests] only audit prod deps [`18d01c4`](https://github.com/ljharb/side-channel/commit/18d01c4015b82a3d75044c4d5ba7917b2eac01ec) +- [Deps] update `es-abstract` [`6ab096d`](https://github.com/ljharb/side-channel/commit/6ab096d9de2b482cf5e0717e34e212f5b2b9bc9a) +- [Dev Deps] update `tape` [`9dc174c`](https://github.com/ljharb/side-channel/commit/9dc174cc651dfd300b4b72da936a0a7eda5f9452) +- [Deps] update `es-abstract` [`431d0f0`](https://github.com/ljharb/side-channel/commit/431d0f0ff11fbd2ae6f3115582a356d3a1cfce82) +- [Deps] update `es-abstract` [`49869fd`](https://github.com/ljharb/side-channel/commit/49869fd323bf4453f0ba515c0fb265cf5ab7b932) +- [meta] Add package.json to package's exports [`77d9cdc`](https://github.com/ljharb/side-channel/commit/77d9cdceb2a9e47700074f2ae0c0a202e7dac0d4) + +## [v1.0.2](https://github.com/ljharb/side-channel/compare/v1.0.1...v1.0.2) - 2019-12-20 + +### Commits + +- [Dev Deps] update 
`@ljharb/eslint-config`, `tape` [`4a526df`](https://github.com/ljharb/side-channel/commit/4a526df44e4701566ed001ec78546193f818b082) +- [Deps] update `es-abstract` [`d4f6e62`](https://github.com/ljharb/side-channel/commit/d4f6e629b6fb93a07415db7f30d3c90fd7f264fe) + +## [v1.0.1](https://github.com/ljharb/side-channel/compare/v1.0.0...v1.0.1) - 2019-12-01 + +### Commits + +- [Fix] add missing "exports" [`d212907`](https://github.com/ljharb/side-channel/commit/d2129073abf0701a5343bf28aa2145617604dc2e) + +## v1.0.0 - 2019-12-01 + +### Commits + +- Initial implementation [`dbebd3a`](https://github.com/ljharb/side-channel/commit/dbebd3a4b5ed64242f9a6810efe7c4214cd8cde4) +- Initial tests [`73bdefe`](https://github.com/ljharb/side-channel/commit/73bdefe568c9076cf8c0b8719bc2141aec0e19b8) +- Initial commit [`43c03e1`](https://github.com/ljharb/side-channel/commit/43c03e1c2849ec50a87b7a5cd76238a62b0b8770) +- npm init [`5c090a7`](https://github.com/ljharb/side-channel/commit/5c090a765d66a5527d9889b89aeff78dee91348c) +- [meta] add `auto-changelog` [`a5c4e56`](https://github.com/ljharb/side-channel/commit/a5c4e5675ec02d5eb4d84b4243aeea2a1d38fbec) +- [actions] add automatic rebasing / merge commit blocking [`bab1683`](https://github.com/ljharb/side-channel/commit/bab1683d8f9754b086e94397699fdc645e0d7077) +- [meta] add `funding` field; create FUNDING.yml [`63d7aea`](https://github.com/ljharb/side-channel/commit/63d7aeaf34f5650650ae97ca4b9fae685bd0937c) +- [Tests] add `npm run lint` [`46a5a81`](https://github.com/ljharb/side-channel/commit/46a5a81705cd2664f83df232c01dbbf2ee952885) +- Only apps should have lockfiles [`8b16b03`](https://github.com/ljharb/side-channel/commit/8b16b0305f00895d90c4e2e5773c854cfea0e448) +- [meta] add `safe-publish-latest` [`2f098ef`](https://github.com/ljharb/side-channel/commit/2f098ef092a39399cfe548b19a1fc03c2fd2f490) diff --git a/node_modules/side-channel/LICENSE b/node_modules/side-channel/LICENSE new file mode 100644 index 0000000..3900dd7 --- /dev/null +++ b/node_modules/side-channel/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2019 Jordan Harband + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/side-channel/README.md b/node_modules/side-channel/README.md new file mode 100644 index 0000000..7fa4f06 --- /dev/null +++ b/node_modules/side-channel/README.md @@ -0,0 +1,2 @@ +# side-channel +Store information about any JS value in a side channel. Uses WeakMap if available. 
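The one-line README above is terse, so here is a brief, illustrative usage sketch of the `side-channel` API, based on the `index.js` and `test/index.js` files added later in this diff. The `key` object and the stored string are made-up examples, not part of the package:

```js
// Illustrative sketch only — mirrors the API defined in index.js in this diff.
const getSideChannel = require('side-channel');

const channel = getSideChannel();
const key = { id: 1 }; // any object or primitive can serve as a key

channel.set(key, 'some metadata');  // store a value without mutating `key`
console.log(channel.has(key));      // true
console.log(channel.get(key));      // 'some metadata'
channel.assert(key);                // throws a TypeError only for keys that were never set
```

Object and function keys are held in a WeakMap when one is available, other keys in a Map, with the linked list in `index.js` as the final fallback.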
diff --git a/node_modules/side-channel/index.js b/node_modules/side-channel/index.js new file mode 100644 index 0000000..f1c4826 --- /dev/null +++ b/node_modules/side-channel/index.js @@ -0,0 +1,124 @@ +'use strict'; + +var GetIntrinsic = require('get-intrinsic'); +var callBound = require('call-bind/callBound'); +var inspect = require('object-inspect'); + +var $TypeError = GetIntrinsic('%TypeError%'); +var $WeakMap = GetIntrinsic('%WeakMap%', true); +var $Map = GetIntrinsic('%Map%', true); + +var $weakMapGet = callBound('WeakMap.prototype.get', true); +var $weakMapSet = callBound('WeakMap.prototype.set', true); +var $weakMapHas = callBound('WeakMap.prototype.has', true); +var $mapGet = callBound('Map.prototype.get', true); +var $mapSet = callBound('Map.prototype.set', true); +var $mapHas = callBound('Map.prototype.has', true); + +/* + * This function traverses the list returning the node corresponding to the + * given key. + * + * That node is also moved to the head of the list, so that if it's accessed + * again we don't need to traverse the whole list. By doing so, all the recently + * used nodes can be accessed relatively quickly. + */ +var listGetNode = function (list, key) { // eslint-disable-line consistent-return + for (var prev = list, curr; (curr = prev.next) !== null; prev = curr) { + if (curr.key === key) { + prev.next = curr.next; + curr.next = list.next; + list.next = curr; // eslint-disable-line no-param-reassign + return curr; + } + } +}; + +var listGet = function (objects, key) { + var node = listGetNode(objects, key); + return node && node.value; +}; +var listSet = function (objects, key, value) { + var node = listGetNode(objects, key); + if (node) { + node.value = value; + } else { + // Prepend the new node to the beginning of the list + objects.next = { // eslint-disable-line no-param-reassign + key: key, + next: objects.next, + value: value + }; + } +}; +var listHas = function (objects, key) { + return !!listGetNode(objects, key); +}; + +module.exports = function getSideChannel() { + var $wm; + var $m; + var $o; + var channel = { + assert: function (key) { + if (!channel.has(key)) { + throw new $TypeError('Side channel does not contain ' + inspect(key)); + } + }, + get: function (key) { // eslint-disable-line consistent-return + if ($WeakMap && key && (typeof key === 'object' || typeof key === 'function')) { + if ($wm) { + return $weakMapGet($wm, key); + } + } else if ($Map) { + if ($m) { + return $mapGet($m, key); + } + } else { + if ($o) { // eslint-disable-line no-lonely-if + return listGet($o, key); + } + } + }, + has: function (key) { + if ($WeakMap && key && (typeof key === 'object' || typeof key === 'function')) { + if ($wm) { + return $weakMapHas($wm, key); + } + } else if ($Map) { + if ($m) { + return $mapHas($m, key); + } + } else { + if ($o) { // eslint-disable-line no-lonely-if + return listHas($o, key); + } + } + return false; + }, + set: function (key, value) { + if ($WeakMap && key && (typeof key === 'object' || typeof key === 'function')) { + if (!$wm) { + $wm = new $WeakMap(); + } + $weakMapSet($wm, key, value); + } else if ($Map) { + if (!$m) { + $m = new $Map(); + } + $mapSet($m, key, value); + } else { + if (!$o) { + /* + * Initialize the linked list as an empty node, so that we don't have + * to special-case handling of the first node: we can always refer to + * it as (previous node).next, instead of something like (list).head + */ + $o = { key: {}, next: null }; + } + listSet($o, key, value); + } + } + }; + return channel; +}; diff --git 
a/node_modules/side-channel/package.json b/node_modules/side-channel/package.json new file mode 100644 index 0000000..a3e33f6 --- /dev/null +++ b/node_modules/side-channel/package.json @@ -0,0 +1,67 @@ +{ + "name": "side-channel", + "version": "1.0.4", + "description": "Store information about any JS value in a side channel. Uses WeakMap if available.", + "main": "index.js", + "exports": { + "./package.json": "./package.json", + ".": [ + { + "default": "./index.js" + }, + "./index.js" + ] + }, + "scripts": { + "prepublish": "safe-publish-latest", + "lint": "eslint .", + "pretest": "npm run lint", + "tests-only": "nyc tape 'test/**/*.js'", + "test": "npm run tests-only", + "posttest": "npx aud --production", + "version": "auto-changelog && git add CHANGELOG.md", + "postversion": "auto-changelog && git add CHANGELOG.md && git commit --no-edit --amend && git tag -f \"v$(node -e \"console.log(require('./package.json').version)\")\"" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/ljharb/side-channel.git" + }, + "keywords": [ + "weakmap", + "map", + "side", + "channel", + "metadata" + ], + "author": "Jordan Harband ", + "funding": { + "url": "https://github.com/sponsors/ljharb" + }, + "license": "MIT", + "bugs": { + "url": "https://github.com/ljharb/side-channel/issues" + }, + "homepage": "https://github.com/ljharb/side-channel#readme", + "devDependencies": { + "@ljharb/eslint-config": "^17.3.0", + "aud": "^1.1.3", + "auto-changelog": "^2.2.1", + "eslint": "^7.16.0", + "nyc": "^10.3.2", + "safe-publish-latest": "^1.1.4", + "tape": "^5.0.1" + }, + "dependencies": { + "call-bind": "^1.0.0", + "get-intrinsic": "^1.0.2", + "object-inspect": "^1.9.0" + }, + "auto-changelog": { + "output": "CHANGELOG.md", + "template": "keepachangelog", + "unreleased": false, + "commitLimit": false, + "backfillLimit": false, + "hideCredit": true + } +} diff --git a/node_modules/side-channel/test/index.js b/node_modules/side-channel/test/index.js new file mode 100644 index 0000000..3b92ef7 --- /dev/null +++ b/node_modules/side-channel/test/index.js @@ -0,0 +1,78 @@ +'use strict'; + +var test = require('tape'); + +var getSideChannel = require('../'); + +test('export', function (t) { + t.equal(typeof getSideChannel, 'function', 'is a function'); + t.equal(getSideChannel.length, 0, 'takes no arguments'); + + var channel = getSideChannel(); + t.ok(channel, 'is truthy'); + t.equal(typeof channel, 'object', 'is an object'); + + t.end(); +}); + +test('assert', function (t) { + var channel = getSideChannel(); + t['throws']( + function () { channel.assert({}); }, + TypeError, + 'nonexistent value throws' + ); + + var o = {}; + channel.set(o, 'data'); + t.doesNotThrow(function () { channel.assert(o); }, 'existent value noops'); + + t.end(); +}); + +test('has', function (t) { + var channel = getSideChannel(); + var o = []; + + t.equal(channel.has(o), false, 'nonexistent value yields false'); + + channel.set(o, 'foo'); + t.equal(channel.has(o), true, 'existent value yields true'); + + t.end(); +}); + +test('get', function (t) { + var channel = getSideChannel(); + var o = {}; + t.equal(channel.get(o), undefined, 'nonexistent value yields undefined'); + + var data = {}; + channel.set(o, data); + t.equal(channel.get(o), data, '"get" yields data set by "set"'); + + t.end(); +}); + +test('set', function (t) { + var channel = getSideChannel(); + var o = function () {}; + t.equal(channel.get(o), undefined, 'value not set'); + + channel.set(o, 42); + t.equal(channel.get(o), 42, 'value was set'); + + 
channel.set(o, Infinity); + t.equal(channel.get(o), Infinity, 'value was set again'); + + var o2 = {}; + channel.set(o2, 17); + t.equal(channel.get(o), Infinity, 'o is not modified'); + t.equal(channel.get(o2), 17, 'o2 is set'); + + channel.set(o, 14); + t.equal(channel.get(o), 14, 'o is modified'); + t.equal(channel.get(o2), 17, 'o2 is not modified'); + + t.end(); +}); diff --git a/node_modules/smee-client/LICENSE b/node_modules/smee-client/LICENSE new file mode 100644 index 0000000..2d6ce7e --- /dev/null +++ b/node_modules/smee-client/LICENSE @@ -0,0 +1,15 @@ +ISC License + +Copyright (c) , Jason Etcovitch (https://github.com/jasonetco/grapplehook) + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/smee-client/README.md b/node_modules/smee-client/README.md new file mode 100644 index 0000000..3c65746 --- /dev/null +++ b/node_modules/smee-client/README.md @@ -0,0 +1,42 @@ +
+smee-client
+
+Client and CLI for smee.io, a service that delivers webhooks to your local development environment.
+
+NPM Build Status Codecov
+
+Looking for probot/smee.io?
+
+ +## Installation + +Install the client with: + +``` +$ npm install -g smee-client +``` + +## Usage + +### CLI + +The `smee` command will forward webhooks from smee.io to your local development environment. + +``` +$ smee +``` + +Run `smee --help` for usage. + +### Node Client + +```js +const SmeeClient = require('smee-client') + +const smee = new SmeeClient({ + source: 'https://smee.io/abc123', + target: 'http://localhost:3000/events', + logger: console +}) + +const events = smee.start() + +// Stop forwarding events +events.close() +``` diff --git a/node_modules/smee-client/bin/smee.js b/node_modules/smee-client/bin/smee.js new file mode 100755 index 0000000..aaec6b1 --- /dev/null +++ b/node_modules/smee-client/bin/smee.js @@ -0,0 +1,35 @@ +#!/usr/bin/env node + +const program = require('commander') +const { version } = require('../package.json') + +const Client = require('..') + +program + .version(version, '-v, --version') + .usage('[options]') + .option('-u, --url ', 'URL of the webhook proxy service. Default: https://smee.io/new') + .option('-t, --target ', 'Full URL (including protocol and path) of the target service the events will forwarded to. Default: http://127.0.0.1:PORT/PATH') + .option('-p, --port ', 'Local HTTP server port', process.env.PORT || 3000) + .option('-P, --path ', 'URL path to post proxied requests to`', '/') + .parse(process.argv) + +let target +if (program.target) { + target = program.target +} else { + target = `http://127.0.0.1:${program.port}${program.path}` +} + +async function setup () { + let source = program.url + + if (!source) { + source = await Client.createChannel() + } + + const client = new Client({ source, target }) + client.start() +} + +setup() diff --git a/node_modules/smee-client/index.d.ts b/node_modules/smee-client/index.d.ts new file mode 100644 index 0000000..cf2fe8e --- /dev/null +++ b/node_modules/smee-client/index.d.ts @@ -0,0 +1,20 @@ +import EventSource from 'eventsource'; +declare type Severity = 'info' | 'error'; +interface Options { + source: string; + target: string; + logger?: Pick; +} +declare class Client { + source: string; + target: string; + logger: Pick; + events: EventSource; + constructor({ source, target, logger }: Options); + static createChannel(): Promise; + onmessage(msg: any): void; + onopen(): void; + onerror(err: any): void; + start(): EventSource; +} +export = Client; diff --git a/node_modules/smee-client/index.js b/node_modules/smee-client/index.js new file mode 100644 index 0000000..766a7ca --- /dev/null +++ b/node_modules/smee-client/index.js @@ -0,0 +1,62 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +const validator_1 = __importDefault(require("validator")); +const eventsource_1 = __importDefault(require("eventsource")); +const superagent_1 = __importDefault(require("superagent")); +const url_1 = __importDefault(require("url")); +const querystring_1 = __importDefault(require("querystring")); +class Client { + constructor({ source, target, logger = console }) { + this.source = source; + this.target = target; + this.logger = logger; + if (!validator_1.default.isURL(this.source)) { + throw new Error('The provided URL is invalid.'); + } + } + static async createChannel() { + return superagent_1.default.head('https://smee.io/new').redirects(0).catch((err) => { + return err.response.headers.location; + }); + } + onmessage(msg) { + const data = JSON.parse(msg.data); + const target = url_1.default.parse(this.target, true); + const mergedQuery = Object.assign(target.query, data.query); + target.search = querystring_1.default.stringify(mergedQuery); + delete data.query; + const req = superagent_1.default.post(url_1.default.format(target)).send(data.body); + delete data.body; + Object.keys(data).forEach(key => { + req.set(key, data[key]); + }); + req.end((err, res) => { + if (err) { + this.logger.error(err); + } + else { + this.logger.info(`${req.method} ${req.url} - ${res.status}`); + } + }); + } + onopen() { + this.logger.info('Connected', this.events.url); + } + onerror(err) { + this.logger.error(err); + } + start() { + const events = new eventsource_1.default(this.source); + // Reconnect immediately + events.reconnectInterval = 0; // This isn't a valid property of EventSource + events.addEventListener('message', this.onmessage.bind(this)); + events.addEventListener('open', this.onopen.bind(this)); + events.addEventListener('error', this.onerror.bind(this)); + this.logger.info(`Forwarding ${this.source} to ${this.target}`); + this.events = events; + return events; + } +} +module.exports = Client; diff --git a/node_modules/smee-client/node_modules/eventsource/.editorconfig b/node_modules/smee-client/node_modules/eventsource/.editorconfig new file mode 100644 index 0000000..88f10ae --- /dev/null +++ b/node_modules/smee-client/node_modules/eventsource/.editorconfig @@ -0,0 +1,27 @@ +# http://editorconfig.org +root = true + +[*] +# Use hard or soft tabs +indent_style = space + +# Size of a single indent +indent_size = tab + +# Number of columns representing a tab character +tab_width = 2 + +# Use line-feed as EOL indicator +end_of_line = lf + +# Use UTF-8 character encoding for all files +charset = utf-8 + +# Remove any whitespace characters preceding newline characters +trim_trailing_whitespace = true + +# Ensure file ends with a newline when saving +insert_final_newline = true + +[*.md] +trim_trailing_whitespace = false diff --git a/node_modules/smee-client/node_modules/eventsource/.travis.yml b/node_modules/smee-client/node_modules/eventsource/.travis.yml new file mode 100644 index 0000000..d73ebe6 --- /dev/null +++ b/node_modules/smee-client/node_modules/eventsource/.travis.yml @@ -0,0 +1,10 @@ +language: node_js + +node_js: + - "0.12" + - "4" + - "6" + - "8" + - "10" + - "12" + - "13" diff --git a/node_modules/smee-client/node_modules/eventsource/CONTRIBUTING.md b/node_modules/smee-client/node_modules/eventsource/CONTRIBUTING.md new file mode 100644 index 0000000..f66db3b --- /dev/null +++ b/node_modules/smee-client/node_modules/eventsource/CONTRIBUTING.md @@ -0,0 +1,13 @@ +# Contributing to EventSource + +If you add or fix something, add tests. 
+ +## Release process + +Update `HISTORY.md`, Then: + + npm outdated --depth 0 # See if you can upgrade something + npm run polyfill + git commit ... + npm version [major|minor|patch] + npm publish diff --git a/node_modules/smee-client/node_modules/eventsource/HISTORY.md b/node_modules/smee-client/node_modules/eventsource/HISTORY.md new file mode 100644 index 0000000..8af6c27 --- /dev/null +++ b/node_modules/smee-client/node_modules/eventsource/HISTORY.md @@ -0,0 +1,151 @@ +# [1.1.2](https://github.com/EventSource/eventsource/compare/v1.1.1...v1.1.2) + +* Inline origin resolution, drops `original` dependency ([#281](https://github.com/EventSource/eventsource/pull/281) Espen Hovlandsdal) + +# [1.1.1](https://github.com/EventSource/eventsource/compare/v1.1.0...v1.1.1) + +* Do not include authorization and cookie headers on redirect to different origin ([#273](https://github.com/EventSource/eventsource/pull/273) Espen Hovlandsdal) + +# [1.1.0](https://github.com/EventSource/eventsource/compare/v1.0.7...v1.1.0) + +* Improve performance for large messages across many chunks ([#130](https://github.com/EventSource/eventsource/pull/130) Trent Willis) +* Add `createConnection` option for http or https requests ([#120](https://github.com/EventSource/eventsource/pull/120) Vasily Lavrov) +* Support HTTP 302 redirects ([#116](https://github.com/EventSource/eventsource/pull/116) Ryan Bonte) +* Prevent sequential errors from attempting multiple reconnections ([#125](https://github.com/EventSource/eventsource/pull/125) David Patty) +* Add `new` to correct test ([#111](https://github.com/EventSource/eventsource/pull/101) Stéphane Alnet) +* Fix reconnections attempts now happen more than once ([#136](https://github.com/EventSource/eventsource/pull/136) Icy Fish) + +# [1.0.7](https://github.com/EventSource/eventsource/compare/v1.0.6...v1.0.7) + +* Add dispatchEvent to EventSource ([#101](https://github.com/EventSource/eventsource/pull/101) Ali Afroozeh) +* Added `checkServerIdentity` option ([#104](https://github.com/EventSource/eventsource/pull/104) cintolas) +* Surface request error message ([#107](https://github.com/EventSource/eventsource/pull/107) RasPhilCo) + +# [1.0.6](https://github.com/EventSource/eventsource/compare/v1.0.5...v1.0.6) + +* Fix issue where a unicode sequence split in two chunks would lead to invalid messages ([#108](https://github.com/EventSource/eventsource/pull/108) Espen Hovlandsdal) +* Change example to use `eventsource/ssestream` (Aslak Hellesøy) + +# [1.0.5](https://github.com/EventSource/eventsource/compare/v1.0.4...v1.0.5) + +* Check for `window` existing before polyfilling. ([#80](https://github.com/EventSource/eventsource/pull/80) Neftaly Hernandez) + +# [1.0.4](https://github.com/EventSource/eventsource/compare/v1.0.2...v1.0.4) + +* Pass withCredentials on to the XHR. ([#79](https://github.com/EventSource/eventsource/pull/79) Ken Mayer) + +# [1.0.2](https://github.com/EventSource/eventsource/compare/v1.0.1...v1.0.2) + +* Fix proxy not working when proxy and target URL uses different protocols. ([#76](https://github.com/EventSource/eventsource/pull/76) Espen Hovlandsdal) +* Make `close()` a prototype method instead of an instance method. ([#77](https://github.com/EventSource/eventsource/pull/77) Espen Hovlandsdal) + +# [1.0.1](https://github.com/EventSource/eventsource/compare/v1.0.0...v1.0.1) + +* Reconnect if server responds with HTTP 500, 502, 503 or 504. 
([#74](https://github.com/EventSource/eventsource/pull/74) Vykintas Narmontas) + +# [1.0.0](https://github.com/EventSource/eventsource/compare/v0.2.3...v1.0.0) + +* Add missing `removeEventListener`-method. ([#51](https://github.com/EventSource/eventsource/pull/51) Yucheng Tu / Espen Hovlandsdal) +* Fix EventSource reconnecting on non-200 responses. ([af84476](https://github.com/EventSource/eventsource/commit/af84476b519a01e61b8c80727261df52ae40022c) Espen Hovlandsdal) +* Add ability to customize https options. ([#53](https://github.com/EventSource/eventsource/pull/53) Rafael Alfaro) +* Add readyState constants to EventSource instances. ([#66](https://github.com/EventSource/eventsource/pull/66) Espen Hovlandsdal) + +# [0.2.3](https://github.com/EventSource/eventsource/compare/v0.2.2...v0.2.3) + +* Fix `onConnectionClosed` firing multiple times resulting in multiple connections. ([#61](https://github.com/EventSource/eventsource/pull/61) Phil Strong / Duncan Wong) +* Remove unneeded isPlainObject check for headers. ([#64](https://github.com/EventSource/eventsource/pull/64) David Mark) + +# [0.2.2](https://github.com/EventSource/eventsource/compare/v0.2.1...v0.2.2) + +* Don't include test files in npm package. ([#56](https://github.com/EventSource/eventsource/pull/56) eanplatter) + +# [0.2.1](https://github.com/EventSource/eventsource/compare/v0.2.0...v0.2.1) + +* Fix `close()` for polyfill. ([#52](https://github.com/EventSource/eventsource/pull/52) brian-medendorp) +* Add http/https proxy function. ([#46](https://github.com/EventSource/eventsource/pull/46) Eric Lu) +* Fix reconnect for polyfill. Only disable reconnect when server status is 204. (Aslak Hellesøy). +* Drop support for Node 0.10.x and older (Aslak Hellesøy). + +# [0.2.0](https://github.com/EventSource/eventsource/compare/v0.1.6...v0.2.0) + +* Renamed repository to `eventsource` (since it's not just Node, but also browser polyfill). (Aslak Hellesøy). +* Compatibility with webpack/browserify. ([#44](https://github.com/EventSource/eventsource/pull/44) Adriano Raiano). + +# [0.1.6](https://github.com/EventSource/eventsource/compare/v0.1.5...v0.1.6) + +* Ignore headers without a value. ([#41](https://github.com/EventSource/eventsource/issues/41), [#43](https://github.com/EventSource/eventsource/pull/43) Adriano Raiano) + +# [0.1.5](https://github.com/EventSource/eventsource/compare/v0.1.4...v0.1.5) + +* Refactor tests to support Node.js 0.12.0 and Io.js 1.1.0. (Aslak Hellesøy) + +# [0.1.4](https://github.com/EventSource/eventsource/compare/v0.1.3...master) + +* Bugfix: Added missing origin property. ([#39](https://github.com/EventSource/eventsource/pull/39), [#38](https://github.com/EventSource/eventsource/issues/38) Arnout Kazemier) +* Expose `status` property on `error` events. ([#40](https://github.com/EventSource/eventsource/pull/40) Adriano Raiano) + +# [0.1.3](https://github.com/EventSource/eventsource/compare/v0.1.2...v0.1.3) + +* Bugfix: Made message properties enumerable. ([#37](https://github.com/EventSource/eventsource/pull/37) Golo Roden) + +# [0.1.2](https://github.com/EventSource/eventsource/compare/v0.1.1...v0.1.2) + +* Bugfix: Blank lines not read. ([#35](https://github.com/EventSource/eventsource/issues/35), [#36](https://github.com/EventSource/eventsource/pull/36) Lesterpig) + +# [0.1.1](https://github.com/EventSource/eventsource/compare/v0.1.0...v0.1.1) + +* Bugfix: Fix message type. 
([#33](https://github.com/EventSource/eventsource/pull/33) Romain Gauthier) + +# [0.1.0](https://github.com/EventSource/eventsource/compare/v0.0.10...v0.1.0) + +* Bugfix: High CPU usage by replacing Jison with port of WebKit's parser. ([#25](https://github.com/EventSource/eventsource/issues/25), [#32](https://github.com/EventSource/eventsource/pull/32), [#18](https://github.com/EventSource/eventsource/issues/18) qqueue) +* Reformatted all code to 2 spaces. + +# [0.0.10](https://github.com/EventSource/eventsource/compare/v0.0.9...v0.0.10) + +* Provide `Event` argument on `open` and `error` event ([#30](https://github.com/EventSource/eventsource/issues/30), [#31](https://github.com/EventSource/eventsource/pull/31) Donghwan Kim) +* Expose `lastEventId` on messages. ([#28](https://github.com/EventSource/eventsource/pull/28) mbieser) + +# [0.0.9](https://github.com/EventSource/eventsource/compare/v0.0.8...v0.0.9) + +* Bugfix: old "last-event-id" used on reconnect ([#27](https://github.com/EventSource/eventsource/pull/27) Aslak Hellesøy) + +# [0.0.8](https://github.com/EventSource/eventsource/compare/v0.0.7...v0.0.8) + +* Bugfix: EventSource still reconnected when closed ([#24](https://github.com/EventSource/eventsource/pull/24) FrozenCow) +* Allow unauthorized HTTPS connections by setting `rejectUnauthorized` to false. (Aslak Hellesøy) + +# [0.0.7](https://github.com/EventSource/eventsource/compare/v0.0.6...v0.0.7) + +* Explicitly raise an error when server returns http 403 and don't continue ([#20](https://github.com/EventSource/eventsource/pull/20) Scott Moak) +* Added ability to send custom http headers to server ([#21](https://github.com/EventSource/eventsource/pull/21), [#9](https://github.com/EventSource/eventsource/issues/9) Scott Moak) +* Fix Unicode support to cope with Javascript Unicode size limitations ([#23](https://github.com/EventSource/eventsource/pull/23), [#22](https://github.com/EventSource/eventsource/issues/22) Devon Adkisson) +* Graceful handling of parse errors ([#19](https://github.com/EventSource/eventsource/issues/19) Aslak Hellesøy) +* Switched from testing with Nodeunit to Mocha (Aslak Hellesøy) + +# [0.0.6](https://github.com/EventSource/eventsource/compare/v0.0.5...v0.0.6) + +* Add Accept: text/event-stream header ([#17](https://github.com/EventSource/eventsource/pull/17) William Wicks) + +# [0.0.5](https://github.com/EventSource/eventsource/compare/v0.0.4...v0.0.5) + +* Add no-cache and https support ([#10](https://github.com/EventSource/eventsource/pull/10) Einar Otto Stangvik) +* Ensure that Last-Event-ID is sent to the server for reconnects, as defined in the spec ([#8](https://github.com/EventSource/eventsource/pull/8) Einar Otto Stangvik) +* Verify that CR and CRLF are accepted alongside LF ([#7](https://github.com/EventSource/eventsource/pull/7) Einar Otto Stangvik) +* Emit 'open' event ([#4](https://github.com/EventSource/eventsource/issues/4) Einar Otto Stangvik) + +# [0.0.4](https://github.com/EventSource/eventsource/compare/v0.0.3...v0.0.4) + +* Automatic reconnect every second if the server is down. Reconnect interval can be set with `reconnectInterval` (not in W3C spec). 
(Aslak Hellesøy) + +# [0.0.3](https://github.com/EventSource/eventsource/compare/v0.0.2...v0.0.3) + +* Jison based eventstream parser ([#2](https://github.com/EventSource/eventsource/pull/2) Einar Otto Stangvik) + +# [0.0.2](https://github.com/EventSource/eventsource/compare/v0.0.1...v0.0.2) + +* Use native EventListener (Aslak Hellesøy) + +# 0.0.1 + +* First release diff --git a/node_modules/smee-client/node_modules/eventsource/LICENSE b/node_modules/smee-client/node_modules/eventsource/LICENSE new file mode 100644 index 0000000..505b0d9 --- /dev/null +++ b/node_modules/smee-client/node_modules/eventsource/LICENSE @@ -0,0 +1,22 @@ +The MIT License + +Copyright (c) EventSource GitHub organisation + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/smee-client/node_modules/eventsource/README.md b/node_modules/smee-client/node_modules/eventsource/README.md new file mode 100644 index 0000000..ee8c8c0 --- /dev/null +++ b/node_modules/smee-client/node_modules/eventsource/README.md @@ -0,0 +1,89 @@ +# EventSource [![npm version](http://img.shields.io/npm/v/eventsource.svg?style=flat-square)](http://browsenpm.org/package/eventsource)[![Build Status](http://img.shields.io/travis/EventSource/eventsource/master.svg?style=flat-square)](https://travis-ci.org/EventSource/eventsource)[![NPM Downloads](https://img.shields.io/npm/dm/eventsource.svg?style=flat-square)](http://npm-stat.com/charts.html?package=eventsource&from=2015-09-01)[![Dependencies](https://img.shields.io/david/EventSource/eventsource.svg?style=flat-square)](https://david-dm.org/EventSource/eventsource) + +This library is a pure JavaScript implementation of the [EventSource](https://html.spec.whatwg.org/multipage/server-sent-events.html#server-sent-events) client. The API aims to be W3C compatible. + +You can use it with Node.js or as a browser polyfill for +[browsers that don't have native `EventSource` support](http://caniuse.com/#feat=eventsource). + +## Install + + npm install eventsource + +## Example + + npm install + node ./example/sse-server.js + node ./example/sse-client.js # Node.js client + open http://localhost:8080 # Browser client - both native and polyfill + curl http://localhost:8080/sse # Enjoy the simplicity of SSE + +## Browser Polyfill + +Just add `example/eventsource-polyfill.js` file to your web page: + +```html + +``` + +Now you will have two global constructors: + +```javascript +window.EventSourcePolyfill +window.EventSource // Unchanged if browser has defined it. 
Otherwise, same as window.EventSourcePolyfill +``` + +If you're using [webpack](https://webpack.github.io/) or [browserify](http://browserify.org/) +you can of course build your own. (The `example/eventsource-polyfill.js` is built with webpack). + +## Extensions to the W3C API + +### Setting HTTP request headers + +You can define custom HTTP headers for the initial HTTP request. This can be useful for e.g. sending cookies +or to specify an initial `Last-Event-ID` value. + +HTTP headers are defined by assigning a `headers` attribute to the optional `eventSourceInitDict` argument: + +```javascript +var eventSourceInitDict = {headers: {'Cookie': 'test=test'}}; +var es = new EventSource(url, eventSourceInitDict); +``` + +### Allow unauthorized HTTPS requests + +By default, https requests that cannot be authorized will cause the connection to fail and an exception +to be emitted. You can override this behaviour, along with other https options: + +```javascript +var eventSourceInitDict = {https: {rejectUnauthorized: false}}; +var es = new EventSource(url, eventSourceInitDict); +``` + +Note that for Node.js < v0.10.x this option has no effect - unauthorized HTTPS requests are *always* allowed. + +### HTTP status code on error events + +Unauthorized and redirect error status codes (for example 401, 403, 301, 307) are available in the `status` property in the error event. + +```javascript +es.onerror = function (err) { + if (err) { + if (err.status === 401 || err.status === 403) { + console.log('not authorized'); + } + } +}; +``` + +### HTTP/HTTPS proxy + +You can define a `proxy` option for the HTTP request to be used. This is typically useful if you are behind a corporate firewall. + +```javascript +var es = new EventSource(url, {proxy: 'http://your.proxy.com'}); +``` + + +## License + +MIT-licensed. 
See LICENSE diff --git a/node_modules/smee-client/node_modules/eventsource/example/eventsource-polyfill.js b/node_modules/smee-client/node_modules/eventsource/example/eventsource-polyfill.js new file mode 100644 index 0000000..99d4bf2 --- /dev/null +++ b/node_modules/smee-client/node_modules/eventsource/example/eventsource-polyfill.js @@ -0,0 +1,9726 @@ +/******/ (function(modules) { // webpackBootstrap +/******/ // The module cache +/******/ var installedModules = {}; +/******/ +/******/ // The require function +/******/ function __webpack_require__(moduleId) { +/******/ +/******/ // Check if module is in cache +/******/ if(installedModules[moduleId]) { +/******/ return installedModules[moduleId].exports; +/******/ } +/******/ // Create a new module (and put it into the cache) +/******/ var module = installedModules[moduleId] = { +/******/ i: moduleId, +/******/ l: false, +/******/ exports: {} +/******/ }; +/******/ +/******/ // Execute the module function +/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__); +/******/ +/******/ // Flag the module as loaded +/******/ module.l = true; +/******/ +/******/ // Return the exports of the module +/******/ return module.exports; +/******/ } +/******/ +/******/ +/******/ // expose the modules object (__webpack_modules__) +/******/ __webpack_require__.m = modules; +/******/ +/******/ // expose the module cache +/******/ __webpack_require__.c = installedModules; +/******/ +/******/ // define getter function for harmony exports +/******/ __webpack_require__.d = function(exports, name, getter) { +/******/ if(!__webpack_require__.o(exports, name)) { +/******/ Object.defineProperty(exports, name, { +/******/ configurable: false, +/******/ enumerable: true, +/******/ get: getter +/******/ }); +/******/ } +/******/ }; +/******/ +/******/ // getDefaultExport function for compatibility with non-harmony modules +/******/ __webpack_require__.n = function(module) { +/******/ var getter = module && module.__esModule ? +/******/ function getDefault() { return module['default']; } : +/******/ function getModuleExports() { return module; }; +/******/ __webpack_require__.d(getter, 'a', getter); +/******/ return getter; +/******/ }; +/******/ +/******/ // Object.prototype.hasOwnProperty.call +/******/ __webpack_require__.o = function(object, property) { return Object.prototype.hasOwnProperty.call(object, property); }; +/******/ +/******/ // __webpack_public_path__ +/******/ __webpack_require__.p = ""; +/******/ +/******/ // Load entry module and return exports +/******/ return __webpack_require__(__webpack_require__.s = 21); +/******/ }) +/************************************************************************/ +/******/ ([ +/* 0 */ +/***/ (function(module, exports) { + +var g; + +// This works in non-strict mode +g = (function() { + return this; +})(); + +try { + // This works if eval is allowed (see CSP) + g = g || Function("return this")() || (1,eval)("this"); +} catch(e) { + // This works if the window reference is available + if(typeof window === "object") + g = window; +} + +// g can still be undefined, but nothing to do about it... +// We return undefined, instead of nothing here, so it's +// easier to handle this case. if(!global) { ...} + +module.exports = g; + + +/***/ }), +/* 1 */ +/***/ (function(module, exports) { + +// shim for using process in browser +var process = module.exports = {}; + +// cached from whatever global is present so that test runners that stub it +// don't break things. 
But we need to wrap it in a try catch in case it is +// wrapped in strict mode code which doesn't define any globals. It's inside a +// function because try/catches deoptimize in certain engines. + +var cachedSetTimeout; +var cachedClearTimeout; + +function defaultSetTimout() { + throw new Error('setTimeout has not been defined'); +} +function defaultClearTimeout () { + throw new Error('clearTimeout has not been defined'); +} +(function () { + try { + if (typeof setTimeout === 'function') { + cachedSetTimeout = setTimeout; + } else { + cachedSetTimeout = defaultSetTimout; + } + } catch (e) { + cachedSetTimeout = defaultSetTimout; + } + try { + if (typeof clearTimeout === 'function') { + cachedClearTimeout = clearTimeout; + } else { + cachedClearTimeout = defaultClearTimeout; + } + } catch (e) { + cachedClearTimeout = defaultClearTimeout; + } +} ()) +function runTimeout(fun) { + if (cachedSetTimeout === setTimeout) { + //normal enviroments in sane situations + return setTimeout(fun, 0); + } + // if setTimeout wasn't available but was latter defined + if ((cachedSetTimeout === defaultSetTimout || !cachedSetTimeout) && setTimeout) { + cachedSetTimeout = setTimeout; + return setTimeout(fun, 0); + } + try { + // when when somebody has screwed with setTimeout but no I.E. maddness + return cachedSetTimeout(fun, 0); + } catch(e){ + try { + // When we are in I.E. but the script has been evaled so I.E. doesn't trust the global object when called normally + return cachedSetTimeout.call(null, fun, 0); + } catch(e){ + // same as above but when it's a version of I.E. that must have the global object for 'this', hopfully our context correct otherwise it will throw a global error + return cachedSetTimeout.call(this, fun, 0); + } + } + + +} +function runClearTimeout(marker) { + if (cachedClearTimeout === clearTimeout) { + //normal enviroments in sane situations + return clearTimeout(marker); + } + // if clearTimeout wasn't available but was latter defined + if ((cachedClearTimeout === defaultClearTimeout || !cachedClearTimeout) && clearTimeout) { + cachedClearTimeout = clearTimeout; + return clearTimeout(marker); + } + try { + // when when somebody has screwed with setTimeout but no I.E. maddness + return cachedClearTimeout(marker); + } catch (e){ + try { + // When we are in I.E. but the script has been evaled so I.E. doesn't trust the global object when called normally + return cachedClearTimeout.call(null, marker); + } catch (e){ + // same as above but when it's a version of I.E. that must have the global object for 'this', hopfully our context correct otherwise it will throw a global error. + // Some versions of I.E. 
have different rules for clearTimeout vs setTimeout + return cachedClearTimeout.call(this, marker); + } + } + + + +} +var queue = []; +var draining = false; +var currentQueue; +var queueIndex = -1; + +function cleanUpNextTick() { + if (!draining || !currentQueue) { + return; + } + draining = false; + if (currentQueue.length) { + queue = currentQueue.concat(queue); + } else { + queueIndex = -1; + } + if (queue.length) { + drainQueue(); + } +} + +function drainQueue() { + if (draining) { + return; + } + var timeout = runTimeout(cleanUpNextTick); + draining = true; + + var len = queue.length; + while(len) { + currentQueue = queue; + queue = []; + while (++queueIndex < len) { + if (currentQueue) { + currentQueue[queueIndex].run(); + } + } + queueIndex = -1; + len = queue.length; + } + currentQueue = null; + draining = false; + runClearTimeout(timeout); +} + +process.nextTick = function (fun) { + var args = new Array(arguments.length - 1); + if (arguments.length > 1) { + for (var i = 1; i < arguments.length; i++) { + args[i - 1] = arguments[i]; + } + } + queue.push(new Item(fun, args)); + if (queue.length === 1 && !draining) { + runTimeout(drainQueue); + } +}; + +// v8 likes predictible objects +function Item(fun, array) { + this.fun = fun; + this.array = array; +} +Item.prototype.run = function () { + this.fun.apply(null, this.array); +}; +process.title = 'browser'; +process.browser = true; +process.env = {}; +process.argv = []; +process.version = ''; // empty string to avoid regexp issues +process.versions = {}; + +function noop() {} + +process.on = noop; +process.addListener = noop; +process.once = noop; +process.off = noop; +process.removeListener = noop; +process.removeAllListeners = noop; +process.emit = noop; +process.prependListener = noop; +process.prependOnceListener = noop; + +process.listeners = function (name) { return [] } + +process.binding = function (name) { + throw new Error('process.binding is not supported'); +}; + +process.cwd = function () { return '/' }; +process.chdir = function (dir) { + throw new Error('process.chdir is not supported'); +}; +process.umask = function() { return 0; }; + + +/***/ }), +/* 2 */ +/***/ (function(module, exports) { + +if (typeof Object.create === 'function') { + // implementation from standard node.js 'util' module + module.exports = function inherits(ctor, superCtor) { + ctor.super_ = superCtor + ctor.prototype = Object.create(superCtor.prototype, { + constructor: { + value: ctor, + enumerable: false, + writable: true, + configurable: true + } + }); + }; +} else { + // old school shim for old browsers + module.exports = function inherits(ctor, superCtor) { + ctor.super_ = superCtor + var TempCtor = function () {} + TempCtor.prototype = superCtor.prototype + ctor.prototype = new TempCtor() + ctor.prototype.constructor = ctor + } +} + + +/***/ }), +/* 3 */ +/***/ (function(module, exports, __webpack_require__) { + +"use strict"; +/* WEBPACK VAR INJECTION */(function(global) {/*! + * The buffer module from node.js, for the browser. 
+ * + * @author Feross Aboukhadijeh + * @license MIT + */ +/* eslint-disable no-proto */ + + + +var base64 = __webpack_require__(23) +var ieee754 = __webpack_require__(24) +var isArray = __webpack_require__(10) + +exports.Buffer = Buffer +exports.SlowBuffer = SlowBuffer +exports.INSPECT_MAX_BYTES = 50 + +/** + * If `Buffer.TYPED_ARRAY_SUPPORT`: + * === true Use Uint8Array implementation (fastest) + * === false Use Object implementation (most compatible, even IE6) + * + * Browsers that support typed arrays are IE 10+, Firefox 4+, Chrome 7+, Safari 5.1+, + * Opera 11.6+, iOS 4.2+. + * + * Due to various browser bugs, sometimes the Object implementation will be used even + * when the browser supports typed arrays. + * + * Note: + * + * - Firefox 4-29 lacks support for adding new properties to `Uint8Array` instances, + * See: https://bugzilla.mozilla.org/show_bug.cgi?id=695438. + * + * - Chrome 9-10 is missing the `TypedArray.prototype.subarray` function. + * + * - IE10 has a broken `TypedArray.prototype.subarray` function which returns arrays of + * incorrect length in some situations. + + * We detect these buggy browsers and set `Buffer.TYPED_ARRAY_SUPPORT` to `false` so they + * get the Object implementation, which is slower but behaves correctly. + */ +Buffer.TYPED_ARRAY_SUPPORT = global.TYPED_ARRAY_SUPPORT !== undefined + ? global.TYPED_ARRAY_SUPPORT + : typedArraySupport() + +/* + * Export kMaxLength after typed array support is determined. + */ +exports.kMaxLength = kMaxLength() + +function typedArraySupport () { + try { + var arr = new Uint8Array(1) + arr.__proto__ = {__proto__: Uint8Array.prototype, foo: function () { return 42 }} + return arr.foo() === 42 && // typed array instances can be augmented + typeof arr.subarray === 'function' && // chrome 9-10 lack `subarray` + arr.subarray(1, 1).byteLength === 0 // ie10 has broken `subarray` + } catch (e) { + return false + } +} + +function kMaxLength () { + return Buffer.TYPED_ARRAY_SUPPORT + ? 0x7fffffff + : 0x3fffffff +} + +function createBuffer (that, length) { + if (kMaxLength() < length) { + throw new RangeError('Invalid typed array length') + } + if (Buffer.TYPED_ARRAY_SUPPORT) { + // Return an augmented `Uint8Array` instance, for best performance + that = new Uint8Array(length) + that.__proto__ = Buffer.prototype + } else { + // Fallback: Return an object instance of the Buffer class + if (that === null) { + that = new Buffer(length) + } + that.length = length + } + + return that +} + +/** + * The Buffer constructor returns instances of `Uint8Array` that have their + * prototype changed to `Buffer.prototype`. Furthermore, `Buffer` is a subclass of + * `Uint8Array`, so the returned instances will have all the node `Buffer` methods + * and the `Uint8Array` methods. Square bracket notation works as expected -- it + * returns a single octet. + * + * The `Uint8Array` prototype remains unmodified. + */ + +function Buffer (arg, encodingOrOffset, length) { + if (!Buffer.TYPED_ARRAY_SUPPORT && !(this instanceof Buffer)) { + return new Buffer(arg, encodingOrOffset, length) + } + + // Common case. + if (typeof arg === 'number') { + if (typeof encodingOrOffset === 'string') { + throw new Error( + 'If encoding is specified then the first argument must be a string' + ) + } + return allocUnsafe(this, arg) + } + return from(this, arg, encodingOrOffset, length) +} + +Buffer.poolSize = 8192 // not used by this implementation + +// TODO: Legacy, not needed anymore. Remove in next major version. 
+Buffer._augment = function (arr) { + arr.__proto__ = Buffer.prototype + return arr +} + +function from (that, value, encodingOrOffset, length) { + if (typeof value === 'number') { + throw new TypeError('"value" argument must not be a number') + } + + if (typeof ArrayBuffer !== 'undefined' && value instanceof ArrayBuffer) { + return fromArrayBuffer(that, value, encodingOrOffset, length) + } + + if (typeof value === 'string') { + return fromString(that, value, encodingOrOffset) + } + + return fromObject(that, value) +} + +/** + * Functionally equivalent to Buffer(arg, encoding) but throws a TypeError + * if value is a number. + * Buffer.from(str[, encoding]) + * Buffer.from(array) + * Buffer.from(buffer) + * Buffer.from(arrayBuffer[, byteOffset[, length]]) + **/ +Buffer.from = function (value, encodingOrOffset, length) { + return from(null, value, encodingOrOffset, length) +} + +if (Buffer.TYPED_ARRAY_SUPPORT) { + Buffer.prototype.__proto__ = Uint8Array.prototype + Buffer.__proto__ = Uint8Array + if (typeof Symbol !== 'undefined' && Symbol.species && + Buffer[Symbol.species] === Buffer) { + // Fix subarray() in ES2016. See: https://github.com/feross/buffer/pull/97 + Object.defineProperty(Buffer, Symbol.species, { + value: null, + configurable: true + }) + } +} + +function assertSize (size) { + if (typeof size !== 'number') { + throw new TypeError('"size" argument must be a number') + } else if (size < 0) { + throw new RangeError('"size" argument must not be negative') + } +} + +function alloc (that, size, fill, encoding) { + assertSize(size) + if (size <= 0) { + return createBuffer(that, size) + } + if (fill !== undefined) { + // Only pay attention to encoding if it's a string. This + // prevents accidentally sending in a number that would + // be interpretted as a start offset. + return typeof encoding === 'string' + ? createBuffer(that, size).fill(fill, encoding) + : createBuffer(that, size).fill(fill) + } + return createBuffer(that, size) +} + +/** + * Creates a new filled Buffer instance. + * alloc(size[, fill[, encoding]]) + **/ +Buffer.alloc = function (size, fill, encoding) { + return alloc(null, size, fill, encoding) +} + +function allocUnsafe (that, size) { + assertSize(size) + that = createBuffer(that, size < 0 ? 0 : checked(size) | 0) + if (!Buffer.TYPED_ARRAY_SUPPORT) { + for (var i = 0; i < size; ++i) { + that[i] = 0 + } + } + return that +} + +/** + * Equivalent to Buffer(num), by default creates a non-zero-filled Buffer instance. + * */ +Buffer.allocUnsafe = function (size) { + return allocUnsafe(null, size) +} +/** + * Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance. + */ +Buffer.allocUnsafeSlow = function (size) { + return allocUnsafe(null, size) +} + +function fromString (that, string, encoding) { + if (typeof encoding !== 'string' || encoding === '') { + encoding = 'utf8' + } + + if (!Buffer.isEncoding(encoding)) { + throw new TypeError('"encoding" must be a valid string encoding') + } + + var length = byteLength(string, encoding) | 0 + that = createBuffer(that, length) + + var actual = that.write(string, encoding) + + if (actual !== length) { + // Writing a hex string, for example, that contains invalid characters will + // cause everything after the first invalid character to be ignored. (e.g. + // 'abxxcd' will be treated as 'ab') + that = that.slice(0, actual) + } + + return that +} + +function fromArrayLike (that, array) { + var length = array.length < 0 ? 
0 : checked(array.length) | 0 + that = createBuffer(that, length) + for (var i = 0; i < length; i += 1) { + that[i] = array[i] & 255 + } + return that +} + +function fromArrayBuffer (that, array, byteOffset, length) { + array.byteLength // this throws if `array` is not a valid ArrayBuffer + + if (byteOffset < 0 || array.byteLength < byteOffset) { + throw new RangeError('\'offset\' is out of bounds') + } + + if (array.byteLength < byteOffset + (length || 0)) { + throw new RangeError('\'length\' is out of bounds') + } + + if (byteOffset === undefined && length === undefined) { + array = new Uint8Array(array) + } else if (length === undefined) { + array = new Uint8Array(array, byteOffset) + } else { + array = new Uint8Array(array, byteOffset, length) + } + + if (Buffer.TYPED_ARRAY_SUPPORT) { + // Return an augmented `Uint8Array` instance, for best performance + that = array + that.__proto__ = Buffer.prototype + } else { + // Fallback: Return an object instance of the Buffer class + that = fromArrayLike(that, array) + } + return that +} + +function fromObject (that, obj) { + if (Buffer.isBuffer(obj)) { + var len = checked(obj.length) | 0 + that = createBuffer(that, len) + + if (that.length === 0) { + return that + } + + obj.copy(that, 0, 0, len) + return that + } + + if (obj) { + if ((typeof ArrayBuffer !== 'undefined' && + obj.buffer instanceof ArrayBuffer) || 'length' in obj) { + if (typeof obj.length !== 'number' || isnan(obj.length)) { + return createBuffer(that, 0) + } + return fromArrayLike(that, obj) + } + + if (obj.type === 'Buffer' && isArray(obj.data)) { + return fromArrayLike(that, obj.data) + } + } + + throw new TypeError('First argument must be a string, Buffer, ArrayBuffer, Array, or array-like object.') +} + +function checked (length) { + // Note: cannot use `length < kMaxLength()` here because that fails when + // length is NaN (which is otherwise coerced to zero.) 
+ if (length >= kMaxLength()) { + throw new RangeError('Attempt to allocate Buffer larger than maximum ' + + 'size: 0x' + kMaxLength().toString(16) + ' bytes') + } + return length | 0 +} + +function SlowBuffer (length) { + if (+length != length) { // eslint-disable-line eqeqeq + length = 0 + } + return Buffer.alloc(+length) +} + +Buffer.isBuffer = function isBuffer (b) { + return !!(b != null && b._isBuffer) +} + +Buffer.compare = function compare (a, b) { + if (!Buffer.isBuffer(a) || !Buffer.isBuffer(b)) { + throw new TypeError('Arguments must be Buffers') + } + + if (a === b) return 0 + + var x = a.length + var y = b.length + + for (var i = 0, len = Math.min(x, y); i < len; ++i) { + if (a[i] !== b[i]) { + x = a[i] + y = b[i] + break + } + } + + if (x < y) return -1 + if (y < x) return 1 + return 0 +} + +Buffer.isEncoding = function isEncoding (encoding) { + switch (String(encoding).toLowerCase()) { + case 'hex': + case 'utf8': + case 'utf-8': + case 'ascii': + case 'latin1': + case 'binary': + case 'base64': + case 'ucs2': + case 'ucs-2': + case 'utf16le': + case 'utf-16le': + return true + default: + return false + } +} + +Buffer.concat = function concat (list, length) { + if (!isArray(list)) { + throw new TypeError('"list" argument must be an Array of Buffers') + } + + if (list.length === 0) { + return Buffer.alloc(0) + } + + var i + if (length === undefined) { + length = 0 + for (i = 0; i < list.length; ++i) { + length += list[i].length + } + } + + var buffer = Buffer.allocUnsafe(length) + var pos = 0 + for (i = 0; i < list.length; ++i) { + var buf = list[i] + if (!Buffer.isBuffer(buf)) { + throw new TypeError('"list" argument must be an Array of Buffers') + } + buf.copy(buffer, pos) + pos += buf.length + } + return buffer +} + +function byteLength (string, encoding) { + if (Buffer.isBuffer(string)) { + return string.length + } + if (typeof ArrayBuffer !== 'undefined' && typeof ArrayBuffer.isView === 'function' && + (ArrayBuffer.isView(string) || string instanceof ArrayBuffer)) { + return string.byteLength + } + if (typeof string !== 'string') { + string = '' + string + } + + var len = string.length + if (len === 0) return 0 + + // Use a for loop to avoid recursion + var loweredCase = false + for (;;) { + switch (encoding) { + case 'ascii': + case 'latin1': + case 'binary': + return len + case 'utf8': + case 'utf-8': + case undefined: + return utf8ToBytes(string).length + case 'ucs2': + case 'ucs-2': + case 'utf16le': + case 'utf-16le': + return len * 2 + case 'hex': + return len >>> 1 + case 'base64': + return base64ToBytes(string).length + default: + if (loweredCase) return utf8ToBytes(string).length // assume utf8 + encoding = ('' + encoding).toLowerCase() + loweredCase = true + } + } +} +Buffer.byteLength = byteLength + +function slowToString (encoding, start, end) { + var loweredCase = false + + // No need to verify that "this.length <= MAX_UINT32" since it's a read-only + // property of a typed array. + + // This behaves neither like String nor Uint8Array in that we set start/end + // to their upper/lower bounds if the value passed is out of range. + // undefined is handled specially as per ECMA-262 6th Edition, + // Section 13.3.3.7 Runtime Semantics: KeyedBindingInitialization. + if (start === undefined || start < 0) { + start = 0 + } + // Return early if start > this.length. Done here to prevent potential uint32 + // coercion fail below. 
+ if (start > this.length) { + return '' + } + + if (end === undefined || end > this.length) { + end = this.length + } + + if (end <= 0) { + return '' + } + + // Force coersion to uint32. This will also coerce falsey/NaN values to 0. + end >>>= 0 + start >>>= 0 + + if (end <= start) { + return '' + } + + if (!encoding) encoding = 'utf8' + + while (true) { + switch (encoding) { + case 'hex': + return hexSlice(this, start, end) + + case 'utf8': + case 'utf-8': + return utf8Slice(this, start, end) + + case 'ascii': + return asciiSlice(this, start, end) + + case 'latin1': + case 'binary': + return latin1Slice(this, start, end) + + case 'base64': + return base64Slice(this, start, end) + + case 'ucs2': + case 'ucs-2': + case 'utf16le': + case 'utf-16le': + return utf16leSlice(this, start, end) + + default: + if (loweredCase) throw new TypeError('Unknown encoding: ' + encoding) + encoding = (encoding + '').toLowerCase() + loweredCase = true + } + } +} + +// The property is used by `Buffer.isBuffer` and `is-buffer` (in Safari 5-7) to detect +// Buffer instances. +Buffer.prototype._isBuffer = true + +function swap (b, n, m) { + var i = b[n] + b[n] = b[m] + b[m] = i +} + +Buffer.prototype.swap16 = function swap16 () { + var len = this.length + if (len % 2 !== 0) { + throw new RangeError('Buffer size must be a multiple of 16-bits') + } + for (var i = 0; i < len; i += 2) { + swap(this, i, i + 1) + } + return this +} + +Buffer.prototype.swap32 = function swap32 () { + var len = this.length + if (len % 4 !== 0) { + throw new RangeError('Buffer size must be a multiple of 32-bits') + } + for (var i = 0; i < len; i += 4) { + swap(this, i, i + 3) + swap(this, i + 1, i + 2) + } + return this +} + +Buffer.prototype.swap64 = function swap64 () { + var len = this.length + if (len % 8 !== 0) { + throw new RangeError('Buffer size must be a multiple of 64-bits') + } + for (var i = 0; i < len; i += 8) { + swap(this, i, i + 7) + swap(this, i + 1, i + 6) + swap(this, i + 2, i + 5) + swap(this, i + 3, i + 4) + } + return this +} + +Buffer.prototype.toString = function toString () { + var length = this.length | 0 + if (length === 0) return '' + if (arguments.length === 0) return utf8Slice(this, 0, length) + return slowToString.apply(this, arguments) +} + +Buffer.prototype.equals = function equals (b) { + if (!Buffer.isBuffer(b)) throw new TypeError('Argument must be a Buffer') + if (this === b) return true + return Buffer.compare(this, b) === 0 +} + +Buffer.prototype.inspect = function inspect () { + var str = '' + var max = exports.INSPECT_MAX_BYTES + if (this.length > 0) { + str = this.toString('hex', 0, max).match(/.{2}/g).join(' ') + if (this.length > max) str += ' ... ' + } + return '' +} + +Buffer.prototype.compare = function compare (target, start, end, thisStart, thisEnd) { + if (!Buffer.isBuffer(target)) { + throw new TypeError('Argument must be a Buffer') + } + + if (start === undefined) { + start = 0 + } + if (end === undefined) { + end = target ? 
target.length : 0 + } + if (thisStart === undefined) { + thisStart = 0 + } + if (thisEnd === undefined) { + thisEnd = this.length + } + + if (start < 0 || end > target.length || thisStart < 0 || thisEnd > this.length) { + throw new RangeError('out of range index') + } + + if (thisStart >= thisEnd && start >= end) { + return 0 + } + if (thisStart >= thisEnd) { + return -1 + } + if (start >= end) { + return 1 + } + + start >>>= 0 + end >>>= 0 + thisStart >>>= 0 + thisEnd >>>= 0 + + if (this === target) return 0 + + var x = thisEnd - thisStart + var y = end - start + var len = Math.min(x, y) + + var thisCopy = this.slice(thisStart, thisEnd) + var targetCopy = target.slice(start, end) + + for (var i = 0; i < len; ++i) { + if (thisCopy[i] !== targetCopy[i]) { + x = thisCopy[i] + y = targetCopy[i] + break + } + } + + if (x < y) return -1 + if (y < x) return 1 + return 0 +} + +// Finds either the first index of `val` in `buffer` at offset >= `byteOffset`, +// OR the last index of `val` in `buffer` at offset <= `byteOffset`. +// +// Arguments: +// - buffer - a Buffer to search +// - val - a string, Buffer, or number +// - byteOffset - an index into `buffer`; will be clamped to an int32 +// - encoding - an optional encoding, relevant is val is a string +// - dir - true for indexOf, false for lastIndexOf +function bidirectionalIndexOf (buffer, val, byteOffset, encoding, dir) { + // Empty buffer means no match + if (buffer.length === 0) return -1 + + // Normalize byteOffset + if (typeof byteOffset === 'string') { + encoding = byteOffset + byteOffset = 0 + } else if (byteOffset > 0x7fffffff) { + byteOffset = 0x7fffffff + } else if (byteOffset < -0x80000000) { + byteOffset = -0x80000000 + } + byteOffset = +byteOffset // Coerce to Number. + if (isNaN(byteOffset)) { + // byteOffset: it it's undefined, null, NaN, "foo", etc, search whole buffer + byteOffset = dir ? 
0 : (buffer.length - 1) + } + + // Normalize byteOffset: negative offsets start from the end of the buffer + if (byteOffset < 0) byteOffset = buffer.length + byteOffset + if (byteOffset >= buffer.length) { + if (dir) return -1 + else byteOffset = buffer.length - 1 + } else if (byteOffset < 0) { + if (dir) byteOffset = 0 + else return -1 + } + + // Normalize val + if (typeof val === 'string') { + val = Buffer.from(val, encoding) + } + + // Finally, search either indexOf (if dir is true) or lastIndexOf + if (Buffer.isBuffer(val)) { + // Special case: looking for empty string/buffer always fails + if (val.length === 0) { + return -1 + } + return arrayIndexOf(buffer, val, byteOffset, encoding, dir) + } else if (typeof val === 'number') { + val = val & 0xFF // Search for a byte value [0-255] + if (Buffer.TYPED_ARRAY_SUPPORT && + typeof Uint8Array.prototype.indexOf === 'function') { + if (dir) { + return Uint8Array.prototype.indexOf.call(buffer, val, byteOffset) + } else { + return Uint8Array.prototype.lastIndexOf.call(buffer, val, byteOffset) + } + } + return arrayIndexOf(buffer, [ val ], byteOffset, encoding, dir) + } + + throw new TypeError('val must be string, number or Buffer') +} + +function arrayIndexOf (arr, val, byteOffset, encoding, dir) { + var indexSize = 1 + var arrLength = arr.length + var valLength = val.length + + if (encoding !== undefined) { + encoding = String(encoding).toLowerCase() + if (encoding === 'ucs2' || encoding === 'ucs-2' || + encoding === 'utf16le' || encoding === 'utf-16le') { + if (arr.length < 2 || val.length < 2) { + return -1 + } + indexSize = 2 + arrLength /= 2 + valLength /= 2 + byteOffset /= 2 + } + } + + function read (buf, i) { + if (indexSize === 1) { + return buf[i] + } else { + return buf.readUInt16BE(i * indexSize) + } + } + + var i + if (dir) { + var foundIndex = -1 + for (i = byteOffset; i < arrLength; i++) { + if (read(arr, i) === read(val, foundIndex === -1 ? 
0 : i - foundIndex)) { + if (foundIndex === -1) foundIndex = i + if (i - foundIndex + 1 === valLength) return foundIndex * indexSize + } else { + if (foundIndex !== -1) i -= i - foundIndex + foundIndex = -1 + } + } + } else { + if (byteOffset + valLength > arrLength) byteOffset = arrLength - valLength + for (i = byteOffset; i >= 0; i--) { + var found = true + for (var j = 0; j < valLength; j++) { + if (read(arr, i + j) !== read(val, j)) { + found = false + break + } + } + if (found) return i + } + } + + return -1 +} + +Buffer.prototype.includes = function includes (val, byteOffset, encoding) { + return this.indexOf(val, byteOffset, encoding) !== -1 +} + +Buffer.prototype.indexOf = function indexOf (val, byteOffset, encoding) { + return bidirectionalIndexOf(this, val, byteOffset, encoding, true) +} + +Buffer.prototype.lastIndexOf = function lastIndexOf (val, byteOffset, encoding) { + return bidirectionalIndexOf(this, val, byteOffset, encoding, false) +} + +function hexWrite (buf, string, offset, length) { + offset = Number(offset) || 0 + var remaining = buf.length - offset + if (!length) { + length = remaining + } else { + length = Number(length) + if (length > remaining) { + length = remaining + } + } + + // must be an even number of digits + var strLen = string.length + if (strLen % 2 !== 0) throw new TypeError('Invalid hex string') + + if (length > strLen / 2) { + length = strLen / 2 + } + for (var i = 0; i < length; ++i) { + var parsed = parseInt(string.substr(i * 2, 2), 16) + if (isNaN(parsed)) return i + buf[offset + i] = parsed + } + return i +} + +function utf8Write (buf, string, offset, length) { + return blitBuffer(utf8ToBytes(string, buf.length - offset), buf, offset, length) +} + +function asciiWrite (buf, string, offset, length) { + return blitBuffer(asciiToBytes(string), buf, offset, length) +} + +function latin1Write (buf, string, offset, length) { + return asciiWrite(buf, string, offset, length) +} + +function base64Write (buf, string, offset, length) { + return blitBuffer(base64ToBytes(string), buf, offset, length) +} + +function ucs2Write (buf, string, offset, length) { + return blitBuffer(utf16leToBytes(string, buf.length - offset), buf, offset, length) +} + +Buffer.prototype.write = function write (string, offset, length, encoding) { + // Buffer#write(string) + if (offset === undefined) { + encoding = 'utf8' + length = this.length + offset = 0 + // Buffer#write(string, encoding) + } else if (length === undefined && typeof offset === 'string') { + encoding = offset + length = this.length + offset = 0 + // Buffer#write(string, offset[, length][, encoding]) + } else if (isFinite(offset)) { + offset = offset | 0 + if (isFinite(length)) { + length = length | 0 + if (encoding === undefined) encoding = 'utf8' + } else { + encoding = length + length = undefined + } + // legacy write(string, encoding, offset, length) - remove in v0.13 + } else { + throw new Error( + 'Buffer.write(string, encoding, offset[, length]) is no longer supported' + ) + } + + var remaining = this.length - offset + if (length === undefined || length > remaining) length = remaining + + if ((string.length > 0 && (length < 0 || offset < 0)) || offset > this.length) { + throw new RangeError('Attempt to write outside buffer bounds') + } + + if (!encoding) encoding = 'utf8' + + var loweredCase = false + for (;;) { + switch (encoding) { + case 'hex': + return hexWrite(this, string, offset, length) + + case 'utf8': + case 'utf-8': + return utf8Write(this, string, offset, length) + + case 'ascii': + return 
asciiWrite(this, string, offset, length) + + case 'latin1': + case 'binary': + return latin1Write(this, string, offset, length) + + case 'base64': + // Warning: maxLength not taken into account in base64Write + return base64Write(this, string, offset, length) + + case 'ucs2': + case 'ucs-2': + case 'utf16le': + case 'utf-16le': + return ucs2Write(this, string, offset, length) + + default: + if (loweredCase) throw new TypeError('Unknown encoding: ' + encoding) + encoding = ('' + encoding).toLowerCase() + loweredCase = true + } + } +} + +Buffer.prototype.toJSON = function toJSON () { + return { + type: 'Buffer', + data: Array.prototype.slice.call(this._arr || this, 0) + } +} + +function base64Slice (buf, start, end) { + if (start === 0 && end === buf.length) { + return base64.fromByteArray(buf) + } else { + return base64.fromByteArray(buf.slice(start, end)) + } +} + +function utf8Slice (buf, start, end) { + end = Math.min(buf.length, end) + var res = [] + + var i = start + while (i < end) { + var firstByte = buf[i] + var codePoint = null + var bytesPerSequence = (firstByte > 0xEF) ? 4 + : (firstByte > 0xDF) ? 3 + : (firstByte > 0xBF) ? 2 + : 1 + + if (i + bytesPerSequence <= end) { + var secondByte, thirdByte, fourthByte, tempCodePoint + + switch (bytesPerSequence) { + case 1: + if (firstByte < 0x80) { + codePoint = firstByte + } + break + case 2: + secondByte = buf[i + 1] + if ((secondByte & 0xC0) === 0x80) { + tempCodePoint = (firstByte & 0x1F) << 0x6 | (secondByte & 0x3F) + if (tempCodePoint > 0x7F) { + codePoint = tempCodePoint + } + } + break + case 3: + secondByte = buf[i + 1] + thirdByte = buf[i + 2] + if ((secondByte & 0xC0) === 0x80 && (thirdByte & 0xC0) === 0x80) { + tempCodePoint = (firstByte & 0xF) << 0xC | (secondByte & 0x3F) << 0x6 | (thirdByte & 0x3F) + if (tempCodePoint > 0x7FF && (tempCodePoint < 0xD800 || tempCodePoint > 0xDFFF)) { + codePoint = tempCodePoint + } + } + break + case 4: + secondByte = buf[i + 1] + thirdByte = buf[i + 2] + fourthByte = buf[i + 3] + if ((secondByte & 0xC0) === 0x80 && (thirdByte & 0xC0) === 0x80 && (fourthByte & 0xC0) === 0x80) { + tempCodePoint = (firstByte & 0xF) << 0x12 | (secondByte & 0x3F) << 0xC | (thirdByte & 0x3F) << 0x6 | (fourthByte & 0x3F) + if (tempCodePoint > 0xFFFF && tempCodePoint < 0x110000) { + codePoint = tempCodePoint + } + } + } + } + + if (codePoint === null) { + // we did not generate a valid codePoint so insert a + // replacement char (U+FFFD) and advance only 1 byte + codePoint = 0xFFFD + bytesPerSequence = 1 + } else if (codePoint > 0xFFFF) { + // encode to utf16 (surrogate pair dance) + codePoint -= 0x10000 + res.push(codePoint >>> 10 & 0x3FF | 0xD800) + codePoint = 0xDC00 | codePoint & 0x3FF + } + + res.push(codePoint) + i += bytesPerSequence + } + + return decodeCodePointsArray(res) +} + +// Based on http://stackoverflow.com/a/22747272/680742, the browser with +// the lowest limit is Chrome, with 0x10000 args. +// We go 1 magnitude less, for safety +var MAX_ARGUMENTS_LENGTH = 0x1000 + +function decodeCodePointsArray (codePoints) { + var len = codePoints.length + if (len <= MAX_ARGUMENTS_LENGTH) { + return String.fromCharCode.apply(String, codePoints) // avoid extra slice() + } + + // Decode in chunks to avoid "call stack size exceeded". 
+ var res = '' + var i = 0 + while (i < len) { + res += String.fromCharCode.apply( + String, + codePoints.slice(i, i += MAX_ARGUMENTS_LENGTH) + ) + } + return res +} + +function asciiSlice (buf, start, end) { + var ret = '' + end = Math.min(buf.length, end) + + for (var i = start; i < end; ++i) { + ret += String.fromCharCode(buf[i] & 0x7F) + } + return ret +} + +function latin1Slice (buf, start, end) { + var ret = '' + end = Math.min(buf.length, end) + + for (var i = start; i < end; ++i) { + ret += String.fromCharCode(buf[i]) + } + return ret +} + +function hexSlice (buf, start, end) { + var len = buf.length + + if (!start || start < 0) start = 0 + if (!end || end < 0 || end > len) end = len + + var out = '' + for (var i = start; i < end; ++i) { + out += toHex(buf[i]) + } + return out +} + +function utf16leSlice (buf, start, end) { + var bytes = buf.slice(start, end) + var res = '' + for (var i = 0; i < bytes.length; i += 2) { + res += String.fromCharCode(bytes[i] + bytes[i + 1] * 256) + } + return res +} + +Buffer.prototype.slice = function slice (start, end) { + var len = this.length + start = ~~start + end = end === undefined ? len : ~~end + + if (start < 0) { + start += len + if (start < 0) start = 0 + } else if (start > len) { + start = len + } + + if (end < 0) { + end += len + if (end < 0) end = 0 + } else if (end > len) { + end = len + } + + if (end < start) end = start + + var newBuf + if (Buffer.TYPED_ARRAY_SUPPORT) { + newBuf = this.subarray(start, end) + newBuf.__proto__ = Buffer.prototype + } else { + var sliceLen = end - start + newBuf = new Buffer(sliceLen, undefined) + for (var i = 0; i < sliceLen; ++i) { + newBuf[i] = this[i + start] + } + } + + return newBuf +} + +/* + * Need to make sure that buffer isn't trying to write out of bounds. 
+ */ +function checkOffset (offset, ext, length) { + if ((offset % 1) !== 0 || offset < 0) throw new RangeError('offset is not uint') + if (offset + ext > length) throw new RangeError('Trying to access beyond buffer length') +} + +Buffer.prototype.readUIntLE = function readUIntLE (offset, byteLength, noAssert) { + offset = offset | 0 + byteLength = byteLength | 0 + if (!noAssert) checkOffset(offset, byteLength, this.length) + + var val = this[offset] + var mul = 1 + var i = 0 + while (++i < byteLength && (mul *= 0x100)) { + val += this[offset + i] * mul + } + + return val +} + +Buffer.prototype.readUIntBE = function readUIntBE (offset, byteLength, noAssert) { + offset = offset | 0 + byteLength = byteLength | 0 + if (!noAssert) { + checkOffset(offset, byteLength, this.length) + } + + var val = this[offset + --byteLength] + var mul = 1 + while (byteLength > 0 && (mul *= 0x100)) { + val += this[offset + --byteLength] * mul + } + + return val +} + +Buffer.prototype.readUInt8 = function readUInt8 (offset, noAssert) { + if (!noAssert) checkOffset(offset, 1, this.length) + return this[offset] +} + +Buffer.prototype.readUInt16LE = function readUInt16LE (offset, noAssert) { + if (!noAssert) checkOffset(offset, 2, this.length) + return this[offset] | (this[offset + 1] << 8) +} + +Buffer.prototype.readUInt16BE = function readUInt16BE (offset, noAssert) { + if (!noAssert) checkOffset(offset, 2, this.length) + return (this[offset] << 8) | this[offset + 1] +} + +Buffer.prototype.readUInt32LE = function readUInt32LE (offset, noAssert) { + if (!noAssert) checkOffset(offset, 4, this.length) + + return ((this[offset]) | + (this[offset + 1] << 8) | + (this[offset + 2] << 16)) + + (this[offset + 3] * 0x1000000) +} + +Buffer.prototype.readUInt32BE = function readUInt32BE (offset, noAssert) { + if (!noAssert) checkOffset(offset, 4, this.length) + + return (this[offset] * 0x1000000) + + ((this[offset + 1] << 16) | + (this[offset + 2] << 8) | + this[offset + 3]) +} + +Buffer.prototype.readIntLE = function readIntLE (offset, byteLength, noAssert) { + offset = offset | 0 + byteLength = byteLength | 0 + if (!noAssert) checkOffset(offset, byteLength, this.length) + + var val = this[offset] + var mul = 1 + var i = 0 + while (++i < byteLength && (mul *= 0x100)) { + val += this[offset + i] * mul + } + mul *= 0x80 + + if (val >= mul) val -= Math.pow(2, 8 * byteLength) + + return val +} + +Buffer.prototype.readIntBE = function readIntBE (offset, byteLength, noAssert) { + offset = offset | 0 + byteLength = byteLength | 0 + if (!noAssert) checkOffset(offset, byteLength, this.length) + + var i = byteLength + var mul = 1 + var val = this[offset + --i] + while (i > 0 && (mul *= 0x100)) { + val += this[offset + --i] * mul + } + mul *= 0x80 + + if (val >= mul) val -= Math.pow(2, 8 * byteLength) + + return val +} + +Buffer.prototype.readInt8 = function readInt8 (offset, noAssert) { + if (!noAssert) checkOffset(offset, 1, this.length) + if (!(this[offset] & 0x80)) return (this[offset]) + return ((0xff - this[offset] + 1) * -1) +} + +Buffer.prototype.readInt16LE = function readInt16LE (offset, noAssert) { + if (!noAssert) checkOffset(offset, 2, this.length) + var val = this[offset] | (this[offset + 1] << 8) + return (val & 0x8000) ? val | 0xFFFF0000 : val +} + +Buffer.prototype.readInt16BE = function readInt16BE (offset, noAssert) { + if (!noAssert) checkOffset(offset, 2, this.length) + var val = this[offset + 1] | (this[offset] << 8) + return (val & 0x8000) ? 
val | 0xFFFF0000 : val +} + +Buffer.prototype.readInt32LE = function readInt32LE (offset, noAssert) { + if (!noAssert) checkOffset(offset, 4, this.length) + + return (this[offset]) | + (this[offset + 1] << 8) | + (this[offset + 2] << 16) | + (this[offset + 3] << 24) +} + +Buffer.prototype.readInt32BE = function readInt32BE (offset, noAssert) { + if (!noAssert) checkOffset(offset, 4, this.length) + + return (this[offset] << 24) | + (this[offset + 1] << 16) | + (this[offset + 2] << 8) | + (this[offset + 3]) +} + +Buffer.prototype.readFloatLE = function readFloatLE (offset, noAssert) { + if (!noAssert) checkOffset(offset, 4, this.length) + return ieee754.read(this, offset, true, 23, 4) +} + +Buffer.prototype.readFloatBE = function readFloatBE (offset, noAssert) { + if (!noAssert) checkOffset(offset, 4, this.length) + return ieee754.read(this, offset, false, 23, 4) +} + +Buffer.prototype.readDoubleLE = function readDoubleLE (offset, noAssert) { + if (!noAssert) checkOffset(offset, 8, this.length) + return ieee754.read(this, offset, true, 52, 8) +} + +Buffer.prototype.readDoubleBE = function readDoubleBE (offset, noAssert) { + if (!noAssert) checkOffset(offset, 8, this.length) + return ieee754.read(this, offset, false, 52, 8) +} + +function checkInt (buf, value, offset, ext, max, min) { + if (!Buffer.isBuffer(buf)) throw new TypeError('"buffer" argument must be a Buffer instance') + if (value > max || value < min) throw new RangeError('"value" argument is out of bounds') + if (offset + ext > buf.length) throw new RangeError('Index out of range') +} + +Buffer.prototype.writeUIntLE = function writeUIntLE (value, offset, byteLength, noAssert) { + value = +value + offset = offset | 0 + byteLength = byteLength | 0 + if (!noAssert) { + var maxBytes = Math.pow(2, 8 * byteLength) - 1 + checkInt(this, value, offset, byteLength, maxBytes, 0) + } + + var mul = 1 + var i = 0 + this[offset] = value & 0xFF + while (++i < byteLength && (mul *= 0x100)) { + this[offset + i] = (value / mul) & 0xFF + } + + return offset + byteLength +} + +Buffer.prototype.writeUIntBE = function writeUIntBE (value, offset, byteLength, noAssert) { + value = +value + offset = offset | 0 + byteLength = byteLength | 0 + if (!noAssert) { + var maxBytes = Math.pow(2, 8 * byteLength) - 1 + checkInt(this, value, offset, byteLength, maxBytes, 0) + } + + var i = byteLength - 1 + var mul = 1 + this[offset + i] = value & 0xFF + while (--i >= 0 && (mul *= 0x100)) { + this[offset + i] = (value / mul) & 0xFF + } + + return offset + byteLength +} + +Buffer.prototype.writeUInt8 = function writeUInt8 (value, offset, noAssert) { + value = +value + offset = offset | 0 + if (!noAssert) checkInt(this, value, offset, 1, 0xff, 0) + if (!Buffer.TYPED_ARRAY_SUPPORT) value = Math.floor(value) + this[offset] = (value & 0xff) + return offset + 1 +} + +function objectWriteUInt16 (buf, value, offset, littleEndian) { + if (value < 0) value = 0xffff + value + 1 + for (var i = 0, j = Math.min(buf.length - offset, 2); i < j; ++i) { + buf[offset + i] = (value & (0xff << (8 * (littleEndian ? i : 1 - i)))) >>> + (littleEndian ? 
i : 1 - i) * 8 + } +} + +Buffer.prototype.writeUInt16LE = function writeUInt16LE (value, offset, noAssert) { + value = +value + offset = offset | 0 + if (!noAssert) checkInt(this, value, offset, 2, 0xffff, 0) + if (Buffer.TYPED_ARRAY_SUPPORT) { + this[offset] = (value & 0xff) + this[offset + 1] = (value >>> 8) + } else { + objectWriteUInt16(this, value, offset, true) + } + return offset + 2 +} + +Buffer.prototype.writeUInt16BE = function writeUInt16BE (value, offset, noAssert) { + value = +value + offset = offset | 0 + if (!noAssert) checkInt(this, value, offset, 2, 0xffff, 0) + if (Buffer.TYPED_ARRAY_SUPPORT) { + this[offset] = (value >>> 8) + this[offset + 1] = (value & 0xff) + } else { + objectWriteUInt16(this, value, offset, false) + } + return offset + 2 +} + +function objectWriteUInt32 (buf, value, offset, littleEndian) { + if (value < 0) value = 0xffffffff + value + 1 + for (var i = 0, j = Math.min(buf.length - offset, 4); i < j; ++i) { + buf[offset + i] = (value >>> (littleEndian ? i : 3 - i) * 8) & 0xff + } +} + +Buffer.prototype.writeUInt32LE = function writeUInt32LE (value, offset, noAssert) { + value = +value + offset = offset | 0 + if (!noAssert) checkInt(this, value, offset, 4, 0xffffffff, 0) + if (Buffer.TYPED_ARRAY_SUPPORT) { + this[offset + 3] = (value >>> 24) + this[offset + 2] = (value >>> 16) + this[offset + 1] = (value >>> 8) + this[offset] = (value & 0xff) + } else { + objectWriteUInt32(this, value, offset, true) + } + return offset + 4 +} + +Buffer.prototype.writeUInt32BE = function writeUInt32BE (value, offset, noAssert) { + value = +value + offset = offset | 0 + if (!noAssert) checkInt(this, value, offset, 4, 0xffffffff, 0) + if (Buffer.TYPED_ARRAY_SUPPORT) { + this[offset] = (value >>> 24) + this[offset + 1] = (value >>> 16) + this[offset + 2] = (value >>> 8) + this[offset + 3] = (value & 0xff) + } else { + objectWriteUInt32(this, value, offset, false) + } + return offset + 4 +} + +Buffer.prototype.writeIntLE = function writeIntLE (value, offset, byteLength, noAssert) { + value = +value + offset = offset | 0 + if (!noAssert) { + var limit = Math.pow(2, 8 * byteLength - 1) + + checkInt(this, value, offset, byteLength, limit - 1, -limit) + } + + var i = 0 + var mul = 1 + var sub = 0 + this[offset] = value & 0xFF + while (++i < byteLength && (mul *= 0x100)) { + if (value < 0 && sub === 0 && this[offset + i - 1] !== 0) { + sub = 1 + } + this[offset + i] = ((value / mul) >> 0) - sub & 0xFF + } + + return offset + byteLength +} + +Buffer.prototype.writeIntBE = function writeIntBE (value, offset, byteLength, noAssert) { + value = +value + offset = offset | 0 + if (!noAssert) { + var limit = Math.pow(2, 8 * byteLength - 1) + + checkInt(this, value, offset, byteLength, limit - 1, -limit) + } + + var i = byteLength - 1 + var mul = 1 + var sub = 0 + this[offset + i] = value & 0xFF + while (--i >= 0 && (mul *= 0x100)) { + if (value < 0 && sub === 0 && this[offset + i + 1] !== 0) { + sub = 1 + } + this[offset + i] = ((value / mul) >> 0) - sub & 0xFF + } + + return offset + byteLength +} + +Buffer.prototype.writeInt8 = function writeInt8 (value, offset, noAssert) { + value = +value + offset = offset | 0 + if (!noAssert) checkInt(this, value, offset, 1, 0x7f, -0x80) + if (!Buffer.TYPED_ARRAY_SUPPORT) value = Math.floor(value) + if (value < 0) value = 0xff + value + 1 + this[offset] = (value & 0xff) + return offset + 1 +} + +Buffer.prototype.writeInt16LE = function writeInt16LE (value, offset, noAssert) { + value = +value + offset = offset | 0 + if (!noAssert) checkInt(this, 
value, offset, 2, 0x7fff, -0x8000) + if (Buffer.TYPED_ARRAY_SUPPORT) { + this[offset] = (value & 0xff) + this[offset + 1] = (value >>> 8) + } else { + objectWriteUInt16(this, value, offset, true) + } + return offset + 2 +} + +Buffer.prototype.writeInt16BE = function writeInt16BE (value, offset, noAssert) { + value = +value + offset = offset | 0 + if (!noAssert) checkInt(this, value, offset, 2, 0x7fff, -0x8000) + if (Buffer.TYPED_ARRAY_SUPPORT) { + this[offset] = (value >>> 8) + this[offset + 1] = (value & 0xff) + } else { + objectWriteUInt16(this, value, offset, false) + } + return offset + 2 +} + +Buffer.prototype.writeInt32LE = function writeInt32LE (value, offset, noAssert) { + value = +value + offset = offset | 0 + if (!noAssert) checkInt(this, value, offset, 4, 0x7fffffff, -0x80000000) + if (Buffer.TYPED_ARRAY_SUPPORT) { + this[offset] = (value & 0xff) + this[offset + 1] = (value >>> 8) + this[offset + 2] = (value >>> 16) + this[offset + 3] = (value >>> 24) + } else { + objectWriteUInt32(this, value, offset, true) + } + return offset + 4 +} + +Buffer.prototype.writeInt32BE = function writeInt32BE (value, offset, noAssert) { + value = +value + offset = offset | 0 + if (!noAssert) checkInt(this, value, offset, 4, 0x7fffffff, -0x80000000) + if (value < 0) value = 0xffffffff + value + 1 + if (Buffer.TYPED_ARRAY_SUPPORT) { + this[offset] = (value >>> 24) + this[offset + 1] = (value >>> 16) + this[offset + 2] = (value >>> 8) + this[offset + 3] = (value & 0xff) + } else { + objectWriteUInt32(this, value, offset, false) + } + return offset + 4 +} + +function checkIEEE754 (buf, value, offset, ext, max, min) { + if (offset + ext > buf.length) throw new RangeError('Index out of range') + if (offset < 0) throw new RangeError('Index out of range') +} + +function writeFloat (buf, value, offset, littleEndian, noAssert) { + if (!noAssert) { + checkIEEE754(buf, value, offset, 4, 3.4028234663852886e+38, -3.4028234663852886e+38) + } + ieee754.write(buf, value, offset, littleEndian, 23, 4) + return offset + 4 +} + +Buffer.prototype.writeFloatLE = function writeFloatLE (value, offset, noAssert) { + return writeFloat(this, value, offset, true, noAssert) +} + +Buffer.prototype.writeFloatBE = function writeFloatBE (value, offset, noAssert) { + return writeFloat(this, value, offset, false, noAssert) +} + +function writeDouble (buf, value, offset, littleEndian, noAssert) { + if (!noAssert) { + checkIEEE754(buf, value, offset, 8, 1.7976931348623157E+308, -1.7976931348623157E+308) + } + ieee754.write(buf, value, offset, littleEndian, 52, 8) + return offset + 8 +} + +Buffer.prototype.writeDoubleLE = function writeDoubleLE (value, offset, noAssert) { + return writeDouble(this, value, offset, true, noAssert) +} + +Buffer.prototype.writeDoubleBE = function writeDoubleBE (value, offset, noAssert) { + return writeDouble(this, value, offset, false, noAssert) +} + +// copy(targetBuffer, targetStart=0, sourceStart=0, sourceEnd=buffer.length) +Buffer.prototype.copy = function copy (target, targetStart, start, end) { + if (!start) start = 0 + if (!end && end !== 0) end = this.length + if (targetStart >= target.length) targetStart = target.length + if (!targetStart) targetStart = 0 + if (end > 0 && end < start) end = start + + // Copy 0 bytes; we're done + if (end === start) return 0 + if (target.length === 0 || this.length === 0) return 0 + + // Fatal error conditions + if (targetStart < 0) { + throw new RangeError('targetStart out of bounds') + } + if (start < 0 || start >= this.length) throw new RangeError('sourceStart 
out of bounds') + if (end < 0) throw new RangeError('sourceEnd out of bounds') + + // Are we oob? + if (end > this.length) end = this.length + if (target.length - targetStart < end - start) { + end = target.length - targetStart + start + } + + var len = end - start + var i + + if (this === target && start < targetStart && targetStart < end) { + // descending copy from end + for (i = len - 1; i >= 0; --i) { + target[i + targetStart] = this[i + start] + } + } else if (len < 1000 || !Buffer.TYPED_ARRAY_SUPPORT) { + // ascending copy from start + for (i = 0; i < len; ++i) { + target[i + targetStart] = this[i + start] + } + } else { + Uint8Array.prototype.set.call( + target, + this.subarray(start, start + len), + targetStart + ) + } + + return len +} + +// Usage: +// buffer.fill(number[, offset[, end]]) +// buffer.fill(buffer[, offset[, end]]) +// buffer.fill(string[, offset[, end]][, encoding]) +Buffer.prototype.fill = function fill (val, start, end, encoding) { + // Handle string cases: + if (typeof val === 'string') { + if (typeof start === 'string') { + encoding = start + start = 0 + end = this.length + } else if (typeof end === 'string') { + encoding = end + end = this.length + } + if (val.length === 1) { + var code = val.charCodeAt(0) + if (code < 256) { + val = code + } + } + if (encoding !== undefined && typeof encoding !== 'string') { + throw new TypeError('encoding must be a string') + } + if (typeof encoding === 'string' && !Buffer.isEncoding(encoding)) { + throw new TypeError('Unknown encoding: ' + encoding) + } + } else if (typeof val === 'number') { + val = val & 255 + } + + // Invalid ranges are not set to a default, so can range check early. + if (start < 0 || this.length < start || this.length < end) { + throw new RangeError('Out of range index') + } + + if (end <= start) { + return this + } + + start = start >>> 0 + end = end === undefined ? this.length : end >>> 0 + + if (!val) val = 0 + + var i + if (typeof val === 'number') { + for (i = start; i < end; ++i) { + this[i] = val + } + } else { + var bytes = Buffer.isBuffer(val) + ? 
val + : utf8ToBytes(new Buffer(val, encoding).toString()) + var len = bytes.length + for (i = 0; i < end - start; ++i) { + this[i + start] = bytes[i % len] + } + } + + return this +} + +// HELPER FUNCTIONS +// ================ + +var INVALID_BASE64_RE = /[^+\/0-9A-Za-z-_]/g + +function base64clean (str) { + // Node strips out invalid characters like \n and \t from the string, base64-js does not + str = stringtrim(str).replace(INVALID_BASE64_RE, '') + // Node converts strings with length < 2 to '' + if (str.length < 2) return '' + // Node allows for non-padded base64 strings (missing trailing ===), base64-js does not + while (str.length % 4 !== 0) { + str = str + '=' + } + return str +} + +function stringtrim (str) { + if (str.trim) return str.trim() + return str.replace(/^\s+|\s+$/g, '') +} + +function toHex (n) { + if (n < 16) return '0' + n.toString(16) + return n.toString(16) +} + +function utf8ToBytes (string, units) { + units = units || Infinity + var codePoint + var length = string.length + var leadSurrogate = null + var bytes = [] + + for (var i = 0; i < length; ++i) { + codePoint = string.charCodeAt(i) + + // is surrogate component + if (codePoint > 0xD7FF && codePoint < 0xE000) { + // last char was a lead + if (!leadSurrogate) { + // no lead yet + if (codePoint > 0xDBFF) { + // unexpected trail + if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD) + continue + } else if (i + 1 === length) { + // unpaired lead + if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD) + continue + } + + // valid lead + leadSurrogate = codePoint + + continue + } + + // 2 leads in a row + if (codePoint < 0xDC00) { + if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD) + leadSurrogate = codePoint + continue + } + + // valid surrogate pair + codePoint = (leadSurrogate - 0xD800 << 10 | codePoint - 0xDC00) + 0x10000 + } else if (leadSurrogate) { + // valid bmp char, but last char was a lead + if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD) + } + + leadSurrogate = null + + // encode utf8 + if (codePoint < 0x80) { + if ((units -= 1) < 0) break + bytes.push(codePoint) + } else if (codePoint < 0x800) { + if ((units -= 2) < 0) break + bytes.push( + codePoint >> 0x6 | 0xC0, + codePoint & 0x3F | 0x80 + ) + } else if (codePoint < 0x10000) { + if ((units -= 3) < 0) break + bytes.push( + codePoint >> 0xC | 0xE0, + codePoint >> 0x6 & 0x3F | 0x80, + codePoint & 0x3F | 0x80 + ) + } else if (codePoint < 0x110000) { + if ((units -= 4) < 0) break + bytes.push( + codePoint >> 0x12 | 0xF0, + codePoint >> 0xC & 0x3F | 0x80, + codePoint >> 0x6 & 0x3F | 0x80, + codePoint & 0x3F | 0x80 + ) + } else { + throw new Error('Invalid code point') + } + } + + return bytes +} + +function asciiToBytes (str) { + var byteArray = [] + for (var i = 0; i < str.length; ++i) { + // Node's code seems to be doing this and not & 0x7F.. 
+ byteArray.push(str.charCodeAt(i) & 0xFF) + } + return byteArray +} + +function utf16leToBytes (str, units) { + var c, hi, lo + var byteArray = [] + for (var i = 0; i < str.length; ++i) { + if ((units -= 2) < 0) break + + c = str.charCodeAt(i) + hi = c >> 8 + lo = c % 256 + byteArray.push(lo) + byteArray.push(hi) + } + + return byteArray +} + +function base64ToBytes (str) { + return base64.toByteArray(base64clean(str)) +} + +function blitBuffer (src, dst, offset, length) { + for (var i = 0; i < length; ++i) { + if ((i + offset >= dst.length) || (i >= src.length)) break + dst[i + offset] = src[i] + } + return i +} + +function isnan (val) { + return val !== val // eslint-disable-line no-self-compare +} + +/* WEBPACK VAR INJECTION */}.call(exports, __webpack_require__(0))) + +/***/ }), +/* 4 */ +/***/ (function(module, exports, __webpack_require__) { + +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a duplex stream is just a stream that is both readable and writable. +// Since JS doesn't have multiple prototypal inheritance, this class +// prototypally inherits from Readable, and then parasitically from +// Writable. 
+ + + +/**/ + +var pna = __webpack_require__(6); +/**/ + +/**/ +var objectKeys = Object.keys || function (obj) { + var keys = []; + for (var key in obj) { + keys.push(key); + }return keys; +}; +/**/ + +module.exports = Duplex; + +/**/ +var util = Object.create(__webpack_require__(5)); +util.inherits = __webpack_require__(2); +/**/ + +var Readable = __webpack_require__(15); +var Writable = __webpack_require__(18); + +util.inherits(Duplex, Readable); + +{ + // avoid scope creep, the keys array can then be collected + var keys = objectKeys(Writable.prototype); + for (var v = 0; v < keys.length; v++) { + var method = keys[v]; + if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; + } +} + +function Duplex(options) { + if (!(this instanceof Duplex)) return new Duplex(options); + + Readable.call(this, options); + Writable.call(this, options); + + if (options && options.readable === false) this.readable = false; + + if (options && options.writable === false) this.writable = false; + + this.allowHalfOpen = true; + if (options && options.allowHalfOpen === false) this.allowHalfOpen = false; + + this.once('end', onend); +} + +Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function () { + return this._writableState.highWaterMark; + } +}); + +// the no-half-open enforcer +function onend() { + // if we allow half-open state, or if the writable side ended, + // then we're ok. + if (this.allowHalfOpen || this._writableState.ended) return; + + // no more data can be written. + // But allow more writes to happen in this tick. + pna.nextTick(onEndNT, this); +} + +function onEndNT(self) { + self.end(); +} + +Object.defineProperty(Duplex.prototype, 'destroyed', { + get: function () { + if (this._readableState === undefined || this._writableState === undefined) { + return false; + } + return this._readableState.destroyed && this._writableState.destroyed; + }, + set: function (value) { + // we ignore the value if the stream + // has not been initialized yet + if (this._readableState === undefined || this._writableState === undefined) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + this._writableState.destroyed = value; + } +}); + +Duplex.prototype._destroy = function (err, cb) { + this.push(null); + this.end(); + + pna.nextTick(cb, err); +}; + +/***/ }), +/* 5 */ +/***/ (function(module, exports, __webpack_require__) { + +/* WEBPACK VAR INJECTION */(function(Buffer) {// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. 
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// NOTE: These type checking functions intentionally don't use `instanceof` +// because it is fragile and can be easily faked with `Object.create()`. + +function isArray(arg) { + if (Array.isArray) { + return Array.isArray(arg); + } + return objectToString(arg) === '[object Array]'; +} +exports.isArray = isArray; + +function isBoolean(arg) { + return typeof arg === 'boolean'; +} +exports.isBoolean = isBoolean; + +function isNull(arg) { + return arg === null; +} +exports.isNull = isNull; + +function isNullOrUndefined(arg) { + return arg == null; +} +exports.isNullOrUndefined = isNullOrUndefined; + +function isNumber(arg) { + return typeof arg === 'number'; +} +exports.isNumber = isNumber; + +function isString(arg) { + return typeof arg === 'string'; +} +exports.isString = isString; + +function isSymbol(arg) { + return typeof arg === 'symbol'; +} +exports.isSymbol = isSymbol; + +function isUndefined(arg) { + return arg === void 0; +} +exports.isUndefined = isUndefined; + +function isRegExp(re) { + return objectToString(re) === '[object RegExp]'; +} +exports.isRegExp = isRegExp; + +function isObject(arg) { + return typeof arg === 'object' && arg !== null; +} +exports.isObject = isObject; + +function isDate(d) { + return objectToString(d) === '[object Date]'; +} +exports.isDate = isDate; + +function isError(e) { + return (objectToString(e) === '[object Error]' || e instanceof Error); +} +exports.isError = isError; + +function isFunction(arg) { + return typeof arg === 'function'; +} +exports.isFunction = isFunction; + +function isPrimitive(arg) { + return arg === null || + typeof arg === 'boolean' || + typeof arg === 'number' || + typeof arg === 'string' || + typeof arg === 'symbol' || // ES6 symbol + typeof arg === 'undefined'; +} +exports.isPrimitive = isPrimitive; + +exports.isBuffer = Buffer.isBuffer; + +function objectToString(o) { + return Object.prototype.toString.call(o); +} + +/* WEBPACK VAR INJECTION */}.call(exports, __webpack_require__(3).Buffer)) + +/***/ }), +/* 6 */ +/***/ (function(module, exports, __webpack_require__) { + +"use strict"; +/* WEBPACK VAR INJECTION */(function(process) { + +if (typeof process === 'undefined' || + !process.version || + process.version.indexOf('v0.') === 0 || + process.version.indexOf('v1.') === 0 && process.version.indexOf('v1.8.') !== 0) { + module.exports = { nextTick: nextTick }; +} else { + module.exports = process +} + +function nextTick(fn, arg1, arg2, arg3) { + if (typeof fn !== 'function') { + throw new TypeError('"callback" argument must be a function'); + } + var len = arguments.length; + var args, i; + switch (len) { + case 0: + case 1: + return process.nextTick(fn); + case 2: + return process.nextTick(function afterTickOne() { + fn.call(null, arg1); + }); + case 3: + return process.nextTick(function afterTickTwo() { + fn.call(null, arg1, arg2); + }); + case 4: + return process.nextTick(function afterTickThree() { + fn.call(null, arg1, arg2, arg3); + }); + default: + args = new Array(len - 1); + i = 0; + while (i < args.length) { + args[i++] = 
arguments[i]; + } + return process.nextTick(function afterTick() { + fn.apply(null, args); + }); + } +} + + +/* WEBPACK VAR INJECTION */}.call(exports, __webpack_require__(1))) + +/***/ }), +/* 7 */ +/***/ (function(module, exports, __webpack_require__) { + +/* eslint-disable node/no-deprecated-api */ +var buffer = __webpack_require__(3) +var Buffer = buffer.Buffer + +// alternative to using Object.keys for old browsers +function copyProps (src, dst) { + for (var key in src) { + dst[key] = src[key] + } +} +if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) { + module.exports = buffer +} else { + // Copy properties from require('buffer') + copyProps(buffer, exports) + exports.Buffer = SafeBuffer +} + +function SafeBuffer (arg, encodingOrOffset, length) { + return Buffer(arg, encodingOrOffset, length) +} + +// Copy static methods from Buffer +copyProps(Buffer, SafeBuffer) + +SafeBuffer.from = function (arg, encodingOrOffset, length) { + if (typeof arg === 'number') { + throw new TypeError('Argument must not be a number') + } + return Buffer(arg, encodingOrOffset, length) +} + +SafeBuffer.alloc = function (size, fill, encoding) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + var buf = Buffer(size) + if (fill !== undefined) { + if (typeof encoding === 'string') { + buf.fill(fill, encoding) + } else { + buf.fill(fill) + } + } else { + buf.fill(0) + } + return buf +} + +SafeBuffer.allocUnsafe = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return Buffer(size) +} + +SafeBuffer.allocUnsafeSlow = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return buffer.SlowBuffer(size) +} + + +/***/ }), +/* 8 */ +/***/ (function(module, exports, __webpack_require__) { + +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
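Module 7 above is the safe-buffer shim: it re-exports Node's own Buffer when Buffer.from/Buffer.alloc already exist, and otherwise wraps the legacy constructor so callers get the modern, safer API. A rough usage sketch, assuming the module is consumed as the standalone 'safe-buffer' package (inside this bundle it is resolved through __webpack_require__ instead):

// Hypothetical standalone usage of the safe-buffer pattern shown above.
var SafeBuffer = require('safe-buffer').Buffer;

var fromString = SafeBuffer.from('hello', 'utf8'); // like Buffer.from
var zeroed = SafeBuffer.alloc(16);                 // always zero-filled

// Unlike the legacy Buffer(number) constructor, passing a number to from()
// throws, which prevents accidentally allocating uninitialized memory:
try {
  SafeBuffer.from(16);
} catch (e) {
  // TypeError: Argument must not be a number
}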
+ + + +var punycode = __webpack_require__(25); +var util = __webpack_require__(27); + +exports.parse = urlParse; +exports.resolve = urlResolve; +exports.resolveObject = urlResolveObject; +exports.format = urlFormat; + +exports.Url = Url; + +function Url() { + this.protocol = null; + this.slashes = null; + this.auth = null; + this.host = null; + this.port = null; + this.hostname = null; + this.hash = null; + this.search = null; + this.query = null; + this.pathname = null; + this.path = null; + this.href = null; +} + +// Reference: RFC 3986, RFC 1808, RFC 2396 + +// define these here so at least they only have to be +// compiled once on the first module load. +var protocolPattern = /^([a-z0-9.+-]+:)/i, + portPattern = /:[0-9]*$/, + + // Special case for a simple path URL + simplePathPattern = /^(\/\/?(?!\/)[^\?\s]*)(\?[^\s]*)?$/, + + // RFC 2396: characters reserved for delimiting URLs. + // We actually just auto-escape these. + delims = ['<', '>', '"', '`', ' ', '\r', '\n', '\t'], + + // RFC 2396: characters not allowed for various reasons. + unwise = ['{', '}', '|', '\\', '^', '`'].concat(delims), + + // Allowed by RFCs, but cause of XSS attacks. Always escape these. + autoEscape = ['\''].concat(unwise), + // Characters that are never ever allowed in a hostname. + // Note that any invalid chars are also handled, but these + // are the ones that are *expected* to be seen, so we fast-path + // them. + nonHostChars = ['%', '/', '?', ';', '#'].concat(autoEscape), + hostEndingChars = ['/', '?', '#'], + hostnameMaxLen = 255, + hostnamePartPattern = /^[+a-z0-9A-Z_-]{0,63}$/, + hostnamePartStart = /^([+a-z0-9A-Z_-]{0,63})(.*)$/, + // protocols that can allow "unsafe" and "unwise" chars. + unsafeProtocol = { + 'javascript': true, + 'javascript:': true + }, + // protocols that never have a hostname. + hostlessProtocol = { + 'javascript': true, + 'javascript:': true + }, + // protocols that always contain a // bit. + slashedProtocol = { + 'http': true, + 'https': true, + 'ftp': true, + 'gopher': true, + 'file': true, + 'http:': true, + 'https:': true, + 'ftp:': true, + 'gopher:': true, + 'file:': true + }, + querystring = __webpack_require__(28); + +function urlParse(url, parseQueryString, slashesDenoteHost) { + if (url && util.isObject(url) && url instanceof Url) return url; + + var u = new Url; + u.parse(url, parseQueryString, slashesDenoteHost); + return u; +} + +Url.prototype.parse = function(url, parseQueryString, slashesDenoteHost) { + if (!util.isString(url)) { + throw new TypeError("Parameter 'url' must be a string, not " + typeof url); + } + + // Copy chrome, IE, opera backslash-handling behavior. + // Back slashes before the query string get converted to forward slashes + // See: https://code.google.com/p/chromium/issues/detail?id=25916 + var queryIndex = url.indexOf('?'), + splitter = + (queryIndex !== -1 && queryIndex < url.indexOf('#')) ? '?' : '#', + uSplit = url.split(splitter), + slashRegex = /\\/g; + uSplit[0] = uSplit[0].replace(slashRegex, '/'); + url = uSplit.join(splitter); + + var rest = url; + + // trim before proceeding. 
+ // This is to support parse stuff like " http://foo.com \n" + rest = rest.trim(); + + if (!slashesDenoteHost && url.split('#').length === 1) { + // Try fast path regexp + var simplePath = simplePathPattern.exec(rest); + if (simplePath) { + this.path = rest; + this.href = rest; + this.pathname = simplePath[1]; + if (simplePath[2]) { + this.search = simplePath[2]; + if (parseQueryString) { + this.query = querystring.parse(this.search.substr(1)); + } else { + this.query = this.search.substr(1); + } + } else if (parseQueryString) { + this.search = ''; + this.query = {}; + } + return this; + } + } + + var proto = protocolPattern.exec(rest); + if (proto) { + proto = proto[0]; + var lowerProto = proto.toLowerCase(); + this.protocol = lowerProto; + rest = rest.substr(proto.length); + } + + // figure out if it's got a host + // user@server is *always* interpreted as a hostname, and url + // resolution will treat //foo/bar as host=foo,path=bar because that's + // how the browser resolves relative URLs. + if (slashesDenoteHost || proto || rest.match(/^\/\/[^@\/]+@[^@\/]+/)) { + var slashes = rest.substr(0, 2) === '//'; + if (slashes && !(proto && hostlessProtocol[proto])) { + rest = rest.substr(2); + this.slashes = true; + } + } + + if (!hostlessProtocol[proto] && + (slashes || (proto && !slashedProtocol[proto]))) { + + // there's a hostname. + // the first instance of /, ?, ;, or # ends the host. + // + // If there is an @ in the hostname, then non-host chars *are* allowed + // to the left of the last @ sign, unless some host-ending character + // comes *before* the @-sign. + // URLs are obnoxious. + // + // ex: + // http://a@b@c/ => user:a@b host:c + // http://a@b?@c => user:a host:c path:/?@c + + // v0.12 TODO(isaacs): This is not quite how Chrome does things. + // Review our test case against browsers more comprehensively. + + // find the first instance of any hostEndingChars + var hostEnd = -1; + for (var i = 0; i < hostEndingChars.length; i++) { + var hec = rest.indexOf(hostEndingChars[i]); + if (hec !== -1 && (hostEnd === -1 || hec < hostEnd)) + hostEnd = hec; + } + + // at this point, either we have an explicit point where the + // auth portion cannot go past, or the last @ char is the decider. + var auth, atSign; + if (hostEnd === -1) { + // atSign can be anywhere. + atSign = rest.lastIndexOf('@'); + } else { + // atSign must be in auth portion. + // http://a@b/c@d => host:b auth:a path:/c@d + atSign = rest.lastIndexOf('@', hostEnd); + } + + // Now we have a portion which is definitely the auth. + // Pull that off. + if (atSign !== -1) { + auth = rest.slice(0, atSign); + rest = rest.slice(atSign + 1); + this.auth = decodeURIComponent(auth); + } + + // the host is the remaining to the left of the first non-host char + hostEnd = -1; + for (var i = 0; i < nonHostChars.length; i++) { + var hec = rest.indexOf(nonHostChars[i]); + if (hec !== -1 && (hostEnd === -1 || hec < hostEnd)) + hostEnd = hec; + } + // if we still have not hit it, then the entire thing is a host. + if (hostEnd === -1) + hostEnd = rest.length; + + this.host = rest.slice(0, hostEnd); + rest = rest.slice(hostEnd); + + // pull out port. + this.parseHost(); + + // we've indicated that there is a hostname, + // so even if it's empty, it has to be present. + this.hostname = this.hostname || ''; + + // if hostname begins with [ and ends with ] + // assume that it's an IPv6 address. + var ipv6Hostname = this.hostname[0] === '[' && + this.hostname[this.hostname.length - 1] === ']'; + + // validate a little. 
+ if (!ipv6Hostname) { + var hostparts = this.hostname.split(/\./); + for (var i = 0, l = hostparts.length; i < l; i++) { + var part = hostparts[i]; + if (!part) continue; + if (!part.match(hostnamePartPattern)) { + var newpart = ''; + for (var j = 0, k = part.length; j < k; j++) { + if (part.charCodeAt(j) > 127) { + // we replace non-ASCII char with a temporary placeholder + // we need this to make sure size of hostname is not + // broken by replacing non-ASCII by nothing + newpart += 'x'; + } else { + newpart += part[j]; + } + } + // we test again with ASCII char only + if (!newpart.match(hostnamePartPattern)) { + var validParts = hostparts.slice(0, i); + var notHost = hostparts.slice(i + 1); + var bit = part.match(hostnamePartStart); + if (bit) { + validParts.push(bit[1]); + notHost.unshift(bit[2]); + } + if (notHost.length) { + rest = '/' + notHost.join('.') + rest; + } + this.hostname = validParts.join('.'); + break; + } + } + } + } + + if (this.hostname.length > hostnameMaxLen) { + this.hostname = ''; + } else { + // hostnames are always lower case. + this.hostname = this.hostname.toLowerCase(); + } + + if (!ipv6Hostname) { + // IDNA Support: Returns a punycoded representation of "domain". + // It only converts parts of the domain name that + // have non-ASCII characters, i.e. it doesn't matter if + // you call it with a domain that already is ASCII-only. + this.hostname = punycode.toASCII(this.hostname); + } + + var p = this.port ? ':' + this.port : ''; + var h = this.hostname || ''; + this.host = h + p; + this.href += this.host; + + // strip [ and ] from the hostname + // the host field still retains them, though + if (ipv6Hostname) { + this.hostname = this.hostname.substr(1, this.hostname.length - 2); + if (rest[0] !== '/') { + rest = '/' + rest; + } + } + } + + // now rest is set to the post-host stuff. + // chop off any delim chars. + if (!unsafeProtocol[lowerProto]) { + + // First, make 100% sure that any "autoEscape" chars get + // escaped, even if encodeURIComponent doesn't think they + // need to be. + for (var i = 0, l = autoEscape.length; i < l; i++) { + var ae = autoEscape[i]; + if (rest.indexOf(ae) === -1) + continue; + var esc = encodeURIComponent(ae); + if (esc === ae) { + esc = escape(ae); + } + rest = rest.split(ae).join(esc); + } + } + + + // chop off from the tail first. + var hash = rest.indexOf('#'); + if (hash !== -1) { + // got a fragment string. + this.hash = rest.substr(hash); + rest = rest.slice(0, hash); + } + var qm = rest.indexOf('?'); + if (qm !== -1) { + this.search = rest.substr(qm); + this.query = rest.substr(qm + 1); + if (parseQueryString) { + this.query = querystring.parse(this.query); + } + rest = rest.slice(0, qm); + } else if (parseQueryString) { + // no query string, but parseQueryString still requested + this.search = ''; + this.query = {}; + } + if (rest) this.pathname = rest; + if (slashedProtocol[lowerProto] && + this.hostname && !this.pathname) { + this.pathname = '/'; + } + + //to support http.request + if (this.pathname || this.search) { + var p = this.pathname || ''; + var s = this.search || ''; + this.path = p + s; + } + + // finally, reconstruct the href based on what has been validated. + this.href = this.format(); + return this; +}; + +// format a parsed object into a url string +function urlFormat(obj) { + // ensure it's an object, and not a string url. + // If it's an obj, this is a no-op. + // this way, you can call url_format() on strings + // to clean up potentially wonky urls. 
+ if (util.isString(obj)) obj = urlParse(obj); + if (!(obj instanceof Url)) return Url.prototype.format.call(obj); + return obj.format(); +} + +Url.prototype.format = function() { + var auth = this.auth || ''; + if (auth) { + auth = encodeURIComponent(auth); + auth = auth.replace(/%3A/i, ':'); + auth += '@'; + } + + var protocol = this.protocol || '', + pathname = this.pathname || '', + hash = this.hash || '', + host = false, + query = ''; + + if (this.host) { + host = auth + this.host; + } else if (this.hostname) { + host = auth + (this.hostname.indexOf(':') === -1 ? + this.hostname : + '[' + this.hostname + ']'); + if (this.port) { + host += ':' + this.port; + } + } + + if (this.query && + util.isObject(this.query) && + Object.keys(this.query).length) { + query = querystring.stringify(this.query); + } + + var search = this.search || (query && ('?' + query)) || ''; + + if (protocol && protocol.substr(-1) !== ':') protocol += ':'; + + // only the slashedProtocols get the //. Not mailto:, xmpp:, etc. + // unless they had them to begin with. + if (this.slashes || + (!protocol || slashedProtocol[protocol]) && host !== false) { + host = '//' + (host || ''); + if (pathname && pathname.charAt(0) !== '/') pathname = '/' + pathname; + } else if (!host) { + host = ''; + } + + if (hash && hash.charAt(0) !== '#') hash = '#' + hash; + if (search && search.charAt(0) !== '?') search = '?' + search; + + pathname = pathname.replace(/[?#]/g, function(match) { + return encodeURIComponent(match); + }); + search = search.replace('#', '%23'); + + return protocol + host + pathname + search + hash; +}; + +function urlResolve(source, relative) { + return urlParse(source, false, true).resolve(relative); +} + +Url.prototype.resolve = function(relative) { + return this.resolveObject(urlParse(relative, false, true)).format(); +}; + +function urlResolveObject(source, relative) { + if (!source) return relative; + return urlParse(source, false, true).resolveObject(relative); +} + +Url.prototype.resolveObject = function(relative) { + if (util.isString(relative)) { + var rel = new Url(); + rel.parse(relative, false, true); + relative = rel; + } + + var result = new Url(); + var tkeys = Object.keys(this); + for (var tk = 0; tk < tkeys.length; tk++) { + var tkey = tkeys[tk]; + result[tkey] = this[tkey]; + } + + // hash is always overridden, no matter what. + // even href="" will remove it. + result.hash = relative.hash; + + // if the relative url is empty, then there's nothing left to do here. + if (relative.href === '') { + result.href = result.format(); + return result; + } + + // hrefs like //foo/bar always cut to the protocol. + if (relative.slashes && !relative.protocol) { + // take everything except the protocol from relative + var rkeys = Object.keys(relative); + for (var rk = 0; rk < rkeys.length; rk++) { + var rkey = rkeys[rk]; + if (rkey !== 'protocol') + result[rkey] = relative[rkey]; + } + + //urlParse appends trailing / to urls like http://www.example.com + if (slashedProtocol[result.protocol] && + result.hostname && !result.pathname) { + result.path = result.pathname = '/'; + } + + result.href = result.format(); + return result; + } + + if (relative.protocol && relative.protocol !== result.protocol) { + // if it's a known url protocol, then changing + // the protocol does weird things + // first, if it's not file:, then we MUST have a host, + // and if there was a path + // to begin with, then we MUST have a path. + // if it is file:, then the host is dropped, + // because that's known to be hostless. 
+ // anything else is assumed to be absolute. + if (!slashedProtocol[relative.protocol]) { + var keys = Object.keys(relative); + for (var v = 0; v < keys.length; v++) { + var k = keys[v]; + result[k] = relative[k]; + } + result.href = result.format(); + return result; + } + + result.protocol = relative.protocol; + if (!relative.host && !hostlessProtocol[relative.protocol]) { + var relPath = (relative.pathname || '').split('/'); + while (relPath.length && !(relative.host = relPath.shift())); + if (!relative.host) relative.host = ''; + if (!relative.hostname) relative.hostname = ''; + if (relPath[0] !== '') relPath.unshift(''); + if (relPath.length < 2) relPath.unshift(''); + result.pathname = relPath.join('/'); + } else { + result.pathname = relative.pathname; + } + result.search = relative.search; + result.query = relative.query; + result.host = relative.host || ''; + result.auth = relative.auth; + result.hostname = relative.hostname || relative.host; + result.port = relative.port; + // to support http.request + if (result.pathname || result.search) { + var p = result.pathname || ''; + var s = result.search || ''; + result.path = p + s; + } + result.slashes = result.slashes || relative.slashes; + result.href = result.format(); + return result; + } + + var isSourceAbs = (result.pathname && result.pathname.charAt(0) === '/'), + isRelAbs = ( + relative.host || + relative.pathname && relative.pathname.charAt(0) === '/' + ), + mustEndAbs = (isRelAbs || isSourceAbs || + (result.host && relative.pathname)), + removeAllDots = mustEndAbs, + srcPath = result.pathname && result.pathname.split('/') || [], + relPath = relative.pathname && relative.pathname.split('/') || [], + psychotic = result.protocol && !slashedProtocol[result.protocol]; + + // if the url is a non-slashed url, then relative + // links like ../.. should be able + // to crawl up to the hostname, as well. This is strange. + // result.protocol has already been set by now. + // Later on, put the first path part into the host field. + if (psychotic) { + result.hostname = ''; + result.port = null; + if (result.host) { + if (srcPath[0] === '') srcPath[0] = result.host; + else srcPath.unshift(result.host); + } + result.host = ''; + if (relative.protocol) { + relative.hostname = null; + relative.port = null; + if (relative.host) { + if (relPath[0] === '') relPath[0] = relative.host; + else relPath.unshift(relative.host); + } + relative.host = null; + } + mustEndAbs = mustEndAbs && (relPath[0] === '' || srcPath[0] === ''); + } + + if (isRelAbs) { + // it's absolute. + result.host = (relative.host || relative.host === '') ? + relative.host : result.host; + result.hostname = (relative.hostname || relative.hostname === '') ? + relative.hostname : result.hostname; + result.search = relative.search; + result.query = relative.query; + srcPath = relPath; + // fall through to the dot-handling below. + } else if (relPath.length) { + // it's relative + // throw away the existing file, and take the new path instead. + if (!srcPath) srcPath = []; + srcPath.pop(); + srcPath = srcPath.concat(relPath); + result.search = relative.search; + result.query = relative.query; + } else if (!util.isNullOrUndefined(relative.search)) { + // just pull out the search. + // like href='?foo'. 
+ // Put this after the other two cases because it simplifies the booleans + if (psychotic) { + result.hostname = result.host = srcPath.shift(); + //occationaly the auth can get stuck only in host + //this especially happens in cases like + //url.resolveObject('mailto:local1@domain1', 'local2@domain2') + var authInHost = result.host && result.host.indexOf('@') > 0 ? + result.host.split('@') : false; + if (authInHost) { + result.auth = authInHost.shift(); + result.host = result.hostname = authInHost.shift(); + } + } + result.search = relative.search; + result.query = relative.query; + //to support http.request + if (!util.isNull(result.pathname) || !util.isNull(result.search)) { + result.path = (result.pathname ? result.pathname : '') + + (result.search ? result.search : ''); + } + result.href = result.format(); + return result; + } + + if (!srcPath.length) { + // no path at all. easy. + // we've already handled the other stuff above. + result.pathname = null; + //to support http.request + if (result.search) { + result.path = '/' + result.search; + } else { + result.path = null; + } + result.href = result.format(); + return result; + } + + // if a url ENDs in . or .., then it must get a trailing slash. + // however, if it ends in anything else non-slashy, + // then it must NOT get a trailing slash. + var last = srcPath.slice(-1)[0]; + var hasTrailingSlash = ( + (result.host || relative.host || srcPath.length > 1) && + (last === '.' || last === '..') || last === ''); + + // strip single dots, resolve double dots to parent dir + // if the path tries to go above the root, `up` ends up > 0 + var up = 0; + for (var i = srcPath.length; i >= 0; i--) { + last = srcPath[i]; + if (last === '.') { + srcPath.splice(i, 1); + } else if (last === '..') { + srcPath.splice(i, 1); + up++; + } else if (up) { + srcPath.splice(i, 1); + up--; + } + } + + // if the path is allowed to go above the root, restore leading ..s + if (!mustEndAbs && !removeAllDots) { + for (; up--; up) { + srcPath.unshift('..'); + } + } + + if (mustEndAbs && srcPath[0] !== '' && + (!srcPath[0] || srcPath[0].charAt(0) !== '/')) { + srcPath.unshift(''); + } + + if (hasTrailingSlash && (srcPath.join('/').substr(-1) !== '/')) { + srcPath.push(''); + } + + var isAbsolute = srcPath[0] === '' || + (srcPath[0] && srcPath[0].charAt(0) === '/'); + + // put the host back + if (psychotic) { + result.hostname = result.host = isAbsolute ? '' : + srcPath.length ? srcPath.shift() : ''; + //occationaly the auth can get stuck only in host + //this especially happens in cases like + //url.resolveObject('mailto:local1@domain1', 'local2@domain2') + var authInHost = result.host && result.host.indexOf('@') > 0 ? + result.host.split('@') : false; + if (authInHost) { + result.auth = authInHost.shift(); + result.host = result.hostname = authInHost.shift(); + } + } + + mustEndAbs = mustEndAbs || (result.host && srcPath.length); + + if (mustEndAbs && !isAbsolute) { + srcPath.unshift(''); + } + + if (!srcPath.length) { + result.pathname = null; + result.path = null; + } else { + result.pathname = srcPath.join('/'); + } + + //to support request.http + if (!util.isNull(result.pathname) || !util.isNull(result.search)) { + result.path = (result.pathname ? result.pathname : '') + + (result.search ? 
result.search : ''); + } + result.auth = relative.auth || result.auth; + result.slashes = result.slashes || relative.slashes; + result.href = result.format(); + return result; +}; + +Url.prototype.parseHost = function() { + var host = this.host; + var port = portPattern.exec(host); + if (port) { + port = port[0]; + if (port !== ':') { + this.port = port.substr(1); + } + host = host.substr(0, host.length - port.length); + } + if (host) this.hostname = host; +}; + + +/***/ }), +/* 9 */ +/***/ (function(module, exports, __webpack_require__) { + +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + + + +var R = typeof Reflect === 'object' ? Reflect : null +var ReflectApply = R && typeof R.apply === 'function' + ? R.apply + : function ReflectApply(target, receiver, args) { + return Function.prototype.apply.call(target, receiver, args); + } + +var ReflectOwnKeys +if (R && typeof R.ownKeys === 'function') { + ReflectOwnKeys = R.ownKeys +} else if (Object.getOwnPropertySymbols) { + ReflectOwnKeys = function ReflectOwnKeys(target) { + return Object.getOwnPropertyNames(target) + .concat(Object.getOwnPropertySymbols(target)); + }; +} else { + ReflectOwnKeys = function ReflectOwnKeys(target) { + return Object.getOwnPropertyNames(target); + }; +} + +function ProcessEmitWarning(warning) { + if (console && console.warn) console.warn(warning); +} + +var NumberIsNaN = Number.isNaN || function NumberIsNaN(value) { + return value !== value; +} + +function EventEmitter() { + EventEmitter.init.call(this); +} +module.exports = EventEmitter; +module.exports.once = once; + +// Backwards-compat with node 0.10.x +EventEmitter.EventEmitter = EventEmitter; + +EventEmitter.prototype._events = undefined; +EventEmitter.prototype._eventsCount = 0; +EventEmitter.prototype._maxListeners = undefined; + +// By default EventEmitters will print a warning if more than 10 listeners are +// added to it. This is a useful default which helps finding memory leaks. +var defaultMaxListeners = 10; + +function checkListener(listener) { + if (typeof listener !== 'function') { + throw new TypeError('The "listener" argument must be of type Function. 
Received type ' + typeof listener); + } +} + +Object.defineProperty(EventEmitter, 'defaultMaxListeners', { + enumerable: true, + get: function() { + return defaultMaxListeners; + }, + set: function(arg) { + if (typeof arg !== 'number' || arg < 0 || NumberIsNaN(arg)) { + throw new RangeError('The value of "defaultMaxListeners" is out of range. It must be a non-negative number. Received ' + arg + '.'); + } + defaultMaxListeners = arg; + } +}); + +EventEmitter.init = function() { + + if (this._events === undefined || + this._events === Object.getPrototypeOf(this)._events) { + this._events = Object.create(null); + this._eventsCount = 0; + } + + this._maxListeners = this._maxListeners || undefined; +}; + +// Obviously not all Emitters should be limited to 10. This function allows +// that to be increased. Set to zero for unlimited. +EventEmitter.prototype.setMaxListeners = function setMaxListeners(n) { + if (typeof n !== 'number' || n < 0 || NumberIsNaN(n)) { + throw new RangeError('The value of "n" is out of range. It must be a non-negative number. Received ' + n + '.'); + } + this._maxListeners = n; + return this; +}; + +function _getMaxListeners(that) { + if (that._maxListeners === undefined) + return EventEmitter.defaultMaxListeners; + return that._maxListeners; +} + +EventEmitter.prototype.getMaxListeners = function getMaxListeners() { + return _getMaxListeners(this); +}; + +EventEmitter.prototype.emit = function emit(type) { + var args = []; + for (var i = 1; i < arguments.length; i++) args.push(arguments[i]); + var doError = (type === 'error'); + + var events = this._events; + if (events !== undefined) + doError = (doError && events.error === undefined); + else if (!doError) + return false; + + // If there is no 'error' event listener then throw. + if (doError) { + var er; + if (args.length > 0) + er = args[0]; + if (er instanceof Error) { + // Note: The comments on the `throw` lines are intentional, they show + // up in Node's output if this results in an unhandled exception. + throw er; // Unhandled 'error' event + } + // At least give some kind of context to the user + var err = new Error('Unhandled error.' + (er ? ' (' + er.message + ')' : '')); + err.context = er; + throw err; // Unhandled 'error' event + } + + var handler = events[type]; + + if (handler === undefined) + return false; + + if (typeof handler === 'function') { + ReflectApply(handler, this, args); + } else { + var len = handler.length; + var listeners = arrayClone(handler, len); + for (var i = 0; i < len; ++i) + ReflectApply(listeners[i], this, args); + } + + return true; +}; + +function _addListener(target, type, listener, prepend) { + var m; + var events; + var existing; + + checkListener(listener); + + events = target._events; + if (events === undefined) { + events = target._events = Object.create(null); + target._eventsCount = 0; + } else { + // To avoid recursion in the case that type === "newListener"! Before + // adding it to the listeners, first emit "newListener". + if (events.newListener !== undefined) { + target.emit('newListener', type, + listener.listener ? listener.listener : listener); + + // Re-assign `events` because a newListener handler could have caused the + // this._events to be assigned to a new object + events = target._events; + } + existing = events[type]; + } + + if (existing === undefined) { + // Optimize the case of one listener. Don't need the extra array object. 
+ existing = events[type] = listener; + ++target._eventsCount; + } else { + if (typeof existing === 'function') { + // Adding the second element, need to change to array. + existing = events[type] = + prepend ? [listener, existing] : [existing, listener]; + // If we've already got an array, just append. + } else if (prepend) { + existing.unshift(listener); + } else { + existing.push(listener); + } + + // Check for listener leak + m = _getMaxListeners(target); + if (m > 0 && existing.length > m && !existing.warned) { + existing.warned = true; + // No error code for this since it is a Warning + // eslint-disable-next-line no-restricted-syntax + var w = new Error('Possible EventEmitter memory leak detected. ' + + existing.length + ' ' + String(type) + ' listeners ' + + 'added. Use emitter.setMaxListeners() to ' + + 'increase limit'); + w.name = 'MaxListenersExceededWarning'; + w.emitter = target; + w.type = type; + w.count = existing.length; + ProcessEmitWarning(w); + } + } + + return target; +} + +EventEmitter.prototype.addListener = function addListener(type, listener) { + return _addListener(this, type, listener, false); +}; + +EventEmitter.prototype.on = EventEmitter.prototype.addListener; + +EventEmitter.prototype.prependListener = + function prependListener(type, listener) { + return _addListener(this, type, listener, true); + }; + +function onceWrapper() { + if (!this.fired) { + this.target.removeListener(this.type, this.wrapFn); + this.fired = true; + if (arguments.length === 0) + return this.listener.call(this.target); + return this.listener.apply(this.target, arguments); + } +} + +function _onceWrap(target, type, listener) { + var state = { fired: false, wrapFn: undefined, target: target, type: type, listener: listener }; + var wrapped = onceWrapper.bind(state); + wrapped.listener = listener; + state.wrapFn = wrapped; + return wrapped; +} + +EventEmitter.prototype.once = function once(type, listener) { + checkListener(listener); + this.on(type, _onceWrap(this, type, listener)); + return this; +}; + +EventEmitter.prototype.prependOnceListener = + function prependOnceListener(type, listener) { + checkListener(listener); + this.prependListener(type, _onceWrap(this, type, listener)); + return this; + }; + +// Emits a 'removeListener' event if and only if the listener was removed. 
+EventEmitter.prototype.removeListener = + function removeListener(type, listener) { + var list, events, position, i, originalListener; + + checkListener(listener); + + events = this._events; + if (events === undefined) + return this; + + list = events[type]; + if (list === undefined) + return this; + + if (list === listener || list.listener === listener) { + if (--this._eventsCount === 0) + this._events = Object.create(null); + else { + delete events[type]; + if (events.removeListener) + this.emit('removeListener', type, list.listener || listener); + } + } else if (typeof list !== 'function') { + position = -1; + + for (i = list.length - 1; i >= 0; i--) { + if (list[i] === listener || list[i].listener === listener) { + originalListener = list[i].listener; + position = i; + break; + } + } + + if (position < 0) + return this; + + if (position === 0) + list.shift(); + else { + spliceOne(list, position); + } + + if (list.length === 1) + events[type] = list[0]; + + if (events.removeListener !== undefined) + this.emit('removeListener', type, originalListener || listener); + } + + return this; + }; + +EventEmitter.prototype.off = EventEmitter.prototype.removeListener; + +EventEmitter.prototype.removeAllListeners = + function removeAllListeners(type) { + var listeners, events, i; + + events = this._events; + if (events === undefined) + return this; + + // not listening for removeListener, no need to emit + if (events.removeListener === undefined) { + if (arguments.length === 0) { + this._events = Object.create(null); + this._eventsCount = 0; + } else if (events[type] !== undefined) { + if (--this._eventsCount === 0) + this._events = Object.create(null); + else + delete events[type]; + } + return this; + } + + // emit removeListener for all listeners on all events + if (arguments.length === 0) { + var keys = Object.keys(events); + var key; + for (i = 0; i < keys.length; ++i) { + key = keys[i]; + if (key === 'removeListener') continue; + this.removeAllListeners(key); + } + this.removeAllListeners('removeListener'); + this._events = Object.create(null); + this._eventsCount = 0; + return this; + } + + listeners = events[type]; + + if (typeof listeners === 'function') { + this.removeListener(type, listeners); + } else if (listeners !== undefined) { + // LIFO order + for (i = listeners.length - 1; i >= 0; i--) { + this.removeListener(type, listeners[i]); + } + } + + return this; + }; + +function _listeners(target, type, unwrap) { + var events = target._events; + + if (events === undefined) + return []; + + var evlistener = events[type]; + if (evlistener === undefined) + return []; + + if (typeof evlistener === 'function') + return unwrap ? [evlistener.listener || evlistener] : [evlistener]; + + return unwrap ? 
+ unwrapListeners(evlistener) : arrayClone(evlistener, evlistener.length); +} + +EventEmitter.prototype.listeners = function listeners(type) { + return _listeners(this, type, true); +}; + +EventEmitter.prototype.rawListeners = function rawListeners(type) { + return _listeners(this, type, false); +}; + +EventEmitter.listenerCount = function(emitter, type) { + if (typeof emitter.listenerCount === 'function') { + return emitter.listenerCount(type); + } else { + return listenerCount.call(emitter, type); + } +}; + +EventEmitter.prototype.listenerCount = listenerCount; +function listenerCount(type) { + var events = this._events; + + if (events !== undefined) { + var evlistener = events[type]; + + if (typeof evlistener === 'function') { + return 1; + } else if (evlistener !== undefined) { + return evlistener.length; + } + } + + return 0; +} + +EventEmitter.prototype.eventNames = function eventNames() { + return this._eventsCount > 0 ? ReflectOwnKeys(this._events) : []; +}; + +function arrayClone(arr, n) { + var copy = new Array(n); + for (var i = 0; i < n; ++i) + copy[i] = arr[i]; + return copy; +} + +function spliceOne(list, index) { + for (; index + 1 < list.length; index++) + list[index] = list[index + 1]; + list.pop(); +} + +function unwrapListeners(arr) { + var ret = new Array(arr.length); + for (var i = 0; i < ret.length; ++i) { + ret[i] = arr[i].listener || arr[i]; + } + return ret; +} + +function once(emitter, name) { + return new Promise(function (resolve, reject) { + function errorListener(err) { + emitter.removeListener(name, resolver); + reject(err); + } + + function resolver() { + if (typeof emitter.removeListener === 'function') { + emitter.removeListener('error', errorListener); + } + resolve([].slice.call(arguments)); + }; + + eventTargetAgnosticAddListener(emitter, name, resolver, { once: true }); + if (name !== 'error') { + addErrorHandlerIfEventEmitter(emitter, errorListener, { once: true }); + } + }); +} + +function addErrorHandlerIfEventEmitter(emitter, handler, flags) { + if (typeof emitter.on === 'function') { + eventTargetAgnosticAddListener(emitter, 'error', handler, flags); + } +} + +function eventTargetAgnosticAddListener(emitter, name, listener, flags) { + if (typeof emitter.on === 'function') { + if (flags.once) { + emitter.once(name, listener); + } else { + emitter.on(name, listener); + } + } else if (typeof emitter.addEventListener === 'function') { + // EventTarget does not have `error` event semantics like Node + // EventEmitters, we do not listen for `error` events here. + emitter.addEventListener(name, function wrapListener(arg) { + // IE does not have builtin `{ once: true }` support so we + // have to do it manually. + if (flags.once) { + emitter.removeEventListener(name, wrapListener); + } + listener(arg); + }); + } else { + throw new TypeError('The "emitter" argument must be of type EventEmitter. 
Received type ' + typeof emitter); + } +} + + +/***/ }), +/* 10 */ +/***/ (function(module, exports) { + +var toString = {}.toString; + +module.exports = Array.isArray || function (arr) { + return toString.call(arr) == '[object Array]'; +}; + + +/***/ }), +/* 11 */ +/***/ (function(module, exports, __webpack_require__) { + +/* WEBPACK VAR INJECTION */(function(global) {var ClientRequest = __webpack_require__(32) +var response = __webpack_require__(13) +var extend = __webpack_require__(41) +var statusCodes = __webpack_require__(42) +var url = __webpack_require__(8) + +var http = exports + +http.request = function (opts, cb) { + if (typeof opts === 'string') + opts = url.parse(opts) + else + opts = extend(opts) + + // Normally, the page is loaded from http or https, so not specifying a protocol + // will result in a (valid) protocol-relative url. However, this won't work if + // the protocol is something else, like 'file:' + var defaultProtocol = global.location.protocol.search(/^https?:$/) === -1 ? 'http:' : '' + + var protocol = opts.protocol || defaultProtocol + var host = opts.hostname || opts.host + var port = opts.port + var path = opts.path || '/' + + // Necessary for IPv6 addresses + if (host && host.indexOf(':') !== -1) + host = '[' + host + ']' + + // This may be a relative url. The browser should always be able to interpret it correctly. + opts.url = (host ? (protocol + '//' + host) : '') + (port ? ':' + port : '') + path + opts.method = (opts.method || 'GET').toUpperCase() + opts.headers = opts.headers || {} + + // Also valid opts.auth, opts.mode + + var req = new ClientRequest(opts) + if (cb) + req.on('response', cb) + return req +} + +http.get = function get (opts, cb) { + var req = http.request(opts, cb) + req.end() + return req +} + +http.ClientRequest = ClientRequest +http.IncomingMessage = response.IncomingMessage + +http.Agent = function () {} +http.Agent.defaultMaxSockets = 4 + +http.globalAgent = new http.Agent() + +http.STATUS_CODES = statusCodes + +http.METHODS = [ + 'CHECKOUT', + 'CONNECT', + 'COPY', + 'DELETE', + 'GET', + 'HEAD', + 'LOCK', + 'M-SEARCH', + 'MERGE', + 'MKACTIVITY', + 'MKCOL', + 'MOVE', + 'NOTIFY', + 'OPTIONS', + 'PATCH', + 'POST', + 'PROPFIND', + 'PROPPATCH', + 'PURGE', + 'PUT', + 'REPORT', + 'SEARCH', + 'SUBSCRIBE', + 'TRACE', + 'UNLOCK', + 'UNSUBSCRIBE' +] +/* WEBPACK VAR INJECTION */}.call(exports, __webpack_require__(0))) + +/***/ }), +/* 12 */ +/***/ (function(module, exports, __webpack_require__) { + +/* WEBPACK VAR INJECTION */(function(global) {exports.fetch = isFunction(global.fetch) && isFunction(global.ReadableStream) + +exports.writableStream = isFunction(global.WritableStream) + +exports.abortController = isFunction(global.AbortController) + +exports.blobConstructor = false +try { + new Blob([new ArrayBuffer(1)]) + exports.blobConstructor = true +} catch (e) {} + +// The xhr request to example.com may violate some restrictive CSP configurations, +// so if we're running in a browser that supports `fetch`, avoid calling getXHR() +// and assume support for certain features below. +var xhr +function getXHR () { + // Cache the xhr value + if (xhr !== undefined) return xhr + + if (global.XMLHttpRequest) { + xhr = new global.XMLHttpRequest() + // If XDomainRequest is available (ie only, where xhr might not work + // cross domain), use the page location. Otherwise use example.com + // Note: this doesn't actually make an http request. + try { + xhr.open('GET', global.XDomainRequest ? 
'/' : 'https://example.com') + } catch(e) { + xhr = null + } + } else { + // Service workers don't have XHR + xhr = null + } + return xhr +} + +function checkTypeSupport (type) { + var xhr = getXHR() + if (!xhr) return false + try { + xhr.responseType = type + return xhr.responseType === type + } catch (e) {} + return false +} + +// For some strange reason, Safari 7.0 reports typeof global.ArrayBuffer === 'object'. +// Safari 7.1 appears to have fixed this bug. +var haveArrayBuffer = typeof global.ArrayBuffer !== 'undefined' +var haveSlice = haveArrayBuffer && isFunction(global.ArrayBuffer.prototype.slice) + +// If fetch is supported, then arraybuffer will be supported too. Skip calling +// checkTypeSupport(), since that calls getXHR(). +exports.arraybuffer = exports.fetch || (haveArrayBuffer && checkTypeSupport('arraybuffer')) + +// These next two tests unavoidably show warnings in Chrome. Since fetch will always +// be used if it's available, just return false for these to avoid the warnings. +exports.msstream = !exports.fetch && haveSlice && checkTypeSupport('ms-stream') +exports.mozchunkedarraybuffer = !exports.fetch && haveArrayBuffer && + checkTypeSupport('moz-chunked-arraybuffer') + +// If fetch is supported, then overrideMimeType will be supported too. Skip calling +// getXHR(). +exports.overrideMimeType = exports.fetch || (getXHR() ? isFunction(getXHR().overrideMimeType) : false) + +exports.vbArray = isFunction(global.VBArray) + +function isFunction (value) { + return typeof value === 'function' +} + +xhr = null // Help gc + +/* WEBPACK VAR INJECTION */}.call(exports, __webpack_require__(0))) + +/***/ }), +/* 13 */ +/***/ (function(module, exports, __webpack_require__) { + +/* WEBPACK VAR INJECTION */(function(process, Buffer, global) {var capability = __webpack_require__(12) +var inherits = __webpack_require__(2) +var stream = __webpack_require__(14) + +var rStates = exports.readyStates = { + UNSENT: 0, + OPENED: 1, + HEADERS_RECEIVED: 2, + LOADING: 3, + DONE: 4 +} + +var IncomingMessage = exports.IncomingMessage = function (xhr, response, mode, fetchTimer) { + var self = this + stream.Readable.call(self) + + self._mode = mode + self.headers = {} + self.rawHeaders = [] + self.trailers = {} + self.rawTrailers = [] + + // Fake the 'close' event, but only once 'end' fires + self.on('end', function () { + // The nextTick is necessary to prevent the 'request' module from causing an infinite loop + process.nextTick(function () { + self.emit('close') + }) + }) + + if (mode === 'fetch') { + self._fetchResponse = response + + self.url = response.url + self.statusCode = response.status + self.statusMessage = response.statusText + + response.headers.forEach(function (header, key){ + self.headers[key.toLowerCase()] = header + self.rawHeaders.push(key, header) + }) + + if (capability.writableStream) { + var writable = new WritableStream({ + write: function (chunk) { + return new Promise(function (resolve, reject) { + if (self._destroyed) { + reject() + } else if(self.push(new Buffer(chunk))) { + resolve() + } else { + self._resumeFetch = resolve + } + }) + }, + close: function () { + global.clearTimeout(fetchTimer) + if (!self._destroyed) + self.push(null) + }, + abort: function (err) { + if (!self._destroyed) + self.emit('error', err) + } + }) + + try { + response.body.pipeTo(writable).catch(function (err) { + global.clearTimeout(fetchTimer) + if (!self._destroyed) + self.emit('error', err) + }) + return + } catch (e) {} // pipeTo method isn't defined. 
Can't find a better way to feature test this + } + // fallback for when writableStream or pipeTo aren't available + var reader = response.body.getReader() + function read () { + reader.read().then(function (result) { + if (self._destroyed) + return + if (result.done) { + global.clearTimeout(fetchTimer) + self.push(null) + return + } + self.push(new Buffer(result.value)) + read() + }).catch(function (err) { + global.clearTimeout(fetchTimer) + if (!self._destroyed) + self.emit('error', err) + }) + } + read() + } else { + self._xhr = xhr + self._pos = 0 + + self.url = xhr.responseURL + self.statusCode = xhr.status + self.statusMessage = xhr.statusText + var headers = xhr.getAllResponseHeaders().split(/\r?\n/) + headers.forEach(function (header) { + var matches = header.match(/^([^:]+):\s*(.*)/) + if (matches) { + var key = matches[1].toLowerCase() + if (key === 'set-cookie') { + if (self.headers[key] === undefined) { + self.headers[key] = [] + } + self.headers[key].push(matches[2]) + } else if (self.headers[key] !== undefined) { + self.headers[key] += ', ' + matches[2] + } else { + self.headers[key] = matches[2] + } + self.rawHeaders.push(matches[1], matches[2]) + } + }) + + self._charset = 'x-user-defined' + if (!capability.overrideMimeType) { + var mimeType = self.rawHeaders['mime-type'] + if (mimeType) { + var charsetMatch = mimeType.match(/;\s*charset=([^;])(;|$)/) + if (charsetMatch) { + self._charset = charsetMatch[1].toLowerCase() + } + } + if (!self._charset) + self._charset = 'utf-8' // best guess + } + } +} + +inherits(IncomingMessage, stream.Readable) + +IncomingMessage.prototype._read = function () { + var self = this + + var resolve = self._resumeFetch + if (resolve) { + self._resumeFetch = null + resolve() + } +} + +IncomingMessage.prototype._onXHRProgress = function () { + var self = this + + var xhr = self._xhr + + var response = null + switch (self._mode) { + case 'text:vbarray': // For IE9 + if (xhr.readyState !== rStates.DONE) + break + try { + // This fails in IE8 + response = new global.VBArray(xhr.responseBody).toArray() + } catch (e) {} + if (response !== null) { + self.push(new Buffer(response)) + break + } + // Falls through in IE8 + case 'text': + try { // This will fail when readyState = 3 in IE9. Switch mode and wait for readyState = 4 + response = xhr.responseText + } catch (e) { + self._mode = 'text:vbarray' + break + } + if (response.length > self._pos) { + var newData = response.substr(self._pos) + if (self._charset === 'x-user-defined') { + var buffer = new Buffer(newData.length) + for (var i = 0; i < newData.length; i++) + buffer[i] = newData.charCodeAt(i) & 0xff + + self.push(buffer) + } else { + self.push(newData, self._charset) + } + self._pos = response.length + } + break + case 'arraybuffer': + if (xhr.readyState !== rStates.DONE || !xhr.response) + break + response = xhr.response + self.push(new Buffer(new Uint8Array(response))) + break + case 'moz-chunked-arraybuffer': // take whole + response = xhr.response + if (xhr.readyState !== rStates.LOADING || !response) + break + self.push(new Buffer(new Uint8Array(response))) + break + case 'ms-stream': + response = xhr.response + if (xhr.readyState !== rStates.LOADING) + break + var reader = new global.MSStreamReader() + reader.onprogress = function () { + if (reader.result.byteLength > self._pos) { + self.push(new Buffer(new Uint8Array(reader.result.slice(self._pos)))) + self._pos = reader.result.byteLength + } + } + reader.onload = function () { + self.push(null) + } + // reader.onerror = ??? 
// TODO: this + reader.readAsArrayBuffer(response) + break + } + + // The ms-stream case handles end separately in reader.onload() + if (self._xhr.readyState === rStates.DONE && self._mode !== 'ms-stream') { + self.push(null) + } +} + +/* WEBPACK VAR INJECTION */}.call(exports, __webpack_require__(1), __webpack_require__(3).Buffer, __webpack_require__(0))) + +/***/ }), +/* 14 */ +/***/ (function(module, exports, __webpack_require__) { + +exports = module.exports = __webpack_require__(15); +exports.Stream = exports; +exports.Readable = exports; +exports.Writable = __webpack_require__(18); +exports.Duplex = __webpack_require__(4); +exports.Transform = __webpack_require__(20); +exports.PassThrough = __webpack_require__(39); + + +/***/ }), +/* 15 */ +/***/ (function(module, exports, __webpack_require__) { + +"use strict"; +/* WEBPACK VAR INJECTION */(function(global, process) {// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + + + +/**/ + +var pna = __webpack_require__(6); +/**/ + +module.exports = Readable; + +/**/ +var isArray = __webpack_require__(10); +/**/ + +/**/ +var Duplex; +/**/ + +Readable.ReadableState = ReadableState; + +/**/ +var EE = __webpack_require__(9).EventEmitter; + +var EElistenerCount = function (emitter, type) { + return emitter.listeners(type).length; +}; +/**/ + +/**/ +var Stream = __webpack_require__(16); +/**/ + +/**/ + +var Buffer = __webpack_require__(7).Buffer; +var OurUint8Array = global.Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} + +/**/ + +/**/ +var util = Object.create(__webpack_require__(5)); +util.inherits = __webpack_require__(2); +/**/ + +/**/ +var debugUtil = __webpack_require__(33); +var debug = void 0; +if (debugUtil && debugUtil.debuglog) { + debug = debugUtil.debuglog('stream'); +} else { + debug = function () {}; +} +/**/ + +var BufferList = __webpack_require__(34); +var destroyImpl = __webpack_require__(17); +var StringDecoder; + +util.inherits(Readable, Stream); + +var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; + +function prependListener(emitter, event, fn) { + // Sadly this is not cacheable as some libraries bundle their own + // event emitter implementation with them. 
+ if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); + + // This is a hack to make sure that our error handler is attached before any + // userland ones. NEVER DO THIS. This is here only because this code needs + // to continue to work with older versions of Node.js that do not include + // the prependListener() method. The goal is to eventually remove this hack. + if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; +} + +function ReadableState(options, stream) { + Duplex = Duplex || __webpack_require__(4); + + options = options || {}; + + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream. + // These options can be provided separately as readableXXX and writableXXX. + var isDuplex = stream instanceof Duplex; + + // object stream flag. Used to make read(n) ignore n and to + // make all the buffer merging and length checks go away + this.objectMode = !!options.objectMode; + + if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; + + // the point at which it stops calling _read() to fill the buffer + // Note: 0 is a valid value, means "don't call _read preemptively ever" + var hwm = options.highWaterMark; + var readableHwm = options.readableHighWaterMark; + var defaultHwm = this.objectMode ? 16 : 16 * 1024; + + if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (readableHwm || readableHwm === 0)) this.highWaterMark = readableHwm;else this.highWaterMark = defaultHwm; + + // cast to ints. + this.highWaterMark = Math.floor(this.highWaterMark); + + // A linked list is used to store data chunks instead of an array because the + // linked list can remove elements from the beginning faster than + // array.shift() + this.buffer = new BufferList(); + this.length = 0; + this.pipes = null; + this.pipesCount = 0; + this.flowing = null; + this.ended = false; + this.endEmitted = false; + this.reading = false; + + // a flag to be able to tell if the event 'readable'/'data' is emitted + // immediately, or on a later tick. We set this to true at first, because + // any actions that shouldn't happen until "later" should generally also + // not happen before the first read call. + this.sync = true; + + // whenever we return null, then we set a flag to say + // that we're awaiting a 'readable' event emission. + this.needReadable = false; + this.emittedReadable = false; + this.readableListening = false; + this.resumeScheduled = false; + + // has it been destroyed + this.destroyed = false; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. 
+ this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // the number of writers that are awaiting a drain event in .pipe()s + this.awaitDrain = 0; + + // if true, a maybeReadMore has been scheduled + this.readingMore = false; + + this.decoder = null; + this.encoding = null; + if (options.encoding) { + if (!StringDecoder) StringDecoder = __webpack_require__(19).StringDecoder; + this.decoder = new StringDecoder(options.encoding); + this.encoding = options.encoding; + } +} + +function Readable(options) { + Duplex = Duplex || __webpack_require__(4); + + if (!(this instanceof Readable)) return new Readable(options); + + this._readableState = new ReadableState(options, this); + + // legacy + this.readable = true; + + if (options) { + if (typeof options.read === 'function') this._read = options.read; + + if (typeof options.destroy === 'function') this._destroy = options.destroy; + } + + Stream.call(this); +} + +Object.defineProperty(Readable.prototype, 'destroyed', { + get: function () { + if (this._readableState === undefined) { + return false; + } + return this._readableState.destroyed; + }, + set: function (value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._readableState) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + } +}); + +Readable.prototype.destroy = destroyImpl.destroy; +Readable.prototype._undestroy = destroyImpl.undestroy; +Readable.prototype._destroy = function (err, cb) { + this.push(null); + cb(err); +}; + +// Manually shove something into the read() buffer. +// This returns true if the highWaterMark has not been hit yet, +// similar to how Writable.write() returns true if you should +// write() some more. 
+Readable.prototype.push = function (chunk, encoding) { + var state = this._readableState; + var skipChunkCheck; + + if (!state.objectMode) { + if (typeof chunk === 'string') { + encoding = encoding || state.defaultEncoding; + if (encoding !== state.encoding) { + chunk = Buffer.from(chunk, encoding); + encoding = ''; + } + skipChunkCheck = true; + } + } else { + skipChunkCheck = true; + } + + return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); +}; + +// Unshift should *always* be something directly out of read() +Readable.prototype.unshift = function (chunk) { + return readableAddChunk(this, chunk, null, true, false); +}; + +function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { + var state = stream._readableState; + if (chunk === null) { + state.reading = false; + onEofChunk(stream, state); + } else { + var er; + if (!skipChunkCheck) er = chunkInvalid(state, chunk); + if (er) { + stream.emit('error', er); + } else if (state.objectMode || chunk && chunk.length > 0) { + if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) { + chunk = _uint8ArrayToBuffer(chunk); + } + + if (addToFront) { + if (state.endEmitted) stream.emit('error', new Error('stream.unshift() after end event'));else addChunk(stream, state, chunk, true); + } else if (state.ended) { + stream.emit('error', new Error('stream.push() after EOF')); + } else { + state.reading = false; + if (state.decoder && !encoding) { + chunk = state.decoder.write(chunk); + if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state); + } else { + addChunk(stream, state, chunk, false); + } + } + } else if (!addToFront) { + state.reading = false; + } + } + + return needMoreData(state); +} + +function addChunk(stream, state, chunk, addToFront) { + if (state.flowing && state.length === 0 && !state.sync) { + stream.emit('data', chunk); + stream.read(0); + } else { + // update the buffer info. + state.length += state.objectMode ? 1 : chunk.length; + if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); + + if (state.needReadable) emitReadable(stream); + } + maybeReadMore(stream, state); +} + +function chunkInvalid(state, chunk) { + var er; + if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new TypeError('Invalid non-string/buffer chunk'); + } + return er; +} + +// if it's past the high water mark, we can push in some more. +// Also, if we have no data yet, we can stand some +// more bytes. This is to work around cases where hwm=0, +// such as the repl. Also, if the push() triggered a +// readable event, and the user called read(largeNumber) such that +// needReadable was set, then we ought to push more, so that another +// 'readable' event will be triggered. +function needMoreData(state) { + return !state.ended && (state.needReadable || state.length < state.highWaterMark || state.length === 0); +} + +Readable.prototype.isPaused = function () { + return this._readableState.flowing === false; +}; + +// backwards compatibility. 
+Readable.prototype.setEncoding = function (enc) { + if (!StringDecoder) StringDecoder = __webpack_require__(19).StringDecoder; + this._readableState.decoder = new StringDecoder(enc); + this._readableState.encoding = enc; + return this; +}; + +// Don't raise the hwm > 8MB +var MAX_HWM = 0x800000; +function computeNewHighWaterMark(n) { + if (n >= MAX_HWM) { + n = MAX_HWM; + } else { + // Get the next highest power of 2 to prevent increasing hwm excessively in + // tiny amounts + n--; + n |= n >>> 1; + n |= n >>> 2; + n |= n >>> 4; + n |= n >>> 8; + n |= n >>> 16; + n++; + } + return n; +} + +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function howMuchToRead(n, state) { + if (n <= 0 || state.length === 0 && state.ended) return 0; + if (state.objectMode) return 1; + if (n !== n) { + // Only flow one buffer at a time + if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; + } + // If we're asking for more than the current hwm, then raise the hwm. + if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); + if (n <= state.length) return n; + // Don't have enough + if (!state.ended) { + state.needReadable = true; + return 0; + } + return state.length; +} + +// you can override either this method, or the async _read(n) below. +Readable.prototype.read = function (n) { + debug('read', n); + n = parseInt(n, 10); + var state = this._readableState; + var nOrig = n; + + if (n !== 0) state.emittedReadable = false; + + // if we're doing read(0) to trigger a readable event, but we + // already have a bunch of data in the buffer, then just trigger + // the 'readable' event and move on. + if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) { + debug('read: emitReadable', state.length, state.ended); + if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); + return null; + } + + n = howMuchToRead(n, state); + + // if we've ended, and we're now clear, then finish it up. + if (n === 0 && state.ended) { + if (state.length === 0) endReadable(this); + return null; + } + + // All the actual chunk generation logic needs to be + // *below* the call to _read. The reason is that in certain + // synthetic stream cases, such as passthrough streams, _read + // may be a completely synchronous operation which may change + // the state of the read buffer, providing enough data when + // before there was *not* enough. + // + // So, the steps are: + // 1. Figure out what the state of things will be after we do + // a read from the buffer. + // + // 2. If that resulting state will trigger a _read, then call _read. + // Note that this may be asynchronous, or synchronous. Yes, it is + // deeply ugly to write APIs this way, but that still doesn't mean + // that the Readable class should behave improperly, as streams are + // designed to be sync/async agnostic. + // Take note if the _read call is sync or async (ie, if the read call + // has returned yet), so that we know whether or not it's safe to emit + // 'readable' etc. + // + // 3. Actually pull the requested chunks out of the buffer and return. + + // if we need a readable event, then we need to do some reading. 
+ var doRead = state.needReadable; + debug('need readable', doRead); + + // if we currently have less than the highWaterMark, then also read some + if (state.length === 0 || state.length - n < state.highWaterMark) { + doRead = true; + debug('length less than watermark', doRead); + } + + // however, if we've ended, then there's no point, and if we're already + // reading, then it's unnecessary. + if (state.ended || state.reading) { + doRead = false; + debug('reading or ended', doRead); + } else if (doRead) { + debug('do read'); + state.reading = true; + state.sync = true; + // if the length is currently zero, then we *need* a readable event. + if (state.length === 0) state.needReadable = true; + // call internal read method + this._read(state.highWaterMark); + state.sync = false; + // If _read pushed data synchronously, then `reading` will be false, + // and we need to re-evaluate how much data we can return to the user. + if (!state.reading) n = howMuchToRead(nOrig, state); + } + + var ret; + if (n > 0) ret = fromList(n, state);else ret = null; + + if (ret === null) { + state.needReadable = true; + n = 0; + } else { + state.length -= n; + } + + if (state.length === 0) { + // If we have nothing in the buffer, then we want to know + // as soon as we *do* get something into the buffer. + if (!state.ended) state.needReadable = true; + + // If we tried to read() past the EOF, then emit end on the next tick. + if (nOrig !== n && state.ended) endReadable(this); + } + + if (ret !== null) this.emit('data', ret); + + return ret; +}; + +function onEofChunk(stream, state) { + if (state.ended) return; + if (state.decoder) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) { + state.buffer.push(chunk); + state.length += state.objectMode ? 1 : chunk.length; + } + } + state.ended = true; + + // emit 'readable' now to make sure it gets picked up. + emitReadable(stream); +} + +// Don't emit readable right away in sync mode, because this can trigger +// another read() call => stack overflow. This way, it might trigger +// a nextTick recursion warning, but that's not so bad. +function emitReadable(stream) { + var state = stream._readableState; + state.needReadable = false; + if (!state.emittedReadable) { + debug('emitReadable', state.flowing); + state.emittedReadable = true; + if (state.sync) pna.nextTick(emitReadable_, stream);else emitReadable_(stream); + } +} + +function emitReadable_(stream) { + debug('emit readable'); + stream.emit('readable'); + flow(stream); +} + +// at this point, the user has presumably seen the 'readable' event, +// and called read() to consume some data. that may have triggered +// in turn another _read(n) call, in which case reading = true if +// it's in progress. +// However, if we're not ended, or reading, and the length < hwm, +// then go ahead and try to read some more preemptively. +function maybeReadMore(stream, state) { + if (!state.readingMore) { + state.readingMore = true; + pna.nextTick(maybeReadMore_, stream, state); + } +} + +function maybeReadMore_(stream, state) { + var len = state.length; + while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) { + debug('maybeReadMore read 0'); + stream.read(0); + if (len === state.length) + // didn't get any data, stop spinning. + break;else len = state.length; + } + state.readingMore = false; +} + +// abstract method. to be overridden in specific implementation classes. +// call cb(er, data) where data is <= n in length. 
+// for virtual (non-string, non-buffer) streams, "length" is somewhat +// arbitrary, and perhaps not very meaningful. +Readable.prototype._read = function (n) { + this.emit('error', new Error('_read() is not implemented')); +}; + +Readable.prototype.pipe = function (dest, pipeOpts) { + var src = this; + var state = this._readableState; + + switch (state.pipesCount) { + case 0: + state.pipes = dest; + break; + case 1: + state.pipes = [state.pipes, dest]; + break; + default: + state.pipes.push(dest); + break; + } + state.pipesCount += 1; + debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); + + var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; + + var endFn = doEnd ? onend : unpipe; + if (state.endEmitted) pna.nextTick(endFn);else src.once('end', endFn); + + dest.on('unpipe', onunpipe); + function onunpipe(readable, unpipeInfo) { + debug('onunpipe'); + if (readable === src) { + if (unpipeInfo && unpipeInfo.hasUnpiped === false) { + unpipeInfo.hasUnpiped = true; + cleanup(); + } + } + } + + function onend() { + debug('onend'); + dest.end(); + } + + // when the dest drains, it reduces the awaitDrain counter + // on the source. This would be more elegant with a .once() + // handler in flow(), but adding and removing repeatedly is + // too slow. + var ondrain = pipeOnDrain(src); + dest.on('drain', ondrain); + + var cleanedUp = false; + function cleanup() { + debug('cleanup'); + // cleanup event handlers once the pipe is broken + dest.removeListener('close', onclose); + dest.removeListener('finish', onfinish); + dest.removeListener('drain', ondrain); + dest.removeListener('error', onerror); + dest.removeListener('unpipe', onunpipe); + src.removeListener('end', onend); + src.removeListener('end', unpipe); + src.removeListener('data', ondata); + + cleanedUp = true; + + // if the reader is waiting for a drain event from this + // specific writer, then it would cause it to never start + // flowing again. + // So, if this is awaiting a drain, then we just call it now. + // If we don't know, then assume that we are waiting for one. + if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); + } + + // If the user pushes more data while we're writing to dest then we'll end up + // in ondata again. However, we only want to increase awaitDrain once because + // dest will only emit one 'drain' event for the multiple writes. + // => Introduce a guard on increasing awaitDrain. + var increasedAwaitDrain = false; + src.on('data', ondata); + function ondata(chunk) { + debug('ondata'); + increasedAwaitDrain = false; + var ret = dest.write(chunk); + if (false === ret && !increasedAwaitDrain) { + // If the user unpiped during `dest.write()`, it is possible + // to get stuck in a permanently paused state if that write + // also returned false. + // => Check whether `dest` is still a piping destination. + if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { + debug('false write response, pause', src._readableState.awaitDrain); + src._readableState.awaitDrain++; + increasedAwaitDrain = true; + } + src.pause(); + } + } + + // if the dest has an error, then stop piping into it. + // however, don't suppress the throwing behavior for this. 
+ function onerror(er) { + debug('onerror', er); + unpipe(); + dest.removeListener('error', onerror); + if (EElistenerCount(dest, 'error') === 0) dest.emit('error', er); + } + + // Make sure our error handler is attached before userland ones. + prependListener(dest, 'error', onerror); + + // Both close and finish should trigger unpipe, but only once. + function onclose() { + dest.removeListener('finish', onfinish); + unpipe(); + } + dest.once('close', onclose); + function onfinish() { + debug('onfinish'); + dest.removeListener('close', onclose); + unpipe(); + } + dest.once('finish', onfinish); + + function unpipe() { + debug('unpipe'); + src.unpipe(dest); + } + + // tell the dest that it's being piped to + dest.emit('pipe', src); + + // start the flow if it hasn't been started already. + if (!state.flowing) { + debug('pipe resume'); + src.resume(); + } + + return dest; +}; + +function pipeOnDrain(src) { + return function () { + var state = src._readableState; + debug('pipeOnDrain', state.awaitDrain); + if (state.awaitDrain) state.awaitDrain--; + if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { + state.flowing = true; + flow(src); + } + }; +} + +Readable.prototype.unpipe = function (dest) { + var state = this._readableState; + var unpipeInfo = { hasUnpiped: false }; + + // if we're not piping anywhere, then do nothing. + if (state.pipesCount === 0) return this; + + // just one destination. most common case. + if (state.pipesCount === 1) { + // passed in one, but it's not the right one. + if (dest && dest !== state.pipes) return this; + + if (!dest) dest = state.pipes; + + // got a match. + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + if (dest) dest.emit('unpipe', this, unpipeInfo); + return this; + } + + // slow case. multiple pipe destinations. + + if (!dest) { + // remove all. + var dests = state.pipes; + var len = state.pipesCount; + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + + for (var i = 0; i < len; i++) { + dests[i].emit('unpipe', this, unpipeInfo); + }return this; + } + + // try to find the right one. + var index = indexOf(state.pipes, dest); + if (index === -1) return this; + + state.pipes.splice(index, 1); + state.pipesCount -= 1; + if (state.pipesCount === 1) state.pipes = state.pipes[0]; + + dest.emit('unpipe', this, unpipeInfo); + + return this; +}; + +// set up data events if they are asked for +// Ensure readable listeners eventually get something +Readable.prototype.on = function (ev, fn) { + var res = Stream.prototype.on.call(this, ev, fn); + + if (ev === 'data') { + // Start flowing on next tick if stream isn't explicitly paused + if (this._readableState.flowing !== false) this.resume(); + } else if (ev === 'readable') { + var state = this._readableState; + if (!state.endEmitted && !state.readableListening) { + state.readableListening = state.needReadable = true; + state.emittedReadable = false; + if (!state.reading) { + pna.nextTick(nReadingNextTick, this); + } else if (state.length) { + emitReadable(this); + } + } + } + + return res; +}; +Readable.prototype.addListener = Readable.prototype.on; + +function nReadingNextTick(self) { + debug('readable nexttick read 0'); + self.read(0); +} + +// pause() and resume() are remnants of the legacy readable stream API +// If the user uses them, then switch into old mode. 
+Readable.prototype.resume = function () { + var state = this._readableState; + if (!state.flowing) { + debug('resume'); + state.flowing = true; + resume(this, state); + } + return this; +}; + +function resume(stream, state) { + if (!state.resumeScheduled) { + state.resumeScheduled = true; + pna.nextTick(resume_, stream, state); + } +} + +function resume_(stream, state) { + if (!state.reading) { + debug('resume read 0'); + stream.read(0); + } + + state.resumeScheduled = false; + state.awaitDrain = 0; + stream.emit('resume'); + flow(stream); + if (state.flowing && !state.reading) stream.read(0); +} + +Readable.prototype.pause = function () { + debug('call pause flowing=%j', this._readableState.flowing); + if (false !== this._readableState.flowing) { + debug('pause'); + this._readableState.flowing = false; + this.emit('pause'); + } + return this; +}; + +function flow(stream) { + var state = stream._readableState; + debug('flow', state.flowing); + while (state.flowing && stream.read() !== null) {} +} + +// wrap an old-style stream as the async data source. +// This is *not* part of the readable stream interface. +// It is an ugly unfortunate mess of history. +Readable.prototype.wrap = function (stream) { + var _this = this; + + var state = this._readableState; + var paused = false; + + stream.on('end', function () { + debug('wrapped end'); + if (state.decoder && !state.ended) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) _this.push(chunk); + } + + _this.push(null); + }); + + stream.on('data', function (chunk) { + debug('wrapped data'); + if (state.decoder) chunk = state.decoder.write(chunk); + + // don't skip over falsy values in objectMode + if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; + + var ret = _this.push(chunk); + if (!ret) { + paused = true; + stream.pause(); + } + }); + + // proxy all the other methods. + // important when wrapping filters and duplexes. + for (var i in stream) { + if (this[i] === undefined && typeof stream[i] === 'function') { + this[i] = function (method) { + return function () { + return stream[method].apply(stream, arguments); + }; + }(i); + } + } + + // proxy certain important events. + for (var n = 0; n < kProxyEvents.length; n++) { + stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n])); + } + + // when we try to consume some more bytes, simply unpause the + // underlying stream. + this._read = function (n) { + debug('wrapped _read', n); + if (paused) { + paused = false; + stream.resume(); + } + }; + + return this; +}; + +Object.defineProperty(Readable.prototype, 'readableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function () { + return this._readableState.highWaterMark; + } +}); + +// exposed for testing purposes only. +Readable._fromList = fromList; + +// Pluck off n bytes from an array of buffers. +// Length is the combined lengths of all the buffers in the list. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. 
+function fromList(n, state) { + // nothing buffered + if (state.length === 0) return null; + + var ret; + if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) { + // read it all, truncate the list + if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.head.data;else ret = state.buffer.concat(state.length); + state.buffer.clear(); + } else { + // read part of list + ret = fromListPartial(n, state.buffer, state.decoder); + } + + return ret; +} + +// Extracts only enough buffered data to satisfy the amount requested. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function fromListPartial(n, list, hasStrings) { + var ret; + if (n < list.head.data.length) { + // slice is the same for buffers and strings + ret = list.head.data.slice(0, n); + list.head.data = list.head.data.slice(n); + } else if (n === list.head.data.length) { + // first chunk is a perfect match + ret = list.shift(); + } else { + // result spans more than one buffer + ret = hasStrings ? copyFromBufferString(n, list) : copyFromBuffer(n, list); + } + return ret; +} + +// Copies a specified amount of characters from the list of buffered data +// chunks. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function copyFromBufferString(n, list) { + var p = list.head; + var c = 1; + var ret = p.data; + n -= ret.length; + while (p = p.next) { + var str = p.data; + var nb = n > str.length ? str.length : n; + if (nb === str.length) ret += str;else ret += str.slice(0, n); + n -= nb; + if (n === 0) { + if (nb === str.length) { + ++c; + if (p.next) list.head = p.next;else list.head = list.tail = null; + } else { + list.head = p; + p.data = str.slice(nb); + } + break; + } + ++c; + } + list.length -= c; + return ret; +} + +// Copies a specified amount of bytes from the list of buffered data chunks. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function copyFromBuffer(n, list) { + var ret = Buffer.allocUnsafe(n); + var p = list.head; + var c = 1; + p.data.copy(ret); + n -= p.data.length; + while (p = p.next) { + var buf = p.data; + var nb = n > buf.length ? buf.length : n; + buf.copy(ret, ret.length - n, 0, nb); + n -= nb; + if (n === 0) { + if (nb === buf.length) { + ++c; + if (p.next) list.head = p.next;else list.head = list.tail = null; + } else { + list.head = p; + p.data = buf.slice(nb); + } + break; + } + ++c; + } + list.length -= c; + return ret; +} + +function endReadable(stream) { + var state = stream._readableState; + + // If we get here before consuming all the bytes, then that is a + // bug in node. Should never happen. + if (state.length > 0) throw new Error('"endReadable()" called on non-empty stream'); + + if (!state.endEmitted) { + state.ended = true; + pna.nextTick(endReadableNT, state, stream); + } +} + +function endReadableNT(state, stream) { + // Check that we didn't get one last unshift. 
+ if (!state.endEmitted && state.length === 0) { + state.endEmitted = true; + stream.readable = false; + stream.emit('end'); + } +} + +function indexOf(xs, x) { + for (var i = 0, l = xs.length; i < l; i++) { + if (xs[i] === x) return i; + } + return -1; +} +/* WEBPACK VAR INJECTION */}.call(exports, __webpack_require__(0), __webpack_require__(1))) + +/***/ }), +/* 16 */ +/***/ (function(module, exports, __webpack_require__) { + +module.exports = __webpack_require__(9).EventEmitter; + + +/***/ }), +/* 17 */ +/***/ (function(module, exports, __webpack_require__) { + +"use strict"; + + +/**/ + +var pna = __webpack_require__(6); +/**/ + +// undocumented cb() API, needed for core, not for public API +function destroy(err, cb) { + var _this = this; + + var readableDestroyed = this._readableState && this._readableState.destroyed; + var writableDestroyed = this._writableState && this._writableState.destroyed; + + if (readableDestroyed || writableDestroyed) { + if (cb) { + cb(err); + } else if (err && (!this._writableState || !this._writableState.errorEmitted)) { + pna.nextTick(emitErrorNT, this, err); + } + return this; + } + + // we set destroyed to true before firing error callbacks in order + // to make it re-entrance safe in case destroy() is called within callbacks + + if (this._readableState) { + this._readableState.destroyed = true; + } + + // if this is a duplex stream mark the writable part as destroyed as well + if (this._writableState) { + this._writableState.destroyed = true; + } + + this._destroy(err || null, function (err) { + if (!cb && err) { + pna.nextTick(emitErrorNT, _this, err); + if (_this._writableState) { + _this._writableState.errorEmitted = true; + } + } else if (cb) { + cb(err); + } + }); + + return this; +} + +function undestroy() { + if (this._readableState) { + this._readableState.destroyed = false; + this._readableState.reading = false; + this._readableState.ended = false; + this._readableState.endEmitted = false; + } + + if (this._writableState) { + this._writableState.destroyed = false; + this._writableState.ended = false; + this._writableState.ending = false; + this._writableState.finished = false; + this._writableState.errorEmitted = false; + } +} + +function emitErrorNT(self, err) { + self.emit('error', err); +} + +module.exports = { + destroy: destroy, + undestroy: undestroy +}; + +/***/ }), +/* 18 */ +/***/ (function(module, exports, __webpack_require__) { + +"use strict"; +/* WEBPACK VAR INJECTION */(function(process, setImmediate, global) {// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// A bit simpler than readable streams. +// Implement an async ._write(chunk, encoding, cb), and it'll handle all +// the drain event emission and buffering. + + + +/**/ + +var pna = __webpack_require__(6); +/**/ + +module.exports = Writable; + +/* */ +function WriteReq(chunk, encoding, cb) { + this.chunk = chunk; + this.encoding = encoding; + this.callback = cb; + this.next = null; +} + +// It seems a linked list but it is not +// there will be only 2 of these for each stream +function CorkedRequest(state) { + var _this = this; + + this.next = null; + this.entry = null; + this.finish = function () { + onCorkedFinish(_this, state); + }; +} +/* */ + +/**/ +var asyncWrite = !process.browser && ['v0.10', 'v0.9.'].indexOf(process.version.slice(0, 5)) > -1 ? setImmediate : pna.nextTick; +/**/ + +/**/ +var Duplex; +/**/ + +Writable.WritableState = WritableState; + +/**/ +var util = Object.create(__webpack_require__(5)); +util.inherits = __webpack_require__(2); +/**/ + +/**/ +var internalUtil = { + deprecate: __webpack_require__(38) +}; +/**/ + +/**/ +var Stream = __webpack_require__(16); +/**/ + +/**/ + +var Buffer = __webpack_require__(7).Buffer; +var OurUint8Array = global.Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} + +/**/ + +var destroyImpl = __webpack_require__(17); + +util.inherits(Writable, Stream); + +function nop() {} + +function WritableState(options, stream) { + Duplex = Duplex || __webpack_require__(4); + + options = options || {}; + + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream. + // These options can be provided separately as readableXXX and writableXXX. + var isDuplex = stream instanceof Duplex; + + // object stream flag to indicate whether or not this stream + // contains buffers or objects. + this.objectMode = !!options.objectMode; + + if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; + + // the point at which write() starts returning false + // Note: 0 is a valid value, means that we always return false if + // the entire buffer is not flushed immediately on write() + var hwm = options.highWaterMark; + var writableHwm = options.writableHighWaterMark; + var defaultHwm = this.objectMode ? 16 : 16 * 1024; + + if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (writableHwm || writableHwm === 0)) this.highWaterMark = writableHwm;else this.highWaterMark = defaultHwm; + + // cast to ints. + this.highWaterMark = Math.floor(this.highWaterMark); + + // if _final has been called + this.finalCalled = false; + + // drain event flag. + this.needDrain = false; + // at the start of calling end() + this.ending = false; + // when end() has been called, and returned + this.ended = false; + // when 'finish' is emitted + this.finished = false; + + // has it been destroyed + this.destroyed = false; + + // should we decode strings into buffers before passing to _write? 
+ // this is here so that some node-core streams can optimize string + // handling at a lower level. + var noDecode = options.decodeStrings === false; + this.decodeStrings = !noDecode; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // not an actual buffer we keep track of, but a measurement + // of how much we're waiting to get pushed to some underlying + // socket or file. + this.length = 0; + + // a flag to see when we're in the middle of a write. + this.writing = false; + + // when true all writes will be buffered until .uncork() call + this.corked = 0; + + // a flag to be able to tell if the onwrite cb is called immediately, + // or on a later tick. We set this to true at first, because any + // actions that shouldn't happen until "later" should generally also + // not happen before the first write call. + this.sync = true; + + // a flag to know if we're processing previously buffered items, which + // may call the _write() callback in the same tick, so that we don't + // end up in an overlapped onwrite situation. + this.bufferProcessing = false; + + // the callback that's passed to _write(chunk,cb) + this.onwrite = function (er) { + onwrite(stream, er); + }; + + // the callback that the user supplies to write(chunk,encoding,cb) + this.writecb = null; + + // the amount that is being written when _write is called. + this.writelen = 0; + + this.bufferedRequest = null; + this.lastBufferedRequest = null; + + // number of pending user-supplied write callbacks + // this must be 0 before 'finish' can be emitted + this.pendingcb = 0; + + // emit prefinish if the only thing we're waiting for is _write cbs + // This is relevant for synchronous Transform streams + this.prefinished = false; + + // True if the error was already emitted and should not be thrown again + this.errorEmitted = false; + + // count buffered requests + this.bufferedRequestCount = 0; + + // allocate the first CorkedRequest, there is always + // one allocated and free to use, and we maintain at most two + this.corkedRequestsFree = new CorkedRequest(this); +} + +WritableState.prototype.getBuffer = function getBuffer() { + var current = this.bufferedRequest; + var out = []; + while (current) { + out.push(current); + current = current.next; + } + return out; +}; + +(function () { + try { + Object.defineProperty(WritableState.prototype, 'buffer', { + get: internalUtil.deprecate(function () { + return this.getBuffer(); + }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003') + }); + } catch (_) {} +})(); + +// Test _writableState for inheritance to account for Duplex streams, +// whose prototype chain only points to Readable. 
+var realHasInstance; +if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') { + realHasInstance = Function.prototype[Symbol.hasInstance]; + Object.defineProperty(Writable, Symbol.hasInstance, { + value: function (object) { + if (realHasInstance.call(this, object)) return true; + if (this !== Writable) return false; + + return object && object._writableState instanceof WritableState; + } + }); +} else { + realHasInstance = function (object) { + return object instanceof this; + }; +} + +function Writable(options) { + Duplex = Duplex || __webpack_require__(4); + + // Writable ctor is applied to Duplexes, too. + // `realHasInstance` is necessary because using plain `instanceof` + // would return false, as no `_writableState` property is attached. + + // Trying to use the custom `instanceof` for Writable here will also break the + // Node.js LazyTransform implementation, which has a non-trivial getter for + // `_writableState` that would lead to infinite recursion. + if (!realHasInstance.call(Writable, this) && !(this instanceof Duplex)) { + return new Writable(options); + } + + this._writableState = new WritableState(options, this); + + // legacy. + this.writable = true; + + if (options) { + if (typeof options.write === 'function') this._write = options.write; + + if (typeof options.writev === 'function') this._writev = options.writev; + + if (typeof options.destroy === 'function') this._destroy = options.destroy; + + if (typeof options.final === 'function') this._final = options.final; + } + + Stream.call(this); +} + +// Otherwise people can pipe Writable streams, which is just wrong. +Writable.prototype.pipe = function () { + this.emit('error', new Error('Cannot pipe, not readable')); +}; + +function writeAfterEnd(stream, cb) { + var er = new Error('write after end'); + // TODO: defer error events consistently everywhere, not just the cb + stream.emit('error', er); + pna.nextTick(cb, er); +} + +// Checks that a user-supplied chunk is valid, especially for the particular +// mode the stream is in. Currently this means that `null` is never accepted +// and undefined/non-string values are only allowed in object mode. 
+function validChunk(stream, state, chunk, cb) { + var valid = true; + var er = false; + + if (chunk === null) { + er = new TypeError('May not write null values to stream'); + } else if (typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new TypeError('Invalid non-string/buffer chunk'); + } + if (er) { + stream.emit('error', er); + pna.nextTick(cb, er); + valid = false; + } + return valid; +} + +Writable.prototype.write = function (chunk, encoding, cb) { + var state = this._writableState; + var ret = false; + var isBuf = !state.objectMode && _isUint8Array(chunk); + + if (isBuf && !Buffer.isBuffer(chunk)) { + chunk = _uint8ArrayToBuffer(chunk); + } + + if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + + if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; + + if (typeof cb !== 'function') cb = nop; + + if (state.ended) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) { + state.pendingcb++; + ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb); + } + + return ret; +}; + +Writable.prototype.cork = function () { + var state = this._writableState; + + state.corked++; +}; + +Writable.prototype.uncork = function () { + var state = this._writableState; + + if (state.corked) { + state.corked--; + + if (!state.writing && !state.corked && !state.finished && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); + } +}; + +Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { + // node::ParseEncoding() requires lower case. + if (typeof encoding === 'string') encoding = encoding.toLowerCase(); + if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new TypeError('Unknown encoding: ' + encoding); + this._writableState.defaultEncoding = encoding; + return this; +}; + +function decodeChunk(state, chunk, encoding) { + if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { + chunk = Buffer.from(chunk, encoding); + } + return chunk; +} + +Object.defineProperty(Writable.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function () { + return this._writableState.highWaterMark; + } +}); + +// if we're already writing something, then just put this +// in the queue, and wait our turn. Otherwise, call _write +// If we return false, then we need a drain event, so set that flag. +function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { + if (!isBuf) { + var newChunk = decodeChunk(state, chunk, encoding); + if (chunk !== newChunk) { + isBuf = true; + encoding = 'buffer'; + chunk = newChunk; + } + } + var len = state.objectMode ? 1 : chunk.length; + + state.length += len; + + var ret = state.length < state.highWaterMark; + // we must ensure that previous needDrain will not be reset to false. 
+ if (!ret) state.needDrain = true; + + if (state.writing || state.corked) { + var last = state.lastBufferedRequest; + state.lastBufferedRequest = { + chunk: chunk, + encoding: encoding, + isBuf: isBuf, + callback: cb, + next: null + }; + if (last) { + last.next = state.lastBufferedRequest; + } else { + state.bufferedRequest = state.lastBufferedRequest; + } + state.bufferedRequestCount += 1; + } else { + doWrite(stream, state, false, len, chunk, encoding, cb); + } + + return ret; +} + +function doWrite(stream, state, writev, len, chunk, encoding, cb) { + state.writelen = len; + state.writecb = cb; + state.writing = true; + state.sync = true; + if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); + state.sync = false; +} + +function onwriteError(stream, state, sync, er, cb) { + --state.pendingcb; + + if (sync) { + // defer the callback if we are being called synchronously + // to avoid piling up things on the stack + pna.nextTick(cb, er); + // this can emit finish, and it will always happen + // after error + pna.nextTick(finishMaybe, stream, state); + stream._writableState.errorEmitted = true; + stream.emit('error', er); + } else { + // the caller expect this to happen before if + // it is async + cb(er); + stream._writableState.errorEmitted = true; + stream.emit('error', er); + // this can emit finish, but finish must + // always follow error + finishMaybe(stream, state); + } +} + +function onwriteStateUpdate(state) { + state.writing = false; + state.writecb = null; + state.length -= state.writelen; + state.writelen = 0; +} + +function onwrite(stream, er) { + var state = stream._writableState; + var sync = state.sync; + var cb = state.writecb; + + onwriteStateUpdate(state); + + if (er) onwriteError(stream, state, sync, er, cb);else { + // Check if we're actually ready to finish, but don't emit yet + var finished = needFinish(state); + + if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { + clearBuffer(stream, state); + } + + if (sync) { + /**/ + asyncWrite(afterWrite, stream, state, finished, cb); + /**/ + } else { + afterWrite(stream, state, finished, cb); + } + } +} + +function afterWrite(stream, state, finished, cb) { + if (!finished) onwriteDrain(stream, state); + state.pendingcb--; + cb(); + finishMaybe(stream, state); +} + +// Must force callback to be called on nextTick, so that we don't +// emit 'drain' before the write() consumer gets the 'false' return +// value, and has a chance to attach a 'drain' listener. 
+function onwriteDrain(stream, state) { + if (state.length === 0 && state.needDrain) { + state.needDrain = false; + stream.emit('drain'); + } +} + +// if there's something in the buffer waiting, then process it +function clearBuffer(stream, state) { + state.bufferProcessing = true; + var entry = state.bufferedRequest; + + if (stream._writev && entry && entry.next) { + // Fast case, write everything using _writev() + var l = state.bufferedRequestCount; + var buffer = new Array(l); + var holder = state.corkedRequestsFree; + holder.entry = entry; + + var count = 0; + var allBuffers = true; + while (entry) { + buffer[count] = entry; + if (!entry.isBuf) allBuffers = false; + entry = entry.next; + count += 1; + } + buffer.allBuffers = allBuffers; + + doWrite(stream, state, true, state.length, buffer, '', holder.finish); + + // doWrite is almost always async, defer these to save a bit of time + // as the hot path ends with doWrite + state.pendingcb++; + state.lastBufferedRequest = null; + if (holder.next) { + state.corkedRequestsFree = holder.next; + holder.next = null; + } else { + state.corkedRequestsFree = new CorkedRequest(state); + } + state.bufferedRequestCount = 0; + } else { + // Slow case, write chunks one-by-one + while (entry) { + var chunk = entry.chunk; + var encoding = entry.encoding; + var cb = entry.callback; + var len = state.objectMode ? 1 : chunk.length; + + doWrite(stream, state, false, len, chunk, encoding, cb); + entry = entry.next; + state.bufferedRequestCount--; + // if we didn't call the onwrite immediately, then + // it means that we need to wait until it does. + // also, that means that the chunk and cb are currently + // being processed, so move the buffer counter past them. + if (state.writing) { + break; + } + } + + if (entry === null) state.lastBufferedRequest = null; + } + + state.bufferedRequest = entry; + state.bufferProcessing = false; +} + +Writable.prototype._write = function (chunk, encoding, cb) { + cb(new Error('_write() is not implemented')); +}; + +Writable.prototype._writev = null; + +Writable.prototype.end = function (chunk, encoding, cb) { + var state = this._writableState; + + if (typeof chunk === 'function') { + cb = chunk; + chunk = null; + encoding = null; + } else if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + + if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); + + // .end() fully uncorks + if (state.corked) { + state.corked = 1; + this.uncork(); + } + + // ignore unnecessary end() calls. 
+ if (!state.ending && !state.finished) endWritable(this, state, cb); +}; + +function needFinish(state) { + return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; +} +function callFinal(stream, state) { + stream._final(function (err) { + state.pendingcb--; + if (err) { + stream.emit('error', err); + } + state.prefinished = true; + stream.emit('prefinish'); + finishMaybe(stream, state); + }); +} +function prefinish(stream, state) { + if (!state.prefinished && !state.finalCalled) { + if (typeof stream._final === 'function') { + state.pendingcb++; + state.finalCalled = true; + pna.nextTick(callFinal, stream, state); + } else { + state.prefinished = true; + stream.emit('prefinish'); + } + } +} + +function finishMaybe(stream, state) { + var need = needFinish(state); + if (need) { + prefinish(stream, state); + if (state.pendingcb === 0) { + state.finished = true; + stream.emit('finish'); + } + } + return need; +} + +function endWritable(stream, state, cb) { + state.ending = true; + finishMaybe(stream, state); + if (cb) { + if (state.finished) pna.nextTick(cb);else stream.once('finish', cb); + } + state.ended = true; + stream.writable = false; +} + +function onCorkedFinish(corkReq, state, err) { + var entry = corkReq.entry; + corkReq.entry = null; + while (entry) { + var cb = entry.callback; + state.pendingcb--; + cb(err); + entry = entry.next; + } + if (state.corkedRequestsFree) { + state.corkedRequestsFree.next = corkReq; + } else { + state.corkedRequestsFree = corkReq; + } +} + +Object.defineProperty(Writable.prototype, 'destroyed', { + get: function () { + if (this._writableState === undefined) { + return false; + } + return this._writableState.destroyed; + }, + set: function (value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._writableState) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._writableState.destroyed = value; + } +}); + +Writable.prototype.destroy = destroyImpl.destroy; +Writable.prototype._undestroy = destroyImpl.undestroy; +Writable.prototype._destroy = function (err, cb) { + this.end(); + cb(err); +}; +/* WEBPACK VAR INJECTION */}.call(exports, __webpack_require__(1), __webpack_require__(36).setImmediate, __webpack_require__(0))) + +/***/ }), +/* 19 */ +/***/ (function(module, exports, __webpack_require__) { + +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + + + +/**/ + +var Buffer = __webpack_require__(7).Buffer; +/**/ + +var isEncoding = Buffer.isEncoding || function (encoding) { + encoding = '' + encoding; + switch (encoding && encoding.toLowerCase()) { + case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw': + return true; + default: + return false; + } +}; + +function _normalizeEncoding(enc) { + if (!enc) return 'utf8'; + var retried; + while (true) { + switch (enc) { + case 'utf8': + case 'utf-8': + return 'utf8'; + case 'ucs2': + case 'ucs-2': + case 'utf16le': + case 'utf-16le': + return 'utf16le'; + case 'latin1': + case 'binary': + return 'latin1'; + case 'base64': + case 'ascii': + case 'hex': + return enc; + default: + if (retried) return; // undefined + enc = ('' + enc).toLowerCase(); + retried = true; + } + } +}; + +// Do not cache `Buffer.isEncoding` when checking encoding names as some +// modules monkey-patch it to support additional encodings +function normalizeEncoding(enc) { + var nenc = _normalizeEncoding(enc); + if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc); + return nenc || enc; +} + +// StringDecoder provides an interface for efficiently splitting a series of +// buffers into a series of JS strings without breaking apart multi-byte +// characters. +exports.StringDecoder = StringDecoder; +function StringDecoder(encoding) { + this.encoding = normalizeEncoding(encoding); + var nb; + switch (this.encoding) { + case 'utf16le': + this.text = utf16Text; + this.end = utf16End; + nb = 4; + break; + case 'utf8': + this.fillLast = utf8FillLast; + nb = 4; + break; + case 'base64': + this.text = base64Text; + this.end = base64End; + nb = 3; + break; + default: + this.write = simpleWrite; + this.end = simpleEnd; + return; + } + this.lastNeed = 0; + this.lastTotal = 0; + this.lastChar = Buffer.allocUnsafe(nb); +} + +StringDecoder.prototype.write = function (buf) { + if (buf.length === 0) return ''; + var r; + var i; + if (this.lastNeed) { + r = this.fillLast(buf); + if (r === undefined) return ''; + i = this.lastNeed; + this.lastNeed = 0; + } else { + i = 0; + } + if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i); + return r || ''; +}; + +StringDecoder.prototype.end = utf8End; + +// Returns only complete characters in a Buffer +StringDecoder.prototype.text = utf8Text; + +// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer +StringDecoder.prototype.fillLast = function (buf) { + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); + } + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length); + this.lastNeed -= buf.length; +}; + +// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a +// continuation byte. If an invalid byte is detected, -2 is returned. +function utf8CheckByte(byte) { + if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4; + return byte >> 6 === 0x02 ? 
-1 : -2; +} + +// Checks at most 3 bytes at the end of a Buffer in order to detect an +// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4) +// needed to complete the UTF-8 character (if applicable) are returned. +function utf8CheckIncomplete(self, buf, i) { + var j = buf.length - 1; + if (j < i) return 0; + var nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 1; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 2; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) { + if (nb === 2) nb = 0;else self.lastNeed = nb - 3; + } + return nb; + } + return 0; +} + +// Validates as many continuation bytes for a multi-byte UTF-8 character as +// needed or are available. If we see a non-continuation byte where we expect +// one, we "replace" the validated continuation bytes we've seen so far with +// a single UTF-8 replacement character ('\ufffd'), to match v8's UTF-8 decoding +// behavior. The continuation byte check is included three times in the case +// where all of the continuation bytes for a character exist in the same buffer. +// It is also done this way as a slight performance increase instead of using a +// loop. +function utf8CheckExtraBytes(self, buf, p) { + if ((buf[0] & 0xC0) !== 0x80) { + self.lastNeed = 0; + return '\ufffd'; + } + if (self.lastNeed > 1 && buf.length > 1) { + if ((buf[1] & 0xC0) !== 0x80) { + self.lastNeed = 1; + return '\ufffd'; + } + if (self.lastNeed > 2 && buf.length > 2) { + if ((buf[2] & 0xC0) !== 0x80) { + self.lastNeed = 2; + return '\ufffd'; + } + } + } +} + +// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer. +function utf8FillLast(buf) { + var p = this.lastTotal - this.lastNeed; + var r = utf8CheckExtraBytes(this, buf, p); + if (r !== undefined) return r; + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, p, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); + } + buf.copy(this.lastChar, p, 0, buf.length); + this.lastNeed -= buf.length; +} + +// Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a +// partial character, the character's bytes are buffered until the required +// number of bytes are available. +function utf8Text(buf, i) { + var total = utf8CheckIncomplete(this, buf, i); + if (!this.lastNeed) return buf.toString('utf8', i); + this.lastTotal = total; + var end = buf.length - (total - this.lastNeed); + buf.copy(this.lastChar, 0, end); + return buf.toString('utf8', i, end); +} + +// For UTF-8, a replacement character is added when ending on a partial +// character. +function utf8End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) return r + '\ufffd'; + return r; +} + +// UTF-16LE typically needs two bytes per character, but even if we have an even +// number of bytes available, we need to check if we end on a leading/high +// surrogate. In that case, we need to wait for the next two bytes in order to +// decode the last character properly. 
+function utf16Text(buf, i) { + if ((buf.length - i) % 2 === 0) { + var r = buf.toString('utf16le', i); + if (r) { + var c = r.charCodeAt(r.length - 1); + if (c >= 0xD800 && c <= 0xDBFF) { + this.lastNeed = 2; + this.lastTotal = 4; + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; + return r.slice(0, -1); + } + } + return r; + } + this.lastNeed = 1; + this.lastTotal = 2; + this.lastChar[0] = buf[buf.length - 1]; + return buf.toString('utf16le', i, buf.length - 1); +} + +// For UTF-16LE we do not explicitly append special replacement characters if we +// end on a partial character, we simply let v8 handle that. +function utf16End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) { + var end = this.lastTotal - this.lastNeed; + return r + this.lastChar.toString('utf16le', 0, end); + } + return r; +} + +function base64Text(buf, i) { + var n = (buf.length - i) % 3; + if (n === 0) return buf.toString('base64', i); + this.lastNeed = 3 - n; + this.lastTotal = 3; + if (n === 1) { + this.lastChar[0] = buf[buf.length - 1]; + } else { + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; + } + return buf.toString('base64', i, buf.length - n); +} + +function base64End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed); + return r; +} + +// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex) +function simpleWrite(buf) { + return buf.toString(this.encoding); +} + +function simpleEnd(buf) { + return buf && buf.length ? this.write(buf) : ''; +} + +/***/ }), +/* 20 */ +/***/ (function(module, exports, __webpack_require__) { + +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a transform stream is a readable/writable stream where you do +// something with the data. Sometimes it's called a "filter", +// but that's not a great name for it, since that implies a thing where +// some bits pass through, and others are simply ignored. (That would +// be a valid example of a transform, of course.) +// +// While the output is causally related to the input, it's not a +// necessarily symmetric or synchronous transformation. For example, +// a zlib stream might take multiple plain-text writes(), and then +// emit a single compressed chunk some time in the future. 
+// +// Here's how this works: +// +// The Transform stream has all the aspects of the readable and writable +// stream classes. When you write(chunk), that calls _write(chunk,cb) +// internally, and returns false if there's a lot of pending writes +// buffered up. When you call read(), that calls _read(n) until +// there's enough pending readable data buffered up. +// +// In a transform stream, the written data is placed in a buffer. When +// _read(n) is called, it transforms the queued up data, calling the +// buffered _write cb's as it consumes chunks. If consuming a single +// written chunk would result in multiple output chunks, then the first +// outputted bit calls the readcb, and subsequent chunks just go into +// the read buffer, and will cause it to emit 'readable' if necessary. +// +// This way, back-pressure is actually determined by the reading side, +// since _read has to be called to start processing a new chunk. However, +// a pathological inflate type of transform can cause excessive buffering +// here. For example, imagine a stream where every byte of input is +// interpreted as an integer from 0-255, and then results in that many +// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in +// 1kb of data being output. In this case, you could write a very small +// amount of input, and end up with a very large amount of output. In +// such a pathological inflating mechanism, there'd be no way to tell +// the system to stop doing the transform. A single 4MB write could +// cause the system to run out of memory. +// +// However, even in such a pathological case, only a single written chunk +// would be consumed, and then the rest would wait (un-transformed) until +// the results of the previous transformed chunk were consumed. + + + +module.exports = Transform; + +var Duplex = __webpack_require__(4); + +/**/ +var util = Object.create(__webpack_require__(5)); +util.inherits = __webpack_require__(2); +/**/ + +util.inherits(Transform, Duplex); + +function afterTransform(er, data) { + var ts = this._transformState; + ts.transforming = false; + + var cb = ts.writecb; + + if (!cb) { + return this.emit('error', new Error('write callback called multiple times')); + } + + ts.writechunk = null; + ts.writecb = null; + + if (data != null) // single equals check for both `null` and `undefined` + this.push(data); + + cb(er); + + var rs = this._readableState; + rs.reading = false; + if (rs.needReadable || rs.length < rs.highWaterMark) { + this._read(rs.highWaterMark); + } +} + +function Transform(options) { + if (!(this instanceof Transform)) return new Transform(options); + + Duplex.call(this, options); + + this._transformState = { + afterTransform: afterTransform.bind(this), + needTransform: false, + transforming: false, + writecb: null, + writechunk: null, + writeencoding: null + }; + + // start out asking for a readable event once data is transformed. + this._readableState.needReadable = true; + + // we have implemented the _read method, and done the other things + // that Readable wants before the first _read call, so unset the + // sync guard flag. + this._readableState.sync = false; + + if (options) { + if (typeof options.transform === 'function') this._transform = options.transform; + + if (typeof options.flush === 'function') this._flush = options.flush; + } + + // When the writable side finishes, then flush out anything remaining. 
+ this.on('prefinish', prefinish); +} + +function prefinish() { + var _this = this; + + if (typeof this._flush === 'function') { + this._flush(function (er, data) { + done(_this, er, data); + }); + } else { + done(this, null, null); + } +} + +Transform.prototype.push = function (chunk, encoding) { + this._transformState.needTransform = false; + return Duplex.prototype.push.call(this, chunk, encoding); +}; + +// This is the part where you do stuff! +// override this function in implementation classes. +// 'chunk' is an input chunk. +// +// Call `push(newChunk)` to pass along transformed output +// to the readable side. You may call 'push' zero or more times. +// +// Call `cb(err)` when you are done with this chunk. If you pass +// an error, then that'll put the hurt on the whole operation. If you +// never call cb(), then you'll never get another chunk. +Transform.prototype._transform = function (chunk, encoding, cb) { + throw new Error('_transform() is not implemented'); +}; + +Transform.prototype._write = function (chunk, encoding, cb) { + var ts = this._transformState; + ts.writecb = cb; + ts.writechunk = chunk; + ts.writeencoding = encoding; + if (!ts.transforming) { + var rs = this._readableState; + if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); + } +}; + +// Doesn't matter what the args are here. +// _transform does all the work. +// That we got here means that the readable side wants more data. +Transform.prototype._read = function (n) { + var ts = this._transformState; + + if (ts.writechunk !== null && ts.writecb && !ts.transforming) { + ts.transforming = true; + this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); + } else { + // mark that we need a transform, so that any data that comes in + // will get processed, now that we've asked for it. 
+ ts.needTransform = true; + } +}; + +Transform.prototype._destroy = function (err, cb) { + var _this2 = this; + + Duplex.prototype._destroy.call(this, err, function (err2) { + cb(err2); + _this2.emit('close'); + }); +}; + +function done(stream, er, data) { + if (er) return stream.emit('error', er); + + if (data != null) // single equals check for both `null` and `undefined` + stream.push(data); + + // if there's nothing in the write buffer, then that means + // that nothing more will ever be provided + if (stream._writableState.length) throw new Error('Calling transform done when ws.length != 0'); + + if (stream._transformState.transforming) throw new Error('Calling transform done when still transforming'); + + return stream.push(null); +} + +/***/ }), +/* 21 */ +/***/ (function(module, exports, __webpack_require__) { + +var EventSource = __webpack_require__(22) + +if (typeof window === 'object') { + window.EventSourcePolyfill = EventSource + if (!window.EventSource) window.EventSource = EventSource + module.exports = window.EventSource +} else { + module.exports = EventSource +} + + +/***/ }), +/* 22 */ +/***/ (function(module, exports, __webpack_require__) { + +/* WEBPACK VAR INJECTION */(function(process, Buffer) {var parse = __webpack_require__(8).parse +var events = __webpack_require__(9) +var https = __webpack_require__(31) +var http = __webpack_require__(11) +var util = __webpack_require__(43) + +var httpsOptions = [ + 'pfx', 'key', 'passphrase', 'cert', 'ca', 'ciphers', + 'rejectUnauthorized', 'secureProtocol', 'servername', 'checkServerIdentity' +] + +var bom = [239, 187, 191] +var colon = 58 +var space = 32 +var lineFeed = 10 +var carriageReturn = 13 + +function hasBom (buf) { + return bom.every(function (charCode, index) { + return buf[index] === charCode + }) +} + +/** + * Creates a new EventSource object + * + * @param {String} url the URL to which to connect + * @param {Object} [eventSourceInitDict] extra init params. See README for details. + * @api public + **/ +function EventSource (url, eventSourceInitDict) { + var readyState = EventSource.CONNECTING + var headers = eventSourceInitDict && eventSourceInitDict.headers + var hasNewOrigin = false + Object.defineProperty(this, 'readyState', { + get: function () { + return readyState + } + }) + + Object.defineProperty(this, 'url', { + get: function () { + return url + } + }) + + var self = this + self.reconnectInterval = 1000 + self.connectionInProgress = false + + function onConnectionClosed (message) { + if (readyState === EventSource.CLOSED) return + readyState = EventSource.CONNECTING + _emit('error', new Event('error', {message: message})) + + // The url may have been changed by a temporary redirect. 
If that's the case, + // revert it now, and flag that we are no longer pointing to a new origin + if (reconnectUrl) { + url = reconnectUrl + reconnectUrl = null + hasNewOrigin = false + } + setTimeout(function () { + if (readyState !== EventSource.CONNECTING || self.connectionInProgress) { + return + } + self.connectionInProgress = true + connect() + }, self.reconnectInterval) + } + + var req + var lastEventId = '' + if (headers && headers['Last-Event-ID']) { + lastEventId = headers['Last-Event-ID'] + delete headers['Last-Event-ID'] + } + + var discardTrailingNewline = false + var data = '' + var eventName = '' + + var reconnectUrl = null + + function connect () { + var options = parse(url) + var isSecure = options.protocol === 'https:' + options.headers = { 'Cache-Control': 'no-cache', 'Accept': 'text/event-stream' } + if (lastEventId) options.headers['Last-Event-ID'] = lastEventId + if (headers) { + var reqHeaders = hasNewOrigin ? removeUnsafeHeaders(headers) : headers + for (var i in reqHeaders) { + var header = reqHeaders[i] + if (header) { + options.headers[i] = header + } + } + } + + // Legacy: this should be specified as `eventSourceInitDict.https.rejectUnauthorized`, + // but for now exists as a backwards-compatibility layer + options.rejectUnauthorized = !(eventSourceInitDict && !eventSourceInitDict.rejectUnauthorized) + + if (eventSourceInitDict && eventSourceInitDict.createConnection !== undefined) { + options.createConnection = eventSourceInitDict.createConnection + } + + // If specify http proxy, make the request to sent to the proxy server, + // and include the original url in path and Host headers + var useProxy = eventSourceInitDict && eventSourceInitDict.proxy + if (useProxy) { + var proxy = parse(eventSourceInitDict.proxy) + isSecure = proxy.protocol === 'https:' + + options.protocol = isSecure ? 'https:' : 'http:' + options.path = url + options.headers.Host = options.host + options.hostname = proxy.hostname + options.host = proxy.host + options.port = proxy.port + } + + // If https options are specified, merge them into the request options + if (eventSourceInitDict && eventSourceInitDict.https) { + for (var optName in eventSourceInitDict.https) { + if (httpsOptions.indexOf(optName) === -1) { + continue + } + + var option = eventSourceInitDict.https[optName] + if (option !== undefined) { + options[optName] = option + } + } + } + + // Pass this on to the XHR + if (eventSourceInitDict && eventSourceInitDict.withCredentials !== undefined) { + options.withCredentials = eventSourceInitDict.withCredentials + } + + req = (isSecure ? https : http).request(options, function (res) { + self.connectionInProgress = false + // Handle HTTP errors + if (res.statusCode === 500 || res.statusCode === 502 || res.statusCode === 503 || res.statusCode === 504) { + _emit('error', new Event('error', {status: res.statusCode, message: res.statusMessage})) + onConnectionClosed() + return + } + + // Handle HTTP redirects + if (res.statusCode === 301 || res.statusCode === 302 || res.statusCode === 307) { + var location = res.headers.location + if (!location) { + // Server sent redirect response without Location header. 
+ _emit('error', new Event('error', {status: res.statusCode, message: res.statusMessage})) + return + } + var prevOrigin = getOrigin(url) + var nextOrigin = getOrigin(location) + hasNewOrigin = prevOrigin !== nextOrigin + if (res.statusCode === 307) reconnectUrl = url + url = location + process.nextTick(connect) + return + } + + if (res.statusCode !== 200) { + _emit('error', new Event('error', {status: res.statusCode, message: res.statusMessage})) + return self.close() + } + + readyState = EventSource.OPEN + res.on('close', function () { + res.removeAllListeners('close') + res.removeAllListeners('end') + onConnectionClosed() + }) + + res.on('end', function () { + res.removeAllListeners('close') + res.removeAllListeners('end') + onConnectionClosed() + }) + _emit('open', new Event('open')) + + // text/event-stream parser adapted from webkit's + // Source/WebCore/page/EventSource.cpp + var isFirst = true + var buf + var startingPos = 0 + var startingFieldLength = -1 + res.on('data', function (chunk) { + buf = buf ? Buffer.concat([buf, chunk]) : chunk + if (isFirst && hasBom(buf)) { + buf = buf.slice(bom.length) + } + + isFirst = false + var pos = 0 + var length = buf.length + + while (pos < length) { + if (discardTrailingNewline) { + if (buf[pos] === lineFeed) { + ++pos + } + discardTrailingNewline = false + } + + var lineLength = -1 + var fieldLength = startingFieldLength + var c + + for (var i = startingPos; lineLength < 0 && i < length; ++i) { + c = buf[i] + if (c === colon) { + if (fieldLength < 0) { + fieldLength = i - pos + } + } else if (c === carriageReturn) { + discardTrailingNewline = true + lineLength = i - pos + } else if (c === lineFeed) { + lineLength = i - pos + } + } + + if (lineLength < 0) { + startingPos = length - pos + startingFieldLength = fieldLength + break + } else { + startingPos = 0 + startingFieldLength = -1 + } + + parseEventStreamLine(buf, pos, fieldLength, lineLength) + + pos += lineLength + 1 + } + + if (pos === length) { + buf = void 0 + } else if (pos > 0) { + buf = buf.slice(pos) + } + }) + }) + + req.on('error', function (err) { + self.connectionInProgress = false + onConnectionClosed(err.message) + }) + + if (req.setNoDelay) req.setNoDelay(true) + req.end() + } + + connect() + + function _emit () { + if (self.listeners(arguments[0]).length > 0) { + self.emit.apply(self, arguments) + } + } + + this._close = function () { + if (readyState === EventSource.CLOSED) return + readyState = EventSource.CLOSED + if (req.abort) req.abort() + if (req.xhr && req.xhr.abort) req.xhr.abort() + } + + function parseEventStreamLine (buf, pos, fieldLength, lineLength) { + if (lineLength === 0) { + if (data.length > 0) { + var type = eventName || 'message' + _emit(type, new MessageEvent(type, { + data: data.slice(0, -1), // remove trailing newline + lastEventId: lastEventId, + origin: getOrigin(url) + })) + data = '' + } + eventName = void 0 + } else if (fieldLength > 0) { + var noValue = fieldLength < 0 + var step = 0 + var field = buf.slice(pos, pos + (noValue ? 
lineLength : fieldLength)).toString() + + if (noValue) { + step = lineLength + } else if (buf[pos + fieldLength + 1] !== space) { + step = fieldLength + 1 + } else { + step = fieldLength + 2 + } + pos += step + + var valueLength = lineLength - step + var value = buf.slice(pos, pos + valueLength).toString() + + if (field === 'data') { + data += value + '\n' + } else if (field === 'event') { + eventName = value + } else if (field === 'id') { + lastEventId = value + } else if (field === 'retry') { + var retry = parseInt(value, 10) + if (!Number.isNaN(retry)) { + self.reconnectInterval = retry + } + } + } + } +} + +module.exports = EventSource + +util.inherits(EventSource, events.EventEmitter) +EventSource.prototype.constructor = EventSource; // make stacktraces readable + +['open', 'error', 'message'].forEach(function (method) { + Object.defineProperty(EventSource.prototype, 'on' + method, { + /** + * Returns the current listener + * + * @return {Mixed} the set function or undefined + * @api private + */ + get: function get () { + var listener = this.listeners(method)[0] + return listener ? (listener._listener ? listener._listener : listener) : undefined + }, + + /** + * Start listening for events + * + * @param {Function} listener the listener + * @return {Mixed} the set function or undefined + * @api private + */ + set: function set (listener) { + this.removeAllListeners(method) + this.addEventListener(method, listener) + } + }) +}) + +/** + * Ready states + */ +Object.defineProperty(EventSource, 'CONNECTING', {enumerable: true, value: 0}) +Object.defineProperty(EventSource, 'OPEN', {enumerable: true, value: 1}) +Object.defineProperty(EventSource, 'CLOSED', {enumerable: true, value: 2}) + +EventSource.prototype.CONNECTING = 0 +EventSource.prototype.OPEN = 1 +EventSource.prototype.CLOSED = 2 + +/** + * Closes the connection, if one is made, and sets the readyState attribute to 2 (closed) + * + * @see https://developer.mozilla.org/en-US/docs/Web/API/EventSource/close + * @api public + */ +EventSource.prototype.close = function () { + this._close() +} + +/** + * Emulates the W3C Browser based WebSocket interface using addEventListener. + * + * @param {String} type A string representing the event type to listen out for + * @param {Function} listener callback + * @see https://developer.mozilla.org/en/DOM/element.addEventListener + * @see http://dev.w3.org/html5/websockets/#the-websocket-interface + * @api public + */ +EventSource.prototype.addEventListener = function addEventListener (type, listener) { + if (typeof listener === 'function') { + // store a reference so we can return the original function again + listener._listener = listener + this.on(type, listener) + } +} + +/** + * Emulates the W3C Browser based WebSocket interface using dispatchEvent. + * + * @param {Event} event An event to be dispatched + * @see https://developer.mozilla.org/en-US/docs/Web/API/EventTarget/dispatchEvent + * @api public + */ +EventSource.prototype.dispatchEvent = function dispatchEvent (event) { + if (!event.type) { + throw new Error('UNSPECIFIED_EVENT_TYPE_ERR') + } + // if event is instance of an CustomEvent (or has 'details' property), + // send the detail object as the payload for the event + this.emit(event.type, event.detail) +} + +/** + * Emulates the W3C Browser based WebSocket interface using removeEventListener. 
+ * + * @param {String} type A string representing the event type to remove + * @param {Function} listener callback + * @see https://developer.mozilla.org/en/DOM/element.removeEventListener + * @see http://dev.w3.org/html5/websockets/#the-websocket-interface + * @api public + */ +EventSource.prototype.removeEventListener = function removeEventListener (type, listener) { + if (typeof listener === 'function') { + listener._listener = undefined + this.removeListener(type, listener) + } +} + +/** + * W3C Event + * + * @see http://www.w3.org/TR/DOM-Level-3-Events/#interface-Event + * @api private + */ +function Event (type, optionalProperties) { + Object.defineProperty(this, 'type', { writable: false, value: type, enumerable: true }) + if (optionalProperties) { + for (var f in optionalProperties) { + if (optionalProperties.hasOwnProperty(f)) { + Object.defineProperty(this, f, { writable: false, value: optionalProperties[f], enumerable: true }) + } + } + } +} + +/** + * W3C MessageEvent + * + * @see http://www.w3.org/TR/webmessaging/#event-definitions + * @api private + */ +function MessageEvent (type, eventInitDict) { + Object.defineProperty(this, 'type', { writable: false, value: type, enumerable: true }) + for (var f in eventInitDict) { + if (eventInitDict.hasOwnProperty(f)) { + Object.defineProperty(this, f, { writable: false, value: eventInitDict[f], enumerable: true }) + } + } +} + +/** + * Returns a new object of headers that does not include any authorization and cookie headers + * + * @param {Object} headers An object of headers ({[headerName]: headerValue}) + * @return {Object} a new object of headers + * @api private + */ +function removeUnsafeHeaders (headers) { + var safe = {} + for (var key in headers) { + if (/^(cookie|authorization)$/i.test(key)) { + continue + } + + safe[key] = headers[key] + } + + return safe +} + +/** + * Transform an URL to a valid origin value. + * + * @param {String|Object} url URL to transform to it's origin. + * @returns {String} The origin. + * @api private + */ +function getOrigin (url) { + if (typeof url === 'string') url = parse(url) + if (!url.protocol || !url.hostname) return 'null' + return (url.protocol + '//' + url.host).toLowerCase() +} + +/* WEBPACK VAR INJECTION */}.call(exports, __webpack_require__(1), __webpack_require__(3).Buffer)) + +/***/ }), +/* 23 */ +/***/ (function(module, exports, __webpack_require__) { + +"use strict"; + + +exports.byteLength = byteLength +exports.toByteArray = toByteArray +exports.fromByteArray = fromByteArray + +var lookup = [] +var revLookup = [] +var Arr = typeof Uint8Array !== 'undefined' ? Uint8Array : Array + +var code = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/' +for (var i = 0, len = code.length; i < len; ++i) { + lookup[i] = code[i] + revLookup[code.charCodeAt(i)] = i +} + +// Support decoding URL-safe base64 strings, as Node.js does. +// See: https://en.wikipedia.org/wiki/Base64#URL_applications +revLookup['-'.charCodeAt(0)] = 62 +revLookup['_'.charCodeAt(0)] = 63 + +function getLens (b64) { + var len = b64.length + + if (len % 4 > 0) { + throw new Error('Invalid string. Length must be a multiple of 4') + } + + // Trim off extra bytes after placeholder bytes are found + // See: https://github.com/beatgammit/base64-js/issues/42 + var validLen = b64.indexOf('=') + if (validLen === -1) validLen = len + + var placeHoldersLen = validLen === len + ? 
0 + : 4 - (validLen % 4) + + return [validLen, placeHoldersLen] +} + +// base64 is 4/3 + up to two characters of the original data +function byteLength (b64) { + var lens = getLens(b64) + var validLen = lens[0] + var placeHoldersLen = lens[1] + return ((validLen + placeHoldersLen) * 3 / 4) - placeHoldersLen +} + +function _byteLength (b64, validLen, placeHoldersLen) { + return ((validLen + placeHoldersLen) * 3 / 4) - placeHoldersLen +} + +function toByteArray (b64) { + var tmp + var lens = getLens(b64) + var validLen = lens[0] + var placeHoldersLen = lens[1] + + var arr = new Arr(_byteLength(b64, validLen, placeHoldersLen)) + + var curByte = 0 + + // if there are placeholders, only get up to the last complete 4 chars + var len = placeHoldersLen > 0 + ? validLen - 4 + : validLen + + var i + for (i = 0; i < len; i += 4) { + tmp = + (revLookup[b64.charCodeAt(i)] << 18) | + (revLookup[b64.charCodeAt(i + 1)] << 12) | + (revLookup[b64.charCodeAt(i + 2)] << 6) | + revLookup[b64.charCodeAt(i + 3)] + arr[curByte++] = (tmp >> 16) & 0xFF + arr[curByte++] = (tmp >> 8) & 0xFF + arr[curByte++] = tmp & 0xFF + } + + if (placeHoldersLen === 2) { + tmp = + (revLookup[b64.charCodeAt(i)] << 2) | + (revLookup[b64.charCodeAt(i + 1)] >> 4) + arr[curByte++] = tmp & 0xFF + } + + if (placeHoldersLen === 1) { + tmp = + (revLookup[b64.charCodeAt(i)] << 10) | + (revLookup[b64.charCodeAt(i + 1)] << 4) | + (revLookup[b64.charCodeAt(i + 2)] >> 2) + arr[curByte++] = (tmp >> 8) & 0xFF + arr[curByte++] = tmp & 0xFF + } + + return arr +} + +function tripletToBase64 (num) { + return lookup[num >> 18 & 0x3F] + + lookup[num >> 12 & 0x3F] + + lookup[num >> 6 & 0x3F] + + lookup[num & 0x3F] +} + +function encodeChunk (uint8, start, end) { + var tmp + var output = [] + for (var i = start; i < end; i += 3) { + tmp = + ((uint8[i] << 16) & 0xFF0000) + + ((uint8[i + 1] << 8) & 0xFF00) + + (uint8[i + 2] & 0xFF) + output.push(tripletToBase64(tmp)) + } + return output.join('') +} + +function fromByteArray (uint8) { + var tmp + var len = uint8.length + var extraBytes = len % 3 // if we have 1 byte left, pad 2 bytes + var parts = [] + var maxChunkLength = 16383 // must be multiple of 3 + + // go through the array every three bytes, we'll deal with trailing stuff later + for (var i = 0, len2 = len - extraBytes; i < len2; i += maxChunkLength) { + parts.push(encodeChunk(uint8, i, (i + maxChunkLength) > len2 ? len2 : (i + maxChunkLength))) + } + + // pad the end with zeros, but make sure to not forget the extra bytes + if (extraBytes === 1) { + tmp = uint8[len - 1] + parts.push( + lookup[tmp >> 2] + + lookup[(tmp << 4) & 0x3F] + + '==' + ) + } else if (extraBytes === 2) { + tmp = (uint8[len - 2] << 8) + uint8[len - 1] + parts.push( + lookup[tmp >> 10] + + lookup[(tmp >> 4) & 0x3F] + + lookup[(tmp << 2) & 0x3F] + + '=' + ) + } + + return parts.join('') +} + + +/***/ }), +/* 24 */ +/***/ (function(module, exports) { + +/*! ieee754. BSD-3-Clause License. Feross Aboukhadijeh */ +exports.read = function (buffer, offset, isLE, mLen, nBytes) { + var e, m + var eLen = (nBytes * 8) - mLen - 1 + var eMax = (1 << eLen) - 1 + var eBias = eMax >> 1 + var nBits = -7 + var i = isLE ? (nBytes - 1) : 0 + var d = isLE ? 
-1 : 1 + var s = buffer[offset + i] + + i += d + + e = s & ((1 << (-nBits)) - 1) + s >>= (-nBits) + nBits += eLen + for (; nBits > 0; e = (e * 256) + buffer[offset + i], i += d, nBits -= 8) {} + + m = e & ((1 << (-nBits)) - 1) + e >>= (-nBits) + nBits += mLen + for (; nBits > 0; m = (m * 256) + buffer[offset + i], i += d, nBits -= 8) {} + + if (e === 0) { + e = 1 - eBias + } else if (e === eMax) { + return m ? NaN : ((s ? -1 : 1) * Infinity) + } else { + m = m + Math.pow(2, mLen) + e = e - eBias + } + return (s ? -1 : 1) * m * Math.pow(2, e - mLen) +} + +exports.write = function (buffer, value, offset, isLE, mLen, nBytes) { + var e, m, c + var eLen = (nBytes * 8) - mLen - 1 + var eMax = (1 << eLen) - 1 + var eBias = eMax >> 1 + var rt = (mLen === 23 ? Math.pow(2, -24) - Math.pow(2, -77) : 0) + var i = isLE ? 0 : (nBytes - 1) + var d = isLE ? 1 : -1 + var s = value < 0 || (value === 0 && 1 / value < 0) ? 1 : 0 + + value = Math.abs(value) + + if (isNaN(value) || value === Infinity) { + m = isNaN(value) ? 1 : 0 + e = eMax + } else { + e = Math.floor(Math.log(value) / Math.LN2) + if (value * (c = Math.pow(2, -e)) < 1) { + e-- + c *= 2 + } + if (e + eBias >= 1) { + value += rt / c + } else { + value += rt * Math.pow(2, 1 - eBias) + } + if (value * c >= 2) { + e++ + c /= 2 + } + + if (e + eBias >= eMax) { + m = 0 + e = eMax + } else if (e + eBias >= 1) { + m = ((value * c) - 1) * Math.pow(2, mLen) + e = e + eBias + } else { + m = value * Math.pow(2, eBias - 1) * Math.pow(2, mLen) + e = 0 + } + } + + for (; mLen >= 8; buffer[offset + i] = m & 0xff, i += d, m /= 256, mLen -= 8) {} + + e = (e << mLen) | m + eLen += mLen + for (; eLen > 0; buffer[offset + i] = e & 0xff, i += d, e /= 256, eLen -= 8) {} + + buffer[offset + i - d] |= s * 128 +} + + +/***/ }), +/* 25 */ +/***/ (function(module, exports, __webpack_require__) { + +/* WEBPACK VAR INJECTION */(function(module, global) {var __WEBPACK_AMD_DEFINE_RESULT__;/*! https://mths.be/punycode v1.4.1 by @mathias */ +;(function(root) { + + /** Detect free variables */ + var freeExports = typeof exports == 'object' && exports && + !exports.nodeType && exports; + var freeModule = typeof module == 'object' && module && + !module.nodeType && module; + var freeGlobal = typeof global == 'object' && global; + if ( + freeGlobal.global === freeGlobal || + freeGlobal.window === freeGlobal || + freeGlobal.self === freeGlobal + ) { + root = freeGlobal; + } + + /** + * The `punycode` object. + * @name punycode + * @type Object + */ + var punycode, + + /** Highest positive signed 32-bit float value */ + maxInt = 2147483647, // aka. 0x7FFFFFFF or 2^31-1 + + /** Bootstring parameters */ + base = 36, + tMin = 1, + tMax = 26, + skew = 38, + damp = 700, + initialBias = 72, + initialN = 128, // 0x80 + delimiter = '-', // '\x2D' + + /** Regular expressions */ + regexPunycode = /^xn--/, + regexNonASCII = /[^\x20-\x7E]/, // unprintable ASCII chars + non-ASCII chars + regexSeparators = /[\x2E\u3002\uFF0E\uFF61]/g, // RFC 3490 separators + + /** Error messages */ + errors = { + 'overflow': 'Overflow: input needs wider integers to process', + 'not-basic': 'Illegal input >= 0x80 (not a basic code point)', + 'invalid-input': 'Invalid input' + }, + + /** Convenience shortcuts */ + baseMinusTMin = base - tMin, + floor = Math.floor, + stringFromCharCode = String.fromCharCode, + + /** Temporary variable */ + key; + + /*--------------------------------------------------------------------------*/ + + /** + * A generic error utility function. 
+ * @private + * @param {String} type The error type. + * @returns {Error} Throws a `RangeError` with the applicable error message. + */ + function error(type) { + throw new RangeError(errors[type]); + } + + /** + * A generic `Array#map` utility function. + * @private + * @param {Array} array The array to iterate over. + * @param {Function} callback The function that gets called for every array + * item. + * @returns {Array} A new array of values returned by the callback function. + */ + function map(array, fn) { + var length = array.length; + var result = []; + while (length--) { + result[length] = fn(array[length]); + } + return result; + } + + /** + * A simple `Array#map`-like wrapper to work with domain name strings or email + * addresses. + * @private + * @param {String} domain The domain name or email address. + * @param {Function} callback The function that gets called for every + * character. + * @returns {Array} A new string of characters returned by the callback + * function. + */ + function mapDomain(string, fn) { + var parts = string.split('@'); + var result = ''; + if (parts.length > 1) { + // In email addresses, only the domain name should be punycoded. Leave + // the local part (i.e. everything up to `@`) intact. + result = parts[0] + '@'; + string = parts[1]; + } + // Avoid `split(regex)` for IE8 compatibility. See #17. + string = string.replace(regexSeparators, '\x2E'); + var labels = string.split('.'); + var encoded = map(labels, fn).join('.'); + return result + encoded; + } + + /** + * Creates an array containing the numeric code points of each Unicode + * character in the string. While JavaScript uses UCS-2 internally, + * this function will convert a pair of surrogate halves (each of which + * UCS-2 exposes as separate characters) into a single code point, + * matching UTF-16. + * @see `punycode.ucs2.encode` + * @see + * @memberOf punycode.ucs2 + * @name decode + * @param {String} string The Unicode input string (UCS-2). + * @returns {Array} The new array of code points. + */ + function ucs2decode(string) { + var output = [], + counter = 0, + length = string.length, + value, + extra; + while (counter < length) { + value = string.charCodeAt(counter++); + if (value >= 0xD800 && value <= 0xDBFF && counter < length) { + // high surrogate, and there is a next character + extra = string.charCodeAt(counter++); + if ((extra & 0xFC00) == 0xDC00) { // low surrogate + output.push(((value & 0x3FF) << 10) + (extra & 0x3FF) + 0x10000); + } else { + // unmatched surrogate; only append this code unit, in case the next + // code unit is the high surrogate of a surrogate pair + output.push(value); + counter--; + } + } else { + output.push(value); + } + } + return output; + } + + /** + * Creates a string based on an array of numeric code points. + * @see `punycode.ucs2.decode` + * @memberOf punycode.ucs2 + * @name encode + * @param {Array} codePoints The array of numeric code points. + * @returns {String} The new Unicode string (UCS-2). + */ + function ucs2encode(array) { + return map(array, function(value) { + var output = ''; + if (value > 0xFFFF) { + value -= 0x10000; + output += stringFromCharCode(value >>> 10 & 0x3FF | 0xD800); + value = 0xDC00 | value & 0x3FF; + } + output += stringFromCharCode(value); + return output; + }).join(''); + } + + /** + * Converts a basic code point into a digit/integer. + * @see `digitToBasic()` + * @private + * @param {Number} codePoint The basic numeric code point value. 
+ * @returns {Number} The numeric value of a basic code point (for use in + * representing integers) in the range `0` to `base - 1`, or `base` if + * the code point does not represent a value. + */ + function basicToDigit(codePoint) { + if (codePoint - 48 < 10) { + return codePoint - 22; + } + if (codePoint - 65 < 26) { + return codePoint - 65; + } + if (codePoint - 97 < 26) { + return codePoint - 97; + } + return base; + } + + /** + * Converts a digit/integer into a basic code point. + * @see `basicToDigit()` + * @private + * @param {Number} digit The numeric value of a basic code point. + * @returns {Number} The basic code point whose value (when used for + * representing integers) is `digit`, which needs to be in the range + * `0` to `base - 1`. If `flag` is non-zero, the uppercase form is + * used; else, the lowercase form is used. The behavior is undefined + * if `flag` is non-zero and `digit` has no uppercase form. + */ + function digitToBasic(digit, flag) { + // 0..25 map to ASCII a..z or A..Z + // 26..35 map to ASCII 0..9 + return digit + 22 + 75 * (digit < 26) - ((flag != 0) << 5); + } + + /** + * Bias adaptation function as per section 3.4 of RFC 3492. + * https://tools.ietf.org/html/rfc3492#section-3.4 + * @private + */ + function adapt(delta, numPoints, firstTime) { + var k = 0; + delta = firstTime ? floor(delta / damp) : delta >> 1; + delta += floor(delta / numPoints); + for (/* no initialization */; delta > baseMinusTMin * tMax >> 1; k += base) { + delta = floor(delta / baseMinusTMin); + } + return floor(k + (baseMinusTMin + 1) * delta / (delta + skew)); + } + + /** + * Converts a Punycode string of ASCII-only symbols to a string of Unicode + * symbols. + * @memberOf punycode + * @param {String} input The Punycode string of ASCII-only symbols. + * @returns {String} The resulting string of Unicode symbols. + */ + function decode(input) { + // Don't use UCS-2 + var output = [], + inputLength = input.length, + out, + i = 0, + n = initialN, + bias = initialBias, + basic, + j, + index, + oldi, + w, + k, + digit, + t, + /** Cached calculation results */ + baseMinusT; + + // Handle the basic code points: let `basic` be the number of input code + // points before the last delimiter, or `0` if there is none, then copy + // the first basic code points to the output. + + basic = input.lastIndexOf(delimiter); + if (basic < 0) { + basic = 0; + } + + for (j = 0; j < basic; ++j) { + // if it's not a basic code point + if (input.charCodeAt(j) >= 0x80) { + error('not-basic'); + } + output.push(input.charCodeAt(j)); + } + + // Main decoding loop: start just after the last delimiter if any basic code + // points were copied; start at the beginning otherwise. + + for (index = basic > 0 ? basic + 1 : 0; index < inputLength; /* no final expression */) { + + // `index` is the index of the next character to be consumed. + // Decode a generalized variable-length integer into `delta`, + // which gets added to `i`. The overflow checking is easier + // if we increase `i` as we go, then subtract off its starting + // value at the end to obtain `delta`. + for (oldi = i, w = 1, k = base; /* no condition */; k += base) { + + if (index >= inputLength) { + error('invalid-input'); + } + + digit = basicToDigit(input.charCodeAt(index++)); + + if (digit >= base || digit > floor((maxInt - i) / w)) { + error('overflow'); + } + + i += digit * w; + t = k <= bias ? tMin : (k >= bias + tMax ? 
tMax : k - bias); + + if (digit < t) { + break; + } + + baseMinusT = base - t; + if (w > floor(maxInt / baseMinusT)) { + error('overflow'); + } + + w *= baseMinusT; + + } + + out = output.length + 1; + bias = adapt(i - oldi, out, oldi == 0); + + // `i` was supposed to wrap around from `out` to `0`, + // incrementing `n` each time, so we'll fix that now: + if (floor(i / out) > maxInt - n) { + error('overflow'); + } + + n += floor(i / out); + i %= out; + + // Insert `n` at position `i` of the output + output.splice(i++, 0, n); + + } + + return ucs2encode(output); + } + + /** + * Converts a string of Unicode symbols (e.g. a domain name label) to a + * Punycode string of ASCII-only symbols. + * @memberOf punycode + * @param {String} input The string of Unicode symbols. + * @returns {String} The resulting Punycode string of ASCII-only symbols. + */ + function encode(input) { + var n, + delta, + handledCPCount, + basicLength, + bias, + j, + m, + q, + k, + t, + currentValue, + output = [], + /** `inputLength` will hold the number of code points in `input`. */ + inputLength, + /** Cached calculation results */ + handledCPCountPlusOne, + baseMinusT, + qMinusT; + + // Convert the input in UCS-2 to Unicode + input = ucs2decode(input); + + // Cache the length + inputLength = input.length; + + // Initialize the state + n = initialN; + delta = 0; + bias = initialBias; + + // Handle the basic code points + for (j = 0; j < inputLength; ++j) { + currentValue = input[j]; + if (currentValue < 0x80) { + output.push(stringFromCharCode(currentValue)); + } + } + + handledCPCount = basicLength = output.length; + + // `handledCPCount` is the number of code points that have been handled; + // `basicLength` is the number of basic code points. + + // Finish the basic string - if it is not empty - with a delimiter + if (basicLength) { + output.push(delimiter); + } + + // Main encoding loop: + while (handledCPCount < inputLength) { + + // All non-basic code points < n have been handled already. Find the next + // larger one: + for (m = maxInt, j = 0; j < inputLength; ++j) { + currentValue = input[j]; + if (currentValue >= n && currentValue < m) { + m = currentValue; + } + } + + // Increase `delta` enough to advance the decoder's state to , + // but guard against overflow + handledCPCountPlusOne = handledCPCount + 1; + if (m - n > floor((maxInt - delta) / handledCPCountPlusOne)) { + error('overflow'); + } + + delta += (m - n) * handledCPCountPlusOne; + n = m; + + for (j = 0; j < inputLength; ++j) { + currentValue = input[j]; + + if (currentValue < n && ++delta > maxInt) { + error('overflow'); + } + + if (currentValue == n) { + // Represent delta as a generalized variable-length integer + for (q = delta, k = base; /* no condition */; k += base) { + t = k <= bias ? tMin : (k >= bias + tMax ? tMax : k - bias); + if (q < t) { + break; + } + qMinusT = q - t; + baseMinusT = base - t; + output.push( + stringFromCharCode(digitToBasic(t + qMinusT % baseMinusT, 0)) + ); + q = floor(qMinusT / baseMinusT); + } + + output.push(stringFromCharCode(digitToBasic(q, 0))); + bias = adapt(delta, handledCPCountPlusOne, handledCPCount == basicLength); + delta = 0; + ++handledCPCount; + } + } + + ++delta; + ++n; + + } + return output.join(''); + } + + /** + * Converts a Punycode string representing a domain name or an email address + * to Unicode. Only the Punycoded parts of the input will be converted, i.e. + * it doesn't matter if you call it on a string that has already been + * converted to Unicode. 
+ * @memberOf punycode + * @param {String} input The Punycoded domain name or email address to + * convert to Unicode. + * @returns {String} The Unicode representation of the given Punycode + * string. + */ + function toUnicode(input) { + return mapDomain(input, function(string) { + return regexPunycode.test(string) + ? decode(string.slice(4).toLowerCase()) + : string; + }); + } + + /** + * Converts a Unicode string representing a domain name or an email address to + * Punycode. Only the non-ASCII parts of the domain name will be converted, + * i.e. it doesn't matter if you call it with a domain that's already in + * ASCII. + * @memberOf punycode + * @param {String} input The domain name or email address to convert, as a + * Unicode string. + * @returns {String} The Punycode representation of the given domain name or + * email address. + */ + function toASCII(input) { + return mapDomain(input, function(string) { + return regexNonASCII.test(string) + ? 'xn--' + encode(string) + : string; + }); + } + + /*--------------------------------------------------------------------------*/ + + /** Define the public API */ + punycode = { + /** + * A string representing the current Punycode.js version number. + * @memberOf punycode + * @type String + */ + 'version': '1.4.1', + /** + * An object of methods to convert from JavaScript's internal character + * representation (UCS-2) to Unicode code points, and back. + * @see + * @memberOf punycode + * @type Object + */ + 'ucs2': { + 'decode': ucs2decode, + 'encode': ucs2encode + }, + 'decode': decode, + 'encode': encode, + 'toASCII': toASCII, + 'toUnicode': toUnicode + }; + + /** Expose `punycode` */ + // Some AMD build optimizers, like r.js, check for specific condition patterns + // like the following: + if ( + true + ) { + !(__WEBPACK_AMD_DEFINE_RESULT__ = (function() { + return punycode; + }).call(exports, __webpack_require__, exports, module), + __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__)); + } else if (freeExports && freeModule) { + if (module.exports == freeExports) { + // in Node.js, io.js, or RingoJS v0.8.0+ + freeModule.exports = punycode; + } else { + // in Narwhal or RingoJS v0.7.0- + for (key in punycode) { + punycode.hasOwnProperty(key) && (freeExports[key] = punycode[key]); + } + } + } else { + // in Rhino or a web browser + root.punycode = punycode; + } + +}(this)); + +/* WEBPACK VAR INJECTION */}.call(exports, __webpack_require__(26)(module), __webpack_require__(0))) + +/***/ }), +/* 26 */ +/***/ (function(module, exports) { + +module.exports = function(module) { + if(!module.webpackPolyfill) { + module.deprecate = function() {}; + module.paths = []; + // module.parent = undefined by default + if(!module.children) module.children = []; + Object.defineProperty(module, "loaded", { + enumerable: true, + get: function() { + return module.l; + } + }); + Object.defineProperty(module, "id", { + enumerable: true, + get: function() { + return module.i; + } + }); + module.webpackPolyfill = 1; + } + return module; +}; + + +/***/ }), +/* 27 */ +/***/ (function(module, exports, __webpack_require__) { + +"use strict"; + + +module.exports = { + isString: function(arg) { + return typeof(arg) === 'string'; + }, + isObject: function(arg) { + return typeof(arg) === 'object' && arg !== null; + }, + isNull: function(arg) { + return arg === null; + }, + isNullOrUndefined: function(arg) { + return arg == null; + } +}; + + +/***/ }), +/* 28 */ +/***/ (function(module, exports, __webpack_require__) { + +"use 
strict"; + + +exports.decode = exports.parse = __webpack_require__(29); +exports.encode = exports.stringify = __webpack_require__(30); + + +/***/ }), +/* 29 */ +/***/ (function(module, exports, __webpack_require__) { + +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + + + +// If obj.hasOwnProperty has been overridden, then calling +// obj.hasOwnProperty(prop) will break. +// See: https://github.com/joyent/node/issues/1707 +function hasOwnProperty(obj, prop) { + return Object.prototype.hasOwnProperty.call(obj, prop); +} + +module.exports = function(qs, sep, eq, options) { + sep = sep || '&'; + eq = eq || '='; + var obj = {}; + + if (typeof qs !== 'string' || qs.length === 0) { + return obj; + } + + var regexp = /\+/g; + qs = qs.split(sep); + + var maxKeys = 1000; + if (options && typeof options.maxKeys === 'number') { + maxKeys = options.maxKeys; + } + + var len = qs.length; + // maxKeys <= 0 means that we should not limit keys count + if (maxKeys > 0 && len > maxKeys) { + len = maxKeys; + } + + for (var i = 0; i < len; ++i) { + var x = qs[i].replace(regexp, '%20'), + idx = x.indexOf(eq), + kstr, vstr, k, v; + + if (idx >= 0) { + kstr = x.substr(0, idx); + vstr = x.substr(idx + 1); + } else { + kstr = x; + vstr = ''; + } + + k = decodeURIComponent(kstr); + v = decodeURIComponent(vstr); + + if (!hasOwnProperty(obj, k)) { + obj[k] = v; + } else if (isArray(obj[k])) { + obj[k].push(v); + } else { + obj[k] = [obj[k], v]; + } + } + + return obj; +}; + +var isArray = Array.isArray || function (xs) { + return Object.prototype.toString.call(xs) === '[object Array]'; +}; + + +/***/ }), +/* 30 */ +/***/ (function(module, exports, __webpack_require__) { + +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. 
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + + + +var stringifyPrimitive = function(v) { + switch (typeof v) { + case 'string': + return v; + + case 'boolean': + return v ? 'true' : 'false'; + + case 'number': + return isFinite(v) ? v : ''; + + default: + return ''; + } +}; + +module.exports = function(obj, sep, eq, name) { + sep = sep || '&'; + eq = eq || '='; + if (obj === null) { + obj = undefined; + } + + if (typeof obj === 'object') { + return map(objectKeys(obj), function(k) { + var ks = encodeURIComponent(stringifyPrimitive(k)) + eq; + if (isArray(obj[k])) { + return map(obj[k], function(v) { + return ks + encodeURIComponent(stringifyPrimitive(v)); + }).join(sep); + } else { + return ks + encodeURIComponent(stringifyPrimitive(obj[k])); + } + }).join(sep); + + } + + if (!name) return ''; + return encodeURIComponent(stringifyPrimitive(name)) + eq + + encodeURIComponent(stringifyPrimitive(obj)); +}; + +var isArray = Array.isArray || function (xs) { + return Object.prototype.toString.call(xs) === '[object Array]'; +}; + +function map (xs, f) { + if (xs.map) return xs.map(f); + var res = []; + for (var i = 0; i < xs.length; i++) { + res.push(f(xs[i], i)); + } + return res; +} + +var objectKeys = Object.keys || function (obj) { + var res = []; + for (var key in obj) { + if (Object.prototype.hasOwnProperty.call(obj, key)) res.push(key); + } + return res; +}; + + +/***/ }), +/* 31 */ +/***/ (function(module, exports, __webpack_require__) { + +var http = __webpack_require__(11) +var url = __webpack_require__(8) + +var https = module.exports + +for (var key in http) { + if (http.hasOwnProperty(key)) https[key] = http[key] +} + +https.request = function (params, cb) { + params = validateParams(params) + return http.request.call(this, params, cb) +} + +https.get = function (params, cb) { + params = validateParams(params) + return http.get.call(this, params, cb) +} + +function validateParams (params) { + if (typeof params === 'string') { + params = url.parse(params) + } + if (!params.protocol) { + params.protocol = 'https:' + } + if (params.protocol !== 'https:') { + throw new Error('Protocol "' + params.protocol + '" not supported. 
Expected "https:"') + } + return params +} + + +/***/ }), +/* 32 */ +/***/ (function(module, exports, __webpack_require__) { + +/* WEBPACK VAR INJECTION */(function(Buffer, global, process) {var capability = __webpack_require__(12) +var inherits = __webpack_require__(2) +var response = __webpack_require__(13) +var stream = __webpack_require__(14) +var toArrayBuffer = __webpack_require__(40) + +var IncomingMessage = response.IncomingMessage +var rStates = response.readyStates + +function decideMode (preferBinary, useFetch) { + if (capability.fetch && useFetch) { + return 'fetch' + } else if (capability.mozchunkedarraybuffer) { + return 'moz-chunked-arraybuffer' + } else if (capability.msstream) { + return 'ms-stream' + } else if (capability.arraybuffer && preferBinary) { + return 'arraybuffer' + } else if (capability.vbArray && preferBinary) { + return 'text:vbarray' + } else { + return 'text' + } +} + +var ClientRequest = module.exports = function (opts) { + var self = this + stream.Writable.call(self) + + self._opts = opts + self._body = [] + self._headers = {} + if (opts.auth) + self.setHeader('Authorization', 'Basic ' + new Buffer(opts.auth).toString('base64')) + Object.keys(opts.headers).forEach(function (name) { + self.setHeader(name, opts.headers[name]) + }) + + var preferBinary + var useFetch = true + if (opts.mode === 'disable-fetch' || ('requestTimeout' in opts && !capability.abortController)) { + // If the use of XHR should be preferred. Not typically needed. + useFetch = false + preferBinary = true + } else if (opts.mode === 'prefer-streaming') { + // If streaming is a high priority but binary compatibility and + // the accuracy of the 'content-type' header aren't + preferBinary = false + } else if (opts.mode === 'allow-wrong-content-type') { + // If streaming is more important than preserving the 'content-type' header + preferBinary = !capability.overrideMimeType + } else if (!opts.mode || opts.mode === 'default' || opts.mode === 'prefer-fast') { + // Use binary if text streaming may corrupt data or the content-type header, or for speed + preferBinary = true + } else { + throw new Error('Invalid value for opts.mode') + } + self._mode = decideMode(preferBinary, useFetch) + self._fetchTimer = null + + self.on('finish', function () { + self._onFinish() + }) +} + +inherits(ClientRequest, stream.Writable) + +ClientRequest.prototype.setHeader = function (name, value) { + var self = this + var lowerName = name.toLowerCase() + // This check is not necessary, but it prevents warnings from browsers about setting unsafe + // headers. To be honest I'm not entirely sure hiding these warnings is a good thing, but + // http-browserify did it, so I will too. 
+ if (unsafeHeaders.indexOf(lowerName) !== -1) + return + + self._headers[lowerName] = { + name: name, + value: value + } +} + +ClientRequest.prototype.getHeader = function (name) { + var header = this._headers[name.toLowerCase()] + if (header) + return header.value + return null +} + +ClientRequest.prototype.removeHeader = function (name) { + var self = this + delete self._headers[name.toLowerCase()] +} + +ClientRequest.prototype._onFinish = function () { + var self = this + + if (self._destroyed) + return + var opts = self._opts + + var headersObj = self._headers + var body = null + if (opts.method !== 'GET' && opts.method !== 'HEAD') { + if (capability.arraybuffer) { + body = toArrayBuffer(Buffer.concat(self._body)) + } else if (capability.blobConstructor) { + body = new global.Blob(self._body.map(function (buffer) { + return toArrayBuffer(buffer) + }), { + type: (headersObj['content-type'] || {}).value || '' + }) + } else { + // get utf8 string + body = Buffer.concat(self._body).toString() + } + } + + // create flattened list of headers + var headersList = [] + Object.keys(headersObj).forEach(function (keyName) { + var name = headersObj[keyName].name + var value = headersObj[keyName].value + if (Array.isArray(value)) { + value.forEach(function (v) { + headersList.push([name, v]) + }) + } else { + headersList.push([name, value]) + } + }) + + if (self._mode === 'fetch') { + var signal = null + var fetchTimer = null + if (capability.abortController) { + var controller = new AbortController() + signal = controller.signal + self._fetchAbortController = controller + + if ('requestTimeout' in opts && opts.requestTimeout !== 0) { + self._fetchTimer = global.setTimeout(function () { + self.emit('requestTimeout') + if (self._fetchAbortController) + self._fetchAbortController.abort() + }, opts.requestTimeout) + } + } + + global.fetch(self._opts.url, { + method: self._opts.method, + headers: headersList, + body: body || undefined, + mode: 'cors', + credentials: opts.withCredentials ? 
'include' : 'same-origin', + signal: signal + }).then(function (response) { + self._fetchResponse = response + self._connect() + }, function (reason) { + global.clearTimeout(self._fetchTimer) + if (!self._destroyed) + self.emit('error', reason) + }) + } else { + var xhr = self._xhr = new global.XMLHttpRequest() + try { + xhr.open(self._opts.method, self._opts.url, true) + } catch (err) { + process.nextTick(function () { + self.emit('error', err) + }) + return + } + + // Can't set responseType on really old browsers + if ('responseType' in xhr) + xhr.responseType = self._mode.split(':')[0] + + if ('withCredentials' in xhr) + xhr.withCredentials = !!opts.withCredentials + + if (self._mode === 'text' && 'overrideMimeType' in xhr) + xhr.overrideMimeType('text/plain; charset=x-user-defined') + + if ('requestTimeout' in opts) { + xhr.timeout = opts.requestTimeout + xhr.ontimeout = function () { + self.emit('requestTimeout') + } + } + + headersList.forEach(function (header) { + xhr.setRequestHeader(header[0], header[1]) + }) + + self._response = null + xhr.onreadystatechange = function () { + switch (xhr.readyState) { + case rStates.LOADING: + case rStates.DONE: + self._onXHRProgress() + break + } + } + // Necessary for streaming in Firefox, since xhr.response is ONLY defined + // in onprogress, not in onreadystatechange with xhr.readyState = 3 + if (self._mode === 'moz-chunked-arraybuffer') { + xhr.onprogress = function () { + self._onXHRProgress() + } + } + + xhr.onerror = function () { + if (self._destroyed) + return + self.emit('error', new Error('XHR error')) + } + + try { + xhr.send(body) + } catch (err) { + process.nextTick(function () { + self.emit('error', err) + }) + return + } + } +} + +/** + * Checks if xhr.status is readable and non-zero, indicating no error. 
+ * Even though the spec says it should be available in readyState 3, + * accessing it throws an exception in IE8 + */ +function statusValid (xhr) { + try { + var status = xhr.status + return (status !== null && status !== 0) + } catch (e) { + return false + } +} + +ClientRequest.prototype._onXHRProgress = function () { + var self = this + + if (!statusValid(self._xhr) || self._destroyed) + return + + if (!self._response) + self._connect() + + self._response._onXHRProgress() +} + +ClientRequest.prototype._connect = function () { + var self = this + + if (self._destroyed) + return + + self._response = new IncomingMessage(self._xhr, self._fetchResponse, self._mode, self._fetchTimer) + self._response.on('error', function(err) { + self.emit('error', err) + }) + + self.emit('response', self._response) +} + +ClientRequest.prototype._write = function (chunk, encoding, cb) { + var self = this + + self._body.push(chunk) + cb() +} + +ClientRequest.prototype.abort = ClientRequest.prototype.destroy = function () { + var self = this + self._destroyed = true + global.clearTimeout(self._fetchTimer) + if (self._response) + self._response._destroyed = true + if (self._xhr) + self._xhr.abort() + else if (self._fetchAbortController) + self._fetchAbortController.abort() +} + +ClientRequest.prototype.end = function (data, encoding, cb) { + var self = this + if (typeof data === 'function') { + cb = data + data = undefined + } + + stream.Writable.prototype.end.call(self, data, encoding, cb) +} + +ClientRequest.prototype.flushHeaders = function () {} +ClientRequest.prototype.setTimeout = function () {} +ClientRequest.prototype.setNoDelay = function () {} +ClientRequest.prototype.setSocketKeepAlive = function () {} + +// Taken from http://www.w3.org/TR/XMLHttpRequest/#the-setrequestheader%28%29-method +var unsafeHeaders = [ + 'accept-charset', + 'accept-encoding', + 'access-control-request-headers', + 'access-control-request-method', + 'connection', + 'content-length', + 'cookie', + 'cookie2', + 'date', + 'dnt', + 'expect', + 'host', + 'keep-alive', + 'origin', + 'referer', + 'te', + 'trailer', + 'transfer-encoding', + 'upgrade', + 'via' +] + +/* WEBPACK VAR INJECTION */}.call(exports, __webpack_require__(3).Buffer, __webpack_require__(0), __webpack_require__(1))) + +/***/ }), +/* 33 */ +/***/ (function(module, exports) { + +/* (ignored) */ + +/***/ }), +/* 34 */ +/***/ (function(module, exports, __webpack_require__) { + +"use strict"; + + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + +var Buffer = __webpack_require__(7).Buffer; +var util = __webpack_require__(35); + +function copyBuffer(src, target, offset) { + src.copy(target, offset); +} + +module.exports = function () { + function BufferList() { + _classCallCheck(this, BufferList); + + this.head = null; + this.tail = null; + this.length = 0; + } + + BufferList.prototype.push = function push(v) { + var entry = { data: v, next: null }; + if (this.length > 0) this.tail.next = entry;else this.head = entry; + this.tail = entry; + ++this.length; + }; + + BufferList.prototype.unshift = function unshift(v) { + var entry = { data: v, next: this.head }; + if (this.length === 0) this.tail = entry; + this.head = entry; + ++this.length; + }; + + BufferList.prototype.shift = function shift() { + if (this.length === 0) return; + var ret = this.head.data; + if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next; + --this.length; + return 
ret; + }; + + BufferList.prototype.clear = function clear() { + this.head = this.tail = null; + this.length = 0; + }; + + BufferList.prototype.join = function join(s) { + if (this.length === 0) return ''; + var p = this.head; + var ret = '' + p.data; + while (p = p.next) { + ret += s + p.data; + }return ret; + }; + + BufferList.prototype.concat = function concat(n) { + if (this.length === 0) return Buffer.alloc(0); + if (this.length === 1) return this.head.data; + var ret = Buffer.allocUnsafe(n >>> 0); + var p = this.head; + var i = 0; + while (p) { + copyBuffer(p.data, ret, i); + i += p.data.length; + p = p.next; + } + return ret; + }; + + return BufferList; +}(); + +if (util && util.inspect && util.inspect.custom) { + module.exports.prototype[util.inspect.custom] = function () { + var obj = util.inspect({ length: this.length }); + return this.constructor.name + ' ' + obj; + }; +} + +/***/ }), +/* 35 */ +/***/ (function(module, exports) { + +/* (ignored) */ + +/***/ }), +/* 36 */ +/***/ (function(module, exports, __webpack_require__) { + +/* WEBPACK VAR INJECTION */(function(global) {var scope = (typeof global !== "undefined" && global) || + (typeof self !== "undefined" && self) || + window; +var apply = Function.prototype.apply; + +// DOM APIs, for completeness + +exports.setTimeout = function() { + return new Timeout(apply.call(setTimeout, scope, arguments), clearTimeout); +}; +exports.setInterval = function() { + return new Timeout(apply.call(setInterval, scope, arguments), clearInterval); +}; +exports.clearTimeout = +exports.clearInterval = function(timeout) { + if (timeout) { + timeout.close(); + } +}; + +function Timeout(id, clearFn) { + this._id = id; + this._clearFn = clearFn; +} +Timeout.prototype.unref = Timeout.prototype.ref = function() {}; +Timeout.prototype.close = function() { + this._clearFn.call(scope, this._id); +}; + +// Does not start the time, just sets up the members needed. +exports.enroll = function(item, msecs) { + clearTimeout(item._idleTimeoutId); + item._idleTimeout = msecs; +}; + +exports.unenroll = function(item) { + clearTimeout(item._idleTimeoutId); + item._idleTimeout = -1; +}; + +exports._unrefActive = exports.active = function(item) { + clearTimeout(item._idleTimeoutId); + + var msecs = item._idleTimeout; + if (msecs >= 0) { + item._idleTimeoutId = setTimeout(function onTimeout() { + if (item._onTimeout) + item._onTimeout(); + }, msecs); + } +}; + +// setimmediate attaches itself to the global object +__webpack_require__(37); +// On some exotic environments, it's not clear which object `setimmediate` was +// able to install onto. Search each possibility in the same order as the +// `setimmediate` library. 
+exports.setImmediate = (typeof self !== "undefined" && self.setImmediate) || + (typeof global !== "undefined" && global.setImmediate) || + (this && this.setImmediate); +exports.clearImmediate = (typeof self !== "undefined" && self.clearImmediate) || + (typeof global !== "undefined" && global.clearImmediate) || + (this && this.clearImmediate); + +/* WEBPACK VAR INJECTION */}.call(exports, __webpack_require__(0))) + +/***/ }), +/* 37 */ +/***/ (function(module, exports, __webpack_require__) { + +/* WEBPACK VAR INJECTION */(function(global, process) {(function (global, undefined) { + "use strict"; + + if (global.setImmediate) { + return; + } + + var nextHandle = 1; // Spec says greater than zero + var tasksByHandle = {}; + var currentlyRunningATask = false; + var doc = global.document; + var registerImmediate; + + function setImmediate(callback) { + // Callback can either be a function or a string + if (typeof callback !== "function") { + callback = new Function("" + callback); + } + // Copy function arguments + var args = new Array(arguments.length - 1); + for (var i = 0; i < args.length; i++) { + args[i] = arguments[i + 1]; + } + // Store and register the task + var task = { callback: callback, args: args }; + tasksByHandle[nextHandle] = task; + registerImmediate(nextHandle); + return nextHandle++; + } + + function clearImmediate(handle) { + delete tasksByHandle[handle]; + } + + function run(task) { + var callback = task.callback; + var args = task.args; + switch (args.length) { + case 0: + callback(); + break; + case 1: + callback(args[0]); + break; + case 2: + callback(args[0], args[1]); + break; + case 3: + callback(args[0], args[1], args[2]); + break; + default: + callback.apply(undefined, args); + break; + } + } + + function runIfPresent(handle) { + // From the spec: "Wait until any invocations of this algorithm started before this one have completed." + // So if we're currently running a task, we'll need to delay this invocation. + if (currentlyRunningATask) { + // Delay by doing a setTimeout. setImmediate was tried instead, but in Firefox 7 it generated a + // "too much recursion" error. + setTimeout(runIfPresent, 0, handle); + } else { + var task = tasksByHandle[handle]; + if (task) { + currentlyRunningATask = true; + try { + run(task); + } finally { + clearImmediate(handle); + currentlyRunningATask = false; + } + } + } + } + + function installNextTickImplementation() { + registerImmediate = function(handle) { + process.nextTick(function () { runIfPresent(handle); }); + }; + } + + function canUsePostMessage() { + // The test against `importScripts` prevents this implementation from being installed inside a web worker, + // where `global.postMessage` means something completely different and can't be used for this purpose. 
+ if (global.postMessage && !global.importScripts) { + var postMessageIsAsynchronous = true; + var oldOnMessage = global.onmessage; + global.onmessage = function() { + postMessageIsAsynchronous = false; + }; + global.postMessage("", "*"); + global.onmessage = oldOnMessage; + return postMessageIsAsynchronous; + } + } + + function installPostMessageImplementation() { + // Installs an event handler on `global` for the `message` event: see + // * https://developer.mozilla.org/en/DOM/window.postMessage + // * http://www.whatwg.org/specs/web-apps/current-work/multipage/comms.html#crossDocumentMessages + + var messagePrefix = "setImmediate$" + Math.random() + "$"; + var onGlobalMessage = function(event) { + if (event.source === global && + typeof event.data === "string" && + event.data.indexOf(messagePrefix) === 0) { + runIfPresent(+event.data.slice(messagePrefix.length)); + } + }; + + if (global.addEventListener) { + global.addEventListener("message", onGlobalMessage, false); + } else { + global.attachEvent("onmessage", onGlobalMessage); + } + + registerImmediate = function(handle) { + global.postMessage(messagePrefix + handle, "*"); + }; + } + + function installMessageChannelImplementation() { + var channel = new MessageChannel(); + channel.port1.onmessage = function(event) { + var handle = event.data; + runIfPresent(handle); + }; + + registerImmediate = function(handle) { + channel.port2.postMessage(handle); + }; + } + + function installReadyStateChangeImplementation() { + var html = doc.documentElement; + registerImmediate = function(handle) { + // Create a + + + diff --git a/node_modules/smee-client/node_modules/eventsource/example/sse-client.js b/node_modules/smee-client/node_modules/eventsource/example/sse-client.js new file mode 100644 index 0000000..72c49a9 --- /dev/null +++ b/node_modules/smee-client/node_modules/eventsource/example/sse-client.js @@ -0,0 +1,5 @@ +var EventSource = require('..') +var es = new EventSource('http://localhost:8080/sse') +es.addEventListener('server-time', function (e) { + console.log(e.data) +}) diff --git a/node_modules/smee-client/node_modules/eventsource/example/sse-server.js b/node_modules/smee-client/node_modules/eventsource/example/sse-server.js new file mode 100644 index 0000000..034e3b4 --- /dev/null +++ b/node_modules/smee-client/node_modules/eventsource/example/sse-server.js @@ -0,0 +1,29 @@ +const express = require('express') +const serveStatic = require('serve-static') +const SseStream = require('ssestream') + +const app = express() +app.use(serveStatic(__dirname)) +app.get('/sse', (req, res) => { + console.log('new connection') + + const sseStream = new SseStream(req) + sseStream.pipe(res) + const pusher = setInterval(() => { + sseStream.write({ + event: 'server-time', + data: new Date().toTimeString() + }) + }, 1000) + + res.on('close', () => { + console.log('lost connection') + clearInterval(pusher) + sseStream.unpipe(res) + }) +}) + +app.listen(8080, (err) => { + if (err) throw err + console.log('server ready on http://localhost:8080') +}) diff --git a/node_modules/smee-client/node_modules/eventsource/lib/eventsource-polyfill.js b/node_modules/smee-client/node_modules/eventsource/lib/eventsource-polyfill.js new file mode 100644 index 0000000..6ed4396 --- /dev/null +++ b/node_modules/smee-client/node_modules/eventsource/lib/eventsource-polyfill.js @@ -0,0 +1,9 @@ +var EventSource = require('./eventsource') + +if (typeof window === 'object') { + window.EventSourcePolyfill = EventSource + if (!window.EventSource) window.EventSource = EventSource 
+ module.exports = window.EventSource +} else { + module.exports = EventSource +} diff --git a/node_modules/smee-client/node_modules/eventsource/lib/eventsource.js b/node_modules/smee-client/node_modules/eventsource/lib/eventsource.js new file mode 100644 index 0000000..d97157e --- /dev/null +++ b/node_modules/smee-client/node_modules/eventsource/lib/eventsource.js @@ -0,0 +1,485 @@ +var parse = require('url').parse +var events = require('events') +var https = require('https') +var http = require('http') +var util = require('util') + +var httpsOptions = [ + 'pfx', 'key', 'passphrase', 'cert', 'ca', 'ciphers', + 'rejectUnauthorized', 'secureProtocol', 'servername', 'checkServerIdentity' +] + +var bom = [239, 187, 191] +var colon = 58 +var space = 32 +var lineFeed = 10 +var carriageReturn = 13 + +function hasBom (buf) { + return bom.every(function (charCode, index) { + return buf[index] === charCode + }) +} + +/** + * Creates a new EventSource object + * + * @param {String} url the URL to which to connect + * @param {Object} [eventSourceInitDict] extra init params. See README for details. + * @api public + **/ +function EventSource (url, eventSourceInitDict) { + var readyState = EventSource.CONNECTING + var headers = eventSourceInitDict && eventSourceInitDict.headers + var hasNewOrigin = false + Object.defineProperty(this, 'readyState', { + get: function () { + return readyState + } + }) + + Object.defineProperty(this, 'url', { + get: function () { + return url + } + }) + + var self = this + self.reconnectInterval = 1000 + self.connectionInProgress = false + + function onConnectionClosed (message) { + if (readyState === EventSource.CLOSED) return + readyState = EventSource.CONNECTING + _emit('error', new Event('error', {message: message})) + + // The url may have been changed by a temporary redirect. If that's the case, + // revert it now, and flag that we are no longer pointing to a new origin + if (reconnectUrl) { + url = reconnectUrl + reconnectUrl = null + hasNewOrigin = false + } + setTimeout(function () { + if (readyState !== EventSource.CONNECTING || self.connectionInProgress) { + return + } + self.connectionInProgress = true + connect() + }, self.reconnectInterval) + } + + var req + var lastEventId = '' + if (headers && headers['Last-Event-ID']) { + lastEventId = headers['Last-Event-ID'] + delete headers['Last-Event-ID'] + } + + var discardTrailingNewline = false + var data = '' + var eventName = '' + + var reconnectUrl = null + + function connect () { + var options = parse(url) + var isSecure = options.protocol === 'https:' + options.headers = { 'Cache-Control': 'no-cache', 'Accept': 'text/event-stream' } + if (lastEventId) options.headers['Last-Event-ID'] = lastEventId + if (headers) { + var reqHeaders = hasNewOrigin ? 
removeUnsafeHeaders(headers) : headers + for (var i in reqHeaders) { + var header = reqHeaders[i] + if (header) { + options.headers[i] = header + } + } + } + + // Legacy: this should be specified as `eventSourceInitDict.https.rejectUnauthorized`, + // but for now exists as a backwards-compatibility layer + options.rejectUnauthorized = !(eventSourceInitDict && !eventSourceInitDict.rejectUnauthorized) + + if (eventSourceInitDict && eventSourceInitDict.createConnection !== undefined) { + options.createConnection = eventSourceInitDict.createConnection + } + + // If specify http proxy, make the request to sent to the proxy server, + // and include the original url in path and Host headers + var useProxy = eventSourceInitDict && eventSourceInitDict.proxy + if (useProxy) { + var proxy = parse(eventSourceInitDict.proxy) + isSecure = proxy.protocol === 'https:' + + options.protocol = isSecure ? 'https:' : 'http:' + options.path = url + options.headers.Host = options.host + options.hostname = proxy.hostname + options.host = proxy.host + options.port = proxy.port + } + + // If https options are specified, merge them into the request options + if (eventSourceInitDict && eventSourceInitDict.https) { + for (var optName in eventSourceInitDict.https) { + if (httpsOptions.indexOf(optName) === -1) { + continue + } + + var option = eventSourceInitDict.https[optName] + if (option !== undefined) { + options[optName] = option + } + } + } + + // Pass this on to the XHR + if (eventSourceInitDict && eventSourceInitDict.withCredentials !== undefined) { + options.withCredentials = eventSourceInitDict.withCredentials + } + + req = (isSecure ? https : http).request(options, function (res) { + self.connectionInProgress = false + // Handle HTTP errors + if (res.statusCode === 500 || res.statusCode === 502 || res.statusCode === 503 || res.statusCode === 504) { + _emit('error', new Event('error', {status: res.statusCode, message: res.statusMessage})) + onConnectionClosed() + return + } + + // Handle HTTP redirects + if (res.statusCode === 301 || res.statusCode === 302 || res.statusCode === 307) { + var location = res.headers.location + if (!location) { + // Server sent redirect response without Location header. + _emit('error', new Event('error', {status: res.statusCode, message: res.statusMessage})) + return + } + var prevOrigin = getOrigin(url) + var nextOrigin = getOrigin(location) + hasNewOrigin = prevOrigin !== nextOrigin + if (res.statusCode === 307) reconnectUrl = url + url = location + process.nextTick(connect) + return + } + + if (res.statusCode !== 200) { + _emit('error', new Event('error', {status: res.statusCode, message: res.statusMessage})) + return self.close() + } + + readyState = EventSource.OPEN + res.on('close', function () { + res.removeAllListeners('close') + res.removeAllListeners('end') + onConnectionClosed() + }) + + res.on('end', function () { + res.removeAllListeners('close') + res.removeAllListeners('end') + onConnectionClosed() + }) + _emit('open', new Event('open')) + + // text/event-stream parser adapted from webkit's + // Source/WebCore/page/EventSource.cpp + var isFirst = true + var buf + var startingPos = 0 + var startingFieldLength = -1 + res.on('data', function (chunk) { + buf = buf ? 
Buffer.concat([buf, chunk]) : chunk + if (isFirst && hasBom(buf)) { + buf = buf.slice(bom.length) + } + + isFirst = false + var pos = 0 + var length = buf.length + + while (pos < length) { + if (discardTrailingNewline) { + if (buf[pos] === lineFeed) { + ++pos + } + discardTrailingNewline = false + } + + var lineLength = -1 + var fieldLength = startingFieldLength + var c + + for (var i = startingPos; lineLength < 0 && i < length; ++i) { + c = buf[i] + if (c === colon) { + if (fieldLength < 0) { + fieldLength = i - pos + } + } else if (c === carriageReturn) { + discardTrailingNewline = true + lineLength = i - pos + } else if (c === lineFeed) { + lineLength = i - pos + } + } + + if (lineLength < 0) { + startingPos = length - pos + startingFieldLength = fieldLength + break + } else { + startingPos = 0 + startingFieldLength = -1 + } + + parseEventStreamLine(buf, pos, fieldLength, lineLength) + + pos += lineLength + 1 + } + + if (pos === length) { + buf = void 0 + } else if (pos > 0) { + buf = buf.slice(pos) + } + }) + }) + + req.on('error', function (err) { + self.connectionInProgress = false + onConnectionClosed(err.message) + }) + + if (req.setNoDelay) req.setNoDelay(true) + req.end() + } + + connect() + + function _emit () { + if (self.listeners(arguments[0]).length > 0) { + self.emit.apply(self, arguments) + } + } + + this._close = function () { + if (readyState === EventSource.CLOSED) return + readyState = EventSource.CLOSED + if (req.abort) req.abort() + if (req.xhr && req.xhr.abort) req.xhr.abort() + } + + function parseEventStreamLine (buf, pos, fieldLength, lineLength) { + if (lineLength === 0) { + if (data.length > 0) { + var type = eventName || 'message' + _emit(type, new MessageEvent(type, { + data: data.slice(0, -1), // remove trailing newline + lastEventId: lastEventId, + origin: getOrigin(url) + })) + data = '' + } + eventName = void 0 + } else if (fieldLength > 0) { + var noValue = fieldLength < 0 + var step = 0 + var field = buf.slice(pos, pos + (noValue ? lineLength : fieldLength)).toString() + + if (noValue) { + step = lineLength + } else if (buf[pos + fieldLength + 1] !== space) { + step = fieldLength + 1 + } else { + step = fieldLength + 2 + } + pos += step + + var valueLength = lineLength - step + var value = buf.slice(pos, pos + valueLength).toString() + + if (field === 'data') { + data += value + '\n' + } else if (field === 'event') { + eventName = value + } else if (field === 'id') { + lastEventId = value + } else if (field === 'retry') { + var retry = parseInt(value, 10) + if (!Number.isNaN(retry)) { + self.reconnectInterval = retry + } + } + } + } +} + +module.exports = EventSource + +util.inherits(EventSource, events.EventEmitter) +EventSource.prototype.constructor = EventSource; // make stacktraces readable + +['open', 'error', 'message'].forEach(function (method) { + Object.defineProperty(EventSource.prototype, 'on' + method, { + /** + * Returns the current listener + * + * @return {Mixed} the set function or undefined + * @api private + */ + get: function get () { + var listener = this.listeners(method)[0] + return listener ? (listener._listener ? 
listener._listener : listener) : undefined + }, + + /** + * Start listening for events + * + * @param {Function} listener the listener + * @return {Mixed} the set function or undefined + * @api private + */ + set: function set (listener) { + this.removeAllListeners(method) + this.addEventListener(method, listener) + } + }) +}) + +/** + * Ready states + */ +Object.defineProperty(EventSource, 'CONNECTING', {enumerable: true, value: 0}) +Object.defineProperty(EventSource, 'OPEN', {enumerable: true, value: 1}) +Object.defineProperty(EventSource, 'CLOSED', {enumerable: true, value: 2}) + +EventSource.prototype.CONNECTING = 0 +EventSource.prototype.OPEN = 1 +EventSource.prototype.CLOSED = 2 + +/** + * Closes the connection, if one is made, and sets the readyState attribute to 2 (closed) + * + * @see https://developer.mozilla.org/en-US/docs/Web/API/EventSource/close + * @api public + */ +EventSource.prototype.close = function () { + this._close() +} + +/** + * Emulates the W3C Browser based WebSocket interface using addEventListener. + * + * @param {String} type A string representing the event type to listen out for + * @param {Function} listener callback + * @see https://developer.mozilla.org/en/DOM/element.addEventListener + * @see http://dev.w3.org/html5/websockets/#the-websocket-interface + * @api public + */ +EventSource.prototype.addEventListener = function addEventListener (type, listener) { + if (typeof listener === 'function') { + // store a reference so we can return the original function again + listener._listener = listener + this.on(type, listener) + } +} + +/** + * Emulates the W3C Browser based WebSocket interface using dispatchEvent. + * + * @param {Event} event An event to be dispatched + * @see https://developer.mozilla.org/en-US/docs/Web/API/EventTarget/dispatchEvent + * @api public + */ +EventSource.prototype.dispatchEvent = function dispatchEvent (event) { + if (!event.type) { + throw new Error('UNSPECIFIED_EVENT_TYPE_ERR') + } + // if event is instance of an CustomEvent (or has 'details' property), + // send the detail object as the payload for the event + this.emit(event.type, event.detail) +} + +/** + * Emulates the W3C Browser based WebSocket interface using removeEventListener. 
+ * + * @param {String} type A string representing the event type to remove + * @param {Function} listener callback + * @see https://developer.mozilla.org/en/DOM/element.removeEventListener + * @see http://dev.w3.org/html5/websockets/#the-websocket-interface + * @api public + */ +EventSource.prototype.removeEventListener = function removeEventListener (type, listener) { + if (typeof listener === 'function') { + listener._listener = undefined + this.removeListener(type, listener) + } +} + +/** + * W3C Event + * + * @see http://www.w3.org/TR/DOM-Level-3-Events/#interface-Event + * @api private + */ +function Event (type, optionalProperties) { + Object.defineProperty(this, 'type', { writable: false, value: type, enumerable: true }) + if (optionalProperties) { + for (var f in optionalProperties) { + if (optionalProperties.hasOwnProperty(f)) { + Object.defineProperty(this, f, { writable: false, value: optionalProperties[f], enumerable: true }) + } + } + } +} + +/** + * W3C MessageEvent + * + * @see http://www.w3.org/TR/webmessaging/#event-definitions + * @api private + */ +function MessageEvent (type, eventInitDict) { + Object.defineProperty(this, 'type', { writable: false, value: type, enumerable: true }) + for (var f in eventInitDict) { + if (eventInitDict.hasOwnProperty(f)) { + Object.defineProperty(this, f, { writable: false, value: eventInitDict[f], enumerable: true }) + } + } +} + +/** + * Returns a new object of headers that does not include any authorization and cookie headers + * + * @param {Object} headers An object of headers ({[headerName]: headerValue}) + * @return {Object} a new object of headers + * @api private + */ +function removeUnsafeHeaders (headers) { + var safe = {} + for (var key in headers) { + if (/^(cookie|authorization)$/i.test(key)) { + continue + } + + safe[key] = headers[key] + } + + return safe +} + +/** + * Transform an URL to a valid origin value. + * + * @param {String|Object} url URL to transform to it's origin. + * @returns {String} The origin. 
+ * @api private + */ +function getOrigin (url) { + if (typeof url === 'string') url = parse(url) + if (!url.protocol || !url.hostname) return 'null' + return (url.protocol + '//' + url.host).toLowerCase() +} diff --git a/node_modules/smee-client/node_modules/eventsource/package.json b/node_modules/smee-client/node_modules/eventsource/package.json new file mode 100644 index 0000000..dc4ee49 --- /dev/null +++ b/node_modules/smee-client/node_modules/eventsource/package.json @@ -0,0 +1,56 @@ +{ + "name": "eventsource", + "version": "1.1.2", + "description": "W3C compliant EventSource client for Node.js and browser (polyfill)", + "keywords": [ + "eventsource", + "http", + "streaming", + "sse", + "polyfill" + ], + "homepage": "http://github.com/EventSource/eventsource", + "author": "Aslak Hellesøy ", + "repository": { + "type": "git", + "url": "git://github.com/EventSource/eventsource.git" + }, + "bugs": { + "url": "http://github.com/EventSource/eventsource/issues" + }, + "directories": { + "lib": "./lib" + }, + "main": "./lib/eventsource", + "license": "MIT", + "licenses": [ + { + "type": "MIT", + "url": "http://github.com/EventSource/eventsource/raw/master/LICENSE" + } + ], + "devDependencies": { + "buffer-from": "^1.1.1", + "express": "^4.15.3", + "mocha": "^3.5.3", + "nyc": "^11.2.1", + "serve-static": "^1.12.3", + "ssestream": "^1.0.0", + "standard": "^10.0.2", + "webpack": "^3.5.6" + }, + "scripts": { + "test": "mocha --reporter spec && standard", + "polyfill": "webpack lib/eventsource-polyfill.js example/eventsource-polyfill.js", + "postpublish": "git push && git push --tags", + "coverage": "nyc --reporter=html --reporter=text _mocha --reporter spec" + }, + "engines": { + "node": ">=0.12.0" + }, + "standard": { + "ignore": [ + "example/eventsource-polyfill.js" + ] + } +} diff --git a/node_modules/smee-client/package.json b/node_modules/smee-client/package.json new file mode 100644 index 0000000..d7d397f --- /dev/null +++ b/node_modules/smee-client/package.json @@ -0,0 +1,54 @@ +{ + "name": "smee-client", + "version": "1.2.3", + "description": "Client to proxy webhooks to local host", + "main": "index.js", + "bin": { + "smee": "./bin/smee.js" + }, + "files": [ + "index.js", + "index.d.ts", + "bin", + "lib" + ], + "scripts": { + "test": "jest --coverage && standard", + "build": "tsc -p tsconfig.json" + }, + "repository": "github:probot/smee-client", + "author": "", + "license": "ISC", + "dependencies": { + "commander": "^2.19.0", + "eventsource": "^1.1.0", + "morgan": "^1.9.1", + "superagent": "^7.1.3", + "validator": "^13.7.0" + }, + "devDependencies": { + "@babel/core": "^7.4.0", + "@types/eventsource": "^1.1.8", + "@types/jest": "^24.0.11", + "@types/nock": "^9.3.1", + "@types/superagent": "^4.1.15", + "@types/validator": "^10.11.0", + "babel-core": "^7.0.0-bridge.0", + "babel-jest": "^24.5.0", + "connect-sse": "^1.2.0", + "jest": "^24.5.0", + "nock": "^10.0.6", + "standard": "^12.0.1", + "supertest": "^4.0.2", + "ts-jest": "^24.0.1", + "typescript": "^3.4.1" + }, + "standard": { + "env": [ + "jest" + ] + }, + "jest": { + "preset": "ts-jest" + } +} diff --git a/node_modules/string_decoder/LICENSE b/node_modules/string_decoder/LICENSE new file mode 100644 index 0000000..778edb2 --- /dev/null +++ b/node_modules/string_decoder/LICENSE @@ -0,0 +1,48 @@ +Node.js is licensed for use as follows: + +""" +Copyright Node.js contributors. All rights reserved. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + +This license applies to parts of Node.js originating from the +https://github.com/joyent/node repository: + +""" +Copyright Joyent, Inc. and other Node contributors. All rights reserved. +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + diff --git a/node_modules/string_decoder/README.md b/node_modules/string_decoder/README.md new file mode 100644 index 0000000..5fd5831 --- /dev/null +++ b/node_modules/string_decoder/README.md @@ -0,0 +1,47 @@ +# string_decoder + +***Node-core v8.9.4 string_decoder for userland*** + + +[![NPM](https://nodei.co/npm/string_decoder.png?downloads=true&downloadRank=true)](https://nodei.co/npm/string_decoder/) +[![NPM](https://nodei.co/npm-dl/string_decoder.png?&months=6&height=3)](https://nodei.co/npm/string_decoder/) + + +```bash +npm install --save string_decoder +``` + +***Node-core string_decoder for userland*** + +This package is a mirror of the string_decoder implementation in Node-core. + +Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v8.9.4/docs/api/). + +As of version 1.0.0 **string_decoder** uses semantic versioning. + +## Previous versions + +Previous version numbers match the versions found in Node core, e.g. 0.10.24 matches Node 0.10.24, likewise 0.11.10 matches Node 0.11.10. + +## Update + +The *build/* directory contains a build script that will scrape the source from the [nodejs/node](https://github.com/nodejs/node) repo given a specific Node version. 
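The vendored README above explains what this module mirrors but does not show it in use. For quick orientation, a minimal sketch of typical usage, assuming a modern Node.js runtime where `Buffer.from` is available (this snippet is an illustration, not part of the upstream README):

```js
// Minimal sketch: why per-chunk toString() is not enough for streamed UTF-8.
const { StringDecoder } = require('string_decoder')

// 'é' is two UTF-8 bytes (0xC3 0xA9) and here it arrives split across two chunks.
const chunks = [Buffer.from([0xC3]), Buffer.from([0xA9])]

// Naive per-chunk toString() garbles the character into two replacement chars:
console.log(chunks.map((c) => c.toString('utf8')).join('')) // '\ufffd\ufffd'

// StringDecoder buffers the partial byte until the character is complete:
const decoder = new StringDecoder('utf8')
console.log(chunks.map((c) => decoder.write(c)).join('') + decoder.end()) // 'é'
```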
+ +## Streams Working Group + +`string_decoder` is maintained by the Streams Working Group, which +oversees the development and maintenance of the Streams API within +Node.js. The responsibilities of the Streams Working Group include: + +* Addressing stream issues on the Node.js issue tracker. +* Authoring and editing stream documentation within the Node.js project. +* Reviewing changes to stream subclasses within the Node.js project. +* Redirecting changes to streams from the Node.js project to this + project. +* Assisting in the implementation of stream providers within Node.js. +* Recommending versions of `readable-stream` to be included in Node.js. +* Messaging about the future of streams to give the community advance + notice of changes. + +See [readable-stream](https://github.com/nodejs/readable-stream) for +more details. diff --git a/node_modules/string_decoder/lib/string_decoder.js b/node_modules/string_decoder/lib/string_decoder.js new file mode 100644 index 0000000..2e89e63 --- /dev/null +++ b/node_modules/string_decoder/lib/string_decoder.js @@ -0,0 +1,296 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ +'use strict'; + +/**/ + +var Buffer = require('safe-buffer').Buffer; +/**/ + +var isEncoding = Buffer.isEncoding || function (encoding) { + encoding = '' + encoding; + switch (encoding && encoding.toLowerCase()) { + case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw': + return true; + default: + return false; + } +}; + +function _normalizeEncoding(enc) { + if (!enc) return 'utf8'; + var retried; + while (true) { + switch (enc) { + case 'utf8': + case 'utf-8': + return 'utf8'; + case 'ucs2': + case 'ucs-2': + case 'utf16le': + case 'utf-16le': + return 'utf16le'; + case 'latin1': + case 'binary': + return 'latin1'; + case 'base64': + case 'ascii': + case 'hex': + return enc; + default: + if (retried) return; // undefined + enc = ('' + enc).toLowerCase(); + retried = true; + } + } +}; + +// Do not cache `Buffer.isEncoding` when checking encoding names as some +// modules monkey-patch it to support additional encodings +function normalizeEncoding(enc) { + var nenc = _normalizeEncoding(enc); + if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc); + return nenc || enc; +} + +// StringDecoder provides an interface for efficiently splitting a series of +// buffers into a series of JS strings without breaking apart multi-byte +// characters. +exports.StringDecoder = StringDecoder; +function StringDecoder(encoding) { + this.encoding = normalizeEncoding(encoding); + var nb; + switch (this.encoding) { + case 'utf16le': + this.text = utf16Text; + this.end = utf16End; + nb = 4; + break; + case 'utf8': + this.fillLast = utf8FillLast; + nb = 4; + break; + case 'base64': + this.text = base64Text; + this.end = base64End; + nb = 3; + break; + default: + this.write = simpleWrite; + this.end = simpleEnd; + return; + } + this.lastNeed = 0; + this.lastTotal = 0; + this.lastChar = Buffer.allocUnsafe(nb); +} + +StringDecoder.prototype.write = function (buf) { + if (buf.length === 0) return ''; + var r; + var i; + if (this.lastNeed) { + r = this.fillLast(buf); + if (r === undefined) return ''; + i = this.lastNeed; + this.lastNeed = 0; + } else { + i = 0; + } + if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i); + return r || ''; +}; + +StringDecoder.prototype.end = utf8End; + +// Returns only complete characters in a Buffer +StringDecoder.prototype.text = utf8Text; + +// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer +StringDecoder.prototype.fillLast = function (buf) { + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); + } + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length); + this.lastNeed -= buf.length; +}; + +// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a +// continuation byte. If an invalid byte is detected, -2 is returned. +function utf8CheckByte(byte) { + if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4; + return byte >> 6 === 0x02 ? -1 : -2; +} + +// Checks at most 3 bytes at the end of a Buffer in order to detect an +// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4) +// needed to complete the UTF-8 character (if applicable) are returned. 
+function utf8CheckIncomplete(self, buf, i) { + var j = buf.length - 1; + if (j < i) return 0; + var nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 1; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 2; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) { + if (nb === 2) nb = 0;else self.lastNeed = nb - 3; + } + return nb; + } + return 0; +} + +// Validates as many continuation bytes for a multi-byte UTF-8 character as +// needed or are available. If we see a non-continuation byte where we expect +// one, we "replace" the validated continuation bytes we've seen so far with +// a single UTF-8 replacement character ('\ufffd'), to match v8's UTF-8 decoding +// behavior. The continuation byte check is included three times in the case +// where all of the continuation bytes for a character exist in the same buffer. +// It is also done this way as a slight performance increase instead of using a +// loop. +function utf8CheckExtraBytes(self, buf, p) { + if ((buf[0] & 0xC0) !== 0x80) { + self.lastNeed = 0; + return '\ufffd'; + } + if (self.lastNeed > 1 && buf.length > 1) { + if ((buf[1] & 0xC0) !== 0x80) { + self.lastNeed = 1; + return '\ufffd'; + } + if (self.lastNeed > 2 && buf.length > 2) { + if ((buf[2] & 0xC0) !== 0x80) { + self.lastNeed = 2; + return '\ufffd'; + } + } + } +} + +// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer. +function utf8FillLast(buf) { + var p = this.lastTotal - this.lastNeed; + var r = utf8CheckExtraBytes(this, buf, p); + if (r !== undefined) return r; + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, p, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); + } + buf.copy(this.lastChar, p, 0, buf.length); + this.lastNeed -= buf.length; +} + +// Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a +// partial character, the character's bytes are buffered until the required +// number of bytes are available. +function utf8Text(buf, i) { + var total = utf8CheckIncomplete(this, buf, i); + if (!this.lastNeed) return buf.toString('utf8', i); + this.lastTotal = total; + var end = buf.length - (total - this.lastNeed); + buf.copy(this.lastChar, 0, end); + return buf.toString('utf8', i, end); +} + +// For UTF-8, a replacement character is added when ending on a partial +// character. +function utf8End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) return r + '\ufffd'; + return r; +} + +// UTF-16LE typically needs two bytes per character, but even if we have an even +// number of bytes available, we need to check if we end on a leading/high +// surrogate. In that case, we need to wait for the next two bytes in order to +// decode the last character properly. 
+function utf16Text(buf, i) { + if ((buf.length - i) % 2 === 0) { + var r = buf.toString('utf16le', i); + if (r) { + var c = r.charCodeAt(r.length - 1); + if (c >= 0xD800 && c <= 0xDBFF) { + this.lastNeed = 2; + this.lastTotal = 4; + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; + return r.slice(0, -1); + } + } + return r; + } + this.lastNeed = 1; + this.lastTotal = 2; + this.lastChar[0] = buf[buf.length - 1]; + return buf.toString('utf16le', i, buf.length - 1); +} + +// For UTF-16LE we do not explicitly append special replacement characters if we +// end on a partial character, we simply let v8 handle that. +function utf16End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) { + var end = this.lastTotal - this.lastNeed; + return r + this.lastChar.toString('utf16le', 0, end); + } + return r; +} + +function base64Text(buf, i) { + var n = (buf.length - i) % 3; + if (n === 0) return buf.toString('base64', i); + this.lastNeed = 3 - n; + this.lastTotal = 3; + if (n === 1) { + this.lastChar[0] = buf[buf.length - 1]; + } else { + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; + } + return buf.toString('base64', i, buf.length - n); +} + +function base64End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed); + return r; +} + +// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex) +function simpleWrite(buf) { + return buf.toString(this.encoding); +} + +function simpleEnd(buf) { + return buf && buf.length ? this.write(buf) : ''; +} \ No newline at end of file diff --git a/node_modules/string_decoder/node_modules/safe-buffer/LICENSE b/node_modules/string_decoder/node_modules/safe-buffer/LICENSE new file mode 100644 index 0000000..0c068ce --- /dev/null +++ b/node_modules/string_decoder/node_modules/safe-buffer/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Feross Aboukhadijeh + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
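To make the buffering logic in `lib/string_decoder.js` above concrete: `utf8CheckIncomplete` records how many bytes of a multi-byte character are still missing (`lastNeed`), `utf8FillLast` completes the character on the next `write()`, and `utf8End` emits U+FFFD when input ends mid-character. A hedged sketch of that behaviour, assuming a modern Node.js runtime (reading `decoder.lastNeed` inspects an internal field purely for illustration):

```js
// Hedged sketch of the partial-character buffering implemented above.
const { StringDecoder } = require('string_decoder')

// U+1D11E (musical G clef) is four UTF-8 bytes: F0 9D 84 9E. Send only three.
const decoder = new StringDecoder('utf8')
console.log(decoder.write(Buffer.from([0xF0, 0x9D, 0x84]))) // '' (incomplete, bytes stashed in lastChar)
console.log(decoder.lastNeed)                               // 1 (one byte still required; internal field)
console.log(decoder.write(Buffer.from([0x9E])))             // the completed clef character

// Ending while a character is still pending emits U+FFFD, per utf8End above.
const other = new StringDecoder('utf8')
other.write(Buffer.from([0xE2, 0x82])) // first two bytes of the euro sign
console.log(other.end())               // '\ufffd'
```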
diff --git a/node_modules/string_decoder/node_modules/safe-buffer/README.md b/node_modules/string_decoder/node_modules/safe-buffer/README.md new file mode 100644 index 0000000..e9a81af --- /dev/null +++ b/node_modules/string_decoder/node_modules/safe-buffer/README.md @@ -0,0 +1,584 @@ +# safe-buffer [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url] + +[travis-image]: https://img.shields.io/travis/feross/safe-buffer/master.svg +[travis-url]: https://travis-ci.org/feross/safe-buffer +[npm-image]: https://img.shields.io/npm/v/safe-buffer.svg +[npm-url]: https://npmjs.org/package/safe-buffer +[downloads-image]: https://img.shields.io/npm/dm/safe-buffer.svg +[downloads-url]: https://npmjs.org/package/safe-buffer +[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg +[standard-url]: https://standardjs.com + +#### Safer Node.js Buffer API + +**Use the new Node.js Buffer APIs (`Buffer.from`, `Buffer.alloc`, +`Buffer.allocUnsafe`, `Buffer.allocUnsafeSlow`) in all versions of Node.js.** + +**Uses the built-in implementation when available.** + +## install + +``` +npm install safe-buffer +``` + +## usage + +The goal of this package is to provide a safe replacement for the node.js `Buffer`. + +It's a drop-in replacement for `Buffer`. You can use it by adding one `require` line to +the top of your node.js modules: + +```js +var Buffer = require('safe-buffer').Buffer + +// Existing buffer code will continue to work without issues: + +new Buffer('hey', 'utf8') +new Buffer([1, 2, 3], 'utf8') +new Buffer(obj) +new Buffer(16) // create an uninitialized buffer (potentially unsafe) + +// But you can use these new explicit APIs to make clear what you want: + +Buffer.from('hey', 'utf8') // convert from many types to a Buffer +Buffer.alloc(16) // create a zero-filled buffer (safe) +Buffer.allocUnsafe(16) // create an uninitialized buffer (potentially unsafe) +``` + +## api + +### Class Method: Buffer.from(array) + + +* `array` {Array} + +Allocates a new `Buffer` using an `array` of octets. + +```js +const buf = Buffer.from([0x62,0x75,0x66,0x66,0x65,0x72]); + // creates a new Buffer containing ASCII bytes + // ['b','u','f','f','e','r'] +``` + +A `TypeError` will be thrown if `array` is not an `Array`. + +### Class Method: Buffer.from(arrayBuffer[, byteOffset[, length]]) + + +* `arrayBuffer` {ArrayBuffer} The `.buffer` property of a `TypedArray` or + a `new ArrayBuffer()` +* `byteOffset` {Number} Default: `0` +* `length` {Number} Default: `arrayBuffer.length - byteOffset` + +When passed a reference to the `.buffer` property of a `TypedArray` instance, +the newly created `Buffer` will share the same allocated memory as the +TypedArray. + +```js +const arr = new Uint16Array(2); +arr[0] = 5000; +arr[1] = 4000; + +const buf = Buffer.from(arr.buffer); // shares the memory with arr; + +console.log(buf); + // Prints: + +// changing the TypedArray changes the Buffer also +arr[1] = 6000; + +console.log(buf); + // Prints: +``` + +The optional `byteOffset` and `length` arguments specify a memory range within +the `arrayBuffer` that will be shared by the `Buffer`. + +```js +const ab = new ArrayBuffer(10); +const buf = Buffer.from(ab, 0, 2); +console.log(buf.length); + // Prints: 2 +``` + +A `TypeError` will be thrown if `arrayBuffer` is not an `ArrayBuffer`. 
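The two `Buffer.from` forms documented above differ in a way that is easy to miss: passing a TypedArray itself copies its element values (truncated to octets), while passing its `.buffer` shares the underlying memory. A small sketch of that contrast, added for illustration (the byte values shown assume the usual little-endian layout and follow the upstream Node.js docs example):

```js
const Buffer = require('safe-buffer').Buffer;

const arr = new Uint16Array(2);
arr[0] = 5000;
arr[1] = 4000;

const copied = Buffer.from(arr);        // copies element values, truncated to one byte each
const shared = Buffer.from(arr.buffer); // shares the underlying ArrayBuffer with arr

arr[1] = 6000;

console.log(copied); // <Buffer 88 a0>       (unaffected by the later write to arr)
console.log(shared); // <Buffer 88 13 70 17> (reflects arr[1] = 6000, little-endian)
```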
+ +### Class Method: Buffer.from(buffer) + + +* `buffer` {Buffer} + +Copies the passed `buffer` data onto a new `Buffer` instance. + +```js +const buf1 = Buffer.from('buffer'); +const buf2 = Buffer.from(buf1); + +buf1[0] = 0x61; +console.log(buf1.toString()); + // 'auffer' +console.log(buf2.toString()); + // 'buffer' (copy is not changed) +``` + +A `TypeError` will be thrown if `buffer` is not a `Buffer`. + +### Class Method: Buffer.from(str[, encoding]) + + +* `str` {String} String to encode. +* `encoding` {String} Encoding to use, Default: `'utf8'` + +Creates a new `Buffer` containing the given JavaScript string `str`. If +provided, the `encoding` parameter identifies the character encoding. +If not provided, `encoding` defaults to `'utf8'`. + +```js +const buf1 = Buffer.from('this is a tést'); +console.log(buf1.toString()); + // prints: this is a tést +console.log(buf1.toString('ascii')); + // prints: this is a tC)st + +const buf2 = Buffer.from('7468697320697320612074c3a97374', 'hex'); +console.log(buf2.toString()); + // prints: this is a tést +``` + +A `TypeError` will be thrown if `str` is not a string. + +### Class Method: Buffer.alloc(size[, fill[, encoding]]) + + +* `size` {Number} +* `fill` {Value} Default: `undefined` +* `encoding` {String} Default: `utf8` + +Allocates a new `Buffer` of `size` bytes. If `fill` is `undefined`, the +`Buffer` will be *zero-filled*. + +```js +const buf = Buffer.alloc(5); +console.log(buf); + // +``` + +The `size` must be less than or equal to the value of +`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is +`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will +be created if a `size` less than or equal to 0 is specified. + +If `fill` is specified, the allocated `Buffer` will be initialized by calling +`buf.fill(fill)`. See [`buf.fill()`][] for more information. + +```js +const buf = Buffer.alloc(5, 'a'); +console.log(buf); + // +``` + +If both `fill` and `encoding` are specified, the allocated `Buffer` will be +initialized by calling `buf.fill(fill, encoding)`. For example: + +```js +const buf = Buffer.alloc(11, 'aGVsbG8gd29ybGQ=', 'base64'); +console.log(buf); + // +``` + +Calling `Buffer.alloc(size)` can be significantly slower than the alternative +`Buffer.allocUnsafe(size)` but ensures that the newly created `Buffer` instance +contents will *never contain sensitive data*. + +A `TypeError` will be thrown if `size` is not a number. + +### Class Method: Buffer.allocUnsafe(size) + + +* `size` {Number} + +Allocates a new *non-zero-filled* `Buffer` of `size` bytes. The `size` must +be less than or equal to the value of `require('buffer').kMaxLength` (on 64-bit +architectures, `kMaxLength` is `(2^31)-1`). Otherwise, a [`RangeError`][] is +thrown. A zero-length Buffer will be created if a `size` less than or equal to +0 is specified. + +The underlying memory for `Buffer` instances created in this way is *not +initialized*. The contents of the newly created `Buffer` are unknown and +*may contain sensitive data*. Use [`buf.fill(0)`][] to initialize such +`Buffer` instances to zeroes. + +```js +const buf = Buffer.allocUnsafe(5); +console.log(buf); + // + // (octets will be different, every time) +buf.fill(0); +console.log(buf); + // +``` + +A `TypeError` will be thrown if `size` is not a number. 
+ +Note that the `Buffer` module pre-allocates an internal `Buffer` instance of +size `Buffer.poolSize` that is used as a pool for the fast allocation of new +`Buffer` instances created using `Buffer.allocUnsafe(size)` (and the deprecated +`new Buffer(size)` constructor) only when `size` is less than or equal to +`Buffer.poolSize >> 1` (floor of `Buffer.poolSize` divided by two). The default +value of `Buffer.poolSize` is `8192` but can be modified. + +Use of this pre-allocated internal memory pool is a key difference between +calling `Buffer.alloc(size, fill)` vs. `Buffer.allocUnsafe(size).fill(fill)`. +Specifically, `Buffer.alloc(size, fill)` will *never* use the internal Buffer +pool, while `Buffer.allocUnsafe(size).fill(fill)` *will* use the internal +Buffer pool if `size` is less than or equal to half `Buffer.poolSize`. The +difference is subtle but can be important when an application requires the +additional performance that `Buffer.allocUnsafe(size)` provides. + +### Class Method: Buffer.allocUnsafeSlow(size) + + +* `size` {Number} + +Allocates a new *non-zero-filled* and non-pooled `Buffer` of `size` bytes. The +`size` must be less than or equal to the value of +`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is +`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will +be created if a `size` less than or equal to 0 is specified. + +The underlying memory for `Buffer` instances created in this way is *not +initialized*. The contents of the newly created `Buffer` are unknown and +*may contain sensitive data*. Use [`buf.fill(0)`][] to initialize such +`Buffer` instances to zeroes. + +When using `Buffer.allocUnsafe()` to allocate new `Buffer` instances, +allocations under 4KB are, by default, sliced from a single pre-allocated +`Buffer`. This allows applications to avoid the garbage collection overhead of +creating many individually allocated Buffers. This approach improves both +performance and memory usage by eliminating the need to track and cleanup as +many `Persistent` objects. + +However, in the case where a developer may need to retain a small chunk of +memory from a pool for an indeterminate amount of time, it may be appropriate +to create an un-pooled Buffer instance using `Buffer.allocUnsafeSlow()` then +copy out the relevant bits. + +```js +// need to keep around a few small chunks of memory +const store = []; + +socket.on('readable', () => { + const data = socket.read(); + // allocate for retained data + const sb = Buffer.allocUnsafeSlow(10); + // copy the data into the new allocation + data.copy(sb, 0, 0, 10); + store.push(sb); +}); +``` + +Use of `Buffer.allocUnsafeSlow()` should be used only as a last resort *after* +a developer has observed undue memory retention in their applications. + +A `TypeError` will be thrown if `size` is not a number. + +### All the Rest + +The rest of the `Buffer` API is exactly the same as in node.js. +[See the docs](https://nodejs.org/api/buffer.html). + + +## Related links + +- [Node.js issue: Buffer(number) is unsafe](https://github.com/nodejs/node/issues/4660) +- [Node.js Enhancement Proposal: Buffer.from/Buffer.alloc/Buffer.zalloc/Buffer() soft-deprecate](https://github.com/nodejs/node-eps/pull/4) + +## Why is `Buffer` unsafe? + +Today, the node.js `Buffer` constructor is overloaded to handle many different argument +types like `String`, `Array`, `Object`, `TypedArrayView` (`Uint8Array`, etc.), +`ArrayBuffer`, and also `Number`. 
+ +The API is optimized for convenience: you can throw any type at it, and it will try to do +what you want. + +Because the Buffer constructor is so powerful, you often see code like this: + +```js +// Convert UTF-8 strings to hex +function toHex (str) { + return new Buffer(str).toString('hex') +} +``` + +***But what happens if `toHex` is called with a `Number` argument?*** + +### Remote Memory Disclosure + +If an attacker can make your program call the `Buffer` constructor with a `Number` +argument, then they can make it allocate uninitialized memory from the node.js process. +This could potentially disclose TLS private keys, user data, or database passwords. + +When the `Buffer` constructor is passed a `Number` argument, it returns an +**UNINITIALIZED** block of memory of the specified `size`. When you create a `Buffer` like +this, you **MUST** overwrite the contents before returning it to the user. + +From the [node.js docs](https://nodejs.org/api/buffer.html#buffer_new_buffer_size): + +> `new Buffer(size)` +> +> - `size` Number +> +> The underlying memory for `Buffer` instances created in this way is not initialized. +> **The contents of a newly created `Buffer` are unknown and could contain sensitive +> data.** Use `buf.fill(0)` to initialize a Buffer to zeroes. + +(Emphasis our own.) + +Whenever the programmer intended to create an uninitialized `Buffer` you often see code +like this: + +```js +var buf = new Buffer(16) + +// Immediately overwrite the uninitialized buffer with data from another buffer +for (var i = 0; i < buf.length; i++) { + buf[i] = otherBuf[i] +} +``` + + +### Would this ever be a problem in real code? + +Yes. It's surprisingly common to forget to check the type of your variables in a +dynamically-typed language like JavaScript. + +Usually the consequences of assuming the wrong type is that your program crashes with an +uncaught exception. But the failure mode for forgetting to check the type of arguments to +the `Buffer` constructor is more catastrophic. + +Here's an example of a vulnerable service that takes a JSON payload and converts it to +hex: + +```js +// Take a JSON payload {str: "some string"} and convert it to hex +var server = http.createServer(function (req, res) { + var data = '' + req.setEncoding('utf8') + req.on('data', function (chunk) { + data += chunk + }) + req.on('end', function () { + var body = JSON.parse(data) + res.end(new Buffer(body.str).toString('hex')) + }) +}) + +server.listen(8080) +``` + +In this example, an http client just has to send: + +```json +{ + "str": 1000 +} +``` + +and it will get back 1,000 bytes of uninitialized memory from the server. + +This is a very serious bug. It's similar in severity to the +[the Heartbleed bug](http://heartbleed.com/) that allowed disclosure of OpenSSL process +memory by remote attackers. + + +### Which real-world packages were vulnerable? + +#### [`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht) + +[Mathias Buus](https://github.com/mafintosh) and I +([Feross Aboukhadijeh](http://feross.org/)) found this issue in one of our own packages, +[`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht). The bug would allow +anyone on the internet to send a series of messages to a user of `bittorrent-dht` and get +them to reveal 20 bytes at a time of uninitialized memory from the node.js process. + +Here's +[the commit](https://github.com/feross/bittorrent-dht/commit/6c7da04025d5633699800a99ec3fbadf70ad35b8) +that fixed it. 
We released a new fixed version, created a +[Node Security Project disclosure](https://nodesecurity.io/advisories/68), and deprecated all +vulnerable versions on npm so users will get a warning to upgrade to a newer version. + +#### [`ws`](https://www.npmjs.com/package/ws) + +That got us wondering if there were other vulnerable packages. Sure enough, within a short +period of time, we found the same issue in [`ws`](https://www.npmjs.com/package/ws), the +most popular WebSocket implementation in node.js. + +If certain APIs were called with `Number` parameters instead of `String` or `Buffer` as +expected, then uninitialized server memory would be disclosed to the remote peer. + +These were the vulnerable methods: + +```js +socket.send(number) +socket.ping(number) +socket.pong(number) +``` + +Here's a vulnerable socket server with some echo functionality: + +```js +server.on('connection', function (socket) { + socket.on('message', function (message) { + message = JSON.parse(message) + if (message.type === 'echo') { + socket.send(message.data) // send back the user's message + } + }) +}) +``` + +`socket.send(number)` called on the server, will disclose server memory. + +Here's [the release](https://github.com/websockets/ws/releases/tag/1.0.1) where the issue +was fixed, with a more detailed explanation. Props to +[Arnout Kazemier](https://github.com/3rd-Eden) for the quick fix. Here's the +[Node Security Project disclosure](https://nodesecurity.io/advisories/67). + + +### What's the solution? + +It's important that node.js offers a fast way to get memory otherwise performance-critical +applications would needlessly get a lot slower. + +But we need a better way to *signal our intent* as programmers. **When we want +uninitialized memory, we should request it explicitly.** + +Sensitive functionality should not be packed into a developer-friendly API that loosely +accepts many different types. This type of API encourages the lazy practice of passing +variables in without checking the type very carefully. + +#### A new API: `Buffer.allocUnsafe(number)` + +The functionality of creating buffers with uninitialized memory should be part of another +API. We propose `Buffer.allocUnsafe(number)`. This way, it's not part of an API that +frequently gets user input of all sorts of different types passed into it. + +```js +var buf = Buffer.allocUnsafe(16) // careful, uninitialized memory! + +// Immediately overwrite the uninitialized buffer with data from another buffer +for (var i = 0; i < buf.length; i++) { + buf[i] = otherBuf[i] +} +``` + + +### How do we fix node.js core? + +We sent [a PR to node.js core](https://github.com/nodejs/node/pull/4514) (merged as +`semver-major`) which defends against one case: + +```js +var str = 16 +new Buffer(str, 'utf8') +``` + +In this situation, it's implied that the programmer intended the first argument to be a +string, since they passed an encoding as a second argument. Today, node.js will allocate +uninitialized memory in the case of `new Buffer(number, encoding)`, which is probably not +what the programmer intended. + +But this is only a partial solution, since if the programmer does `new Buffer(variable)` +(without an `encoding` parameter) there's no way to know what they intended. If `variable` +is sometimes a number, then uninitialized memory will sometimes be returned. + +### What's the real long-term fix? + +We could deprecate and remove `new Buffer(number)` and use `Buffer.allocUnsafe(number)` when +we need uninitialized memory. 
But that would break 1000s of packages. + +~~We believe the best solution is to:~~ + +~~1. Change `new Buffer(number)` to return safe, zeroed-out memory~~ + +~~2. Create a new API for creating uninitialized Buffers. We propose: `Buffer.allocUnsafe(number)`~~ + +#### Update + +We now support adding three new APIs: + +- `Buffer.from(value)` - convert from any type to a buffer +- `Buffer.alloc(size)` - create a zero-filled buffer +- `Buffer.allocUnsafe(size)` - create an uninitialized buffer with given size + +This solves the core problem that affected `ws` and `bittorrent-dht` which is +`Buffer(variable)` getting tricked into taking a number argument. + +This way, existing code continues working and the impact on the npm ecosystem will be +minimal. Over time, npm maintainers can migrate performance-critical code to use +`Buffer.allocUnsafe(number)` instead of `new Buffer(number)`. + + +### Conclusion + +We think there's a serious design issue with the `Buffer` API as it exists today. It +promotes insecure software by putting high-risk functionality into a convenient API +with friendly "developer ergonomics". + +This wasn't merely a theoretical exercise because we found the issue in some of the +most popular npm packages. + +Fortunately, there's an easy fix that can be applied today. Use `safe-buffer` in place of +`buffer`. + +```js +var Buffer = require('safe-buffer').Buffer +``` + +Eventually, we hope that node.js core can switch to this new, safer behavior. We believe +the impact on the ecosystem would be minimal since it's not a breaking change. +Well-maintained, popular packages would be updated to use `Buffer.alloc` quickly, while +older, insecure packages would magically become safe from this attack vector. + + +## links + +- [Node.js PR: buffer: throw if both length and enc are passed](https://github.com/nodejs/node/pull/4514) +- [Node Security Project disclosure for `ws`](https://nodesecurity.io/advisories/67) +- [Node Security Project disclosure for`bittorrent-dht`](https://nodesecurity.io/advisories/68) + + +## credit + +The original issues in `bittorrent-dht` +([disclosure](https://nodesecurity.io/advisories/68)) and +`ws` ([disclosure](https://nodesecurity.io/advisories/67)) were discovered by +[Mathias Buus](https://github.com/mafintosh) and +[Feross Aboukhadijeh](http://feross.org/). + +Thanks to [Adam Baldwin](https://github.com/evilpacket) for helping disclose these issues +and for his work running the [Node Security Project](https://nodesecurity.io/). + +Thanks to [John Hiesey](https://github.com/jhiesey) for proofreading this README and +auditing the code. + + +## license + +MIT. 
Copyright (C) [Feross Aboukhadijeh](http://feross.org) diff --git a/node_modules/string_decoder/node_modules/safe-buffer/index.d.ts b/node_modules/string_decoder/node_modules/safe-buffer/index.d.ts new file mode 100644 index 0000000..e9fed80 --- /dev/null +++ b/node_modules/string_decoder/node_modules/safe-buffer/index.d.ts @@ -0,0 +1,187 @@ +declare module "safe-buffer" { + export class Buffer { + length: number + write(string: string, offset?: number, length?: number, encoding?: string): number; + toString(encoding?: string, start?: number, end?: number): string; + toJSON(): { type: 'Buffer', data: any[] }; + equals(otherBuffer: Buffer): boolean; + compare(otherBuffer: Buffer, targetStart?: number, targetEnd?: number, sourceStart?: number, sourceEnd?: number): number; + copy(targetBuffer: Buffer, targetStart?: number, sourceStart?: number, sourceEnd?: number): number; + slice(start?: number, end?: number): Buffer; + writeUIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeUIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + readUIntLE(offset: number, byteLength: number, noAssert?: boolean): number; + readUIntBE(offset: number, byteLength: number, noAssert?: boolean): number; + readIntLE(offset: number, byteLength: number, noAssert?: boolean): number; + readIntBE(offset: number, byteLength: number, noAssert?: boolean): number; + readUInt8(offset: number, noAssert?: boolean): number; + readUInt16LE(offset: number, noAssert?: boolean): number; + readUInt16BE(offset: number, noAssert?: boolean): number; + readUInt32LE(offset: number, noAssert?: boolean): number; + readUInt32BE(offset: number, noAssert?: boolean): number; + readInt8(offset: number, noAssert?: boolean): number; + readInt16LE(offset: number, noAssert?: boolean): number; + readInt16BE(offset: number, noAssert?: boolean): number; + readInt32LE(offset: number, noAssert?: boolean): number; + readInt32BE(offset: number, noAssert?: boolean): number; + readFloatLE(offset: number, noAssert?: boolean): number; + readFloatBE(offset: number, noAssert?: boolean): number; + readDoubleLE(offset: number, noAssert?: boolean): number; + readDoubleBE(offset: number, noAssert?: boolean): number; + swap16(): Buffer; + swap32(): Buffer; + swap64(): Buffer; + writeUInt8(value: number, offset: number, noAssert?: boolean): number; + writeUInt16LE(value: number, offset: number, noAssert?: boolean): number; + writeUInt16BE(value: number, offset: number, noAssert?: boolean): number; + writeUInt32LE(value: number, offset: number, noAssert?: boolean): number; + writeUInt32BE(value: number, offset: number, noAssert?: boolean): number; + writeInt8(value: number, offset: number, noAssert?: boolean): number; + writeInt16LE(value: number, offset: number, noAssert?: boolean): number; + writeInt16BE(value: number, offset: number, noAssert?: boolean): number; + writeInt32LE(value: number, offset: number, noAssert?: boolean): number; + writeInt32BE(value: number, offset: number, noAssert?: boolean): number; + writeFloatLE(value: number, offset: number, noAssert?: boolean): number; + writeFloatBE(value: number, offset: number, noAssert?: boolean): number; + writeDoubleLE(value: number, offset: number, noAssert?: boolean): number; + writeDoubleBE(value: number, offset: number, noAssert?: boolean): number; + 
fill(value: any, offset?: number, end?: number): this; + indexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number; + lastIndexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number; + includes(value: string | number | Buffer, byteOffset?: number, encoding?: string): boolean; + + /** + * Allocates a new buffer containing the given {str}. + * + * @param str String to store in buffer. + * @param encoding encoding to use, optional. Default is 'utf8' + */ + constructor (str: string, encoding?: string); + /** + * Allocates a new buffer of {size} octets. + * + * @param size count of octets to allocate. + */ + constructor (size: number); + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + */ + constructor (array: Uint8Array); + /** + * Produces a Buffer backed by the same allocated memory as + * the given {ArrayBuffer}. + * + * + * @param arrayBuffer The ArrayBuffer with which to share memory. + */ + constructor (arrayBuffer: ArrayBuffer); + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + */ + constructor (array: any[]); + /** + * Copies the passed {buffer} data onto a new {Buffer} instance. + * + * @param buffer The buffer to copy. + */ + constructor (buffer: Buffer); + prototype: Buffer; + /** + * Allocates a new Buffer using an {array} of octets. + * + * @param array + */ + static from(array: any[]): Buffer; + /** + * When passed a reference to the .buffer property of a TypedArray instance, + * the newly created Buffer will share the same allocated memory as the TypedArray. + * The optional {byteOffset} and {length} arguments specify a memory range + * within the {arrayBuffer} that will be shared by the Buffer. + * + * @param arrayBuffer The .buffer property of a TypedArray or a new ArrayBuffer() + * @param byteOffset + * @param length + */ + static from(arrayBuffer: ArrayBuffer, byteOffset?: number, length?: number): Buffer; + /** + * Copies the passed {buffer} data onto a new Buffer instance. + * + * @param buffer + */ + static from(buffer: Buffer): Buffer; + /** + * Creates a new Buffer containing the given JavaScript string {str}. + * If provided, the {encoding} parameter identifies the character encoding. + * If not provided, {encoding} defaults to 'utf8'. + * + * @param str + */ + static from(str: string, encoding?: string): Buffer; + /** + * Returns true if {obj} is a Buffer + * + * @param obj object to test. + */ + static isBuffer(obj: any): obj is Buffer; + /** + * Returns true if {encoding} is a valid encoding argument. + * Valid string encodings in Node 0.12: 'ascii'|'utf8'|'utf16le'|'ucs2'(alias of 'utf16le')|'base64'|'binary'(deprecated)|'hex' + * + * @param encoding string to test. + */ + static isEncoding(encoding: string): boolean; + /** + * Gives the actual byte length of a string. encoding defaults to 'utf8'. + * This is not the same as String.prototype.length since that returns the number of characters in a string. + * + * @param string string to test. + * @param encoding encoding used to evaluate (defaults to 'utf8') + */ + static byteLength(string: string, encoding?: string): number; + /** + * Returns a buffer which is the result of concatenating all the buffers in the list together. + * + * If the list has no items, or if the totalLength is 0, then it returns a zero-length buffer. + * If the list has exactly one item, then the first item of the list is returned. 
+ * If the list has more than one item, then a new Buffer is created. + * + * @param list An array of Buffer objects to concatenate + * @param totalLength Total length of the buffers when concatenated. + * If totalLength is not provided, it is read from the buffers in the list. However, this adds an additional loop to the function, so it is faster to provide the length explicitly. + */ + static concat(list: Buffer[], totalLength?: number): Buffer; + /** + * The same as buf1.compare(buf2). + */ + static compare(buf1: Buffer, buf2: Buffer): number; + /** + * Allocates a new buffer of {size} octets. + * + * @param size count of octets to allocate. + * @param fill if specified, buffer will be initialized by calling buf.fill(fill). + * If parameter is omitted, buffer will be filled with zeros. + * @param encoding encoding used for call to buf.fill while initalizing + */ + static alloc(size: number, fill?: string | Buffer | number, encoding?: string): Buffer; + /** + * Allocates a new buffer of {size} octets, leaving memory not initialized, so the contents + * of the newly created Buffer are unknown and may contain sensitive data. + * + * @param size count of octets to allocate + */ + static allocUnsafe(size: number): Buffer; + /** + * Allocates a new non-pooled buffer of {size} octets, leaving memory not initialized, so the contents + * of the newly created Buffer are unknown and may contain sensitive data. + * + * @param size count of octets to allocate + */ + static allocUnsafeSlow(size: number): Buffer; + } +} \ No newline at end of file diff --git a/node_modules/string_decoder/node_modules/safe-buffer/index.js b/node_modules/string_decoder/node_modules/safe-buffer/index.js new file mode 100644 index 0000000..f8d3ec9 --- /dev/null +++ b/node_modules/string_decoder/node_modules/safe-buffer/index.js @@ -0,0 +1,65 @@ +/*! safe-buffer. MIT License. 
Feross Aboukhadijeh */ +/* eslint-disable node/no-deprecated-api */ +var buffer = require('buffer') +var Buffer = buffer.Buffer + +// alternative to using Object.keys for old browsers +function copyProps (src, dst) { + for (var key in src) { + dst[key] = src[key] + } +} +if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) { + module.exports = buffer +} else { + // Copy properties from require('buffer') + copyProps(buffer, exports) + exports.Buffer = SafeBuffer +} + +function SafeBuffer (arg, encodingOrOffset, length) { + return Buffer(arg, encodingOrOffset, length) +} + +SafeBuffer.prototype = Object.create(Buffer.prototype) + +// Copy static methods from Buffer +copyProps(Buffer, SafeBuffer) + +SafeBuffer.from = function (arg, encodingOrOffset, length) { + if (typeof arg === 'number') { + throw new TypeError('Argument must not be a number') + } + return Buffer(arg, encodingOrOffset, length) +} + +SafeBuffer.alloc = function (size, fill, encoding) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + var buf = Buffer(size) + if (fill !== undefined) { + if (typeof encoding === 'string') { + buf.fill(fill, encoding) + } else { + buf.fill(fill) + } + } else { + buf.fill(0) + } + return buf +} + +SafeBuffer.allocUnsafe = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return Buffer(size) +} + +SafeBuffer.allocUnsafeSlow = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return buffer.SlowBuffer(size) +} diff --git a/node_modules/string_decoder/node_modules/safe-buffer/package.json b/node_modules/string_decoder/node_modules/safe-buffer/package.json new file mode 100644 index 0000000..f2869e2 --- /dev/null +++ b/node_modules/string_decoder/node_modules/safe-buffer/package.json @@ -0,0 +1,51 @@ +{ + "name": "safe-buffer", + "description": "Safer Node.js Buffer API", + "version": "5.2.1", + "author": { + "name": "Feross Aboukhadijeh", + "email": "feross@feross.org", + "url": "https://feross.org" + }, + "bugs": { + "url": "https://github.com/feross/safe-buffer/issues" + }, + "devDependencies": { + "standard": "*", + "tape": "^5.0.0" + }, + "homepage": "https://github.com/feross/safe-buffer", + "keywords": [ + "buffer", + "buffer allocate", + "node security", + "safe", + "safe-buffer", + "security", + "uninitialized" + ], + "license": "MIT", + "main": "index.js", + "types": "index.d.ts", + "repository": { + "type": "git", + "url": "git://github.com/feross/safe-buffer.git" + }, + "scripts": { + "test": "standard && tape test/*.js" + }, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] +} diff --git a/node_modules/string_decoder/package.json b/node_modules/string_decoder/package.json new file mode 100644 index 0000000..b2bb141 --- /dev/null +++ b/node_modules/string_decoder/package.json @@ -0,0 +1,34 @@ +{ + "name": "string_decoder", + "version": "1.3.0", + "description": "The string_decoder module from Node core", + "main": "lib/string_decoder.js", + "files": [ + "lib" + ], + "dependencies": { + "safe-buffer": "~5.2.0" + }, + "devDependencies": { + "babel-polyfill": "^6.23.0", + "core-util-is": "^1.0.2", + "inherits": "^2.0.3", + "tap": "~0.4.8" + }, + "scripts": { + "test": "tap test/parallel/*.js && node test/verify-dependencies", 
+ "ci": "tap test/parallel/*.js test/ours/*.js --tap | tee test.tap && node test/verify-dependencies.js" + }, + "repository": { + "type": "git", + "url": "git://github.com/nodejs/string_decoder.git" + }, + "homepage": "https://github.com/nodejs/string_decoder", + "keywords": [ + "string", + "decoder", + "browser", + "browserify" + ], + "license": "MIT" +} diff --git a/node_modules/superagent/HISTORY.md b/node_modules/superagent/HISTORY.md new file mode 100644 index 0000000..0eef03c --- /dev/null +++ b/node_modules/superagent/HISTORY.md @@ -0,0 +1,692 @@ +# This HISTORY log is deprecated + +Please see [GitHub releases page](https://github.com/visionmedia/superagent/releases) for the current changelog. + +# 4.1.0 (2018-12-26) + + * `.connect()` IP/DNS override option (Kornel) + * `.trustLocalhost()` option for allowing broken HTTPS on `localhost` + * `.abort()` used with promises rejects the promise. + +# 4.0.0 (2018-11-17) + +## Breaking changes + +* Node.js v4 has reached it's end of life, so we no longer support it. It's v6+ or later. We recommend Node.js 10. +* We now use ES6 in the browser code, too. + * If you're using Browserify or Webpack to package code for Internet Explorer, you will also have to use Babel. + * The pre-built node_modules/superagent.js is still ES5-compatible. +* `.end(…)` returns `undefined` instead of the request. If you need the request object after calling `.end()` (and you probably don't), save it in a variable and call `request.end(…)`. Consider not using `.end()` at all, and migrating to promises by calling `.then()` instead. +* In Node, responses with unknown MIME type are buffered by default. To get old behavior, if you use custom _unbuffered_ parsers, add `.buffer(false)` to requests or set `superagent.buffer[yourMimeType] = false`. +* Invalid uses of `.pipe()` throw. + + +## Minor changes + +* Throw if `req.abort().end()` is called +* Throw if using unsupported mix of send and field +* Reject `.end()` promise on all error events (Kornel Lesiński) +* Set `https.servername` from the `Host` header (Kornel Lesiński) +* Leave backticks unencoded in query strings where possible (Ethan Resnick) +* Update node-mime to 2.x (Alexey Kucherenko) +* Allow default buffer settings based on response-type (shrey) +* `response.buffered` is more accurate. + +# 3.8.3 (2018-04-29) + +* Add flags for 201 & 422 responses (Nikhil Fadnis) +* Emit progress event while uploading Node `Buffer` via send method (Sergey Akhalkov) +* Fixed setting correct cookies for redirects (Damien Clark) +* Replace .catch with ['catch'] for IE9 Support (Miguel Stevens) + +# 3.8.2 (2017-12-09) + +* Fixed handling of exceptions thrown from callbacks +* Stricter matching of `+json` MIME types. + +# 3.8.1 (2017-11-08) + +* Clear authorization header on cross-domain redirect + +# 3.8.0 + +* Added support for "globally" defined headers and event handlers via `superagent.agent()`. It now remembers default settings for all its requests. +* Added optional callback to `.retry()` (Alexander Murphy) +* Unified auth args handling in node/browser (Edmundo Alvarez) +* Fixed error handling in zlib pipes (Kornel) +* Documented that 3xx status codes are errors (Mickey Reiss) + +# 3.7.0 (2017-10-17) + +* Limit maximum response size. 
Prevents zip bombs (Kornel) +* Catch and pass along errors in `.ok()` callback (Jeremy Ruppel) +* Fixed parsing of XHR headers without a newline (nsf) + +# 3.6.2 (2017-10-02) + +* Upgrade MIME type dependency to a newer, secure version +* Recognize PDF MIME as binary +* Fix for error in subsequent require() calls (Steven de Salas) + +# 3.6.0 (2017-08-20) + +* Support disabling TCP_NODELAY option ([#1240](https://github.com/visionmedia/superagent/issues/1240)) (xiamengyu) +* Send payload in query string for GET and HEAD shorthand API (Peter Lyons) +* Support passphrase with pfx certificate (Paul Westerdale (ABRS Limited)) +* Documentation improvements (Peter Lyons) +* Fixed duplicated query string params ([#1200](https://github.com/visionmedia/superagent/issues/1200)) (Kornel) + +# 3.5.1 (2017-03-18) + +* Allow crossDomain errors to be retried ([#1194](https://github.com/visionmedia/superagent/issues/1194)) (Michael Olson) +* Read responseType property from the correct object (Julien Dupouy) +* Check for ownProperty before adding header (Lucas Vieira) + +# 3.5.0 (2017-02-23) + +* Add errno to distinguish between request timeout and body download timeout ([#1184](https://github.com/visionmedia/superagent/issues/1184)) (Kornel Lesiński) +* Warn about bogus timeout options ([#1185](https://github.com/visionmedia/superagent/issues/1185)) (Kornel Lesiński) + +# 3.4.4 (2017-02-17) + +* Treat videos like images (Kornel Lesiński) +* Avoid renaming module (Kornel Lesiński) + +# 3.4.3 (2017-02-14) + +* Fixed being able to define own parsers when their mime type starts with `text/` (Damien Clark) +* `withCredentials(false)` (Andy Woods) +* Use `formData.on` instead of `.once` (Kornel Lesiński) +* Ignore `attach("file",null)` (Kornel Lesiński) + +# 3.4.1 (2017-01-29) + +* Allow `retry()` and `retry(0)` (Alexander Pope) +* Allow optional body/data in DELETE requests (Alpha Shuro) +* Fixed query string on retried requests (Kornel Lesiński) + +# 3.4.0 (2017-01-25) + +* New `.retry(n)` method and `err.retries` (Alexander Pope) +* Docs for HTTPS request (Jun Wan Goh) + +# 3.3.1 (2016-12-17) + +* Fixed "double callback bug" warning on timeouts of gzipped responses + +# 3.3.0 (2016-12-14) + +* Added `.ok(callback)` that allows customizing which responses are errors (Kornel Lesiński) +* Added `.responseType()` to Node version (Kornel Lesiński) +* Added `.parse()` to browser version (jakepearson) +* Fixed parse error when using `responseType('blob')` (Kornel Lesiński) + +# 3.2.0 (2016-12-11) + +* Added `.timeout({response:ms})`, which allows limiting maximum response time independently from total download time (Kornel Lesiński) +* Added warnings when `.end()` is called more than once (Kornel Lesiński) +* Added `response.links` to browser version (Lukas Eipert) +* `btoa` is no longer required in IE9 (Kornel Lesiński) +* Fixed `.sortQuery()` on URLs without query strings (Kornel Lesiński) +* Refactored common response code into `ResponseBase` (Lukas Eipert) + +# 3.1.0 (2016-11-28) + +* Added `.sortQuery()` (vicanso) +* Added support for arrays and bools in `.field()` (Kornel Lesiński) +* Made `superagent.Request` subclassable without need to patch all static methods (Kornel Lesiński) + +# 3.0.0 (2016-11-19) + +* Dropped support for Node 0.x. Please upgrade to at least Node 4. +* Dropped support for componentjs (Damien Caselli) +* Removed deprecated `.part()`/`superagent.Part` APIs. +* Removed unreliable `.body` property on internal response object used by unbuffered parsers. 
+ Note: the normal `response.body` is unaffected. +* Multiple `.send()` calls mixing `Buffer`/`Blob` and JSON data are not possible and will now throw instead of messing up the data. +* Improved `.send()` data object type check (Fernando Mendes) +* Added common prototype for Node and browser versions (Andreas Helmberger) +* Added `http+unix:` schema to support Unix sockets (Yuki KAN) +* Added full `attach` options parameter in the Node version (Lapo Luchini) +* Added `pfx` TLS option with new `pfx()` method. (Reid Burke) +* Internally changed `.on` to `.once` to prevent possible memory leaks (Matt Blair) +* Made all errors reported as an event (Kornel Lesiński) + +# 2.3.0 (2016-09-20) + +* Enabled `.field()` to handle objects (Affan Shahid) +* Added authentication with client certificates (terusus) +* Added `.catch()` for more Promise-like interface (Maxim Samoilov, Kornel Lesiński) +* Silenced errors from incomplete gzip streams for compatibility with web browsers (Kornel Lesiński) +* Fixed `event.direction` in uploads (Kornel Lesiński) +* Fixed returned value of overwritten response object's `on()` method (Juan Dopazo) + +# 2.2.0 (2016-08-13) + +* Added `timedout` property to node Request instance (Alexander Pope) +* Unified `null` querystring values in node and browser environments. (George Chung) + +# 2.1.0 (2016-06-14) + +* Refactored async parsers. Now the `end` callback waits for async parsers to finish (Kornel Lesiński) +* Errors thrown in `.end()` callback don't cause the callback to be called twice (Kornel Lesiński) +* Added `headers` to `toJSON()` (Tao) + +# 2.0.0 (2016-05-29) + + +## Breaking changes + +Breaking changes are in rarely used functionality, so we hope upgrade will be smooth for most users. + +* Browser: The `.parse()` method has been renamed to `.serialize()` for consistency with NodeJS version. +* Browser: Query string keys without a value used to be parsed as `'undefined'`, now their value is `''` (empty string) (shura, Kornel Lesiński). +* NodeJS: The `redirect` event is called after new query string and headers have been set and is allowed to override the request URL (Kornel Lesiński) +* `.then()` returns a real `Promise`. Note that use of superagent with promises now requires a global `Promise` object. + If you target Internet Explorer or Node 0.10, you'll need `require('es6-promise').polyfill()` or similar. +* Upgraded all dependencies (Peter Lyons) +* Renamed properties documented as `@api private` to have `_prefixed` names (Kornel Lesiński) + + +## Probably not breaking changes: + +* Extracted common functions to request-base (Peter Lyons) +* Fixed race condition in pipe tests (Peter Lyons) +* Handle `FormData` error events (scriptype) +* Fixed wrong jsdoc of Request#attach (George Chung) +* Updated and improved tests (Peter Lyons) +* `request.head()` supports `.redirects(5)` call (Kornel Lesiński) +* `response` event is also emitted when using `.pipe()` + +# 1.8.2 (2016-03-20) + +* Fixed handling of HTTP status 204 with content-encoding: gzip (Andrew Shelton) +* Handling of FormData error events (scriptype) +* Fixed parsing of `vnd+json` MIME types (Kornel Lesiński) +* Aliased browser implementation of `.parse()` as `.serialize()` for forward compatibility + +# 1.8.1 (2016-03-14) + +* Fixed form-data incompatibility with IE9 + +# 1.8.0 (2016-03-09) + +* Extracted common code into request-base class (Peter Lyons) + * It does not affect the public API, but please let us know if you notice any plugins/subclasses breaking! 
+* Added option `{type:'auto'}` to `auth` method, which enables browser-native auth types (Jungle, Askar Yusupov) +* Added `responseType()` to set XHR `responseType` (chris) +* Switched to form-data for browserify-compatible `FormData` (Peter Lyons) +* Added `statusCode` to error response when JSON response is malformed (mattdell) +* Prevented TCP port conflicts in all tests (Peter Lyons) +* Updated form-data dependency + +# 1.7.2 (2016-01-26) + +* Fix case-sensitivity of header fields introduced by [`a4ddd6a`](https://github.com/visionmedia/superagent/commit/a4ddd6a). (Edward J. Jinotti) +* bump extend dependency, as former version did not contain any license information (Lukas Eipert) + +# 1.7.1 (2016-01-21) + +* Fixed a conflict with express when using npm 3.x (Glenn) +* Fixed redirects after a multipart/form-data POST request (cyclist2) + +# 1.7.0 (2016-01-18) + +* When attaching files, read default filename from the `File` object (JD Isaacks) +* Add `direction` property to `progress` events (Joseph Dykstra) +* Update component-emitter & formidable (Kornel Lesiński) +* Don't re-encode query string needlessly (Ruben Verborgh) +* ensure querystring is appended when doing `stream.pipe(request)` (Keith Grennan) +* change set header function, not call `this.request()` until call `this.end()` (vicanso) +* Add no-op `withCredentials` to Node API (markdalgleish) +* fix `delete` breaking on ie8 (kenjiokabe) +* Don't let request error override responses (Clay Reimann) +* Increased number of tests shared between node and client (Kornel Lesiński) + +# 1.6.0/1.6.1 (2015-12-09) + +* avoid misleading CORS error message +* added 'progress' event on file/form upload in Node (Olivier Lalonde) +* return raw response if the response parsing fails (Rei Colina) +* parse content-types ending with `+json` as JSON (Eiryyy) +* fix to avoid throwing errors on aborted requests (gjurgens) +* retain cookies on redirect when hosts match (Tom Conroy) +* added Bower manifest (Johnny Freeman) +* upgrade to latest cookiejar (Andy Burke) + +# 1.5.0 (2015-11-30) + +* encode array values as `key=1&key=2&key=3` etc... (aalpern, Davis Kim) +* avoid the error which is omitted from 'socket hang up' +* faster JSON parsing, handling of zlib errors (jbellenger) +* fix IE11 sends 'undefined' string if data was undefined (Vadim Goncharov) +* alias `del()` method as `delete()` (Aaron Krause) +* revert Request#parse since it was actually Response#parse + +# 1.4.0 (2015-09-14) + +* add Request#parse method to client library +* add missing statusCode in client response +* don't apply JSON heuristics if a valid parser is found +* fix detection of root object for webworkers + +# 1.3.0 (2015-08-05) + +* fix incorrect content-length of data set to buffer +* serialize request data takes into account charsets +* add basic promise support via a `then` function + +# 1.2.0 (2015-04-13) + +* add progress events to downlodas +* make usable in webworkers +* add support for 308 redirects +* update node-form-data dependency +* update to work in react native +* update node-mime dependency + +# 1.1.0 (2015-03-13) + +* Fix responseType checks without xhr2 and ie9 tests (rase-) +* errors have .status and .response fields if applicable (defunctzombie) +* fix end callback called before saving cookies (rase-) + +# 1.0.0 / 2015-03-08 + +* All non-200 responses are treated as errors now. (The callback is called with an error when the response has a status < 200 or >= 300 now. 
In previous versions this would not have raised an error and the client would have to check the `res` object. See [#283](https://github.com/visionmedia/superagent/issues/283). +* keep timeouts intact across redirects (hopkinsth) +* handle falsy json values (themaarten) +* fire response events in browser version (Schoonology) +* getXHR exported in client version (KidsKilla) +* remove arity check on `.end()` callbacks (defunctzombie) +* avoid setting content-type for host objects (rexxars) +* don't index array strings in querystring (travisjeffery) +* fix pipe() with redirects (cyrilis) +* add xhr2 file download (vstirbu) +* set default response type to text/plain if not specified (warrenseine) + +# 0.21.0 / 2014-11-11 + +* Trim text before parsing json (gjohnson) +* Update tests to express 4 (gaastonsr) +* Prevent double callback when error is thrown (pgn-vole) +* Fix missing clearTimeout (nickdima) +* Update debug (TooTallNate) + +# 0.20.0 / 2014-10-02 + +* Add toJSON() to request and response instances. (yields) +* Prevent HEAD requests from getting parsed. (gjohnson) +* Update debug. (TooTallNate) + +# 0.19.1 / 2014-09-24 + +* Fix basic auth issue when password is falsey value. (gjohnson) + +# 0.19.0 / 2014-09-24 + +* Add unset() to browser. (shesek) +* Prefer XHR over ActiveX. (omeid) +* Catch parse errors. (jacwright) +* Update qs dependency. (wercker) +* Add use() to node. (Financial-Times) +* Add response text to errors. (yields) +* Don't send empty cookie headers. (undoZen) +* Don't parse empty response bodies. (DveMac) +* Use hostname when setting cookie host. (prasunsultania) + +# 0.18.2 / 2014-07-12 + +* Handle parser errors. (kof) +* Ensure not to use default parsers when there is a user defined one. (kof) + +# 0.18.1 / 2014-07-05 + +* Upgrade cookiejar dependency (juanpin) +* Support image mime types (nebulade) +* Make .agent chainable (kof) +* Upgrade debug (TooTallNate) +* Fix docs (aheckmann) + +# 0.18.0 / 2014-04-29 + +* Use "form-data" module for the multipart/form-data implementation. (TooTallNate) +* Add basic `field()` and `attach()` functions for HTML5 FormData. (TooTallNate) +* Deprecate `part()`. (TooTallNate) +* Set default user-agent header. (bevacqua) +* Add `unset()` method for removing headers. (bevacqua) +* Update cookiejar. (missinglink) +* Fix response error formatting. (shesek) + +# 0.17.0 / 2014-03-06 + +* supply uri malformed error to the callback (yields) +* add request event (yields) +* allow simple auth (yields) +* add request event (yields) +* switch to component/reduce (visionmedia) +* fix part content-disposition (mscdex) +* add browser testing via zuul (defunctzombie) +* adds request.use() (johntron) + +# 0.16.0 / 2014-01-07 + +* remove support for 0.6 (superjoe30) +* fix CORS withCredentials (wejendorp) +* add "test" script (superjoe30) +* add request .accept() method (nickl-) +* add xml to mime types mappings (nickl-) +* fix parse body error on HEAD requests (gjohnson) +* fix documentation typos (matteofigus) +* fix content-type + charset (bengourley) +* fix null values on query parameters (cristiandouce) + +# 0.15.7 / 2013-10-19 + +* pin should.js to 1.3.0 due to breaking change in 2.0.x +* fix browserify regression + +# 0.15.5 / 2013-10-09 + +* add browser field to support browserify +* fix .field() value number support + +# 0.15.4 / 2013-07-09 + +* node: add a Request#agent() function to set the http Agent to use + +# 0.15.3 / 2013-07-05 + +* fix .pipe() unzipping on more recent nodes. 
Closes [#240](https://github.com/visionmedia/superagent/issues/240) +* fix passing an empty object to .query() no longer appends "?" +* fix formidable error handling +* update formidable + +# 0.15.2 / 2013-07-02 + +* fix: emit 'end' when piping. + +# 0.15.1 / 2013-06-26 + +* add try/catch around parseLinks + +# 0.15.0 / 2013-06-25 + +* make `Response#toError()` have a more meaningful `message` + +# 0.14.9 / 2013-06-15 + +* add debug()s to the node client +* add .abort() method to node client + +# 0.14.8 / 2013-06-13 + +* set .agent = false always +* remove X-Requested-With. Closes [#189](https://github.com/visionmedia/superagent/issues/189) + +# 0.14.7 / 2013-06-06 + +* fix unzip error handling + +# 0.14.6 / 2013-05-23 + +* fix HEAD unzip bug + +# 0.14.5 / 2013-05-23 + +* add flag to ensure the callback is **never** invoked twice + +# 0.14.4 / 2013-05-22 + +* add superagent.js build output +* update qs +* update emitter-component +* revert "add browser field to support browserify" see [GH-221](https://github.com/visionmedia/superagent/issues/221) + +# 0.14.3 / 2013-05-18 + +* add browser field to support browserify + +# 0.14.2/ 2013-05-07 + +* add host object check to fix serialization of File/Blobs etc as json + +# 0.14.1 / 2013-04-09 + +* update qs + +# 0.14.0 / 2013-04-02 + +* add client-side basic auth +* fix retaining of .set() header field case + +# 0.13.0 / 2013-03-13 + +* add progress events to client +* add simple example +* add res.headers as alias of res.header for browser client +* add res.get(field) to node/client + +# 0.12.4 / 2013-02-11 + +* fix get content-type even if can't get other headers in firefox. fixes [#181](https://github.com/visionmedia/superagent/issues/181) + +# 0.12.3 / 2013-02-11 + +* add quick "progress" event support + +# 0.12.2 / 2013-02-04 + +* add test to check if response acts as a readable stream +* add ReadableStream in the Response prototype. +* add test to assert correct redirection when the host changes in the location header. +* add default Accept-Encoding. Closes [#155](https://github.com/visionmedia/superagent/issues/155) +* fix req.pipe() return value of original stream for node parity. Closes [#171](https://github.com/visionmedia/superagent/issues/171) +* remove the host header when cleaning headers to properly follow the redirection. + +# 0.12.1 / 2013-01-10 + +* add x-domain error handling + +# 0.12.0 / 2013-01-04 + +* add header persistence on redirects + +# 0.11.0 / 2013-01-02 + +* add .error Error object. Closes [#156](https://github.com/visionmedia/superagent/issues/156) +* add forcing of res.text removal for FF HEAD responses. Closes [#162](https://github.com/visionmedia/superagent/issues/162) +* add reduce component usage. Closes [#90](https://github.com/visionmedia/superagent/issues/90) +* move better-assert dep to development deps + +# 0.10.0 / 2012-11-14 + +* add req.timeout(ms) support for the client + +# 0.9.10 / 2012-11-14 + +* fix client-side .query(str) support + +# 0.9.9 / 2012-11-14 + +* add .parse(fn) support +* fix socket hangup with dates in querystring. 
Closes [#146](https://github.com/visionmedia/superagent/issues/146) +* fix socket hangup "error" event when a callback of arity 2 is provided + +# 0.9.8 / 2012-11-03 + +* add emission of error from `Request#callback()` +* add a better fix for nodes weird socket hang up error +* add PUT/POST/PATCH data support to client short-hand functions +* add .license property to component.json +* change client portion to build using component(1) +* fix GET body support [guille] + +# 0.9.7 / 2012-10-19 + +* fix `.buffer()` `res.text` when no parser matches + +# 0.9.6 / 2012-10-17 + +* change: use `this` when `window` is undefined +* update to new component spec [juliangruber] +* fix emission of "data" events for compressed responses without encoding. Closes [#125](https://github.com/visionmedia/superagent/issues/125) + +# 0.9.5 / 2012-10-01 + +* add field name to .attach() +* add text "parser" +* refactor isObject() +* remove wtf isFunction() helper + +# 0.9.4 / 2012-09-20 + +* fix `Buffer` responses [TooTallNate] +* fix `res.type` when a "type" param is present [TooTallNate] + +# 0.9.3 / 2012-09-18 + +* remove **GET** `.send()` == `.query()` special-case (**API** change !!!) + +# 0.9.2 / 2012-09-17 + +* add `.aborted` prop +* add `.abort()`. Closes [#115](https://github.com/visionmedia/superagent/issues/115) + +# 0.9.1 / 2012-09-07 + +* add `.forbidden` response property +* add component.json +* change emitter-component to 0.0.5 +* fix client-side tests + +# 0.9.0 / 2012-08-28 + +* add `.timeout(ms)`. Closes [#17](https://github.com/visionmedia/superagent/issues/17) + +# 0.8.2 / 2012-08-28 + +* fix pathname relative redirects. Closes [#112](https://github.com/visionmedia/superagent/issues/112) + +# 0.8.1 / 2012-08-21 + +* fix redirects when schema is specified + +# 0.8.0 / 2012-08-19 + +* add `res.buffered` flag +* add buffering of text/\*, json and forms only by default. Closes [#61](https://github.com/visionmedia/superagent/issues/61) +* add `.buffer(false)` cancellation +* add cookie jar support [hunterloftis] +* add agent functionality [hunterloftis] + +# 0.7.0 / 2012-08-03 + +* allow `query()` to be called after the internal `req` has been created [tootallnate] + +# 0.6.0 / 2012-07-17 + +* add `res.send('foo=bar')` default of "application/x-www-form-urlencoded" + +# 0.5.1 / 2012-07-16 + +* add "methods" dep +* add `.end()` arity check to node callbacks +* fix unzip support due to weird node internals + +# 0.5.0 / 2012-06-16 + +* Added "Link" response header field parsing, exposing `res.links` + +# 0.4.3 / 2012-06-15 + +* Added 303, 305 and 307 as redirect status codes [slaskis] +* Fixed passing an object as the url + +# 0.4.2 / 2012-06-02 + +* Added component support +* Fixed redirect data + +# 0.4.1 / 2012-04-13 + +* Added HTTP PATCH support +* Fixed: GET / HEAD when following redirects. Closes [#86](https://github.com/visionmedia/superagent/issues/86) +* Fixed Content-Length detection for multibyte chars + +# 0.4.0 / 2012-03-04 + +* Added `.head()` method [browser]. Closes [#78](https://github.com/visionmedia/superagent/issues/78) +* Added `make test-cov` support +* Added multipart request support. Closes [#11](https://github.com/visionmedia/superagent/issues/11) +* Added all methods that node supports. Closes [#71](https://github.com/visionmedia/superagent/issues/71) +* Added "response" event providing a Response object. Closes [#28](https://github.com/visionmedia/superagent/issues/28) +* Added `.query(obj)`. 
Closes [#59](https://github.com/visionmedia/superagent/issues/59) +* Added `res.type` (browser). Closes [#54](https://github.com/visionmedia/superagent/issues/54) +* Changed: default `res.body` and `res.files` to {} +* Fixed: port existing query-string fix (browser). Closes [#57](https://github.com/visionmedia/superagent/issues/57) + +# 0.3.0 / 2012-01-24 + +* Added deflate/gzip support [guillermo] +* Added `res.type` (Content-Type void of params) +* Added `res.statusCode` to mirror node +* Added `res.headers` to mirror node +* Changed: parsers take callbacks +* Fixed optional schema support. Closes [#49](https://github.com/visionmedia/superagent/issues/49) + +# 0.2.0 / 2012-01-05 + +* Added url auth support +* Added `.auth(username, password)` +* Added basic auth support [node]. Closes [#41](https://github.com/visionmedia/superagent/issues/41) +* Added `make test-docs` +* Added guillermo's EventEmitter. Closes [#16](https://github.com/visionmedia/superagent/issues/16) +* Removed `Request#data()` for SS, renamed to `send()` +* Removed `Request#data()` from client, renamed to `send()` +* Fixed array support. [browser] +* Fixed array support. Closes [#35](https://github.com/visionmedia/superagent/issues/35) [node] +* Fixed `EventEmitter#emit()` + +# 0.1.3 / 2011-10-25 + +* Added error to callback +* Bumped node dep for 0.5.x + +# 0.1.2 / 2011-09-24 + +* Added markdown documentation +* Added `request(url[, fn])` support to the client +* Added `qs` dependency to package.json +* Added options for `Request#pipe()` +* Added support for `request(url, callback)` +* Added `request(url)` as shortcut for `request.get(url)` +* Added `Request#pipe(stream)` +* Added inherit from `Stream` +* Added multipart support +* Added ssl support (node) +* Removed Content-Length field from client +* Fixed buffering, `setEncoding()` to utf8 [reported by stagas] +* Fixed "end" event when piping + +# 0.1.1 / 2011-08-20 + +* Added `res.redirect` flag (node) +* Added redirect support (node) +* Added `Request#redirects(n)` (node) +* Added `.set(object)` header field support +* Fixed `Content-Length` support + +# 0.1.0 / 2011-08-09 + +* Added support for multiple calls to `.data()` +* Added support for `.get(uri, obj)` +* Added GET `.data()` querystring support +* Added IE{6,7,8} support [alexyoung] + +# 0.0.1 / 2011-08-05 + +* Initial commit + + + diff --git a/node_modules/superagent/LICENSE b/node_modules/superagent/LICENSE new file mode 100644 index 0000000..1b188ba --- /dev/null +++ b/node_modules/superagent/LICENSE @@ -0,0 +1,22 @@ +(The MIT License) + +Copyright (c) 2014-2016 TJ Holowaychuk + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/superagent/README.md b/node_modules/superagent/README.md new file mode 100644 index 0000000..79d102a --- /dev/null +++ b/node_modules/superagent/README.md @@ -0,0 +1,274 @@ +# superagent + +[![build status](https://img.shields.io/travis/visionmedia/superagent.svg)](https://travis-ci.org/visionmedia/superagent) +[![code coverage](https://img.shields.io/codecov/c/github/visionmedia/superagent.svg)](https://codecov.io/gh/visionmedia/superagent) +[![code style](https://img.shields.io/badge/code_style-XO-5ed9c7.svg)](https://github.com/sindresorhus/xo) +[![styled with prettier](https://img.shields.io/badge/styled_with-prettier-ff69b4.svg)](https://github.com/prettier/prettier) +[![made with lass](https://img.shields.io/badge/made_with-lass-95CC28.svg)](https://lass.js.org) +[![license](https://img.shields.io/github/license/visionmedia/superagent.svg)](LICENSE) + +> Small progressive client-side HTTP request library, and Node.js module with the same API, supporting many high-level HTTP client features + + +## Table of Contents + +* [Install](#install) +* [Usage](#usage) + * [Node](#node) + * [Browser](#browser) +* [Supported Platforms](#supported-platforms) + * [Required Browser Features](#required-browser-features) +* [Plugins](#plugins) +* [Upgrading from previous versions](#upgrading-from-previous-versions) +* [Contributors](#contributors) +* [License](#license) + + +## Install + +[npm][]: + +```sh +npm install superagent +``` + +[yarn][]: + +```sh +yarn add superagent +``` + + +## Usage + +### Node + +```js +const superagent = require('superagent'); + +// callback +superagent + .post('/api/pet') + .send({ name: 'Manny', species: 'cat' }) // sends a JSON post body + .set('X-API-Key', 'foobar') + .set('accept', 'json') + .end((err, res) => { + // Calling the end function will send the request + }); + +// promise with then/catch +superagent.post('/api/pet').then(console.log).catch(console.error); + +// promise with async/await +(async () => { + try { + const res = await superagent.post('/api/pet'); + console.log(res); + } catch (err) { + console.error(err); + } +})(); +``` + +### Browser + +**The browser-ready, minified version of `superagent` is only 6 KB (minified and gzipped)!** + +Browser-ready versions of this module are available via [jsdelivr][], [unpkg][], and also in the `node_modules/superagent/dist` folder in downloads of the `superagent` package. + +> Note that we also provide unminified versions with `.js` instead of `.min.js` file extensions. + +#### VanillaJS + +This is the solution for you if you're just using ` + + + + +``` + +#### Bundler + +If you are using [browserify][], [webpack][], [rollup][], or another bundler, then you can follow the same usage as [Node](#node) above. 
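> Editor's note: for readers following the bundler path just described, a minimal sketch of what that usage looks like. This is not part of the upstream README; the `/api/pet` endpoint is a hypothetical placeholder carried over from the Node example earlier in this file, and the ESM `import` form assumes a bundler that resolves CommonJS packages (webpack, browserify, rollup with the CommonJS plugin, etc.).

```js
// Minimal sketch: the bundled browser build exposes the same chaining API
// as the Node example shown above.
import superagent from 'superagent'; // assumes a CommonJS-aware bundler

superagent
  .get('/api/pet')                    // hypothetical endpoint
  .query({ species: 'cat' })          // serialized into the query string
  .set('Accept', 'application/json')
  .then((res) => console.log(res.body))
  .catch((err) => console.error(err));
```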
+ + +## Supported Platforms + +* Node: v6.x+ +* Browsers (see [.browserslistrc](.browserslistrc)): + + ```sh + npx browserslist + ``` + + ```sh + and_chr 71 + and_ff 64 + and_qq 1.2 + and_uc 11.8 + android 67 + android 4.4.3-4.4.4 + baidu 7.12 + bb 10 + bb 7 + chrome 73 + chrome 72 + chrome 71 + edge 18 + edge 17 + firefox 66 + firefox 65 + ie 11 + ie 10 + ie 9 + ie_mob 11 + ie_mob 10 + ios_saf 12.0-12.1 + ios_saf 11.3-11.4 + op_mini all + op_mob 46 + op_mob 12.1 + opera 58 + opera 57 + safari 12 + safari 11.1 + samsung 8.2 + samsung 7.2-7.4 + ``` + +### Required Browser Features + +We recommend using (specifically with the bundle mentioned in [VanillaJS](#vanillajs) above): + +```html + +``` + +* IE 9-10 requires a polyfill for `Promise`, `Array.from`, `Symbol`, `Object.getOwnPropertySymbols`, and `Object.setPrototypeOf` +* IE 9 requires a polyfill for `window.FormData` (we recommend [formdata-polyfill][]), `Set`, `Math.trunc`, `BigInt`, `Map`, `Reflect`, `WeakMap`, `WeakRef`, and `WeakSet` + + +## Plugins + +SuperAgent is easily extended via plugins. + +```js +const nocache = require('superagent-no-cache'); +const superagent = require('superagent'); +const prefix = require('superagent-prefix')('/static'); + +superagent + .get('/some-url') + .query({ action: 'edit', city: 'London' }) // query string + .use(prefix) // Prefixes *only* this request + .use(nocache) // Prevents caching of *only* this request + .end((err, res) => { + // Do something + }); +``` + +Existing plugins: + +* [superagent-no-cache](https://github.com/johntron/superagent-no-cache) - prevents caching by including Cache-Control header +* [superagent-prefix](https://github.com/johntron/superagent-prefix) - prefixes absolute URLs (useful in test environment) +* [superagent-suffix](https://github.com/timneutkens1/superagent-suffix) - suffix URLs with a given path +* [superagent-mock](https://github.com/M6Web/superagent-mock) - simulate HTTP calls by returning data fixtures based on the requested URL +* [superagent-mocker](https://github.com/shuvalov-anton/superagent-mocker) — simulate REST API +* [superagent-cache](https://github.com/jpodwys/superagent-cache) - A global SuperAgent patch with built-in, flexible caching +* [superagent-cache-plugin](https://github.com/jpodwys/superagent-cache-plugin) - A SuperAgent plugin with built-in, flexible caching +* [superagent-jsonapify](https://github.com/alex94puchades/superagent-jsonapify) - A lightweight [json-api](http://jsonapi.org/format/) client addon for superagent +* [superagent-serializer](https://github.com/zzarcon/superagent-serializer) - Converts server payload into different cases +* [superagent-httpbackend](https://www.npmjs.com/package/superagent-httpbackend) - stub out requests using AngularJS' $httpBackend syntax +* [superagent-throttle](https://github.com/leviwheatcroft/superagent-throttle) - queues and intelligently throttles requests +* [superagent-charset](https://github.com/magicdawn/superagent-charset) - add charset support for node's SuperAgent +* [superagent-verbose-errors](https://github.com/jcoreio/superagent-verbose-errors) - include response body in error messages for failed requests +* [superagent-declare](https://github.com/damoclark/superagent-declare) - A simple [declarative](https://en.wikipedia.org/wiki/Declarative_programming) API for SuperAgent +* [superagent-node-http-timings](https://github.com/webuniverseio/superagent-node-http-timings) - measure http timings in node.js + +Please prefix your plugin with `superagent-*` so that it can easily be 
found by others. + +For SuperAgent extensions such as couchdb and oauth visit the [wiki](https://github.com/visionmedia/superagent/wiki). + + +## Upgrading from previous versions + +Please see [GitHub releases page](https://github.com/visionmedia/superagent/releases) for the current changelog. + +Our breaking changes are mostly in rarely used functionality and from stricter error handling. + +* [6.0 to 6.1](https://github.com/visionmedia/superagent/releases/tag/v6.1.0) + * Browser behaviour changed to match Node when serializing `application/x-www-form-urlencoded`, using `arrayFormat: 'indices'` semantics of `qs` library. (See: ) +* [5.x to 6.x](https://github.com/visionmedia/superagent/releases/tag/v6.0.0): + * Retry behavior is still opt-in, however we now have a more fine-grained list of status codes and error codes that we retry against (see updated docs) + * A specific issue with Content-Type matching not being case-insensitive is fixed + * Set is now required for IE 9, see [Required Browser Features](#required-browser-features) for more insight +* [4.x to 5.x](https://github.com/visionmedia/superagent/releases/tag/v5.0.0): + * We've implemented the build setup of [Lass](https://lass.js.org) to simplify our stack and linting + * Unminified browserified build size has been reduced from 48KB to 20KB (via `tinyify` and the latest version of Babel using `@babel/preset-env` and `.browserslistrc`) + * Linting support has been added using `caniuse-lite` and `eslint-plugin-compat` + * We can now target what versions of Node we wish to support more easily using `.babelrc` +* [3.x to 4.x](https://github.com/visionmedia/superagent/releases/tag/v4.0.0-alpha.1): + * Ensure you're running Node 6 or later. We've dropped support for Node 4. + * We've started using ES6 and for compatibility with Internet Explorer you may need to use Babel. + * We suggest migrating from `.end()` callbacks to `.then()` or `await`. +* [2.x to 3.x](https://github.com/visionmedia/superagent/releases/tag/v3.0.0): + * Ensure you're running Node 4 or later. We've dropped support for Node 0.x. + * Test code that calls `.send()` multiple times. Invalid calls to `.send()` will now throw instead of sending garbage. +* [1.x to 2.x](https://github.com/visionmedia/superagent/releases/tag/v2.0.0): + * If you use `.parse()` in the *browser* version, rename it to `.serialize()`. + * If you rely on `undefined` in query-string values being sent literally as the text "undefined", switch to checking for missing value instead. `?key=undefined` is now `?key` (without a value). + * If you use `.then()` in Internet Explorer, ensure that you have a polyfill that adds a global `Promise` object. +* 0.x to 1.x: + * Instead of 1-argument callback `.end(function(res){})` use `.then(res => {})`. 
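> Editor's note: to make the last migration item concrete, a hedged sketch (not taken from the upstream README) contrasting the legacy single-argument `.end()` callback with the promise form that current versions expect; `/some-url` is a placeholder.

```js
const superagent = require('superagent');

// 0.x style: one-argument .end() callback (removed in 1.x) --
// kept here only as a comment for comparison:
// superagent.get('/some-url').end(function (res) { console.log(res.status); });

// 1.x+ style: consume the request as a promise instead.
superagent
  .get('/some-url')                       // placeholder URL
  .then((res) => console.log(res.status))
  .catch((err) => console.error(err.status, err.message));
```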
+ + +## Contributors + +| Name | +| ------------------- | +| **Kornel Lesiński** | +| **Peter Lyons** | +| **Hunter Loftis** | +| **Nick Baugh** | + + +## License + +[MIT](LICENSE) © TJ Holowaychuk + + +## + +[npm]: https://www.npmjs.com/ + +[yarn]: https://yarnpkg.com/ + +[formdata-polyfill]: https://www.npmjs.com/package/formdata-polyfill + +[jsdelivr]: https://www.jsdelivr.com/ + +[unpkg]: https://unpkg.com/ + +[browserify]: https://github.com/browserify/browserify + +[webpack]: https://github.com/webpack/webpack + +[rollup]: https://github.com/rollup/rollup diff --git a/node_modules/superagent/dist/superagent.js b/node_modules/superagent/dist/superagent.js new file mode 100644 index 0000000..8cd7332 --- /dev/null +++ b/node_modules/superagent/dist/superagent.js @@ -0,0 +1,3975 @@ +(function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}g.superagent = f()}})(function(){var define,module,exports;return (function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c="function"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error("Cannot find module '"+i+"'");throw a.code="MODULE_NOT_FOUND",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u="function"==typeof require&&require,i=0;i -1) { + return callBind(intrinsic); + } + + return intrinsic; +}; + +},{"./":3,"get-intrinsic":8}],3:[function(require,module,exports){ +'use strict'; + +var bind = require('function-bind'); + +var GetIntrinsic = require('get-intrinsic'); + +var $apply = GetIntrinsic('%Function.prototype.apply%'); +var $call = GetIntrinsic('%Function.prototype.call%'); +var $reflectApply = GetIntrinsic('%Reflect.apply%', true) || bind.call($call, $apply); +var $gOPD = GetIntrinsic('%Object.getOwnPropertyDescriptor%', true); +var $defineProperty = GetIntrinsic('%Object.defineProperty%', true); +var $max = GetIntrinsic('%Math.max%'); + +if ($defineProperty) { + try { + $defineProperty({}, 'a', { + value: 1 + }); + } catch (e) { + $defineProperty = null; + } +} + +module.exports = function callBind(originalFunction) { + var func = $reflectApply(bind, $call, arguments); + + if ($gOPD && $defineProperty) { + var desc = $gOPD(func, 'length'); + + if (desc.configurable) { + $defineProperty(func, 'length', { + value: 1 + $max(0, originalFunction.length - (arguments.length - 1)) + }); + } + } + + return func; +}; + +var applyBind = function applyBind() { + return $reflectApply(bind, $apply, arguments); +}; + +if ($defineProperty) { + $defineProperty(module.exports, 'apply', { + value: applyBind + }); +} else { + module.exports.apply = applyBind; +} + +},{"function-bind":7,"get-intrinsic":8}],4:[function(require,module,exports){ +"use strict"; + +if (typeof module !== 'undefined') { + module.exports = Emitter; +} + +function Emitter(obj) { + if (obj) return mixin(obj); +} + +; + +function mixin(obj) { + for (var key in Emitter.prototype) { + obj[key] = Emitter.prototype[key]; + } + + return obj; +} + +Emitter.prototype.on = Emitter.prototype.addEventListener = function (event, fn) { + this._callbacks = this._callbacks || {}; + (this._callbacks['$' + event] = this._callbacks['$' + event] || []).push(fn); + return this; +}; + +Emitter.prototype.once = function 
(event, fn) { + function on() { + this.off(event, on); + fn.apply(this, arguments); + } + + on.fn = fn; + this.on(event, on); + return this; +}; + +Emitter.prototype.off = Emitter.prototype.removeListener = Emitter.prototype.removeAllListeners = Emitter.prototype.removeEventListener = function (event, fn) { + this._callbacks = this._callbacks || {}; + + if (0 == arguments.length) { + this._callbacks = {}; + return this; + } + + var callbacks = this._callbacks['$' + event]; + if (!callbacks) return this; + + if (1 == arguments.length) { + delete this._callbacks['$' + event]; + return this; + } + + var cb; + + for (var i = 0; i < callbacks.length; i++) { + cb = callbacks[i]; + + if (cb === fn || cb.fn === fn) { + callbacks.splice(i, 1); + break; + } + } + + if (callbacks.length === 0) { + delete this._callbacks['$' + event]; + } + + return this; +}; + +Emitter.prototype.emit = function (event) { + this._callbacks = this._callbacks || {}; + var args = new Array(arguments.length - 1), + callbacks = this._callbacks['$' + event]; + + for (var i = 1; i < arguments.length; i++) { + args[i - 1] = arguments[i]; + } + + if (callbacks) { + callbacks = callbacks.slice(0); + + for (var i = 0, len = callbacks.length; i < len; ++i) { + callbacks[i].apply(this, args); + } + } + + return this; +}; + +Emitter.prototype.listeners = function (event) { + this._callbacks = this._callbacks || {}; + return this._callbacks['$' + event] || []; +}; + +Emitter.prototype.hasListeners = function (event) { + return !!this.listeners(event).length; +}; + +},{}],5:[function(require,module,exports){ +"use strict"; + +function _typeof(obj) { "@babel/helpers - typeof"; return _typeof = "function" == typeof Symbol && "symbol" == typeof Symbol.iterator ? function (obj) { return typeof obj; } : function (obj) { return obj && "function" == typeof Symbol && obj.constructor === Symbol && obj !== Symbol.prototype ? 
"symbol" : typeof obj; }, _typeof(obj); } + +module.exports = stringify; +stringify.default = stringify; +stringify.stable = deterministicStringify; +stringify.stableStringify = deterministicStringify; +var LIMIT_REPLACE_NODE = '[...]'; +var CIRCULAR_REPLACE_NODE = '[Circular]'; +var arr = []; +var replacerStack = []; + +function defaultOptions() { + return { + depthLimit: Number.MAX_SAFE_INTEGER, + edgesLimit: Number.MAX_SAFE_INTEGER + }; +} + +function stringify(obj, replacer, spacer, options) { + if (typeof options === 'undefined') { + options = defaultOptions(); + } + + decirc(obj, '', 0, [], undefined, 0, options); + var res; + + try { + if (replacerStack.length === 0) { + res = JSON.stringify(obj, replacer, spacer); + } else { + res = JSON.stringify(obj, replaceGetterValues(replacer), spacer); + } + } catch (_) { + return JSON.stringify('[unable to serialize, circular reference is too complex to analyze]'); + } finally { + while (arr.length !== 0) { + var part = arr.pop(); + + if (part.length === 4) { + Object.defineProperty(part[0], part[1], part[3]); + } else { + part[0][part[1]] = part[2]; + } + } + } + + return res; +} + +function setReplace(replace, val, k, parent) { + var propertyDescriptor = Object.getOwnPropertyDescriptor(parent, k); + + if (propertyDescriptor.get !== undefined) { + if (propertyDescriptor.configurable) { + Object.defineProperty(parent, k, { + value: replace + }); + arr.push([parent, k, val, propertyDescriptor]); + } else { + replacerStack.push([val, k, replace]); + } + } else { + parent[k] = replace; + arr.push([parent, k, val]); + } +} + +function decirc(val, k, edgeIndex, stack, parent, depth, options) { + depth += 1; + var i; + + if (_typeof(val) === 'object' && val !== null) { + for (i = 0; i < stack.length; i++) { + if (stack[i] === val) { + setReplace(CIRCULAR_REPLACE_NODE, val, k, parent); + return; + } + } + + if (typeof options.depthLimit !== 'undefined' && depth > options.depthLimit) { + setReplace(LIMIT_REPLACE_NODE, val, k, parent); + return; + } + + if (typeof options.edgesLimit !== 'undefined' && edgeIndex + 1 > options.edgesLimit) { + setReplace(LIMIT_REPLACE_NODE, val, k, parent); + return; + } + + stack.push(val); + + if (Array.isArray(val)) { + for (i = 0; i < val.length; i++) { + decirc(val[i], i, i, stack, val, depth, options); + } + } else { + var keys = Object.keys(val); + + for (i = 0; i < keys.length; i++) { + var key = keys[i]; + decirc(val[key], key, i, stack, val, depth, options); + } + } + + stack.pop(); + } +} + +function compareFunction(a, b) { + if (a < b) { + return -1; + } + + if (a > b) { + return 1; + } + + return 0; +} + +function deterministicStringify(obj, replacer, spacer, options) { + if (typeof options === 'undefined') { + options = defaultOptions(); + } + + var tmp = deterministicDecirc(obj, '', 0, [], undefined, 0, options) || obj; + var res; + + try { + if (replacerStack.length === 0) { + res = JSON.stringify(tmp, replacer, spacer); + } else { + res = JSON.stringify(tmp, replaceGetterValues(replacer), spacer); + } + } catch (_) { + return JSON.stringify('[unable to serialize, circular reference is too complex to analyze]'); + } finally { + while (arr.length !== 0) { + var part = arr.pop(); + + if (part.length === 4) { + Object.defineProperty(part[0], part[1], part[3]); + } else { + part[0][part[1]] = part[2]; + } + } + } + + return res; +} + +function deterministicDecirc(val, k, edgeIndex, stack, parent, depth, options) { + depth += 1; + var i; + + if (_typeof(val) === 'object' && val !== null) { + for (i = 0; i < 
stack.length; i++) { + if (stack[i] === val) { + setReplace(CIRCULAR_REPLACE_NODE, val, k, parent); + return; + } + } + + try { + if (typeof val.toJSON === 'function') { + return; + } + } catch (_) { + return; + } + + if (typeof options.depthLimit !== 'undefined' && depth > options.depthLimit) { + setReplace(LIMIT_REPLACE_NODE, val, k, parent); + return; + } + + if (typeof options.edgesLimit !== 'undefined' && edgeIndex + 1 > options.edgesLimit) { + setReplace(LIMIT_REPLACE_NODE, val, k, parent); + return; + } + + stack.push(val); + + if (Array.isArray(val)) { + for (i = 0; i < val.length; i++) { + deterministicDecirc(val[i], i, i, stack, val, depth, options); + } + } else { + var tmp = {}; + var keys = Object.keys(val).sort(compareFunction); + + for (i = 0; i < keys.length; i++) { + var key = keys[i]; + deterministicDecirc(val[key], key, i, stack, val, depth, options); + tmp[key] = val[key]; + } + + if (typeof parent !== 'undefined') { + arr.push([parent, k, val]); + parent[k] = tmp; + } else { + return tmp; + } + } + + stack.pop(); + } +} + +function replaceGetterValues(replacer) { + replacer = typeof replacer !== 'undefined' ? replacer : function (k, v) { + return v; + }; + return function (key, val) { + if (replacerStack.length > 0) { + for (var i = 0; i < replacerStack.length; i++) { + var part = replacerStack[i]; + + if (part[1] === key && part[0] === val) { + val = part[2]; + replacerStack.splice(i, 1); + break; + } + } + } + + return replacer.call(this, key, val); + }; +} + +},{}],6:[function(require,module,exports){ +'use strict'; + +var ERROR_MESSAGE = 'Function.prototype.bind called on incompatible '; +var slice = Array.prototype.slice; +var toStr = Object.prototype.toString; +var funcType = '[object Function]'; + +module.exports = function bind(that) { + var target = this; + + if (typeof target !== 'function' || toStr.call(target) !== funcType) { + throw new TypeError(ERROR_MESSAGE + target); + } + + var args = slice.call(arguments, 1); + var bound; + + var binder = function binder() { + if (this instanceof bound) { + var result = target.apply(this, args.concat(slice.call(arguments))); + + if (Object(result) === result) { + return result; + } + + return this; + } else { + return target.apply(that, args.concat(slice.call(arguments))); + } + }; + + var boundLength = Math.max(0, target.length - args.length); + var boundArgs = []; + + for (var i = 0; i < boundLength; i++) { + boundArgs.push('$' + i); + } + + bound = Function('binder', 'return function (' + boundArgs.join(',') + '){ return binder.apply(this,arguments); }')(binder); + + if (target.prototype) { + var Empty = function Empty() {}; + + Empty.prototype = target.prototype; + bound.prototype = new Empty(); + Empty.prototype = null; + } + + return bound; +}; + +},{}],7:[function(require,module,exports){ +'use strict'; + +var implementation = require('./implementation'); + +module.exports = Function.prototype.bind || implementation; + +},{"./implementation":6}],8:[function(require,module,exports){ +'use strict'; + +function _typeof(obj) { "@babel/helpers - typeof"; return _typeof = "function" == typeof Symbol && "symbol" == typeof Symbol.iterator ? function (obj) { return typeof obj; } : function (obj) { return obj && "function" == typeof Symbol && obj.constructor === Symbol && obj !== Symbol.prototype ? 
"symbol" : typeof obj; }, _typeof(obj); } + +var undefined; +var $SyntaxError = SyntaxError; +var $Function = Function; +var $TypeError = TypeError; + +var getEvalledConstructor = function getEvalledConstructor(expressionSyntax) { + try { + return $Function('"use strict"; return (' + expressionSyntax + ').constructor;')(); + } catch (e) {} +}; + +var $gOPD = Object.getOwnPropertyDescriptor; + +if ($gOPD) { + try { + $gOPD({}, ''); + } catch (e) { + $gOPD = null; + } +} + +var throwTypeError = function throwTypeError() { + throw new $TypeError(); +}; + +var ThrowTypeError = $gOPD ? function () { + try { + arguments.callee; + return throwTypeError; + } catch (calleeThrows) { + try { + return $gOPD(arguments, 'callee').get; + } catch (gOPDthrows) { + return throwTypeError; + } + } +}() : throwTypeError; + +var hasSymbols = require('has-symbols')(); + +var getProto = Object.getPrototypeOf || function (x) { + return x.__proto__; +}; + +var needsEval = {}; +var TypedArray = typeof Uint8Array === 'undefined' ? undefined : getProto(Uint8Array); +var INTRINSICS = { + '%AggregateError%': typeof AggregateError === 'undefined' ? undefined : AggregateError, + '%Array%': Array, + '%ArrayBuffer%': typeof ArrayBuffer === 'undefined' ? undefined : ArrayBuffer, + '%ArrayIteratorPrototype%': hasSymbols ? getProto([][Symbol.iterator]()) : undefined, + '%AsyncFromSyncIteratorPrototype%': undefined, + '%AsyncFunction%': needsEval, + '%AsyncGenerator%': needsEval, + '%AsyncGeneratorFunction%': needsEval, + '%AsyncIteratorPrototype%': needsEval, + '%Atomics%': typeof Atomics === 'undefined' ? undefined : Atomics, + '%BigInt%': typeof BigInt === 'undefined' ? undefined : BigInt, + '%Boolean%': Boolean, + '%DataView%': typeof DataView === 'undefined' ? undefined : DataView, + '%Date%': Date, + '%decodeURI%': decodeURI, + '%decodeURIComponent%': decodeURIComponent, + '%encodeURI%': encodeURI, + '%encodeURIComponent%': encodeURIComponent, + '%Error%': Error, + '%eval%': eval, + '%EvalError%': EvalError, + '%Float32Array%': typeof Float32Array === 'undefined' ? undefined : Float32Array, + '%Float64Array%': typeof Float64Array === 'undefined' ? undefined : Float64Array, + '%FinalizationRegistry%': typeof FinalizationRegistry === 'undefined' ? undefined : FinalizationRegistry, + '%Function%': $Function, + '%GeneratorFunction%': needsEval, + '%Int8Array%': typeof Int8Array === 'undefined' ? undefined : Int8Array, + '%Int16Array%': typeof Int16Array === 'undefined' ? undefined : Int16Array, + '%Int32Array%': typeof Int32Array === 'undefined' ? undefined : Int32Array, + '%isFinite%': isFinite, + '%isNaN%': isNaN, + '%IteratorPrototype%': hasSymbols ? getProto(getProto([][Symbol.iterator]())) : undefined, + '%JSON%': (typeof JSON === "undefined" ? "undefined" : _typeof(JSON)) === 'object' ? JSON : undefined, + '%Map%': typeof Map === 'undefined' ? undefined : Map, + '%MapIteratorPrototype%': typeof Map === 'undefined' || !hasSymbols ? undefined : getProto(new Map()[Symbol.iterator]()), + '%Math%': Math, + '%Number%': Number, + '%Object%': Object, + '%parseFloat%': parseFloat, + '%parseInt%': parseInt, + '%Promise%': typeof Promise === 'undefined' ? undefined : Promise, + '%Proxy%': typeof Proxy === 'undefined' ? undefined : Proxy, + '%RangeError%': RangeError, + '%ReferenceError%': ReferenceError, + '%Reflect%': typeof Reflect === 'undefined' ? undefined : Reflect, + '%RegExp%': RegExp, + '%Set%': typeof Set === 'undefined' ? undefined : Set, + '%SetIteratorPrototype%': typeof Set === 'undefined' || !hasSymbols ? 
undefined : getProto(new Set()[Symbol.iterator]()), + '%SharedArrayBuffer%': typeof SharedArrayBuffer === 'undefined' ? undefined : SharedArrayBuffer, + '%String%': String, + '%StringIteratorPrototype%': hasSymbols ? getProto(''[Symbol.iterator]()) : undefined, + '%Symbol%': hasSymbols ? Symbol : undefined, + '%SyntaxError%': $SyntaxError, + '%ThrowTypeError%': ThrowTypeError, + '%TypedArray%': TypedArray, + '%TypeError%': $TypeError, + '%Uint8Array%': typeof Uint8Array === 'undefined' ? undefined : Uint8Array, + '%Uint8ClampedArray%': typeof Uint8ClampedArray === 'undefined' ? undefined : Uint8ClampedArray, + '%Uint16Array%': typeof Uint16Array === 'undefined' ? undefined : Uint16Array, + '%Uint32Array%': typeof Uint32Array === 'undefined' ? undefined : Uint32Array, + '%URIError%': URIError, + '%WeakMap%': typeof WeakMap === 'undefined' ? undefined : WeakMap, + '%WeakRef%': typeof WeakRef === 'undefined' ? undefined : WeakRef, + '%WeakSet%': typeof WeakSet === 'undefined' ? undefined : WeakSet +}; + +var doEval = function doEval(name) { + var value; + + if (name === '%AsyncFunction%') { + value = getEvalledConstructor('async function () {}'); + } else if (name === '%GeneratorFunction%') { + value = getEvalledConstructor('function* () {}'); + } else if (name === '%AsyncGeneratorFunction%') { + value = getEvalledConstructor('async function* () {}'); + } else if (name === '%AsyncGenerator%') { + var fn = doEval('%AsyncGeneratorFunction%'); + + if (fn) { + value = fn.prototype; + } + } else if (name === '%AsyncIteratorPrototype%') { + var gen = doEval('%AsyncGenerator%'); + + if (gen) { + value = getProto(gen.prototype); + } + } + + INTRINSICS[name] = value; + return value; +}; + +var LEGACY_ALIASES = { + '%ArrayBufferPrototype%': ['ArrayBuffer', 'prototype'], + '%ArrayPrototype%': ['Array', 'prototype'], + '%ArrayProto_entries%': ['Array', 'prototype', 'entries'], + '%ArrayProto_forEach%': ['Array', 'prototype', 'forEach'], + '%ArrayProto_keys%': ['Array', 'prototype', 'keys'], + '%ArrayProto_values%': ['Array', 'prototype', 'values'], + '%AsyncFunctionPrototype%': ['AsyncFunction', 'prototype'], + '%AsyncGenerator%': ['AsyncGeneratorFunction', 'prototype'], + '%AsyncGeneratorPrototype%': ['AsyncGeneratorFunction', 'prototype', 'prototype'], + '%BooleanPrototype%': ['Boolean', 'prototype'], + '%DataViewPrototype%': ['DataView', 'prototype'], + '%DatePrototype%': ['Date', 'prototype'], + '%ErrorPrototype%': ['Error', 'prototype'], + '%EvalErrorPrototype%': ['EvalError', 'prototype'], + '%Float32ArrayPrototype%': ['Float32Array', 'prototype'], + '%Float64ArrayPrototype%': ['Float64Array', 'prototype'], + '%FunctionPrototype%': ['Function', 'prototype'], + '%Generator%': ['GeneratorFunction', 'prototype'], + '%GeneratorPrototype%': ['GeneratorFunction', 'prototype', 'prototype'], + '%Int8ArrayPrototype%': ['Int8Array', 'prototype'], + '%Int16ArrayPrototype%': ['Int16Array', 'prototype'], + '%Int32ArrayPrototype%': ['Int32Array', 'prototype'], + '%JSONParse%': ['JSON', 'parse'], + '%JSONStringify%': ['JSON', 'stringify'], + '%MapPrototype%': ['Map', 'prototype'], + '%NumberPrototype%': ['Number', 'prototype'], + '%ObjectPrototype%': ['Object', 'prototype'], + '%ObjProto_toString%': ['Object', 'prototype', 'toString'], + '%ObjProto_valueOf%': ['Object', 'prototype', 'valueOf'], + '%PromisePrototype%': ['Promise', 'prototype'], + '%PromiseProto_then%': ['Promise', 'prototype', 'then'], + '%Promise_all%': ['Promise', 'all'], + '%Promise_reject%': ['Promise', 'reject'], + '%Promise_resolve%': 
['Promise', 'resolve'], + '%RangeErrorPrototype%': ['RangeError', 'prototype'], + '%ReferenceErrorPrototype%': ['ReferenceError', 'prototype'], + '%RegExpPrototype%': ['RegExp', 'prototype'], + '%SetPrototype%': ['Set', 'prototype'], + '%SharedArrayBufferPrototype%': ['SharedArrayBuffer', 'prototype'], + '%StringPrototype%': ['String', 'prototype'], + '%SymbolPrototype%': ['Symbol', 'prototype'], + '%SyntaxErrorPrototype%': ['SyntaxError', 'prototype'], + '%TypedArrayPrototype%': ['TypedArray', 'prototype'], + '%TypeErrorPrototype%': ['TypeError', 'prototype'], + '%Uint8ArrayPrototype%': ['Uint8Array', 'prototype'], + '%Uint8ClampedArrayPrototype%': ['Uint8ClampedArray', 'prototype'], + '%Uint16ArrayPrototype%': ['Uint16Array', 'prototype'], + '%Uint32ArrayPrototype%': ['Uint32Array', 'prototype'], + '%URIErrorPrototype%': ['URIError', 'prototype'], + '%WeakMapPrototype%': ['WeakMap', 'prototype'], + '%WeakSetPrototype%': ['WeakSet', 'prototype'] +}; + +var bind = require('function-bind'); + +var hasOwn = require('has'); + +var $concat = bind.call(Function.call, Array.prototype.concat); +var $spliceApply = bind.call(Function.apply, Array.prototype.splice); +var $replace = bind.call(Function.call, String.prototype.replace); +var $strSlice = bind.call(Function.call, String.prototype.slice); +var rePropName = /[^%.[\]]+|\[(?:(-?\d+(?:\.\d+)?)|(["'])((?:(?!\2)[^\\]|\\.)*?)\2)\]|(?=(?:\.|\[\])(?:\.|\[\]|%$))/g; +var reEscapeChar = /\\(\\)?/g; + +var stringToPath = function stringToPath(string) { + var first = $strSlice(string, 0, 1); + var last = $strSlice(string, -1); + + if (first === '%' && last !== '%') { + throw new $SyntaxError('invalid intrinsic syntax, expected closing `%`'); + } else if (last === '%' && first !== '%') { + throw new $SyntaxError('invalid intrinsic syntax, expected opening `%`'); + } + + var result = []; + $replace(string, rePropName, function (match, number, quote, subString) { + result[result.length] = quote ? $replace(subString, reEscapeChar, '$1') : number || match; + }); + return result; +}; + +var getBaseIntrinsic = function getBaseIntrinsic(name, allowMissing) { + var intrinsicName = name; + var alias; + + if (hasOwn(LEGACY_ALIASES, intrinsicName)) { + alias = LEGACY_ALIASES[intrinsicName]; + intrinsicName = '%' + alias[0] + '%'; + } + + if (hasOwn(INTRINSICS, intrinsicName)) { + var value = INTRINSICS[intrinsicName]; + + if (value === needsEval) { + value = doEval(intrinsicName); + } + + if (typeof value === 'undefined' && !allowMissing) { + throw new $TypeError('intrinsic ' + name + ' exists, but is not available. Please file an issue!'); + } + + return { + alias: alias, + name: intrinsicName, + value: value + }; + } + + throw new $SyntaxError('intrinsic ' + name + ' does not exist!'); +}; + +module.exports = function GetIntrinsic(name, allowMissing) { + if (typeof name !== 'string' || name.length === 0) { + throw new $TypeError('intrinsic name must be a non-empty string'); + } + + if (arguments.length > 1 && typeof allowMissing !== 'boolean') { + throw new $TypeError('"allowMissing" argument must be a boolean'); + } + + var parts = stringToPath(name); + var intrinsicBaseName = parts.length > 0 ? 
parts[0] : ''; + var intrinsic = getBaseIntrinsic('%' + intrinsicBaseName + '%', allowMissing); + var intrinsicRealName = intrinsic.name; + var value = intrinsic.value; + var skipFurtherCaching = false; + var alias = intrinsic.alias; + + if (alias) { + intrinsicBaseName = alias[0]; + $spliceApply(parts, $concat([0, 1], alias)); + } + + for (var i = 1, isOwn = true; i < parts.length; i += 1) { + var part = parts[i]; + var first = $strSlice(part, 0, 1); + var last = $strSlice(part, -1); + + if ((first === '"' || first === "'" || first === '`' || last === '"' || last === "'" || last === '`') && first !== last) { + throw new $SyntaxError('property names with quotes must have matching quotes'); + } + + if (part === 'constructor' || !isOwn) { + skipFurtherCaching = true; + } + + intrinsicBaseName += '.' + part; + intrinsicRealName = '%' + intrinsicBaseName + '%'; + + if (hasOwn(INTRINSICS, intrinsicRealName)) { + value = INTRINSICS[intrinsicRealName]; + } else if (value != null) { + if (!(part in value)) { + if (!allowMissing) { + throw new $TypeError('base intrinsic for ' + name + ' exists, but the property is not available.'); + } + + return void undefined; + } + + if ($gOPD && i + 1 >= parts.length) { + var desc = $gOPD(value, part); + isOwn = !!desc; + + if (isOwn && 'get' in desc && !('originalValue' in desc.get)) { + value = desc.get; + } else { + value = value[part]; + } + } else { + isOwn = hasOwn(value, part); + value = value[part]; + } + + if (isOwn && !skipFurtherCaching) { + INTRINSICS[intrinsicRealName] = value; + } + } + } + + return value; +}; + +},{"function-bind":7,"has":11,"has-symbols":9}],9:[function(require,module,exports){ +'use strict'; + +function _typeof(obj) { "@babel/helpers - typeof"; return _typeof = "function" == typeof Symbol && "symbol" == typeof Symbol.iterator ? function (obj) { return typeof obj; } : function (obj) { return obj && "function" == typeof Symbol && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }, _typeof(obj); } + +var origSymbol = typeof Symbol !== 'undefined' && Symbol; + +var hasSymbolSham = require('./shams'); + +module.exports = function hasNativeSymbols() { + if (typeof origSymbol !== 'function') { + return false; + } + + if (typeof Symbol !== 'function') { + return false; + } + + if (_typeof(origSymbol('foo')) !== 'symbol') { + return false; + } + + if (_typeof(Symbol('bar')) !== 'symbol') { + return false; + } + + return hasSymbolSham(); +}; + +},{"./shams":10}],10:[function(require,module,exports){ +'use strict'; + +function _typeof(obj) { "@babel/helpers - typeof"; return _typeof = "function" == typeof Symbol && "symbol" == typeof Symbol.iterator ? function (obj) { return typeof obj; } : function (obj) { return obj && "function" == typeof Symbol && obj.constructor === Symbol && obj !== Symbol.prototype ? 
"symbol" : typeof obj; }, _typeof(obj); } + +module.exports = function hasSymbols() { + if (typeof Symbol !== 'function' || typeof Object.getOwnPropertySymbols !== 'function') { + return false; + } + + if (_typeof(Symbol.iterator) === 'symbol') { + return true; + } + + var obj = {}; + var sym = Symbol('test'); + var symObj = Object(sym); + + if (typeof sym === 'string') { + return false; + } + + if (Object.prototype.toString.call(sym) !== '[object Symbol]') { + return false; + } + + if (Object.prototype.toString.call(symObj) !== '[object Symbol]') { + return false; + } + + var symVal = 42; + obj[sym] = symVal; + + for (sym in obj) { + return false; + } + + if (typeof Object.keys === 'function' && Object.keys(obj).length !== 0) { + return false; + } + + if (typeof Object.getOwnPropertyNames === 'function' && Object.getOwnPropertyNames(obj).length !== 0) { + return false; + } + + var syms = Object.getOwnPropertySymbols(obj); + + if (syms.length !== 1 || syms[0] !== sym) { + return false; + } + + if (!Object.prototype.propertyIsEnumerable.call(obj, sym)) { + return false; + } + + if (typeof Object.getOwnPropertyDescriptor === 'function') { + var descriptor = Object.getOwnPropertyDescriptor(obj, sym); + + if (descriptor.value !== symVal || descriptor.enumerable !== true) { + return false; + } + } + + return true; +}; + +},{}],11:[function(require,module,exports){ +'use strict'; + +var bind = require('function-bind'); + +module.exports = bind.call(Function.call, Object.prototype.hasOwnProperty); + +},{"function-bind":7}],12:[function(require,module,exports){ +"use strict"; + +function _typeof(obj) { "@babel/helpers - typeof"; return _typeof = "function" == typeof Symbol && "symbol" == typeof Symbol.iterator ? function (obj) { return typeof obj; } : function (obj) { return obj && "function" == typeof Symbol && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }, _typeof(obj); } + +var hasMap = typeof Map === 'function' && Map.prototype; +var mapSizeDescriptor = Object.getOwnPropertyDescriptor && hasMap ? Object.getOwnPropertyDescriptor(Map.prototype, 'size') : null; +var mapSize = hasMap && mapSizeDescriptor && typeof mapSizeDescriptor.get === 'function' ? mapSizeDescriptor.get : null; +var mapForEach = hasMap && Map.prototype.forEach; +var hasSet = typeof Set === 'function' && Set.prototype; +var setSizeDescriptor = Object.getOwnPropertyDescriptor && hasSet ? Object.getOwnPropertyDescriptor(Set.prototype, 'size') : null; +var setSize = hasSet && setSizeDescriptor && typeof setSizeDescriptor.get === 'function' ? setSizeDescriptor.get : null; +var setForEach = hasSet && Set.prototype.forEach; +var hasWeakMap = typeof WeakMap === 'function' && WeakMap.prototype; +var weakMapHas = hasWeakMap ? WeakMap.prototype.has : null; +var hasWeakSet = typeof WeakSet === 'function' && WeakSet.prototype; +var weakSetHas = hasWeakSet ? WeakSet.prototype.has : null; +var hasWeakRef = typeof WeakRef === 'function' && WeakRef.prototype; +var weakRefDeref = hasWeakRef ? 
WeakRef.prototype.deref : null; +var booleanValueOf = Boolean.prototype.valueOf; +var objectToString = Object.prototype.toString; +var functionToString = Function.prototype.toString; +var $match = String.prototype.match; +var $slice = String.prototype.slice; +var $replace = String.prototype.replace; +var $toUpperCase = String.prototype.toUpperCase; +var $toLowerCase = String.prototype.toLowerCase; +var $test = RegExp.prototype.test; +var $concat = Array.prototype.concat; +var $join = Array.prototype.join; +var $arrSlice = Array.prototype.slice; +var $floor = Math.floor; +var bigIntValueOf = typeof BigInt === 'function' ? BigInt.prototype.valueOf : null; +var gOPS = Object.getOwnPropertySymbols; +var symToString = typeof Symbol === 'function' && _typeof(Symbol.iterator) === 'symbol' ? Symbol.prototype.toString : null; +var hasShammedSymbols = typeof Symbol === 'function' && _typeof(Symbol.iterator) === 'object'; +var toStringTag = typeof Symbol === 'function' && Symbol.toStringTag && (_typeof(Symbol.toStringTag) === hasShammedSymbols ? 'object' : 'symbol') ? Symbol.toStringTag : null; +var isEnumerable = Object.prototype.propertyIsEnumerable; +var gPO = (typeof Reflect === 'function' ? Reflect.getPrototypeOf : Object.getPrototypeOf) || ([].__proto__ === Array.prototype ? function (O) { + return O.__proto__; +} : null); + +function addNumericSeparator(num, str) { + if (num === Infinity || num === -Infinity || num !== num || num && num > -1000 && num < 1000 || $test.call(/e/, str)) { + return str; + } + + var sepRegex = /[0-9](?=(?:[0-9]{3})+(?![0-9]))/g; + + if (typeof num === 'number') { + var int = num < 0 ? -$floor(-num) : $floor(num); + + if (int !== num) { + var intStr = String(int); + var dec = $slice.call(str, intStr.length + 1); + return $replace.call(intStr, sepRegex, '$&_') + '.' + $replace.call($replace.call(dec, /([0-9]{3})/g, '$&_'), /_$/, ''); + } + } + + return $replace.call(str, sepRegex, '$&_'); +} + +var utilInspect = require('./util.inspect'); + +var inspectCustom = utilInspect.custom; +var inspectSymbol = isSymbol(inspectCustom) ? inspectCustom : null; + +module.exports = function inspect_(obj, options, depth, seen) { + var opts = options || {}; + + if (has(opts, 'quoteStyle') && opts.quoteStyle !== 'single' && opts.quoteStyle !== 'double') { + throw new TypeError('option "quoteStyle" must be "single" or "double"'); + } + + if (has(opts, 'maxStringLength') && (typeof opts.maxStringLength === 'number' ? opts.maxStringLength < 0 && opts.maxStringLength !== Infinity : opts.maxStringLength !== null)) { + throw new TypeError('option "maxStringLength", if provided, must be a positive integer, Infinity, or `null`'); + } + + var customInspect = has(opts, 'customInspect') ? 
opts.customInspect : true; + + if (typeof customInspect !== 'boolean' && customInspect !== 'symbol') { + throw new TypeError('option "customInspect", if provided, must be `true`, `false`, or `\'symbol\'`'); + } + + if (has(opts, 'indent') && opts.indent !== null && opts.indent !== '\t' && !(parseInt(opts.indent, 10) === opts.indent && opts.indent > 0)) { + throw new TypeError('option "indent" must be "\\t", an integer > 0, or `null`'); + } + + if (has(opts, 'numericSeparator') && typeof opts.numericSeparator !== 'boolean') { + throw new TypeError('option "numericSeparator", if provided, must be `true` or `false`'); + } + + var numericSeparator = opts.numericSeparator; + + if (typeof obj === 'undefined') { + return 'undefined'; + } + + if (obj === null) { + return 'null'; + } + + if (typeof obj === 'boolean') { + return obj ? 'true' : 'false'; + } + + if (typeof obj === 'string') { + return inspectString(obj, opts); + } + + if (typeof obj === 'number') { + if (obj === 0) { + return Infinity / obj > 0 ? '0' : '-0'; + } + + var str = String(obj); + return numericSeparator ? addNumericSeparator(obj, str) : str; + } + + if (typeof obj === 'bigint') { + var bigIntStr = String(obj) + 'n'; + return numericSeparator ? addNumericSeparator(obj, bigIntStr) : bigIntStr; + } + + var maxDepth = typeof opts.depth === 'undefined' ? 5 : opts.depth; + + if (typeof depth === 'undefined') { + depth = 0; + } + + if (depth >= maxDepth && maxDepth > 0 && _typeof(obj) === 'object') { + return isArray(obj) ? '[Array]' : '[Object]'; + } + + var indent = getIndent(opts, depth); + + if (typeof seen === 'undefined') { + seen = []; + } else if (indexOf(seen, obj) >= 0) { + return '[Circular]'; + } + + function inspect(value, from, noIndent) { + if (from) { + seen = $arrSlice.call(seen); + seen.push(from); + } + + if (noIndent) { + var newOpts = { + depth: opts.depth + }; + + if (has(opts, 'quoteStyle')) { + newOpts.quoteStyle = opts.quoteStyle; + } + + return inspect_(value, newOpts, depth + 1, seen); + } + + return inspect_(value, opts, depth + 1, seen); + } + + if (typeof obj === 'function' && !isRegExp(obj)) { + var name = nameOf(obj); + var keys = arrObjKeys(obj, inspect); + return '[Function' + (name ? ': ' + name : ' (anonymous)') + ']' + (keys.length > 0 ? ' { ' + $join.call(keys, ', ') + ' }' : ''); + } + + if (isSymbol(obj)) { + var symString = hasShammedSymbols ? $replace.call(String(obj), /^(Symbol\(.*\))_[^)]*$/, '$1') : symToString.call(obj); + return _typeof(obj) === 'object' && !hasShammedSymbols ? 
markBoxed(symString) : symString; + } + + if (isElement(obj)) { + var s = '<' + $toLowerCase.call(String(obj.nodeName)); + var attrs = obj.attributes || []; + + for (var i = 0; i < attrs.length; i++) { + s += ' ' + attrs[i].name + '=' + wrapQuotes(quote(attrs[i].value), 'double', opts); + } + + s += '>'; + + if (obj.childNodes && obj.childNodes.length) { + s += '...'; + } + + s += '</' + $toLowerCase.call(String(obj.nodeName)) + '>'; + return s; + } + + if (isArray(obj)) { + if (obj.length === 0) { + return '[]'; + } + + var xs = arrObjKeys(obj, inspect); + + if (indent && !singleLineValues(xs)) { + return '[' + indentedJoin(xs, indent) + ']'; + } + + return '[ ' + $join.call(xs, ', ') + ' ]'; + } + + if (isError(obj)) { + var parts = arrObjKeys(obj, inspect); + + if (!('cause' in Error.prototype) && 'cause' in obj && !isEnumerable.call(obj, 'cause')) { + return '{ [' + String(obj) + '] ' + $join.call($concat.call('[cause]: ' + inspect(obj.cause), parts), ', ') + ' }'; + } + + if (parts.length === 0) { + return '[' + String(obj) + ']'; + } + + return '{ [' + String(obj) + '] ' + $join.call(parts, ', ') + ' }'; + } + + if (_typeof(obj) === 'object' && customInspect) { + if (inspectSymbol && typeof obj[inspectSymbol] === 'function' && utilInspect) { + return utilInspect(obj, { + depth: maxDepth - depth + }); + } else if (customInspect !== 'symbol' && typeof obj.inspect === 'function') { + return obj.inspect(); + } + } + + if (isMap(obj)) { + var mapParts = []; + mapForEach.call(obj, function (value, key) { + mapParts.push(inspect(key, obj, true) + ' => ' + inspect(value, obj)); + }); + return collectionOf('Map', mapSize.call(obj), mapParts, indent); + } + + if (isSet(obj)) { + var setParts = []; + setForEach.call(obj, function (value) { + setParts.push(inspect(value, obj)); + }); + return collectionOf('Set', setSize.call(obj), setParts, indent); + } + + if (isWeakMap(obj)) { + return weakCollectionOf('WeakMap'); + } + + if (isWeakSet(obj)) { + return weakCollectionOf('WeakSet'); + } + + if (isWeakRef(obj)) { + return weakCollectionOf('WeakRef'); + } + + if (isNumber(obj)) { + return markBoxed(inspect(Number(obj))); + } + + if (isBigInt(obj)) { + return markBoxed(inspect(bigIntValueOf.call(obj))); + } + + if (isBoolean(obj)) { + return markBoxed(booleanValueOf.call(obj)); + } + + if (isString(obj)) { + return markBoxed(inspect(String(obj))); + } + + if (!isDate(obj) && !isRegExp(obj)) { + var ys = arrObjKeys(obj, inspect); + var isPlainObject = gPO ? gPO(obj) === Object.prototype : obj instanceof Object || obj.constructor === Object; + var protoTag = obj instanceof Object ? '' : 'null prototype'; + var stringTag = !isPlainObject && toStringTag && Object(obj) === obj && toStringTag in obj ? $slice.call(toStr(obj), 8, -1) : protoTag ? 'Object' : ''; + var constructorTag = isPlainObject || typeof obj.constructor !== 'function' ? '' : obj.constructor.name ? obj.constructor.name + ' ' : ''; + var tag = constructorTag + (stringTag || protoTag ? '[' + $join.call($concat.call([], stringTag || [], protoTag || []), ': ') + '] ' : ''); + + if (ys.length === 0) { + return tag + '{}'; + } + + if (indent) { + return tag + '{' + indentedJoin(ys, indent) + '}'; + } + + return tag + '{ ' + $join.call(ys, ', ') + ' }'; + } + + return String(obj); +}; + +function wrapQuotes(s, defaultStyle, opts) { + var quoteChar = (opts.quoteStyle || defaultStyle) === 'double' ? 
'"' : "'"; + return quoteChar + s + quoteChar; +} + +function quote(s) { + return $replace.call(String(s), /"/g, '&quot;'); +} + +function isArray(obj) { + return toStr(obj) === '[object Array]' && (!toStringTag || !(_typeof(obj) === 'object' && toStringTag in obj)); +} + +function isDate(obj) { + return toStr(obj) === '[object Date]' && (!toStringTag || !(_typeof(obj) === 'object' && toStringTag in obj)); +} + +function isRegExp(obj) { + return toStr(obj) === '[object RegExp]' && (!toStringTag || !(_typeof(obj) === 'object' && toStringTag in obj)); +} + +function isError(obj) { + return toStr(obj) === '[object Error]' && (!toStringTag || !(_typeof(obj) === 'object' && toStringTag in obj)); +} + +function isString(obj) { + return toStr(obj) === '[object String]' && (!toStringTag || !(_typeof(obj) === 'object' && toStringTag in obj)); +} + +function isNumber(obj) { + return toStr(obj) === '[object Number]' && (!toStringTag || !(_typeof(obj) === 'object' && toStringTag in obj)); +} + +function isBoolean(obj) { + return toStr(obj) === '[object Boolean]' && (!toStringTag || !(_typeof(obj) === 'object' && toStringTag in obj)); +} + +function isSymbol(obj) { + if (hasShammedSymbols) { + return obj && _typeof(obj) === 'object' && obj instanceof Symbol; + } + + if (_typeof(obj) === 'symbol') { + return true; + } + + if (!obj || _typeof(obj) !== 'object' || !symToString) { + return false; + } + + try { + symToString.call(obj); + return true; + } catch (e) {} + + return false; +} + +function isBigInt(obj) { + if (!obj || _typeof(obj) !== 'object' || !bigIntValueOf) { + return false; + } + + try { + bigIntValueOf.call(obj); + return true; + } catch (e) {} + + return false; +} + +var hasOwn = Object.prototype.hasOwnProperty || function (key) { + return key in this; +}; + +function has(obj, key) { + return hasOwn.call(obj, key); +} + +function toStr(obj) { + return objectToString.call(obj); +} + +function nameOf(f) { + if (f.name) { + return f.name; + } + + var m = $match.call(functionToString.call(f), /^function\s*([\w$]+)/); + + if (m) { + return m[1]; + } + + return null; +} + +function indexOf(xs, x) { + if (xs.indexOf) { + return xs.indexOf(x); + } + + for (var i = 0, l = xs.length; i < l; i++) { + if (xs[i] === x) { + return i; + } + } + + return -1; +} + +function isMap(x) { + if (!mapSize || !x || _typeof(x) !== 'object') { + return false; + } + + try { + mapSize.call(x); + + try { + setSize.call(x); + } catch (s) { + return true; + } + + return x instanceof Map; + } catch (e) {} + + return false; +} + +function isWeakMap(x) { + if (!weakMapHas || !x || _typeof(x) !== 'object') { + return false; + } + + try { + weakMapHas.call(x, weakMapHas); + + try { + weakSetHas.call(x, weakSetHas); + } catch (s) { + return true; + } + + return x instanceof WeakMap; + } catch (e) {} + + return false; +} + +function isWeakRef(x) { + if (!weakRefDeref || !x || _typeof(x) !== 'object') { + return false; + } + + try { + weakRefDeref.call(x); + return true; + } catch (e) {} + + return false; +} + +function isSet(x) { + if (!setSize || !x || _typeof(x) !== 'object') { + return false; + } + + try { + setSize.call(x); + + try { + mapSize.call(x); + } catch (m) { + return true; + } + + return x instanceof Set; + } catch (e) {} + + return false; +} + +function isWeakSet(x) { + if (!weakSetHas || !x || _typeof(x) !== 'object') { + return false; + } + + try { + weakSetHas.call(x, weakSetHas); + + try { + weakMapHas.call(x, weakMapHas); + } catch (s) { + return true; + } + + return x instanceof WeakSet; + } catch (e) {} + + 
return false; +} + +function isElement(x) { + if (!x || _typeof(x) !== 'object') { + return false; + } + + if (typeof HTMLElement !== 'undefined' && x instanceof HTMLElement) { + return true; + } + + return typeof x.nodeName === 'string' && typeof x.getAttribute === 'function'; +} + +function inspectString(str, opts) { + if (str.length > opts.maxStringLength) { + var remaining = str.length - opts.maxStringLength; + var trailer = '... ' + remaining + ' more character' + (remaining > 1 ? 's' : ''); + return inspectString($slice.call(str, 0, opts.maxStringLength), opts) + trailer; + } + + var s = $replace.call($replace.call(str, /(['\\])/g, '\\$1'), /[\x00-\x1f]/g, lowbyte); + return wrapQuotes(s, 'single', opts); +} + +function lowbyte(c) { + var n = c.charCodeAt(0); + var x = { + 8: 'b', + 9: 't', + 10: 'n', + 12: 'f', + 13: 'r' + }[n]; + + if (x) { + return '\\' + x; + } + + return '\\x' + (n < 0x10 ? '0' : '') + $toUpperCase.call(n.toString(16)); +} + +function markBoxed(str) { + return 'Object(' + str + ')'; +} + +function weakCollectionOf(type) { + return type + ' { ? }'; +} + +function collectionOf(type, size, entries, indent) { + var joinedEntries = indent ? indentedJoin(entries, indent) : $join.call(entries, ', '); + return type + ' (' + size + ') {' + joinedEntries + '}'; +} + +function singleLineValues(xs) { + for (var i = 0; i < xs.length; i++) { + if (indexOf(xs[i], '\n') >= 0) { + return false; + } + } + + return true; +} + +function getIndent(opts, depth) { + var baseIndent; + + if (opts.indent === '\t') { + baseIndent = '\t'; + } else if (typeof opts.indent === 'number' && opts.indent > 0) { + baseIndent = $join.call(Array(opts.indent + 1), ' '); + } else { + return null; + } + + return { + base: baseIndent, + prev: $join.call(Array(depth + 1), baseIndent) + }; +} + +function indentedJoin(xs, indent) { + if (xs.length === 0) { + return ''; + } + + var lineJoiner = '\n' + indent.prev + indent.base; + return lineJoiner + $join.call(xs, ',' + lineJoiner) + '\n' + indent.prev; +} + +function arrObjKeys(obj, inspect) { + var isArr = isArray(obj); + var xs = []; + + if (isArr) { + xs.length = obj.length; + + for (var i = 0; i < obj.length; i++) { + xs[i] = has(obj, i) ? inspect(obj[i], obj) : ''; + } + } + + var syms = typeof gOPS === 'function' ? 
gOPS(obj) : []; + var symMap; + + if (hasShammedSymbols) { + symMap = {}; + + for (var k = 0; k < syms.length; k++) { + symMap['$' + syms[k]] = syms[k]; + } + } + + for (var key in obj) { + if (!has(obj, key)) { + continue; + } + + if (isArr && String(Number(key)) === key && key < obj.length) { + continue; + } + + if (hasShammedSymbols && symMap['$' + key] instanceof Symbol) { + continue; + } else if ($test.call(/[^\w$]/, key)) { + xs.push(inspect(key, obj) + ': ' + inspect(obj[key], obj)); + } else { + xs.push(key + ': ' + inspect(obj[key], obj)); + } + } + + if (typeof gOPS === 'function') { + for (var j = 0; j < syms.length; j++) { + if (isEnumerable.call(obj, syms[j])) { + xs.push('[' + inspect(syms[j]) + ']: ' + inspect(obj[syms[j]], obj)); + } + } + } + + return xs; +} + +},{"./util.inspect":1}],13:[function(require,module,exports){ +"use strict"; + +var process = module.exports = {}; +var cachedSetTimeout; +var cachedClearTimeout; + +function defaultSetTimout() { + throw new Error('setTimeout has not been defined'); +} + +function defaultClearTimeout() { + throw new Error('clearTimeout has not been defined'); +} + +(function () { + try { + if (typeof setTimeout === 'function') { + cachedSetTimeout = setTimeout; + } else { + cachedSetTimeout = defaultSetTimout; + } + } catch (e) { + cachedSetTimeout = defaultSetTimout; + } + + try { + if (typeof clearTimeout === 'function') { + cachedClearTimeout = clearTimeout; + } else { + cachedClearTimeout = defaultClearTimeout; + } + } catch (e) { + cachedClearTimeout = defaultClearTimeout; + } +})(); + +function runTimeout(fun) { + if (cachedSetTimeout === setTimeout) { + return setTimeout(fun, 0); + } + + if ((cachedSetTimeout === defaultSetTimout || !cachedSetTimeout) && setTimeout) { + cachedSetTimeout = setTimeout; + return setTimeout(fun, 0); + } + + try { + return cachedSetTimeout(fun, 0); + } catch (e) { + try { + return cachedSetTimeout.call(null, fun, 0); + } catch (e) { + return cachedSetTimeout.call(this, fun, 0); + } + } +} + +function runClearTimeout(marker) { + if (cachedClearTimeout === clearTimeout) { + return clearTimeout(marker); + } + + if ((cachedClearTimeout === defaultClearTimeout || !cachedClearTimeout) && clearTimeout) { + cachedClearTimeout = clearTimeout; + return clearTimeout(marker); + } + + try { + return cachedClearTimeout(marker); + } catch (e) { + try { + return cachedClearTimeout.call(null, marker); + } catch (e) { + return cachedClearTimeout.call(this, marker); + } + } +} + +var queue = []; +var draining = false; +var currentQueue; +var queueIndex = -1; + +function cleanUpNextTick() { + if (!draining || !currentQueue) { + return; + } + + draining = false; + + if (currentQueue.length) { + queue = currentQueue.concat(queue); + } else { + queueIndex = -1; + } + + if (queue.length) { + drainQueue(); + } +} + +function drainQueue() { + if (draining) { + return; + } + + var timeout = runTimeout(cleanUpNextTick); + draining = true; + var len = queue.length; + + while (len) { + currentQueue = queue; + queue = []; + + while (++queueIndex < len) { + if (currentQueue) { + currentQueue[queueIndex].run(); + } + } + + queueIndex = -1; + len = queue.length; + } + + currentQueue = null; + draining = false; + runClearTimeout(timeout); +} + +process.nextTick = function (fun) { + var args = new Array(arguments.length - 1); + + if (arguments.length > 1) { + for (var i = 1; i < arguments.length; i++) { + args[i - 1] = arguments[i]; + } + } + + queue.push(new Item(fun, args)); + + if (queue.length === 1 && !draining) { + 
runTimeout(drainQueue); + } +}; + +function Item(fun, array) { + this.fun = fun; + this.array = array; +} + +Item.prototype.run = function () { + this.fun.apply(null, this.array); +}; + +process.title = 'browser'; +process.browser = true; +process.env = {}; +process.argv = []; +process.version = ''; +process.versions = {}; + +function noop() {} + +process.on = noop; +process.addListener = noop; +process.once = noop; +process.off = noop; +process.removeListener = noop; +process.removeAllListeners = noop; +process.emit = noop; +process.prependListener = noop; +process.prependOnceListener = noop; + +process.listeners = function (name) { + return []; +}; + +process.binding = function (name) { + throw new Error('process.binding is not supported'); +}; + +process.cwd = function () { + return '/'; +}; + +process.chdir = function (dir) { + throw new Error('process.chdir is not supported'); +}; + +process.umask = function () { + return 0; +}; + +},{}],14:[function(require,module,exports){ +'use strict'; + +var replace = String.prototype.replace; +var percentTwenties = /%20/g; +var Format = { + RFC1738: 'RFC1738', + RFC3986: 'RFC3986' +}; +module.exports = { + 'default': Format.RFC3986, + formatters: { + RFC1738: function RFC1738(value) { + return replace.call(value, percentTwenties, '+'); + }, + RFC3986: function RFC3986(value) { + return String(value); + } + }, + RFC1738: Format.RFC1738, + RFC3986: Format.RFC3986 +}; + +},{}],15:[function(require,module,exports){ +'use strict'; + +var stringify = require('./stringify'); + +var parse = require('./parse'); + +var formats = require('./formats'); + +module.exports = { + formats: formats, + parse: parse, + stringify: stringify +}; + +},{"./formats":14,"./parse":16,"./stringify":17}],16:[function(require,module,exports){ +'use strict'; + +var utils = require('./utils'); + +var has = Object.prototype.hasOwnProperty; +var isArray = Array.isArray; +var defaults = { + allowDots: false, + allowPrototypes: false, + allowSparse: false, + arrayLimit: 20, + charset: 'utf-8', + charsetSentinel: false, + comma: false, + decoder: utils.decode, + delimiter: '&', + depth: 5, + ignoreQueryPrefix: false, + interpretNumericEntities: false, + parameterLimit: 1000, + parseArrays: true, + plainObjects: false, + strictNullHandling: false +}; + +var interpretNumericEntities = function interpretNumericEntities(str) { + return str.replace(/&#(\d+);/g, function ($0, numberStr) { + return String.fromCharCode(parseInt(numberStr, 10)); + }); +}; + +var parseArrayValue = function parseArrayValue(val, options) { + if (val && typeof val === 'string' && options.comma && val.indexOf(',') > -1) { + return val.split(','); + } + + return val; +}; + +var isoSentinel = 'utf8=%26%2310003%3B'; +var charsetSentinel = 'utf8=%E2%9C%93'; + +var parseValues = function parseQueryStringValues(str, options) { + var obj = {}; + var cleanStr = options.ignoreQueryPrefix ? str.replace(/^\?/, '') : str; + var limit = options.parameterLimit === Infinity ? 
undefined : options.parameterLimit; + var parts = cleanStr.split(options.delimiter, limit); + var skipIndex = -1; + var i; + var charset = options.charset; + + if (options.charsetSentinel) { + for (i = 0; i < parts.length; ++i) { + if (parts[i].indexOf('utf8=') === 0) { + if (parts[i] === charsetSentinel) { + charset = 'utf-8'; + } else if (parts[i] === isoSentinel) { + charset = 'iso-8859-1'; + } + + skipIndex = i; + i = parts.length; + } + } + } + + for (i = 0; i < parts.length; ++i) { + if (i === skipIndex) { + continue; + } + + var part = parts[i]; + var bracketEqualsPos = part.indexOf(']='); + var pos = bracketEqualsPos === -1 ? part.indexOf('=') : bracketEqualsPos + 1; + var key, val; + + if (pos === -1) { + key = options.decoder(part, defaults.decoder, charset, 'key'); + val = options.strictNullHandling ? null : ''; + } else { + key = options.decoder(part.slice(0, pos), defaults.decoder, charset, 'key'); + val = utils.maybeMap(parseArrayValue(part.slice(pos + 1), options), function (encodedVal) { + return options.decoder(encodedVal, defaults.decoder, charset, 'value'); + }); + } + + if (val && options.interpretNumericEntities && charset === 'iso-8859-1') { + val = interpretNumericEntities(val); + } + + if (part.indexOf('[]=') > -1) { + val = isArray(val) ? [val] : val; + } + + if (has.call(obj, key)) { + obj[key] = utils.combine(obj[key], val); + } else { + obj[key] = val; + } + } + + return obj; +}; + +var parseObject = function parseObject(chain, val, options, valuesParsed) { + var leaf = valuesParsed ? val : parseArrayValue(val, options); + + for (var i = chain.length - 1; i >= 0; --i) { + var obj; + var root = chain[i]; + + if (root === '[]' && options.parseArrays) { + obj = [].concat(leaf); + } else { + obj = options.plainObjects ? Object.create(null) : {}; + var cleanRoot = root.charAt(0) === '[' && root.charAt(root.length - 1) === ']' ? root.slice(1, -1) : root; + var index = parseInt(cleanRoot, 10); + + if (!options.parseArrays && cleanRoot === '') { + obj = { + 0: leaf + }; + } else if (!isNaN(index) && root !== cleanRoot && String(index) === cleanRoot && index >= 0 && options.parseArrays && index <= options.arrayLimit) { + obj = []; + obj[index] = leaf; + } else if (cleanRoot !== '__proto__') { + obj[cleanRoot] = leaf; + } + } + + leaf = obj; + } + + return leaf; +}; + +var parseKeys = function parseQueryStringKeys(givenKey, val, options, valuesParsed) { + if (!givenKey) { + return; + } + + var key = options.allowDots ? givenKey.replace(/\.([^.[]+)/g, '[$1]') : givenKey; + var brackets = /(\[[^[\]]*])/; + var child = /(\[[^[\]]*])/g; + var segment = options.depth > 0 && brackets.exec(key); + var parent = segment ? 
key.slice(0, segment.index) : key; + var keys = []; + + if (parent) { + if (!options.plainObjects && has.call(Object.prototype, parent)) { + if (!options.allowPrototypes) { + return; + } + } + + keys.push(parent); + } + + var i = 0; + + while (options.depth > 0 && (segment = child.exec(key)) !== null && i < options.depth) { + i += 1; + + if (!options.plainObjects && has.call(Object.prototype, segment[1].slice(1, -1))) { + if (!options.allowPrototypes) { + return; + } + } + + keys.push(segment[1]); + } + + if (segment) { + keys.push('[' + key.slice(segment.index) + ']'); + } + + return parseObject(keys, val, options, valuesParsed); +}; + +var normalizeParseOptions = function normalizeParseOptions(opts) { + if (!opts) { + return defaults; + } + + if (opts.decoder !== null && opts.decoder !== undefined && typeof opts.decoder !== 'function') { + throw new TypeError('Decoder has to be a function.'); + } + + if (typeof opts.charset !== 'undefined' && opts.charset !== 'utf-8' && opts.charset !== 'iso-8859-1') { + throw new TypeError('The charset option must be either utf-8, iso-8859-1, or undefined'); + } + + var charset = typeof opts.charset === 'undefined' ? defaults.charset : opts.charset; + return { + allowDots: typeof opts.allowDots === 'undefined' ? defaults.allowDots : !!opts.allowDots, + allowPrototypes: typeof opts.allowPrototypes === 'boolean' ? opts.allowPrototypes : defaults.allowPrototypes, + allowSparse: typeof opts.allowSparse === 'boolean' ? opts.allowSparse : defaults.allowSparse, + arrayLimit: typeof opts.arrayLimit === 'number' ? opts.arrayLimit : defaults.arrayLimit, + charset: charset, + charsetSentinel: typeof opts.charsetSentinel === 'boolean' ? opts.charsetSentinel : defaults.charsetSentinel, + comma: typeof opts.comma === 'boolean' ? opts.comma : defaults.comma, + decoder: typeof opts.decoder === 'function' ? opts.decoder : defaults.decoder, + delimiter: typeof opts.delimiter === 'string' || utils.isRegExp(opts.delimiter) ? opts.delimiter : defaults.delimiter, + depth: typeof opts.depth === 'number' || opts.depth === false ? +opts.depth : defaults.depth, + ignoreQueryPrefix: opts.ignoreQueryPrefix === true, + interpretNumericEntities: typeof opts.interpretNumericEntities === 'boolean' ? opts.interpretNumericEntities : defaults.interpretNumericEntities, + parameterLimit: typeof opts.parameterLimit === 'number' ? opts.parameterLimit : defaults.parameterLimit, + parseArrays: opts.parseArrays !== false, + plainObjects: typeof opts.plainObjects === 'boolean' ? opts.plainObjects : defaults.plainObjects, + strictNullHandling: typeof opts.strictNullHandling === 'boolean' ? opts.strictNullHandling : defaults.strictNullHandling + }; +}; + +module.exports = function (str, opts) { + var options = normalizeParseOptions(opts); + + if (str === '' || str === null || typeof str === 'undefined') { + return options.plainObjects ? Object.create(null) : {}; + } + + var tempObj = typeof str === 'string' ? parseValues(str, options) : str; + var obj = options.plainObjects ? 
Object.create(null) : {}; + var keys = Object.keys(tempObj); + + for (var i = 0; i < keys.length; ++i) { + var key = keys[i]; + var newObj = parseKeys(key, tempObj[key], options, typeof str === 'string'); + obj = utils.merge(obj, newObj, options); + } + + if (options.allowSparse === true) { + return obj; + } + + return utils.compact(obj); +}; + +},{"./utils":18}],17:[function(require,module,exports){ +'use strict'; + +function _typeof(obj) { "@babel/helpers - typeof"; return _typeof = "function" == typeof Symbol && "symbol" == typeof Symbol.iterator ? function (obj) { return typeof obj; } : function (obj) { return obj && "function" == typeof Symbol && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }, _typeof(obj); } + +var getSideChannel = require('side-channel'); + +var utils = require('./utils'); + +var formats = require('./formats'); + +var has = Object.prototype.hasOwnProperty; +var arrayPrefixGenerators = { + brackets: function brackets(prefix) { + return prefix + '[]'; + }, + comma: 'comma', + indices: function indices(prefix, key) { + return prefix + '[' + key + ']'; + }, + repeat: function repeat(prefix) { + return prefix; + } +}; +var isArray = Array.isArray; +var split = String.prototype.split; +var push = Array.prototype.push; + +var pushToArray = function pushToArray(arr, valueOrArray) { + push.apply(arr, isArray(valueOrArray) ? valueOrArray : [valueOrArray]); +}; + +var toISO = Date.prototype.toISOString; +var defaultFormat = formats['default']; +var defaults = { + addQueryPrefix: false, + allowDots: false, + charset: 'utf-8', + charsetSentinel: false, + delimiter: '&', + encode: true, + encoder: utils.encode, + encodeValuesOnly: false, + format: defaultFormat, + formatter: formats.formatters[defaultFormat], + indices: false, + serializeDate: function serializeDate(date) { + return toISO.call(date); + }, + skipNulls: false, + strictNullHandling: false +}; + +var isNonNullishPrimitive = function isNonNullishPrimitive(v) { + return typeof v === 'string' || typeof v === 'number' || typeof v === 'boolean' || _typeof(v) === 'symbol' || typeof v === 'bigint'; +}; + +var sentinel = {}; + +var stringify = function stringify(object, prefix, generateArrayPrefix, strictNullHandling, skipNulls, encoder, filter, sort, allowDots, serializeDate, format, formatter, encodeValuesOnly, charset, sideChannel) { + var obj = object; + var tmpSc = sideChannel; + var step = 0; + var findFlag = false; + + while ((tmpSc = tmpSc.get(sentinel)) !== void undefined && !findFlag) { + var pos = tmpSc.get(object); + step += 1; + + if (typeof pos !== 'undefined') { + if (pos === step) { + throw new RangeError('Cyclic object value'); + } else { + findFlag = true; + } + } + + if (typeof tmpSc.get(sentinel) === 'undefined') { + step = 0; + } + } + + if (typeof filter === 'function') { + obj = filter(prefix, obj); + } else if (obj instanceof Date) { + obj = serializeDate(obj); + } else if (generateArrayPrefix === 'comma' && isArray(obj)) { + obj = utils.maybeMap(obj, function (value) { + if (value instanceof Date) { + return serializeDate(value); + } + + return value; + }); + } + + if (obj === null) { + if (strictNullHandling) { + return encoder && !encodeValuesOnly ? encoder(prefix, defaults.encoder, charset, 'key', format) : prefix; + } + + obj = ''; + } + + if (isNonNullishPrimitive(obj) || utils.isBuffer(obj)) { + if (encoder) { + var keyValue = encodeValuesOnly ? 
prefix : encoder(prefix, defaults.encoder, charset, 'key', format); + + if (generateArrayPrefix === 'comma' && encodeValuesOnly) { + var valuesArray = split.call(String(obj), ','); + var valuesJoined = ''; + + for (var i = 0; i < valuesArray.length; ++i) { + valuesJoined += (i === 0 ? '' : ',') + formatter(encoder(valuesArray[i], defaults.encoder, charset, 'value', format)); + } + + return [formatter(keyValue) + '=' + valuesJoined]; + } + + return [formatter(keyValue) + '=' + formatter(encoder(obj, defaults.encoder, charset, 'value', format))]; + } + + return [formatter(prefix) + '=' + formatter(String(obj))]; + } + + var values = []; + + if (typeof obj === 'undefined') { + return values; + } + + var objKeys; + + if (generateArrayPrefix === 'comma' && isArray(obj)) { + objKeys = [{ + value: obj.length > 0 ? obj.join(',') || null : void undefined + }]; + } else if (isArray(filter)) { + objKeys = filter; + } else { + var keys = Object.keys(obj); + objKeys = sort ? keys.sort(sort) : keys; + } + + for (var j = 0; j < objKeys.length; ++j) { + var key = objKeys[j]; + var value = _typeof(key) === 'object' && typeof key.value !== 'undefined' ? key.value : obj[key]; + + if (skipNulls && value === null) { + continue; + } + + var keyPrefix = isArray(obj) ? typeof generateArrayPrefix === 'function' ? generateArrayPrefix(prefix, key) : prefix : prefix + (allowDots ? '.' + key : '[' + key + ']'); + sideChannel.set(object, step); + var valueSideChannel = getSideChannel(); + valueSideChannel.set(sentinel, sideChannel); + pushToArray(values, stringify(value, keyPrefix, generateArrayPrefix, strictNullHandling, skipNulls, encoder, filter, sort, allowDots, serializeDate, format, formatter, encodeValuesOnly, charset, valueSideChannel)); + } + + return values; +}; + +var normalizeStringifyOptions = function normalizeStringifyOptions(opts) { + if (!opts) { + return defaults; + } + + if (opts.encoder !== null && typeof opts.encoder !== 'undefined' && typeof opts.encoder !== 'function') { + throw new TypeError('Encoder has to be a function.'); + } + + var charset = opts.charset || defaults.charset; + + if (typeof opts.charset !== 'undefined' && opts.charset !== 'utf-8' && opts.charset !== 'iso-8859-1') { + throw new TypeError('The charset option must be either utf-8, iso-8859-1, or undefined'); + } + + var format = formats['default']; + + if (typeof opts.format !== 'undefined') { + if (!has.call(formats.formatters, opts.format)) { + throw new TypeError('Unknown format option provided.'); + } + + format = opts.format; + } + + var formatter = formats.formatters[format]; + var filter = defaults.filter; + + if (typeof opts.filter === 'function' || isArray(opts.filter)) { + filter = opts.filter; + } + + return { + addQueryPrefix: typeof opts.addQueryPrefix === 'boolean' ? opts.addQueryPrefix : defaults.addQueryPrefix, + allowDots: typeof opts.allowDots === 'undefined' ? defaults.allowDots : !!opts.allowDots, + charset: charset, + charsetSentinel: typeof opts.charsetSentinel === 'boolean' ? opts.charsetSentinel : defaults.charsetSentinel, + delimiter: typeof opts.delimiter === 'undefined' ? defaults.delimiter : opts.delimiter, + encode: typeof opts.encode === 'boolean' ? opts.encode : defaults.encode, + encoder: typeof opts.encoder === 'function' ? opts.encoder : defaults.encoder, + encodeValuesOnly: typeof opts.encodeValuesOnly === 'boolean' ? opts.encodeValuesOnly : defaults.encodeValuesOnly, + filter: filter, + format: format, + formatter: formatter, + serializeDate: typeof opts.serializeDate === 'function' ? 
opts.serializeDate : defaults.serializeDate, + skipNulls: typeof opts.skipNulls === 'boolean' ? opts.skipNulls : defaults.skipNulls, + sort: typeof opts.sort === 'function' ? opts.sort : null, + strictNullHandling: typeof opts.strictNullHandling === 'boolean' ? opts.strictNullHandling : defaults.strictNullHandling + }; +}; + +module.exports = function (object, opts) { + var obj = object; + var options = normalizeStringifyOptions(opts); + var objKeys; + var filter; + + if (typeof options.filter === 'function') { + filter = options.filter; + obj = filter('', obj); + } else if (isArray(options.filter)) { + filter = options.filter; + objKeys = filter; + } + + var keys = []; + + if (_typeof(obj) !== 'object' || obj === null) { + return ''; + } + + var arrayFormat; + + if (opts && opts.arrayFormat in arrayPrefixGenerators) { + arrayFormat = opts.arrayFormat; + } else if (opts && 'indices' in opts) { + arrayFormat = opts.indices ? 'indices' : 'repeat'; + } else { + arrayFormat = 'indices'; + } + + var generateArrayPrefix = arrayPrefixGenerators[arrayFormat]; + + if (!objKeys) { + objKeys = Object.keys(obj); + } + + if (options.sort) { + objKeys.sort(options.sort); + } + + var sideChannel = getSideChannel(); + + for (var i = 0; i < objKeys.length; ++i) { + var key = objKeys[i]; + + if (options.skipNulls && obj[key] === null) { + continue; + } + + pushToArray(keys, stringify(obj[key], key, generateArrayPrefix, options.strictNullHandling, options.skipNulls, options.encode ? options.encoder : null, options.filter, options.sort, options.allowDots, options.serializeDate, options.format, options.formatter, options.encodeValuesOnly, options.charset, sideChannel)); + } + + var joined = keys.join(options.delimiter); + var prefix = options.addQueryPrefix === true ? '?' : ''; + + if (options.charsetSentinel) { + if (options.charset === 'iso-8859-1') { + prefix += 'utf8=%26%2310003%3B&'; + } else { + prefix += 'utf8=%E2%9C%93&'; + } + } + + return joined.length > 0 ? prefix + joined : ''; +}; + +},{"./formats":14,"./utils":18,"side-channel":19}],18:[function(require,module,exports){ +'use strict'; + +function _typeof(obj) { "@babel/helpers - typeof"; return _typeof = "function" == typeof Symbol && "symbol" == typeof Symbol.iterator ? function (obj) { return typeof obj; } : function (obj) { return obj && "function" == typeof Symbol && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }, _typeof(obj); } + +var formats = require('./formats'); + +var has = Object.prototype.hasOwnProperty; +var isArray = Array.isArray; + +var hexTable = function () { + var array = []; + + for (var i = 0; i < 256; ++i) { + array.push('%' + ((i < 16 ? '0' : '') + i.toString(16)).toUpperCase()); + } + + return array; +}(); + +var compactQueue = function compactQueue(queue) { + while (queue.length > 1) { + var item = queue.pop(); + var obj = item.obj[item.prop]; + + if (isArray(obj)) { + var compacted = []; + + for (var j = 0; j < obj.length; ++j) { + if (typeof obj[j] !== 'undefined') { + compacted.push(obj[j]); + } + } + + item.obj[item.prop] = compacted; + } + } +}; + +var arrayToObject = function arrayToObject(source, options) { + var obj = options && options.plainObjects ? 
Object.create(null) : {}; + + for (var i = 0; i < source.length; ++i) { + if (typeof source[i] !== 'undefined') { + obj[i] = source[i]; + } + } + + return obj; +}; + +var merge = function merge(target, source, options) { + if (!source) { + return target; + } + + if (_typeof(source) !== 'object') { + if (isArray(target)) { + target.push(source); + } else if (target && _typeof(target) === 'object') { + if (options && (options.plainObjects || options.allowPrototypes) || !has.call(Object.prototype, source)) { + target[source] = true; + } + } else { + return [target, source]; + } + + return target; + } + + if (!target || _typeof(target) !== 'object') { + return [target].concat(source); + } + + var mergeTarget = target; + + if (isArray(target) && !isArray(source)) { + mergeTarget = arrayToObject(target, options); + } + + if (isArray(target) && isArray(source)) { + source.forEach(function (item, i) { + if (has.call(target, i)) { + var targetItem = target[i]; + + if (targetItem && _typeof(targetItem) === 'object' && item && _typeof(item) === 'object') { + target[i] = merge(targetItem, item, options); + } else { + target.push(item); + } + } else { + target[i] = item; + } + }); + return target; + } + + return Object.keys(source).reduce(function (acc, key) { + var value = source[key]; + + if (has.call(acc, key)) { + acc[key] = merge(acc[key], value, options); + } else { + acc[key] = value; + } + + return acc; + }, mergeTarget); +}; + +var assign = function assignSingleSource(target, source) { + return Object.keys(source).reduce(function (acc, key) { + acc[key] = source[key]; + return acc; + }, target); +}; + +var decode = function decode(str, decoder, charset) { + var strWithoutPlus = str.replace(/\+/g, ' '); + + if (charset === 'iso-8859-1') { + return strWithoutPlus.replace(/%[0-9a-f]{2}/gi, unescape); + } + + try { + return decodeURIComponent(strWithoutPlus); + } catch (e) { + return strWithoutPlus; + } +}; + +var encode = function encode(str, defaultEncoder, charset, kind, format) { + if (str.length === 0) { + return str; + } + + var string = str; + + if (_typeof(str) === 'symbol') { + string = Symbol.prototype.toString.call(str); + } else if (typeof str !== 'string') { + string = String(str); + } + + if (charset === 'iso-8859-1') { + return escape(string).replace(/%u[0-9a-f]{4}/gi, function ($0) { + return '%26%23' + parseInt($0.slice(2), 16) + '%3B'; + }); + } + + var out = ''; + + for (var i = 0; i < string.length; ++i) { + var c = string.charCodeAt(i); + + if (c === 0x2D || c === 0x2E || c === 0x5F || c === 0x7E || c >= 0x30 && c <= 0x39 || c >= 0x41 && c <= 0x5A || c >= 0x61 && c <= 0x7A || format === formats.RFC1738 && (c === 0x28 || c === 0x29)) { + out += string.charAt(i); + continue; + } + + if (c < 0x80) { + out = out + hexTable[c]; + continue; + } + + if (c < 0x800) { + out = out + (hexTable[0xC0 | c >> 6] + hexTable[0x80 | c & 0x3F]); + continue; + } + + if (c < 0xD800 || c >= 0xE000) { + out = out + (hexTable[0xE0 | c >> 12] + hexTable[0x80 | c >> 6 & 0x3F] + hexTable[0x80 | c & 0x3F]); + continue; + } + + i += 1; + c = 0x10000 + ((c & 0x3FF) << 10 | string.charCodeAt(i) & 0x3FF); + out += hexTable[0xF0 | c >> 18] + hexTable[0x80 | c >> 12 & 0x3F] + hexTable[0x80 | c >> 6 & 0x3F] + hexTable[0x80 | c & 0x3F]; + } + + return out; +}; + +var compact = function compact(value) { + var queue = [{ + obj: { + o: value + }, + prop: 'o' + }]; + var refs = []; + + for (var i = 0; i < queue.length; ++i) { + var item = queue[i]; + var obj = item.obj[item.prop]; + var keys = Object.keys(obj); + + 
for (var j = 0; j < keys.length; ++j) { + var key = keys[j]; + var val = obj[key]; + + if (_typeof(val) === 'object' && val !== null && refs.indexOf(val) === -1) { + queue.push({ + obj: obj, + prop: key + }); + refs.push(val); + } + } + } + + compactQueue(queue); + return value; +}; + +var isRegExp = function isRegExp(obj) { + return Object.prototype.toString.call(obj) === '[object RegExp]'; +}; + +var isBuffer = function isBuffer(obj) { + if (!obj || _typeof(obj) !== 'object') { + return false; + } + + return !!(obj.constructor && obj.constructor.isBuffer && obj.constructor.isBuffer(obj)); +}; + +var combine = function combine(a, b) { + return [].concat(a, b); +}; + +var maybeMap = function maybeMap(val, fn) { + if (isArray(val)) { + var mapped = []; + + for (var i = 0; i < val.length; i += 1) { + mapped.push(fn(val[i])); + } + + return mapped; + } + + return fn(val); +}; + +module.exports = { + arrayToObject: arrayToObject, + assign: assign, + combine: combine, + compact: compact, + decode: decode, + encode: encode, + isBuffer: isBuffer, + isRegExp: isRegExp, + maybeMap: maybeMap, + merge: merge +}; + +},{"./formats":14}],19:[function(require,module,exports){ +'use strict'; + +function _typeof(obj) { "@babel/helpers - typeof"; return _typeof = "function" == typeof Symbol && "symbol" == typeof Symbol.iterator ? function (obj) { return typeof obj; } : function (obj) { return obj && "function" == typeof Symbol && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }, _typeof(obj); } + +var GetIntrinsic = require('get-intrinsic'); + +var callBound = require('call-bind/callBound'); + +var inspect = require('object-inspect'); + +var $TypeError = GetIntrinsic('%TypeError%'); +var $WeakMap = GetIntrinsic('%WeakMap%', true); +var $Map = GetIntrinsic('%Map%', true); +var $weakMapGet = callBound('WeakMap.prototype.get', true); +var $weakMapSet = callBound('WeakMap.prototype.set', true); +var $weakMapHas = callBound('WeakMap.prototype.has', true); +var $mapGet = callBound('Map.prototype.get', true); +var $mapSet = callBound('Map.prototype.set', true); +var $mapHas = callBound('Map.prototype.has', true); + +var listGetNode = function listGetNode(list, key) { + for (var prev = list, curr; (curr = prev.next) !== null; prev = curr) { + if (curr.key === key) { + prev.next = curr.next; + curr.next = list.next; + list.next = curr; + return curr; + } + } +}; + +var listGet = function listGet(objects, key) { + var node = listGetNode(objects, key); + return node && node.value; +}; + +var listSet = function listSet(objects, key, value) { + var node = listGetNode(objects, key); + + if (node) { + node.value = value; + } else { + objects.next = { + key: key, + next: objects.next, + value: value + }; + } +}; + +var listHas = function listHas(objects, key) { + return !!listGetNode(objects, key); +}; + +module.exports = function getSideChannel() { + var $wm; + var $m; + var $o; + var channel = { + assert: function assert(key) { + if (!channel.has(key)) { + throw new $TypeError('Side channel does not contain ' + inspect(key)); + } + }, + get: function get(key) { + if ($WeakMap && key && (_typeof(key) === 'object' || typeof key === 'function')) { + if ($wm) { + return $weakMapGet($wm, key); + } + } else if ($Map) { + if ($m) { + return $mapGet($m, key); + } + } else { + if ($o) { + return listGet($o, key); + } + } + }, + has: function has(key) { + if ($WeakMap && key && (_typeof(key) === 'object' || typeof key === 'function')) { + if ($wm) { + return $weakMapHas($wm, key); + } + } else if 
($Map) { + if ($m) { + return $mapHas($m, key); + } + } else { + if ($o) { + return listHas($o, key); + } + } + + return false; + }, + set: function set(key, value) { + if ($WeakMap && key && (_typeof(key) === 'object' || typeof key === 'function')) { + if (!$wm) { + $wm = new $WeakMap(); + } + + $weakMapSet($wm, key, value); + } else if ($Map) { + if (!$m) { + $m = new $Map(); + } + + $mapSet($m, key, value); + } else { + if (!$o) { + $o = { + key: {}, + next: null + }; + } + + listSet($o, key, value); + } + } + }; + return channel; +}; + +},{"call-bind/callBound":2,"get-intrinsic":8,"object-inspect":12}],20:[function(require,module,exports){ +"use strict"; + +function _toConsumableArray(arr) { return _arrayWithoutHoles(arr) || _iterableToArray(arr) || _unsupportedIterableToArray(arr) || _nonIterableSpread(); } + +function _nonIterableSpread() { throw new TypeError("Invalid attempt to spread non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); } + +function _iterableToArray(iter) { if (typeof Symbol !== "undefined" && iter[Symbol.iterator] != null || iter["@@iterator"] != null) return Array.from(iter); } + +function _arrayWithoutHoles(arr) { if (Array.isArray(arr)) return _arrayLikeToArray(arr); } + +function _createForOfIteratorHelper(o, allowArrayLike) { var it = typeof Symbol !== "undefined" && o[Symbol.iterator] || o["@@iterator"]; if (!it) { if (Array.isArray(o) || (it = _unsupportedIterableToArray(o)) || allowArrayLike && o && typeof o.length === "number") { if (it) o = it; var i = 0; var F = function F() {}; return { s: F, n: function n() { if (i >= o.length) return { done: true }; return { done: false, value: o[i++] }; }, e: function e(_e) { throw _e; }, f: F }; } throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); } var normalCompletion = true, didErr = false, err; return { s: function s() { it = it.call(o); }, n: function n() { var step = it.next(); normalCompletion = step.done; return step; }, e: function e(_e2) { didErr = true; err = _e2; }, f: function f() { try { if (!normalCompletion && it.return != null) it.return(); } finally { if (didErr) throw err; } } }; } + +function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return Array.from(o); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); } + +function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) { arr2[i] = arr[i]; } return arr2; } + +function Agent() { + this._defaults = []; +} + +var _loop = function _loop() { + var fn = _arr[_i]; + + Agent.prototype[fn] = function () { + for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) { + args[_key] = arguments[_key]; + } + + this._defaults.push({ + fn: fn, + args: args + }); + + return this; + }; +}; + +for (var _i = 0, _arr = ['use', 'on', 'once', 'set', 'query', 'type', 'accept', 'auth', 'withCredentials', 'sortQuery', 'retry', 'ok', 'redirects', 'timeout', 'buffer', 'serialize', 'parse', 'ca', 'key', 'pfx', 'cert', 'disableTLSCerts']; _i < _arr.length; _i++) { + _loop(); +} + +Agent.prototype._setDefaults = 
function (request) { + var _iterator = _createForOfIteratorHelper(this._defaults), + _step; + + try { + for (_iterator.s(); !(_step = _iterator.n()).done;) { + var def = _step.value; + request[def.fn].apply(request, _toConsumableArray(def.args)); + } + } catch (err) { + _iterator.e(err); + } finally { + _iterator.f(); + } +}; + +module.exports = Agent; + +},{}],21:[function(require,module,exports){ +"use strict"; + +function _typeof(obj) { "@babel/helpers - typeof"; return _typeof = "function" == typeof Symbol && "symbol" == typeof Symbol.iterator ? function (obj) { return typeof obj; } : function (obj) { return obj && "function" == typeof Symbol && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }, _typeof(obj); } + +function _createForOfIteratorHelper(o, allowArrayLike) { var it = typeof Symbol !== "undefined" && o[Symbol.iterator] || o["@@iterator"]; if (!it) { if (Array.isArray(o) || (it = _unsupportedIterableToArray(o)) || allowArrayLike && o && typeof o.length === "number") { if (it) o = it; var i = 0; var F = function F() {}; return { s: F, n: function n() { if (i >= o.length) return { done: true }; return { done: false, value: o[i++] }; }, e: function e(_e) { throw _e; }, f: F }; } throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); } var normalCompletion = true, didErr = false, err; return { s: function s() { it = it.call(o); }, n: function n() { var step = it.next(); normalCompletion = step.done; return step; }, e: function e(_e2) { didErr = true; err = _e2; }, f: function f() { try { if (!normalCompletion && it.return != null) it.return(); } finally { if (didErr) throw err; } } }; } + +function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return Array.from(o); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); } + +function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) { arr2[i] = arr[i]; } return arr2; } + +var root; + +if (typeof window !== 'undefined') { + root = window; +} else if (typeof self === 'undefined') { + console.warn('Using browser-only version of superagent in non-browser environment'); + root = void 0; +} else { + root = self; +} + +var Emitter = require('component-emitter'); + +var safeStringify = require('fast-safe-stringify'); + +var qs = require('qs'); + +var RequestBase = require('./request-base'); + +var _require = require('./utils'), + isObject = _require.isObject, + mixin = _require.mixin, + hasOwn = _require.hasOwn; + +var ResponseBase = require('./response-base'); + +var Agent = require('./agent-base'); + +function noop() {} + +module.exports = function (method, url) { + if (typeof url === 'function') { + return new exports.Request('GET', method).end(url); + } + + if (arguments.length === 1) { + return new exports.Request('GET', method); + } + + return new exports.Request(method, url); +}; + +exports = module.exports; +var request = exports; +exports.Request = Request; + +request.getXHR = function () { + if (root.XMLHttpRequest && (!root.location || root.location.protocol !== 'file:' || !root.ActiveXObject)) { + return new XMLHttpRequest(); + } 
+ + try { + return new ActiveXObject('Microsoft.XMLHTTP'); + } catch (_unused) {} + + try { + return new ActiveXObject('Msxml2.XMLHTTP.6.0'); + } catch (_unused2) {} + + try { + return new ActiveXObject('Msxml2.XMLHTTP.3.0'); + } catch (_unused3) {} + + try { + return new ActiveXObject('Msxml2.XMLHTTP'); + } catch (_unused4) {} + + throw new Error('Browser-only version of superagent could not find XHR'); +}; + +var trim = ''.trim ? function (s) { + return s.trim(); +} : function (s) { + return s.replace(/(^\s*|\s*$)/g, ''); +}; + +function serialize(object) { + if (!isObject(object)) return object; + var pairs = []; + + for (var key in object) { + if (hasOwn(object, key)) pushEncodedKeyValuePair(pairs, key, object[key]); + } + + return pairs.join('&'); +} + +function pushEncodedKeyValuePair(pairs, key, value) { + if (value === undefined) return; + + if (value === null) { + pairs.push(encodeURI(key)); + return; + } + + if (Array.isArray(value)) { + var _iterator = _createForOfIteratorHelper(value), + _step; + + try { + for (_iterator.s(); !(_step = _iterator.n()).done;) { + var v = _step.value; + pushEncodedKeyValuePair(pairs, key, v); + } + } catch (err) { + _iterator.e(err); + } finally { + _iterator.f(); + } + } else if (isObject(value)) { + for (var subkey in value) { + if (hasOwn(value, subkey)) pushEncodedKeyValuePair(pairs, "".concat(key, "[").concat(subkey, "]"), value[subkey]); + } + } else { + pairs.push(encodeURI(key) + '=' + encodeURIComponent(value)); + } +} + +request.serializeObject = serialize; + +function parseString(string_) { + var object = {}; + var pairs = string_.split('&'); + var pair; + var pos; + + for (var i = 0, length_ = pairs.length; i < length_; ++i) { + pair = pairs[i]; + pos = pair.indexOf('='); + + if (pos === -1) { + object[decodeURIComponent(pair)] = ''; + } else { + object[decodeURIComponent(pair.slice(0, pos))] = decodeURIComponent(pair.slice(pos + 1)); + } + } + + return object; +} + +request.parseString = parseString; +request.types = { + html: 'text/html', + json: 'application/json', + xml: 'text/xml', + urlencoded: 'application/x-www-form-urlencoded', + form: 'application/x-www-form-urlencoded', + 'form-data': 'application/x-www-form-urlencoded' +}; +request.serialize = { + 'application/x-www-form-urlencoded': qs.stringify, + 'application/json': safeStringify +}; +request.parse = { + 'application/x-www-form-urlencoded': parseString, + 'application/json': JSON.parse +}; + +function parseHeader(string_) { + var lines = string_.split(/\r?\n/); + var fields = {}; + var index; + var line; + var field; + var value; + + for (var i = 0, length_ = lines.length; i < length_; ++i) { + line = lines[i]; + index = line.indexOf(':'); + + if (index === -1) { + continue; + } + + field = line.slice(0, index).toLowerCase(); + value = trim(line.slice(index + 1)); + fields[field] = value; + } + + return fields; +} + +function isJSON(mime) { + return /[/+]json($|[^-\w])/i.test(mime); +} + +function Response(request_) { + this.req = request_; + this.xhr = this.req.xhr; + this.text = this.req.method !== 'HEAD' && (this.xhr.responseType === '' || this.xhr.responseType === 'text') || typeof this.xhr.responseType === 'undefined' ? 
this.xhr.responseText : null; + this.statusText = this.req.xhr.statusText; + var status = this.xhr.status; + + if (status === 1223) { + status = 204; + } + + this._setStatusProperties(status); + + this.headers = parseHeader(this.xhr.getAllResponseHeaders()); + this.header = this.headers; + this.header['content-type'] = this.xhr.getResponseHeader('content-type'); + + this._setHeaderProperties(this.header); + + if (this.text === null && request_._responseType) { + this.body = this.xhr.response; + } else { + this.body = this.req.method === 'HEAD' ? null : this._parseBody(this.text ? this.text : this.xhr.response); + } +} + +mixin(Response.prototype, ResponseBase.prototype); + +Response.prototype._parseBody = function (string_) { + var parse = request.parse[this.type]; + + if (this.req._parser) { + return this.req._parser(this, string_); + } + + if (!parse && isJSON(this.type)) { + parse = request.parse['application/json']; + } + + return parse && string_ && (string_.length > 0 || string_ instanceof Object) ? parse(string_) : null; +}; + +Response.prototype.toError = function () { + var req = this.req; + var method = req.method; + var url = req.url; + var message = "cannot ".concat(method, " ").concat(url, " (").concat(this.status, ")"); + var error = new Error(message); + error.status = this.status; + error.method = method; + error.url = url; + return error; +}; + +request.Response = Response; + +function Request(method, url) { + var self = this; + this._query = this._query || []; + this.method = method; + this.url = url; + this.header = {}; + this._header = {}; + this.on('end', function () { + var error = null; + var res = null; + + try { + res = new Response(self); + } catch (err) { + error = new Error('Parser is unable to parse the response'); + error.parse = true; + error.original = err; + + if (self.xhr) { + error.rawResponse = typeof self.xhr.responseType === 'undefined' ? self.xhr.responseText : self.xhr.response; + error.status = self.xhr.status ? self.xhr.status : null; + error.statusCode = error.status; + } else { + error.rawResponse = null; + error.status = null; + } + + return self.callback(error); + } + + self.emit('response', res); + var new_error; + + try { + if (!self._isResponseOK(res)) { + new_error = new Error(res.statusText || res.text || 'Unsuccessful HTTP response'); + } + } catch (err) { + new_error = err; + } + + if (new_error) { + new_error.original = error; + new_error.response = res; + new_error.status = res.status; + self.callback(new_error, res); + } else { + self.callback(null, res); + } + }); +} + +Emitter(Request.prototype); +mixin(Request.prototype, RequestBase.prototype); + +Request.prototype.type = function (type) { + this.set('Content-Type', request.types[type] || type); + return this; +}; + +Request.prototype.accept = function (type) { + this.set('Accept', request.types[type] || type); + return this; +}; + +Request.prototype.auth = function (user, pass, options) { + if (arguments.length === 1) pass = ''; + + if (_typeof(pass) === 'object' && pass !== null) { + options = pass; + pass = ''; + } + + if (!options) { + options = { + type: typeof btoa === 'function' ? 'basic' : 'auto' + }; + } + + var encoder = options.encoder ? 
options.encoder : function (string) { + if (typeof btoa === 'function') { + return btoa(string); + } + + throw new Error('Cannot use basic auth, btoa is not a function'); + }; + return this._auth(user, pass, options, encoder); +}; + +Request.prototype.query = function (value) { + if (typeof value !== 'string') value = serialize(value); + if (value) this._query.push(value); + return this; +}; + +Request.prototype.attach = function (field, file, options) { + if (file) { + if (this._data) { + throw new Error("superagent can't mix .send() and .attach()"); + } + + this._getFormData().append(field, file, options || file.name); + } + + return this; +}; + +Request.prototype._getFormData = function () { + if (!this._formData) { + this._formData = new root.FormData(); + } + + return this._formData; +}; + +Request.prototype.callback = function (error, res) { + if (this._shouldRetry(error, res)) { + return this._retry(); + } + + var fn = this._callback; + this.clearTimeout(); + + if (error) { + if (this._maxRetries) error.retries = this._retries - 1; + this.emit('error', error); + } + + fn(error, res); +}; + +Request.prototype.crossDomainError = function () { + var error = new Error('Request has been terminated\nPossible causes: the network is offline, Origin is not allowed by Access-Control-Allow-Origin, the page is being unloaded, etc.'); + error.crossDomain = true; + error.status = this.status; + error.method = this.method; + error.url = this.url; + this.callback(error); +}; + +Request.prototype.agent = function () { + console.warn('This is not supported in browser version of superagent'); + return this; +}; + +Request.prototype.ca = Request.prototype.agent; +Request.prototype.buffer = Request.prototype.ca; + +Request.prototype.write = function () { + throw new Error('Streaming is not supported in browser version of superagent'); +}; + +Request.prototype.pipe = Request.prototype.write; + +Request.prototype._isHost = function (object) { + return object && _typeof(object) === 'object' && !Array.isArray(object) && Object.prototype.toString.call(object) !== '[object Object]'; +}; + +Request.prototype.end = function (fn) { + if (this._endCalled) { + console.warn('Warning: .end() was called twice. 
This is not supported in superagent'); + } + + this._endCalled = true; + this._callback = fn || noop; + + this._finalizeQueryString(); + + this._end(); +}; + +Request.prototype._setUploadTimeout = function () { + var self = this; + + if (this._uploadTimeout && !this._uploadTimeoutTimer) { + this._uploadTimeoutTimer = setTimeout(function () { + self._timeoutError('Upload timeout of ', self._uploadTimeout, 'ETIMEDOUT'); + }, this._uploadTimeout); + } +}; + +Request.prototype._end = function () { + if (this._aborted) return this.callback(new Error('The request has been aborted even before .end() was called')); + var self = this; + this.xhr = request.getXHR(); + var xhr = this.xhr; + var data = this._formData || this._data; + + this._setTimeouts(); + + xhr.addEventListener('readystatechange', function () { + var readyState = xhr.readyState; + + if (readyState >= 2 && self._responseTimeoutTimer) { + clearTimeout(self._responseTimeoutTimer); + } + + if (readyState !== 4) { + return; + } + + var status; + + try { + status = xhr.status; + } catch (_unused5) { + status = 0; + } + + if (!status) { + if (self.timedout || self._aborted) return; + return self.crossDomainError(); + } + + self.emit('end'); + }); + + var handleProgress = function handleProgress(direction, e) { + if (e.total > 0) { + e.percent = e.loaded / e.total * 100; + + if (e.percent === 100) { + clearTimeout(self._uploadTimeoutTimer); + } + } + + e.direction = direction; + self.emit('progress', e); + }; + + if (this.hasListeners('progress')) { + try { + xhr.addEventListener('progress', handleProgress.bind(null, 'download')); + + if (xhr.upload) { + xhr.upload.addEventListener('progress', handleProgress.bind(null, 'upload')); + } + } catch (_unused6) {} + } + + if (xhr.upload) { + this._setUploadTimeout(); + } + + try { + if (this.username && this.password) { + xhr.open(this.method, this.url, true, this.username, this.password); + } else { + xhr.open(this.method, this.url, true); + } + } catch (err) { + return this.callback(err); + } + + if (this._withCredentials) xhr.withCredentials = true; + + if (!this._formData && this.method !== 'GET' && this.method !== 'HEAD' && typeof data !== 'string' && !this._isHost(data)) { + var contentType = this._header['content-type']; + + var _serialize = this._serializer || request.serialize[contentType ? contentType.split(';')[0] : '']; + + if (!_serialize && isJSON(contentType)) { + _serialize = request.serialize['application/json']; + } + + if (_serialize) data = _serialize(data); + } + + for (var field in this.header) { + if (this.header[field] === null) continue; + if (hasOwn(this.header, field)) xhr.setRequestHeader(field, this.header[field]); + } + + if (this._responseType) { + xhr.responseType = this._responseType; + } + + this.emit('request', this); + xhr.send(typeof data === 'undefined' ? 
null : data); +}; + +request.agent = function () { + return new Agent(); +}; + +var _loop = function _loop() { + var method = _arr[_i]; + + Agent.prototype[method.toLowerCase()] = function (url, fn) { + var request_ = new request.Request(method, url); + + this._setDefaults(request_); + + if (fn) { + request_.end(fn); + } + + return request_; + }; +}; + +for (var _i = 0, _arr = ['GET', 'POST', 'OPTIONS', 'PATCH', 'PUT', 'DELETE']; _i < _arr.length; _i++) { + _loop(); +} + +Agent.prototype.del = Agent.prototype.delete; + +request.get = function (url, data, fn) { + var request_ = request('GET', url); + + if (typeof data === 'function') { + fn = data; + data = null; + } + + if (data) request_.query(data); + if (fn) request_.end(fn); + return request_; +}; + +request.head = function (url, data, fn) { + var request_ = request('HEAD', url); + + if (typeof data === 'function') { + fn = data; + data = null; + } + + if (data) request_.query(data); + if (fn) request_.end(fn); + return request_; +}; + +request.options = function (url, data, fn) { + var request_ = request('OPTIONS', url); + + if (typeof data === 'function') { + fn = data; + data = null; + } + + if (data) request_.send(data); + if (fn) request_.end(fn); + return request_; +}; + +function del(url, data, fn) { + var request_ = request('DELETE', url); + + if (typeof data === 'function') { + fn = data; + data = null; + } + + if (data) request_.send(data); + if (fn) request_.end(fn); + return request_; +} + +request.del = del; +request.delete = del; + +request.patch = function (url, data, fn) { + var request_ = request('PATCH', url); + + if (typeof data === 'function') { + fn = data; + data = null; + } + + if (data) request_.send(data); + if (fn) request_.end(fn); + return request_; +}; + +request.post = function (url, data, fn) { + var request_ = request('POST', url); + + if (typeof data === 'function') { + fn = data; + data = null; + } + + if (data) request_.send(data); + if (fn) request_.end(fn); + return request_; +}; + +request.put = function (url, data, fn) { + var request_ = request('PUT', url); + + if (typeof data === 'function') { + fn = data; + data = null; + } + + if (data) request_.send(data); + if (fn) request_.end(fn); + return request_; +}; + +},{"./agent-base":20,"./request-base":22,"./response-base":23,"./utils":24,"component-emitter":4,"fast-safe-stringify":5,"qs":15}],22:[function(require,module,exports){ +(function (process){(function (){ +"use strict"; + +function _typeof(obj) { "@babel/helpers - typeof"; return _typeof = "function" == typeof Symbol && "symbol" == typeof Symbol.iterator ? function (obj) { return typeof obj; } : function (obj) { return obj && "function" == typeof Symbol && obj.constructor === Symbol && obj !== Symbol.prototype ? 
"symbol" : typeof obj; }, _typeof(obj); } + +var semver = require('semver'); + +var _require = require('./utils'), + isObject = _require.isObject, + hasOwn = _require.hasOwn; + +module.exports = RequestBase; + +function RequestBase() {} + +RequestBase.prototype.clearTimeout = function () { + clearTimeout(this._timer); + clearTimeout(this._responseTimeoutTimer); + clearTimeout(this._uploadTimeoutTimer); + delete this._timer; + delete this._responseTimeoutTimer; + delete this._uploadTimeoutTimer; + return this; +}; + +RequestBase.prototype.parse = function (fn) { + this._parser = fn; + return this; +}; + +RequestBase.prototype.responseType = function (value) { + this._responseType = value; + return this; +}; + +RequestBase.prototype.serialize = function (fn) { + this._serializer = fn; + return this; +}; + +RequestBase.prototype.timeout = function (options) { + if (!options || _typeof(options) !== 'object') { + this._timeout = options; + this._responseTimeout = 0; + this._uploadTimeout = 0; + return this; + } + + for (var option in options) { + if (hasOwn(options, option)) { + switch (option) { + case 'deadline': + this._timeout = options.deadline; + break; + + case 'response': + this._responseTimeout = options.response; + break; + + case 'upload': + this._uploadTimeout = options.upload; + break; + + default: + console.warn('Unknown timeout option', option); + } + } + } + + return this; +}; + +RequestBase.prototype.retry = function (count, fn) { + if (arguments.length === 0 || count === true) count = 1; + if (count <= 0) count = 0; + this._maxRetries = count; + this._retries = 0; + this._retryCallback = fn; + return this; +}; + +var ERROR_CODES = new Set(['ETIMEDOUT', 'ECONNRESET', 'EADDRINUSE', 'ECONNREFUSED', 'EPIPE', 'ENOTFOUND', 'ENETUNREACH', 'EAI_AGAIN']); +var STATUS_CODES = new Set([408, 413, 429, 500, 502, 503, 504, 521, 522, 524]); + +RequestBase.prototype._shouldRetry = function (error, res) { + if (!this._maxRetries || this._retries++ >= this._maxRetries) { + return false; + } + + if (this._retryCallback) { + try { + var override = this._retryCallback(error, res); + + if (override === true) return true; + if (override === false) return false; + } catch (err) { + console.error(err); + } + } + + if (res && res.status && STATUS_CODES.has(res.status)) return true; + + if (error) { + if (error.code && ERROR_CODES.has(error.code)) return true; + if (error.timeout && error.code === 'ECONNABORTED') return true; + if (error.crossDomain) return true; + } + + return false; +}; + +RequestBase.prototype._retry = function () { + this.clearTimeout(); + + if (this.req) { + this.req = null; + this.req = this.request(); + } + + this._aborted = false; + this.timedout = false; + this.timedoutError = null; + return this._end(); +}; + +RequestBase.prototype.then = function (resolve, reject) { + var _this = this; + + if (!this._fullfilledPromise) { + var self = this; + + if (this._endCalled) { + console.warn('Warning: superagent request was sent twice, because both .end() and .then() were called. 
Never call .end() if you use promises'); + } + + this._fullfilledPromise = new Promise(function (resolve, reject) { + self.on('abort', function () { + if (_this._maxRetries && _this._maxRetries > _this._retries) { + return; + } + + if (_this.timedout && _this.timedoutError) { + reject(_this.timedoutError); + return; + } + + var error = new Error('Aborted'); + error.code = 'ABORTED'; + error.status = _this.status; + error.method = _this.method; + error.url = _this.url; + reject(error); + }); + self.end(function (error, res) { + if (error) reject(error);else resolve(res); + }); + }); + } + + return this._fullfilledPromise.then(resolve, reject); +}; + +RequestBase.prototype.catch = function (callback) { + return this.then(undefined, callback); +}; + +RequestBase.prototype.use = function (fn) { + fn(this); + return this; +}; + +RequestBase.prototype.ok = function (callback) { + if (typeof callback !== 'function') throw new Error('Callback required'); + this._okCallback = callback; + return this; +}; + +RequestBase.prototype._isResponseOK = function (res) { + if (!res) { + return false; + } + + if (this._okCallback) { + return this._okCallback(res); + } + + return res.status >= 200 && res.status < 300; +}; + +RequestBase.prototype.get = function (field) { + return this._header[field.toLowerCase()]; +}; + +RequestBase.prototype.getHeader = RequestBase.prototype.get; + +RequestBase.prototype.set = function (field, value) { + if (isObject(field)) { + for (var key in field) { + if (hasOwn(field, key)) this.set(key, field[key]); + } + + return this; + } + + this._header[field.toLowerCase()] = value; + this.header[field] = value; + return this; +}; + +RequestBase.prototype.unset = function (field) { + delete this._header[field.toLowerCase()]; + delete this.header[field]; + return this; +}; + +RequestBase.prototype.field = function (name, value, options) { + if (name === null || undefined === name) { + throw new Error('.field(name, val) name can not be empty'); + } + + if (this._data) { + throw new Error(".field() can't be used if .send() is used. 
Please use only .send() or only .field() & .attach()"); + } + + if (isObject(name)) { + for (var key in name) { + if (hasOwn(name, key)) this.field(key, name[key]); + } + + return this; + } + + if (Array.isArray(value)) { + for (var i in value) { + if (hasOwn(value, i)) this.field(name, value[i]); + } + + return this; + } + + if (value === null || undefined === value) { + throw new Error('.field(name, val) val can not be empty'); + } + + if (typeof value === 'boolean') { + value = String(value); + } + + if (options) this._getFormData().append(name, value, options);else this._getFormData().append(name, value); + return this; +}; + +RequestBase.prototype.abort = function () { + if (this._aborted) { + return this; + } + + this._aborted = true; + if (this.xhr) this.xhr.abort(); + + if (this.req) { + if (semver.gte(process.version, 'v13.0.0') && semver.lt(process.version, 'v14.0.0')) { + throw new Error('Superagent does not work in v13 properly with abort() due to Node.js core changes'); + } else if (semver.gte(process.version, 'v14.0.0')) { + this.req.destroyed = true; + } + + this.req.abort(); + } + + this.clearTimeout(); + this.emit('abort'); + return this; +}; + +RequestBase.prototype._auth = function (user, pass, options, base64Encoder) { + switch (options.type) { + case 'basic': + this.set('Authorization', "Basic ".concat(base64Encoder("".concat(user, ":").concat(pass)))); + break; + + case 'auto': + this.username = user; + this.password = pass; + break; + + case 'bearer': + this.set('Authorization', "Bearer ".concat(user)); + break; + + default: + break; + } + + return this; +}; + +RequestBase.prototype.withCredentials = function (on) { + if (on === undefined) on = true; + this._withCredentials = on; + return this; +}; + +RequestBase.prototype.redirects = function (n) { + this._maxRedirects = n; + return this; +}; + +RequestBase.prototype.maxResponseSize = function (n) { + if (typeof n !== 'number') { + throw new TypeError('Invalid argument'); + } + + this._maxResponseSize = n; + return this; +}; + +RequestBase.prototype.toJSON = function () { + return { + method: this.method, + url: this.url, + data: this._data, + headers: this._header + }; +}; + +RequestBase.prototype.send = function (data) { + var isObject_ = isObject(data); + var type = this._header['content-type']; + + if (this._formData) { + throw new Error(".send() can't be used if .attach() or .field() is used. Please use only .send() or only .field() & .attach()"); + } + + if (isObject_ && !this._data) { + if (Array.isArray(data)) { + this._data = []; + } else if (!this._isHost(data)) { + this._data = {}; + } + } else if (data && this._data && this._isHost(this._data)) { + throw new Error("Can't merge these send calls"); + } + + if (isObject_ && isObject(this._data)) { + for (var key in data) { + if (hasOwn(data, key)) this._data[key] = data[key]; + } + } else if (typeof data === 'string') { + if (!type) this.type('form'); + type = this._header['content-type']; + if (type) type = type.toLowerCase().trim(); + + if (type === 'application/x-www-form-urlencoded') { + this._data = this._data ? "".concat(this._data, "&").concat(data) : data; + } else { + this._data = (this._data || '') + data; + } + } else { + this._data = data; + } + + if (!isObject_ || this._isHost(data)) { + return this; + } + + if (!type) this.type('json'); + return this; +}; + +RequestBase.prototype.sortQuery = function (sort) { + this._sort = typeof sort === 'undefined' ? 
true : sort; + return this; +}; + +RequestBase.prototype._finalizeQueryString = function () { + var query = this._query.join('&'); + + if (query) { + this.url += (this.url.includes('?') ? '&' : '?') + query; + } + + this._query.length = 0; + + if (this._sort) { + var index = this.url.indexOf('?'); + + if (index >= 0) { + var queryArray = this.url.slice(index + 1).split('&'); + + if (typeof this._sort === 'function') { + queryArray.sort(this._sort); + } else { + queryArray.sort(); + } + + this.url = this.url.slice(0, index) + '?' + queryArray.join('&'); + } + } +}; + +RequestBase.prototype._appendQueryString = function () { + console.warn('Unsupported'); +}; + +RequestBase.prototype._timeoutError = function (reason, timeout, errno) { + if (this._aborted) { + return; + } + + var error = new Error("".concat(reason + timeout, "ms exceeded")); + error.timeout = timeout; + error.code = 'ECONNABORTED'; + error.errno = errno; + this.timedout = true; + this.timedoutError = error; + this.abort(); + this.callback(error); +}; + +RequestBase.prototype._setTimeouts = function () { + var self = this; + + if (this._timeout && !this._timer) { + this._timer = setTimeout(function () { + self._timeoutError('Timeout of ', self._timeout, 'ETIME'); + }, this._timeout); + } + + if (this._responseTimeout && !this._responseTimeoutTimer) { + this._responseTimeoutTimer = setTimeout(function () { + self._timeoutError('Response timeout of ', self._responseTimeout, 'ETIMEDOUT'); + }, this._responseTimeout); + } +}; + +}).call(this)}).call(this,require('_process')) +},{"./utils":24,"_process":13,"semver":1}],23:[function(require,module,exports){ +"use strict"; + +var utils = require('./utils'); + +module.exports = ResponseBase; + +function ResponseBase() {} + +ResponseBase.prototype.get = function (field) { + return this.header[field.toLowerCase()]; +}; + +ResponseBase.prototype._setHeaderProperties = function (header) { + var ct = header['content-type'] || ''; + this.type = utils.type(ct); + var parameters = utils.params(ct); + + for (var key in parameters) { + if (Object.prototype.hasOwnProperty.call(parameters, key)) this[key] = parameters[key]; + } + + this.links = {}; + + try { + if (header.link) { + this.links = utils.parseLinks(header.link); + } + } catch (_unused) {} +}; + +ResponseBase.prototype._setStatusProperties = function (status) { + var type = Math.trunc(status / 100); + this.statusCode = status; + this.status = this.statusCode; + this.statusType = type; + this.info = type === 1; + this.ok = type === 2; + this.redirect = type === 3; + this.clientError = type === 4; + this.serverError = type === 5; + this.error = type === 4 || type === 5 ? this.toError() : false; + this.created = status === 201; + this.accepted = status === 202; + this.noContent = status === 204; + this.badRequest = status === 400; + this.unauthorized = status === 401; + this.notAcceptable = status === 406; + this.forbidden = status === 403; + this.notFound = status === 404; + this.unprocessableEntity = status === 422; +}; + +},{"./utils":24}],24:[function(require,module,exports){ +"use strict"; + +function _typeof(obj) { "@babel/helpers - typeof"; return _typeof = "function" == typeof Symbol && "symbol" == typeof Symbol.iterator ? function (obj) { return typeof obj; } : function (obj) { return obj && "function" == typeof Symbol && obj.constructor === Symbol && obj !== Symbol.prototype ? 
"symbol" : typeof obj; }, _typeof(obj); } + +function _createForOfIteratorHelper(o, allowArrayLike) { var it = typeof Symbol !== "undefined" && o[Symbol.iterator] || o["@@iterator"]; if (!it) { if (Array.isArray(o) || (it = _unsupportedIterableToArray(o)) || allowArrayLike && o && typeof o.length === "number") { if (it) o = it; var i = 0; var F = function F() {}; return { s: F, n: function n() { if (i >= o.length) return { done: true }; return { done: false, value: o[i++] }; }, e: function e(_e) { throw _e; }, f: F }; } throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); } var normalCompletion = true, didErr = false, err; return { s: function s() { it = it.call(o); }, n: function n() { var step = it.next(); normalCompletion = step.done; return step; }, e: function e(_e2) { didErr = true; err = _e2; }, f: function f() { try { if (!normalCompletion && it.return != null) it.return(); } finally { if (didErr) throw err; } } }; } + +function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return Array.from(o); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); } + +function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) { arr2[i] = arr[i]; } return arr2; } + +exports.type = function (string_) { + return string_.split(/ *; */).shift(); +}; + +exports.params = function (value) { + var object = {}; + + var _iterator = _createForOfIteratorHelper(value.split(/ *; */)), + _step; + + try { + for (_iterator.s(); !(_step = _iterator.n()).done;) { + var string_ = _step.value; + var parts = string_.split(/ *= */); + var key = parts.shift(); + + var _value = parts.shift(); + + if (key && _value) object[key] = _value; + } + } catch (err) { + _iterator.e(err); + } finally { + _iterator.f(); + } + + return object; +}; + +exports.parseLinks = function (value) { + var object = {}; + + var _iterator2 = _createForOfIteratorHelper(value.split(/ *, */)), + _step2; + + try { + for (_iterator2.s(); !(_step2 = _iterator2.n()).done;) { + var string_ = _step2.value; + var parts = string_.split(/ *; */); + var url = parts[0].slice(1, -1); + var rel = parts[1].split(/ *= */)[1].slice(1, -1); + object[rel] = url; + } + } catch (err) { + _iterator2.e(err); + } finally { + _iterator2.f(); + } + + return object; +}; + +exports.cleanHeader = function (header, changesOrigin) { + delete header['content-type']; + delete header['content-length']; + delete header['transfer-encoding']; + delete header.host; + + if (changesOrigin) { + delete header.authorization; + delete header.cookie; + } + + return header; +}; + +exports.isObject = function (object) { + return object !== null && _typeof(object) === 'object'; +}; + +exports.hasOwn = Object.hasOwn || function (object, property) { + if (object == null) { + throw new TypeError('Cannot convert undefined or null to object'); + } + + return Object.prototype.hasOwnProperty.call(new Object(object), property); +}; + +exports.mixin = function (target, source) { + for (var key in source) { + if (exports.hasOwn(source, key)) { + target[key] = source[key]; + } + } +}; + +},{}]},{},[21])(21) +}); diff --git 
a/node_modules/superagent/dist/superagent.min.js b/node_modules/superagent/dist/superagent.min.js new file mode 100644 index 0000000..d216067 --- /dev/null +++ b/node_modules/superagent/dist/superagent.min.js @@ -0,0 +1 @@ +!function(t){if("object"==typeof exports&&"undefined"!=typeof module)module.exports=t();else if("function"==typeof define&&define.amd)define([],t);else{("undefined"!=typeof window?window:"undefined"!=typeof global?global:"undefined"!=typeof self?self:this).superagent=t()}}((function(){var t={exports:{}};function e(t){if(t)return function(t){for(var r in e.prototype)t[r]=e.prototype[r];return t}(t)}t.exports=e,e.prototype.on=e.prototype.addEventListener=function(t,e){return this._callbacks=this._callbacks||{},(this._callbacks["$"+t]=this._callbacks["$"+t]||[]).push(e),this},e.prototype.once=function(t,e){function r(){this.off(t,r),e.apply(this,arguments)}return r.fn=e,this.on(t,r),this},e.prototype.off=e.prototype.removeListener=e.prototype.removeAllListeners=e.prototype.removeEventListener=function(t,e){if(this._callbacks=this._callbacks||{},0==arguments.length)return this._callbacks={},this;var r,o=this._callbacks["$"+t];if(!o)return this;if(1==arguments.length)return delete this._callbacks["$"+t],this;for(var n=0;nc.depthLimit)return void s("[...]",e,r,a);if(void 0!==c.edgesLimit&&n+1>c.edgesLimit)return void s("[...]",e,r,a);if(i.push(e),Array.isArray(e))for(l=0;le?1:0}function l(t,e,r,u){void 0===u&&(u=a());var l,f=function t(e,r,i,a,u,l,p){var f;if(l+=1,"object"==o(e)&&null!==e){for(f=0;fp.depthLimit)return void s("[...]",e,r,u);if(void 0!==p.edgesLimit&&i+1>p.edgesLimit)return void s("[...]",e,r,u);if(a.push(e),Array.isArray(e))for(f=0;f0)for(var o=0;o1&&"boolean"!=typeof e)throw new _('"allowMissing" argument must be a boolean');var r=function(t){var e=U(t,0,1),r=U(t,-1);if("%"===e&&"%"!==r)throw new S("invalid intrinsic syntax, expected closing `%`");if("%"===r&&"%"!==e)throw new S("invalid intrinsic syntax, expected opening `%`");var o=[];return D(t,M,(function(t,e,r,n){o[o.length]=r?D(n,L,"$1"):e||t})),o}(t),o=r.length>0?r[0]:"",n=q("%"+o+"%",e),i=n.name,a=n.value,u=!1,s=n.alias;s&&(o=s[0],F(r,N([0,1],s)));for(var c=1,l=!0;c=r.length){var h=O(a,p);a=(l=!!h)&&"get"in h&&!("originalValue"in h.get)?h.get:a[p]}else l=v(a,p),a=a[p];l&&!u&&(I[i]=a)}}return a},B=H("%Function.prototype.apply%"),W=H("%Function.prototype.call%"),z=H("%Reflect.apply%",!0)||b.call(W,B),$=H("%Object.getOwnPropertyDescriptor%",!0),G=H("%Object.defineProperty%",!0),J=H("%Math.max%");if(G)try{G({},"a",{value:1})}catch(Or){G=null}j=function(t){var e=z(b,W,arguments);return $&&G&&$(e,"length").configurable&&G(e,"length",{value:1+J(0,t.length-(arguments.length-1))}),e};var X=function(){return z(b,B,arguments)};G?G(j,"apply",{value:X}):j.apply=X;var V=j(H("String.prototype.indexOf")),Q=function(t,e){var r=H(t,!!e);return"function"==typeof r&&V(t,".prototype.")>-1?j(r):r},K={};function Y(t){return(Y="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(t){return typeof t}:function(t){return t&&"function"==typeof Symbol&&t.constructor===Symbol&&t!==Symbol.prototype?"symbol":typeof t})(t)}var Z="function"==typeof Map&&Map.prototype,tt=Object.getOwnPropertyDescriptor&&Z?Object.getOwnPropertyDescriptor(Map.prototype,"size"):null,et=Z&&tt&&"function"==typeof tt.get?tt.get:null,rt=Z&&Map.prototype.forEach,ot="function"==typeof Set&&Set.prototype,nt=Object.getOwnPropertyDescriptor&&ot?Object.getOwnPropertyDescriptor(Set.prototype,"size"):null,it=ot&&nt&&"function"==typeof 
nt.get?nt.get:null,at=ot&&Set.prototype.forEach,ut="function"==typeof WeakMap&&WeakMap.prototype?WeakMap.prototype.has:null,st="function"==typeof WeakSet&&WeakSet.prototype?WeakSet.prototype.has:null,ct="function"==typeof WeakRef&&WeakRef.prototype?WeakRef.prototype.deref:null,lt=Boolean.prototype.valueOf,pt=Object.prototype.toString,ft=Function.prototype.toString,yt=String.prototype.match,ht=String.prototype.slice,dt=String.prototype.replace,mt=String.prototype.toUpperCase,bt=String.prototype.toLowerCase,vt=RegExp.prototype.test,gt=Array.prototype.concat,St=Array.prototype.join,wt=Array.prototype.slice,_t=Math.floor,At="function"==typeof BigInt?BigInt.prototype.valueOf:null,Ot=Object.getOwnPropertySymbols,jt="function"==typeof Symbol&&"symbol"==Y(Symbol.iterator)?Symbol.prototype.toString:null,Et="function"==typeof Symbol&&"object"==Y(Symbol.iterator),Tt="function"==typeof Symbol&&Symbol.toStringTag&&(Y(Symbol.toStringTag),1)?Symbol.toStringTag:null,Pt=Object.prototype.propertyIsEnumerable,xt=("function"==typeof Reflect?Reflect.getPrototypeOf:Object.getPrototypeOf)||([].__proto__===Array.prototype?function(t){return t.__proto__}:null);function kt(t,e){if(t===1/0||t===-1/0||t!=t||t&&t>-1e3&&t<1e3||vt.call(/e/,e))return e;var r=/[0-9](?=(?:[0-9]{3})+(?![0-9]))/g;if("number"==typeof t){var o=t<0?-_t(-t):_t(t);if(o!==t){var n=String(o),i=ht.call(e,n.length+1);return dt.call(n,r,"$&_")+"."+dt.call(dt.call(i,/([0-9]{3})/g,"$&_"),/_$/,"")}}return dt.call(e,r,"$&_")}var Rt=K.custom,It=Ut(Rt)?Rt:null;function Ct(t,e,r){var o="double"===(r.quoteStyle||e)?'"':"'";return o+t+o}function Nt(t){return dt.call(String(t),/"/g,""")}function Ft(t){return!("[object Array]"!==qt(t)||Tt&&"object"==Y(t)&&Tt in t)}function Dt(t){return!("[object RegExp]"!==qt(t)||Tt&&"object"==Y(t)&&Tt in t)}function Ut(t){if(Et)return t&&"object"==Y(t)&&t instanceof Symbol;if("symbol"==Y(t))return!0;if(!t||"object"!=Y(t)||!jt)return!1;try{return jt.call(t),!0}catch(Or){}return!1}var Mt=Object.prototype.hasOwnProperty||function(t){return t in this};function Lt(t,e){return Mt.call(t,e)}function qt(t){return pt.call(t)}function Ht(t,e){if(t.indexOf)return t.indexOf(e);for(var r=0,o=t.length;r0))throw new TypeError('option "indent" must be "\\t", an integer > 0, or `null`');if(Lt(i,"numericSeparator")&&"boolean"!=typeof i.numericSeparator)throw new TypeError('option "numericSeparator", if provided, must be `true` or `false`');var u=i.numericSeparator;if(void 0===e)return"undefined";if(null===e)return"null";if("boolean"==typeof e)return e?"true":"false";if("string"==typeof e)return function t(e,r){if(e.length>r.maxStringLength){var o=e.length-r.maxStringLength,n="... 
"+o+" more character"+(o>1?"s":"");return t(ht.call(e,0,r.maxStringLength),r)+n}return Ct(dt.call(dt.call(e,/(['\\])/g,"\\$1"),/[\x00-\x1f]/g,Bt),"single",r)}(e,i);if("number"==typeof e){if(0===e)return 1/0/e>0?"0":"-0";var s=String(e);return u?kt(e,s):s}if("bigint"==typeof e){var c=String(e)+"n";return u?kt(e,c):c}var l=void 0===i.depth?5:i.depth;if(void 0===o&&(o=0),o>=l&&l>0&&"object"==Y(e))return Ft(e)?"[Array]":"[Object]";var p,f=function(t,e){var r;if("\t"===t.indent)r="\t";else{if(!("number"==typeof t.indent&&t.indent>0))return null;r=St.call(Array(t.indent+1)," ")}return{base:r,prev:St.call(Array(e+1),r)}}(i,o);if(void 0===n)n=[];else if(Ht(n,e)>=0)return"[Circular]";function y(e,r,a){if(r&&(n=wt.call(n)).push(r),a){var u={depth:i.depth};return Lt(i,"quoteStyle")&&(u.quoteStyle=i.quoteStyle),t(e,u,o+1,n)}return t(e,i,o+1,n)}if("function"==typeof e&&!Dt(e)){var h=function(t){if(t.name)return t.name;var e=yt.call(ft.call(t),/^function\s*([\w$]+)/);return e?e[1]:null}(e),d=Jt(e,y);return"[Function"+(h?": "+h:" (anonymous)")+"]"+(d.length>0?" { "+St.call(d,", ")+" }":"")}if(Ut(e)){var m=Et?dt.call(String(e),/^(Symbol\(.*\))_[^)]*$/,"$1"):jt.call(e);return"object"!=Y(e)||Et?m:Wt(m)}if((p=e)&&"object"==Y(p)&&("undefined"!=typeof HTMLElement&&p instanceof HTMLElement||"string"==typeof p.nodeName&&"function"==typeof p.getAttribute)){for(var b="<"+bt.call(String(e.nodeName)),v=e.attributes||[],g=0;g"}if(Ft(e)){if(0===e.length)return"[]";var S=Jt(e,y);return f&&!function(t){for(var e=0;e=0)return!1;return!0}(S)?"["+Gt(S,f)+"]":"[ "+St.call(S,", ")+" ]"}if(function(t){return!("[object Error]"!==qt(t)||Tt&&"object"==Y(t)&&Tt in t)}(e)){var w=Jt(e,y);return"cause"in Error.prototype||!("cause"in e)||Pt.call(e,"cause")?0===w.length?"["+String(e)+"]":"{ ["+String(e)+"] "+St.call(w,", ")+" }":"{ ["+String(e)+"] "+St.call(gt.call("[cause]: "+y(e.cause),w),", ")+" }"}if("object"==Y(e)&&a){if(It&&"function"==typeof e[It]&&K)return K(e,{depth:l-o});if("symbol"!==a&&"function"==typeof e.inspect)return e.inspect()}if(function(t){if(!et||!t||"object"!=Y(t))return!1;try{et.call(t);try{it.call(t)}catch(b){return!0}return t instanceof Map}catch(Or){}return!1}(e)){var _=[];return rt.call(e,(function(t,r){_.push(y(r,e,!0)+" => "+y(t,e))})),$t("Map",et.call(e),_,f)}if(function(t){if(!it||!t||"object"!=Y(t))return!1;try{it.call(t);try{et.call(t)}catch(e){return!0}return t instanceof Set}catch(Or){}return!1}(e)){var A=[];return at.call(e,(function(t){A.push(y(t,e))})),$t("Set",it.call(e),A,f)}if(function(t){if(!ut||!t||"object"!=Y(t))return!1;try{ut.call(t,ut);try{st.call(t,st)}catch(b){return!0}return t instanceof WeakMap}catch(Or){}return!1}(e))return zt("WeakMap");if(function(t){if(!st||!t||"object"!=Y(t))return!1;try{st.call(t,st);try{ut.call(t,ut)}catch(b){return!0}return t instanceof WeakSet}catch(Or){}return!1}(e))return zt("WeakSet");if(function(t){if(!ct||!t||"object"!=Y(t))return!1;try{return ct.call(t),!0}catch(Or){}return!1}(e))return zt("WeakRef");if(function(t){return!("[object Number]"!==qt(t)||Tt&&"object"==Y(t)&&Tt in t)}(e))return Wt(y(Number(e)));if(function(t){if(!t||"object"!=Y(t)||!At)return!1;try{return At.call(t),!0}catch(Or){}return!1}(e))return Wt(y(At.call(e)));if(function(t){return!("[object Boolean]"!==qt(t)||Tt&&"object"==Y(t)&&Tt in t)}(e))return Wt(lt.call(e));if(function(t){return!("[object String]"!==qt(t)||Tt&&"object"==Y(t)&&Tt in t)}(e))return Wt(y(String(e)));if(!function(t){return!("[object Date]"!==qt(t)||Tt&&"object"==Y(t)&&Tt in t)}(e)&&!Dt(e)){var 
O=Jt(e,y),j=xt?xt(e)===Object.prototype:e instanceof Object||e.constructor===Object,E=e instanceof Object?"":"null prototype",T=!j&&Tt&&Object(e)===e&&Tt in e?ht.call(qt(e),8,-1):E?"Object":"",P=(j||"function"!=typeof e.constructor?"":e.constructor.name?e.constructor.name+" ":"")+(T||E?"["+St.call(gt.call([],T||[],E||[]),": ")+"] ":"");return 0===O.length?P+"{}":f?P+"{"+Gt(O,f)+"}":P+"{ "+St.call(O,", ")+" }"}return String(e)}(t))},get:function(o){if(Qt&&o&&("object"==Xt(o)||"function"==typeof o)){if(t)return Yt(t,o)}else if(Kt){if(e)return ee(e,o)}else if(r)return function(t,e){var r=ne(t,e);return r&&r.value}(r,o)},has:function(o){if(Qt&&o&&("object"==Xt(o)||"function"==typeof o)){if(t)return te(t,o)}else if(Kt){if(e)return oe(e,o)}else if(r)return function(t,e){return!!ne(t,e)}(r,o);return!1},set:function(o,n){Qt&&o&&("object"==Xt(o)||"function"==typeof o)?(t||(t=new Qt),Zt(t,o,n)):Kt?(e||(e=new Kt),re(e,o,n)):(r||(r={key:{},next:null}),function(t,e,r){var o=ne(t,e);o?o.value=r:t.next={key:e,next:t.next,value:r}}(r,o,n))}};return o},ae=String.prototype.replace,ue=/%20/g,se={default:"RFC3986",formatters:{RFC1738:function(t){return ae.call(t,ue,"+")},RFC3986:function(t){return String(t)}},RFC1738:"RFC1738",RFC3986:"RFC3986"};function ce(t){return(ce="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(t){return typeof t}:function(t){return t&&"function"==typeof Symbol&&t.constructor===Symbol&&t!==Symbol.prototype?"symbol":typeof t})(t)}var le=Object.prototype.hasOwnProperty,pe=Array.isArray,fe=function(){for(var t=[],e=0;e<256;++e)t.push("%"+((e<16?"0":"")+e.toString(16)).toUpperCase());return t}(),ye={combine:function(t,e){return[].concat(t,e)},compact:function(t){for(var e=[{obj:{o:t},prop:"o"}],r=[],o=0;o1;){var e=t.pop(),r=e.obj[e.prop];if(pe(r)){for(var o=[],n=0;n=48&&s<=57||s>=65&&s<=90||s>=97&&s<=122||n===se.RFC1738&&(40===s||41===s)?a+=i.charAt(u):s<128?a+=fe[s]:s<2048?a+=fe[192|s>>6]+fe[128|63&s]:s<55296||s>=57344?a+=fe[224|s>>12]+fe[128|s>>6&63]+fe[128|63&s]:(u+=1,s=65536+((1023&s)<<10|1023&i.charCodeAt(u)),a+=fe[240|s>>18]+fe[128|s>>12&63]+fe[128|s>>6&63]+fe[128|63&s])}return a},isBuffer:function(t){return!(!t||"object"!=ce(t)||!(t.constructor&&t.constructor.isBuffer&&t.constructor.isBuffer(t)))},isRegExp:function(t){return"[object RegExp]"===Object.prototype.toString.call(t)},maybeMap:function(t,e){if(pe(t)){for(var r=[],o=0;o0?b.join(",")||null:void 0}];else if(be(u))E=u;else{var P=Object.keys(b);E=s?P.sort(s):P}for(var x=0;x0?f+p:""}}),Te={};function Pe(t){return(Pe="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(t){return typeof t}:function(t){return t&&"function"==typeof Symbol&&t.constructor===Symbol&&t!==Symbol.prototype?"symbol":typeof t})(t)}function xe(t,e){var r="undefined"!=typeof Symbol&&t[Symbol.iterator]||t["@@iterator"];if(!r){if(Array.isArray(t)||(r=function(t,e){if(!t)return;if("string"==typeof t)return ke(t,e);var r=Object.prototype.toString.call(t).slice(8,-1);"Object"===r&&t.constructor&&(r=t.constructor.name);if("Map"===r||"Set"===r)return Array.from(t);if("Arguments"===r||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(r))return ke(t,e)}(t))||e&&t&&"number"==typeof t.length){r&&(t=r);var o=0,n=function(){};return{s:n,n:function(){return o>=t.length?{done:!0}:{done:!1,value:t[o++]}},e:function(t){throw t},f:n}}throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}var 
i,a=!0,u=!1;return{s:function(){r=r.call(t)},n:function(){var t=r.next();return a=t.done,t},e:function(t){u=!0,i=t},f:function(){try{a||null==r.return||r.return()}finally{if(u)throw i}}}}function ke(t,e){(null==e||e>t.length)&&(e=t.length);for(var r=0,o=new Array(e);r1)for(var r=1;r=this._maxRetries)return!1;if(this._retryCallback)try{var r=this._retryCallback(t,e);if(!0===r)return!0;if(!1===r)return!1}catch(o){console.error(o)}if(e&&e.status&&a.has(e.status))return!0;if(t){if(t.code&&i.has(t.code))return!0;if(t.timeout&&"ECONNABORTED"===t.code)return!0;if(t.crossDomain)return!0}return!1},n.prototype._retry=function(){return this.clearTimeout(),this.req&&(this.req=null,this.req=this.request()),this._aborted=!1,this.timedout=!1,this.timedoutError=null,this._end()},n.prototype.then=function(t,e){var r=this;if(!this._fullfilledPromise){var o=this;this._endCalled&&console.warn("Warning: superagent request was sent twice, because both .end() and .then() were called. Never call .end() if you use promises"),this._fullfilledPromise=new Promise((function(t,e){o.on("abort",(function(){if(!(r._maxRetries&&r._maxRetries>r._retries))if(r.timedout&&r.timedoutError)e(r.timedoutError);else{var t=new Error("Aborted");t.code="ABORTED",t.status=r.status,t.method=r.method,t.url=r.url,e(t)}})),o.end((function(r,o){r?e(r):t(o)}))}))}return this._fullfilledPromise.then(t,e)},n.prototype.catch=function(t){return this.then(void 0,t)},n.prototype.use=function(t){return t(this),this},n.prototype.ok=function(t){if("function"!=typeof t)throw new Error("Callback required");return this._okCallback=t,this},n.prototype._isResponseOK=function(t){return!!t&&(this._okCallback?this._okCallback(t):t.status>=200&&t.status<300)},n.prototype.get=function(t){return this._header[t.toLowerCase()]},n.prototype.getHeader=n.prototype.get,n.prototype.set=function(t,e){if(r(t)){for(var n in t)o(t,n)&&this.set(n,t[n]);return this}return this._header[t.toLowerCase()]=e,this.header[t]=e,this},n.prototype.unset=function(t){return delete this._header[t.toLowerCase()],delete this.header[t],this},n.prototype.field=function(t,e,n){if(null==t)throw new Error(".field(name, val) name can not be empty");if(this._data)throw new Error(".field() can't be used if .send() is used. 
Please use only .send() or only .field() & .attach()");if(r(t)){for(var i in t)o(t,i)&&this.field(i,t[i]);return this}if(Array.isArray(e)){for(var a in e)o(e,a)&&this.field(t,e[a]);return this}if(null==e)throw new Error(".field(name, val) val can not be empty");return"boolean"==typeof e&&(e=String(e)),n?this._getFormData().append(t,e,n):this._getFormData().append(t,e),this},n.prototype.abort=function(){if(this._aborted)return this;if(this._aborted=!0,this.xhr&&this.xhr.abort(),this.req){if(K.gte(t.version,"v13.0.0")&&K.lt(t.version,"v14.0.0"))throw new Error("Superagent does not work in v13 properly with abort() due to Node.js core changes");K.gte(t.version,"v14.0.0")&&(this.req.destroyed=!0),this.req.abort()}return this.clearTimeout(),this.emit("abort"),this},n.prototype._auth=function(t,e,r,o){switch(r.type){case"basic":this.set("Authorization","Basic ".concat(o("".concat(t,":").concat(e))));break;case"auto":this.username=t,this.password=e;break;case"bearer":this.set("Authorization","Bearer ".concat(t))}return this},n.prototype.withCredentials=function(t){return void 0===t&&(t=!0),this._withCredentials=t,this},n.prototype.redirects=function(t){return this._maxRedirects=t,this},n.prototype.maxResponseSize=function(t){if("number"!=typeof t)throw new TypeError("Invalid argument");return this._maxResponseSize=t,this},n.prototype.toJSON=function(){return{method:this.method,url:this.url,data:this._data,headers:this._header}},n.prototype.send=function(t){var e=r(t),n=this._header["content-type"];if(this._formData)throw new Error(".send() can't be used if .attach() or .field() is used. Please use only .send() or only .field() & .attach()");if(e&&!this._data)Array.isArray(t)?this._data=[]:this._isHost(t)||(this._data={});else if(t&&this._data&&this._isHost(this._data))throw new Error("Can't merge these send calls");if(e&&r(this._data))for(var i in t)o(t,i)&&(this._data[i]=t[i]);else"string"==typeof t?(n||this.type("form"),(n=this._header["content-type"])&&(n=n.toLowerCase().trim()),this._data="application/x-www-form-urlencoded"===n?this._data?"".concat(this._data,"&").concat(t):t:(this._data||"")+t):this._data=t;return!e||this._isHost(t)||n||this.type("json"),this},n.prototype.sortQuery=function(t){return this._sort=void 0===t||t,this},n.prototype._finalizeQueryString=function(){var t=this._query.join("&");if(t&&(this.url+=(this.url.includes("?")?"&":"?")+t),this._query.length=0,this._sort){var e=this.url.indexOf("?");if(e>=0){var r=this.url.slice(e+1).split("&");"function"==typeof this._sort?r.sort(this._sort):r.sort(),this.url=this.url.slice(0,e)+"?"+r.join("&")}}},n.prototype._appendQueryString=function(){console.warn("Unsupported")},n.prototype._timeoutError=function(t,e,r){if(!this._aborted){var o=new Error("".concat(t+e,"ms exceeded"));o.timeout=e,o.code="ECONNABORTED",o.errno=r,this.timedout=!0,this.timedoutError=o,this.abort(),this.callback(o)}},n.prototype._setTimeouts=function(){var t=this;this._timeout&&!this._timer&&(this._timer=setTimeout((function(){t._timeoutError("Timeout of ",t._timeout,"ETIME")}),this._timeout)),this._responseTimeout&&!this._responseTimeoutTimer&&(this._responseTimeoutTimer=setTimeout((function(){t._timeoutError("Response timeout of ",t._responseTimeout,"ETIMEDOUT")}),this._responseTimeout))}}).call(this)}).call(this,Re);var Je;function Xe(){}Je=Xe,Xe.prototype.get=function(t){return this.header[t.toLowerCase()]},Xe.prototype._setHeaderProperties=function(t){var e=t["content-type"]||"";this.type=Te.type(e);var r=Te.params(e);for(var o in 
r)Object.prototype.hasOwnProperty.call(r,o)&&(this[o]=r[o]);this.links={};try{t.link&&(this.links=Te.parseLinks(t.link))}catch(n){}},Xe.prototype._setStatusProperties=function(t){var e=Math.trunc(t/100);this.statusCode=t,this.status=this.statusCode,this.statusType=e,this.info=1===e,this.ok=2===e,this.redirect=3===e,this.clientError=4===e,this.serverError=5===e,this.error=(4===e||5===e)&&this.toError(),this.created=201===t,this.accepted=202===t,this.noContent=204===t,this.badRequest=400===t,this.unauthorized=401===t,this.notAcceptable=406===t,this.forbidden=403===t,this.notFound=404===t,this.unprocessableEntity=422===t};var Ve={};function Qe(t){return function(t){if(Array.isArray(t))return Ye(t)}(t)||function(t){if("undefined"!=typeof Symbol&&null!=t[Symbol.iterator]||null!=t["@@iterator"])return Array.from(t)}(t)||Ke(t)||function(){throw new TypeError("Invalid attempt to spread non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}()}function Ke(t,e){if(t){if("string"==typeof t)return Ye(t,e);var r=Object.prototype.toString.call(t).slice(8,-1);return"Object"===r&&t.constructor&&(r=t.constructor.name),"Map"===r||"Set"===r?Array.from(t):"Arguments"===r||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(r)?Ye(t,e):void 0}}function Ye(t,e){(null==e||e>t.length)&&(e=t.length);for(var r=0,o=new Array(e);r=t.length?{done:!0}:{done:!1,value:t[o++]}},e:function(t){throw t},f:n}}throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}var i,a=!0,u=!1;return{s:function(){r=r.call(t)},n:function(){var t=r.next();return a=t.done,t},e:function(t){u=!0,i=t},f:function(){try{a||null==r.return||r.return()}finally{if(u)throw i}}}}(this._defaults);try{for(r.s();!(e=r.n()).done;){var o=e.value;t[o.fn].apply(t,Qe(o.args))}}catch(n){r.e(n)}finally{r.f()}},Ve=Ze;var or,nr={};function ir(t){return(ir="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(t){return typeof t}:function(t){return t&&"function"==typeof Symbol&&t.constructor===Symbol&&t!==Symbol.prototype?"symbol":typeof t})(t)}function ar(t,e){var r="undefined"!=typeof Symbol&&t[Symbol.iterator]||t["@@iterator"];if(!r){if(Array.isArray(t)||(r=function(t,e){if(!t)return;if("string"==typeof t)return ur(t,e);var r=Object.prototype.toString.call(t).slice(8,-1);"Object"===r&&t.constructor&&(r=t.constructor.name);if("Map"===r||"Set"===r)return Array.from(t);if("Arguments"===r||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(r))return ur(t,e)}(t))||e&&t&&"number"==typeof t.length){r&&(t=r);var o=0,n=function(){};return{s:n,n:function(){return o>=t.length?{done:!0}:{done:!1,value:t[o++]}},e:function(t){throw t},f:n}}throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}var i,a=!0,u=!1;return{s:function(){r=r.call(t)},n:function(){var t=r.next();return a=t.done,t},e:function(t){u=!0,i=t},f:function(){try{a||null==r.return||r.return()}finally{if(u)throw i}}}}function ur(t,e){(null==e||e>t.length)&&(e=t.length);for(var r=0,o=new Array(e);r0||t instanceof Object)?e(t):null)},vr.prototype.toError=function(){var t=this.req,e=t.method,r=t.url,o="cannot ".concat(e," ").concat(r," (").concat(this.status,")"),n=new Error(o);return n.status=this.status,n.method=e,n.url=r,n},fr.Response=vr,t(gr.prototype),cr(gr.prototype,Ge.prototype),gr.prototype.type=function(t){return 
this.set("Content-Type",fr.types[t]||t),this},gr.prototype.accept=function(t){return this.set("Accept",fr.types[t]||t),this},gr.prototype.auth=function(t,e,r){1===arguments.length&&(e=""),"object"==ir(e)&&null!==e&&(r=e,e=""),r||(r={type:"function"==typeof btoa?"basic":"auto"});var o=r.encoder?r.encoder:function(t){if("function"==typeof btoa)return btoa(t);throw new Error("Cannot use basic auth, btoa is not a function")};return this._auth(t,e,r,o)},gr.prototype.query=function(t){return"string"!=typeof t&&(t=hr(t)),t&&this._query.push(t),this},gr.prototype.attach=function(t,e,r){if(e){if(this._data)throw new Error("superagent can't mix .send() and .attach()");this._getFormData().append(t,e,r||e.name)}return this},gr.prototype._getFormData=function(){return this._formData||(this._formData=new or.FormData),this._formData},gr.prototype.callback=function(t,e){if(this._shouldRetry(t,e))return this._retry();var r=this._callback;this.clearTimeout(),t&&(this._maxRetries&&(t.retries=this._retries-1),this.emit("error",t)),r(t,e)},gr.prototype.crossDomainError=function(){var t=new Error("Request has been terminated\nPossible causes: the network is offline, Origin is not allowed by Access-Control-Allow-Origin, the page is being unloaded, etc.");t.crossDomain=!0,t.status=this.status,t.method=this.method,t.url=this.url,this.callback(t)},gr.prototype.agent=function(){return console.warn("This is not supported in browser version of superagent"),this},gr.prototype.ca=gr.prototype.agent,gr.prototype.buffer=gr.prototype.ca,gr.prototype.write=function(){throw new Error("Streaming is not supported in browser version of superagent")},gr.prototype.pipe=gr.prototype.write,gr.prototype._isHost=function(t){return t&&"object"==ir(t)&&!Array.isArray(t)&&"[object Object]"!==Object.prototype.toString.call(t)},gr.prototype.end=function(t){this._endCalled&&console.warn("Warning: .end() was called twice. 
This is not supported in superagent"),this._endCalled=!0,this._callback=t||pr,this._finalizeQueryString(),this._end()},gr.prototype._setUploadTimeout=function(){var t=this;this._uploadTimeout&&!this._uploadTimeoutTimer&&(this._uploadTimeoutTimer=setTimeout((function(){t._timeoutError("Upload timeout of ",t._uploadTimeout,"ETIMEDOUT")}),this._uploadTimeout))},gr.prototype._end=function(){if(this._aborted)return this.callback(new Error("The request has been aborted even before .end() was called"));var t=this;this.xhr=fr.getXHR();var e=this.xhr,r=this._formData||this._data;this._setTimeouts(),e.addEventListener("readystatechange",(function(){var r=e.readyState;if(r>=2&&t._responseTimeoutTimer&&clearTimeout(t._responseTimeoutTimer),4===r){var o;try{o=e.status}catch(n){o=0}if(!o){if(t.timedout||t._aborted)return;return t.crossDomainError()}t.emit("end")}}));var o=function(e,r){r.total>0&&(r.percent=r.loaded/r.total*100,100===r.percent&&clearTimeout(t._uploadTimeoutTimer)),r.direction=e,t.emit("progress",r)};if(this.hasListeners("progress"))try{e.addEventListener("progress",o.bind(null,"download")),e.upload&&e.upload.addEventListener("progress",o.bind(null,"upload"))}catch(u){}e.upload&&this._setUploadTimeout();try{this.username&&this.password?e.open(this.method,this.url,!0,this.username,this.password):e.open(this.method,this.url,!0)}catch(s){return this.callback(s)}if(this._withCredentials&&(e.withCredentials=!0),!this._formData&&"GET"!==this.method&&"HEAD"!==this.method&&"string"!=typeof r&&!this._isHost(r)){var n=this._header["content-type"],i=this._serializer||fr.serialize[n?n.split(";")[0]:""];!i&&br(n)&&(i=fr.serialize["application/json"]),i&&(r=i(r))}for(var a in this.header)null!==this.header[a]&&lr(this.header,a)&&e.setRequestHeader(a,this.header[a]);this._responseType&&(e.responseType=this._responseType),this.emit("request",this),e.send(void 0===r?null:r)},fr.agent=function(){return new Ve};for(var Sr=function(){var t=_r[wr];Ve.prototype[t.toLowerCase()]=function(e,r){var o=new fr.Request(t,e);return this._setDefaults(o),r&&o.end(r),o}},wr=0,_r=["GET","POST","OPTIONS","PATCH","PUT","DELETE"];wr<_r.length;wr++)Sr();function Ar(t,e,r){var o=fr("DELETE",t);return"function"==typeof e&&(r=e,e=null),e&&o.send(e),r&&o.end(r),o}return Ve.prototype.del=Ve.prototype.delete,fr.get=function(t,e,r){var o=fr("GET",t);return"function"==typeof e&&(r=e,e=null),e&&o.query(e),r&&o.end(r),o},fr.head=function(t,e,r){var o=fr("HEAD",t);return"function"==typeof e&&(r=e,e=null),e&&o.query(e),r&&o.end(r),o},fr.options=function(t,e,r){var o=fr("OPTIONS",t);return"function"==typeof e&&(r=e,e=null),e&&o.send(e),r&&o.end(r),o},fr.del=Ar,fr.delete=Ar,fr.patch=function(t,e,r){var o=fr("PATCH",t);return"function"==typeof e&&(r=e,e=null),e&&o.send(e),r&&o.end(r),o},fr.post=function(t,e,r){var o=fr("POST",t);return"function"==typeof e&&(r=e,e=null),e&&o.send(e),r&&o.end(r),o},fr.put=function(t,e,r){var o=fr("PUT",t);return"function"==typeof e&&(r=e,e=null),e&&o.send(e),r&&o.end(r),o},nr})); \ No newline at end of file diff --git a/node_modules/superagent/lib/agent-base.js b/node_modules/superagent/lib/agent-base.js new file mode 100644 index 0000000..13ec934 --- /dev/null +++ b/node_modules/superagent/lib/agent-base.js @@ -0,0 +1,60 @@ +"use strict"; + +function _toConsumableArray(arr) { return _arrayWithoutHoles(arr) || _iterableToArray(arr) || _unsupportedIterableToArray(arr) || _nonIterableSpread(); } + +function _nonIterableSpread() { throw new TypeError("Invalid attempt to spread non-iterable instance.\nIn order 
to be iterable, non-array objects must have a [Symbol.iterator]() method."); } + +function _iterableToArray(iter) { if (typeof Symbol !== "undefined" && iter[Symbol.iterator] != null || iter["@@iterator"] != null) return Array.from(iter); } + +function _arrayWithoutHoles(arr) { if (Array.isArray(arr)) return _arrayLikeToArray(arr); } + +function _createForOfIteratorHelper(o, allowArrayLike) { var it = typeof Symbol !== "undefined" && o[Symbol.iterator] || o["@@iterator"]; if (!it) { if (Array.isArray(o) || (it = _unsupportedIterableToArray(o)) || allowArrayLike && o && typeof o.length === "number") { if (it) o = it; var i = 0; var F = function F() {}; return { s: F, n: function n() { if (i >= o.length) return { done: true }; return { done: false, value: o[i++] }; }, e: function e(_e) { throw _e; }, f: F }; } throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); } var normalCompletion = true, didErr = false, err; return { s: function s() { it = it.call(o); }, n: function n() { var step = it.next(); normalCompletion = step.done; return step; }, e: function e(_e2) { didErr = true; err = _e2; }, f: function f() { try { if (!normalCompletion && it.return != null) it.return(); } finally { if (didErr) throw err; } } }; } + +function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return Array.from(o); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); } + +function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) { arr2[i] = arr[i]; } return arr2; } + +function Agent() { + this._defaults = []; +} + +var _loop = function _loop() { + var fn = _arr[_i]; + + // Default setting for all requests from this agent + Agent.prototype[fn] = function () { + for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) { + args[_key] = arguments[_key]; + } + + this._defaults.push({ + fn: fn, + args: args + }); + + return this; + }; +}; + +for (var _i = 0, _arr = ['use', 'on', 'once', 'set', 'query', 'type', 'accept', 'auth', 'withCredentials', 'sortQuery', 'retry', 'ok', 'redirects', 'timeout', 'buffer', 'serialize', 'parse', 'ca', 'key', 'pfx', 'cert', 'disableTLSCerts']; _i < _arr.length; _i++) { + _loop(); +} + +Agent.prototype._setDefaults = function (request) { + var _iterator = _createForOfIteratorHelper(this._defaults), + _step; + + try { + for (_iterator.s(); !(_step = _iterator.n()).done;) { + var def = _step.value; + request[def.fn].apply(request, _toConsumableArray(def.args)); + } + } catch (err) { + _iterator.e(err); + } finally { + _iterator.f(); + } +}; + +module.exports = Agent; +//# 
sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJuYW1lcyI6WyJBZ2VudCIsIl9kZWZhdWx0cyIsImZuIiwicHJvdG90eXBlIiwiYXJncyIsInB1c2giLCJfc2V0RGVmYXVsdHMiLCJyZXF1ZXN0IiwiZGVmIiwibW9kdWxlIiwiZXhwb3J0cyJdLCJzb3VyY2VzIjpbIi4uL3NyYy9hZ2VudC1iYXNlLmpzIl0sInNvdXJjZXNDb250ZW50IjpbImZ1bmN0aW9uIEFnZW50KCkge1xuICB0aGlzLl9kZWZhdWx0cyA9IFtdO1xufVxuXG5mb3IgKGNvbnN0IGZuIG9mIFtcbiAgJ3VzZScsXG4gICdvbicsXG4gICdvbmNlJyxcbiAgJ3NldCcsXG4gICdxdWVyeScsXG4gICd0eXBlJyxcbiAgJ2FjY2VwdCcsXG4gICdhdXRoJyxcbiAgJ3dpdGhDcmVkZW50aWFscycsXG4gICdzb3J0UXVlcnknLFxuICAncmV0cnknLFxuICAnb2snLFxuICAncmVkaXJlY3RzJyxcbiAgJ3RpbWVvdXQnLFxuICAnYnVmZmVyJyxcbiAgJ3NlcmlhbGl6ZScsXG4gICdwYXJzZScsXG4gICdjYScsXG4gICdrZXknLFxuICAncGZ4JyxcbiAgJ2NlcnQnLFxuICAnZGlzYWJsZVRMU0NlcnRzJ1xuXSkge1xuICAvLyBEZWZhdWx0IHNldHRpbmcgZm9yIGFsbCByZXF1ZXN0cyBmcm9tIHRoaXMgYWdlbnRcbiAgQWdlbnQucHJvdG90eXBlW2ZuXSA9IGZ1bmN0aW9uICguLi5hcmdzKSB7XG4gICAgdGhpcy5fZGVmYXVsdHMucHVzaCh7IGZuLCBhcmdzIH0pO1xuICAgIHJldHVybiB0aGlzO1xuICB9O1xufVxuXG5BZ2VudC5wcm90b3R5cGUuX3NldERlZmF1bHRzID0gZnVuY3Rpb24gKHJlcXVlc3QpIHtcbiAgZm9yIChjb25zdCBkZWYgb2YgdGhpcy5fZGVmYXVsdHMpIHtcbiAgICByZXF1ZXN0W2RlZi5mbl0oLi4uZGVmLmFyZ3MpO1xuICB9XG59O1xuXG5tb2R1bGUuZXhwb3J0cyA9IEFnZW50O1xuIl0sIm1hcHBpbmdzIjoiOzs7Ozs7Ozs7Ozs7Ozs7O0FBQUEsU0FBU0EsS0FBVCxHQUFpQjtFQUNmLEtBQUtDLFNBQUwsR0FBaUIsRUFBakI7QUFDRDs7O0VBRUksSUFBTUMsRUFBRSxXQUFSOztFQXdCSDtFQUNBRixLQUFLLENBQUNHLFNBQU4sQ0FBZ0JELEVBQWhCLElBQXNCLFlBQW1CO0lBQUEsa0NBQU5FLElBQU07TUFBTkEsSUFBTTtJQUFBOztJQUN2QyxLQUFLSCxTQUFMLENBQWVJLElBQWYsQ0FBb0I7TUFBRUgsRUFBRSxFQUFGQSxFQUFGO01BQU1FLElBQUksRUFBSkE7SUFBTixDQUFwQjs7SUFDQSxPQUFPLElBQVA7RUFDRCxDQUhEOzs7QUF6QkYsd0JBQWlCLENBQ2YsS0FEZSxFQUVmLElBRmUsRUFHZixNQUhlLEVBSWYsS0FKZSxFQUtmLE9BTGUsRUFNZixNQU5lLEVBT2YsUUFQZSxFQVFmLE1BUmUsRUFTZixpQkFUZSxFQVVmLFdBVmUsRUFXZixPQVhlLEVBWWYsSUFaZSxFQWFmLFdBYmUsRUFjZixTQWRlLEVBZWYsUUFmZSxFQWdCZixXQWhCZSxFQWlCZixPQWpCZSxFQWtCZixJQWxCZSxFQW1CZixLQW5CZSxFQW9CZixLQXBCZSxFQXFCZixNQXJCZSxFQXNCZixpQkF0QmUsQ0FBakIsMEJBdUJHO0VBQUE7QUFNRjs7QUFFREosS0FBSyxDQUFDRyxTQUFOLENBQWdCRyxZQUFoQixHQUErQixVQUFVQyxPQUFWLEVBQW1CO0VBQUEsMkNBQzlCLEtBQUtOLFNBRHlCO0VBQUE7O0VBQUE7SUFDaEQsb0RBQWtDO01BQUEsSUFBdkJPLEdBQXVCO01BQ2hDRCxPQUFPLENBQUNDLEdBQUcsQ0FBQ04sRUFBTCxDQUFQLE9BQUFLLE9BQU8scUJBQVlDLEdBQUcsQ0FBQ0osSUFBaEIsRUFBUDtJQUNEO0VBSCtDO0lBQUE7RUFBQTtJQUFBO0VBQUE7QUFJakQsQ0FKRDs7QUFNQUssTUFBTSxDQUFDQyxPQUFQLEdBQWlCVixLQUFqQiJ9 \ No newline at end of file diff --git a/node_modules/superagent/lib/client.js b/node_modules/superagent/lib/client.js new file mode 100644 index 0000000..3bcbd9b --- /dev/null +++ b/node_modules/superagent/lib/client.js @@ -0,0 +1,1052 @@ +"use strict"; + +function _typeof(obj) { "@babel/helpers - typeof"; return _typeof = "function" == typeof Symbol && "symbol" == typeof Symbol.iterator ? function (obj) { return typeof obj; } : function (obj) { return obj && "function" == typeof Symbol && obj.constructor === Symbol && obj !== Symbol.prototype ? 
"symbol" : typeof obj; }, _typeof(obj); } + +function _createForOfIteratorHelper(o, allowArrayLike) { var it = typeof Symbol !== "undefined" && o[Symbol.iterator] || o["@@iterator"]; if (!it) { if (Array.isArray(o) || (it = _unsupportedIterableToArray(o)) || allowArrayLike && o && typeof o.length === "number") { if (it) o = it; var i = 0; var F = function F() {}; return { s: F, n: function n() { if (i >= o.length) return { done: true }; return { done: false, value: o[i++] }; }, e: function e(_e) { throw _e; }, f: F }; } throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); } var normalCompletion = true, didErr = false, err; return { s: function s() { it = it.call(o); }, n: function n() { var step = it.next(); normalCompletion = step.done; return step; }, e: function e(_e2) { didErr = true; err = _e2; }, f: function f() { try { if (!normalCompletion && it.return != null) it.return(); } finally { if (didErr) throw err; } } }; } + +function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return Array.from(o); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); } + +function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) { arr2[i] = arr[i]; } return arr2; } + +/** + * Root reference for iframes. + */ +var root; + +if (typeof window !== 'undefined') { + // Browser window + root = window; +} else if (typeof self === 'undefined') { + // Other environments + console.warn('Using browser-only version of superagent in non-browser environment'); + root = void 0; +} else { + // Web Worker + root = self; +} + +var Emitter = require('component-emitter'); + +var safeStringify = require('fast-safe-stringify'); + +var qs = require('qs'); + +var RequestBase = require('./request-base'); + +var _require = require('./utils'), + isObject = _require.isObject, + mixin = _require.mixin, + hasOwn = _require.hasOwn; + +var ResponseBase = require('./response-base'); + +var Agent = require('./agent-base'); +/** + * Noop. + */ + + +function noop() {} +/** + * Expose `request`. + */ + + +module.exports = function (method, url) { + // callback + if (typeof url === 'function') { + return new exports.Request('GET', method).end(url); + } // url first + + + if (arguments.length === 1) { + return new exports.Request('GET', method); + } + + return new exports.Request(method, url); +}; + +exports = module.exports; +var request = exports; +exports.Request = Request; +/** + * Determine XHR. 
+ */ + +request.getXHR = function () { + if (root.XMLHttpRequest && (!root.location || root.location.protocol !== 'file:' || !root.ActiveXObject)) { + return new XMLHttpRequest(); + } + + try { + return new ActiveXObject('Microsoft.XMLHTTP'); + } catch (_unused) { + /**/ + } + + try { + return new ActiveXObject('Msxml2.XMLHTTP.6.0'); + } catch (_unused2) { + /**/ + } + + try { + return new ActiveXObject('Msxml2.XMLHTTP.3.0'); + } catch (_unused3) { + /**/ + } + + try { + return new ActiveXObject('Msxml2.XMLHTTP'); + } catch (_unused4) { + /**/ + } + + throw new Error('Browser-only version of superagent could not find XHR'); +}; +/** + * Removes leading and trailing whitespace, added to support IE. + * + * @param {String} s + * @return {String} + * @api private + */ + + +var trim = ''.trim ? function (s) { + return s.trim(); +} : function (s) { + return s.replace(/(^\s*|\s*$)/g, ''); +}; +/** + * Serialize the given `obj`. + * + * @param {Object} obj + * @return {String} + * @api private + */ + +function serialize(object) { + if (!isObject(object)) return object; + var pairs = []; + + for (var key in object) { + if (hasOwn(object, key)) pushEncodedKeyValuePair(pairs, key, object[key]); + } + + return pairs.join('&'); +} +/** + * Helps 'serialize' with serializing arrays. + * Mutates the pairs array. + * + * @param {Array} pairs + * @param {String} key + * @param {Mixed} val + */ + + +function pushEncodedKeyValuePair(pairs, key, value) { + if (value === undefined) return; + + if (value === null) { + pairs.push(encodeURI(key)); + return; + } + + if (Array.isArray(value)) { + var _iterator = _createForOfIteratorHelper(value), + _step; + + try { + for (_iterator.s(); !(_step = _iterator.n()).done;) { + var v = _step.value; + pushEncodedKeyValuePair(pairs, key, v); + } + } catch (err) { + _iterator.e(err); + } finally { + _iterator.f(); + } + } else if (isObject(value)) { + for (var subkey in value) { + if (hasOwn(value, subkey)) pushEncodedKeyValuePair(pairs, "".concat(key, "[").concat(subkey, "]"), value[subkey]); + } + } else { + pairs.push(encodeURI(key) + '=' + encodeURIComponent(value)); + } +} +/** + * Expose serialization method. + */ + + +request.serializeObject = serialize; +/** + * Parse the given x-www-form-urlencoded `str`. + * + * @param {String} str + * @return {Object} + * @api private + */ + +function parseString(string_) { + var object = {}; + var pairs = string_.split('&'); + var pair; + var pos; + + for (var i = 0, length_ = pairs.length; i < length_; ++i) { + pair = pairs[i]; + pos = pair.indexOf('='); + + if (pos === -1) { + object[decodeURIComponent(pair)] = ''; + } else { + object[decodeURIComponent(pair.slice(0, pos))] = decodeURIComponent(pair.slice(pos + 1)); + } + } + + return object; +} +/** + * Expose parser. + */ + + +request.parseString = parseString; +/** + * Default MIME type map. + * + * superagent.types.xml = 'application/xml'; + * + */ + +request.types = { + html: 'text/html', + json: 'application/json', + xml: 'text/xml', + urlencoded: 'application/x-www-form-urlencoded', + form: 'application/x-www-form-urlencoded', + 'form-data': 'application/x-www-form-urlencoded' +}; +/** + * Default serialization map. + * + * superagent.serialize['application/xml'] = function(obj){ + * return 'generated xml here'; + * }; + * + */ + +request.serialize = { + 'application/x-www-form-urlencoded': qs.stringify, + 'application/json': safeStringify +}; +/** + * Default parsers. 
+ * + * superagent.parse['application/xml'] = function(str){ + * return { object parsed from str }; + * }; + * + */ + +request.parse = { + 'application/x-www-form-urlencoded': parseString, + 'application/json': JSON.parse +}; +/** + * Parse the given header `str` into + * an object containing the mapped fields. + * + * @param {String} str + * @return {Object} + * @api private + */ + +function parseHeader(string_) { + var lines = string_.split(/\r?\n/); + var fields = {}; + var index; + var line; + var field; + var value; + + for (var i = 0, length_ = lines.length; i < length_; ++i) { + line = lines[i]; + index = line.indexOf(':'); + + if (index === -1) { + // could be empty line, just skip it + continue; + } + + field = line.slice(0, index).toLowerCase(); + value = trim(line.slice(index + 1)); + fields[field] = value; + } + + return fields; +} +/** + * Check if `mime` is json or has +json structured syntax suffix. + * + * @param {String} mime + * @return {Boolean} + * @api private + */ + + +function isJSON(mime) { + // should match /json or +json + // but not /json-seq + return /[/+]json($|[^-\w])/i.test(mime); +} +/** + * Initialize a new `Response` with the given `xhr`. + * + * - set flags (.ok, .error, etc) + * - parse header + * + * Examples: + * + * Aliasing `superagent` as `request` is nice: + * + * request = superagent; + * + * We can use the promise-like API, or pass callbacks: + * + * request.get('/').end(function(res){}); + * request.get('/', function(res){}); + * + * Sending data can be chained: + * + * request + * .post('/user') + * .send({ name: 'tj' }) + * .end(function(res){}); + * + * Or passed to `.send()`: + * + * request + * .post('/user') + * .send({ name: 'tj' }, function(res){}); + * + * Or passed to `.post()`: + * + * request + * .post('/user', { name: 'tj' }) + * .end(function(res){}); + * + * Or further reduced to a single call for simple cases: + * + * request + * .post('/user', { name: 'tj' }, function(res){}); + * + * @param {XMLHTTPRequest} xhr + * @param {Object} options + * @api private + */ + + +function Response(request_) { + this.req = request_; + this.xhr = this.req.xhr; // responseText is accessible only if responseType is '' or 'text' and on older browsers + + this.text = this.req.method !== 'HEAD' && (this.xhr.responseType === '' || this.xhr.responseType === 'text') || typeof this.xhr.responseType === 'undefined' ? this.xhr.responseText : null; + this.statusText = this.req.xhr.statusText; + var status = this.xhr.status; // handle IE9 bug: http://stackoverflow.com/questions/10046972/msie-returns-status-code-of-1223-for-ajax-request + + if (status === 1223) { + status = 204; + } + + this._setStatusProperties(status); + + this.headers = parseHeader(this.xhr.getAllResponseHeaders()); + this.header = this.headers; // getAllResponseHeaders sometimes falsely returns "" for CORS requests, but + // getResponseHeader still works. so we get content-type even if getting + // other headers fails. + + this.header['content-type'] = this.xhr.getResponseHeader('content-type'); + + this._setHeaderProperties(this.header); + + if (this.text === null && request_._responseType) { + this.body = this.xhr.response; + } else { + this.body = this.req.method === 'HEAD' ? null : this._parseBody(this.text ? this.text : this.xhr.response); + } +} + +mixin(Response.prototype, ResponseBase.prototype); +/** + * Parse the given body `str`. + * + * Used for auto-parsing of bodies. Parsers + * are defined on the `superagent.parse` object. 
+ * + * @param {String} str + * @return {Mixed} + * @api private + */ + +Response.prototype._parseBody = function (string_) { + var parse = request.parse[this.type]; + + if (this.req._parser) { + return this.req._parser(this, string_); + } + + if (!parse && isJSON(this.type)) { + parse = request.parse['application/json']; + } + + return parse && string_ && (string_.length > 0 || string_ instanceof Object) ? parse(string_) : null; +}; +/** + * Return an `Error` representative of this response. + * + * @return {Error} + * @api public + */ + + +Response.prototype.toError = function () { + var req = this.req; + var method = req.method; + var url = req.url; + var message = "cannot ".concat(method, " ").concat(url, " (").concat(this.status, ")"); + var error = new Error(message); + error.status = this.status; + error.method = method; + error.url = url; + return error; +}; +/** + * Expose `Response`. + */ + + +request.Response = Response; +/** + * Initialize a new `Request` with the given `method` and `url`. + * + * @param {String} method + * @param {String} url + * @api public + */ + +function Request(method, url) { + var self = this; + this._query = this._query || []; + this.method = method; + this.url = url; + this.header = {}; // preserves header name case + + this._header = {}; // coerces header names to lowercase + + this.on('end', function () { + var error = null; + var res = null; + + try { + res = new Response(self); + } catch (err) { + error = new Error('Parser is unable to parse the response'); + error.parse = true; + error.original = err; // issue #675: return the raw response if the response parsing fails + + if (self.xhr) { + // ie9 doesn't have 'response' property + error.rawResponse = typeof self.xhr.responseType === 'undefined' ? self.xhr.responseText : self.xhr.response; // issue #876: return the http status code if the response parsing fails + + error.status = self.xhr.status ? self.xhr.status : null; + error.statusCode = error.status; // backwards-compat only + } else { + error.rawResponse = null; + error.status = null; + } + + return self.callback(error); + } + + self.emit('response', res); + var new_error; + + try { + if (!self._isResponseOK(res)) { + new_error = new Error(res.statusText || res.text || 'Unsuccessful HTTP response'); + } + } catch (err) { + new_error = err; // ok() callback can throw + } // #1000 don't catch errors from the callback to avoid double calling it + + + if (new_error) { + new_error.original = error; + new_error.response = res; + new_error.status = res.status; + self.callback(new_error, res); + } else { + self.callback(null, res); + } + }); +} +/** + * Mixin `Emitter` and `RequestBase`. + */ +// eslint-disable-next-line new-cap + + +Emitter(Request.prototype); +mixin(Request.prototype, RequestBase.prototype); +/** + * Set Content-Type to `type`, mapping values from `request.types`. + * + * Examples: + * + * superagent.types.xml = 'application/xml'; + * + * request.post('/') + * .type('xml') + * .send(xmlstring) + * .end(callback); + * + * request.post('/') + * .type('application/xml') + * .send(xmlstring) + * .end(callback); + * + * @param {String} type + * @return {Request} for chaining + * @api public + */ + +Request.prototype.type = function (type) { + this.set('Content-Type', request.types[type] || type); + return this; +}; +/** + * Set Accept to `type`, mapping values from `request.types`. 
+ * + * Examples: + * + * superagent.types.json = 'application/json'; + * + * request.get('/agent') + * .accept('json') + * .end(callback); + * + * request.get('/agent') + * .accept('application/json') + * .end(callback); + * + * @param {String} accept + * @return {Request} for chaining + * @api public + */ + + +Request.prototype.accept = function (type) { + this.set('Accept', request.types[type] || type); + return this; +}; +/** + * Set Authorization field value with `user` and `pass`. + * + * @param {String} user + * @param {String} [pass] optional in case of using 'bearer' as type + * @param {Object} options with 'type' property 'auto', 'basic' or 'bearer' (default 'basic') + * @return {Request} for chaining + * @api public + */ + + +Request.prototype.auth = function (user, pass, options) { + if (arguments.length === 1) pass = ''; + + if (_typeof(pass) === 'object' && pass !== null) { + // pass is optional and can be replaced with options + options = pass; + pass = ''; + } + + if (!options) { + options = { + type: typeof btoa === 'function' ? 'basic' : 'auto' + }; + } + + var encoder = options.encoder ? options.encoder : function (string) { + if (typeof btoa === 'function') { + return btoa(string); + } + + throw new Error('Cannot use basic auth, btoa is not a function'); + }; + return this._auth(user, pass, options, encoder); +}; +/** + * Add query-string `val`. + * + * Examples: + * + * request.get('/shoes') + * .query('size=10') + * .query({ color: 'blue' }) + * + * @param {Object|String} val + * @return {Request} for chaining + * @api public + */ + + +Request.prototype.query = function (value) { + if (typeof value !== 'string') value = serialize(value); + if (value) this._query.push(value); + return this; +}; +/** + * Queue the given `file` as an attachment to the specified `field`, + * with optional `options` (or filename). + * + * ``` js + * request.post('/upload') + * .attach('content', new Blob(['hey!'], { type: "text/html"})) + * .end(callback); + * ``` + * + * @param {String} field + * @param {Blob|File} file + * @param {String|Object} options + * @return {Request} for chaining + * @api public + */ + + +Request.prototype.attach = function (field, file, options) { + if (file) { + if (this._data) { + throw new Error("superagent can't mix .send() and .attach()"); + } + + this._getFormData().append(field, file, options || file.name); + } + + return this; +}; + +Request.prototype._getFormData = function () { + if (!this._formData) { + this._formData = new root.FormData(); + } + + return this._formData; +}; +/** + * Invoke the callback with `err` and `res` + * and handle arity check. + * + * @param {Error} err + * @param {Response} res + * @api private + */ + + +Request.prototype.callback = function (error, res) { + if (this._shouldRetry(error, res)) { + return this._retry(); + } + + var fn = this._callback; + this.clearTimeout(); + + if (error) { + if (this._maxRetries) error.retries = this._retries - 1; + this.emit('error', error); + } + + fn(error, res); +}; +/** + * Invoke callback with x-domain error. 
+ * + * @api private + */ + + +Request.prototype.crossDomainError = function () { + var error = new Error('Request has been terminated\nPossible causes: the network is offline, Origin is not allowed by Access-Control-Allow-Origin, the page is being unloaded, etc.'); + error.crossDomain = true; + error.status = this.status; + error.method = this.method; + error.url = this.url; + this.callback(error); +}; // This only warns, because the request is still likely to work + + +Request.prototype.agent = function () { + console.warn('This is not supported in browser version of superagent'); + return this; +}; + +Request.prototype.ca = Request.prototype.agent; +Request.prototype.buffer = Request.prototype.ca; // This throws, because it can't send/receive data as expected + +Request.prototype.write = function () { + throw new Error('Streaming is not supported in browser version of superagent'); +}; + +Request.prototype.pipe = Request.prototype.write; +/** + * Check if `obj` is a host object, + * we don't want to serialize these :) + * + * @param {Object} obj host object + * @return {Boolean} is a host object + * @api private + */ + +Request.prototype._isHost = function (object) { + // Native objects stringify to [object File], [object Blob], [object FormData], etc. + return object && _typeof(object) === 'object' && !Array.isArray(object) && Object.prototype.toString.call(object) !== '[object Object]'; +}; +/** + * Initiate request, invoking callback `fn(res)` + * with an instanceof `Response`. + * + * @param {Function} fn + * @return {Request} for chaining + * @api public + */ + + +Request.prototype.end = function (fn) { + if (this._endCalled) { + console.warn('Warning: .end() was called twice. This is not supported in superagent'); + } + + this._endCalled = true; // store callback + + this._callback = fn || noop; // querystring + + this._finalizeQueryString(); + + this._end(); +}; + +Request.prototype._setUploadTimeout = function () { + var self = this; // upload timeout it's wokrs only if deadline timeout is off + + if (this._uploadTimeout && !this._uploadTimeoutTimer) { + this._uploadTimeoutTimer = setTimeout(function () { + self._timeoutError('Upload timeout of ', self._uploadTimeout, 'ETIMEDOUT'); + }, this._uploadTimeout); + } +}; // eslint-disable-next-line complexity + + +Request.prototype._end = function () { + if (this._aborted) return this.callback(new Error('The request has been aborted even before .end() was called')); + var self = this; + this.xhr = request.getXHR(); + var xhr = this.xhr; + var data = this._formData || this._data; + + this._setTimeouts(); // state change + + + xhr.addEventListener('readystatechange', function () { + var readyState = xhr.readyState; + + if (readyState >= 2 && self._responseTimeoutTimer) { + clearTimeout(self._responseTimeoutTimer); + } + + if (readyState !== 4) { + return; + } // In IE9, reads to any property (e.g. 
status) off of an aborted XHR will + // result in the error "Could not complete the operation due to error c00c023f" + + + var status; + + try { + status = xhr.status; + } catch (_unused5) { + status = 0; + } + + if (!status) { + if (self.timedout || self._aborted) return; + return self.crossDomainError(); + } + + self.emit('end'); + }); // progress + + var handleProgress = function handleProgress(direction, e) { + if (e.total > 0) { + e.percent = e.loaded / e.total * 100; + + if (e.percent === 100) { + clearTimeout(self._uploadTimeoutTimer); + } + } + + e.direction = direction; + self.emit('progress', e); + }; + + if (this.hasListeners('progress')) { + try { + xhr.addEventListener('progress', handleProgress.bind(null, 'download')); + + if (xhr.upload) { + xhr.upload.addEventListener('progress', handleProgress.bind(null, 'upload')); + } + } catch (_unused6) {// Accessing xhr.upload fails in IE from a web worker, so just pretend it doesn't exist. + // Reported here: + // https://connect.microsoft.com/IE/feedback/details/837245/xmlhttprequest-upload-throws-invalid-argument-when-used-from-web-worker-context + } + } + + if (xhr.upload) { + this._setUploadTimeout(); + } // initiate request + + + try { + if (this.username && this.password) { + xhr.open(this.method, this.url, true, this.username, this.password); + } else { + xhr.open(this.method, this.url, true); + } + } catch (err) { + // see #1149 + return this.callback(err); + } // CORS + + + if (this._withCredentials) xhr.withCredentials = true; // body + + if (!this._formData && this.method !== 'GET' && this.method !== 'HEAD' && typeof data !== 'string' && !this._isHost(data)) { + // serialize stuff + var contentType = this._header['content-type']; + + var _serialize = this._serializer || request.serialize[contentType ? contentType.split(';')[0] : '']; + + if (!_serialize && isJSON(contentType)) { + _serialize = request.serialize['application/json']; + } + + if (_serialize) data = _serialize(data); + } // set header fields + + + for (var field in this.header) { + if (this.header[field] === null) continue; + if (hasOwn(this.header, field)) xhr.setRequestHeader(field, this.header[field]); + } + + if (this._responseType) { + xhr.responseType = this._responseType; + } // send stuff + + + this.emit('request', this); // IE11 xhr.send(undefined) sends 'undefined' string as POST payload (instead of nothing) + // We need null here if data is undefined + + xhr.send(typeof data === 'undefined' ? null : data); +}; + +request.agent = function () { + return new Agent(); +}; + +var _loop = function _loop() { + var method = _arr[_i]; + + Agent.prototype[method.toLowerCase()] = function (url, fn) { + var request_ = new request.Request(method, url); + + this._setDefaults(request_); + + if (fn) { + request_.end(fn); + } + + return request_; + }; +}; + +for (var _i = 0, _arr = ['GET', 'POST', 'OPTIONS', 'PATCH', 'PUT', 'DELETE']; _i < _arr.length; _i++) { + _loop(); +} + +Agent.prototype.del = Agent.prototype.delete; +/** + * GET `url` with optional callback `fn(res)`. + * + * @param {String} url + * @param {Mixed|Function} [data] or fn + * @param {Function} [fn] + * @return {Request} + * @api public + */ + +request.get = function (url, data, fn) { + var request_ = request('GET', url); + + if (typeof data === 'function') { + fn = data; + data = null; + } + + if (data) request_.query(data); + if (fn) request_.end(fn); + return request_; +}; +/** + * HEAD `url` with optional callback `fn(res)`. 
+ * + * @param {String} url + * @param {Mixed|Function} [data] or fn + * @param {Function} [fn] + * @return {Request} + * @api public + */ + + +request.head = function (url, data, fn) { + var request_ = request('HEAD', url); + + if (typeof data === 'function') { + fn = data; + data = null; + } + + if (data) request_.query(data); + if (fn) request_.end(fn); + return request_; +}; +/** + * OPTIONS query to `url` with optional callback `fn(res)`. + * + * @param {String} url + * @param {Mixed|Function} [data] or fn + * @param {Function} [fn] + * @return {Request} + * @api public + */ + + +request.options = function (url, data, fn) { + var request_ = request('OPTIONS', url); + + if (typeof data === 'function') { + fn = data; + data = null; + } + + if (data) request_.send(data); + if (fn) request_.end(fn); + return request_; +}; +/** + * DELETE `url` with optional `data` and callback `fn(res)`. + * + * @param {String} url + * @param {Mixed} [data] + * @param {Function} [fn] + * @return {Request} + * @api public + */ + + +function del(url, data, fn) { + var request_ = request('DELETE', url); + + if (typeof data === 'function') { + fn = data; + data = null; + } + + if (data) request_.send(data); + if (fn) request_.end(fn); + return request_; +} + +request.del = del; +request.delete = del; +/** + * PATCH `url` with optional `data` and callback `fn(res)`. + * + * @param {String} url + * @param {Mixed} [data] + * @param {Function} [fn] + * @return {Request} + * @api public + */ + +request.patch = function (url, data, fn) { + var request_ = request('PATCH', url); + + if (typeof data === 'function') { + fn = data; + data = null; + } + + if (data) request_.send(data); + if (fn) request_.end(fn); + return request_; +}; +/** + * POST `url` with optional `data` and callback `fn(res)`. + * + * @param {String} url + * @param {Mixed} [data] + * @param {Function} [fn] + * @return {Request} + * @api public + */ + + +request.post = function (url, data, fn) { + var request_ = request('POST', url); + + if (typeof data === 'function') { + fn = data; + data = null; + } + + if (data) request_.send(data); + if (fn) request_.end(fn); + return request_; +}; +/** + * PUT `url` with optional `data` and callback `fn(res)`. 
+ * + * @param {String} url + * @param {Mixed|Function} [data] or fn + * @param {Function} [fn] + * @return {Request} + * @api public + */ + + +request.put = function (url, data, fn) { + var request_ = request('PUT', url); + + if (typeof data === 'function') { + fn = data; + data = null; + } + + if (data) request_.send(data); + if (fn) request_.end(fn); + return request_; +}; +//# sourceMappingURL=data:application/json;charset=utf-8;base64,{"version":3,"names":["root","window","self","console","warn","Emitter","require","safeStringify","qs","RequestBase","isObject","mixin","hasOwn","ResponseBase","Agent","noop","module","exports","method","url","Request","end","arguments","length","request","getXHR","XMLHttpRequest","location","protocol","ActiveXObject","Error","trim","s","replace","serialize","object","pairs","key","pushEncodedKeyValuePair","join","value","undefined","push","encodeURI","Array","isArray","v","subkey","encodeURIComponent","serializeObject","parseString","string_","split","pair","pos","i","length_","indexOf","decodeURIComponent","slice","types","html","json","xml","urlencoded","form","stringify","parse","JSON","parseHeader","lines","fields","index","line","field","toLowerCase","isJSON","mime","test","Response","request_","req","xhr","text","responseType","responseText","statusText","status","_setStatusProperties","headers","getAllResponseHeaders","header","getResponseHeader","_setHeaderProperties","_responseType","body","response","_parseBody","prototype","type","_parser","Object","toError","message","error","_query","_header","on","res","err","original","rawResponse","statusCode","callback","emit","new_error","_isResponseOK","set","accept","auth","user","pass","options","btoa","encoder","string","_auth","query","attach","file","_data","_getFormData","append","name","_formData","FormData","_shouldRetry","_retry","fn","_callback","clearTimeout","_maxRetries","retries","_retries","crossDomainError","crossDomain","agent","ca","buffer","write","pipe","_isHost","toString","call","_endCalled","_finalizeQueryString","_end","_setUploadTimeout","_uploadTimeout","_uploadTimeoutTimer","setTimeout","_timeoutError","_aborted","data","_setTimeouts","addEventListener","readyState","_responseTimeoutTimer","timedout","handleProgress","direction","e","total","percent","loaded","hasListeners","bind","upload","username","password","open","_withCredentials","withCredentials","contentType","_serializer","setRequestHeader","send","_setDefaults","del","delete","get","head","patch","post","put"],"sources":["../src/client.js"],"sourcesContent":["/**\n * Root reference for iframes.\n */\n\nlet root;\nif (typeof window !== 'undefined') {\n  // Browser window\n  root = window;\n} else if (typeof self === 'undefined') {\n  // Other environments\n  console.warn(\n    'Using browser-only version of superagent in non-browser environment'\n  );\n  root = this;\n} else {\n  // Web Worker\n  root = self;\n}\n\nconst Emitter = require('component-emitter');\nconst safeStringify = require('fast-safe-stringify');\nconst qs = require('qs');\nconst RequestBase = require('./request-base');\nconst { isObject, mixin, hasOwn } = require('./utils');\nconst ResponseBase = require('./response-base');\nconst Agent = require('./agent-base');\n\n/**\n * Noop.\n */\n\nfunction noop() {}\n\n/**\n * Expose `request`.\n */\n\nmodule.exports = function (method, url) {\n  // callback\n  if (typeof url === 'function') {\n    return new exports.Request('GET', method).end(url);\n  }\n\n  // url first\n  if (arguments.length === 1) {\n 
   return new exports.Request('GET', method);\n  }\n\n  return new exports.Request(method, url);\n};\n\nexports = module.exports;\n\nconst request = exports;\n\nexports.Request = Request;\n\n/**\n * Determine XHR.\n */\n\nrequest.getXHR = () => {\n  if (\n    root.XMLHttpRequest &&\n    (!root.location ||\n      root.location.protocol !== 'file:' ||\n      !root.ActiveXObject)\n  ) {\n    return new XMLHttpRequest();\n  }\n\n  try {\n    return new ActiveXObject('Microsoft.XMLHTTP');\n  } catch {\n    /**/\n  }\n\n  try {\n    return new ActiveXObject('Msxml2.XMLHTTP.6.0');\n  } catch {\n    /**/\n  }\n\n  try {\n    return new ActiveXObject('Msxml2.XMLHTTP.3.0');\n  } catch {\n    /**/\n  }\n\n  try {\n    return new ActiveXObject('Msxml2.XMLHTTP');\n  } catch {\n    /**/\n  }\n\n  throw new Error('Browser-only version of superagent could not find XHR');\n};\n\n/**\n * Removes leading and trailing whitespace, added to support IE.\n *\n * @param {String} s\n * @return {String}\n * @api private\n */\n\nconst trim = ''.trim ? (s) => s.trim() : (s) => s.replace(/(^\\s*|\\s*$)/g, '');\n\n/**\n * Serialize the given `obj`.\n *\n * @param {Object} obj\n * @return {String}\n * @api private\n */\n\nfunction serialize(object) {\n  if (!isObject(object)) return object;\n  const pairs = [];\n  for (const key in object) {\n    if (hasOwn(object, key)) pushEncodedKeyValuePair(pairs, key, object[key]);\n  }\n\n  return pairs.join('&');\n}\n\n/**\n * Helps 'serialize' with serializing arrays.\n * Mutates the pairs array.\n *\n * @param {Array} pairs\n * @param {String} key\n * @param {Mixed} val\n */\n\nfunction pushEncodedKeyValuePair(pairs, key, value) {\n  if (value === undefined) return;\n  if (value === null) {\n    pairs.push(encodeURI(key));\n    return;\n  }\n\n  if (Array.isArray(value)) {\n    for (const v of value) {\n      pushEncodedKeyValuePair(pairs, key, v);\n    }\n  } else if (isObject(value)) {\n    for (const subkey in value) {\n      if (hasOwn(value, subkey))\n        pushEncodedKeyValuePair(pairs, `${key}[${subkey}]`, value[subkey]);\n    }\n  } else {\n    pairs.push(encodeURI(key) + '=' + encodeURIComponent(value));\n  }\n}\n\n/**\n * Expose serialization method.\n */\n\nrequest.serializeObject = serialize;\n\n/**\n * Parse the given x-www-form-urlencoded `str`.\n *\n * @param {String} str\n * @return {Object}\n * @api private\n */\n\nfunction parseString(string_) {\n  const object = {};\n  const pairs = string_.split('&');\n  let pair;\n  let pos;\n\n  for (let i = 0, length_ = pairs.length; i < length_; ++i) {\n    pair = pairs[i];\n    pos = pair.indexOf('=');\n    if (pos === -1) {\n      object[decodeURIComponent(pair)] = '';\n    } else {\n      object[decodeURIComponent(pair.slice(0, pos))] = decodeURIComponent(\n        pair.slice(pos + 1)\n      );\n    }\n  }\n\n  return object;\n}\n\n/**\n * Expose parser.\n */\n\nrequest.parseString = parseString;\n\n/**\n * Default MIME type map.\n *\n *     superagent.types.xml = 'application/xml';\n *\n */\n\nrequest.types = {\n  html: 'text/html',\n  json: 'application/json',\n  xml: 'text/xml',\n  urlencoded: 'application/x-www-form-urlencoded',\n  form: 'application/x-www-form-urlencoded',\n  'form-data': 'application/x-www-form-urlencoded'\n};\n\n/**\n * Default serialization map.\n *\n *     superagent.serialize['application/xml'] = function(obj){\n *       return 'generated xml here';\n *     };\n *\n */\n\nrequest.serialize = {\n  'application/x-www-form-urlencoded': qs.stringify,\n  'application/json': 
safeStringify\n};\n\n/**\n * Default parsers.\n *\n *     superagent.parse['application/xml'] = function(str){\n *       return { object parsed from str };\n *     };\n *\n */\n\nrequest.parse = {\n  'application/x-www-form-urlencoded': parseString,\n  'application/json': JSON.parse\n};\n\n/**\n * Parse the given header `str` into\n * an object containing the mapped fields.\n *\n * @param {String} str\n * @return {Object}\n * @api private\n */\n\nfunction parseHeader(string_) {\n  const lines = string_.split(/\\r?\\n/);\n  const fields = {};\n  let index;\n  let line;\n  let field;\n  let value;\n\n  for (let i = 0, length_ = lines.length; i < length_; ++i) {\n    line = lines[i];\n    index = line.indexOf(':');\n    if (index === -1) {\n      // could be empty line, just skip it\n      continue;\n    }\n\n    field = line.slice(0, index).toLowerCase();\n    value = trim(line.slice(index + 1));\n    fields[field] = value;\n  }\n\n  return fields;\n}\n\n/**\n * Check if `mime` is json or has +json structured syntax suffix.\n *\n * @param {String} mime\n * @return {Boolean}\n * @api private\n */\n\nfunction isJSON(mime) {\n  // should match /json or +json\n  // but not /json-seq\n  return /[/+]json($|[^-\\w])/i.test(mime);\n}\n\n/**\n * Initialize a new `Response` with the given `xhr`.\n *\n *  - set flags (.ok, .error, etc)\n *  - parse header\n *\n * Examples:\n *\n *  Aliasing `superagent` as `request` is nice:\n *\n *      request = superagent;\n *\n *  We can use the promise-like API, or pass callbacks:\n *\n *      request.get('/').end(function(res){});\n *      request.get('/', function(res){});\n *\n *  Sending data can be chained:\n *\n *      request\n *        .post('/user')\n *        .send({ name: 'tj' })\n *        .end(function(res){});\n *\n *  Or passed to `.send()`:\n *\n *      request\n *        .post('/user')\n *        .send({ name: 'tj' }, function(res){});\n *\n *  Or passed to `.post()`:\n *\n *      request\n *        .post('/user', { name: 'tj' })\n *        .end(function(res){});\n *\n * Or further reduced to a single call for simple cases:\n *\n *      request\n *        .post('/user', { name: 'tj' }, function(res){});\n *\n * @param {XMLHTTPRequest} xhr\n * @param {Object} options\n * @api private\n */\n\nfunction Response(request_) {\n  this.req = request_;\n  this.xhr = this.req.xhr;\n  // responseText is accessible only if responseType is '' or 'text' and on older browsers\n  this.text =\n    (this.req.method !== 'HEAD' &&\n      (this.xhr.responseType === '' || this.xhr.responseType === 'text')) ||\n    typeof this.xhr.responseType === 'undefined'\n      ? this.xhr.responseText\n      : null;\n  this.statusText = this.req.xhr.statusText;\n  let { status } = this.xhr;\n  // handle IE9 bug: http://stackoverflow.com/questions/10046972/msie-returns-status-code-of-1223-for-ajax-request\n  if (status === 1223) {\n    status = 204;\n  }\n\n  this._setStatusProperties(status);\n  this.headers = parseHeader(this.xhr.getAllResponseHeaders());\n  this.header = this.headers;\n  // getAllResponseHeaders sometimes falsely returns \"\" for CORS requests, but\n  // getResponseHeader still works. so we get content-type even if getting\n  // other headers fails.\n  this.header['content-type'] = this.xhr.getResponseHeader('content-type');\n  this._setHeaderProperties(this.header);\n\n  if (this.text === null && request_._responseType) {\n    this.body = this.xhr.response;\n  } else {\n    this.body =\n      this.req.method === 'HEAD'\n        ? 
null\n        : this._parseBody(this.text ? this.text : this.xhr.response);\n  }\n}\n\nmixin(Response.prototype, ResponseBase.prototype);\n\n/**\n * Parse the given body `str`.\n *\n * Used for auto-parsing of bodies. Parsers\n * are defined on the `superagent.parse` object.\n *\n * @param {String} str\n * @return {Mixed}\n * @api private\n */\n\nResponse.prototype._parseBody = function (string_) {\n  let parse = request.parse[this.type];\n  if (this.req._parser) {\n    return this.req._parser(this, string_);\n  }\n\n  if (!parse && isJSON(this.type)) {\n    parse = request.parse['application/json'];\n  }\n\n  return parse && string_ && (string_.length > 0 || string_ instanceof Object)\n    ? parse(string_)\n    : null;\n};\n\n/**\n * Return an `Error` representative of this response.\n *\n * @return {Error}\n * @api public\n */\n\nResponse.prototype.toError = function () {\n  const { req } = this;\n  const { method } = req;\n  const { url } = req;\n\n  const message = `cannot ${method} ${url} (${this.status})`;\n  const error = new Error(message);\n  error.status = this.status;\n  error.method = method;\n  error.url = url;\n\n  return error;\n};\n\n/**\n * Expose `Response`.\n */\n\nrequest.Response = Response;\n\n/**\n * Initialize a new `Request` with the given `method` and `url`.\n *\n * @param {String} method\n * @param {String} url\n * @api public\n */\n\nfunction Request(method, url) {\n  const self = this;\n  this._query = this._query || [];\n  this.method = method;\n  this.url = url;\n  this.header = {}; // preserves header name case\n  this._header = {}; // coerces header names to lowercase\n  this.on('end', () => {\n    let error = null;\n    let res = null;\n\n    try {\n      res = new Response(self);\n    } catch (err) {\n      error = new Error('Parser is unable to parse the response');\n      error.parse = true;\n      error.original = err;\n      // issue #675: return the raw response if the response parsing fails\n      if (self.xhr) {\n        // ie9 doesn't have 'response' property\n        error.rawResponse =\n          typeof self.xhr.responseType === 'undefined'\n            ? self.xhr.responseText\n            : self.xhr.response;\n        // issue #876: return the http status code if the response parsing fails\n        error.status = self.xhr.status ? 
self.xhr.status : null;\n        error.statusCode = error.status; // backwards-compat only\n      } else {\n        error.rawResponse = null;\n        error.status = null;\n      }\n\n      return self.callback(error);\n    }\n\n    self.emit('response', res);\n\n    let new_error;\n    try {\n      if (!self._isResponseOK(res)) {\n        new_error = new Error(\n          res.statusText || res.text || 'Unsuccessful HTTP response'\n        );\n      }\n    } catch (err) {\n      new_error = err; // ok() callback can throw\n    }\n\n    // #1000 don't catch errors from the callback to avoid double calling it\n    if (new_error) {\n      new_error.original = error;\n      new_error.response = res;\n      new_error.status = res.status;\n      self.callback(new_error, res);\n    } else {\n      self.callback(null, res);\n    }\n  });\n}\n\n/**\n * Mixin `Emitter` and `RequestBase`.\n */\n\n// eslint-disable-next-line new-cap\nEmitter(Request.prototype);\n\nmixin(Request.prototype, RequestBase.prototype);\n\n/**\n * Set Content-Type to `type`, mapping values from `request.types`.\n *\n * Examples:\n *\n *      superagent.types.xml = 'application/xml';\n *\n *      request.post('/')\n *        .type('xml')\n *        .send(xmlstring)\n *        .end(callback);\n *\n *      request.post('/')\n *        .type('application/xml')\n *        .send(xmlstring)\n *        .end(callback);\n *\n * @param {String} type\n * @return {Request} for chaining\n * @api public\n */\n\nRequest.prototype.type = function (type) {\n  this.set('Content-Type', request.types[type] || type);\n  return this;\n};\n\n/**\n * Set Accept to `type`, mapping values from `request.types`.\n *\n * Examples:\n *\n *      superagent.types.json = 'application/json';\n *\n *      request.get('/agent')\n *        .accept('json')\n *        .end(callback);\n *\n *      request.get('/agent')\n *        .accept('application/json')\n *        .end(callback);\n *\n * @param {String} accept\n * @return {Request} for chaining\n * @api public\n */\n\nRequest.prototype.accept = function (type) {\n  this.set('Accept', request.types[type] || type);\n  return this;\n};\n\n/**\n * Set Authorization field value with `user` and `pass`.\n *\n * @param {String} user\n * @param {String} [pass] optional in case of using 'bearer' as type\n * @param {Object} options with 'type' property 'auto', 'basic' or 'bearer' (default 'basic')\n * @return {Request} for chaining\n * @api public\n */\n\nRequest.prototype.auth = function (user, pass, options) {\n  if (arguments.length === 1) pass = '';\n  if (typeof pass === 'object' && pass !== null) {\n    // pass is optional and can be replaced with options\n    options = pass;\n    pass = '';\n  }\n\n  if (!options) {\n    options = {\n      type: typeof btoa === 'function' ? 'basic' : 'auto'\n    };\n  }\n\n  const encoder = options.encoder\n    ? 
options.encoder\n    : (string) => {\n        if (typeof btoa === 'function') {\n          return btoa(string);\n        }\n\n        throw new Error('Cannot use basic auth, btoa is not a function');\n      };\n\n  return this._auth(user, pass, options, encoder);\n};\n\n/**\n * Add query-string `val`.\n *\n * Examples:\n *\n *   request.get('/shoes')\n *     .query('size=10')\n *     .query({ color: 'blue' })\n *\n * @param {Object|String} val\n * @return {Request} for chaining\n * @api public\n */\n\nRequest.prototype.query = function (value) {\n  if (typeof value !== 'string') value = serialize(value);\n  if (value) this._query.push(value);\n  return this;\n};\n\n/**\n * Queue the given `file` as an attachment to the specified `field`,\n * with optional `options` (or filename).\n *\n * ``` js\n * request.post('/upload')\n *   .attach('content', new Blob(['<a id=\"a\"><b id=\"b\">hey!</b></a>'], { type: \"text/html\"}))\n *   .end(callback);\n * ```\n *\n * @param {String} field\n * @param {Blob|File} file\n * @param {String|Object} options\n * @return {Request} for chaining\n * @api public\n */\n\nRequest.prototype.attach = function (field, file, options) {\n  if (file) {\n    if (this._data) {\n      throw new Error(\"superagent can't mix .send() and .attach()\");\n    }\n\n    this._getFormData().append(field, file, options || file.name);\n  }\n\n  return this;\n};\n\nRequest.prototype._getFormData = function () {\n  if (!this._formData) {\n    this._formData = new root.FormData();\n  }\n\n  return this._formData;\n};\n\n/**\n * Invoke the callback with `err` and `res`\n * and handle arity check.\n *\n * @param {Error} err\n * @param {Response} res\n * @api private\n */\n\nRequest.prototype.callback = function (error, res) {\n  if (this._shouldRetry(error, res)) {\n    return this._retry();\n  }\n\n  const fn = this._callback;\n  this.clearTimeout();\n\n  if (error) {\n    if (this._maxRetries) error.retries = this._retries - 1;\n    this.emit('error', error);\n  }\n\n  fn(error, res);\n};\n\n/**\n * Invoke callback with x-domain error.\n *\n * @api private\n */\n\nRequest.prototype.crossDomainError = function () {\n  const error = new Error(\n    'Request has been terminated\\nPossible causes: the network is offline, Origin is not allowed by Access-Control-Allow-Origin, the page is being unloaded, etc.'\n  );\n  error.crossDomain = true;\n\n  error.status = this.status;\n  error.method = this.method;\n  error.url = this.url;\n\n  this.callback(error);\n};\n\n// This only warns, because the request is still likely to work\nRequest.prototype.agent = function () {\n  console.warn('This is not supported in browser version of superagent');\n  return this;\n};\n\nRequest.prototype.ca = Request.prototype.agent;\nRequest.prototype.buffer = Request.prototype.ca;\n\n// This throws, because it can't send/receive data as expected\nRequest.prototype.write = () => {\n  throw new Error(\n    'Streaming is not supported in browser version of superagent'\n  );\n};\n\nRequest.prototype.pipe = Request.prototype.write;\n\n/**\n * Check if `obj` is a host object,\n * we don't want to serialize these :)\n *\n * @param {Object} obj host object\n * @return {Boolean} is a host object\n * @api private\n */\nRequest.prototype._isHost = function (object) {\n  // Native objects stringify to [object File], [object Blob], [object FormData], etc.\n  return (\n    object &&\n    typeof object === 'object' &&\n    !Array.isArray(object) &&\n    Object.prototype.toString.call(object) !== '[object Object]'\n  
);\n};\n\n/**\n * Initiate request, invoking callback `fn(res)`\n * with an instanceof `Response`.\n *\n * @param {Function} fn\n * @return {Request} for chaining\n * @api public\n */\n\nRequest.prototype.end = function (fn) {\n  if (this._endCalled) {\n    console.warn(\n      'Warning: .end() was called twice. This is not supported in superagent'\n    );\n  }\n\n  this._endCalled = true;\n\n  // store callback\n  this._callback = fn || noop;\n\n  // querystring\n  this._finalizeQueryString();\n\n  this._end();\n};\n\nRequest.prototype._setUploadTimeout = function () {\n  const self = this;\n\n  // upload timeout it's wokrs only if deadline timeout is off\n  if (this._uploadTimeout && !this._uploadTimeoutTimer) {\n    this._uploadTimeoutTimer = setTimeout(() => {\n      self._timeoutError(\n        'Upload timeout of ',\n        self._uploadTimeout,\n        'ETIMEDOUT'\n      );\n    }, this._uploadTimeout);\n  }\n};\n\n// eslint-disable-next-line complexity\nRequest.prototype._end = function () {\n  if (this._aborted)\n    return this.callback(\n      new Error('The request has been aborted even before .end() was called')\n    );\n\n  const self = this;\n  this.xhr = request.getXHR();\n  const { xhr } = this;\n  let data = this._formData || this._data;\n\n  this._setTimeouts();\n\n  // state change\n  xhr.addEventListener('readystatechange', () => {\n    const { readyState } = xhr;\n    if (readyState >= 2 && self._responseTimeoutTimer) {\n      clearTimeout(self._responseTimeoutTimer);\n    }\n\n    if (readyState !== 4) {\n      return;\n    }\n\n    // In IE9, reads to any property (e.g. status) off of an aborted XHR will\n    // result in the error \"Could not complete the operation due to error c00c023f\"\n    let status;\n    try {\n      status = xhr.status;\n    } catch {\n      status = 0;\n    }\n\n    if (!status) {\n      if (self.timedout || self._aborted) return;\n      return self.crossDomainError();\n    }\n\n    self.emit('end');\n  });\n\n  // progress\n  const handleProgress = (direction, e) => {\n    if (e.total > 0) {\n      e.percent = (e.loaded / e.total) * 100;\n\n      if (e.percent === 100) {\n        clearTimeout(self._uploadTimeoutTimer);\n      }\n    }\n\n    e.direction = direction;\n    self.emit('progress', e);\n  };\n\n  if (this.hasListeners('progress')) {\n    try {\n      xhr.addEventListener('progress', handleProgress.bind(null, 'download'));\n      if (xhr.upload) {\n        xhr.upload.addEventListener(\n          'progress',\n          handleProgress.bind(null, 'upload')\n        );\n      }\n    } catch {\n      // Accessing xhr.upload fails in IE from a web worker, so just pretend it doesn't exist.\n      // Reported here:\n      // https://connect.microsoft.com/IE/feedback/details/837245/xmlhttprequest-upload-throws-invalid-argument-when-used-from-web-worker-context\n    }\n  }\n\n  if (xhr.upload) {\n    this._setUploadTimeout();\n  }\n\n  // initiate request\n  try {\n    if (this.username && this.password) {\n      xhr.open(this.method, this.url, true, this.username, this.password);\n    } else {\n      xhr.open(this.method, this.url, true);\n    }\n  } catch (err) {\n    // see #1149\n    return this.callback(err);\n  }\n\n  // CORS\n  if (this._withCredentials) xhr.withCredentials = true;\n\n  // body\n  if (\n    !this._formData &&\n    this.method !== 'GET' &&\n    this.method !== 'HEAD' &&\n    typeof data !== 'string' &&\n    !this._isHost(data)\n  ) {\n    // serialize stuff\n    const contentType = this._header['content-type'];\n    
let serialize =\n      this._serializer ||\n      request.serialize[contentType ? contentType.split(';')[0] : ''];\n    if (!serialize && isJSON(contentType)) {\n      serialize = request.serialize['application/json'];\n    }\n\n    if (serialize) data = serialize(data);\n  }\n\n  // set header fields\n  for (const field in this.header) {\n    if (this.header[field] === null) continue;\n\n    if (hasOwn(this.header, field))\n      xhr.setRequestHeader(field, this.header[field]);\n  }\n\n  if (this._responseType) {\n    xhr.responseType = this._responseType;\n  }\n\n  // send stuff\n  this.emit('request', this);\n\n  // IE11 xhr.send(undefined) sends 'undefined' string as POST payload (instead of nothing)\n  // We need null here if data is undefined\n  xhr.send(typeof data === 'undefined' ? null : data);\n};\n\nrequest.agent = () => new Agent();\n\nfor (const method of ['GET', 'POST', 'OPTIONS', 'PATCH', 'PUT', 'DELETE']) {\n  Agent.prototype[method.toLowerCase()] = function (url, fn) {\n    const request_ = new request.Request(method, url);\n    this._setDefaults(request_);\n    if (fn) {\n      request_.end(fn);\n    }\n\n    return request_;\n  };\n}\n\nAgent.prototype.del = Agent.prototype.delete;\n\n/**\n * GET `url` with optional callback `fn(res)`.\n *\n * @param {String} url\n * @param {Mixed|Function} [data] or fn\n * @param {Function} [fn]\n * @return {Request}\n * @api public\n */\n\nrequest.get = (url, data, fn) => {\n  const request_ = request('GET', url);\n  if (typeof data === 'function') {\n    fn = data;\n    data = null;\n  }\n\n  if (data) request_.query(data);\n  if (fn) request_.end(fn);\n  return request_;\n};\n\n/**\n * HEAD `url` with optional callback `fn(res)`.\n *\n * @param {String} url\n * @param {Mixed|Function} [data] or fn\n * @param {Function} [fn]\n * @return {Request}\n * @api public\n */\n\nrequest.head = (url, data, fn) => {\n  const request_ = request('HEAD', url);\n  if (typeof data === 'function') {\n    fn = data;\n    data = null;\n  }\n\n  if (data) request_.query(data);\n  if (fn) request_.end(fn);\n  return request_;\n};\n\n/**\n * OPTIONS query to `url` with optional callback `fn(res)`.\n *\n * @param {String} url\n * @param {Mixed|Function} [data] or fn\n * @param {Function} [fn]\n * @return {Request}\n * @api public\n */\n\nrequest.options = (url, data, fn) => {\n  const request_ = request('OPTIONS', url);\n  if (typeof data === 'function') {\n    fn = data;\n    data = null;\n  }\n\n  if (data) request_.send(data);\n  if (fn) request_.end(fn);\n  return request_;\n};\n\n/**\n * DELETE `url` with optional `data` and callback `fn(res)`.\n *\n * @param {String} url\n * @param {Mixed} [data]\n * @param {Function} [fn]\n * @return {Request}\n * @api public\n */\n\nfunction del(url, data, fn) {\n  const request_ = request('DELETE', url);\n  if (typeof data === 'function') {\n    fn = data;\n    data = null;\n  }\n\n  if (data) request_.send(data);\n  if (fn) request_.end(fn);\n  return request_;\n}\n\nrequest.del = del;\nrequest.delete = del;\n\n/**\n * PATCH `url` with optional `data` and callback `fn(res)`.\n *\n * @param {String} url\n * @param {Mixed} [data]\n * @param {Function} [fn]\n * @return {Request}\n * @api public\n */\n\nrequest.patch = (url, data, fn) => {\n  const request_ = request('PATCH', url);\n  if (typeof data === 'function') {\n    fn = data;\n    data = null;\n  }\n\n  if (data) request_.send(data);\n  if (fn) request_.end(fn);\n  return request_;\n};\n\n/**\n * POST `url` with optional `data` and callback `fn(res)`.\n *\n * 
@param {String} url\n * @param {Mixed} [data]\n * @param {Function} [fn]\n * @return {Request}\n * @api public\n */\n\nrequest.post = (url, data, fn) => {\n  const request_ = request('POST', url);\n  if (typeof data === 'function') {\n    fn = data;\n    data = null;\n  }\n\n  if (data) request_.send(data);\n  if (fn) request_.end(fn);\n  return request_;\n};\n\n/**\n * PUT `url` with optional `data` and callback `fn(res)`.\n *\n * @param {String} url\n * @param {Mixed|Function} [data] or fn\n * @param {Function} [fn]\n * @return {Request}\n * @api public\n */\n\nrequest.put = (url, data, fn) => {\n  const request_ = request('PUT', url);\n  if (typeof data === 'function') {\n    fn = data;\n    data = null;\n  }\n\n  if (data) request_.send(data);\n  if (fn) request_.end(fn);\n  return request_;\n};\n"],"mappings":";;;;;;;;;;AAAA;AACA;AACA;AAEA,IAAIA,IAAJ;;AACA,IAAI,OAAOC,MAAP,KAAkB,WAAtB,EAAmC;EACjC;EACAD,IAAI,GAAGC,MAAP;AACD,CAHD,MAGO,IAAI,OAAOC,IAAP,KAAgB,WAApB,EAAiC;EACtC;EACAC,OAAO,CAACC,IAAR,CACE,qEADF;EAGAJ,IAAI,SAAJ;AACD,CANM,MAMA;EACL;EACAA,IAAI,GAAGE,IAAP;AACD;;AAED,IAAMG,OAAO,GAAGC,OAAO,CAAC,mBAAD,CAAvB;;AACA,IAAMC,aAAa,GAAGD,OAAO,CAAC,qBAAD,CAA7B;;AACA,IAAME,EAAE,GAAGF,OAAO,CAAC,IAAD,CAAlB;;AACA,IAAMG,WAAW,GAAGH,OAAO,CAAC,gBAAD,CAA3B;;AACA,eAAoCA,OAAO,CAAC,SAAD,CAA3C;AAAA,IAAQI,QAAR,YAAQA,QAAR;AAAA,IAAkBC,KAAlB,YAAkBA,KAAlB;AAAA,IAAyBC,MAAzB,YAAyBA,MAAzB;;AACA,IAAMC,YAAY,GAAGP,OAAO,CAAC,iBAAD,CAA5B;;AACA,IAAMQ,KAAK,GAAGR,OAAO,CAAC,cAAD,CAArB;AAEA;AACA;AACA;;;AAEA,SAASS,IAAT,GAAgB,CAAE;AAElB;AACA;AACA;;;AAEAC,MAAM,CAACC,OAAP,GAAiB,UAAUC,MAAV,EAAkBC,GAAlB,EAAuB;EACtC;EACA,IAAI,OAAOA,GAAP,KAAe,UAAnB,EAA+B;IAC7B,OAAO,IAAIF,OAAO,CAACG,OAAZ,CAAoB,KAApB,EAA2BF,MAA3B,EAAmCG,GAAnC,CAAuCF,GAAvC,CAAP;EACD,CAJqC,CAMtC;;;EACA,IAAIG,SAAS,CAACC,MAAV,KAAqB,CAAzB,EAA4B;IAC1B,OAAO,IAAIN,OAAO,CAACG,OAAZ,CAAoB,KAApB,EAA2BF,MAA3B,CAAP;EACD;;EAED,OAAO,IAAID,OAAO,CAACG,OAAZ,CAAoBF,MAApB,EAA4BC,GAA5B,CAAP;AACD,CAZD;;AAcAF,OAAO,GAAGD,MAAM,CAACC,OAAjB;AAEA,IAAMO,OAAO,GAAGP,OAAhB;AAEAA,OAAO,CAACG,OAAR,GAAkBA,OAAlB;AAEA;AACA;AACA;;AAEAI,OAAO,CAACC,MAAR,GAAiB,YAAM;EACrB,IACEzB,IAAI,CAAC0B,cAAL,KACC,CAAC1B,IAAI,CAAC2B,QAAN,IACC3B,IAAI,CAAC2B,QAAL,CAAcC,QAAd,KAA2B,OAD5B,IAEC,CAAC5B,IAAI,CAAC6B,aAHR,CADF,EAKE;IACA,OAAO,IAAIH,cAAJ,EAAP;EACD;;EAED,IAAI;IACF,OAAO,IAAIG,aAAJ,CAAkB,mBAAlB,CAAP;EACD,CAFD,CAEE,gBAAM;IACN;EACD;;EAED,IAAI;IACF,OAAO,IAAIA,aAAJ,CAAkB,oBAAlB,CAAP;EACD,CAFD,CAEE,iBAAM;IACN;EACD;;EAED,IAAI;IACF,OAAO,IAAIA,aAAJ,CAAkB,oBAAlB,CAAP;EACD,CAFD,CAEE,iBAAM;IACN;EACD;;EAED,IAAI;IACF,OAAO,IAAIA,aAAJ,CAAkB,gBAAlB,CAAP;EACD,CAFD,CAEE,iBAAM;IACN;EACD;;EAED,MAAM,IAAIC,KAAJ,CAAU,uDAAV,CAAN;AACD,CAnCD;AAqCA;AACA;AACA;AACA;AACA;AACA;AACA;;;AAEA,IAAMC,IAAI,GAAG,GAAGA,IAAH,GAAU,UAACC,CAAD;EAAA,OAAOA,CAAC,CAACD,IAAF,EAAP;AAAA,CAAV,GAA4B,UAACC,CAAD;EAAA,OAAOA,CAAC,CAACC,OAAF,CAAU,cAAV,EAA0B,EAA1B,CAAP;AAAA,CAAzC;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA,SAASC,SAAT,CAAmBC,MAAnB,EAA2B;EACzB,IAAI,CAACzB,QAAQ,CAACyB,MAAD,CAAb,EAAuB,OAAOA,MAAP;EACvB,IAAMC,KAAK,GAAG,EAAd;;EACA,KAAK,IAAMC,GAAX,IAAkBF,MAAlB,EAA0B;IACxB,IAAIvB,MAAM,CAACuB,MAAD,EAASE,GAAT,CAAV,EAAyBC,uBAAuB,CAACF,KAAD,EAAQC,GAAR,EAAaF,MAAM,CAACE,GAAD,CAAnB,CAAvB;EAC1B;;EAED,OAAOD,KAAK,CAACG,IAAN,CAAW,GAAX,CAAP;AACD;AAED;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AAEA,SAASD,uBAAT,CAAiCF,KAAjC,EAAwCC,GAAxC,EAA6CG,KAA7C,EAAoD;EAClD,IAAIA,KAAK,KAAKC,SAAd,EAAyB;;EACzB,IAAID,KAAK,KAAK,IAAd,EAAoB;IAClBJ,KAAK,CAACM,IAAN,CAAWC,SAAS,CAACN,GAAD,CAApB;IACA;EACD;;EAED,IAAIO,KAAK,CAACC,OAAN,CAAcL,KAAd,CAAJ,EAA0B;IAAA,2CACRA,KADQ;IAAA;;IAAA;MACxB,oDAAuB;QAAA,IAAZM,CAAY;QACrBR
,uBAAuB,CAACF,KAAD,EAAQC,GAAR,EAAaS,CAAb,CAAvB;MACD;IAHuB;MAAA;IAAA;MAAA;IAAA;EAIzB,CAJD,MAIO,IAAIpC,QAAQ,CAAC8B,KAAD,CAAZ,EAAqB;IAC1B,KAAK,IAAMO,MAAX,IAAqBP,KAArB,EAA4B;MAC1B,IAAI5B,MAAM,CAAC4B,KAAD,EAAQO,MAAR,CAAV,EACET,uBAAuB,CAACF,KAAD,YAAWC,GAAX,cAAkBU,MAAlB,QAA6BP,KAAK,CAACO,MAAD,CAAlC,CAAvB;IACH;EACF,CALM,MAKA;IACLX,KAAK,CAACM,IAAN,CAAWC,SAAS,CAACN,GAAD,CAAT,GAAiB,GAAjB,GAAuBW,kBAAkB,CAACR,KAAD,CAApD;EACD;AACF;AAED;AACA;AACA;;;AAEAhB,OAAO,CAACyB,eAAR,GAA0Bf,SAA1B;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA,SAASgB,WAAT,CAAqBC,OAArB,EAA8B;EAC5B,IAAMhB,MAAM,GAAG,EAAf;EACA,IAAMC,KAAK,GAAGe,OAAO,CAACC,KAAR,CAAc,GAAd,CAAd;EACA,IAAIC,IAAJ;EACA,IAAIC,GAAJ;;EAEA,KAAK,IAAIC,CAAC,GAAG,CAAR,EAAWC,OAAO,GAAGpB,KAAK,CAACb,MAAhC,EAAwCgC,CAAC,GAAGC,OAA5C,EAAqD,EAAED,CAAvD,EAA0D;IACxDF,IAAI,GAAGjB,KAAK,CAACmB,CAAD,CAAZ;IACAD,GAAG,GAAGD,IAAI,CAACI,OAAL,CAAa,GAAb,CAAN;;IACA,IAAIH,GAAG,KAAK,CAAC,CAAb,EAAgB;MACdnB,MAAM,CAACuB,kBAAkB,CAACL,IAAD,CAAnB,CAAN,GAAmC,EAAnC;IACD,CAFD,MAEO;MACLlB,MAAM,CAACuB,kBAAkB,CAACL,IAAI,CAACM,KAAL,CAAW,CAAX,EAAcL,GAAd,CAAD,CAAnB,CAAN,GAAiDI,kBAAkB,CACjEL,IAAI,CAACM,KAAL,CAAWL,GAAG,GAAG,CAAjB,CADiE,CAAnE;IAGD;EACF;;EAED,OAAOnB,MAAP;AACD;AAED;AACA;AACA;;;AAEAX,OAAO,CAAC0B,WAAR,GAAsBA,WAAtB;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA1B,OAAO,CAACoC,KAAR,GAAgB;EACdC,IAAI,EAAE,WADQ;EAEdC,IAAI,EAAE,kBAFQ;EAGdC,GAAG,EAAE,UAHS;EAIdC,UAAU,EAAE,mCAJE;EAKdC,IAAI,EAAE,mCALQ;EAMd,aAAa;AANC,CAAhB;AASA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEAzC,OAAO,CAACU,SAAR,GAAoB;EAClB,qCAAqC1B,EAAE,CAAC0D,SADtB;EAElB,oBAAoB3D;AAFF,CAApB;AAKA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEAiB,OAAO,CAAC2C,KAAR,GAAgB;EACd,qCAAqCjB,WADvB;EAEd,oBAAoBkB,IAAI,CAACD;AAFX,CAAhB;AAKA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA,SAASE,WAAT,CAAqBlB,OAArB,EAA8B;EAC5B,IAAMmB,KAAK,GAAGnB,OAAO,CAACC,KAAR,CAAc,OAAd,CAAd;EACA,IAAMmB,MAAM,GAAG,EAAf;EACA,IAAIC,KAAJ;EACA,IAAIC,IAAJ;EACA,IAAIC,KAAJ;EACA,IAAIlC,KAAJ;;EAEA,KAAK,IAAIe,CAAC,GAAG,CAAR,EAAWC,OAAO,GAAGc,KAAK,CAAC/C,MAAhC,EAAwCgC,CAAC,GAAGC,OAA5C,EAAqD,EAAED,CAAvD,EAA0D;IACxDkB,IAAI,GAAGH,KAAK,CAACf,CAAD,CAAZ;IACAiB,KAAK,GAAGC,IAAI,CAAChB,OAAL,CAAa,GAAb,CAAR;;IACA,IAAIe,KAAK,KAAK,CAAC,CAAf,EAAkB;MAChB;MACA;IACD;;IAEDE,KAAK,GAAGD,IAAI,CAACd,KAAL,CAAW,CAAX,EAAca,KAAd,EAAqBG,WAArB,EAAR;IACAnC,KAAK,GAAGT,IAAI,CAAC0C,IAAI,CAACd,KAAL,CAAWa,KAAK,GAAG,CAAnB,CAAD,CAAZ;IACAD,MAAM,CAACG,KAAD,CAAN,GAAgBlC,KAAhB;EACD;;EAED,OAAO+B,MAAP;AACD;AAED;AACA;AACA;AACA;AACA;AACA;AACA;;;AAEA,SAASK,MAAT,CAAgBC,IAAhB,EAAsB;EACpB;EACA;EACA,OAAO,sBAAsBC,IAAtB,CAA2BD,IAA3B,CAAP;AACD;AAED;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AAEA,SAASE,QAAT,CAAkBC,QAAlB,EAA4B;EAC1B,KAAKC,GAAL,GAAWD,QAAX;EACA,KAAKE,GAAL,GAAW,KAAKD,GAAL,CAASC,GAApB,CAF0B,CAG1B;;EACA,KAAKC,IAAL,GACG,KAAKF,GAAL,CAAS/D,MAAT,KAAoB,MAApB,KACE,KAAKgE,GAAL,CAASE,YAAT,KAA0B,EAA1B,IAAgC,KAAKF,GAAL,CAASE,YAAT,KAA0B,MAD5D,CAAD,IAEA,OAAO,KAAKF,GAAL,CAASE,YAAhB,KAAiC,WAFjC,GAGI,KAAKF,GAAL,CAASG,YAHb,GAII,IALN;EAMA,KAAKC,UAAL,GAAkB,KAAKL,GAAL,CAASC,GAAT,CAAaI,UAA/B;EACA,IAAMC,MAAN,GAAiB,KAAKL,GAAtB,CAAMK,MAAN,CAX0B,CAY1B;;EACA,IAAIA,MAAM,KAAK,IAAf,EAAqB;IACnBA,MAAM,GAAG,GAAT;EACD;;EAED,KAAKC,oBAAL,CAA0BD,MAA1B;;EACA,KAAKE,OAAL,GAAepB,WAAW,CAAC,KAAKa,GAAL,CAASQ,qBAAT,EAAD,CAA1B;EACA,KAAKC,MAAL,GAAc,KAAKF,OAAnB,CAnB0B,CAoB1B;EACA;EACA;;EACA,KAAKE,MAAL,CAAY,cAAZ,IAA8B,KAAKT,GAAL,CAASU,iBAAT,CAA2B,cAA3B,CAA9B;;EACA,KAAKC,oBAAL,CAA0B,KAAKF,MAA/B;;EAEA,IAAI,KAAKR,IAAL,KAAc,IAAd,IAAsBH,QAAQ,C
AACc,aAAnC,EAAkD;IAChD,KAAKC,IAAL,GAAY,KAAKb,GAAL,CAASc,QAArB;EACD,CAFD,MAEO;IACL,KAAKD,IAAL,GACE,KAAKd,GAAL,CAAS/D,MAAT,KAAoB,MAApB,GACI,IADJ,GAEI,KAAK+E,UAAL,CAAgB,KAAKd,IAAL,GAAY,KAAKA,IAAjB,GAAwB,KAAKD,GAAL,CAASc,QAAjD,CAHN;EAID;AACF;;AAEDrF,KAAK,CAACoE,QAAQ,CAACmB,SAAV,EAAqBrF,YAAY,CAACqF,SAAlC,CAAL;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEAnB,QAAQ,CAACmB,SAAT,CAAmBD,UAAnB,GAAgC,UAAU9C,OAAV,EAAmB;EACjD,IAAIgB,KAAK,GAAG3C,OAAO,CAAC2C,KAAR,CAAc,KAAKgC,IAAnB,CAAZ;;EACA,IAAI,KAAKlB,GAAL,CAASmB,OAAb,EAAsB;IACpB,OAAO,KAAKnB,GAAL,CAASmB,OAAT,CAAiB,IAAjB,EAAuBjD,OAAvB,CAAP;EACD;;EAED,IAAI,CAACgB,KAAD,IAAUS,MAAM,CAAC,KAAKuB,IAAN,CAApB,EAAiC;IAC/BhC,KAAK,GAAG3C,OAAO,CAAC2C,KAAR,CAAc,kBAAd,CAAR;EACD;;EAED,OAAOA,KAAK,IAAIhB,OAAT,KAAqBA,OAAO,CAAC5B,MAAR,GAAiB,CAAjB,IAAsB4B,OAAO,YAAYkD,MAA9D,IACHlC,KAAK,CAAChB,OAAD,CADF,GAEH,IAFJ;AAGD,CAbD;AAeA;AACA;AACA;AACA;AACA;AACA;;;AAEA4B,QAAQ,CAACmB,SAAT,CAAmBI,OAAnB,GAA6B,YAAY;EACvC,IAAQrB,GAAR,GAAgB,IAAhB,CAAQA,GAAR;EACA,IAAQ/D,MAAR,GAAmB+D,GAAnB,CAAQ/D,MAAR;EACA,IAAQC,GAAR,GAAgB8D,GAAhB,CAAQ9D,GAAR;EAEA,IAAMoF,OAAO,oBAAarF,MAAb,cAAuBC,GAAvB,eAA+B,KAAKoE,MAApC,MAAb;EACA,IAAMiB,KAAK,GAAG,IAAI1E,KAAJ,CAAUyE,OAAV,CAAd;EACAC,KAAK,CAACjB,MAAN,GAAe,KAAKA,MAApB;EACAiB,KAAK,CAACtF,MAAN,GAAeA,MAAf;EACAsF,KAAK,CAACrF,GAAN,GAAYA,GAAZ;EAEA,OAAOqF,KAAP;AACD,CAZD;AAcA;AACA;AACA;;;AAEAhF,OAAO,CAACuD,QAAR,GAAmBA,QAAnB;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA,SAAS3D,OAAT,CAAiBF,MAAjB,EAAyBC,GAAzB,EAA8B;EAC5B,IAAMjB,IAAI,GAAG,IAAb;EACA,KAAKuG,MAAL,GAAc,KAAKA,MAAL,IAAe,EAA7B;EACA,KAAKvF,MAAL,GAAcA,MAAd;EACA,KAAKC,GAAL,GAAWA,GAAX;EACA,KAAKwE,MAAL,GAAc,EAAd,CAL4B,CAKV;;EAClB,KAAKe,OAAL,GAAe,EAAf,CAN4B,CAMT;;EACnB,KAAKC,EAAL,CAAQ,KAAR,EAAe,YAAM;IACnB,IAAIH,KAAK,GAAG,IAAZ;IACA,IAAII,GAAG,GAAG,IAAV;;IAEA,IAAI;MACFA,GAAG,GAAG,IAAI7B,QAAJ,CAAa7E,IAAb,CAAN;IACD,CAFD,CAEE,OAAO2G,GAAP,EAAY;MACZL,KAAK,GAAG,IAAI1E,KAAJ,CAAU,wCAAV,CAAR;MACA0E,KAAK,CAACrC,KAAN,GAAc,IAAd;MACAqC,KAAK,CAACM,QAAN,GAAiBD,GAAjB,CAHY,CAIZ;;MACA,IAAI3G,IAAI,CAACgF,GAAT,EAAc;QACZ;QACAsB,KAAK,CAACO,WAAN,GACE,OAAO7G,IAAI,CAACgF,GAAL,CAASE,YAAhB,KAAiC,WAAjC,GACIlF,IAAI,CAACgF,GAAL,CAASG,YADb,GAEInF,IAAI,CAACgF,GAAL,CAASc,QAHf,CAFY,CAMZ;;QACAQ,KAAK,CAACjB,MAAN,GAAerF,IAAI,CAACgF,GAAL,CAASK,MAAT,GAAkBrF,IAAI,CAACgF,GAAL,CAASK,MAA3B,GAAoC,IAAnD;QACAiB,KAAK,CAACQ,UAAN,GAAmBR,KAAK,CAACjB,MAAzB,CARY,CAQqB;MAClC,CATD,MASO;QACLiB,KAAK,CAACO,WAAN,GAAoB,IAApB;QACAP,KAAK,CAACjB,MAAN,GAAe,IAAf;MACD;;MAED,OAAOrF,IAAI,CAAC+G,QAAL,CAAcT,KAAd,CAAP;IACD;;IAEDtG,IAAI,CAACgH,IAAL,CAAU,UAAV,EAAsBN,GAAtB;IAEA,IAAIO,SAAJ;;IACA,IAAI;MACF,IAAI,CAACjH,IAAI,CAACkH,aAAL,CAAmBR,GAAnB,CAAL,EAA8B;QAC5BO,SAAS,GAAG,IAAIrF,KAAJ,CACV8E,GAAG,CAACtB,UAAJ,IAAkBsB,GAAG,CAACzB,IAAtB,IAA8B,4BADpB,CAAZ;MAGD;IACF,CAND,CAME,OAAO0B,GAAP,EAAY;MACZM,SAAS,GAAGN,GAAZ,CADY,CACK;IAClB,CAvCkB,CAyCnB;;;IACA,IAAIM,SAAJ,EAAe;MACbA,SAAS,CAACL,QAAV,GAAqBN,KAArB;MACAW,SAAS,CAACnB,QAAV,GAAqBY,GAArB;MACAO,SAAS,CAAC5B,MAAV,GAAmBqB,GAAG,CAACrB,MAAvB;MACArF,IAAI,CAAC+G,QAAL,CAAcE,SAAd,EAAyBP,GAAzB;IACD,CALD,MAKO;MACL1G,IAAI,CAAC+G,QAAL,CAAc,IAAd,EAAoBL,GAApB;IACD;EACF,CAlDD;AAmDD;AAED;AACA;AACA;AAEA;;;AACAvG,OAAO,CAACe,OAAO,CAAC8E,SAAT,CAAP;AAEAvF,KAAK,CAACS,OAAO,CAAC8E,SAAT,EAAoBzF,WAAW,CAACyF,SAAhC,CAAL;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA9E,OAAO,CAAC8E,SAAR,CAAkBC,IAAlB,GAAyB,UAAUA,IAAV,EAAgB;EACvC,KAAKkB,GAAL,CAAS,cAAT,EAAyB7F,OAAO,CAACoC,KAAR,CAAcuC,IAAd,KAAuBA,IAAhD;EACA,OAAO,IAAP;AACD,CAHD;AAKA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;A
AEA/E,OAAO,CAAC8E,SAAR,CAAkBoB,MAAlB,GAA2B,UAAUnB,IAAV,EAAgB;EACzC,KAAKkB,GAAL,CAAS,QAAT,EAAmB7F,OAAO,CAACoC,KAAR,CAAcuC,IAAd,KAAuBA,IAA1C;EACA,OAAO,IAAP;AACD,CAHD;AAKA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AAEA/E,OAAO,CAAC8E,SAAR,CAAkBqB,IAAlB,GAAyB,UAAUC,IAAV,EAAgBC,IAAhB,EAAsBC,OAAtB,EAA+B;EACtD,IAAIpG,SAAS,CAACC,MAAV,KAAqB,CAAzB,EAA4BkG,IAAI,GAAG,EAAP;;EAC5B,IAAI,QAAOA,IAAP,MAAgB,QAAhB,IAA4BA,IAAI,KAAK,IAAzC,EAA+C;IAC7C;IACAC,OAAO,GAAGD,IAAV;IACAA,IAAI,GAAG,EAAP;EACD;;EAED,IAAI,CAACC,OAAL,EAAc;IACZA,OAAO,GAAG;MACRvB,IAAI,EAAE,OAAOwB,IAAP,KAAgB,UAAhB,GAA6B,OAA7B,GAAuC;IADrC,CAAV;EAGD;;EAED,IAAMC,OAAO,GAAGF,OAAO,CAACE,OAAR,GACZF,OAAO,CAACE,OADI,GAEZ,UAACC,MAAD,EAAY;IACV,IAAI,OAAOF,IAAP,KAAgB,UAApB,EAAgC;MAC9B,OAAOA,IAAI,CAACE,MAAD,CAAX;IACD;;IAED,MAAM,IAAI/F,KAAJ,CAAU,+CAAV,CAAN;EACD,CARL;EAUA,OAAO,KAAKgG,KAAL,CAAWN,IAAX,EAAiBC,IAAjB,EAAuBC,OAAvB,EAAgCE,OAAhC,CAAP;AACD,CAzBD;AA2BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AAEAxG,OAAO,CAAC8E,SAAR,CAAkB6B,KAAlB,GAA0B,UAAUvF,KAAV,EAAiB;EACzC,IAAI,OAAOA,KAAP,KAAiB,QAArB,EAA+BA,KAAK,GAAGN,SAAS,CAACM,KAAD,CAAjB;EAC/B,IAAIA,KAAJ,EAAW,KAAKiE,MAAL,CAAY/D,IAAZ,CAAiBF,KAAjB;EACX,OAAO,IAAP;AACD,CAJD;AAMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AAEApB,OAAO,CAAC8E,SAAR,CAAkB8B,MAAlB,GAA2B,UAAUtD,KAAV,EAAiBuD,IAAjB,EAAuBP,OAAvB,EAAgC;EACzD,IAAIO,IAAJ,EAAU;IACR,IAAI,KAAKC,KAAT,EAAgB;MACd,MAAM,IAAIpG,KAAJ,CAAU,4CAAV,CAAN;IACD;;IAED,KAAKqG,YAAL,GAAoBC,MAApB,CAA2B1D,KAA3B,EAAkCuD,IAAlC,EAAwCP,OAAO,IAAIO,IAAI,CAACI,IAAxD;EACD;;EAED,OAAO,IAAP;AACD,CAVD;;AAYAjH,OAAO,CAAC8E,SAAR,CAAkBiC,YAAlB,GAAiC,YAAY;EAC3C,IAAI,CAAC,KAAKG,SAAV,EAAqB;IACnB,KAAKA,SAAL,GAAiB,IAAItI,IAAI,CAACuI,QAAT,EAAjB;EACD;;EAED,OAAO,KAAKD,SAAZ;AACD,CAND;AAQA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AAEAlH,OAAO,CAAC8E,SAAR,CAAkBe,QAAlB,GAA6B,UAAUT,KAAV,EAAiBI,GAAjB,EAAsB;EACjD,IAAI,KAAK4B,YAAL,CAAkBhC,KAAlB,EAAyBI,GAAzB,CAAJ,EAAmC;IACjC,OAAO,KAAK6B,MAAL,EAAP;EACD;;EAED,IAAMC,EAAE,GAAG,KAAKC,SAAhB;EACA,KAAKC,YAAL;;EAEA,IAAIpC,KAAJ,EAAW;IACT,IAAI,KAAKqC,WAAT,EAAsBrC,KAAK,CAACsC,OAAN,GAAgB,KAAKC,QAAL,GAAgB,CAAhC;IACtB,KAAK7B,IAAL,CAAU,OAAV,EAAmBV,KAAnB;EACD;;EAEDkC,EAAE,CAAClC,KAAD,EAAQI,GAAR,CAAF;AACD,CAdD;AAgBA;AACA;AACA;AACA;AACA;;;AAEAxF,OAAO,CAAC8E,SAAR,CAAkB8C,gBAAlB,GAAqC,YAAY;EAC/C,IAAMxC,KAAK,GAAG,IAAI1E,KAAJ,CACZ,8JADY,CAAd;EAGA0E,KAAK,CAACyC,WAAN,GAAoB,IAApB;EAEAzC,KAAK,CAACjB,MAAN,GAAe,KAAKA,MAApB;EACAiB,KAAK,CAACtF,MAAN,GAAe,KAAKA,MAApB;EACAsF,KAAK,CAACrF,GAAN,GAAY,KAAKA,GAAjB;EAEA,KAAK8F,QAAL,CAAcT,KAAd;AACD,CAXD,C,CAaA;;;AACApF,OAAO,CAAC8E,SAAR,CAAkBgD,KAAlB,GAA0B,YAAY;EACpC/I,OAAO,CAACC,IAAR,CAAa,wDAAb;EACA,OAAO,IAAP;AACD,CAHD;;AAKAgB,OAAO,CAAC8E,SAAR,CAAkBiD,EAAlB,GAAuB/H,OAAO,CAAC8E,SAAR,CAAkBgD,KAAzC;AACA9H,OAAO,CAAC8E,SAAR,CAAkBkD,MAAlB,GAA2BhI,OAAO,CAAC8E,SAAR,CAAkBiD,EAA7C,C,CAEA;;AACA/H,OAAO,CAAC8E,SAAR,CAAkBmD,KAAlB,GAA0B,YAAM;EAC9B,MAAM,IAAIvH,KAAJ,CACJ,6DADI,CAAN;AAGD,CAJD;;AAMAV,OAAO,CAAC8E,SAAR,CAAkBoD,IAAlB,GAAyBlI,OAAO,CAAC8E,SAAR,CAAkBmD,KAA3C;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AACAjI,OAAO,CAAC8E,SAAR,CAAkBqD,OAAlB,GAA4B,UAAUpH,MAAV,EAAkB;EAC5C;EACA,OACEA,MAAM,IACN,QAAOA,MAAP,MAAkB,QADlB,IAEA,CAACS,KAAK,CAACC,OAAN,CAAcV,MAAd,CAFD,IAGAkE,MAAM,CAACH,SAAP,CAAiBsD,QAAjB,CAA0BC,IAA1B,CAA+BtH,MAA/B,MAA2C,iBAJ7C;AAMD,CARD;AAUA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AAEAf,OAAO,CAAC8E,SAAR,CAAkB7E,GAAlB,GAAwB,UAAUqH,EAAV,EAAc;EACpC,IAAI,KAAKgB,UAAT,EAAqB;IACnBvJ,OAAO,CAACC,IAAR,CACE,uEADF;EAGD;;EAED,KAAKsJ,UAAL,GAAkB,IAAlB,CAPoC,CASpC;;EACA,KAAKf,SAAL,GAAiBD,EAAE,IAAI3H,IAAvB,CAVoC,CAYpC;;EACA,KAAK4I,oBAAL;;EAEA,
KAAKC,IAAL;AACD,CAhBD;;AAkBAxI,OAAO,CAAC8E,SAAR,CAAkB2D,iBAAlB,GAAsC,YAAY;EAChD,IAAM3J,IAAI,GAAG,IAAb,CADgD,CAGhD;;EACA,IAAI,KAAK4J,cAAL,IAAuB,CAAC,KAAKC,mBAAjC,EAAsD;IACpD,KAAKA,mBAAL,GAA2BC,UAAU,CAAC,YAAM;MAC1C9J,IAAI,CAAC+J,aAAL,CACE,oBADF,EAEE/J,IAAI,CAAC4J,cAFP,EAGE,WAHF;IAKD,CANoC,EAMlC,KAAKA,cAN6B,CAArC;EAOD;AACF,CAbD,C,CAeA;;;AACA1I,OAAO,CAAC8E,SAAR,CAAkB0D,IAAlB,GAAyB,YAAY;EACnC,IAAI,KAAKM,QAAT,EACE,OAAO,KAAKjD,QAAL,CACL,IAAInF,KAAJ,CAAU,4DAAV,CADK,CAAP;EAIF,IAAM5B,IAAI,GAAG,IAAb;EACA,KAAKgF,GAAL,GAAW1D,OAAO,CAACC,MAAR,EAAX;EACA,IAAQyD,GAAR,GAAgB,IAAhB,CAAQA,GAAR;EACA,IAAIiF,IAAI,GAAG,KAAK7B,SAAL,IAAkB,KAAKJ,KAAlC;;EAEA,KAAKkC,YAAL,GAXmC,CAanC;;;EACAlF,GAAG,CAACmF,gBAAJ,CAAqB,kBAArB,EAAyC,YAAM;IAC7C,IAAQC,UAAR,GAAuBpF,GAAvB,CAAQoF,UAAR;;IACA,IAAIA,UAAU,IAAI,CAAd,IAAmBpK,IAAI,CAACqK,qBAA5B,EAAmD;MACjD3B,YAAY,CAAC1I,IAAI,CAACqK,qBAAN,CAAZ;IACD;;IAED,IAAID,UAAU,KAAK,CAAnB,EAAsB;MACpB;IACD,CAR4C,CAU7C;IACA;;;IACA,IAAI/E,MAAJ;;IACA,IAAI;MACFA,MAAM,GAAGL,GAAG,CAACK,MAAb;IACD,CAFD,CAEE,iBAAM;MACNA,MAAM,GAAG,CAAT;IACD;;IAED,IAAI,CAACA,MAAL,EAAa;MACX,IAAIrF,IAAI,CAACsK,QAAL,IAAiBtK,IAAI,CAACgK,QAA1B,EAAoC;MACpC,OAAOhK,IAAI,CAAC8I,gBAAL,EAAP;IACD;;IAED9I,IAAI,CAACgH,IAAL,CAAU,KAAV;EACD,CAzBD,EAdmC,CAyCnC;;EACA,IAAMuD,cAAc,GAAG,SAAjBA,cAAiB,CAACC,SAAD,EAAYC,CAAZ,EAAkB;IACvC,IAAIA,CAAC,CAACC,KAAF,GAAU,CAAd,EAAiB;MACfD,CAAC,CAACE,OAAF,GAAaF,CAAC,CAACG,MAAF,GAAWH,CAAC,CAACC,KAAd,GAAuB,GAAnC;;MAEA,IAAID,CAAC,CAACE,OAAF,KAAc,GAAlB,EAAuB;QACrBjC,YAAY,CAAC1I,IAAI,CAAC6J,mBAAN,CAAZ;MACD;IACF;;IAEDY,CAAC,CAACD,SAAF,GAAcA,SAAd;IACAxK,IAAI,CAACgH,IAAL,CAAU,UAAV,EAAsByD,CAAtB;EACD,CAXD;;EAaA,IAAI,KAAKI,YAAL,CAAkB,UAAlB,CAAJ,EAAmC;IACjC,IAAI;MACF7F,GAAG,CAACmF,gBAAJ,CAAqB,UAArB,EAAiCI,cAAc,CAACO,IAAf,CAAoB,IAApB,EAA0B,UAA1B,CAAjC;;MACA,IAAI9F,GAAG,CAAC+F,MAAR,EAAgB;QACd/F,GAAG,CAAC+F,MAAJ,CAAWZ,gBAAX,CACE,UADF,EAEEI,cAAc,CAACO,IAAf,CAAoB,IAApB,EAA0B,QAA1B,CAFF;MAID;IACF,CARD,CAQE,iBAAM,CACN;MACA;MACA;IACD;EACF;;EAED,IAAI9F,GAAG,CAAC+F,MAAR,EAAgB;IACd,KAAKpB,iBAAL;EACD,CAzEkC,CA2EnC;;;EACA,IAAI;IACF,IAAI,KAAKqB,QAAL,IAAiB,KAAKC,QAA1B,EAAoC;MAClCjG,GAAG,CAACkG,IAAJ,CAAS,KAAKlK,MAAd,EAAsB,KAAKC,GAA3B,EAAgC,IAAhC,EAAsC,KAAK+J,QAA3C,EAAqD,KAAKC,QAA1D;IACD,CAFD,MAEO;MACLjG,GAAG,CAACkG,IAAJ,CAAS,KAAKlK,MAAd,EAAsB,KAAKC,GAA3B,EAAgC,IAAhC;IACD;EACF,CAND,CAME,OAAO0F,GAAP,EAAY;IACZ;IACA,OAAO,KAAKI,QAAL,CAAcJ,GAAd,CAAP;EACD,CArFkC,CAuFnC;;;EACA,IAAI,KAAKwE,gBAAT,EAA2BnG,GAAG,CAACoG,eAAJ,GAAsB,IAAtB,CAxFQ,CA0FnC;;EACA,IACE,CAAC,KAAKhD,SAAN,IACA,KAAKpH,MAAL,KAAgB,KADhB,IAEA,KAAKA,MAAL,KAAgB,MAFhB,IAGA,OAAOiJ,IAAP,KAAgB,QAHhB,IAIA,CAAC,KAAKZ,OAAL,CAAaY,IAAb,CALH,EAME;IACA;IACA,IAAMoB,WAAW,GAAG,KAAK7E,OAAL,CAAa,cAAb,CAApB;;IACA,IAAIxE,UAAS,GACX,KAAKsJ,WAAL,IACAhK,OAAO,CAACU,SAAR,CAAkBqJ,WAAW,GAAGA,WAAW,CAACnI,KAAZ,CAAkB,GAAlB,EAAuB,CAAvB,CAAH,GAA+B,EAA5D,CAFF;;IAGA,IAAI,CAAClB,UAAD,IAAc0C,MAAM,CAAC2G,WAAD,CAAxB,EAAuC;MACrCrJ,UAAS,GAAGV,OAAO,CAACU,SAAR,CAAkB,kBAAlB,CAAZ;IACD;;IAED,IAAIA,UAAJ,EAAeiI,IAAI,GAAGjI,UAAS,CAACiI,IAAD,CAAhB;EAChB,CA5GkC,CA8GnC;;;EACA,KAAK,IAAMzF,KAAX,IAAoB,KAAKiB,MAAzB,EAAiC;IAC/B,IAAI,KAAKA,MAAL,CAAYjB,KAAZ,MAAuB,IAA3B,EAAiC;IAEjC,IAAI9D,MAAM,CAAC,KAAK+E,MAAN,EAAcjB,KAAd,CAAV,EACEQ,GAAG,CAACuG,gBAAJ,CAAqB/G,KAArB,EAA4B,KAAKiB,MAAL,CAAYjB,KAAZ,CAA5B;EACH;;EAED,IAAI,KAAKoB,aAAT,EAAwB;IACtBZ,GAAG,CAACE,YAAJ,GAAmB,KAAKU,aAAxB;EACD,CAxHkC,CA0HnC;;;EACA,KAAKoB,IAAL,CAAU,SAAV,EAAqB,IAArB,EA3HmC,CA6HnC;EACA;;EACAhC,GAAG,CAACwG,IAAJ,CAAS,OAAOvB,IAAP,KAAgB,WAAhB,GAA8B,IAA9B,GAAqCA,IAA9C;AACD,CAhID;;AAkIA3I,OAAO,CAAC0H,KAAR,GAAgB;EAAA,OAAM,IAAIpI,KAAJ,EAAN;AAAA,CAAhB;;;EAEK,IAAMI,MAAM,WAAZ;;EACHJ,KAAK,CAAC
oF,SAAN,CAAgBhF,MAAM,CAACyD,WAAP,EAAhB,IAAwC,UAAUxD,GAAV,EAAeuH,EAAf,EAAmB;IACzD,IAAM1D,QAAQ,GAAG,IAAIxD,OAAO,CAACJ,OAAZ,CAAoBF,MAApB,EAA4BC,GAA5B,CAAjB;;IACA,KAAKwK,YAAL,CAAkB3G,QAAlB;;IACA,IAAI0D,EAAJ,EAAQ;MACN1D,QAAQ,CAAC3D,GAAT,CAAaqH,EAAb;IACD;;IAED,OAAO1D,QAAP;EACD,CARD;;;AADF,wBAAqB,CAAC,KAAD,EAAQ,MAAR,EAAgB,SAAhB,EAA2B,OAA3B,EAAoC,KAApC,EAA2C,QAA3C,CAArB,0BAA2E;EAAA;AAU1E;;AAEDlE,KAAK,CAACoF,SAAN,CAAgB0F,GAAhB,GAAsB9K,KAAK,CAACoF,SAAN,CAAgB2F,MAAtC;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEArK,OAAO,CAACsK,GAAR,GAAc,UAAC3K,GAAD,EAAMgJ,IAAN,EAAYzB,EAAZ,EAAmB;EAC/B,IAAM1D,QAAQ,GAAGxD,OAAO,CAAC,KAAD,EAAQL,GAAR,CAAxB;;EACA,IAAI,OAAOgJ,IAAP,KAAgB,UAApB,EAAgC;IAC9BzB,EAAE,GAAGyB,IAAL;IACAA,IAAI,GAAG,IAAP;EACD;;EAED,IAAIA,IAAJ,EAAUnF,QAAQ,CAAC+C,KAAT,CAAeoC,IAAf;EACV,IAAIzB,EAAJ,EAAQ1D,QAAQ,CAAC3D,GAAT,CAAaqH,EAAb;EACR,OAAO1D,QAAP;AACD,CAVD;AAYA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AAEAxD,OAAO,CAACuK,IAAR,GAAe,UAAC5K,GAAD,EAAMgJ,IAAN,EAAYzB,EAAZ,EAAmB;EAChC,IAAM1D,QAAQ,GAAGxD,OAAO,CAAC,MAAD,EAASL,GAAT,CAAxB;;EACA,IAAI,OAAOgJ,IAAP,KAAgB,UAApB,EAAgC;IAC9BzB,EAAE,GAAGyB,IAAL;IACAA,IAAI,GAAG,IAAP;EACD;;EAED,IAAIA,IAAJ,EAAUnF,QAAQ,CAAC+C,KAAT,CAAeoC,IAAf;EACV,IAAIzB,EAAJ,EAAQ1D,QAAQ,CAAC3D,GAAT,CAAaqH,EAAb;EACR,OAAO1D,QAAP;AACD,CAVD;AAYA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AAEAxD,OAAO,CAACkG,OAAR,GAAkB,UAACvG,GAAD,EAAMgJ,IAAN,EAAYzB,EAAZ,EAAmB;EACnC,IAAM1D,QAAQ,GAAGxD,OAAO,CAAC,SAAD,EAAYL,GAAZ,CAAxB;;EACA,IAAI,OAAOgJ,IAAP,KAAgB,UAApB,EAAgC;IAC9BzB,EAAE,GAAGyB,IAAL;IACAA,IAAI,GAAG,IAAP;EACD;;EAED,IAAIA,IAAJ,EAAUnF,QAAQ,CAAC0G,IAAT,CAAcvB,IAAd;EACV,IAAIzB,EAAJ,EAAQ1D,QAAQ,CAAC3D,GAAT,CAAaqH,EAAb;EACR,OAAO1D,QAAP;AACD,CAVD;AAYA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AAEA,SAAS4G,GAAT,CAAazK,GAAb,EAAkBgJ,IAAlB,EAAwBzB,EAAxB,EAA4B;EAC1B,IAAM1D,QAAQ,GAAGxD,OAAO,CAAC,QAAD,EAAWL,GAAX,CAAxB;;EACA,IAAI,OAAOgJ,IAAP,KAAgB,UAApB,EAAgC;IAC9BzB,EAAE,GAAGyB,IAAL;IACAA,IAAI,GAAG,IAAP;EACD;;EAED,IAAIA,IAAJ,EAAUnF,QAAQ,CAAC0G,IAAT,CAAcvB,IAAd;EACV,IAAIzB,EAAJ,EAAQ1D,QAAQ,CAAC3D,GAAT,CAAaqH,EAAb;EACR,OAAO1D,QAAP;AACD;;AAEDxD,OAAO,CAACoK,GAAR,GAAcA,GAAd;AACApK,OAAO,CAACqK,MAAR,GAAiBD,GAAjB;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEApK,OAAO,CAACwK,KAAR,GAAgB,UAAC7K,GAAD,EAAMgJ,IAAN,EAAYzB,EAAZ,EAAmB;EACjC,IAAM1D,QAAQ,GAAGxD,OAAO,CAAC,OAAD,EAAUL,GAAV,CAAxB;;EACA,IAAI,OAAOgJ,IAAP,KAAgB,UAApB,EAAgC;IAC9BzB,EAAE,GAAGyB,IAAL;IACAA,IAAI,GAAG,IAAP;EACD;;EAED,IAAIA,IAAJ,EAAUnF,QAAQ,CAAC0G,IAAT,CAAcvB,IAAd;EACV,IAAIzB,EAAJ,EAAQ1D,QAAQ,CAAC3D,GAAT,CAAaqH,EAAb;EACR,OAAO1D,QAAP;AACD,CAVD;AAYA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AAEAxD,OAAO,CAACyK,IAAR,GAAe,UAAC9K,GAAD,EAAMgJ,IAAN,EAAYzB,EAAZ,EAAmB;EAChC,IAAM1D,QAAQ,GAAGxD,OAAO,CAAC,MAAD,EAASL,GAAT,CAAxB;;EACA,IAAI,OAAOgJ,IAAP,KAAgB,UAApB,EAAgC;IAC9BzB,EAAE,GAAGyB,IAAL;IACAA,IAAI,GAAG,IAAP;EACD;;EAED,IAAIA,IAAJ,EAAUnF,QAAQ,CAAC0G,IAAT,CAAcvB,IAAd;EACV,IAAIzB,EAAJ,EAAQ1D,QAAQ,CAAC3D,GAAT,CAAaqH,EAAb;EACR,OAAO1D,QAAP;AACD,CAVD;AAYA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AAEAxD,OAAO,CAAC0K,GAAR,GAAc,UAAC/K,GAAD,EAAMgJ,IAAN,EAAYzB,EAAZ,EAAmB;EAC/B,IAAM1D,QAAQ,GAAGxD,OAAO,CAAC,KAAD,EAAQL,GAAR,CAAxB;;EACA,IAAI,OAAOgJ,IAAP,KAAgB,UAApB,EAAgC;IAC9BzB,EAAE,GAAGyB,IAAL;IACAA,IAAI,GAAG,IAAP;EACD;;EAED,IAAIA,IAAJ,EAAUnF,QAAQ,CAAC0G,IAAT,CAAcvB,IAAd;EACV,IAAIzB,EAAJ,EAAQ1D,QAAQ,CAAC3D,GAAT,CAAaqH,EAAb;EACR,OAAO1D,QAAP;AACD,CAVD"} \ No newline at end of file diff --git a/node_modules/superagent/lib/node/agent.js b/node_modules/superagent/lib/node/agent.js new file mode 100644 index 0000000..ddec9d3 --- /dev/null +++ 
b/node_modules/superagent/lib/node/agent.js @@ -0,0 +1,134 @@ +"use strict"; + +function _createForOfIteratorHelper(o, allowArrayLike) { var it = typeof Symbol !== "undefined" && o[Symbol.iterator] || o["@@iterator"]; if (!it) { if (Array.isArray(o) || (it = _unsupportedIterableToArray(o)) || allowArrayLike && o && typeof o.length === "number") { if (it) o = it; var i = 0; var F = function F() {}; return { s: F, n: function n() { if (i >= o.length) return { done: true }; return { done: false, value: o[i++] }; }, e: function e(_e) { throw _e; }, f: F }; } throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); } var normalCompletion = true, didErr = false, err; return { s: function s() { it = it.call(o); }, n: function n() { var step = it.next(); normalCompletion = step.done; return step; }, e: function e(_e2) { didErr = true; err = _e2; }, f: function f() { try { if (!normalCompletion && it.return != null) it.return(); } finally { if (didErr) throw err; } } }; } + +function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return Array.from(o); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); } + +function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) { arr2[i] = arr[i]; } return arr2; } + +/** + * Module dependencies. + */ +// eslint-disable-next-line node/no-deprecated-api +var _require = require('url'), + parse = _require.parse; + +var _require2 = require('cookiejar'), + CookieJar = _require2.CookieJar; + +var _require3 = require('cookiejar'), + CookieAccessInfo = _require3.CookieAccessInfo; + +var methods = require('methods'); + +var request = require('../..'); + +var AgentBase = require('../agent-base'); +/** + * Expose `Agent`. + */ + + +module.exports = Agent; +/** + * Initialize a new `Agent`. + * + * @api public + */ + +function Agent(options) { + if (!(this instanceof Agent)) { + return new Agent(options); + } + + AgentBase.call(this); + this.jar = new CookieJar(); + + if (options) { + if (options.ca) { + this.ca(options.ca); + } + + if (options.key) { + this.key(options.key); + } + + if (options.pfx) { + this.pfx(options.pfx); + } + + if (options.cert) { + this.cert(options.cert); + } + + if (options.rejectUnauthorized === false) { + this.disableTLSCerts(); + } + } +} + +Agent.prototype = Object.create(AgentBase.prototype); +/** + * Save the cookies in the given `res` to + * the agent's cookie jar for persistence. + * + * @param {Response} res + * @api private + */ + +Agent.prototype._saveCookies = function (res) { + var cookies = res.headers['set-cookie']; + if (cookies) this.jar.setCookies(cookies); +}; +/** + * Attach cookies when available to the given `req`. 
+ * + * @param {Request} req + * @api private + */ + + +Agent.prototype._attachCookies = function (request_) { + var url = parse(request_.url); + var access = new CookieAccessInfo(url.hostname, url.pathname, url.protocol === 'https:'); + var cookies = this.jar.getCookies(access).toValueString(); + request_.cookies = cookies; +}; + +var _iterator = _createForOfIteratorHelper(methods), + _step; + +try { + var _loop = function _loop() { + var name = _step.value; + var method = name.toUpperCase(); + + Agent.prototype[name] = function (url, fn) { + var request_ = new request.Request(method, url); + request_.on('response', this._saveCookies.bind(this)); + request_.on('redirect', this._saveCookies.bind(this)); + request_.on('redirect', this._attachCookies.bind(this, request_)); + + this._setDefaults(request_); + + this._attachCookies(request_); + + if (fn) { + request_.end(fn); + } + + return request_; + }; + }; + + for (_iterator.s(); !(_step = _iterator.n()).done;) { + _loop(); + } +} catch (err) { + _iterator.e(err); +} finally { + _iterator.f(); +} + +Agent.prototype.del = Agent.prototype.delete; +//# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJuYW1lcyI6WyJyZXF1aXJlIiwicGFyc2UiLCJDb29raWVKYXIiLCJDb29raWVBY2Nlc3NJbmZvIiwibWV0aG9kcyIsInJlcXVlc3QiLCJBZ2VudEJhc2UiLCJtb2R1bGUiLCJleHBvcnRzIiwiQWdlbnQiLCJvcHRpb25zIiwiY2FsbCIsImphciIsImNhIiwia2V5IiwicGZ4IiwiY2VydCIsInJlamVjdFVuYXV0aG9yaXplZCIsImRpc2FibGVUTFNDZXJ0cyIsInByb3RvdHlwZSIsIk9iamVjdCIsImNyZWF0ZSIsIl9zYXZlQ29va2llcyIsInJlcyIsImNvb2tpZXMiLCJoZWFkZXJzIiwic2V0Q29va2llcyIsIl9hdHRhY2hDb29raWVzIiwicmVxdWVzdF8iLCJ1cmwiLCJhY2Nlc3MiLCJob3N0bmFtZSIsInBhdGhuYW1lIiwicHJvdG9jb2wiLCJnZXRDb29raWVzIiwidG9WYWx1ZVN0cmluZyIsIm5hbWUiLCJtZXRob2QiLCJ0b1VwcGVyQ2FzZSIsImZuIiwiUmVxdWVzdCIsIm9uIiwiYmluZCIsIl9zZXREZWZhdWx0cyIsImVuZCIsImRlbCIsImRlbGV0ZSJdLCJzb3VyY2VzIjpbIi4uLy4uL3NyYy9ub2RlL2FnZW50LmpzIl0sInNvdXJjZXNDb250ZW50IjpbIi8qKlxuICogTW9kdWxlIGRlcGVuZGVuY2llcy5cbiAqL1xuXG4vLyBlc2xpbnQtZGlzYWJsZS1uZXh0LWxpbmUgbm9kZS9uby1kZXByZWNhdGVkLWFwaVxuY29uc3QgeyBwYXJzZSB9ID0gcmVxdWlyZSgndXJsJyk7XG5jb25zdCB7IENvb2tpZUphciB9ID0gcmVxdWlyZSgnY29va2llamFyJyk7XG5jb25zdCB7IENvb2tpZUFjY2Vzc0luZm8gfSA9IHJlcXVpcmUoJ2Nvb2tpZWphcicpO1xuY29uc3QgbWV0aG9kcyA9IHJlcXVpcmUoJ21ldGhvZHMnKTtcbmNvbnN0IHJlcXVlc3QgPSByZXF1aXJlKCcuLi8uLicpO1xuY29uc3QgQWdlbnRCYXNlID0gcmVxdWlyZSgnLi4vYWdlbnQtYmFzZScpO1xuXG4vKipcbiAqIEV4cG9zZSBgQWdlbnRgLlxuICovXG5cbm1vZHVsZS5leHBvcnRzID0gQWdlbnQ7XG5cbi8qKlxuICogSW5pdGlhbGl6ZSBhIG5ldyBgQWdlbnRgLlxuICpcbiAqIEBhcGkgcHVibGljXG4gKi9cblxuZnVuY3Rpb24gQWdlbnQob3B0aW9ucykge1xuICBpZiAoISh0aGlzIGluc3RhbmNlb2YgQWdlbnQpKSB7XG4gICAgcmV0dXJuIG5ldyBBZ2VudChvcHRpb25zKTtcbiAgfVxuXG4gIEFnZW50QmFzZS5jYWxsKHRoaXMpO1xuICB0aGlzLmphciA9IG5ldyBDb29raWVKYXIoKTtcblxuICBpZiAob3B0aW9ucykge1xuICAgIGlmIChvcHRpb25zLmNhKSB7XG4gICAgICB0aGlzLmNhKG9wdGlvbnMuY2EpO1xuICAgIH1cblxuICAgIGlmIChvcHRpb25zLmtleSkge1xuICAgICAgdGhpcy5rZXkob3B0aW9ucy5rZXkpO1xuICAgIH1cblxuICAgIGlmIChvcHRpb25zLnBmeCkge1xuICAgICAgdGhpcy5wZngob3B0aW9ucy5wZngpO1xuICAgIH1cblxuICAgIGlmIChvcHRpb25zLmNlcnQpIHtcbiAgICAgIHRoaXMuY2VydChvcHRpb25zLmNlcnQpO1xuICAgIH1cblxuICAgIGlmIChvcHRpb25zLnJlamVjdFVuYXV0aG9yaXplZCA9PT0gZmFsc2UpIHtcbiAgICAgIHRoaXMuZGlzYWJsZVRMU0NlcnRzKCk7XG4gICAgfVxuICB9XG59XG5cbkFnZW50LnByb3RvdHlwZSA9IE9iamVjdC5jcmVhdGUoQWdlbnRCYXNlLnByb3RvdHlwZSk7XG5cbi8qKlxuICogU2F2ZSB0aGUgY29va2llcyBpbiB0aGUgZ2l2ZW4gYHJlc2AgdG9cbiAqIHRoZSBhZ2VudCdzIGNvb2tpZSBqYXIgZm9yIHBlcnNpc3RlbmNlLlxuICpcbiAqIEBwYXJhbSB7UmVzcG9uc2V9IHJlc1xuICogQGFwaSBwcml2YXRlXG4gKi9cblxuQWdlbnQucHJvdG90eXBlLl9zYXZlQ29va2llcyA9IGZ1bmN0aW9
uIChyZXMpIHtcbiAgY29uc3QgY29va2llcyA9IHJlcy5oZWFkZXJzWydzZXQtY29va2llJ107XG4gIGlmIChjb29raWVzKSB0aGlzLmphci5zZXRDb29raWVzKGNvb2tpZXMpO1xufTtcblxuLyoqXG4gKiBBdHRhY2ggY29va2llcyB3aGVuIGF2YWlsYWJsZSB0byB0aGUgZ2l2ZW4gYHJlcWAuXG4gKlxuICogQHBhcmFtIHtSZXF1ZXN0fSByZXFcbiAqIEBhcGkgcHJpdmF0ZVxuICovXG5cbkFnZW50LnByb3RvdHlwZS5fYXR0YWNoQ29va2llcyA9IGZ1bmN0aW9uIChyZXF1ZXN0Xykge1xuICBjb25zdCB1cmwgPSBwYXJzZShyZXF1ZXN0Xy51cmwpO1xuICBjb25zdCBhY2Nlc3MgPSBuZXcgQ29va2llQWNjZXNzSW5mbyhcbiAgICB1cmwuaG9zdG5hbWUsXG4gICAgdXJsLnBhdGhuYW1lLFxuICAgIHVybC5wcm90b2NvbCA9PT0gJ2h0dHBzOidcbiAgKTtcbiAgY29uc3QgY29va2llcyA9IHRoaXMuamFyLmdldENvb2tpZXMoYWNjZXNzKS50b1ZhbHVlU3RyaW5nKCk7XG4gIHJlcXVlc3RfLmNvb2tpZXMgPSBjb29raWVzO1xufTtcblxuZm9yIChjb25zdCBuYW1lIG9mIG1ldGhvZHMpIHtcbiAgY29uc3QgbWV0aG9kID0gbmFtZS50b1VwcGVyQ2FzZSgpO1xuICBBZ2VudC5wcm90b3R5cGVbbmFtZV0gPSBmdW5jdGlvbiAodXJsLCBmbikge1xuICAgIGNvbnN0IHJlcXVlc3RfID0gbmV3IHJlcXVlc3QuUmVxdWVzdChtZXRob2QsIHVybCk7XG5cbiAgICByZXF1ZXN0Xy5vbigncmVzcG9uc2UnLCB0aGlzLl9zYXZlQ29va2llcy5iaW5kKHRoaXMpKTtcbiAgICByZXF1ZXN0Xy5vbigncmVkaXJlY3QnLCB0aGlzLl9zYXZlQ29va2llcy5iaW5kKHRoaXMpKTtcbiAgICByZXF1ZXN0Xy5vbigncmVkaXJlY3QnLCB0aGlzLl9hdHRhY2hDb29raWVzLmJpbmQodGhpcywgcmVxdWVzdF8pKTtcbiAgICB0aGlzLl9zZXREZWZhdWx0cyhyZXF1ZXN0Xyk7XG4gICAgdGhpcy5fYXR0YWNoQ29va2llcyhyZXF1ZXN0Xyk7XG5cbiAgICBpZiAoZm4pIHtcbiAgICAgIHJlcXVlc3RfLmVuZChmbik7XG4gICAgfVxuXG4gICAgcmV0dXJuIHJlcXVlc3RfO1xuICB9O1xufVxuXG5BZ2VudC5wcm90b3R5cGUuZGVsID0gQWdlbnQucHJvdG90eXBlLmRlbGV0ZTtcbiJdLCJtYXBwaW5ncyI6Ijs7Ozs7Ozs7QUFBQTtBQUNBO0FBQ0E7QUFFQTtBQUNBLGVBQWtCQSxPQUFPLENBQUMsS0FBRCxDQUF6QjtBQUFBLElBQVFDLEtBQVIsWUFBUUEsS0FBUjs7QUFDQSxnQkFBc0JELE9BQU8sQ0FBQyxXQUFELENBQTdCO0FBQUEsSUFBUUUsU0FBUixhQUFRQSxTQUFSOztBQUNBLGdCQUE2QkYsT0FBTyxDQUFDLFdBQUQsQ0FBcEM7QUFBQSxJQUFRRyxnQkFBUixhQUFRQSxnQkFBUjs7QUFDQSxJQUFNQyxPQUFPLEdBQUdKLE9BQU8sQ0FBQyxTQUFELENBQXZCOztBQUNBLElBQU1LLE9BQU8sR0FBR0wsT0FBTyxDQUFDLE9BQUQsQ0FBdkI7O0FBQ0EsSUFBTU0sU0FBUyxHQUFHTixPQUFPLENBQUMsZUFBRCxDQUF6QjtBQUVBO0FBQ0E7QUFDQTs7O0FBRUFPLE1BQU0sQ0FBQ0MsT0FBUCxHQUFpQkMsS0FBakI7QUFFQTtBQUNBO0FBQ0E7QUFDQTtBQUNBOztBQUVBLFNBQVNBLEtBQVQsQ0FBZUMsT0FBZixFQUF3QjtFQUN0QixJQUFJLEVBQUUsZ0JBQWdCRCxLQUFsQixDQUFKLEVBQThCO0lBQzVCLE9BQU8sSUFBSUEsS0FBSixDQUFVQyxPQUFWLENBQVA7RUFDRDs7RUFFREosU0FBUyxDQUFDSyxJQUFWLENBQWUsSUFBZjtFQUNBLEtBQUtDLEdBQUwsR0FBVyxJQUFJVixTQUFKLEVBQVg7O0VBRUEsSUFBSVEsT0FBSixFQUFhO0lBQ1gsSUFBSUEsT0FBTyxDQUFDRyxFQUFaLEVBQWdCO01BQ2QsS0FBS0EsRUFBTCxDQUFRSCxPQUFPLENBQUNHLEVBQWhCO0lBQ0Q7O0lBRUQsSUFBSUgsT0FBTyxDQUFDSSxHQUFaLEVBQWlCO01BQ2YsS0FBS0EsR0FBTCxDQUFTSixPQUFPLENBQUNJLEdBQWpCO0lBQ0Q7O0lBRUQsSUFBSUosT0FBTyxDQUFDSyxHQUFaLEVBQWlCO01BQ2YsS0FBS0EsR0FBTCxDQUFTTCxPQUFPLENBQUNLLEdBQWpCO0lBQ0Q7O0lBRUQsSUFBSUwsT0FBTyxDQUFDTSxJQUFaLEVBQWtCO01BQ2hCLEtBQUtBLElBQUwsQ0FBVU4sT0FBTyxDQUFDTSxJQUFsQjtJQUNEOztJQUVELElBQUlOLE9BQU8sQ0FBQ08sa0JBQVIsS0FBK0IsS0FBbkMsRUFBMEM7TUFDeEMsS0FBS0MsZUFBTDtJQUNEO0VBQ0Y7QUFDRjs7QUFFRFQsS0FBSyxDQUFDVSxTQUFOLEdBQWtCQyxNQUFNLENBQUNDLE1BQVAsQ0FBY2YsU0FBUyxDQUFDYSxTQUF4QixDQUFsQjtBQUVBO0FBQ0E7QUFDQTtBQUNBO0FBQ0E7QUFDQTtBQUNBOztBQUVBVixLQUFLLENBQUNVLFNBQU4sQ0FBZ0JHLFlBQWhCLEdBQStCLFVBQVVDLEdBQVYsRUFBZTtFQUM1QyxJQUFNQyxPQUFPLEdBQUdELEdBQUcsQ0FBQ0UsT0FBSixDQUFZLFlBQVosQ0FBaEI7RUFDQSxJQUFJRCxPQUFKLEVBQWEsS0FBS1osR0FBTCxDQUFTYyxVQUFULENBQW9CRixPQUFwQjtBQUNkLENBSEQ7QUFLQTtBQUNBO0FBQ0E7QUFDQTtBQUNBO0FBQ0E7OztBQUVBZixLQUFLLENBQUNVLFNBQU4sQ0FBZ0JRLGNBQWhCLEdBQWlDLFVBQVVDLFFBQVYsRUFBb0I7RUFDbkQsSUFBTUMsR0FBRyxHQUFHNUIsS0FBSyxDQUFDMkIsUUFBUSxDQUFDQyxHQUFWLENBQWpCO0VBQ0EsSUFBTUMsTUFBTSxHQUFHLElBQUkzQixnQkFBSixDQUNiMEIsR0FBRyxDQUFDRSxRQURTLEVBRWJGLEdBQUcsQ0FBQ0csUUFGUyxFQUdiSCxHQUFHLENBQU
NJLFFBQUosS0FBaUIsUUFISixDQUFmO0VBS0EsSUFBTVQsT0FBTyxHQUFHLEtBQUtaLEdBQUwsQ0FBU3NCLFVBQVQsQ0FBb0JKLE1BQXBCLEVBQTRCSyxhQUE1QixFQUFoQjtFQUNBUCxRQUFRLENBQUNKLE9BQVQsR0FBbUJBLE9BQW5CO0FBQ0QsQ0FURDs7MkNBV21CcEIsTzs7Ozs7UUFBUmdDLEk7SUFDVCxJQUFNQyxNQUFNLEdBQUdELElBQUksQ0FBQ0UsV0FBTCxFQUFmOztJQUNBN0IsS0FBSyxDQUFDVSxTQUFOLENBQWdCaUIsSUFBaEIsSUFBd0IsVUFBVVAsR0FBVixFQUFlVSxFQUFmLEVBQW1CO01BQ3pDLElBQU1YLFFBQVEsR0FBRyxJQUFJdkIsT0FBTyxDQUFDbUMsT0FBWixDQUFvQkgsTUFBcEIsRUFBNEJSLEdBQTVCLENBQWpCO01BRUFELFFBQVEsQ0FBQ2EsRUFBVCxDQUFZLFVBQVosRUFBd0IsS0FBS25CLFlBQUwsQ0FBa0JvQixJQUFsQixDQUF1QixJQUF2QixDQUF4QjtNQUNBZCxRQUFRLENBQUNhLEVBQVQsQ0FBWSxVQUFaLEVBQXdCLEtBQUtuQixZQUFMLENBQWtCb0IsSUFBbEIsQ0FBdUIsSUFBdkIsQ0FBeEI7TUFDQWQsUUFBUSxDQUFDYSxFQUFULENBQVksVUFBWixFQUF3QixLQUFLZCxjQUFMLENBQW9CZSxJQUFwQixDQUF5QixJQUF6QixFQUErQmQsUUFBL0IsQ0FBeEI7O01BQ0EsS0FBS2UsWUFBTCxDQUFrQmYsUUFBbEI7O01BQ0EsS0FBS0QsY0FBTCxDQUFvQkMsUUFBcEI7O01BRUEsSUFBSVcsRUFBSixFQUFRO1FBQ05YLFFBQVEsQ0FBQ2dCLEdBQVQsQ0FBYUwsRUFBYjtNQUNEOztNQUVELE9BQU9YLFFBQVA7SUFDRCxDQWREOzs7RUFGRixvREFBNEI7SUFBQTtFQWlCM0I7Ozs7Ozs7QUFFRG5CLEtBQUssQ0FBQ1UsU0FBTixDQUFnQjBCLEdBQWhCLEdBQXNCcEMsS0FBSyxDQUFDVSxTQUFOLENBQWdCMkIsTUFBdEMifQ== \ No newline at end of file diff --git a/node_modules/superagent/lib/node/http2wrapper.js b/node_modules/superagent/lib/node/http2wrapper.js new file mode 100644 index 0000000..b262112 --- /dev/null +++ b/node_modules/superagent/lib/node/http2wrapper.js @@ -0,0 +1,220 @@ +"use strict"; + +function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; } + +function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? 
Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; } + +function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } + +var Stream = require('stream'); + +var util = require('util'); + +var net = require('net'); + +var tls = require('tls'); // eslint-disable-next-line node/no-deprecated-api + + +var _require = require('url'), + parse = _require.parse; + +var process = require('process'); + +var semverGte = require('semver/functions/gte'); + +var http2; +if (semverGte(process.version, 'v10.10.0')) http2 = require('http2');else throw new Error('superagent: this version of Node.js does not support http2'); +var _http2$constants = http2.constants, + HTTP2_HEADER_PATH = _http2$constants.HTTP2_HEADER_PATH, + HTTP2_HEADER_STATUS = _http2$constants.HTTP2_HEADER_STATUS, + HTTP2_HEADER_METHOD = _http2$constants.HTTP2_HEADER_METHOD, + HTTP2_HEADER_AUTHORITY = _http2$constants.HTTP2_HEADER_AUTHORITY, + HTTP2_HEADER_HOST = _http2$constants.HTTP2_HEADER_HOST, + HTTP2_HEADER_SET_COOKIE = _http2$constants.HTTP2_HEADER_SET_COOKIE, + NGHTTP2_CANCEL = _http2$constants.NGHTTP2_CANCEL; + +function setProtocol(protocol) { + return { + request: function request(options) { + return new Request(protocol, options); + } + }; +} + +function Request(protocol, options) { + var _this = this; + + Stream.call(this); + var defaultPort = protocol === 'https:' ? 443 : 80; + var defaultHost = 'localhost'; + var port = options.port || defaultPort; + var host = options.host || defaultHost; + delete options.port; + delete options.host; + this.method = options.method; + this.path = options.path; + this.protocol = protocol; + this.host = host; + delete options.method; + delete options.path; + + var sessionOptions = _objectSpread({}, options); + + if (options.socketPath) { + sessionOptions.socketPath = options.socketPath; + sessionOptions.createConnection = this.createUnixConnection.bind(this); + } + + this._headers = {}; + var session = http2.connect("".concat(protocol, "//").concat(host, ":").concat(port), sessionOptions); + this.setHeader('host', "".concat(host, ":").concat(port)); + session.on('error', function (error) { + return _this.emit('error', error); + }); + this.session = session; +} +/** + * Inherit from `Stream` (which inherits from `EventEmitter`). + */ + + +util.inherits(Request, Stream); + +Request.prototype.createUnixConnection = function (authority, options) { + switch (this.protocol) { + case 'http:': + return net.connect(options.socketPath); + + case 'https:': + options.ALPNProtocols = ['h2']; + options.servername = this.host; + options.allowHalfOpen = true; + return tls.connect(options.socketPath, options); + + default: + throw new Error('Unsupported protocol', this.protocol); + } +}; // eslint-disable-next-line no-unused-vars + + +Request.prototype.setNoDelay = function (bool) {// We can not use setNoDelay with HTTP/2. + // Node 10 limits http2session.socket methods to ones safe to use with HTTP/2. 
+ // See also https://nodejs.org/api/http2.html#http2_http2session_socket +}; + +Request.prototype.getFrame = function () { + var _method, + _this2 = this; + + if (this.frame) { + return this.frame; + } + + var method = (_method = {}, _defineProperty(_method, HTTP2_HEADER_PATH, this.path), _defineProperty(_method, HTTP2_HEADER_METHOD, this.method), _method); + var headers = this.mapToHttp2Header(this._headers); + headers = Object.assign(headers, method); + var frame = this.session.request(headers); // eslint-disable-next-line no-unused-vars + + frame.once('response', function (headers, flags) { + headers = _this2.mapToHttpHeader(headers); + frame.headers = headers; + frame.statusCode = headers[HTTP2_HEADER_STATUS]; + frame.status = frame.statusCode; + + _this2.emit('response', frame); + }); + this._headerSent = true; + frame.once('drain', function () { + return _this2.emit('drain'); + }); + frame.on('error', function (error) { + return _this2.emit('error', error); + }); + frame.on('close', function () { + return _this2.session.close(); + }); + this.frame = frame; + return frame; +}; + +Request.prototype.mapToHttpHeader = function (headers) { + var keys = Object.keys(headers); + var http2Headers = {}; + + for (var _i = 0, _keys = keys; _i < _keys.length; _i++) { + var key = _keys[_i]; + var value = headers[key]; + key = key.toLowerCase(); + + switch (key) { + case HTTP2_HEADER_SET_COOKIE: + value = Array.isArray(value) ? value : [value]; + break; + + default: + break; + } + + http2Headers[key] = value; + } + + return http2Headers; +}; + +Request.prototype.mapToHttp2Header = function (headers) { + var keys = Object.keys(headers); + var http2Headers = {}; + + for (var _i2 = 0, _keys2 = keys; _i2 < _keys2.length; _i2++) { + var key = _keys2[_i2]; + var value = headers[key]; + key = key.toLowerCase(); + + switch (key) { + case HTTP2_HEADER_HOST: + key = HTTP2_HEADER_AUTHORITY; + value = /^http:\/\/|^https:\/\//.test(value) ? 
parse(value).host : value; + break; + + default: + break; + } + + http2Headers[key] = value; + } + + return http2Headers; +}; + +Request.prototype.setHeader = function (name, value) { + this._headers[name.toLowerCase()] = value; +}; + +Request.prototype.getHeader = function (name) { + return this._headers[name.toLowerCase()]; +}; + +Request.prototype.write = function (data, encoding) { + var frame = this.getFrame(); + return frame.write(data, encoding); +}; + +Request.prototype.pipe = function (stream, options) { + var frame = this.getFrame(); + return frame.pipe(stream, options); +}; + +Request.prototype.end = function (data) { + var frame = this.getFrame(); + frame.end(data); +}; // eslint-disable-next-line no-unused-vars + + +Request.prototype.abort = function (data) { + var frame = this.getFrame(); + frame.close(NGHTTP2_CANCEL); + this.session.destroy(); +}; + +exports.setProtocol = setProtocol; +//# sourceMappingURL=data:application/json;charset=utf-8;base64,{"version":3,"names":["Stream","require","util","net","tls","parse","process","semverGte","http2","version","Error","constants","HTTP2_HEADER_PATH","HTTP2_HEADER_STATUS","HTTP2_HEADER_METHOD","HTTP2_HEADER_AUTHORITY","HTTP2_HEADER_HOST","HTTP2_HEADER_SET_COOKIE","NGHTTP2_CANCEL","setProtocol","protocol","request","options","Request","call","defaultPort","defaultHost","port","host","method","path","sessionOptions","socketPath","createConnection","createUnixConnection","bind","_headers","session","connect","setHeader","on","error","emit","inherits","prototype","authority","ALPNProtocols","servername","allowHalfOpen","setNoDelay","bool","getFrame","frame","headers","mapToHttp2Header","Object","assign","once","flags","mapToHttpHeader","statusCode","status","_headerSent","close","keys","http2Headers","key","value","toLowerCase","Array","isArray","test","name","getHeader","write","data","encoding","pipe","stream","end","abort","destroy","exports"],"sources":["../../src/node/http2wrapper.js"],"sourcesContent":["const Stream = require('stream');\nconst util = require('util');\nconst net = require('net');\nconst tls = require('tls');\n// eslint-disable-next-line node/no-deprecated-api\nconst { parse } = require('url');\nconst process = require('process');\nconst semverGte = require('semver/functions/gte');\n\nlet http2;\n\nif (semverGte(process.version, 'v10.10.0')) http2 = require('http2');\nelse\n  throw new Error('superagent: this version of Node.js does not support http2');\n\nconst {\n  HTTP2_HEADER_PATH,\n  HTTP2_HEADER_STATUS,\n  HTTP2_HEADER_METHOD,\n  HTTP2_HEADER_AUTHORITY,\n  HTTP2_HEADER_HOST,\n  HTTP2_HEADER_SET_COOKIE,\n  NGHTTP2_CANCEL\n} = http2.constants;\n\nfunction setProtocol(protocol) {\n  return {\n    request(options) {\n      return new Request(protocol, options);\n    }\n  };\n}\n\nfunction Request(protocol, options) {\n  Stream.call(this);\n  const defaultPort = protocol === 'https:' ? 
443 : 80;\n  const defaultHost = 'localhost';\n  const port = options.port || defaultPort;\n  const host = options.host || defaultHost;\n\n  delete options.port;\n  delete options.host;\n\n  this.method = options.method;\n  this.path = options.path;\n  this.protocol = protocol;\n  this.host = host;\n\n  delete options.method;\n  delete options.path;\n\n  const sessionOptions = { ...options };\n  if (options.socketPath) {\n    sessionOptions.socketPath = options.socketPath;\n    sessionOptions.createConnection = this.createUnixConnection.bind(this);\n  }\n\n  this._headers = {};\n\n  const session = http2.connect(`${protocol}//${host}:${port}`, sessionOptions);\n  this.setHeader('host', `${host}:${port}`);\n\n  session.on('error', (error) => this.emit('error', error));\n\n  this.session = session;\n}\n\n/**\n * Inherit from `Stream` (which inherits from `EventEmitter`).\n */\nutil.inherits(Request, Stream);\n\nRequest.prototype.createUnixConnection = function (authority, options) {\n  switch (this.protocol) {\n    case 'http:':\n      return net.connect(options.socketPath);\n    case 'https:':\n      options.ALPNProtocols = ['h2'];\n      options.servername = this.host;\n      options.allowHalfOpen = true;\n      return tls.connect(options.socketPath, options);\n    default:\n      throw new Error('Unsupported protocol', this.protocol);\n  }\n};\n\n// eslint-disable-next-line no-unused-vars\nRequest.prototype.setNoDelay = function (bool) {\n  // We can not use setNoDelay with HTTP/2.\n  // Node 10 limits http2session.socket methods to ones safe to use with HTTP/2.\n  // See also https://nodejs.org/api/http2.html#http2_http2session_socket\n};\n\nRequest.prototype.getFrame = function () {\n  if (this.frame) {\n    return this.frame;\n  }\n\n  const method = {\n    [HTTP2_HEADER_PATH]: this.path,\n    [HTTP2_HEADER_METHOD]: this.method\n  };\n\n  let headers = this.mapToHttp2Header(this._headers);\n\n  headers = Object.assign(headers, method);\n\n  const frame = this.session.request(headers);\n  // eslint-disable-next-line no-unused-vars\n  frame.once('response', (headers, flags) => {\n    headers = this.mapToHttpHeader(headers);\n    frame.headers = headers;\n    frame.statusCode = headers[HTTP2_HEADER_STATUS];\n    frame.status = frame.statusCode;\n    this.emit('response', frame);\n  });\n\n  this._headerSent = true;\n\n  frame.once('drain', () => this.emit('drain'));\n  frame.on('error', (error) => this.emit('error', error));\n  frame.on('close', () => this.session.close());\n\n  this.frame = frame;\n  return frame;\n};\n\nRequest.prototype.mapToHttpHeader = function (headers) {\n  const keys = Object.keys(headers);\n  const http2Headers = {};\n  for (let key of keys) {\n    let value = headers[key];\n    key = key.toLowerCase();\n    switch (key) {\n      case HTTP2_HEADER_SET_COOKIE:\n        value = Array.isArray(value) ? value : [value];\n        break;\n      default:\n        break;\n    }\n\n    http2Headers[key] = value;\n  }\n\n  return http2Headers;\n};\n\nRequest.prototype.mapToHttp2Header = function (headers) {\n  const keys = Object.keys(headers);\n  const http2Headers = {};\n  for (let key of keys) {\n    let value = headers[key];\n    key = key.toLowerCase();\n    switch (key) {\n      case HTTP2_HEADER_HOST:\n        key = HTTP2_HEADER_AUTHORITY;\n        value = /^http:\\/\\/|^https:\\/\\//.test(value)\n          ? 
parse(value).host\n          : value;\n        break;\n      default:\n        break;\n    }\n\n    http2Headers[key] = value;\n  }\n\n  return http2Headers;\n};\n\nRequest.prototype.setHeader = function (name, value) {\n  this._headers[name.toLowerCase()] = value;\n};\n\nRequest.prototype.getHeader = function (name) {\n  return this._headers[name.toLowerCase()];\n};\n\nRequest.prototype.write = function (data, encoding) {\n  const frame = this.getFrame();\n  return frame.write(data, encoding);\n};\n\nRequest.prototype.pipe = function (stream, options) {\n  const frame = this.getFrame();\n  return frame.pipe(stream, options);\n};\n\nRequest.prototype.end = function (data) {\n  const frame = this.getFrame();\n  frame.end(data);\n};\n\n// eslint-disable-next-line no-unused-vars\nRequest.prototype.abort = function (data) {\n  const frame = this.getFrame();\n  frame.close(NGHTTP2_CANCEL);\n  this.session.destroy();\n};\n\nexports.setProtocol = setProtocol;\n"],"mappings":";;;;;;;;AAAA,IAAMA,MAAM,GAAGC,OAAO,CAAC,QAAD,CAAtB;;AACA,IAAMC,IAAI,GAAGD,OAAO,CAAC,MAAD,CAApB;;AACA,IAAME,GAAG,GAAGF,OAAO,CAAC,KAAD,CAAnB;;AACA,IAAMG,GAAG,GAAGH,OAAO,CAAC,KAAD,CAAnB,C,CACA;;;AACA,eAAkBA,OAAO,CAAC,KAAD,CAAzB;AAAA,IAAQI,KAAR,YAAQA,KAAR;;AACA,IAAMC,OAAO,GAAGL,OAAO,CAAC,SAAD,CAAvB;;AACA,IAAMM,SAAS,GAAGN,OAAO,CAAC,sBAAD,CAAzB;;AAEA,IAAIO,KAAJ;AAEA,IAAID,SAAS,CAACD,OAAO,CAACG,OAAT,EAAkB,UAAlB,CAAb,EAA4CD,KAAK,GAAGP,OAAO,CAAC,OAAD,CAAf,CAA5C,KAEE,MAAM,IAAIS,KAAJ,CAAU,4DAAV,CAAN;AAEF,uBAQIF,KAAK,CAACG,SARV;AAAA,IACEC,iBADF,oBACEA,iBADF;AAAA,IAEEC,mBAFF,oBAEEA,mBAFF;AAAA,IAGEC,mBAHF,oBAGEA,mBAHF;AAAA,IAIEC,sBAJF,oBAIEA,sBAJF;AAAA,IAKEC,iBALF,oBAKEA,iBALF;AAAA,IAMEC,uBANF,oBAMEA,uBANF;AAAA,IAOEC,cAPF,oBAOEA,cAPF;;AAUA,SAASC,WAAT,CAAqBC,QAArB,EAA+B;EAC7B,OAAO;IACLC,OADK,mBACGC,OADH,EACY;MACf,OAAO,IAAIC,OAAJ,CAAYH,QAAZ,EAAsBE,OAAtB,CAAP;IACD;EAHI,CAAP;AAKD;;AAED,SAASC,OAAT,CAAiBH,QAAjB,EAA2BE,OAA3B,EAAoC;EAAA;;EAClCtB,MAAM,CAACwB,IAAP,CAAY,IAAZ;EACA,IAAMC,WAAW,GAAGL,QAAQ,KAAK,QAAb,GAAwB,GAAxB,GAA8B,EAAlD;EACA,IAAMM,WAAW,GAAG,WAApB;EACA,IAAMC,IAAI,GAAGL,OAAO,CAACK,IAAR,IAAgBF,WAA7B;EACA,IAAMG,IAAI,GAAGN,OAAO,CAACM,IAAR,IAAgBF,WAA7B;EAEA,OAAOJ,OAAO,CAACK,IAAf;EACA,OAAOL,OAAO,CAACM,IAAf;EAEA,KAAKC,MAAL,GAAcP,OAAO,CAACO,MAAtB;EACA,KAAKC,IAAL,GAAYR,OAAO,CAACQ,IAApB;EACA,KAAKV,QAAL,GAAgBA,QAAhB;EACA,KAAKQ,IAAL,GAAYA,IAAZ;EAEA,OAAON,OAAO,CAACO,MAAf;EACA,OAAOP,OAAO,CAACQ,IAAf;;EAEA,IAAMC,cAAc,qBAAQT,OAAR,CAApB;;EACA,IAAIA,OAAO,CAACU,UAAZ,EAAwB;IACtBD,cAAc,CAACC,UAAf,GAA4BV,OAAO,CAACU,UAApC;IACAD,cAAc,CAACE,gBAAf,GAAkC,KAAKC,oBAAL,CAA0BC,IAA1B,CAA+B,IAA/B,CAAlC;EACD;;EAED,KAAKC,QAAL,GAAgB,EAAhB;EAEA,IAAMC,OAAO,GAAG7B,KAAK,CAAC8B,OAAN,WAAiBlB,QAAjB,eAA8BQ,IAA9B,cAAsCD,IAAtC,GAA8CI,cAA9C,CAAhB;EACA,KAAKQ,SAAL,CAAe,MAAf,YAA0BX,IAA1B,cAAkCD,IAAlC;EAEAU,OAAO,CAACG,EAAR,CAAW,OAAX,EAAoB,UAACC,KAAD;IAAA,OAAW,KAAI,CAACC,IAAL,CAAU,OAAV,EAAmBD,KAAnB,CAAX;EAAA,CAApB;EAEA,KAAKJ,OAAL,GAAeA,OAAf;AACD;AAED;AACA;AACA;;;AACAnC,IAAI,CAACyC,QAAL,CAAcpB,OAAd,EAAuBvB,MAAvB;;AAEAuB,OAAO,CAACqB,SAAR,CAAkBV,oBAAlB,GAAyC,UAAUW,SAAV,EAAqBvB,OAArB,EAA8B;EACrE,QAAQ,KAAKF,QAAb;IACE,KAAK,OAAL;MACE,OAAOjB,GAAG,CAACmC,OAAJ,CAAYhB,OAAO,CAACU,UAApB,CAAP;;IACF,KAAK,QAAL;MACEV,OAAO,CAACwB,aAAR,GAAwB,CAAC,IAAD,CAAxB;MACAxB,OAAO,CAACyB,UAAR,GAAqB,KAAKnB,IAA1B;MACAN,OAAO,CAAC0B,aAAR,GAAwB,IAAxB;MACA,OAAO5C,GAAG,CAACkC,OAAJ,CAAYhB,OAAO,CAACU,UAApB,EAAgCV,OAAhC,CAAP;;IACF;MACE,MAAM,IAAIZ,KAAJ,CAAU,sBAAV,EAAkC,KAAKU,QAAvC,CAAN;EATJ;AAWD,CAZD,C,CAcA;;;AACAG,OAAO,CAACqB,SAAR,CAAkBK,UAAlB,GAA+B,UAAUC,IAAV,EAAgB,CAC7C;EACA;EACA;AACD,CAJD;;AAMA3B,OAAO,CAACqB,SAAR,CAAkBO,QAAlB,
GAA6B,YAAY;EAAA;EAAA;;EACvC,IAAI,KAAKC,KAAT,EAAgB;IACd,OAAO,KAAKA,KAAZ;EACD;;EAED,IAAMvB,MAAM,2CACTjB,iBADS,EACW,KAAKkB,IADhB,4BAEThB,mBAFS,EAEa,KAAKe,MAFlB,WAAZ;EAKA,IAAIwB,OAAO,GAAG,KAAKC,gBAAL,CAAsB,KAAKlB,QAA3B,CAAd;EAEAiB,OAAO,GAAGE,MAAM,CAACC,MAAP,CAAcH,OAAd,EAAuBxB,MAAvB,CAAV;EAEA,IAAMuB,KAAK,GAAG,KAAKf,OAAL,CAAahB,OAAb,CAAqBgC,OAArB,CAAd,CAduC,CAevC;;EACAD,KAAK,CAACK,IAAN,CAAW,UAAX,EAAuB,UAACJ,OAAD,EAAUK,KAAV,EAAoB;IACzCL,OAAO,GAAG,MAAI,CAACM,eAAL,CAAqBN,OAArB,CAAV;IACAD,KAAK,CAACC,OAAN,GAAgBA,OAAhB;IACAD,KAAK,CAACQ,UAAN,GAAmBP,OAAO,CAACxC,mBAAD,CAA1B;IACAuC,KAAK,CAACS,MAAN,GAAeT,KAAK,CAACQ,UAArB;;IACA,MAAI,CAAClB,IAAL,CAAU,UAAV,EAAsBU,KAAtB;EACD,CAND;EAQA,KAAKU,WAAL,GAAmB,IAAnB;EAEAV,KAAK,CAACK,IAAN,CAAW,OAAX,EAAoB;IAAA,OAAM,MAAI,CAACf,IAAL,CAAU,OAAV,CAAN;EAAA,CAApB;EACAU,KAAK,CAACZ,EAAN,CAAS,OAAT,EAAkB,UAACC,KAAD;IAAA,OAAW,MAAI,CAACC,IAAL,CAAU,OAAV,EAAmBD,KAAnB,CAAX;EAAA,CAAlB;EACAW,KAAK,CAACZ,EAAN,CAAS,OAAT,EAAkB;IAAA,OAAM,MAAI,CAACH,OAAL,CAAa0B,KAAb,EAAN;EAAA,CAAlB;EAEA,KAAKX,KAAL,GAAaA,KAAb;EACA,OAAOA,KAAP;AACD,CAhCD;;AAkCA7B,OAAO,CAACqB,SAAR,CAAkBe,eAAlB,GAAoC,UAAUN,OAAV,EAAmB;EACrD,IAAMW,IAAI,GAAGT,MAAM,CAACS,IAAP,CAAYX,OAAZ,CAAb;EACA,IAAMY,YAAY,GAAG,EAArB;;EACA,yBAAgBD,IAAhB,2BAAsB;IAAjB,IAAIE,GAAG,YAAP;IACH,IAAIC,KAAK,GAAGd,OAAO,CAACa,GAAD,CAAnB;IACAA,GAAG,GAAGA,GAAG,CAACE,WAAJ,EAAN;;IACA,QAAQF,GAAR;MACE,KAAKjD,uBAAL;QACEkD,KAAK,GAAGE,KAAK,CAACC,OAAN,CAAcH,KAAd,IAAuBA,KAAvB,GAA+B,CAACA,KAAD,CAAvC;QACA;;MACF;QACE;IALJ;;IAQAF,YAAY,CAACC,GAAD,CAAZ,GAAoBC,KAApB;EACD;;EAED,OAAOF,YAAP;AACD,CAlBD;;AAoBA1C,OAAO,CAACqB,SAAR,CAAkBU,gBAAlB,GAAqC,UAAUD,OAAV,EAAmB;EACtD,IAAMW,IAAI,GAAGT,MAAM,CAACS,IAAP,CAAYX,OAAZ,CAAb;EACA,IAAMY,YAAY,GAAG,EAArB;;EACA,2BAAgBD,IAAhB,8BAAsB;IAAjB,IAAIE,GAAG,cAAP;IACH,IAAIC,KAAK,GAAGd,OAAO,CAACa,GAAD,CAAnB;IACAA,GAAG,GAAGA,GAAG,CAACE,WAAJ,EAAN;;IACA,QAAQF,GAAR;MACE,KAAKlD,iBAAL;QACEkD,GAAG,GAAGnD,sBAAN;QACAoD,KAAK,GAAG,yBAAyBI,IAAzB,CAA8BJ,KAA9B,IACJ9D,KAAK,CAAC8D,KAAD,CAAL,CAAavC,IADT,GAEJuC,KAFJ;QAGA;;MACF;QACE;IARJ;;IAWAF,YAAY,CAACC,GAAD,CAAZ,GAAoBC,KAApB;EACD;;EAED,OAAOF,YAAP;AACD,CArBD;;AAuBA1C,OAAO,CAACqB,SAAR,CAAkBL,SAAlB,GAA8B,UAAUiC,IAAV,EAAgBL,KAAhB,EAAuB;EACnD,KAAK/B,QAAL,CAAcoC,IAAI,CAACJ,WAAL,EAAd,IAAoCD,KAApC;AACD,CAFD;;AAIA5C,OAAO,CAACqB,SAAR,CAAkB6B,SAAlB,GAA8B,UAAUD,IAAV,EAAgB;EAC5C,OAAO,KAAKpC,QAAL,CAAcoC,IAAI,CAACJ,WAAL,EAAd,CAAP;AACD,CAFD;;AAIA7C,OAAO,CAACqB,SAAR,CAAkB8B,KAAlB,GAA0B,UAAUC,IAAV,EAAgBC,QAAhB,EAA0B;EAClD,IAAMxB,KAAK,GAAG,KAAKD,QAAL,EAAd;EACA,OAAOC,KAAK,CAACsB,KAAN,CAAYC,IAAZ,EAAkBC,QAAlB,CAAP;AACD,CAHD;;AAKArD,OAAO,CAACqB,SAAR,CAAkBiC,IAAlB,GAAyB,UAAUC,MAAV,EAAkBxD,OAAlB,EAA2B;EAClD,IAAM8B,KAAK,GAAG,KAAKD,QAAL,EAAd;EACA,OAAOC,KAAK,CAACyB,IAAN,CAAWC,MAAX,EAAmBxD,OAAnB,CAAP;AACD,CAHD;;AAKAC,OAAO,CAACqB,SAAR,CAAkBmC,GAAlB,GAAwB,UAAUJ,IAAV,EAAgB;EACtC,IAAMvB,KAAK,GAAG,KAAKD,QAAL,EAAd;EACAC,KAAK,CAAC2B,GAAN,CAAUJ,IAAV;AACD,CAHD,C,CAKA;;;AACApD,OAAO,CAACqB,SAAR,CAAkBoC,KAAlB,GAA0B,UAAUL,IAAV,EAAgB;EACxC,IAAMvB,KAAK,GAAG,KAAKD,QAAL,EAAd;EACAC,KAAK,CAACW,KAAN,CAAY7C,cAAZ;EACA,KAAKmB,OAAL,CAAa4C,OAAb;AACD,CAJD;;AAMAC,OAAO,CAAC/D,WAAR,GAAsBA,WAAtB"} \ No newline at end of file diff --git a/node_modules/superagent/lib/node/index.js b/node_modules/superagent/lib/node/index.js new file mode 100644 index 0000000..7a30044 --- /dev/null +++ b/node_modules/superagent/lib/node/index.js @@ -0,0 +1,1433 @@ +"use strict"; + +function _createForOfIteratorHelper(o, allowArrayLike) { var it = typeof Symbol !== "undefined" && o[Symbol.iterator] || o["@@iterator"]; if (!it) { if (Array.isArray(o) || (it = _unsupportedIterableToArray(o)) || 
allowArrayLike && o && typeof o.length === "number") { if (it) o = it; var i = 0; var F = function F() {}; return { s: F, n: function n() { if (i >= o.length) return { done: true }; return { done: false, value: o[i++] }; }, e: function e(_e) { throw _e; }, f: F }; } throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); } var normalCompletion = true, didErr = false, err; return { s: function s() { it = it.call(o); }, n: function n() { var step = it.next(); normalCompletion = step.done; return step; }, e: function e(_e2) { didErr = true; err = _e2; }, f: function f() { try { if (!normalCompletion && it.return != null) it.return(); } finally { if (didErr) throw err; } } }; } + +function _toConsumableArray(arr) { return _arrayWithoutHoles(arr) || _iterableToArray(arr) || _unsupportedIterableToArray(arr) || _nonIterableSpread(); } + +function _nonIterableSpread() { throw new TypeError("Invalid attempt to spread non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); } + +function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return Array.from(o); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); } + +function _iterableToArray(iter) { if (typeof Symbol !== "undefined" && iter[Symbol.iterator] != null || iter["@@iterator"] != null) return Array.from(iter); } + +function _arrayWithoutHoles(arr) { if (Array.isArray(arr)) return _arrayLikeToArray(arr); } + +function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) { arr2[i] = arr[i]; } return arr2; } + +function _typeof(obj) { "@babel/helpers - typeof"; return _typeof = "function" == typeof Symbol && "symbol" == typeof Symbol.iterator ? function (obj) { return typeof obj; } : function (obj) { return obj && "function" == typeof Symbol && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }, _typeof(obj); } + +/** + * Module dependencies. 
+ */ +// eslint-disable-next-line node/no-deprecated-api +var _require = require('url'), + parse = _require.parse, + format = _require.format, + resolve = _require.resolve; + +var Stream = require('stream'); + +var https = require('https'); + +var http = require('http'); + +var fs = require('fs'); + +var zlib = require('zlib'); + +var util = require('util'); + +var qs = require('qs'); + +var mime = require('mime'); + +var methods = require('methods'); + +var FormData = require('form-data'); + +var formidable = require('formidable'); + +var debug = require('debug')('superagent'); + +var CookieJar = require('cookiejar'); + +var semverGte = require('semver/functions/gte'); + +var safeStringify = require('fast-safe-stringify'); + +var utils = require('../utils'); + +var RequestBase = require('../request-base'); + +var _require2 = require('./unzip'), + unzip = _require2.unzip; + +var Response = require('./response'); + +var mixin = utils.mixin, + hasOwn = utils.hasOwn; +var http2; +if (semverGte(process.version, 'v10.10.0')) http2 = require('./http2wrapper'); + +function request(method, url) { + // callback + if (typeof url === 'function') { + return new exports.Request('GET', method).end(url); + } // url first + + + if (arguments.length === 1) { + return new exports.Request('GET', method); + } + + return new exports.Request(method, url); +} + +module.exports = request; +exports = module.exports; +/** + * Expose `Request`. + */ + +exports.Request = Request; +/** + * Expose the agent function + */ + +exports.agent = require('./agent'); +/** + * Noop. + */ + +function noop() {} +/** + * Expose `Response`. + */ + + +exports.Response = Response; +/** + * Define "form" mime type. + */ + +mime.define({ + 'application/x-www-form-urlencoded': ['form', 'urlencoded', 'form-data'] +}, true); +/** + * Protocol map. + */ + +exports.protocols = { + 'http:': http, + 'https:': https, + 'http2:': http2 +}; +/** + * Default serialization map. + * + * superagent.serialize['application/xml'] = function(obj){ + * return 'generated xml here'; + * }; + * + */ + +exports.serialize = { + 'application/x-www-form-urlencoded': qs.stringify, + 'application/json': safeStringify +}; +/** + * Default parsers. + * + * superagent.parse['application/xml'] = function(res, fn){ + * fn(null, res); + * }; + * + */ + +exports.parse = require('./parsers'); +/** + * Default buffering map. Can be used to set certain + * response types to buffer/not buffer. + * + * superagent.buffer['application/xml'] = true; + */ + +exports.buffer = {}; +/** + * Initialize internal header tracking properties on a request instance. + * + * @param {Object} req the instance + * @api private + */ + +function _initHeaders(request_) { + request_._header = {// coerces header names to lowercase + }; + request_.header = {// preserves header name case + }; +} +/** + * Initialize a new `Request` with the given `method` and `url`. + * + * @param {String} method + * @param {String|Object} url + * @api public + */ + + +function Request(method, url) { + Stream.call(this); + if (typeof url !== 'string') url = format(url); + this._enableHttp2 = Boolean(process.env.HTTP2_TEST); // internal only + + this._agent = false; + this._formData = null; + this.method = method; + this.url = url; + + _initHeaders(this); + + this.writable = true; + this._redirects = 0; + this.redirects(method === 'HEAD' ? 
0 : 5); + this.cookies = ''; + this.qs = {}; + this._query = []; + this.qsRaw = this._query; // Unused, for backwards compatibility only + + this._redirectList = []; + this._streamRequest = false; + this._lookup = undefined; + this.once('end', this.clearTimeout.bind(this)); +} +/** + * Inherit from `Stream` (which inherits from `EventEmitter`). + * Mixin `RequestBase`. + */ + + +util.inherits(Request, Stream); +mixin(Request.prototype, RequestBase.prototype); +/** + * Enable or Disable http2. + * + * Enable http2. + * + * ``` js + * request.get('http://localhost/') + * .http2() + * .end(callback); + * + * request.get('http://localhost/') + * .http2(true) + * .end(callback); + * ``` + * + * Disable http2. + * + * ``` js + * request = request.http2(); + * request.get('http://localhost/') + * .http2(false) + * .end(callback); + * ``` + * + * @param {Boolean} enable + * @return {Request} for chaining + * @api public + */ + +Request.prototype.http2 = function (bool) { + if (exports.protocols['http2:'] === undefined) { + throw new Error('superagent: this version of Node.js does not support http2'); + } + + this._enableHttp2 = bool === undefined ? true : bool; + return this; +}; +/** + * Queue the given `file` as an attachment to the specified `field`, + * with optional `options` (or filename). + * + * ``` js + * request.post('http://localhost/upload') + * .attach('field', Buffer.from('Hello world'), 'hello.html') + * .end(callback); + * ``` + * + * A filename may also be used: + * + * ``` js + * request.post('http://localhost/upload') + * .attach('files', 'image.jpg') + * .end(callback); + * ``` + * + * @param {String} field + * @param {String|fs.ReadStream|Buffer} file + * @param {String|Object} options + * @return {Request} for chaining + * @api public + */ + + +Request.prototype.attach = function (field, file, options) { + var _this = this; + + if (file) { + if (this._data) { + throw new Error("superagent can't mix .send() and .attach()"); + } + + var o = options || {}; + + if (typeof options === 'string') { + o = { + filename: options + }; + } + + if (typeof file === 'string') { + if (!o.filename) o.filename = file; + debug('creating `fs.ReadStream` instance for file: %s', file); + file = fs.createReadStream(file); + file.on('error', function (error) { + var formData = _this._getFormData(); + + formData.emit('error', error); + }); + } else if (!o.filename && file.path) { + o.filename = file.path; + } + + this._getFormData().append(field, file, o); + } + + return this; +}; + +Request.prototype._getFormData = function () { + var _this2 = this; + + if (!this._formData) { + this._formData = new FormData(); + + this._formData.on('error', function (error) { + debug('FormData error', error); + + if (_this2.called) { + // The request has already finished and the callback was called. + // Silently ignore the error. + return; + } + + _this2.callback(error); + + _this2.abort(); + }); + } + + return this._formData; +}; +/** + * Gets/sets the `Agent` to use for this HTTP request. The default (if this + * function is not called) is to opt out of connection pooling (`agent: false`). + * + * @param {http.Agent} agent + * @return {http.Agent} + * @api public + */ + + +Request.prototype.agent = function (agent) { + if (arguments.length === 0) return this._agent; + this._agent = agent; + return this; +}; +/** + * Gets/sets the `lookup` function to use custom DNS resolver. 
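+ *
+ * Example (illustrative; any function with the `dns.lookup` signature works):
+ *
+ *      request.get('http://localhost/')
+ *        .lookup(require('dns').lookup)
+ *        .end(callback);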
+ * + * @param {Function} lookup + * @return {Function} + * @api public + */ + + +Request.prototype.lookup = function (lookup) { + if (arguments.length === 0) return this._lookup; + this._lookup = lookup; + return this; +}; +/** + * Set _Content-Type_ response header passed through `mime.getType()`. + * + * Examples: + * + * request.post('/') + * .type('xml') + * .send(xmlstring) + * .end(callback); + * + * request.post('/') + * .type('json') + * .send(jsonstring) + * .end(callback); + * + * request.post('/') + * .type('application/json') + * .send(jsonstring) + * .end(callback); + * + * @param {String} type + * @return {Request} for chaining + * @api public + */ + + +Request.prototype.type = function (type) { + return this.set('Content-Type', type.includes('/') ? type : mime.getType(type)); +}; +/** + * Set _Accept_ response header passed through `mime.getType()`. + * + * Examples: + * + * superagent.types.json = 'application/json'; + * + * request.get('/agent') + * .accept('json') + * .end(callback); + * + * request.get('/agent') + * .accept('application/json') + * .end(callback); + * + * @param {String} accept + * @return {Request} for chaining + * @api public + */ + + +Request.prototype.accept = function (type) { + return this.set('Accept', type.includes('/') ? type : mime.getType(type)); +}; +/** + * Add query-string `val`. + * + * Examples: + * + * request.get('/shoes') + * .query('size=10') + * .query({ color: 'blue' }) + * + * @param {Object|String} val + * @return {Request} for chaining + * @api public + */ + + +Request.prototype.query = function (value) { + if (typeof value === 'string') { + this._query.push(value); + } else { + Object.assign(this.qs, value); + } + + return this; +}; +/** + * Write raw `data` / `encoding` to the socket. + * + * @param {Buffer|String} data + * @param {String} encoding + * @return {Boolean} + * @api public + */ + + +Request.prototype.write = function (data, encoding) { + var request_ = this.request(); + + if (!this._streamRequest) { + this._streamRequest = true; + } + + return request_.write(data, encoding); +}; +/** + * Pipe the request body to `stream`. + * + * @param {Stream} stream + * @param {Object} options + * @return {Stream} + * @api public + */ + + +Request.prototype.pipe = function (stream, options) { + this.piped = true; // HACK... + + this.buffer(false); + this.end(); + return this._pipeContinue(stream, options); +}; + +Request.prototype._pipeContinue = function (stream, options) { + var _this3 = this; + + this.req.once('response', function (res) { + // redirect + if (isRedirect(res.statusCode) && _this3._redirects++ !== _this3._maxRedirects) { + return _this3._redirect(res) === _this3 ? _this3._pipeContinue(stream, options) : undefined; + } + + _this3.res = res; + + _this3._emitResponse(); + + if (_this3._aborted) return; + + if (_this3._shouldUnzip(res)) { + var unzipObject = zlib.createUnzip(); + unzipObject.on('error', function (error) { + if (error && error.code === 'Z_BUF_ERROR') { + // unexpected end of file is ignored by browsers and curl + stream.emit('end'); + return; + } + + stream.emit('error', error); + }); + res.pipe(unzipObject).pipe(stream, options); + } else { + res.pipe(stream, options); + } + + res.once('end', function () { + _this3.emit('end'); + }); + }); + return stream; +}; +/** + * Enable / disable buffering. 
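+ *
+ * Example (illustrative):
+ *
+ *      request.get('http://localhost/large-file')
+ *        .buffer(false)
+ *        .end(callback);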
+ * + * @return {Boolean} [val] + * @return {Request} for chaining + * @api public + */ + + +Request.prototype.buffer = function (value) { + this._buffer = value !== false; + return this; +}; +/** + * Redirect to `url + * + * @param {IncomingMessage} res + * @return {Request} for chaining + * @api private + */ + + +Request.prototype._redirect = function (res) { + var url = res.headers.location; + + if (!url) { + return this.callback(new Error('No location header for redirect'), res); + } + + debug('redirect %s -> %s', this.url, url); // location + + url = resolve(this.url, url); // ensure the response is being consumed + // this is required for Node v0.10+ + + res.resume(); + var headers = this.req.getHeaders ? this.req.getHeaders() : this.req._headers; + var changesOrigin = parse(url).host !== parse(this.url).host; // implementation of 302 following defacto standard + + if (res.statusCode === 301 || res.statusCode === 302) { + // strip Content-* related fields + // in case of POST etc + headers = utils.cleanHeader(headers, changesOrigin); // force GET + + this.method = this.method === 'HEAD' ? 'HEAD' : 'GET'; // clear data + + this._data = null; + } // 303 is always GET + + + if (res.statusCode === 303) { + // strip Content-* related fields + // in case of POST etc + headers = utils.cleanHeader(headers, changesOrigin); // force method + + this.method = 'GET'; // clear data + + this._data = null; + } // 307 preserves method + // 308 preserves method + + + delete headers.host; + delete this.req; + delete this._formData; // remove all add header except User-Agent + + _initHeaders(this); // redirect + + + this._endCalled = false; + this.url = url; + this.qs = {}; + this._query.length = 0; + this.set(headers); + this.emit('redirect', res); + + this._redirectList.push(this.url); + + this.end(this._callback); + return this; +}; +/** + * Set Authorization field value with `user` and `pass`. + * + * Examples: + * + * .auth('tobi', 'learnboost') + * .auth('tobi:learnboost') + * .auth('tobi') + * .auth(accessToken, { type: 'bearer' }) + * + * @param {String} user + * @param {String} [pass] + * @param {Object} [options] options with authorization type 'basic' or 'bearer' ('basic' is default) + * @return {Request} for chaining + * @api public + */ + + +Request.prototype.auth = function (user, pass, options) { + if (arguments.length === 1) pass = ''; + + if (_typeof(pass) === 'object' && pass !== null) { + // pass is optional and can be replaced with options + options = pass; + pass = ''; + } + + if (!options) { + options = { + type: 'basic' + }; + } + + var encoder = function encoder(string) { + return Buffer.from(string).toString('base64'); + }; + + return this._auth(user, pass, options, encoder); +}; +/** + * Set the certificate authority option for https request. + * + * @param {Buffer | Array} cert + * @return {Request} for chaining + * @api public + */ + + +Request.prototype.ca = function (cert) { + this._ca = cert; + return this; +}; +/** + * Set the client certificate key option for https request. + * + * @param {Buffer | String} cert + * @return {Request} for chaining + * @api public + */ + + +Request.prototype.key = function (cert) { + this._key = cert; + return this; +}; +/** + * Set the key, certificate, and CA certs of the client in PFX or PKCS12 format. 
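+ *
+ * Example (illustrative; the file path and passphrase are placeholders):
+ *
+ *      request.get('https://localhost/')
+ *        .pfx({ pfx: fs.readFileSync('client.pfx'), passphrase: 'secret' })
+ *        .end(callback);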
+ * + * @param {Buffer | String} cert + * @return {Request} for chaining + * @api public + */ + + +Request.prototype.pfx = function (cert) { + if (_typeof(cert) === 'object' && !Buffer.isBuffer(cert)) { + this._pfx = cert.pfx; + this._passphrase = cert.passphrase; + } else { + this._pfx = cert; + } + + return this; +}; +/** + * Set the client certificate option for https request. + * + * @param {Buffer | String} cert + * @return {Request} for chaining + * @api public + */ + + +Request.prototype.cert = function (cert) { + this._cert = cert; + return this; +}; +/** + * Do not reject expired or invalid TLS certs. + * sets `rejectUnauthorized=true`. Be warned that this allows MITM attacks. + * + * @return {Request} for chaining + * @api public + */ + + +Request.prototype.disableTLSCerts = function () { + this._disableTLSCerts = true; + return this; +}; +/** + * Return an http[s] request. + * + * @return {OutgoingMessage} + * @api private + */ +// eslint-disable-next-line complexity + + +Request.prototype.request = function () { + var _this4 = this; + + if (this.req) return this.req; + var options = {}; + + try { + var query = qs.stringify(this.qs, { + indices: false, + strictNullHandling: true + }); + + if (query) { + this.qs = {}; + + this._query.push(query); + } + + this._finalizeQueryString(); + } catch (err) { + return this.emit('error', err); + } + + var url = this.url; + var retries = this._retries; // Capture backticks as-is from the final query string built above. + // Note: this'll only find backticks entered in req.query(String) + // calls, because qs.stringify unconditionally encodes backticks. + + var queryStringBackticks; + + if (url.includes('`')) { + var queryStartIndex = url.indexOf('?'); + + if (queryStartIndex !== -1) { + var queryString = url.slice(queryStartIndex + 1); + queryStringBackticks = queryString.match(/`|%60/g); + } + } // default to http:// + + + if (url.indexOf('http') !== 0) url = "http://".concat(url); + url = parse(url); // See https://github.com/visionmedia/superagent/issues/1367 + + if (queryStringBackticks) { + var i = 0; + url.query = url.query.replace(/%60/g, function () { + return queryStringBackticks[i++]; + }); + url.search = "?".concat(url.query); + url.path = url.pathname + url.search; + } // support unix sockets + + + if (/^https?\+unix:/.test(url.protocol) === true) { + // get the protocol + url.protocol = "".concat(url.protocol.split('+')[0], ":"); // get the socket, path + + var unixParts = url.path.match(/^([^/]+)(.+)$/); + options.socketPath = unixParts[1].replace(/%2F/g, '/'); + url.path = unixParts[2]; + } // Override IP address of a hostname + + + if (this._connectOverride) { + var _url = url, + hostname = _url.hostname; + var match = hostname in this._connectOverride ? this._connectOverride[hostname] : this._connectOverride['*']; + + if (match) { + // backup the real host + if (!this._header.host) { + this.set('host', url.host); + } + + var newHost; + var newPort; + + if (_typeof(match) === 'object') { + newHost = match.host; + newPort = match.port; + } else { + newHost = match; + newPort = url.port; + } // wrap [ipv6] + + + url.host = /:/.test(newHost) ? 
"[".concat(newHost, "]") : newHost; + + if (newPort) { + url.host += ":".concat(newPort); + url.port = newPort; + } + + url.hostname = newHost; + } + } // options + + + options.method = this.method; + options.port = url.port; + options.path = url.path; + options.host = url.hostname; + options.ca = this._ca; + options.key = this._key; + options.pfx = this._pfx; + options.cert = this._cert; + options.passphrase = this._passphrase; + options.agent = this._agent; + options.lookup = this._lookup; + options.rejectUnauthorized = typeof this._disableTLSCerts === 'boolean' ? !this._disableTLSCerts : process.env.NODE_TLS_REJECT_UNAUTHORIZED !== '0'; // Allows request.get('https://1.2.3.4/').set('Host', 'example.com') + + if (this._header.host) { + options.servername = this._header.host.replace(/:\d+$/, ''); + } + + if (this._trustLocalhost && /^(?:localhost|127\.0\.0\.\d+|(0*:)+:0*1)$/.test(url.hostname)) { + options.rejectUnauthorized = false; + } // initiate request + + + var module_ = this._enableHttp2 ? exports.protocols['http2:'].setProtocol(url.protocol) : exports.protocols[url.protocol]; // request + + this.req = module_.request(options); + var req = this.req; // set tcp no delay + + req.setNoDelay(true); + + if (options.method !== 'HEAD') { + req.setHeader('Accept-Encoding', 'gzip, deflate'); + } + + this.protocol = url.protocol; + this.host = url.host; // expose events + + req.once('drain', function () { + _this4.emit('drain'); + }); + req.on('error', function (error) { + // flag abortion here for out timeouts + // because node will emit a faux-error "socket hang up" + // when request is aborted before a connection is made + if (_this4._aborted) return; // if not the same, we are in the **old** (cancelled) request, + // so need to continue (same as for above) + + if (_this4._retries !== retries) return; // if we've received a response then we don't want to let + // an error in the request blow up the response + + if (_this4.response) return; + + _this4.callback(error); + }); // auth + + if (url.auth) { + var auth = url.auth.split(':'); + this.auth(auth[0], auth[1]); + } + + if (this.username && this.password) { + this.auth(this.username, this.password); + } + + for (var key in this.header) { + if (hasOwn(this.header, key)) req.setHeader(key, this.header[key]); + } // add cookies + + + if (this.cookies) { + if (hasOwn(this._header, 'cookie')) { + // merge + var temporaryJar = new CookieJar.CookieJar(); + temporaryJar.setCookies(this._header.cookie.split(';')); + temporaryJar.setCookies(this.cookies.split(';')); + req.setHeader('Cookie', temporaryJar.getCookies(CookieJar.CookieAccessInfo.All).toValueString()); + } else { + req.setHeader('Cookie', this.cookies); + } + } + + return req; +}; +/** + * Invoke the callback with `err` and `res` + * and handle arity check. + * + * @param {Error} err + * @param {Response} res + * @api private + */ + + +Request.prototype.callback = function (error, res) { + if (this._shouldRetry(error, res)) { + return this._retry(); + } // Avoid the error which is emitted from 'socket hang up' to cause the fn undefined error on JS runtime. + + + var fn = this._callback || noop; + this.clearTimeout(); + if (this.called) return console.warn('superagent: double callback bug'); + this.called = true; + + if (!error) { + try { + if (!this._isResponseOK(res)) { + var message = 'Unsuccessful HTTP response'; + + if (res) { + message = http.STATUS_CODES[res.status] || message; + } + + error = new Error(message); + error.status = res ? 
res.status : undefined; + } + } catch (err) { + error = err; + } + } // It's important that the callback is called outside try/catch + // to avoid double callback + + + if (!error) { + return fn(null, res); + } + + error.response = res; + if (this._maxRetries) error.retries = this._retries - 1; // only emit error event if there is a listener + // otherwise we assume the callback to `.end()` will get the error + + if (error && this.listeners('error').length > 0) { + this.emit('error', error); + } + + fn(error, res); +}; +/** + * Check if `obj` is a host object, + * + * @param {Object} obj host object + * @return {Boolean} is a host object + * @api private + */ + + +Request.prototype._isHost = function (object) { + return Buffer.isBuffer(object) || object instanceof Stream || object instanceof FormData; +}; +/** + * Initiate request, invoking callback `fn(err, res)` + * with an instanceof `Response`. + * + * @param {Function} fn + * @return {Request} for chaining + * @api public + */ + + +Request.prototype._emitResponse = function (body, files) { + var response = new Response(this); + this.response = response; + response.redirects = this._redirectList; + + if (undefined !== body) { + response.body = body; + } + + response.files = files; + + if (this._endCalled) { + response.pipe = function () { + throw new Error("end() has already been called, so it's too late to start piping"); + }; + } + + this.emit('response', response); + return response; +}; + +Request.prototype.end = function (fn) { + this.request(); + debug('%s %s', this.method, this.url); + + if (this._endCalled) { + throw new Error('.end() was called twice. This is not supported in superagent'); + } + + this._endCalled = true; // store callback + + this._callback = fn || noop; + + this._end(); +}; + +Request.prototype._end = function () { + var _this5 = this; + + if (this._aborted) return this.callback(new Error('The request has been aborted even before .end() was called')); + var data = this._data; + var req = this.req; + var method = this.method; + + this._setTimeouts(); // body + + + if (method !== 'HEAD' && !req._headerSent) { + // serialize stuff + if (typeof data !== 'string') { + var contentType = req.getHeader('Content-Type'); // Parse out just the content type from the header (ignore the charset) + + if (contentType) contentType = contentType.split(';')[0]; + var serialize = this._serializer || exports.serialize[contentType]; + + if (!serialize && isJSON(contentType)) { + serialize = exports.serialize['application/json']; + } + + if (serialize) data = serialize(data); + } // content-length + + + if (data && !req.getHeader('Content-Length')) { + req.setHeader('Content-Length', Buffer.isBuffer(data) ? 
data.length : Buffer.byteLength(data)); + } + } // response + // eslint-disable-next-line complexity + + + req.once('response', function (res) { + debug('%s %s -> %s', _this5.method, _this5.url, res.statusCode); + + if (_this5._responseTimeoutTimer) { + clearTimeout(_this5._responseTimeoutTimer); + } + + if (_this5.piped) { + return; + } + + var max = _this5._maxRedirects; + var mime = utils.type(res.headers['content-type'] || '') || 'text/plain'; + var type = mime.split('/')[0]; + if (type) type = type.toLowerCase().trim(); + var multipart = type === 'multipart'; + var redirect = isRedirect(res.statusCode); + var responseType = _this5._responseType; + _this5.res = res; // redirect + + if (redirect && _this5._redirects++ !== max) { + return _this5._redirect(res); + } + + if (_this5.method === 'HEAD') { + _this5.emit('end'); + + _this5.callback(null, _this5._emitResponse()); + + return; + } // zlib support + + + if (_this5._shouldUnzip(res)) { + unzip(req, res); + } + + var buffer = _this5._buffer; + + if (buffer === undefined && mime in exports.buffer) { + buffer = Boolean(exports.buffer[mime]); + } + + var parser = _this5._parser; + + if (undefined === buffer && parser) { + console.warn("A custom superagent parser has been set, but buffering strategy for the parser hasn't been configured. Call `req.buffer(true or false)` or set `superagent.buffer[mime] = true or false`"); + buffer = true; + } + + if (!parser) { + if (responseType) { + parser = exports.parse.image; // It's actually a generic Buffer + + buffer = true; + } else if (multipart) { + var form = formidable(); + parser = form.parse.bind(form); + buffer = true; + } else if (isImageOrVideo(mime)) { + parser = exports.parse.image; + buffer = true; // For backwards-compatibility buffering default is ad-hoc MIME-dependent + } else if (exports.parse[mime]) { + parser = exports.parse[mime]; + } else if (type === 'text') { + parser = exports.parse.text; + buffer = buffer !== false; // everyone wants their own white-labeled json + } else if (isJSON(mime)) { + parser = exports.parse['application/json']; + buffer = buffer !== false; + } else if (buffer) { + parser = exports.parse.text; + } else if (undefined === buffer) { + parser = exports.parse.image; // It's actually a generic Buffer + + buffer = true; + } + } // by default only buffer text/*, json and messed up thing from hell + + + if (undefined === buffer && isText(mime) || isJSON(mime)) { + buffer = true; + } + + _this5._resBuffered = buffer; + var parserHandlesEnd = false; + + if (buffer) { + // Protectiona against zip bombs and other nuisance + var responseBytesLeft = _this5._maxResponseSize || 200000000; + res.on('data', function (buf) { + responseBytesLeft -= buf.byteLength || buf.length > 0 ? buf.length : 0; + + if (responseBytesLeft < 0) { + // This will propagate through error event + var error = new Error('Maximum response size reached'); + error.code = 'ETOOLARGE'; // Parsers aren't required to observe error event, + // so would incorrectly report success + + parserHandlesEnd = false; // Will not emit error event + + res.destroy(error); // so we do callback now + + _this5.callback(error, null); + } + }); + } + + if (parser) { + try { + // Unbuffered parsers are supposed to emit response early, + // which is weird BTW, because response.body won't be there. 
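+        // When `buffer` is true the parser is expected to deliver the full
+        // body and this request emits 'end' from the parse callback below;
+        // when false, the response is handed back unbuffered and 'end' is
+        // wired up separately further down.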
+ parserHandlesEnd = buffer; + parser(res, function (error, object, files) { + if (_this5.timedout) { + // Timeout has already handled all callbacks + return; + } // Intentional (non-timeout) abort is supposed to preserve partial response, + // even if it doesn't parse. + + + if (error && !_this5._aborted) { + return _this5.callback(error); + } + + if (parserHandlesEnd) { + _this5.emit('end'); + + _this5.callback(null, _this5._emitResponse(object, files)); + } + }); + } catch (err) { + _this5.callback(err); + + return; + } + } + + _this5.res = res; // unbuffered + + if (!buffer) { + debug('unbuffered %s %s', _this5.method, _this5.url); + + _this5.callback(null, _this5._emitResponse()); + + if (multipart) return; // allow multipart to handle end event + + res.once('end', function () { + debug('end %s %s', _this5.method, _this5.url); + + _this5.emit('end'); + }); + return; + } // terminating events + + + res.once('error', function (error) { + parserHandlesEnd = false; + + _this5.callback(error, null); + }); + if (!parserHandlesEnd) res.once('end', function () { + debug('end %s %s', _this5.method, _this5.url); // TODO: unless buffering emit earlier to stream + + _this5.emit('end'); + + _this5.callback(null, _this5._emitResponse()); + }); + }); + this.emit('request', this); + + var getProgressMonitor = function getProgressMonitor() { + var lengthComputable = true; + var total = req.getHeader('Content-Length'); + var loaded = 0; + var progress = new Stream.Transform(); + + progress._transform = function (chunk, encoding, callback) { + loaded += chunk.length; + + _this5.emit('progress', { + direction: 'upload', + lengthComputable: lengthComputable, + loaded: loaded, + total: total + }); + + callback(null, chunk); + }; + + return progress; + }; + + var bufferToChunks = function bufferToChunks(buffer) { + var chunkSize = 16 * 1024; // default highWaterMark value + + var chunking = new Stream.Readable(); + var totalLength = buffer.length; + var remainder = totalLength % chunkSize; + var cutoff = totalLength - remainder; + + for (var i = 0; i < cutoff; i += chunkSize) { + var chunk = buffer.slice(i, i + chunkSize); + chunking.push(chunk); + } + + if (remainder > 0) { + var remainderBuffer = buffer.slice(-remainder); + chunking.push(remainderBuffer); + } + + chunking.push(null); // no more data + + return chunking; + }; // if a FormData instance got created, then we send that as the request body + + + var formData = this._formData; + + if (formData) { + // set headers + var headers = formData.getHeaders(); + + for (var i in headers) { + if (hasOwn(headers, i)) { + debug('setting FormData header: "%s: %s"', i, headers[i]); + req.setHeader(i, headers[i]); + } + } // attempt to get "Content-Length" header + + + formData.getLength(function (error, length) { + // TODO: Add chunked encoding when no length (if err) + if (error) debug('formData.getLength had error', error, length); + debug('got FormData Content-Length: %s', length); + + if (typeof length === 'number') { + req.setHeader('Content-Length', length); + } + + formData.pipe(getProgressMonitor()).pipe(req); + }); + } else if (Buffer.isBuffer(data)) { + bufferToChunks(data).pipe(getProgressMonitor()).pipe(req); + } else { + req.end(data); + } +}; // Check whether response has a non-0-sized gzip-encoded body + + +Request.prototype._shouldUnzip = function (res) { + if (res.statusCode === 204 || res.statusCode === 304) { + // These aren't supposed to have any body + return false; + } // header content is a string, and distinction between 0 and no 
information is crucial + + + if (res.headers['content-length'] === '0') { + // We know that the body is empty (unfortunately, this check does not cover chunked encoding) + return false; + } // console.log(res); + + + return /^\s*(?:deflate|gzip)\s*$/.test(res.headers['content-encoding']); +}; +/** + * Overrides DNS for selected hostnames. Takes object mapping hostnames to IP addresses. + * + * When making a request to a URL with a hostname exactly matching a key in the object, + * use the given IP address to connect, instead of using DNS to resolve the hostname. + * + * A special host `*` matches every hostname (keep redirects in mind!) + * + * request.connect({ + * 'test.example.com': '127.0.0.1', + * 'ipv6.example.com': '::1', + * }) + */ + + +Request.prototype.connect = function (connectOverride) { + if (typeof connectOverride === 'string') { + this._connectOverride = { + '*': connectOverride + }; + } else if (_typeof(connectOverride) === 'object') { + this._connectOverride = connectOverride; + } else { + this._connectOverride = undefined; + } + + return this; +}; + +Request.prototype.trustLocalhost = function (toggle) { + this._trustLocalhost = toggle === undefined ? true : toggle; + return this; +}; // generate HTTP verb methods + + +if (!methods.includes('del')) { + // create a copy so we don't cause conflicts with + // other packages using the methods package and + // npm 3.x + methods = _toConsumableArray(methods); + methods.push('del'); +} + +var _iterator = _createForOfIteratorHelper(methods), + _step; + +try { + var _loop = function _loop() { + var method = _step.value; + var name = method; + method = method === 'del' ? 'delete' : method; + method = method.toUpperCase(); + + request[name] = function (url, data, fn) { + var request_ = request(method, url); + + if (typeof data === 'function') { + fn = data; + data = null; + } + + if (data) { + if (method === 'GET' || method === 'HEAD') { + request_.query(data); + } else { + request_.send(data); + } + } + + if (fn) request_.end(fn); + return request_; + }; + }; + + for (_iterator.s(); !(_step = _iterator.n()).done;) { + _loop(); + } + /** + * Check if `mime` is text and should be buffered. + * + * @param {String} mime + * @return {Boolean} + * @api public + */ + +} catch (err) { + _iterator.e(err); +} finally { + _iterator.f(); +} + +function isText(mime) { + var parts = mime.split('/'); + var type = parts[0]; + if (type) type = type.toLowerCase().trim(); + var subtype = parts[1]; + if (subtype) subtype = subtype.toLowerCase().trim(); + return type === 'text' || subtype === 'x-www-form-urlencoded'; +} + +function isImageOrVideo(mime) { + var type = mime.split('/')[0]; + if (type) type = type.toLowerCase().trim(); + return type === 'image' || type === 'video'; +} +/** + * Check if `mime` is json or has +json structured syntax suffix. + * + * @param {String} mime + * @return {Boolean} + * @api private + */ + + +function isJSON(mime) { + // should match /json or +json + // but not /json-seq + return /[/+]json($|[^-\w])/i.test(mime); +} +/** + * Check if we should follow the redirect `code`. 
+ * + * @param {Number} code + * @return {Boolean} + * @api private + */ + + +function isRedirect(code) { + return [301, 302, 303, 305, 307, 308].includes(code); +} +//# sourceMappingURL=data:application/json;charset=utf-8;base64,{"version":3,"names":["require","parse","format","resolve","Stream","https","http","fs","zlib","util","qs","mime","methods","FormData","formidable","debug","CookieJar","semverGte","safeStringify","utils","RequestBase","unzip","Response","mixin","hasOwn","http2","process","version","request","method","url","exports","Request","end","arguments","length","module","agent","noop","define","protocols","serialize","stringify","buffer","_initHeaders","request_","_header","header","call","_enableHttp2","Boolean","env","HTTP2_TEST","_agent","_formData","writable","_redirects","redirects","cookies","_query","qsRaw","_redirectList","_streamRequest","_lookup","undefined","once","clearTimeout","bind","inherits","prototype","bool","Error","attach","field","file","options","_data","o","filename","createReadStream","on","error","formData","_getFormData","emit","path","append","called","callback","abort","lookup","type","set","includes","getType","accept","query","value","push","Object","assign","write","data","encoding","pipe","stream","piped","_pipeContinue","req","res","isRedirect","statusCode","_maxRedirects","_redirect","_emitResponse","_aborted","_shouldUnzip","unzipObject","createUnzip","code","_buffer","headers","location","resume","getHeaders","_headers","changesOrigin","host","cleanHeader","_endCalled","_callback","auth","user","pass","encoder","string","Buffer","from","toString","_auth","ca","cert","_ca","key","_key","pfx","isBuffer","_pfx","_passphrase","passphrase","_cert","disableTLSCerts","_disableTLSCerts","indices","strictNullHandling","_finalizeQueryString","err","retries","_retries","queryStringBackticks","queryStartIndex","indexOf","queryString","slice","match","i","replace","search","pathname","test","protocol","split","unixParts","socketPath","_connectOverride","hostname","newHost","newPort","port","rejectUnauthorized","NODE_TLS_REJECT_UNAUTHORIZED","servername","_trustLocalhost","module_","setProtocol","setNoDelay","setHeader","response","username","password","temporaryJar","setCookies","cookie","getCookies","CookieAccessInfo","All","toValueString","_shouldRetry","_retry","fn","console","warn","_isResponseOK","message","STATUS_CODES","status","_maxRetries","listeners","_isHost","object","body","files","_end","_setTimeouts","_headerSent","contentType","getHeader","_serializer","isJSON","byteLength","_responseTimeoutTimer","max","toLowerCase","trim","multipart","redirect","responseType","_responseType","parser","_parser","image","form","isImageOrVideo","text","isText","_resBuffered","parserHandlesEnd","responseBytesLeft","_maxResponseSize","buf","destroy","timedout","getProgressMonitor","lengthComputable","total","loaded","progress","Transform","_transform","chunk","direction","bufferToChunks","chunkSize","chunking","Readable","totalLength","remainder","cutoff","remainderBuffer","getLength","connect","connectOverride","trustLocalhost","toggle","name","toUpperCase","send","parts","subtype"],"sources":["../../src/node/index.js"],"sourcesContent":["/**\n * Module dependencies.\n */\n\n// eslint-disable-next-line node/no-deprecated-api\nconst { parse, format, resolve } = require('url');\nconst Stream = require('stream');\nconst https = require('https');\nconst http = require('http');\nconst fs = require('fs');\nconst zlib = require('zlib');\nconst util = 
require('util');\nconst qs = require('qs');\nconst mime = require('mime');\nlet methods = require('methods');\nconst FormData = require('form-data');\nconst formidable = require('formidable');\nconst debug = require('debug')('superagent');\nconst CookieJar = require('cookiejar');\nconst semverGte = require('semver/functions/gte');\nconst safeStringify = require('fast-safe-stringify');\n\nconst utils = require('../utils');\nconst RequestBase = require('../request-base');\nconst { unzip } = require('./unzip');\nconst Response = require('./response');\n\nconst { mixin, hasOwn } = utils;\n\nlet http2;\n\nif (semverGte(process.version, 'v10.10.0')) http2 = require('./http2wrapper');\n\nfunction request(method, url) {\n  // callback\n  if (typeof url === 'function') {\n    return new exports.Request('GET', method).end(url);\n  }\n\n  // url first\n  if (arguments.length === 1) {\n    return new exports.Request('GET', method);\n  }\n\n  return new exports.Request(method, url);\n}\n\nmodule.exports = request;\nexports = module.exports;\n\n/**\n * Expose `Request`.\n */\n\nexports.Request = Request;\n\n/**\n * Expose the agent function\n */\n\nexports.agent = require('./agent');\n\n/**\n * Noop.\n */\n\nfunction noop() {}\n\n/**\n * Expose `Response`.\n */\n\nexports.Response = Response;\n\n/**\n * Define \"form\" mime type.\n */\n\nmime.define(\n  {\n    'application/x-www-form-urlencoded': ['form', 'urlencoded', 'form-data']\n  },\n  true\n);\n\n/**\n * Protocol map.\n */\n\nexports.protocols = {\n  'http:': http,\n  'https:': https,\n  'http2:': http2\n};\n\n/**\n * Default serialization map.\n *\n *     superagent.serialize['application/xml'] = function(obj){\n *       return 'generated xml here';\n *     };\n *\n */\n\nexports.serialize = {\n  'application/x-www-form-urlencoded': qs.stringify,\n  'application/json': safeStringify\n};\n\n/**\n * Default parsers.\n *\n *     superagent.parse['application/xml'] = function(res, fn){\n *       fn(null, res);\n *     };\n *\n */\n\nexports.parse = require('./parsers');\n\n/**\n * Default buffering map. Can be used to set certain\n * response types to buffer/not buffer.\n *\n *     superagent.buffer['application/xml'] = true;\n */\nexports.buffer = {};\n\n/**\n * Initialize internal header tracking properties on a request instance.\n *\n * @param {Object} req the instance\n * @api private\n */\nfunction _initHeaders(request_) {\n  request_._header = {\n    // coerces header names to lowercase\n  };\n  request_.header = {\n    // preserves header name case\n  };\n}\n\n/**\n * Initialize a new `Request` with the given `method` and `url`.\n *\n * @param {String} method\n * @param {String|Object} url\n * @api public\n */\n\nfunction Request(method, url) {\n  Stream.call(this);\n  if (typeof url !== 'string') url = format(url);\n  this._enableHttp2 = Boolean(process.env.HTTP2_TEST); // internal only\n  this._agent = false;\n  this._formData = null;\n  this.method = method;\n  this.url = url;\n  _initHeaders(this);\n  this.writable = true;\n  this._redirects = 0;\n  this.redirects(method === 'HEAD' ? 
0 : 5);\n  this.cookies = '';\n  this.qs = {};\n  this._query = [];\n  this.qsRaw = this._query; // Unused, for backwards compatibility only\n  this._redirectList = [];\n  this._streamRequest = false;\n  this._lookup = undefined;\n  this.once('end', this.clearTimeout.bind(this));\n}\n\n/**\n * Inherit from `Stream` (which inherits from `EventEmitter`).\n * Mixin `RequestBase`.\n */\nutil.inherits(Request, Stream);\n\nmixin(Request.prototype, RequestBase.prototype);\n\n/**\n * Enable or Disable http2.\n *\n * Enable http2.\n *\n * ``` js\n * request.get('http://localhost/')\n *   .http2()\n *   .end(callback);\n *\n * request.get('http://localhost/')\n *   .http2(true)\n *   .end(callback);\n * ```\n *\n * Disable http2.\n *\n * ``` js\n * request = request.http2();\n * request.get('http://localhost/')\n *   .http2(false)\n *   .end(callback);\n * ```\n *\n * @param {Boolean} enable\n * @return {Request} for chaining\n * @api public\n */\n\nRequest.prototype.http2 = function (bool) {\n  if (exports.protocols['http2:'] === undefined) {\n    throw new Error(\n      'superagent: this version of Node.js does not support http2'\n    );\n  }\n\n  this._enableHttp2 = bool === undefined ? true : bool;\n  return this;\n};\n\n/**\n * Queue the given `file` as an attachment to the specified `field`,\n * with optional `options` (or filename).\n *\n * ``` js\n * request.post('http://localhost/upload')\n *   .attach('field', Buffer.from('<b>Hello world</b>'), 'hello.html')\n *   .end(callback);\n * ```\n *\n * A filename may also be used:\n *\n * ``` js\n * request.post('http://localhost/upload')\n *   .attach('files', 'image.jpg')\n *   .end(callback);\n * ```\n *\n * @param {String} field\n * @param {String|fs.ReadStream|Buffer} file\n * @param {String|Object} options\n * @return {Request} for chaining\n * @api public\n */\n\nRequest.prototype.attach = function (field, file, options) {\n  if (file) {\n    if (this._data) {\n      throw new Error(\"superagent can't mix .send() and .attach()\");\n    }\n\n    let o = options || {};\n    if (typeof options === 'string') {\n      o = { filename: options };\n    }\n\n    if (typeof file === 'string') {\n      if (!o.filename) o.filename = file;\n      debug('creating `fs.ReadStream` instance for file: %s', file);\n      file = fs.createReadStream(file);\n      file.on('error', (error) => {\n        const formData = this._getFormData();\n        formData.emit('error', error);\n      });\n    } else if (!o.filename && file.path) {\n      o.filename = file.path;\n    }\n\n    this._getFormData().append(field, file, o);\n  }\n\n  return this;\n};\n\nRequest.prototype._getFormData = function () {\n  if (!this._formData) {\n    this._formData = new FormData();\n    this._formData.on('error', (error) => {\n      debug('FormData error', error);\n      if (this.called) {\n        // The request has already finished and the callback was called.\n        // Silently ignore the error.\n        return;\n      }\n\n      this.callback(error);\n      this.abort();\n    });\n  }\n\n  return this._formData;\n};\n\n/**\n * Gets/sets the `Agent` to use for this HTTP request. 
The default (if this\n * function is not called) is to opt out of connection pooling (`agent: false`).\n *\n * @param {http.Agent} agent\n * @return {http.Agent}\n * @api public\n */\n\nRequest.prototype.agent = function (agent) {\n  if (arguments.length === 0) return this._agent;\n  this._agent = agent;\n  return this;\n};\n\n/**\n * Gets/sets the `lookup` function to use custom DNS resolver.\n *\n * @param {Function} lookup\n * @return {Function}\n * @api public\n */\n\nRequest.prototype.lookup = function (lookup) {\n  if (arguments.length === 0) return this._lookup;\n  this._lookup = lookup;\n  return this;\n};\n\n/**\n * Set _Content-Type_ response header passed through `mime.getType()`.\n *\n * Examples:\n *\n *      request.post('/')\n *        .type('xml')\n *        .send(xmlstring)\n *        .end(callback);\n *\n *      request.post('/')\n *        .type('json')\n *        .send(jsonstring)\n *        .end(callback);\n *\n *      request.post('/')\n *        .type('application/json')\n *        .send(jsonstring)\n *        .end(callback);\n *\n * @param {String} type\n * @return {Request} for chaining\n * @api public\n */\n\nRequest.prototype.type = function (type) {\n  return this.set(\n    'Content-Type',\n    type.includes('/') ? type : mime.getType(type)\n  );\n};\n\n/**\n * Set _Accept_ response header passed through `mime.getType()`.\n *\n * Examples:\n *\n *      superagent.types.json = 'application/json';\n *\n *      request.get('/agent')\n *        .accept('json')\n *        .end(callback);\n *\n *      request.get('/agent')\n *        .accept('application/json')\n *        .end(callback);\n *\n * @param {String} accept\n * @return {Request} for chaining\n * @api public\n */\n\nRequest.prototype.accept = function (type) {\n  return this.set('Accept', type.includes('/') ? type : mime.getType(type));\n};\n\n/**\n * Add query-string `val`.\n *\n * Examples:\n *\n *   request.get('/shoes')\n *     .query('size=10')\n *     .query({ color: 'blue' })\n *\n * @param {Object|String} val\n * @return {Request} for chaining\n * @api public\n */\n\nRequest.prototype.query = function (value) {\n  if (typeof value === 'string') {\n    this._query.push(value);\n  } else {\n    Object.assign(this.qs, value);\n  }\n\n  return this;\n};\n\n/**\n * Write raw `data` / `encoding` to the socket.\n *\n * @param {Buffer|String} data\n * @param {String} encoding\n * @return {Boolean}\n * @api public\n */\n\nRequest.prototype.write = function (data, encoding) {\n  const request_ = this.request();\n  if (!this._streamRequest) {\n    this._streamRequest = true;\n  }\n\n  return request_.write(data, encoding);\n};\n\n/**\n * Pipe the request body to `stream`.\n *\n * @param {Stream} stream\n * @param {Object} options\n * @return {Stream}\n * @api public\n */\n\nRequest.prototype.pipe = function (stream, options) {\n  this.piped = true; // HACK...\n  this.buffer(false);\n  this.end();\n  return this._pipeContinue(stream, options);\n};\n\nRequest.prototype._pipeContinue = function (stream, options) {\n  this.req.once('response', (res) => {\n    // redirect\n    if (\n      isRedirect(res.statusCode) &&\n      this._redirects++ !== this._maxRedirects\n    ) {\n      return this._redirect(res) === this\n        ? 
this._pipeContinue(stream, options)\n        : undefined;\n    }\n\n    this.res = res;\n    this._emitResponse();\n    if (this._aborted) return;\n\n    if (this._shouldUnzip(res)) {\n      const unzipObject = zlib.createUnzip();\n      unzipObject.on('error', (error) => {\n        if (error && error.code === 'Z_BUF_ERROR') {\n          // unexpected end of file is ignored by browsers and curl\n          stream.emit('end');\n          return;\n        }\n\n        stream.emit('error', error);\n      });\n      res.pipe(unzipObject).pipe(stream, options);\n    } else {\n      res.pipe(stream, options);\n    }\n\n    res.once('end', () => {\n      this.emit('end');\n    });\n  });\n  return stream;\n};\n\n/**\n * Enable / disable buffering.\n *\n * @return {Boolean} [val]\n * @return {Request} for chaining\n * @api public\n */\n\nRequest.prototype.buffer = function (value) {\n  this._buffer = value !== false;\n  return this;\n};\n\n/**\n * Redirect to `url\n *\n * @param {IncomingMessage} res\n * @return {Request} for chaining\n * @api private\n */\n\nRequest.prototype._redirect = function (res) {\n  let url = res.headers.location;\n  if (!url) {\n    return this.callback(new Error('No location header for redirect'), res);\n  }\n\n  debug('redirect %s -> %s', this.url, url);\n\n  // location\n  url = resolve(this.url, url);\n\n  // ensure the response is being consumed\n  // this is required for Node v0.10+\n  res.resume();\n\n  let headers = this.req.getHeaders ? this.req.getHeaders() : this.req._headers;\n\n  const changesOrigin = parse(url).host !== parse(this.url).host;\n\n  // implementation of 302 following defacto standard\n  if (res.statusCode === 301 || res.statusCode === 302) {\n    // strip Content-* related fields\n    // in case of POST etc\n    headers = utils.cleanHeader(headers, changesOrigin);\n\n    // force GET\n    this.method = this.method === 'HEAD' ? 
'HEAD' : 'GET';\n\n    // clear data\n    this._data = null;\n  }\n\n  // 303 is always GET\n  if (res.statusCode === 303) {\n    // strip Content-* related fields\n    // in case of POST etc\n    headers = utils.cleanHeader(headers, changesOrigin);\n\n    // force method\n    this.method = 'GET';\n\n    // clear data\n    this._data = null;\n  }\n\n  // 307 preserves method\n  // 308 preserves method\n  delete headers.host;\n\n  delete this.req;\n  delete this._formData;\n\n  // remove all add header except User-Agent\n  _initHeaders(this);\n\n  // redirect\n  this._endCalled = false;\n  this.url = url;\n  this.qs = {};\n  this._query.length = 0;\n  this.set(headers);\n  this.emit('redirect', res);\n  this._redirectList.push(this.url);\n  this.end(this._callback);\n  return this;\n};\n\n/**\n * Set Authorization field value with `user` and `pass`.\n *\n * Examples:\n *\n *   .auth('tobi', 'learnboost')\n *   .auth('tobi:learnboost')\n *   .auth('tobi')\n *   .auth(accessToken, { type: 'bearer' })\n *\n * @param {String} user\n * @param {String} [pass]\n * @param {Object} [options] options with authorization type 'basic' or 'bearer' ('basic' is default)\n * @return {Request} for chaining\n * @api public\n */\n\nRequest.prototype.auth = function (user, pass, options) {\n  if (arguments.length === 1) pass = '';\n  if (typeof pass === 'object' && pass !== null) {\n    // pass is optional and can be replaced with options\n    options = pass;\n    pass = '';\n  }\n\n  if (!options) {\n    options = { type: 'basic' };\n  }\n\n  const encoder = (string) => Buffer.from(string).toString('base64');\n\n  return this._auth(user, pass, options, encoder);\n};\n\n/**\n * Set the certificate authority option for https request.\n *\n * @param {Buffer | Array} cert\n * @return {Request} for chaining\n * @api public\n */\n\nRequest.prototype.ca = function (cert) {\n  this._ca = cert;\n  return this;\n};\n\n/**\n * Set the client certificate key option for https request.\n *\n * @param {Buffer | String} cert\n * @return {Request} for chaining\n * @api public\n */\n\nRequest.prototype.key = function (cert) {\n  this._key = cert;\n  return this;\n};\n\n/**\n * Set the key, certificate, and CA certs of the client in PFX or PKCS12 format.\n *\n * @param {Buffer | String} cert\n * @return {Request} for chaining\n * @api public\n */\n\nRequest.prototype.pfx = function (cert) {\n  if (typeof cert === 'object' && !Buffer.isBuffer(cert)) {\n    this._pfx = cert.pfx;\n    this._passphrase = cert.passphrase;\n  } else {\n    this._pfx = cert;\n  }\n\n  return this;\n};\n\n/**\n * Set the client certificate option for https request.\n *\n * @param {Buffer | String} cert\n * @return {Request} for chaining\n * @api public\n */\n\nRequest.prototype.cert = function (cert) {\n  this._cert = cert;\n  return this;\n};\n\n/**\n * Do not reject expired or invalid TLS certs.\n * sets `rejectUnauthorized=true`. 
Be warned that this allows MITM attacks.\n *\n * @return {Request} for chaining\n * @api public\n */\n\nRequest.prototype.disableTLSCerts = function () {\n  this._disableTLSCerts = true;\n  return this;\n};\n\n/**\n * Return an http[s] request.\n *\n * @return {OutgoingMessage}\n * @api private\n */\n\n// eslint-disable-next-line complexity\nRequest.prototype.request = function () {\n  if (this.req) return this.req;\n\n  const options = {};\n\n  try {\n    const query = qs.stringify(this.qs, {\n      indices: false,\n      strictNullHandling: true\n    });\n    if (query) {\n      this.qs = {};\n      this._query.push(query);\n    }\n\n    this._finalizeQueryString();\n  } catch (err) {\n    return this.emit('error', err);\n  }\n\n  let { url } = this;\n  const retries = this._retries;\n\n  // Capture backticks as-is from the final query string built above.\n  // Note: this'll only find backticks entered in req.query(String)\n  // calls, because qs.stringify unconditionally encodes backticks.\n  let queryStringBackticks;\n  if (url.includes('`')) {\n    const queryStartIndex = url.indexOf('?');\n\n    if (queryStartIndex !== -1) {\n      const queryString = url.slice(queryStartIndex + 1);\n      queryStringBackticks = queryString.match(/`|%60/g);\n    }\n  }\n\n  // default to http://\n  if (url.indexOf('http') !== 0) url = `http://${url}`;\n  url = parse(url);\n\n  // See https://github.com/visionmedia/superagent/issues/1367\n  if (queryStringBackticks) {\n    let i = 0;\n    url.query = url.query.replace(/%60/g, () => queryStringBackticks[i++]);\n    url.search = `?${url.query}`;\n    url.path = url.pathname + url.search;\n  }\n\n  // support unix sockets\n  if (/^https?\\+unix:/.test(url.protocol) === true) {\n    // get the protocol\n    url.protocol = `${url.protocol.split('+')[0]}:`;\n\n    // get the socket, path\n    const unixParts = url.path.match(/^([^/]+)(.+)$/);\n    options.socketPath = unixParts[1].replace(/%2F/g, '/');\n    url.path = unixParts[2];\n  }\n\n  // Override IP address of a hostname\n  if (this._connectOverride) {\n    const { hostname } = url;\n    const match =\n      hostname in this._connectOverride\n        ? this._connectOverride[hostname]\n        : this._connectOverride['*'];\n    if (match) {\n      // backup the real host\n      if (!this._header.host) {\n        this.set('host', url.host);\n      }\n\n      let newHost;\n      let newPort;\n\n      if (typeof match === 'object') {\n        newHost = match.host;\n        newPort = match.port;\n      } else {\n        newHost = match;\n        newPort = url.port;\n      }\n\n      // wrap [ipv6]\n      url.host = /:/.test(newHost) ? `[${newHost}]` : newHost;\n      if (newPort) {\n        url.host += `:${newPort}`;\n        url.port = newPort;\n      }\n\n      url.hostname = newHost;\n    }\n  }\n\n  // options\n  options.method = this.method;\n  options.port = url.port;\n  options.path = url.path;\n  options.host = url.hostname;\n  options.ca = this._ca;\n  options.key = this._key;\n  options.pfx = this._pfx;\n  options.cert = this._cert;\n  options.passphrase = this._passphrase;\n  options.agent = this._agent;\n  options.lookup = this._lookup;\n  options.rejectUnauthorized =\n    typeof this._disableTLSCerts === 'boolean'\n      ? 
!this._disableTLSCerts\n      : process.env.NODE_TLS_REJECT_UNAUTHORIZED !== '0';\n\n  // Allows request.get('https://1.2.3.4/').set('Host', 'example.com')\n  if (this._header.host) {\n    options.servername = this._header.host.replace(/:\\d+$/, '');\n  }\n\n  if (\n    this._trustLocalhost &&\n    /^(?:localhost|127\\.0\\.0\\.\\d+|(0*:)+:0*1)$/.test(url.hostname)\n  ) {\n    options.rejectUnauthorized = false;\n  }\n\n  // initiate request\n  const module_ = this._enableHttp2\n    ? exports.protocols['http2:'].setProtocol(url.protocol)\n    : exports.protocols[url.protocol];\n\n  // request\n  this.req = module_.request(options);\n  const { req } = this;\n\n  // set tcp no delay\n  req.setNoDelay(true);\n\n  if (options.method !== 'HEAD') {\n    req.setHeader('Accept-Encoding', 'gzip, deflate');\n  }\n\n  this.protocol = url.protocol;\n  this.host = url.host;\n\n  // expose events\n  req.once('drain', () => {\n    this.emit('drain');\n  });\n\n  req.on('error', (error) => {\n    // flag abortion here for out timeouts\n    // because node will emit a faux-error \"socket hang up\"\n    // when request is aborted before a connection is made\n    if (this._aborted) return;\n    // if not the same, we are in the **old** (cancelled) request,\n    // so need to continue (same as for above)\n    if (this._retries !== retries) return;\n    // if we've received a response then we don't want to let\n    // an error in the request blow up the response\n    if (this.response) return;\n    this.callback(error);\n  });\n\n  // auth\n  if (url.auth) {\n    const auth = url.auth.split(':');\n    this.auth(auth[0], auth[1]);\n  }\n\n  if (this.username && this.password) {\n    this.auth(this.username, this.password);\n  }\n\n  for (const key in this.header) {\n    if (hasOwn(this.header, key)) req.setHeader(key, this.header[key]);\n  }\n\n  // add cookies\n  if (this.cookies) {\n    if (hasOwn(this._header, 'cookie')) {\n      // merge\n      const temporaryJar = new CookieJar.CookieJar();\n      temporaryJar.setCookies(this._header.cookie.split(';'));\n      temporaryJar.setCookies(this.cookies.split(';'));\n      req.setHeader(\n        'Cookie',\n        temporaryJar.getCookies(CookieJar.CookieAccessInfo.All).toValueString()\n      );\n    } else {\n      req.setHeader('Cookie', this.cookies);\n    }\n  }\n\n  return req;\n};\n\n/**\n * Invoke the callback with `err` and `res`\n * and handle arity check.\n *\n * @param {Error} err\n * @param {Response} res\n * @api private\n */\n\nRequest.prototype.callback = function (error, res) {\n  if (this._shouldRetry(error, res)) {\n    return this._retry();\n  }\n\n  // Avoid the error which is emitted from 'socket hang up' to cause the fn undefined error on JS runtime.\n  const fn = this._callback || noop;\n  this.clearTimeout();\n  if (this.called) return console.warn('superagent: double callback bug');\n  this.called = true;\n\n  if (!error) {\n    try {\n      if (!this._isResponseOK(res)) {\n        let message = 'Unsuccessful HTTP response';\n        if (res) {\n          message = http.STATUS_CODES[res.status] || message;\n        }\n\n        error = new Error(message);\n        error.status = res ? 
res.status : undefined;\n      }\n    } catch (err) {\n      error = err;\n    }\n  }\n\n  // It's important that the callback is called outside try/catch\n  // to avoid double callback\n  if (!error) {\n    return fn(null, res);\n  }\n\n  error.response = res;\n  if (this._maxRetries) error.retries = this._retries - 1;\n\n  // only emit error event if there is a listener\n  // otherwise we assume the callback to `.end()` will get the error\n  if (error && this.listeners('error').length > 0) {\n    this.emit('error', error);\n  }\n\n  fn(error, res);\n};\n\n/**\n * Check if `obj` is a host object,\n *\n * @param {Object} obj host object\n * @return {Boolean} is a host object\n * @api private\n */\nRequest.prototype._isHost = function (object) {\n  return (\n    Buffer.isBuffer(object) ||\n    object instanceof Stream ||\n    object instanceof FormData\n  );\n};\n\n/**\n * Initiate request, invoking callback `fn(err, res)`\n * with an instanceof `Response`.\n *\n * @param {Function} fn\n * @return {Request} for chaining\n * @api public\n */\n\nRequest.prototype._emitResponse = function (body, files) {\n  const response = new Response(this);\n  this.response = response;\n  response.redirects = this._redirectList;\n  if (undefined !== body) {\n    response.body = body;\n  }\n\n  response.files = files;\n  if (this._endCalled) {\n    response.pipe = function () {\n      throw new Error(\n        \"end() has already been called, so it's too late to start piping\"\n      );\n    };\n  }\n\n  this.emit('response', response);\n  return response;\n};\n\nRequest.prototype.end = function (fn) {\n  this.request();\n  debug('%s %s', this.method, this.url);\n\n  if (this._endCalled) {\n    throw new Error(\n      '.end() was called twice. This is not supported in superagent'\n    );\n  }\n\n  this._endCalled = true;\n\n  // store callback\n  this._callback = fn || noop;\n\n  this._end();\n};\n\nRequest.prototype._end = function () {\n  if (this._aborted)\n    return this.callback(\n      new Error('The request has been aborted even before .end() was called')\n    );\n\n  let data = this._data;\n  const { req } = this;\n  const { method } = this;\n\n  this._setTimeouts();\n\n  // body\n  if (method !== 'HEAD' && !req._headerSent) {\n    // serialize stuff\n    if (typeof data !== 'string') {\n      let contentType = req.getHeader('Content-Type');\n      // Parse out just the content type from the header (ignore the charset)\n      if (contentType) contentType = contentType.split(';')[0];\n      let serialize = this._serializer || exports.serialize[contentType];\n      if (!serialize && isJSON(contentType)) {\n        serialize = exports.serialize['application/json'];\n      }\n\n      if (serialize) data = serialize(data);\n    }\n\n    // content-length\n    if (data && !req.getHeader('Content-Length')) {\n      req.setHeader(\n        'Content-Length',\n        Buffer.isBuffer(data) ? 
data.length : Buffer.byteLength(data)\n      );\n    }\n  }\n\n  // response\n  // eslint-disable-next-line complexity\n  req.once('response', (res) => {\n    debug('%s %s -> %s', this.method, this.url, res.statusCode);\n\n    if (this._responseTimeoutTimer) {\n      clearTimeout(this._responseTimeoutTimer);\n    }\n\n    if (this.piped) {\n      return;\n    }\n\n    const max = this._maxRedirects;\n    const mime = utils.type(res.headers['content-type'] || '') || 'text/plain';\n    let type = mime.split('/')[0];\n    if (type) type = type.toLowerCase().trim();\n    const multipart = type === 'multipart';\n    const redirect = isRedirect(res.statusCode);\n    const responseType = this._responseType;\n\n    this.res = res;\n\n    // redirect\n    if (redirect && this._redirects++ !== max) {\n      return this._redirect(res);\n    }\n\n    if (this.method === 'HEAD') {\n      this.emit('end');\n      this.callback(null, this._emitResponse());\n      return;\n    }\n\n    // zlib support\n    if (this._shouldUnzip(res)) {\n      unzip(req, res);\n    }\n\n    let buffer = this._buffer;\n    if (buffer === undefined && mime in exports.buffer) {\n      buffer = Boolean(exports.buffer[mime]);\n    }\n\n    let parser = this._parser;\n    if (undefined === buffer && parser) {\n      console.warn(\n        \"A custom superagent parser has been set, but buffering strategy for the parser hasn't been configured. Call `req.buffer(true or false)` or set `superagent.buffer[mime] = true or false`\"\n      );\n      buffer = true;\n    }\n\n    if (!parser) {\n      if (responseType) {\n        parser = exports.parse.image; // It's actually a generic Buffer\n        buffer = true;\n      } else if (multipart) {\n        const form = formidable();\n        parser = form.parse.bind(form);\n        buffer = true;\n      } else if (isImageOrVideo(mime)) {\n        parser = exports.parse.image;\n        buffer = true; // For backwards-compatibility buffering default is ad-hoc MIME-dependent\n      } else if (exports.parse[mime]) {\n        parser = exports.parse[mime];\n      } else if (type === 'text') {\n        parser = exports.parse.text;\n        buffer = buffer !== false;\n\n        // everyone wants their own white-labeled json\n      } else if (isJSON(mime)) {\n        parser = exports.parse['application/json'];\n        buffer = buffer !== false;\n      } else if (buffer) {\n        parser = exports.parse.text;\n      } else if (undefined === buffer) {\n        parser = exports.parse.image; // It's actually a generic Buffer\n        buffer = true;\n      }\n    }\n\n    // by default only buffer text/*, json and messed up thing from hell\n    if ((undefined === buffer && isText(mime)) || isJSON(mime)) {\n      buffer = true;\n    }\n\n    this._resBuffered = buffer;\n    let parserHandlesEnd = false;\n    if (buffer) {\n      // Protectiona against zip bombs and other nuisance\n      let responseBytesLeft = this._maxResponseSize || 200_000_000;\n      res.on('data', (buf) => {\n        responseBytesLeft -= buf.byteLength || buf.length > 0 ? 
buf.length : 0;\n        if (responseBytesLeft < 0) {\n          // This will propagate through error event\n          const error = new Error('Maximum response size reached');\n          error.code = 'ETOOLARGE';\n          // Parsers aren't required to observe error event,\n          // so would incorrectly report success\n          parserHandlesEnd = false;\n          // Will not emit error event\n          res.destroy(error);\n          // so we do callback now\n          this.callback(error, null);\n        }\n      });\n    }\n\n    if (parser) {\n      try {\n        // Unbuffered parsers are supposed to emit response early,\n        // which is weird BTW, because response.body won't be there.\n        parserHandlesEnd = buffer;\n\n        parser(res, (error, object, files) => {\n          if (this.timedout) {\n            // Timeout has already handled all callbacks\n            return;\n          }\n\n          // Intentional (non-timeout) abort is supposed to preserve partial response,\n          // even if it doesn't parse.\n          if (error && !this._aborted) {\n            return this.callback(error);\n          }\n\n          if (parserHandlesEnd) {\n            this.emit('end');\n            this.callback(null, this._emitResponse(object, files));\n          }\n        });\n      } catch (err) {\n        this.callback(err);\n        return;\n      }\n    }\n\n    this.res = res;\n\n    // unbuffered\n    if (!buffer) {\n      debug('unbuffered %s %s', this.method, this.url);\n      this.callback(null, this._emitResponse());\n      if (multipart) return; // allow multipart to handle end event\n      res.once('end', () => {\n        debug('end %s %s', this.method, this.url);\n        this.emit('end');\n      });\n      return;\n    }\n\n    // terminating events\n    res.once('error', (error) => {\n      parserHandlesEnd = false;\n      this.callback(error, null);\n    });\n    if (!parserHandlesEnd)\n      res.once('end', () => {\n        debug('end %s %s', this.method, this.url);\n        // TODO: unless buffering emit earlier to stream\n        this.emit('end');\n        this.callback(null, this._emitResponse());\n      });\n  });\n\n  this.emit('request', this);\n\n  const getProgressMonitor = () => {\n    const lengthComputable = true;\n    const total = req.getHeader('Content-Length');\n    let loaded = 0;\n\n    const progress = new Stream.Transform();\n    progress._transform = (chunk, encoding, callback) => {\n      loaded += chunk.length;\n      this.emit('progress', {\n        direction: 'upload',\n        lengthComputable,\n        loaded,\n        total\n      });\n      callback(null, chunk);\n    };\n\n    return progress;\n  };\n\n  const bufferToChunks = (buffer) => {\n    const chunkSize = 16 * 1024; // default highWaterMark value\n    const chunking = new Stream.Readable();\n    const totalLength = buffer.length;\n    const remainder = totalLength % chunkSize;\n    const cutoff = totalLength - remainder;\n\n    for (let i = 0; i < cutoff; i += chunkSize) {\n      const chunk = buffer.slice(i, i + chunkSize);\n      chunking.push(chunk);\n    }\n\n    if (remainder > 0) {\n      const remainderBuffer = buffer.slice(-remainder);\n      chunking.push(remainderBuffer);\n    }\n\n    chunking.push(null); // no more data\n\n    return chunking;\n  };\n\n  // if a FormData instance got created, then we send that as the request body\n  const formData = this._formData;\n  if (formData) {\n    // set headers\n    const headers = formData.getHeaders();\n    for 
(const i in headers) {\n      if (hasOwn(headers, i)) {\n        debug('setting FormData header: \"%s: %s\"', i, headers[i]);\n        req.setHeader(i, headers[i]);\n      }\n    }\n\n    // attempt to get \"Content-Length\" header\n    formData.getLength((error, length) => {\n      // TODO: Add chunked encoding when no length (if err)\n      if (error) debug('formData.getLength had error', error, length);\n\n      debug('got FormData Content-Length: %s', length);\n      if (typeof length === 'number') {\n        req.setHeader('Content-Length', length);\n      }\n\n      formData.pipe(getProgressMonitor()).pipe(req);\n    });\n  } else if (Buffer.isBuffer(data)) {\n    bufferToChunks(data).pipe(getProgressMonitor()).pipe(req);\n  } else {\n    req.end(data);\n  }\n};\n\n// Check whether response has a non-0-sized gzip-encoded body\nRequest.prototype._shouldUnzip = (res) => {\n  if (res.statusCode === 204 || res.statusCode === 304) {\n    // These aren't supposed to have any body\n    return false;\n  }\n\n  // header content is a string, and distinction between 0 and no information is crucial\n  if (res.headers['content-length'] === '0') {\n    // We know that the body is empty (unfortunately, this check does not cover chunked encoding)\n    return false;\n  }\n\n  // console.log(res);\n  return /^\\s*(?:deflate|gzip)\\s*$/.test(res.headers['content-encoding']);\n};\n\n/**\n * Overrides DNS for selected hostnames. Takes object mapping hostnames to IP addresses.\n *\n * When making a request to a URL with a hostname exactly matching a key in the object,\n * use the given IP address to connect, instead of using DNS to resolve the hostname.\n *\n * A special host `*` matches every hostname (keep redirects in mind!)\n *\n *      request.connect({\n *        'test.example.com': '127.0.0.1',\n *        'ipv6.example.com': '::1',\n *      })\n */\nRequest.prototype.connect = function (connectOverride) {\n  if (typeof connectOverride === 'string') {\n    this._connectOverride = { '*': connectOverride };\n  } else if (typeof connectOverride === 'object') {\n    this._connectOverride = connectOverride;\n  } else {\n    this._connectOverride = undefined;\n  }\n\n  return this;\n};\n\nRequest.prototype.trustLocalhost = function (toggle) {\n  this._trustLocalhost = toggle === undefined ? true : toggle;\n  return this;\n};\n\n// generate HTTP verb methods\nif (!methods.includes('del')) {\n  // create a copy so we don't cause conflicts with\n  // other packages using the methods package and\n  // npm 3.x\n  methods = [...methods];\n  methods.push('del');\n}\n\nfor (let method of methods) {\n  const name = method;\n  method = method === 'del' ? 
'delete' : method;\n\n  method = method.toUpperCase();\n  request[name] = (url, data, fn) => {\n    const request_ = request(method, url);\n    if (typeof data === 'function') {\n      fn = data;\n      data = null;\n    }\n\n    if (data) {\n      if (method === 'GET' || method === 'HEAD') {\n        request_.query(data);\n      } else {\n        request_.send(data);\n      }\n    }\n\n    if (fn) request_.end(fn);\n    return request_;\n  };\n}\n\n/**\n * Check if `mime` is text and should be buffered.\n *\n * @param {String} mime\n * @return {Boolean}\n * @api public\n */\n\nfunction isText(mime) {\n  const parts = mime.split('/');\n  let type = parts[0];\n  if (type) type = type.toLowerCase().trim();\n  let subtype = parts[1];\n  if (subtype) subtype = subtype.toLowerCase().trim();\n\n  return type === 'text' || subtype === 'x-www-form-urlencoded';\n}\n\nfunction isImageOrVideo(mime) {\n  let type = mime.split('/')[0];\n  if (type) type = type.toLowerCase().trim();\n\n  return type === 'image' || type === 'video';\n}\n\n/**\n * Check if `mime` is json or has +json structured syntax suffix.\n *\n * @param {String} mime\n * @return {Boolean}\n * @api private\n */\n\nfunction isJSON(mime) {\n  // should match /json or +json\n  // but not /json-seq\n  return /[/+]json($|[^-\\w])/i.test(mime);\n}\n\n/**\n * Check if we should follow the redirect `code`.\n *\n * @param {Number} code\n * @return {Boolean}\n * @api private\n */\n\nfunction isRedirect(code) {\n  return [301, 302, 303, 305, 307, 308].includes(code);\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AACA;AACA;AAEA;AACA,eAAmCA,OAAO,CAAC,KAAD,CAA1C;AAAA,IAAQC,KAAR,YAAQA,KAAR;AAAA,IAAeC,MAAf,YAAeA,MAAf;AAAA,IAAuBC,OAAvB,YAAuBA,OAAvB;;AACA,IAAMC,MAAM,GAAGJ,OAAO,CAAC,QAAD,CAAtB;;AACA,IAAMK,KAAK,GAAGL,OAAO,CAAC,OAAD,CAArB;;AACA,IAAMM,IAAI,GAAGN,OAAO,CAAC,MAAD,CAApB;;AACA,IAAMO,EAAE,GAAGP,OAAO,CAAC,IAAD,CAAlB;;AACA,IAAMQ,IAAI,GAAGR,OAAO,CAAC,MAAD,CAApB;;AACA,IAAMS,IAAI,GAAGT,OAAO,CAAC,MAAD,CAApB;;AACA,IAAMU,EAAE,GAAGV,OAAO,CAAC,IAAD,CAAlB;;AACA,IAAMW,IAAI,GAAGX,OAAO,CAAC,MAAD,CAApB;;AACA,IAAIY,OAAO,GAAGZ,OAAO,CAAC,SAAD,CAArB;;AACA,IAAMa,QAAQ,GAAGb,OAAO,CAAC,WAAD,CAAxB;;AACA,IAAMc,UAAU,GAAGd,OAAO,CAAC,YAAD,CAA1B;;AACA,IAAMe,KAAK,GAAGf,OAAO,CAAC,OAAD,CAAP,CAAiB,YAAjB,CAAd;;AACA,IAAMgB,SAAS,GAAGhB,OAAO,CAAC,WAAD,CAAzB;;AACA,IAAMiB,SAAS,GAAGjB,OAAO,CAAC,sBAAD,CAAzB;;AACA,IAAMkB,aAAa,GAAGlB,OAAO,CAAC,qBAAD,CAA7B;;AAEA,IAAMmB,KAAK,GAAGnB,OAAO,CAAC,UAAD,CAArB;;AACA,IAAMoB,WAAW,GAAGpB,OAAO,CAAC,iBAAD,CAA3B;;AACA,gBAAkBA,OAAO,CAAC,SAAD,CAAzB;AAAA,IAAQqB,KAAR,aAAQA,KAAR;;AACA,IAAMC,QAAQ,GAAGtB,OAAO,CAAC,YAAD,CAAxB;;AAEA,IAAQuB,KAAR,GAA0BJ,KAA1B,CAAQI,KAAR;AAAA,IAAeC,MAAf,GAA0BL,KAA1B,CAAeK,MAAf;AAEA,IAAIC,KAAJ;AAEA,IAAIR,SAAS,CAACS,OAAO,CAACC,OAAT,EAAkB,UAAlB,CAAb,EAA4CF,KAAK,GAAGzB,OAAO,CAAC,gBAAD,CAAf;;AAE5C,SAAS4B,OAAT,CAAiBC,MAAjB,EAAyBC,GAAzB,EAA8B;EAC5B;EACA,IAAI,OAAOA,GAAP,KAAe,UAAnB,EAA+B;IAC7B,OAAO,IAAIC,OAAO,CAACC,OAAZ,CAAoB,KAApB,EAA2BH,MAA3B,EAAmCI,GAAnC,CAAuCH,GAAvC,CAAP;EACD,CAJ2B,CAM5B;;;EACA,IAAII,SAAS,CAACC,MAAV,KAAqB,CAAzB,EAA4B;IAC1B,OAAO,IAAIJ,OAAO,CAACC,OAAZ,CAAoB,KAApB,EAA2BH,MAA3B,CAAP;EACD;;EAED,OAAO,IAAIE,OAAO,CAACC,OAAZ,CAAoBH,MAApB,EAA4BC,GAA5B,CAAP;AACD;;AAEDM,MAAM,CAACL,OAAP,GAAiBH,OAAjB;AACAG,OAAO,GAAGK,MAAM,CAACL,OAAjB;AAEA;AACA;AACA;;AAEAA,OAAO,CAACC,OAAR,GAAkBA,OAAlB;AAEA;AACA;AACA;;AAEAD,OAAO,CAACM,KAAR,GAAgBrC,OAAO,CAAC,SAAD,CAAvB;AAEA;AACA;AACA;;AAEA,SAASsC,IAAT,GAAgB,CAAE;AAElB;AACA;AACA;;;AAEAP,OAAO,CAACT,QAAR,GAAmBA,QAAnB;AAEA;AACA;AACA;;AAEAX,IAAI,CAAC4B,MAAL,CACE;EACE,qCAAqC,CAAC,MAAD,EAAS,YAAT,EAAuB,WAAvB;AADvC,CA
DF,EAIE,IAJF;AAOA;AACA;AACA;;AAEAR,OAAO,CAACS,SAAR,GAAoB;EAClB,SAASlC,IADS;EAElB,UAAUD,KAFQ;EAGlB,UAAUoB;AAHQ,CAApB;AAMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEAM,OAAO,CAACU,SAAR,GAAoB;EAClB,qCAAqC/B,EAAE,CAACgC,SADtB;EAElB,oBAAoBxB;AAFF,CAApB;AAKA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEAa,OAAO,CAAC9B,KAAR,GAAgBD,OAAO,CAAC,WAAD,CAAvB;AAEA;AACA;AACA;AACA;AACA;AACA;;AACA+B,OAAO,CAACY,MAAR,GAAiB,EAAjB;AAEA;AACA;AACA;AACA;AACA;AACA;;AACA,SAASC,YAAT,CAAsBC,QAAtB,EAAgC;EAC9BA,QAAQ,CAACC,OAAT,GAAmB,CACjB;EADiB,CAAnB;EAGAD,QAAQ,CAACE,MAAT,GAAkB,CAChB;EADgB,CAAlB;AAGD;AAED;AACA;AACA;AACA;AACA;AACA;AACA;;;AAEA,SAASf,OAAT,CAAiBH,MAAjB,EAAyBC,GAAzB,EAA8B;EAC5B1B,MAAM,CAAC4C,IAAP,CAAY,IAAZ;EACA,IAAI,OAAOlB,GAAP,KAAe,QAAnB,EAA6BA,GAAG,GAAG5B,MAAM,CAAC4B,GAAD,CAAZ;EAC7B,KAAKmB,YAAL,GAAoBC,OAAO,CAACxB,OAAO,CAACyB,GAAR,CAAYC,UAAb,CAA3B,CAH4B,CAGyB;;EACrD,KAAKC,MAAL,GAAc,KAAd;EACA,KAAKC,SAAL,GAAiB,IAAjB;EACA,KAAKzB,MAAL,GAAcA,MAAd;EACA,KAAKC,GAAL,GAAWA,GAAX;;EACAc,YAAY,CAAC,IAAD,CAAZ;;EACA,KAAKW,QAAL,GAAgB,IAAhB;EACA,KAAKC,UAAL,GAAkB,CAAlB;EACA,KAAKC,SAAL,CAAe5B,MAAM,KAAK,MAAX,GAAoB,CAApB,GAAwB,CAAvC;EACA,KAAK6B,OAAL,GAAe,EAAf;EACA,KAAKhD,EAAL,GAAU,EAAV;EACA,KAAKiD,MAAL,GAAc,EAAd;EACA,KAAKC,KAAL,GAAa,KAAKD,MAAlB,CAf4B,CAeF;;EAC1B,KAAKE,aAAL,GAAqB,EAArB;EACA,KAAKC,cAAL,GAAsB,KAAtB;EACA,KAAKC,OAAL,GAAeC,SAAf;EACA,KAAKC,IAAL,CAAU,KAAV,EAAiB,KAAKC,YAAL,CAAkBC,IAAlB,CAAuB,IAAvB,CAAjB;AACD;AAED;AACA;AACA;AACA;;;AACA1D,IAAI,CAAC2D,QAAL,CAAcpC,OAAd,EAAuB5B,MAAvB;AAEAmB,KAAK,CAACS,OAAO,CAACqC,SAAT,EAAoBjD,WAAW,CAACiD,SAAhC,CAAL;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEArC,OAAO,CAACqC,SAAR,CAAkB5C,KAAlB,GAA0B,UAAU6C,IAAV,EAAgB;EACxC,IAAIvC,OAAO,CAACS,SAAR,CAAkB,QAAlB,MAAgCwB,SAApC,EAA+C;IAC7C,MAAM,IAAIO,KAAJ,CACJ,4DADI,CAAN;EAGD;;EAED,KAAKtB,YAAL,GAAoBqB,IAAI,KAAKN,SAAT,GAAqB,IAArB,GAA4BM,IAAhD;EACA,OAAO,IAAP;AACD,CATD;AAWA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AAEAtC,OAAO,CAACqC,SAAR,CAAkBG,MAAlB,GAA2B,UAAUC,KAAV,EAAiBC,IAAjB,EAAuBC,OAAvB,EAAgC;EAAA;;EACzD,IAAID,IAAJ,EAAU;IACR,IAAI,KAAKE,KAAT,EAAgB;MACd,MAAM,IAAIL,KAAJ,CAAU,4CAAV,CAAN;IACD;;IAED,IAAIM,CAAC,GAAGF,OAAO,IAAI,EAAnB;;IACA,IAAI,OAAOA,OAAP,KAAmB,QAAvB,EAAiC;MAC/BE,CAAC,GAAG;QAAEC,QAAQ,EAAEH;MAAZ,CAAJ;IACD;;IAED,IAAI,OAAOD,IAAP,KAAgB,QAApB,EAA8B;MAC5B,IAAI,CAACG,CAAC,CAACC,QAAP,EAAiBD,CAAC,CAACC,QAAF,GAAaJ,IAAb;MACjB3D,KAAK,CAAC,gDAAD,EAAmD2D,IAAnD,CAAL;MACAA,IAAI,GAAGnE,EAAE,CAACwE,gBAAH,CAAoBL,IAApB,CAAP;MACAA,IAAI,CAACM,EAAL,CAAQ,OAAR,EAAiB,UAACC,KAAD,EAAW;QAC1B,IAAMC,QAAQ,GAAG,KAAI,CAACC,YAAL,EAAjB;;QACAD,QAAQ,CAACE,IAAT,CAAc,OAAd,EAAuBH,KAAvB;MACD,CAHD;IAID,CARD,MAQO,IAAI,CAACJ,CAAC,CAACC,QAAH,IAAeJ,IAAI,CAACW,IAAxB,EAA8B;MACnCR,CAAC,CAACC,QAAF,GAAaJ,IAAI,CAACW,IAAlB;IACD;;IAED,KAAKF,YAAL,GAAoBG,MAApB,CAA2Bb,KAA3B,EAAkCC,IAAlC,EAAwCG,CAAxC;EACD;;EAED,OAAO,IAAP;AACD,CA3BD;;AA6BA7C,OAAO,CAACqC,SAAR,CAAkBc,YAAlB,GAAiC,YAAY;EAAA;;EAC3C,IAAI,CAAC,KAAK7B,SAAV,EAAqB;IACnB,KAAKA,SAAL,GAAiB,IAAIzC,QAAJ,EAAjB;;IACA,KAAKyC,SAAL,CAAe0B,EAAf,CAAkB,OAAlB,EAA2B,UAACC,KAAD,EAAW;MACpClE,KAAK,CAAC,gBAAD,EAAmBkE,KAAnB,CAAL;;MACA,IAAI,MAAI,CAACM,MAAT,EAAiB;QACf;QACA;QACA;MACD;;MAED,MAAI,CAACC,QAAL,CAAcP,KAAd;;MACA,MAAI,CAACQ,KAAL;IACD,CAVD;EAWD;;EAED,OAAO,KAAKnC,SAAZ;AACD,CAjBD;AAmBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AAEAtB,OAAO,CAACqC,SAAR,CAAkBhC,KAAlB,GAA0B,UAAUA,KAAV,EAAiB;EACzC,IAAIH,SAAS,CAACC,MAAV,KAAqB,CAAzB,EAA4B,OAAO,KAAKkB,MAAZ;EAC5B,KAAKA,MAAL,GAAchB,KAAd;EACA,OAAO,IAAP;AACD,CAJD
;AAMA;AACA;AACA;AACA;AACA;AACA;AACA;;;AAEAL,OAAO,CAACqC,SAAR,CAAkBqB,MAAlB,GAA2B,UAAUA,MAAV,EAAkB;EAC3C,IAAIxD,SAAS,CAACC,MAAV,KAAqB,CAAzB,EAA4B,OAAO,KAAK4B,OAAZ;EAC5B,KAAKA,OAAL,GAAe2B,MAAf;EACA,OAAO,IAAP;AACD,CAJD;AAMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AAEA1D,OAAO,CAACqC,SAAR,CAAkBsB,IAAlB,GAAyB,UAAUA,IAAV,EAAgB;EACvC,OAAO,KAAKC,GAAL,CACL,cADK,EAELD,IAAI,CAACE,QAAL,CAAc,GAAd,IAAqBF,IAArB,GAA4BhF,IAAI,CAACmF,OAAL,CAAaH,IAAb,CAFvB,CAAP;AAID,CALD;AAOA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AAEA3D,OAAO,CAACqC,SAAR,CAAkB0B,MAAlB,GAA2B,UAAUJ,IAAV,EAAgB;EACzC,OAAO,KAAKC,GAAL,CAAS,QAAT,EAAmBD,IAAI,CAACE,QAAL,CAAc,GAAd,IAAqBF,IAArB,GAA4BhF,IAAI,CAACmF,OAAL,CAAaH,IAAb,CAA/C,CAAP;AACD,CAFD;AAIA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AAEA3D,OAAO,CAACqC,SAAR,CAAkB2B,KAAlB,GAA0B,UAAUC,KAAV,EAAiB;EACzC,IAAI,OAAOA,KAAP,KAAiB,QAArB,EAA+B;IAC7B,KAAKtC,MAAL,CAAYuC,IAAZ,CAAiBD,KAAjB;EACD,CAFD,MAEO;IACLE,MAAM,CAACC,MAAP,CAAc,KAAK1F,EAAnB,EAAuBuF,KAAvB;EACD;;EAED,OAAO,IAAP;AACD,CARD;AAUA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AAEAjE,OAAO,CAACqC,SAAR,CAAkBgC,KAAlB,GAA0B,UAAUC,IAAV,EAAgBC,QAAhB,EAA0B;EAClD,IAAM1D,QAAQ,GAAG,KAAKjB,OAAL,EAAjB;;EACA,IAAI,CAAC,KAAKkC,cAAV,EAA0B;IACxB,KAAKA,cAAL,GAAsB,IAAtB;EACD;;EAED,OAAOjB,QAAQ,CAACwD,KAAT,CAAeC,IAAf,EAAqBC,QAArB,CAAP;AACD,CAPD;AASA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AAEAvE,OAAO,CAACqC,SAAR,CAAkBmC,IAAlB,GAAyB,UAAUC,MAAV,EAAkB9B,OAAlB,EAA2B;EAClD,KAAK+B,KAAL,GAAa,IAAb,CADkD,CAC/B;;EACnB,KAAK/D,MAAL,CAAY,KAAZ;EACA,KAAKV,GAAL;EACA,OAAO,KAAK0E,aAAL,CAAmBF,MAAnB,EAA2B9B,OAA3B,CAAP;AACD,CALD;;AAOA3C,OAAO,CAACqC,SAAR,CAAkBsC,aAAlB,GAAkC,UAAUF,MAAV,EAAkB9B,OAAlB,EAA2B;EAAA;;EAC3D,KAAKiC,GAAL,CAAS3C,IAAT,CAAc,UAAd,EAA0B,UAAC4C,GAAD,EAAS;IACjC;IACA,IACEC,UAAU,CAACD,GAAG,CAACE,UAAL,CAAV,IACA,MAAI,CAACvD,UAAL,OAAsB,MAAI,CAACwD,aAF7B,EAGE;MACA,OAAO,MAAI,CAACC,SAAL,CAAeJ,GAAf,MAAwB,MAAxB,GACH,MAAI,CAACF,aAAL,CAAmBF,MAAnB,EAA2B9B,OAA3B,CADG,GAEHX,SAFJ;IAGD;;IAED,MAAI,CAAC6C,GAAL,GAAWA,GAAX;;IACA,MAAI,CAACK,aAAL;;IACA,IAAI,MAAI,CAACC,QAAT,EAAmB;;IAEnB,IAAI,MAAI,CAACC,YAAL,CAAkBP,GAAlB,CAAJ,EAA4B;MAC1B,IAAMQ,WAAW,GAAG7G,IAAI,CAAC8G,WAAL,EAApB;MACAD,WAAW,CAACrC,EAAZ,CAAe,OAAf,EAAwB,UAACC,KAAD,EAAW;QACjC,IAAIA,KAAK,IAAIA,KAAK,CAACsC,IAAN,KAAe,aAA5B,EAA2C;UACzC;UACAd,MAAM,CAACrB,IAAP,CAAY,KAAZ;UACA;QACD;;QAEDqB,MAAM,CAACrB,IAAP,CAAY,OAAZ,EAAqBH,KAArB;MACD,CARD;MASA4B,GAAG,CAACL,IAAJ,CAASa,WAAT,EAAsBb,IAAtB,CAA2BC,MAA3B,EAAmC9B,OAAnC;IACD,CAZD,MAYO;MACLkC,GAAG,CAACL,IAAJ,CAASC,MAAT,EAAiB9B,OAAjB;IACD;;IAEDkC,GAAG,CAAC5C,IAAJ,CAAS,KAAT,EAAgB,YAAM;MACpB,MAAI,CAACmB,IAAL,CAAU,KAAV;IACD,CAFD;EAGD,CAlCD;EAmCA,OAAOqB,MAAP;AACD,CArCD;AAuCA;AACA;AACA;AACA;AACA;AACA;AACA;;;AAEAzE,OAAO,CAACqC,SAAR,CAAkB1B,MAAlB,GAA2B,UAAUsD,KAAV,EAAiB;EAC1C,KAAKuB,OAAL,GAAevB,KAAK,KAAK,KAAzB;EACA,OAAO,IAAP;AACD,CAHD;AAKA;AACA;AACA;AACA;AACA;AACA;AACA;;;AAEAjE,OAAO,CAACqC,SAAR,CAAkB4C,SAAlB,GAA8B,UAAUJ,GAAV,EAAe;EAC3C,IAAI/E,GAAG,GAAG+E,GAAG,CAACY,OAAJ,CAAYC,QAAtB;;EACA,IAAI,CAAC5F,GAAL,EAAU;IACR,OAAO,KAAK0D,QAAL,CAAc,IAAIjB,KAAJ,CAAU,iCAAV,CAAd,EAA4DsC,GAA5D,CAAP;EACD;;EAED9F,KAAK,CAAC,mBAAD,EAAsB,KAAKe,GAA3B,EAAgCA,GAAhC,CAAL,CAN2C,CAQ3C;;EACAA,GAAG,GAAG3B,OAAO,CAAC,KAAK2B,GAAN,EAAWA,GAAX,CAAb,CAT2C,CAW3C;EACA;;EACA+E,GAAG,CAACc,MAAJ;EAEA,IAAIF,OAAO,GAAG,KAAKb,GAAL,CAASgB,UAAT,GAAsB,KAAKhB,GAAL,CAASgB,UAAT,EAAtB,GAA8C,KAAKhB,GAAL,CAASiB,QAArE;EAEA,IAAMC,aAAa,GAAG7H,KAAK,CAAC6B,GAAD,CAAL,CAAWiG,IAAX,KAAoB9H,KAAK,CAAC,KAAK6B,GAAN,CAAL,CAAgBiG,IAA1D,CAjB2C,
CAmB3C;;EACA,IAAIlB,GAAG,CAACE,UAAJ,KAAmB,GAAnB,IAA0BF,GAAG,CAACE,UAAJ,KAAmB,GAAjD,EAAsD;IACpD;IACA;IACAU,OAAO,GAAGtG,KAAK,CAAC6G,WAAN,CAAkBP,OAAlB,EAA2BK,aAA3B,CAAV,CAHoD,CAKpD;;IACA,KAAKjG,MAAL,GAAc,KAAKA,MAAL,KAAgB,MAAhB,GAAyB,MAAzB,GAAkC,KAAhD,CANoD,CAQpD;;IACA,KAAK+C,KAAL,GAAa,IAAb;EACD,CA9B0C,CAgC3C;;;EACA,IAAIiC,GAAG,CAACE,UAAJ,KAAmB,GAAvB,EAA4B;IAC1B;IACA;IACAU,OAAO,GAAGtG,KAAK,CAAC6G,WAAN,CAAkBP,OAAlB,EAA2BK,aAA3B,CAAV,CAH0B,CAK1B;;IACA,KAAKjG,MAAL,GAAc,KAAd,CAN0B,CAQ1B;;IACA,KAAK+C,KAAL,GAAa,IAAb;EACD,CA3C0C,CA6C3C;EACA;;;EACA,OAAO6C,OAAO,CAACM,IAAf;EAEA,OAAO,KAAKnB,GAAZ;EACA,OAAO,KAAKtD,SAAZ,CAlD2C,CAoD3C;;EACAV,YAAY,CAAC,IAAD,CAAZ,CArD2C,CAuD3C;;;EACA,KAAKqF,UAAL,GAAkB,KAAlB;EACA,KAAKnG,GAAL,GAAWA,GAAX;EACA,KAAKpB,EAAL,GAAU,EAAV;EACA,KAAKiD,MAAL,CAAYxB,MAAZ,GAAqB,CAArB;EACA,KAAKyD,GAAL,CAAS6B,OAAT;EACA,KAAKrC,IAAL,CAAU,UAAV,EAAsByB,GAAtB;;EACA,KAAKhD,aAAL,CAAmBqC,IAAnB,CAAwB,KAAKpE,GAA7B;;EACA,KAAKG,GAAL,CAAS,KAAKiG,SAAd;EACA,OAAO,IAAP;AACD,CAjED;AAmEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AAEAlG,OAAO,CAACqC,SAAR,CAAkB8D,IAAlB,GAAyB,UAAUC,IAAV,EAAgBC,IAAhB,EAAsB1D,OAAtB,EAA+B;EACtD,IAAIzC,SAAS,CAACC,MAAV,KAAqB,CAAzB,EAA4BkG,IAAI,GAAG,EAAP;;EAC5B,IAAI,QAAOA,IAAP,MAAgB,QAAhB,IAA4BA,IAAI,KAAK,IAAzC,EAA+C;IAC7C;IACA1D,OAAO,GAAG0D,IAAV;IACAA,IAAI,GAAG,EAAP;EACD;;EAED,IAAI,CAAC1D,OAAL,EAAc;IACZA,OAAO,GAAG;MAAEgB,IAAI,EAAE;IAAR,CAAV;EACD;;EAED,IAAM2C,OAAO,GAAG,SAAVA,OAAU,CAACC,MAAD;IAAA,OAAYC,MAAM,CAACC,IAAP,CAAYF,MAAZ,EAAoBG,QAApB,CAA6B,QAA7B,CAAZ;EAAA,CAAhB;;EAEA,OAAO,KAAKC,KAAL,CAAWP,IAAX,EAAiBC,IAAjB,EAAuB1D,OAAvB,EAAgC2D,OAAhC,CAAP;AACD,CAfD;AAiBA;AACA;AACA;AACA;AACA;AACA;AACA;;;AAEAtG,OAAO,CAACqC,SAAR,CAAkBuE,EAAlB,GAAuB,UAAUC,IAAV,EAAgB;EACrC,KAAKC,GAAL,GAAWD,IAAX;EACA,OAAO,IAAP;AACD,CAHD;AAKA;AACA;AACA;AACA;AACA;AACA;AACA;;;AAEA7G,OAAO,CAACqC,SAAR,CAAkB0E,GAAlB,GAAwB,UAAUF,IAAV,EAAgB;EACtC,KAAKG,IAAL,GAAYH,IAAZ;EACA,OAAO,IAAP;AACD,CAHD;AAKA;AACA;AACA;AACA;AACA;AACA;AACA;;;AAEA7G,OAAO,CAACqC,SAAR,CAAkB4E,GAAlB,GAAwB,UAAUJ,IAAV,EAAgB;EACtC,IAAI,QAAOA,IAAP,MAAgB,QAAhB,IAA4B,CAACL,MAAM,CAACU,QAAP,CAAgBL,IAAhB,CAAjC,EAAwD;IACtD,KAAKM,IAAL,GAAYN,IAAI,CAACI,GAAjB;IACA,KAAKG,WAAL,GAAmBP,IAAI,CAACQ,UAAxB;EACD,CAHD,MAGO;IACL,KAAKF,IAAL,GAAYN,IAAZ;EACD;;EAED,OAAO,IAAP;AACD,CATD;AAWA;AACA;AACA;AACA;AACA;AACA;AACA;;;AAEA7G,OAAO,CAACqC,SAAR,CAAkBwE,IAAlB,GAAyB,UAAUA,IAAV,EAAgB;EACvC,KAAKS,KAAL,GAAaT,IAAb;EACA,OAAO,IAAP;AACD,CAHD;AAKA;AACA;AACA;AACA;AACA;AACA;AACA;;;AAEA7G,OAAO,CAACqC,SAAR,CAAkBkF,eAAlB,GAAoC,YAAY;EAC9C,KAAKC,gBAAL,GAAwB,IAAxB;EACA,OAAO,IAAP;AACD,CAHD;AAKA;AACA;AACA;AACA;AACA;AACA;AAEA;;;AACAxH,OAAO,CAACqC,SAAR,CAAkBzC,OAAlB,GAA4B,YAAY;EAAA;;EACtC,IAAI,KAAKgF,GAAT,EAAc,OAAO,KAAKA,GAAZ;EAEd,IAAMjC,OAAO,GAAG,EAAhB;;EAEA,IAAI;IACF,IAAMqB,KAAK,GAAGtF,EAAE,CAACgC,SAAH,CAAa,KAAKhC,EAAlB,EAAsB;MAClC+I,OAAO,EAAE,KADyB;MAElCC,kBAAkB,EAAE;IAFc,CAAtB,CAAd;;IAIA,IAAI1D,KAAJ,EAAW;MACT,KAAKtF,EAAL,GAAU,EAAV;;MACA,KAAKiD,MAAL,CAAYuC,IAAZ,CAAiBF,KAAjB;IACD;;IAED,KAAK2D,oBAAL;EACD,CAXD,CAWE,OAAOC,GAAP,EAAY;IACZ,OAAO,KAAKxE,IAAL,CAAU,OAAV,EAAmBwE,GAAnB,CAAP;EACD;;EAED,IAAM9H,GAAN,GAAc,IAAd,CAAMA,GAAN;EACA,IAAM+H,OAAO,GAAG,KAAKC,QAArB,CArBsC,CAuBtC;EACA;EACA;;EACA,IAAIC,oBAAJ;;EACA,IAAIjI,GAAG,CAAC+D,QAAJ,CAAa,GAAb,CAAJ,EAAuB;IACrB,IAAMmE,eAAe,GAAGlI,GAAG,CAACmI,OAAJ,CAAY,GAAZ,CAAxB;;IAEA,IAAID,eAAe,KAAK,CAAC,CAAzB,EAA4B;MAC1B,IAAME,WAAW,GAAGpI,GAAG,CAACqI,KAAJ,CAAUH,eAAe,GAAG,CAA5B,CAApB;MACAD,oBAAoB,GAAGG,WAAW,CAACE,KAAZ,CAAkB,QAAlB,CAAvB;IACD;EACF,CAlCqC,CAoCtC;;;EACA,IAAItI,GAAG,CAACmI,OAAJ,CAAY,MAAZ,MAAwB,CAA5B,EAA+BnI,GAAG,oBAAaA,GAAb,CAAH;EAC/BA,GAA
G,GAAG7B,KAAK,CAAC6B,GAAD,CAAX,CAtCsC,CAwCtC;;EACA,IAAIiI,oBAAJ,EAA0B;IACxB,IAAIM,CAAC,GAAG,CAAR;IACAvI,GAAG,CAACkE,KAAJ,GAAYlE,GAAG,CAACkE,KAAJ,CAAUsE,OAAV,CAAkB,MAAlB,EAA0B;MAAA,OAAMP,oBAAoB,CAACM,CAAC,EAAF,CAA1B;IAAA,CAA1B,CAAZ;IACAvI,GAAG,CAACyI,MAAJ,cAAiBzI,GAAG,CAACkE,KAArB;IACAlE,GAAG,CAACuD,IAAJ,GAAWvD,GAAG,CAAC0I,QAAJ,GAAe1I,GAAG,CAACyI,MAA9B;EACD,CA9CqC,CAgDtC;;;EACA,IAAI,iBAAiBE,IAAjB,CAAsB3I,GAAG,CAAC4I,QAA1B,MAAwC,IAA5C,EAAkD;IAChD;IACA5I,GAAG,CAAC4I,QAAJ,aAAkB5I,GAAG,CAAC4I,QAAJ,CAAaC,KAAb,CAAmB,GAAnB,EAAwB,CAAxB,CAAlB,OAFgD,CAIhD;;IACA,IAAMC,SAAS,GAAG9I,GAAG,CAACuD,IAAJ,CAAS+E,KAAT,CAAe,eAAf,CAAlB;IACAzF,OAAO,CAACkG,UAAR,GAAqBD,SAAS,CAAC,CAAD,CAAT,CAAaN,OAAb,CAAqB,MAArB,EAA6B,GAA7B,CAArB;IACAxI,GAAG,CAACuD,IAAJ,GAAWuF,SAAS,CAAC,CAAD,CAApB;EACD,CAzDqC,CA2DtC;;;EACA,IAAI,KAAKE,gBAAT,EAA2B;IACzB,WAAqBhJ,GAArB;IAAA,IAAQiJ,QAAR,QAAQA,QAAR;IACA,IAAMX,KAAK,GACTW,QAAQ,IAAI,KAAKD,gBAAjB,GACI,KAAKA,gBAAL,CAAsBC,QAAtB,CADJ,GAEI,KAAKD,gBAAL,CAAsB,GAAtB,CAHN;;IAIA,IAAIV,KAAJ,EAAW;MACT;MACA,IAAI,CAAC,KAAKtH,OAAL,CAAaiF,IAAlB,EAAwB;QACtB,KAAKnC,GAAL,CAAS,MAAT,EAAiB9D,GAAG,CAACiG,IAArB;MACD;;MAED,IAAIiD,OAAJ;MACA,IAAIC,OAAJ;;MAEA,IAAI,QAAOb,KAAP,MAAiB,QAArB,EAA+B;QAC7BY,OAAO,GAAGZ,KAAK,CAACrC,IAAhB;QACAkD,OAAO,GAAGb,KAAK,CAACc,IAAhB;MACD,CAHD,MAGO;QACLF,OAAO,GAAGZ,KAAV;QACAa,OAAO,GAAGnJ,GAAG,CAACoJ,IAAd;MACD,CAfQ,CAiBT;;;MACApJ,GAAG,CAACiG,IAAJ,GAAW,IAAI0C,IAAJ,CAASO,OAAT,eAAwBA,OAAxB,SAAqCA,OAAhD;;MACA,IAAIC,OAAJ,EAAa;QACXnJ,GAAG,CAACiG,IAAJ,eAAgBkD,OAAhB;QACAnJ,GAAG,CAACoJ,IAAJ,GAAWD,OAAX;MACD;;MAEDnJ,GAAG,CAACiJ,QAAJ,GAAeC,OAAf;IACD;EACF,CA5FqC,CA8FtC;;;EACArG,OAAO,CAAC9C,MAAR,GAAiB,KAAKA,MAAtB;EACA8C,OAAO,CAACuG,IAAR,GAAepJ,GAAG,CAACoJ,IAAnB;EACAvG,OAAO,CAACU,IAAR,GAAevD,GAAG,CAACuD,IAAnB;EACAV,OAAO,CAACoD,IAAR,GAAejG,GAAG,CAACiJ,QAAnB;EACApG,OAAO,CAACiE,EAAR,GAAa,KAAKE,GAAlB;EACAnE,OAAO,CAACoE,GAAR,GAAc,KAAKC,IAAnB;EACArE,OAAO,CAACsE,GAAR,GAAc,KAAKE,IAAnB;EACAxE,OAAO,CAACkE,IAAR,GAAe,KAAKS,KAApB;EACA3E,OAAO,CAAC0E,UAAR,GAAqB,KAAKD,WAA1B;EACAzE,OAAO,CAACtC,KAAR,GAAgB,KAAKgB,MAArB;EACAsB,OAAO,CAACe,MAAR,GAAiB,KAAK3B,OAAtB;EACAY,OAAO,CAACwG,kBAAR,GACE,OAAO,KAAK3B,gBAAZ,KAAiC,SAAjC,GACI,CAAC,KAAKA,gBADV,GAEI9H,OAAO,CAACyB,GAAR,CAAYiI,4BAAZ,KAA6C,GAHnD,CA1GsC,CA+GtC;;EACA,IAAI,KAAKtI,OAAL,CAAaiF,IAAjB,EAAuB;IACrBpD,OAAO,CAAC0G,UAAR,GAAqB,KAAKvI,OAAL,CAAaiF,IAAb,CAAkBuC,OAAlB,CAA0B,OAA1B,EAAmC,EAAnC,CAArB;EACD;;EAED,IACE,KAAKgB,eAAL,IACA,4CAA4Cb,IAA5C,CAAiD3I,GAAG,CAACiJ,QAArD,CAFF,EAGE;IACApG,OAAO,CAACwG,kBAAR,GAA6B,KAA7B;EACD,CAzHqC,CA2HtC;;;EACA,IAAMI,OAAO,GAAG,KAAKtI,YAAL,GACZlB,OAAO,CAACS,SAAR,CAAkB,QAAlB,EAA4BgJ,WAA5B,CAAwC1J,GAAG,CAAC4I,QAA5C,CADY,GAEZ3I,OAAO,CAACS,SAAR,CAAkBV,GAAG,CAAC4I,QAAtB,CAFJ,CA5HsC,CAgItC;;EACA,KAAK9D,GAAL,GAAW2E,OAAO,CAAC3J,OAAR,CAAgB+C,OAAhB,CAAX;EACA,IAAQiC,GAAR,GAAgB,IAAhB,CAAQA,GAAR,CAlIsC,CAoItC;;EACAA,GAAG,CAAC6E,UAAJ,CAAe,IAAf;;EAEA,IAAI9G,OAAO,CAAC9C,MAAR,KAAmB,MAAvB,EAA+B;IAC7B+E,GAAG,CAAC8E,SAAJ,CAAc,iBAAd,EAAiC,eAAjC;EACD;;EAED,KAAKhB,QAAL,GAAgB5I,GAAG,CAAC4I,QAApB;EACA,KAAK3C,IAAL,GAAYjG,GAAG,CAACiG,IAAhB,CA5IsC,CA8ItC;;EACAnB,GAAG,CAAC3C,IAAJ,CAAS,OAAT,EAAkB,YAAM;IACtB,MAAI,CAACmB,IAAL,CAAU,OAAV;EACD,CAFD;EAIAwB,GAAG,CAAC5B,EAAJ,CAAO,OAAP,EAAgB,UAACC,KAAD,EAAW;IACzB;IACA;IACA;IACA,IAAI,MAAI,CAACkC,QAAT,EAAmB,OAJM,CAKzB;IACA;;IACA,IAAI,MAAI,CAAC2C,QAAL,KAAkBD,OAAtB,EAA+B,OAPN,CAQzB;IACA;;IACA,IAAI,MAAI,CAAC8B,QAAT,EAAmB;;IACnB,MAAI,CAACnG,QAAL,CAAcP,KAAd;EACD,CAZD,EAnJsC,CAiKtC;;EACA,IAAInD,GAAG,CAACqG,IAAR,EAAc;IACZ,IAAMA,IAAI,GAAGrG,GAAG,CAACqG,IAAJ,CAASwC,KAAT,CAAe,GAAf,CAAb;IACA,KAAKxC,IAAL,CAAUA,IAAI,CAAC,CAAD,CAAd,EAAmBA,IAAI,CAAC,CAAD,CAAvB;
EACD;;EAED,IAAI,KAAKyD,QAAL,IAAiB,KAAKC,QAA1B,EAAoC;IAClC,KAAK1D,IAAL,CAAU,KAAKyD,QAAf,EAAyB,KAAKC,QAA9B;EACD;;EAED,KAAK,IAAM9C,GAAX,IAAkB,KAAKhG,MAAvB,EAA+B;IAC7B,IAAIvB,MAAM,CAAC,KAAKuB,MAAN,EAAcgG,GAAd,CAAV,EAA8BnC,GAAG,CAAC8E,SAAJ,CAAc3C,GAAd,EAAmB,KAAKhG,MAAL,CAAYgG,GAAZ,CAAnB;EAC/B,CA7KqC,CA+KtC;;;EACA,IAAI,KAAKrF,OAAT,EAAkB;IAChB,IAAIlC,MAAM,CAAC,KAAKsB,OAAN,EAAe,QAAf,CAAV,EAAoC;MAClC;MACA,IAAMgJ,YAAY,GAAG,IAAI9K,SAAS,CAACA,SAAd,EAArB;MACA8K,YAAY,CAACC,UAAb,CAAwB,KAAKjJ,OAAL,CAAakJ,MAAb,CAAoBrB,KAApB,CAA0B,GAA1B,CAAxB;MACAmB,YAAY,CAACC,UAAb,CAAwB,KAAKrI,OAAL,CAAaiH,KAAb,CAAmB,GAAnB,CAAxB;MACA/D,GAAG,CAAC8E,SAAJ,CACE,QADF,EAEEI,YAAY,CAACG,UAAb,CAAwBjL,SAAS,CAACkL,gBAAV,CAA2BC,GAAnD,EAAwDC,aAAxD,EAFF;IAID,CATD,MASO;MACLxF,GAAG,CAAC8E,SAAJ,CAAc,QAAd,EAAwB,KAAKhI,OAA7B;IACD;EACF;;EAED,OAAOkD,GAAP;AACD,CAhMD;AAkMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AAEA5E,OAAO,CAACqC,SAAR,CAAkBmB,QAAlB,GAA6B,UAAUP,KAAV,EAAiB4B,GAAjB,EAAsB;EACjD,IAAI,KAAKwF,YAAL,CAAkBpH,KAAlB,EAAyB4B,GAAzB,CAAJ,EAAmC;IACjC,OAAO,KAAKyF,MAAL,EAAP;EACD,CAHgD,CAKjD;;;EACA,IAAMC,EAAE,GAAG,KAAKrE,SAAL,IAAkB5F,IAA7B;EACA,KAAK4B,YAAL;EACA,IAAI,KAAKqB,MAAT,EAAiB,OAAOiH,OAAO,CAACC,IAAR,CAAa,iCAAb,CAAP;EACjB,KAAKlH,MAAL,GAAc,IAAd;;EAEA,IAAI,CAACN,KAAL,EAAY;IACV,IAAI;MACF,IAAI,CAAC,KAAKyH,aAAL,CAAmB7F,GAAnB,CAAL,EAA8B;QAC5B,IAAI8F,OAAO,GAAG,4BAAd;;QACA,IAAI9F,GAAJ,EAAS;UACP8F,OAAO,GAAGrM,IAAI,CAACsM,YAAL,CAAkB/F,GAAG,CAACgG,MAAtB,KAAiCF,OAA3C;QACD;;QAED1H,KAAK,GAAG,IAAIV,KAAJ,CAAUoI,OAAV,CAAR;QACA1H,KAAK,CAAC4H,MAAN,GAAehG,GAAG,GAAGA,GAAG,CAACgG,MAAP,GAAgB7I,SAAlC;MACD;IACF,CAVD,CAUE,OAAO4F,GAAP,EAAY;MACZ3E,KAAK,GAAG2E,GAAR;IACD;EACF,CAzBgD,CA2BjD;EACA;;;EACA,IAAI,CAAC3E,KAAL,EAAY;IACV,OAAOsH,EAAE,CAAC,IAAD,EAAO1F,GAAP,CAAT;EACD;;EAED5B,KAAK,CAAC0G,QAAN,GAAiB9E,GAAjB;EACA,IAAI,KAAKiG,WAAT,EAAsB7H,KAAK,CAAC4E,OAAN,GAAgB,KAAKC,QAAL,GAAgB,CAAhC,CAlC2B,CAoCjD;EACA;;EACA,IAAI7E,KAAK,IAAI,KAAK8H,SAAL,CAAe,OAAf,EAAwB5K,MAAxB,GAAiC,CAA9C,EAAiD;IAC/C,KAAKiD,IAAL,CAAU,OAAV,EAAmBH,KAAnB;EACD;;EAEDsH,EAAE,CAACtH,KAAD,EAAQ4B,GAAR,CAAF;AACD,CA3CD;AA6CA;AACA;AACA;AACA;AACA;AACA;AACA;;;AACA7E,OAAO,CAACqC,SAAR,CAAkB2I,OAAlB,GAA4B,UAAUC,MAAV,EAAkB;EAC5C,OACEzE,MAAM,CAACU,QAAP,CAAgB+D,MAAhB,KACAA,MAAM,YAAY7M,MADlB,IAEA6M,MAAM,YAAYpM,QAHpB;AAKD,CAND;AAQA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AAEAmB,OAAO,CAACqC,SAAR,CAAkB6C,aAAlB,GAAkC,UAAUgG,IAAV,EAAgBC,KAAhB,EAAuB;EACvD,IAAMxB,QAAQ,GAAG,IAAIrK,QAAJ,CAAa,IAAb,CAAjB;EACA,KAAKqK,QAAL,GAAgBA,QAAhB;EACAA,QAAQ,CAAClI,SAAT,GAAqB,KAAKI,aAA1B;;EACA,IAAIG,SAAS,KAAKkJ,IAAlB,EAAwB;IACtBvB,QAAQ,CAACuB,IAAT,GAAgBA,IAAhB;EACD;;EAEDvB,QAAQ,CAACwB,KAAT,GAAiBA,KAAjB;;EACA,IAAI,KAAKlF,UAAT,EAAqB;IACnB0D,QAAQ,CAACnF,IAAT,GAAgB,YAAY;MAC1B,MAAM,IAAIjC,KAAJ,CACJ,iEADI,CAAN;IAGD,CAJD;EAKD;;EAED,KAAKa,IAAL,CAAU,UAAV,EAAsBuG,QAAtB;EACA,OAAOA,QAAP;AACD,CAnBD;;AAqBA3J,OAAO,CAACqC,SAAR,CAAkBpC,GAAlB,GAAwB,UAAUsK,EAAV,EAAc;EACpC,KAAK3K,OAAL;EACAb,KAAK,CAAC,OAAD,EAAU,KAAKc,MAAf,EAAuB,KAAKC,GAA5B,CAAL;;EAEA,IAAI,KAAKmG,UAAT,EAAqB;IACnB,MAAM,IAAI1D,KAAJ,CACJ,8DADI,CAAN;EAGD;;EAED,KAAK0D,UAAL,GAAkB,IAAlB,CAVoC,CAYpC;;EACA,KAAKC,SAAL,GAAiBqE,EAAE,IAAIjK,IAAvB;;EAEA,KAAK8K,IAAL;AACD,CAhBD;;AAkBApL,OAAO,CAACqC,SAAR,CAAkB+I,IAAlB,GAAyB,YAAY;EAAA;;EACnC,IAAI,KAAKjG,QAAT,EACE,OAAO,KAAK3B,QAAL,CACL,IAAIjB,KAAJ,CAAU,4DAAV,CADK,CAAP;EAIF,IAAI+B,IAAI,GAAG,KAAK1B,KAAhB;EACA,IAAQgC,GAAR,GAAgB,IAAhB,CAAQA,GAAR;EACA,IAAQ/E,MAAR,GAAmB,IAAnB,CAAQA,MAAR;;EAEA,KAAKwL,YAAL,GAVmC,CAYnC;;;EACA,IAAIxL,MAAM,KAAK,MAAX,IAAqB,CAAC+E,GAAG,CAAC0G,WAA9B,EAA2C;IACzC;IACA,IAAI,OAAOhH,IAAP,KAAgB,QAApB,EAA8B;MAC5B,IAAIiH,WAAW,GAAG3G,GAAG,CAAC4G,SAAJ,CAAc,cAAd,CAAlB,CA
D4B,CAE5B;;MACA,IAAID,WAAJ,EAAiBA,WAAW,GAAGA,WAAW,CAAC5C,KAAZ,CAAkB,GAAlB,EAAuB,CAAvB,CAAd;MACjB,IAAIlI,SAAS,GAAG,KAAKgL,WAAL,IAAoB1L,OAAO,CAACU,SAAR,CAAkB8K,WAAlB,CAApC;;MACA,IAAI,CAAC9K,SAAD,IAAciL,MAAM,CAACH,WAAD,CAAxB,EAAuC;QACrC9K,SAAS,GAAGV,OAAO,CAACU,SAAR,CAAkB,kBAAlB,CAAZ;MACD;;MAED,IAAIA,SAAJ,EAAe6D,IAAI,GAAG7D,SAAS,CAAC6D,IAAD,CAAhB;IAChB,CAZwC,CAczC;;;IACA,IAAIA,IAAI,IAAI,CAACM,GAAG,CAAC4G,SAAJ,CAAc,gBAAd,CAAb,EAA8C;MAC5C5G,GAAG,CAAC8E,SAAJ,CACE,gBADF,EAEElD,MAAM,CAACU,QAAP,CAAgB5C,IAAhB,IAAwBA,IAAI,CAACnE,MAA7B,GAAsCqG,MAAM,CAACmF,UAAP,CAAkBrH,IAAlB,CAFxC;IAID;EACF,CAlCkC,CAoCnC;EACA;;;EACAM,GAAG,CAAC3C,IAAJ,CAAS,UAAT,EAAqB,UAAC4C,GAAD,EAAS;IAC5B9F,KAAK,CAAC,aAAD,EAAgB,MAAI,CAACc,MAArB,EAA6B,MAAI,CAACC,GAAlC,EAAuC+E,GAAG,CAACE,UAA3C,CAAL;;IAEA,IAAI,MAAI,CAAC6G,qBAAT,EAAgC;MAC9B1J,YAAY,CAAC,MAAI,CAAC0J,qBAAN,CAAZ;IACD;;IAED,IAAI,MAAI,CAAClH,KAAT,EAAgB;MACd;IACD;;IAED,IAAMmH,GAAG,GAAG,MAAI,CAAC7G,aAAjB;IACA,IAAMrG,IAAI,GAAGQ,KAAK,CAACwE,IAAN,CAAWkB,GAAG,CAACY,OAAJ,CAAY,cAAZ,KAA+B,EAA1C,KAAiD,YAA9D;IACA,IAAI9B,IAAI,GAAGhF,IAAI,CAACgK,KAAL,CAAW,GAAX,EAAgB,CAAhB,CAAX;IACA,IAAIhF,IAAJ,EAAUA,IAAI,GAAGA,IAAI,CAACmI,WAAL,GAAmBC,IAAnB,EAAP;IACV,IAAMC,SAAS,GAAGrI,IAAI,KAAK,WAA3B;IACA,IAAMsI,QAAQ,GAAGnH,UAAU,CAACD,GAAG,CAACE,UAAL,CAA3B;IACA,IAAMmH,YAAY,GAAG,MAAI,CAACC,aAA1B;IAEA,MAAI,CAACtH,GAAL,GAAWA,GAAX,CAnB4B,CAqB5B;;IACA,IAAIoH,QAAQ,IAAI,MAAI,CAACzK,UAAL,OAAsBqK,GAAtC,EAA2C;MACzC,OAAO,MAAI,CAAC5G,SAAL,CAAeJ,GAAf,CAAP;IACD;;IAED,IAAI,MAAI,CAAChF,MAAL,KAAgB,MAApB,EAA4B;MAC1B,MAAI,CAACuD,IAAL,CAAU,KAAV;;MACA,MAAI,CAACI,QAAL,CAAc,IAAd,EAAoB,MAAI,CAAC0B,aAAL,EAApB;;MACA;IACD,CA9B2B,CAgC5B;;;IACA,IAAI,MAAI,CAACE,YAAL,CAAkBP,GAAlB,CAAJ,EAA4B;MAC1BxF,KAAK,CAACuF,GAAD,EAAMC,GAAN,CAAL;IACD;;IAED,IAAIlE,MAAM,GAAG,MAAI,CAAC6E,OAAlB;;IACA,IAAI7E,MAAM,KAAKqB,SAAX,IAAwBrD,IAAI,IAAIoB,OAAO,CAACY,MAA5C,EAAoD;MAClDA,MAAM,GAAGO,OAAO,CAACnB,OAAO,CAACY,MAAR,CAAehC,IAAf,CAAD,CAAhB;IACD;;IAED,IAAIyN,MAAM,GAAG,MAAI,CAACC,OAAlB;;IACA,IAAIrK,SAAS,KAAKrB,MAAd,IAAwByL,MAA5B,EAAoC;MAClC5B,OAAO,CAACC,IAAR,CACE,0LADF;MAGA9J,MAAM,GAAG,IAAT;IACD;;IAED,IAAI,CAACyL,MAAL,EAAa;MACX,IAAIF,YAAJ,EAAkB;QAChBE,MAAM,GAAGrM,OAAO,CAAC9B,KAAR,CAAcqO,KAAvB,CADgB,CACc;;QAC9B3L,MAAM,GAAG,IAAT;MACD,CAHD,MAGO,IAAIqL,SAAJ,EAAe;QACpB,IAAMO,IAAI,GAAGzN,UAAU,EAAvB;QACAsN,MAAM,GAAGG,IAAI,CAACtO,KAAL,CAAWkE,IAAX,CAAgBoK,IAAhB,CAAT;QACA5L,MAAM,GAAG,IAAT;MACD,CAJM,MAIA,IAAI6L,cAAc,CAAC7N,IAAD,CAAlB,EAA0B;QAC/ByN,MAAM,GAAGrM,OAAO,CAAC9B,KAAR,CAAcqO,KAAvB;QACA3L,MAAM,GAAG,IAAT,CAF+B,CAEhB;MAChB,CAHM,MAGA,IAAIZ,OAAO,CAAC9B,KAAR,CAAcU,IAAd,CAAJ,EAAyB;QAC9ByN,MAAM,GAAGrM,OAAO,CAAC9B,KAAR,CAAcU,IAAd,CAAT;MACD,CAFM,MAEA,IAAIgF,IAAI,KAAK,MAAb,EAAqB;QAC1ByI,MAAM,GAAGrM,OAAO,CAAC9B,KAAR,CAAcwO,IAAvB;QACA9L,MAAM,GAAGA,MAAM,KAAK,KAApB,CAF0B,CAI1B;MACD,CALM,MAKA,IAAI+K,MAAM,CAAC/M,IAAD,CAAV,EAAkB;QACvByN,MAAM,GAAGrM,OAAO,CAAC9B,KAAR,CAAc,kBAAd,CAAT;QACA0C,MAAM,GAAGA,MAAM,KAAK,KAApB;MACD,CAHM,MAGA,IAAIA,MAAJ,EAAY;QACjByL,MAAM,GAAGrM,OAAO,CAAC9B,KAAR,CAAcwO,IAAvB;MACD,CAFM,MAEA,IAAIzK,SAAS,KAAKrB,MAAlB,EAA0B;QAC/ByL,MAAM,GAAGrM,OAAO,CAAC9B,KAAR,CAAcqO,KAAvB,CAD+B,CACD;;QAC9B3L,MAAM,GAAG,IAAT;MACD;IACF,CA7E2B,CA+E5B;;;IACA,IAAKqB,SAAS,KAAKrB,MAAd,IAAwB+L,MAAM,CAAC/N,IAAD,CAA/B,IAA0C+M,MAAM,CAAC/M,IAAD,CAApD,EAA4D;MAC1DgC,MAAM,GAAG,IAAT;IACD;;IAED,MAAI,CAACgM,YAAL,GAAoBhM,MAApB;IACA,IAAIiM,gBAAgB,GAAG,KAAvB;;IACA,IAAIjM,MAAJ,EAAY;MACV;MACA,IAAIkM,iBAAiB,GAAG,MAAI,CAACC,gBAAL,IAAyB,SAAjD;MACAjI,GAAG,CAAC7B,EAAJ,CAAO,MAAP,EAAe,UAAC+J,GAAD,EAAS;QACtBF,iBAAiB,IAAIE,GAAG,CAACpB,UAAJ,IAAkBoB,GAAG,CAAC5M,MAAJ,GAAa,CAA/B,GAAmC4M,GAAG,CAAC5M,MAAvC,GAAgD,CAArE;;QACA,IAA
I0M,iBAAiB,GAAG,CAAxB,EAA2B;UACzB;UACA,IAAM5J,KAAK,GAAG,IAAIV,KAAJ,CAAU,+BAAV,CAAd;UACAU,KAAK,CAACsC,IAAN,GAAa,WAAb,CAHyB,CAIzB;UACA;;UACAqH,gBAAgB,GAAG,KAAnB,CANyB,CAOzB;;UACA/H,GAAG,CAACmI,OAAJ,CAAY/J,KAAZ,EARyB,CASzB;;UACA,MAAI,CAACO,QAAL,CAAcP,KAAd,EAAqB,IAArB;QACD;MACF,CAdD;IAeD;;IAED,IAAImJ,MAAJ,EAAY;MACV,IAAI;QACF;QACA;QACAQ,gBAAgB,GAAGjM,MAAnB;QAEAyL,MAAM,CAACvH,GAAD,EAAM,UAAC5B,KAAD,EAAQgI,MAAR,EAAgBE,KAAhB,EAA0B;UACpC,IAAI,MAAI,CAAC8B,QAAT,EAAmB;YACjB;YACA;UACD,CAJmC,CAMpC;UACA;;;UACA,IAAIhK,KAAK,IAAI,CAAC,MAAI,CAACkC,QAAnB,EAA6B;YAC3B,OAAO,MAAI,CAAC3B,QAAL,CAAcP,KAAd,CAAP;UACD;;UAED,IAAI2J,gBAAJ,EAAsB;YACpB,MAAI,CAACxJ,IAAL,CAAU,KAAV;;YACA,MAAI,CAACI,QAAL,CAAc,IAAd,EAAoB,MAAI,CAAC0B,aAAL,CAAmB+F,MAAnB,EAA2BE,KAA3B,CAApB;UACD;QACF,CAhBK,CAAN;MAiBD,CAtBD,CAsBE,OAAOvD,GAAP,EAAY;QACZ,MAAI,CAACpE,QAAL,CAAcoE,GAAd;;QACA;MACD;IACF;;IAED,MAAI,CAAC/C,GAAL,GAAWA,GAAX,CAvI4B,CAyI5B;;IACA,IAAI,CAAClE,MAAL,EAAa;MACX5B,KAAK,CAAC,kBAAD,EAAqB,MAAI,CAACc,MAA1B,EAAkC,MAAI,CAACC,GAAvC,CAAL;;MACA,MAAI,CAAC0D,QAAL,CAAc,IAAd,EAAoB,MAAI,CAAC0B,aAAL,EAApB;;MACA,IAAI8G,SAAJ,EAAe,OAHJ,CAGY;;MACvBnH,GAAG,CAAC5C,IAAJ,CAAS,KAAT,EAAgB,YAAM;QACpBlD,KAAK,CAAC,WAAD,EAAc,MAAI,CAACc,MAAnB,EAA2B,MAAI,CAACC,GAAhC,CAAL;;QACA,MAAI,CAACsD,IAAL,CAAU,KAAV;MACD,CAHD;MAIA;IACD,CAnJ2B,CAqJ5B;;;IACAyB,GAAG,CAAC5C,IAAJ,CAAS,OAAT,EAAkB,UAACgB,KAAD,EAAW;MAC3B2J,gBAAgB,GAAG,KAAnB;;MACA,MAAI,CAACpJ,QAAL,CAAcP,KAAd,EAAqB,IAArB;IACD,CAHD;IAIA,IAAI,CAAC2J,gBAAL,EACE/H,GAAG,CAAC5C,IAAJ,CAAS,KAAT,EAAgB,YAAM;MACpBlD,KAAK,CAAC,WAAD,EAAc,MAAI,CAACc,MAAnB,EAA2B,MAAI,CAACC,GAAhC,CAAL,CADoB,CAEpB;;MACA,MAAI,CAACsD,IAAL,CAAU,KAAV;;MACA,MAAI,CAACI,QAAL,CAAc,IAAd,EAAoB,MAAI,CAAC0B,aAAL,EAApB;IACD,CALD;EAMH,CAjKD;EAmKA,KAAK9B,IAAL,CAAU,SAAV,EAAqB,IAArB;;EAEA,IAAM8J,kBAAkB,GAAG,SAArBA,kBAAqB,GAAM;IAC/B,IAAMC,gBAAgB,GAAG,IAAzB;IACA,IAAMC,KAAK,GAAGxI,GAAG,CAAC4G,SAAJ,CAAc,gBAAd,CAAd;IACA,IAAI6B,MAAM,GAAG,CAAb;IAEA,IAAMC,QAAQ,GAAG,IAAIlP,MAAM,CAACmP,SAAX,EAAjB;;IACAD,QAAQ,CAACE,UAAT,GAAsB,UAACC,KAAD,EAAQlJ,QAAR,EAAkBf,QAAlB,EAA+B;MACnD6J,MAAM,IAAII,KAAK,CAACtN,MAAhB;;MACA,MAAI,CAACiD,IAAL,CAAU,UAAV,EAAsB;QACpBsK,SAAS,EAAE,QADS;QAEpBP,gBAAgB,EAAhBA,gBAFoB;QAGpBE,MAAM,EAANA,MAHoB;QAIpBD,KAAK,EAALA;MAJoB,CAAtB;;MAMA5J,QAAQ,CAAC,IAAD,EAAOiK,KAAP,CAAR;IACD,CATD;;IAWA,OAAOH,QAAP;EACD,CAlBD;;EAoBA,IAAMK,cAAc,GAAG,SAAjBA,cAAiB,CAAChN,MAAD,EAAY;IACjC,IAAMiN,SAAS,GAAG,KAAK,IAAvB,CADiC,CACJ;;IAC7B,IAAMC,QAAQ,GAAG,IAAIzP,MAAM,CAAC0P,QAAX,EAAjB;IACA,IAAMC,WAAW,GAAGpN,MAAM,CAACR,MAA3B;IACA,IAAM6N,SAAS,GAAGD,WAAW,GAAGH,SAAhC;IACA,IAAMK,MAAM,GAAGF,WAAW,GAAGC,SAA7B;;IAEA,KAAK,IAAI3F,CAAC,GAAG,CAAb,EAAgBA,CAAC,GAAG4F,MAApB,EAA4B5F,CAAC,IAAIuF,SAAjC,EAA4C;MAC1C,IAAMH,KAAK,GAAG9M,MAAM,CAACwH,KAAP,CAAaE,CAAb,EAAgBA,CAAC,GAAGuF,SAApB,CAAd;MACAC,QAAQ,CAAC3J,IAAT,CAAcuJ,KAAd;IACD;;IAED,IAAIO,SAAS,GAAG,CAAhB,EAAmB;MACjB,IAAME,eAAe,GAAGvN,MAAM,CAACwH,KAAP,CAAa,CAAC6F,SAAd,CAAxB;MACAH,QAAQ,CAAC3J,IAAT,CAAcgK,eAAd;IACD;;IAEDL,QAAQ,CAAC3J,IAAT,CAAc,IAAd,EAjBiC,CAiBZ;;IAErB,OAAO2J,QAAP;EACD,CApBD,CA/NmC,CAqPnC;;;EACA,IAAM3K,QAAQ,GAAG,KAAK5B,SAAtB;;EACA,IAAI4B,QAAJ,EAAc;IACZ;IACA,IAAMuC,OAAO,GAAGvC,QAAQ,CAAC0C,UAAT,EAAhB;;IACA,KAAK,IAAMyC,CAAX,IAAgB5C,OAAhB,EAAyB;MACvB,IAAIjG,MAAM,CAACiG,OAAD,EAAU4C,CAAV,CAAV,EAAwB;QACtBtJ,KAAK,CAAC,mCAAD,EAAsCsJ,CAAtC,EAAyC5C,OAAO,CAAC4C,CAAD,CAAhD,CAAL;QACAzD,GAAG,CAAC8E,SAAJ,CAAcrB,CAAd,EAAiB5C,OAAO,CAAC4C,CAAD,CAAxB;MACD;IACF,CARW,CAUZ;;;IACAnF,QAAQ,CAACiL,SAAT,CAAmB,UAAClL,KAAD,EAAQ9C,MAAR,EAAmB;MACpC;MACA,IAAI8C,KAAJ,EAAWlE,KAAK,CAAC,8BAAD,EAAiCkE,KAAjC,EAAwC9C,MAAxC,CAAL;MAEXpB,KAAK,CAAC,iCAAD,EAAoCoB,MAApC,CAAL;;MACA,IAAI,OAAOA,MAAP,KAAk
B,QAAtB,EAAgC;QAC9ByE,GAAG,CAAC8E,SAAJ,CAAc,gBAAd,EAAgCvJ,MAAhC;MACD;;MAED+C,QAAQ,CAACsB,IAAT,CAAc0I,kBAAkB,EAAhC,EAAoC1I,IAApC,CAAyCI,GAAzC;IACD,CAVD;EAWD,CAtBD,MAsBO,IAAI4B,MAAM,CAACU,QAAP,CAAgB5C,IAAhB,CAAJ,EAA2B;IAChCqJ,cAAc,CAACrJ,IAAD,CAAd,CAAqBE,IAArB,CAA0B0I,kBAAkB,EAA5C,EAAgD1I,IAAhD,CAAqDI,GAArD;EACD,CAFM,MAEA;IACLA,GAAG,CAAC3E,GAAJ,CAAQqE,IAAR;EACD;AACF,CAlRD,C,CAoRA;;;AACAtE,OAAO,CAACqC,SAAR,CAAkB+C,YAAlB,GAAiC,UAACP,GAAD,EAAS;EACxC,IAAIA,GAAG,CAACE,UAAJ,KAAmB,GAAnB,IAA0BF,GAAG,CAACE,UAAJ,KAAmB,GAAjD,EAAsD;IACpD;IACA,OAAO,KAAP;EACD,CAJuC,CAMxC;;;EACA,IAAIF,GAAG,CAACY,OAAJ,CAAY,gBAAZ,MAAkC,GAAtC,EAA2C;IACzC;IACA,OAAO,KAAP;EACD,CAVuC,CAYxC;;;EACA,OAAO,2BAA2BgD,IAA3B,CAAgC5D,GAAG,CAACY,OAAJ,CAAY,kBAAZ,CAAhC,CAAP;AACD,CAdD;AAgBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AACAzF,OAAO,CAACqC,SAAR,CAAkB+L,OAAlB,GAA4B,UAAUC,eAAV,EAA2B;EACrD,IAAI,OAAOA,eAAP,KAA2B,QAA/B,EAAyC;IACvC,KAAKvF,gBAAL,GAAwB;MAAE,KAAKuF;IAAP,CAAxB;EACD,CAFD,MAEO,IAAI,QAAOA,eAAP,MAA2B,QAA/B,EAAyC;IAC9C,KAAKvF,gBAAL,GAAwBuF,eAAxB;EACD,CAFM,MAEA;IACL,KAAKvF,gBAAL,GAAwB9G,SAAxB;EACD;;EAED,OAAO,IAAP;AACD,CAVD;;AAYAhC,OAAO,CAACqC,SAAR,CAAkBiM,cAAlB,GAAmC,UAAUC,MAAV,EAAkB;EACnD,KAAKjF,eAAL,GAAuBiF,MAAM,KAAKvM,SAAX,GAAuB,IAAvB,GAA8BuM,MAArD;EACA,OAAO,IAAP;AACD,CAHD,C,CAKA;;;AACA,IAAI,CAAC3P,OAAO,CAACiF,QAAR,CAAiB,KAAjB,CAAL,EAA8B;EAC5B;EACA;EACA;EACAjF,OAAO,sBAAOA,OAAP,CAAP;EACAA,OAAO,CAACsF,IAAR,CAAa,KAAb;AACD;;2CAEkBtF,O;;;;;QAAViB,M;IACP,IAAM2O,IAAI,GAAG3O,MAAb;IACAA,MAAM,GAAGA,MAAM,KAAK,KAAX,GAAmB,QAAnB,GAA8BA,MAAvC;IAEAA,MAAM,GAAGA,MAAM,CAAC4O,WAAP,EAAT;;IACA7O,OAAO,CAAC4O,IAAD,CAAP,GAAgB,UAAC1O,GAAD,EAAMwE,IAAN,EAAYiG,EAAZ,EAAmB;MACjC,IAAM1J,QAAQ,GAAGjB,OAAO,CAACC,MAAD,EAASC,GAAT,CAAxB;;MACA,IAAI,OAAOwE,IAAP,KAAgB,UAApB,EAAgC;QAC9BiG,EAAE,GAAGjG,IAAL;QACAA,IAAI,GAAG,IAAP;MACD;;MAED,IAAIA,IAAJ,EAAU;QACR,IAAIzE,MAAM,KAAK,KAAX,IAAoBA,MAAM,KAAK,MAAnC,EAA2C;UACzCgB,QAAQ,CAACmD,KAAT,CAAeM,IAAf;QACD,CAFD,MAEO;UACLzD,QAAQ,CAAC6N,IAAT,CAAcpK,IAAd;QACD;MACF;;MAED,IAAIiG,EAAJ,EAAQ1J,QAAQ,CAACZ,GAAT,CAAasK,EAAb;MACR,OAAO1J,QAAP;IACD,CAjBD;;;EALF,oDAA4B;IAAA;EAuB3B;EAED;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AAEA,SAAS6L,MAAT,CAAgB/N,IAAhB,EAAsB;EACpB,IAAMgQ,KAAK,GAAGhQ,IAAI,CAACgK,KAAL,CAAW,GAAX,CAAd;EACA,IAAIhF,IAAI,GAAGgL,KAAK,CAAC,CAAD,CAAhB;EACA,IAAIhL,IAAJ,EAAUA,IAAI,GAAGA,IAAI,CAACmI,WAAL,GAAmBC,IAAnB,EAAP;EACV,IAAI6C,OAAO,GAAGD,KAAK,CAAC,CAAD,CAAnB;EACA,IAAIC,OAAJ,EAAaA,OAAO,GAAGA,OAAO,CAAC9C,WAAR,GAAsBC,IAAtB,EAAV;EAEb,OAAOpI,IAAI,KAAK,MAAT,IAAmBiL,OAAO,KAAK,uBAAtC;AACD;;AAED,SAASpC,cAAT,CAAwB7N,IAAxB,EAA8B;EAC5B,IAAIgF,IAAI,GAAGhF,IAAI,CAACgK,KAAL,CAAW,GAAX,EAAgB,CAAhB,CAAX;EACA,IAAIhF,IAAJ,EAAUA,IAAI,GAAGA,IAAI,CAACmI,WAAL,GAAmBC,IAAnB,EAAP;EAEV,OAAOpI,IAAI,KAAK,OAAT,IAAoBA,IAAI,KAAK,OAApC;AACD;AAED;AACA;AACA;AACA;AACA;AACA;AACA;;;AAEA,SAAS+H,MAAT,CAAgB/M,IAAhB,EAAsB;EACpB;EACA;EACA,OAAO,sBAAsB8J,IAAtB,CAA2B9J,IAA3B,CAAP;AACD;AAED;AACA;AACA;AACA;AACA;AACA;AACA;;;AAEA,SAASmG,UAAT,CAAoBS,IAApB,EAA0B;EACxB,OAAO,CAAC,GAAD,EAAM,GAAN,EAAW,GAAX,EAAgB,GAAhB,EAAqB,GAArB,EAA0B,GAA1B,EAA+B1B,QAA/B,CAAwC0B,IAAxC,CAAP;AACD"} \ No newline at end of file diff --git a/node_modules/superagent/lib/node/parsers/image.js b/node_modules/superagent/lib/node/parsers/image.js new file mode 100644 index 0000000..f14145c --- /dev/null +++ b/node_modules/superagent/lib/node/parsers/image.js @@ -0,0 +1,13 @@ +"use strict"; + +module.exports = function (res, fn) { + var data = []; // Binary data needs binary storage + + res.on('data', function (chunk) { + data.push(chunk); + }); + res.on('end', function () { + fn(null, 
Buffer.concat(data)); + }); +}; +//# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJuYW1lcyI6WyJtb2R1bGUiLCJleHBvcnRzIiwicmVzIiwiZm4iLCJkYXRhIiwib24iLCJjaHVuayIsInB1c2giLCJCdWZmZXIiLCJjb25jYXQiXSwic291cmNlcyI6WyIuLi8uLi8uLi9zcmMvbm9kZS9wYXJzZXJzL2ltYWdlLmpzIl0sInNvdXJjZXNDb250ZW50IjpbIm1vZHVsZS5leHBvcnRzID0gKHJlcywgZm4pID0+IHtcbiAgY29uc3QgZGF0YSA9IFtdOyAvLyBCaW5hcnkgZGF0YSBuZWVkcyBiaW5hcnkgc3RvcmFnZVxuXG4gIHJlcy5vbignZGF0YScsIChjaHVuaykgPT4ge1xuICAgIGRhdGEucHVzaChjaHVuayk7XG4gIH0pO1xuICByZXMub24oJ2VuZCcsICgpID0+IHtcbiAgICBmbihudWxsLCBCdWZmZXIuY29uY2F0KGRhdGEpKTtcbiAgfSk7XG59O1xuIl0sIm1hcHBpbmdzIjoiOztBQUFBQSxNQUFNLENBQUNDLE9BQVAsR0FBaUIsVUFBQ0MsR0FBRCxFQUFNQyxFQUFOLEVBQWE7RUFDNUIsSUFBTUMsSUFBSSxHQUFHLEVBQWIsQ0FENEIsQ0FDWDs7RUFFakJGLEdBQUcsQ0FBQ0csRUFBSixDQUFPLE1BQVAsRUFBZSxVQUFDQyxLQUFELEVBQVc7SUFDeEJGLElBQUksQ0FBQ0csSUFBTCxDQUFVRCxLQUFWO0VBQ0QsQ0FGRDtFQUdBSixHQUFHLENBQUNHLEVBQUosQ0FBTyxLQUFQLEVBQWMsWUFBTTtJQUNsQkYsRUFBRSxDQUFDLElBQUQsRUFBT0ssTUFBTSxDQUFDQyxNQUFQLENBQWNMLElBQWQsQ0FBUCxDQUFGO0VBQ0QsQ0FGRDtBQUdELENBVEQifQ== \ No newline at end of file diff --git a/node_modules/superagent/lib/node/parsers/index.js b/node_modules/superagent/lib/node/parsers/index.js new file mode 100644 index 0000000..15e9d07 --- /dev/null +++ b/node_modules/superagent/lib/node/parsers/index.js @@ -0,0 +1,12 @@ +"use strict"; + +exports['application/x-www-form-urlencoded'] = require('./urlencoded'); +exports['application/json'] = require('./json'); +exports.text = require('./text'); + +var binary = require('./image'); + +exports['application/octet-stream'] = binary; +exports['application/pdf'] = binary; +exports.image = binary; +//# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJuYW1lcyI6WyJleHBvcnRzIiwicmVxdWlyZSIsInRleHQiLCJiaW5hcnkiLCJpbWFnZSJdLCJzb3VyY2VzIjpbIi4uLy4uLy4uL3NyYy9ub2RlL3BhcnNlcnMvaW5kZXguanMiXSwic291cmNlc0NvbnRlbnQiOlsiZXhwb3J0c1snYXBwbGljYXRpb24veC13d3ctZm9ybS11cmxlbmNvZGVkJ10gPSByZXF1aXJlKCcuL3VybGVuY29kZWQnKTtcbmV4cG9ydHNbJ2FwcGxpY2F0aW9uL2pzb24nXSA9IHJlcXVpcmUoJy4vanNvbicpO1xuZXhwb3J0cy50ZXh0ID0gcmVxdWlyZSgnLi90ZXh0Jyk7XG5cbmNvbnN0IGJpbmFyeSA9IHJlcXVpcmUoJy4vaW1hZ2UnKTtcblxuZXhwb3J0c1snYXBwbGljYXRpb24vb2N0ZXQtc3RyZWFtJ10gPSBiaW5hcnk7XG5leHBvcnRzWydhcHBsaWNhdGlvbi9wZGYnXSA9IGJpbmFyeTtcbmV4cG9ydHMuaW1hZ2UgPSBiaW5hcnk7XG4iXSwibWFwcGluZ3MiOiI7O0FBQUFBLE9BQU8sQ0FBQyxtQ0FBRCxDQUFQLEdBQStDQyxPQUFPLENBQUMsY0FBRCxDQUF0RDtBQUNBRCxPQUFPLENBQUMsa0JBQUQsQ0FBUCxHQUE4QkMsT0FBTyxDQUFDLFFBQUQsQ0FBckM7QUFDQUQsT0FBTyxDQUFDRSxJQUFSLEdBQWVELE9BQU8sQ0FBQyxRQUFELENBQXRCOztBQUVBLElBQU1FLE1BQU0sR0FBR0YsT0FBTyxDQUFDLFNBQUQsQ0FBdEI7O0FBRUFELE9BQU8sQ0FBQywwQkFBRCxDQUFQLEdBQXNDRyxNQUF0QztBQUNBSCxPQUFPLENBQUMsaUJBQUQsQ0FBUCxHQUE2QkcsTUFBN0I7QUFDQUgsT0FBTyxDQUFDSSxLQUFSLEdBQWdCRCxNQUFoQiJ9 \ No newline at end of file diff --git a/node_modules/superagent/lib/node/parsers/json.js b/node_modules/superagent/lib/node/parsers/json.js new file mode 100644 index 0000000..61a8364 --- /dev/null +++ b/node_modules/superagent/lib/node/parsers/json.js @@ -0,0 +1,26 @@ +"use strict"; + +module.exports = function (res, fn) { + res.text = ''; + res.setEncoding('utf8'); + res.on('data', function (chunk) { + res.text += chunk; + }); + res.on('end', function () { + var body; + var error; + + try { + body = res.text && JSON.parse(res.text); + } catch (err) { + error = err; // issue #675: return the raw response if the response parsing fails + + error.rawResponse = res.text || null; // issue #876: return the http status code if the response parsing fails + + error.statusCode = 
res.statusCode; + } finally { + fn(error, body); + } + }); +}; +//# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJuYW1lcyI6WyJtb2R1bGUiLCJleHBvcnRzIiwicmVzIiwiZm4iLCJ0ZXh0Iiwic2V0RW5jb2RpbmciLCJvbiIsImNodW5rIiwiYm9keSIsImVycm9yIiwiSlNPTiIsInBhcnNlIiwiZXJyIiwicmF3UmVzcG9uc2UiLCJzdGF0dXNDb2RlIl0sInNvdXJjZXMiOlsiLi4vLi4vLi4vc3JjL25vZGUvcGFyc2Vycy9qc29uLmpzIl0sInNvdXJjZXNDb250ZW50IjpbIm1vZHVsZS5leHBvcnRzID0gZnVuY3Rpb24gKHJlcywgZm4pIHtcbiAgcmVzLnRleHQgPSAnJztcbiAgcmVzLnNldEVuY29kaW5nKCd1dGY4Jyk7XG4gIHJlcy5vbignZGF0YScsIChjaHVuaykgPT4ge1xuICAgIHJlcy50ZXh0ICs9IGNodW5rO1xuICB9KTtcbiAgcmVzLm9uKCdlbmQnLCAoKSA9PiB7XG4gICAgbGV0IGJvZHk7XG4gICAgbGV0IGVycm9yO1xuICAgIHRyeSB7XG4gICAgICBib2R5ID0gcmVzLnRleHQgJiYgSlNPTi5wYXJzZShyZXMudGV4dCk7XG4gICAgfSBjYXRjaCAoZXJyKSB7XG4gICAgICBlcnJvciA9IGVycjtcbiAgICAgIC8vIGlzc3VlICM2NzU6IHJldHVybiB0aGUgcmF3IHJlc3BvbnNlIGlmIHRoZSByZXNwb25zZSBwYXJzaW5nIGZhaWxzXG4gICAgICBlcnJvci5yYXdSZXNwb25zZSA9IHJlcy50ZXh0IHx8IG51bGw7XG4gICAgICAvLyBpc3N1ZSAjODc2OiByZXR1cm4gdGhlIGh0dHAgc3RhdHVzIGNvZGUgaWYgdGhlIHJlc3BvbnNlIHBhcnNpbmcgZmFpbHNcbiAgICAgIGVycm9yLnN0YXR1c0NvZGUgPSByZXMuc3RhdHVzQ29kZTtcbiAgICB9IGZpbmFsbHkge1xuICAgICAgZm4oZXJyb3IsIGJvZHkpO1xuICAgIH1cbiAgfSk7XG59O1xuIl0sIm1hcHBpbmdzIjoiOztBQUFBQSxNQUFNLENBQUNDLE9BQVAsR0FBaUIsVUFBVUMsR0FBVixFQUFlQyxFQUFmLEVBQW1CO0VBQ2xDRCxHQUFHLENBQUNFLElBQUosR0FBVyxFQUFYO0VBQ0FGLEdBQUcsQ0FBQ0csV0FBSixDQUFnQixNQUFoQjtFQUNBSCxHQUFHLENBQUNJLEVBQUosQ0FBTyxNQUFQLEVBQWUsVUFBQ0MsS0FBRCxFQUFXO0lBQ3hCTCxHQUFHLENBQUNFLElBQUosSUFBWUcsS0FBWjtFQUNELENBRkQ7RUFHQUwsR0FBRyxDQUFDSSxFQUFKLENBQU8sS0FBUCxFQUFjLFlBQU07SUFDbEIsSUFBSUUsSUFBSjtJQUNBLElBQUlDLEtBQUo7O0lBQ0EsSUFBSTtNQUNGRCxJQUFJLEdBQUdOLEdBQUcsQ0FBQ0UsSUFBSixJQUFZTSxJQUFJLENBQUNDLEtBQUwsQ0FBV1QsR0FBRyxDQUFDRSxJQUFmLENBQW5CO0lBQ0QsQ0FGRCxDQUVFLE9BQU9RLEdBQVAsRUFBWTtNQUNaSCxLQUFLLEdBQUdHLEdBQVIsQ0FEWSxDQUVaOztNQUNBSCxLQUFLLENBQUNJLFdBQU4sR0FBb0JYLEdBQUcsQ0FBQ0UsSUFBSixJQUFZLElBQWhDLENBSFksQ0FJWjs7TUFDQUssS0FBSyxDQUFDSyxVQUFOLEdBQW1CWixHQUFHLENBQUNZLFVBQXZCO0lBQ0QsQ0FSRCxTQVFVO01BQ1JYLEVBQUUsQ0FBQ00sS0FBRCxFQUFRRCxJQUFSLENBQUY7SUFDRDtFQUNGLENBZEQ7QUFlRCxDQXJCRCJ9 \ No newline at end of file diff --git a/node_modules/superagent/lib/node/parsers/text.js b/node_modules/superagent/lib/node/parsers/text.js new file mode 100644 index 0000000..9f26d22 --- /dev/null +++ b/node_modules/superagent/lib/node/parsers/text.js @@ -0,0 +1,11 @@ +"use strict"; + +module.exports = function (res, fn) { + res.text = ''; + res.setEncoding('utf8'); + res.on('data', function (chunk) { + res.text += chunk; + }); + res.on('end', fn); +}; +//# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJuYW1lcyI6WyJtb2R1bGUiLCJleHBvcnRzIiwicmVzIiwiZm4iLCJ0ZXh0Iiwic2V0RW5jb2RpbmciLCJvbiIsImNodW5rIl0sInNvdXJjZXMiOlsiLi4vLi4vLi4vc3JjL25vZGUvcGFyc2Vycy90ZXh0LmpzIl0sInNvdXJjZXNDb250ZW50IjpbIm1vZHVsZS5leHBvcnRzID0gKHJlcywgZm4pID0+IHtcbiAgcmVzLnRleHQgPSAnJztcbiAgcmVzLnNldEVuY29kaW5nKCd1dGY4Jyk7XG4gIHJlcy5vbignZGF0YScsIChjaHVuaykgPT4ge1xuICAgIHJlcy50ZXh0ICs9IGNodW5rO1xuICB9KTtcbiAgcmVzLm9uKCdlbmQnLCBmbik7XG59O1xuIl0sIm1hcHBpbmdzIjoiOztBQUFBQSxNQUFNLENBQUNDLE9BQVAsR0FBaUIsVUFBQ0MsR0FBRCxFQUFNQyxFQUFOLEVBQWE7RUFDNUJELEdBQUcsQ0FBQ0UsSUFBSixHQUFXLEVBQVg7RUFDQUYsR0FBRyxDQUFDRyxXQUFKLENBQWdCLE1BQWhCO0VBQ0FILEdBQUcsQ0FBQ0ksRUFBSixDQUFPLE1BQVAsRUFBZSxVQUFDQyxLQUFELEVBQVc7SUFDeEJMLEdBQUcsQ0FBQ0UsSUFBSixJQUFZRyxLQUFaO0VBQ0QsQ0FGRDtFQUdBTCxHQUFHLENBQUNJLEVBQUosQ0FBTyxLQUFQLEVBQWNILEVBQWQ7QUFDRCxDQVBEIn0= \ No newline at end of file diff --git a/node_modules/superagent/lib/node/parsers/urlencoded.js 
b/node_modules/superagent/lib/node/parsers/urlencoded.js new file mode 100644 index 0000000..d5ad7a6 --- /dev/null +++ b/node_modules/superagent/lib/node/parsers/urlencoded.js @@ -0,0 +1,22 @@ +"use strict"; + +/** + * Module dependencies. + */ +var qs = require('qs'); + +module.exports = function (res, fn) { + res.text = ''; + res.setEncoding('ascii'); + res.on('data', function (chunk) { + res.text += chunk; + }); + res.on('end', function () { + try { + fn(null, qs.parse(res.text)); + } catch (err) { + fn(err); + } + }); +}; +//# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJuYW1lcyI6WyJxcyIsInJlcXVpcmUiLCJtb2R1bGUiLCJleHBvcnRzIiwicmVzIiwiZm4iLCJ0ZXh0Iiwic2V0RW5jb2RpbmciLCJvbiIsImNodW5rIiwicGFyc2UiLCJlcnIiXSwic291cmNlcyI6WyIuLi8uLi8uLi9zcmMvbm9kZS9wYXJzZXJzL3VybGVuY29kZWQuanMiXSwic291cmNlc0NvbnRlbnQiOlsiLyoqXG4gKiBNb2R1bGUgZGVwZW5kZW5jaWVzLlxuICovXG5cbmNvbnN0IHFzID0gcmVxdWlyZSgncXMnKTtcblxubW9kdWxlLmV4cG9ydHMgPSAocmVzLCBmbikgPT4ge1xuICByZXMudGV4dCA9ICcnO1xuICByZXMuc2V0RW5jb2RpbmcoJ2FzY2lpJyk7XG4gIHJlcy5vbignZGF0YScsIChjaHVuaykgPT4ge1xuICAgIHJlcy50ZXh0ICs9IGNodW5rO1xuICB9KTtcbiAgcmVzLm9uKCdlbmQnLCAoKSA9PiB7XG4gICAgdHJ5IHtcbiAgICAgIGZuKG51bGwsIHFzLnBhcnNlKHJlcy50ZXh0KSk7XG4gICAgfSBjYXRjaCAoZXJyKSB7XG4gICAgICBmbihlcnIpO1xuICAgIH1cbiAgfSk7XG59O1xuIl0sIm1hcHBpbmdzIjoiOztBQUFBO0FBQ0E7QUFDQTtBQUVBLElBQU1BLEVBQUUsR0FBR0MsT0FBTyxDQUFDLElBQUQsQ0FBbEI7O0FBRUFDLE1BQU0sQ0FBQ0MsT0FBUCxHQUFpQixVQUFDQyxHQUFELEVBQU1DLEVBQU4sRUFBYTtFQUM1QkQsR0FBRyxDQUFDRSxJQUFKLEdBQVcsRUFBWDtFQUNBRixHQUFHLENBQUNHLFdBQUosQ0FBZ0IsT0FBaEI7RUFDQUgsR0FBRyxDQUFDSSxFQUFKLENBQU8sTUFBUCxFQUFlLFVBQUNDLEtBQUQsRUFBVztJQUN4QkwsR0FBRyxDQUFDRSxJQUFKLElBQVlHLEtBQVo7RUFDRCxDQUZEO0VBR0FMLEdBQUcsQ0FBQ0ksRUFBSixDQUFPLEtBQVAsRUFBYyxZQUFNO0lBQ2xCLElBQUk7TUFDRkgsRUFBRSxDQUFDLElBQUQsRUFBT0wsRUFBRSxDQUFDVSxLQUFILENBQVNOLEdBQUcsQ0FBQ0UsSUFBYixDQUFQLENBQUY7SUFDRCxDQUZELENBRUUsT0FBT0ssR0FBUCxFQUFZO01BQ1pOLEVBQUUsQ0FBQ00sR0FBRCxDQUFGO0lBQ0Q7RUFDRixDQU5EO0FBT0QsQ0FiRCJ9 \ No newline at end of file diff --git a/node_modules/superagent/lib/node/response.js b/node_modules/superagent/lib/node/response.js new file mode 100644 index 0000000..0ee42ea --- /dev/null +++ b/node_modules/superagent/lib/node/response.js @@ -0,0 +1,137 @@ +"use strict"; + +/** + * Module dependencies. + */ +var util = require('util'); + +var Stream = require('stream'); + +var ResponseBase = require('../response-base'); + +var _require = require('../utils'), + mixin = _require.mixin; +/** + * Expose `Response`. + */ + + +module.exports = Response; +/** + * Initialize a new `Response` with the given `xhr`. + * + * - set flags (.ok, .error, etc) + * - parse header + * + * @param {Request} req + * @param {Object} options + * @constructor + * @extends {Stream} + * @implements {ReadableStream} + * @api private + */ + +function Response(request) { + Stream.call(this); + this.res = request.res; + var res = this.res; + this.request = request; + this.req = request.req; + this.text = res.text; + this.files = res.files || {}; + this.buffered = request._resBuffered; + this.headers = res.headers; + this.header = this.headers; + + this._setStatusProperties(res.statusCode); + + this._setHeaderProperties(this.header); + + this.setEncoding = res.setEncoding.bind(res); + res.on('data', this.emit.bind(this, 'data')); + res.on('end', this.emit.bind(this, 'end')); + res.on('close', this.emit.bind(this, 'close')); + res.on('error', this.emit.bind(this, 'error')); +} // Lazy access res.body. 
+// https://github.com/nodejs/node/pull/39520#issuecomment-889697136 + + +Object.defineProperty(Response.prototype, 'body', { + get: function get() { + return this._body !== undefined ? this._body : this.res.body !== undefined ? this.res.body : {}; + }, + set: function set(value) { + this._body = value; + } +}); +/** + * Inherit from `Stream`. + */ + +util.inherits(Response, Stream); +mixin(Response.prototype, ResponseBase.prototype); +/** + * Implements methods of a `ReadableStream` + */ + +Response.prototype.destroy = function (error) { + this.res.destroy(error); +}; +/** + * Pause. + */ + + +Response.prototype.pause = function () { + this.res.pause(); +}; +/** + * Resume. + */ + + +Response.prototype.resume = function () { + this.res.resume(); +}; +/** + * Return an `Error` representative of this response. + * + * @return {Error} + * @api public + */ + + +Response.prototype.toError = function () { + var req = this.req; + var method = req.method; + var path = req.path; + var message = "cannot ".concat(method, " ").concat(path, " (").concat(this.status, ")"); + var error = new Error(message); + error.status = this.status; + error.text = this.text; + error.method = method; + error.path = path; + return error; +}; + +Response.prototype.setStatusProperties = function (status) { + console.warn('In superagent 2.x setStatusProperties is a private method'); + return this._setStatusProperties(status); +}; +/** + * To json. + * + * @return {Object} + * @api public + */ + + +Response.prototype.toJSON = function () { + return { + req: this.request.toJSON(), + header: this.header, + status: this.status, + text: this.text + }; +}; +//# sourceMappingURL=data:application/json;charset=utf-8;base64,{"version":3,"names":["util","require","Stream","ResponseBase","mixin","module","exports","Response","request","call","res","req","text","files","buffered","_resBuffered","headers","header","_setStatusProperties","statusCode","_setHeaderProperties","setEncoding","bind","on","emit","Object","defineProperty","prototype","get","_body","undefined","body","set","value","inherits","destroy","error","pause","resume","toError","method","path","message","status","Error","setStatusProperties","console","warn","toJSON"],"sources":["../../src/node/response.js"],"sourcesContent":["/**\n * Module dependencies.\n */\n\nconst util = require('util');\nconst Stream = require('stream');\nconst ResponseBase = require('../response-base');\nconst { mixin } = require('../utils');\n\n/**\n * Expose `Response`.\n */\n\nmodule.exports = Response;\n\n/**\n * Initialize a new `Response` with the given `xhr`.\n *\n *  - set flags (.ok, .error, etc)\n *  - parse header\n *\n * @param {Request} req\n * @param {Object} options\n * @constructor\n * @extends {Stream}\n * @implements {ReadableStream}\n * @api private\n */\n\nfunction Response(request) {\n  Stream.call(this);\n  this.res = request.res;\n  const { res } = this;\n  this.request = request;\n  this.req = request.req;\n  this.text = res.text;\n  this.files = res.files || {};\n  this.buffered = request._resBuffered;\n  this.headers = res.headers;\n  this.header = this.headers;\n  this._setStatusProperties(res.statusCode);\n  this._setHeaderProperties(this.header);\n  this.setEncoding = res.setEncoding.bind(res);\n  res.on('data', this.emit.bind(this, 'data'));\n  res.on('end', this.emit.bind(this, 'end'));\n  res.on('close', this.emit.bind(this, 'close'));\n  res.on('error', this.emit.bind(this, 'error'));\n}\n\n// Lazy access res.body.\n// 
https://github.com/nodejs/node/pull/39520#issuecomment-889697136\nObject.defineProperty(Response.prototype, 'body', {\n  get() {\n    return this._body !== undefined\n      ? this._body\n      : this.res.body !== undefined\n      ? this.res.body\n      : {};\n  },\n  set(value) {\n    this._body = value;\n  }\n});\n\n/**\n * Inherit from `Stream`.\n */\n\nutil.inherits(Response, Stream);\n\nmixin(Response.prototype, ResponseBase.prototype);\n\n/**\n * Implements methods of a `ReadableStream`\n */\n\nResponse.prototype.destroy = function (error) {\n  this.res.destroy(error);\n};\n\n/**\n * Pause.\n */\n\nResponse.prototype.pause = function () {\n  this.res.pause();\n};\n\n/**\n * Resume.\n */\n\nResponse.prototype.resume = function () {\n  this.res.resume();\n};\n\n/**\n * Return an `Error` representative of this response.\n *\n * @return {Error}\n * @api public\n */\n\nResponse.prototype.toError = function () {\n  const { req } = this;\n  const { method } = req;\n  const { path } = req;\n\n  const message = `cannot ${method} ${path} (${this.status})`;\n  const error = new Error(message);\n  error.status = this.status;\n  error.text = this.text;\n  error.method = method;\n  error.path = path;\n\n  return error;\n};\n\nResponse.prototype.setStatusProperties = function (status) {\n  console.warn('In superagent 2.x setStatusProperties is a private method');\n  return this._setStatusProperties(status);\n};\n\n/**\n * To json.\n *\n * @return {Object}\n * @api public\n */\n\nResponse.prototype.toJSON = function () {\n  return {\n    req: this.request.toJSON(),\n    header: this.header,\n    status: this.status,\n    text: this.text\n  };\n};\n"],"mappings":";;AAAA;AACA;AACA;AAEA,IAAMA,IAAI,GAAGC,OAAO,CAAC,MAAD,CAApB;;AACA,IAAMC,MAAM,GAAGD,OAAO,CAAC,QAAD,CAAtB;;AACA,IAAME,YAAY,GAAGF,OAAO,CAAC,kBAAD,CAA5B;;AACA,eAAkBA,OAAO,CAAC,UAAD,CAAzB;AAAA,IAAQG,KAAR,YAAQA,KAAR;AAEA;AACA;AACA;;;AAEAC,MAAM,CAACC,OAAP,GAAiBC,QAAjB;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA,SAASA,QAAT,CAAkBC,OAAlB,EAA2B;EACzBN,MAAM,CAACO,IAAP,CAAY,IAAZ;EACA,KAAKC,GAAL,GAAWF,OAAO,CAACE,GAAnB;EACA,IAAQA,GAAR,GAAgB,IAAhB,CAAQA,GAAR;EACA,KAAKF,OAAL,GAAeA,OAAf;EACA,KAAKG,GAAL,GAAWH,OAAO,CAACG,GAAnB;EACA,KAAKC,IAAL,GAAYF,GAAG,CAACE,IAAhB;EACA,KAAKC,KAAL,GAAaH,GAAG,CAACG,KAAJ,IAAa,EAA1B;EACA,KAAKC,QAAL,GAAgBN,OAAO,CAACO,YAAxB;EACA,KAAKC,OAAL,GAAeN,GAAG,CAACM,OAAnB;EACA,KAAKC,MAAL,GAAc,KAAKD,OAAnB;;EACA,KAAKE,oBAAL,CAA0BR,GAAG,CAACS,UAA9B;;EACA,KAAKC,oBAAL,CAA0B,KAAKH,MAA/B;;EACA,KAAKI,WAAL,GAAmBX,GAAG,CAACW,WAAJ,CAAgBC,IAAhB,CAAqBZ,GAArB,CAAnB;EACAA,GAAG,CAACa,EAAJ,CAAO,MAAP,EAAe,KAAKC,IAAL,CAAUF,IAAV,CAAe,IAAf,EAAqB,MAArB,CAAf;EACAZ,GAAG,CAACa,EAAJ,CAAO,KAAP,EAAc,KAAKC,IAAL,CAAUF,IAAV,CAAe,IAAf,EAAqB,KAArB,CAAd;EACAZ,GAAG,CAACa,EAAJ,CAAO,OAAP,EAAgB,KAAKC,IAAL,CAAUF,IAAV,CAAe,IAAf,EAAqB,OAArB,CAAhB;EACAZ,GAAG,CAACa,EAAJ,CAAO,OAAP,EAAgB,KAAKC,IAAL,CAAUF,IAAV,CAAe,IAAf,EAAqB,OAArB,CAAhB;AACD,C,CAED;AACA;;;AACAG,MAAM,CAACC,cAAP,CAAsBnB,QAAQ,CAACoB,SAA/B,EAA0C,MAA1C,EAAkD;EAChDC,GADgD,iBAC1C;IACJ,OAAO,KAAKC,KAAL,KAAeC,SAAf,GACH,KAAKD,KADF,GAEH,KAAKnB,GAAL,CAASqB,IAAT,KAAkBD,SAAlB,GACA,KAAKpB,GAAL,CAASqB,IADT,GAEA,EAJJ;EAKD,CAP+C;EAQhDC,GARgD,eAQ5CC,KAR4C,EAQrC;IACT,KAAKJ,KAAL,GAAaI,KAAb;EACD;AAV+C,CAAlD;AAaA;AACA;AACA;;AAEAjC,IAAI,CAACkC,QAAL,CAAc3B,QAAd,EAAwBL,MAAxB;AAEAE,KAAK,CAACG,QAAQ,CAACoB,SAAV,EAAqBxB,YAAY,CAACwB,SAAlC,CAAL;AAEA;AACA;AACA;;AAEApB,QAAQ,CAACoB,SAAT,CAAmBQ,OAAnB,GAA6B,UAAUC,KAAV,EAAiB;EAC5C,KAAK1B,GAAL,CAASyB,OAAT,CAAiBC,KAAjB;AACD,CAFD;AAIA;AACA;AACA;;;AAEA7B,QAAQ,CAACoB,SAAT,CAAmBU,KAAnB,GAA
2B,YAAY;EACrC,KAAK3B,GAAL,CAAS2B,KAAT;AACD,CAFD;AAIA;AACA;AACA;;;AAEA9B,QAAQ,CAACoB,SAAT,CAAmBW,MAAnB,GAA4B,YAAY;EACtC,KAAK5B,GAAL,CAAS4B,MAAT;AACD,CAFD;AAIA;AACA;AACA;AACA;AACA;AACA;;;AAEA/B,QAAQ,CAACoB,SAAT,CAAmBY,OAAnB,GAA6B,YAAY;EACvC,IAAQ5B,GAAR,GAAgB,IAAhB,CAAQA,GAAR;EACA,IAAQ6B,MAAR,GAAmB7B,GAAnB,CAAQ6B,MAAR;EACA,IAAQC,IAAR,GAAiB9B,GAAjB,CAAQ8B,IAAR;EAEA,IAAMC,OAAO,oBAAaF,MAAb,cAAuBC,IAAvB,eAAgC,KAAKE,MAArC,MAAb;EACA,IAAMP,KAAK,GAAG,IAAIQ,KAAJ,CAAUF,OAAV,CAAd;EACAN,KAAK,CAACO,MAAN,GAAe,KAAKA,MAApB;EACAP,KAAK,CAACxB,IAAN,GAAa,KAAKA,IAAlB;EACAwB,KAAK,CAACI,MAAN,GAAeA,MAAf;EACAJ,KAAK,CAACK,IAAN,GAAaA,IAAb;EAEA,OAAOL,KAAP;AACD,CAbD;;AAeA7B,QAAQ,CAACoB,SAAT,CAAmBkB,mBAAnB,GAAyC,UAAUF,MAAV,EAAkB;EACzDG,OAAO,CAACC,IAAR,CAAa,2DAAb;EACA,OAAO,KAAK7B,oBAAL,CAA0ByB,MAA1B,CAAP;AACD,CAHD;AAKA;AACA;AACA;AACA;AACA;AACA;;;AAEApC,QAAQ,CAACoB,SAAT,CAAmBqB,MAAnB,GAA4B,YAAY;EACtC,OAAO;IACLrC,GAAG,EAAE,KAAKH,OAAL,CAAawC,MAAb,EADA;IAEL/B,MAAM,EAAE,KAAKA,MAFR;IAGL0B,MAAM,EAAE,KAAKA,MAHR;IAIL/B,IAAI,EAAE,KAAKA;EAJN,CAAP;AAMD,CAPD"} \ No newline at end of file diff --git a/node_modules/superagent/lib/node/unzip.js b/node_modules/superagent/lib/node/unzip.js new file mode 100644 index 0000000..e304f60 --- /dev/null +++ b/node_modules/superagent/lib/node/unzip.js @@ -0,0 +1,72 @@ +"use strict"; + +/** + * Module dependencies. + */ +var _require = require('string_decoder'), + StringDecoder = _require.StringDecoder; + +var Stream = require('stream'); + +var zlib = require('zlib'); +/** + * Buffers response data events and re-emits when they're unzipped. + * + * @param {Request} req + * @param {Response} res + * @api private + */ + + +exports.unzip = function (request, res) { + var unzip = zlib.createUnzip(); + var stream = new Stream(); + var decoder; // make node responseOnEnd() happy + + stream.req = request; + unzip.on('error', function (error) { + if (error && error.code === 'Z_BUF_ERROR') { + // unexpected end of file is ignored by browsers and curl + stream.emit('end'); + return; + } + + stream.emit('error', error); + }); // pipe to unzip + + res.pipe(unzip); // override `setEncoding` to capture encoding + + res.setEncoding = function (type) { + decoder = new StringDecoder(type); + }; // decode upon decompressing with captured encoding + + + unzip.on('data', function (buf) { + if (decoder) { + var string_ = decoder.write(buf); + if (string_.length > 0) stream.emit('data', string_); + } else { + stream.emit('data', buf); + } + }); + unzip.on('end', function () { + stream.emit('end'); + }); // override `on` to capture data listeners + + var _on = res.on; + + res.on = function (type, fn) { + if (type === 'data' || type === 'end') { + stream.on(type, fn.bind(res)); + } else if (type === 'error') { + stream.on(type, fn.bind(res)); + + _on.call(res, type, fn); + } else { + _on.call(res, type, fn); + } + + return this; + }; +}; +//# 
sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJuYW1lcyI6WyJyZXF1aXJlIiwiU3RyaW5nRGVjb2RlciIsIlN0cmVhbSIsInpsaWIiLCJleHBvcnRzIiwidW56aXAiLCJyZXF1ZXN0IiwicmVzIiwiY3JlYXRlVW56aXAiLCJzdHJlYW0iLCJkZWNvZGVyIiwicmVxIiwib24iLCJlcnJvciIsImNvZGUiLCJlbWl0IiwicGlwZSIsInNldEVuY29kaW5nIiwidHlwZSIsImJ1ZiIsInN0cmluZ18iLCJ3cml0ZSIsImxlbmd0aCIsIl9vbiIsImZuIiwiYmluZCIsImNhbGwiXSwic291cmNlcyI6WyIuLi8uLi9zcmMvbm9kZS91bnppcC5qcyJdLCJzb3VyY2VzQ29udGVudCI6WyIvKipcbiAqIE1vZHVsZSBkZXBlbmRlbmNpZXMuXG4gKi9cblxuY29uc3QgeyBTdHJpbmdEZWNvZGVyIH0gPSByZXF1aXJlKCdzdHJpbmdfZGVjb2RlcicpO1xuY29uc3QgU3RyZWFtID0gcmVxdWlyZSgnc3RyZWFtJyk7XG5jb25zdCB6bGliID0gcmVxdWlyZSgnemxpYicpO1xuXG4vKipcbiAqIEJ1ZmZlcnMgcmVzcG9uc2UgZGF0YSBldmVudHMgYW5kIHJlLWVtaXRzIHdoZW4gdGhleSdyZSB1bnppcHBlZC5cbiAqXG4gKiBAcGFyYW0ge1JlcXVlc3R9IHJlcVxuICogQHBhcmFtIHtSZXNwb25zZX0gcmVzXG4gKiBAYXBpIHByaXZhdGVcbiAqL1xuXG5leHBvcnRzLnVuemlwID0gKHJlcXVlc3QsIHJlcykgPT4ge1xuICBjb25zdCB1bnppcCA9IHpsaWIuY3JlYXRlVW56aXAoKTtcbiAgY29uc3Qgc3RyZWFtID0gbmV3IFN0cmVhbSgpO1xuICBsZXQgZGVjb2RlcjtcblxuICAvLyBtYWtlIG5vZGUgcmVzcG9uc2VPbkVuZCgpIGhhcHB5XG4gIHN0cmVhbS5yZXEgPSByZXF1ZXN0O1xuXG4gIHVuemlwLm9uKCdlcnJvcicsIChlcnJvcikgPT4ge1xuICAgIGlmIChlcnJvciAmJiBlcnJvci5jb2RlID09PSAnWl9CVUZfRVJST1InKSB7XG4gICAgICAvLyB1bmV4cGVjdGVkIGVuZCBvZiBmaWxlIGlzIGlnbm9yZWQgYnkgYnJvd3NlcnMgYW5kIGN1cmxcbiAgICAgIHN0cmVhbS5lbWl0KCdlbmQnKTtcbiAgICAgIHJldHVybjtcbiAgICB9XG5cbiAgICBzdHJlYW0uZW1pdCgnZXJyb3InLCBlcnJvcik7XG4gIH0pO1xuXG4gIC8vIHBpcGUgdG8gdW56aXBcbiAgcmVzLnBpcGUodW56aXApO1xuXG4gIC8vIG92ZXJyaWRlIGBzZXRFbmNvZGluZ2AgdG8gY2FwdHVyZSBlbmNvZGluZ1xuICByZXMuc2V0RW5jb2RpbmcgPSAodHlwZSkgPT4ge1xuICAgIGRlY29kZXIgPSBuZXcgU3RyaW5nRGVjb2Rlcih0eXBlKTtcbiAgfTtcblxuICAvLyBkZWNvZGUgdXBvbiBkZWNvbXByZXNzaW5nIHdpdGggY2FwdHVyZWQgZW5jb2RpbmdcbiAgdW56aXAub24oJ2RhdGEnLCAoYnVmKSA9PiB7XG4gICAgaWYgKGRlY29kZXIpIHtcbiAgICAgIGNvbnN0IHN0cmluZ18gPSBkZWNvZGVyLndyaXRlKGJ1Zik7XG4gICAgICBpZiAoc3RyaW5nXy5sZW5ndGggPiAwKSBzdHJlYW0uZW1pdCgnZGF0YScsIHN0cmluZ18pO1xuICAgIH0gZWxzZSB7XG4gICAgICBzdHJlYW0uZW1pdCgnZGF0YScsIGJ1Zik7XG4gICAgfVxuICB9KTtcblxuICB1bnppcC5vbignZW5kJywgKCkgPT4ge1xuICAgIHN0cmVhbS5lbWl0KCdlbmQnKTtcbiAgfSk7XG5cbiAgLy8gb3ZlcnJpZGUgYG9uYCB0byBjYXB0dXJlIGRhdGEgbGlzdGVuZXJzXG4gIGNvbnN0IF9vbiA9IHJlcy5vbjtcbiAgcmVzLm9uID0gZnVuY3Rpb24gKHR5cGUsIGZuKSB7XG4gICAgaWYgKHR5cGUgPT09ICdkYXRhJyB8fCB0eXBlID09PSAnZW5kJykge1xuICAgICAgc3RyZWFtLm9uKHR5cGUsIGZuLmJpbmQocmVzKSk7XG4gICAgfSBlbHNlIGlmICh0eXBlID09PSAnZXJyb3InKSB7XG4gICAgICBzdHJlYW0ub24odHlwZSwgZm4uYmluZChyZXMpKTtcbiAgICAgIF9vbi5jYWxsKHJlcywgdHlwZSwgZm4pO1xuICAgIH0gZWxzZSB7XG4gICAgICBfb24uY2FsbChyZXMsIHR5cGUsIGZuKTtcbiAgICB9XG5cbiAgICByZXR1cm4gdGhpcztcbiAgfTtcbn07XG4iXSwibWFwcGluZ3MiOiI7O0FBQUE7QUFDQTtBQUNBO0FBRUEsZUFBMEJBLE9BQU8sQ0FBQyxnQkFBRCxDQUFqQztBQUFBLElBQVFDLGFBQVIsWUFBUUEsYUFBUjs7QUFDQSxJQUFNQyxNQUFNLEdBQUdGLE9BQU8sQ0FBQyxRQUFELENBQXRCOztBQUNBLElBQU1HLElBQUksR0FBR0gsT0FBTyxDQUFDLE1BQUQsQ0FBcEI7QUFFQTtBQUNBO0FBQ0E7QUFDQTtBQUNBO0FBQ0E7QUFDQTs7O0FBRUFJLE9BQU8sQ0FBQ0MsS0FBUixHQUFnQixVQUFDQyxPQUFELEVBQVVDLEdBQVYsRUFBa0I7RUFDaEMsSUFBTUYsS0FBSyxHQUFHRixJQUFJLENBQUNLLFdBQUwsRUFBZDtFQUNBLElBQU1DLE1BQU0sR0FBRyxJQUFJUCxNQUFKLEVBQWY7RUFDQSxJQUFJUSxPQUFKLENBSGdDLENBS2hDOztFQUNBRCxNQUFNLENBQUNFLEdBQVAsR0FBYUwsT0FBYjtFQUVBRCxLQUFLLENBQUNPLEVBQU4sQ0FBUyxPQUFULEVBQWtCLFVBQUNDLEtBQUQsRUFBVztJQUMzQixJQUFJQSxLQUFLLElBQUlBLEtBQUssQ0FBQ0MsSUFBTixLQUFlLGFBQTVCLEVBQTJDO01BQ3pDO01BQ0FMLE1BQU0sQ0FBQ00sSUFBUCxDQUFZLEtBQVo7TUFDQTtJQUNEOztJQUVETixNQUFNLENBQUNNLElBQVAsQ0FBWSxPQUFaLEVBQXFCRixLQUFyQjtFQUNELENBUkQsRUFSZ0MsQ0FrQmhDOztFQUNBTixHQUFHLENBQUNTLElBQUosQ0FBU1gsS0FBVCxFQW5CZ0MsQ0F
xQmhDOztFQUNBRSxHQUFHLENBQUNVLFdBQUosR0FBa0IsVUFBQ0MsSUFBRCxFQUFVO0lBQzFCUixPQUFPLEdBQUcsSUFBSVQsYUFBSixDQUFrQmlCLElBQWxCLENBQVY7RUFDRCxDQUZELENBdEJnQyxDQTBCaEM7OztFQUNBYixLQUFLLENBQUNPLEVBQU4sQ0FBUyxNQUFULEVBQWlCLFVBQUNPLEdBQUQsRUFBUztJQUN4QixJQUFJVCxPQUFKLEVBQWE7TUFDWCxJQUFNVSxPQUFPLEdBQUdWLE9BQU8sQ0FBQ1csS0FBUixDQUFjRixHQUFkLENBQWhCO01BQ0EsSUFBSUMsT0FBTyxDQUFDRSxNQUFSLEdBQWlCLENBQXJCLEVBQXdCYixNQUFNLENBQUNNLElBQVAsQ0FBWSxNQUFaLEVBQW9CSyxPQUFwQjtJQUN6QixDQUhELE1BR087TUFDTFgsTUFBTSxDQUFDTSxJQUFQLENBQVksTUFBWixFQUFvQkksR0FBcEI7SUFDRDtFQUNGLENBUEQ7RUFTQWQsS0FBSyxDQUFDTyxFQUFOLENBQVMsS0FBVCxFQUFnQixZQUFNO0lBQ3BCSCxNQUFNLENBQUNNLElBQVAsQ0FBWSxLQUFaO0VBQ0QsQ0FGRCxFQXBDZ0MsQ0F3Q2hDOztFQUNBLElBQU1RLEdBQUcsR0FBR2hCLEdBQUcsQ0FBQ0ssRUFBaEI7O0VBQ0FMLEdBQUcsQ0FBQ0ssRUFBSixHQUFTLFVBQVVNLElBQVYsRUFBZ0JNLEVBQWhCLEVBQW9CO0lBQzNCLElBQUlOLElBQUksS0FBSyxNQUFULElBQW1CQSxJQUFJLEtBQUssS0FBaEMsRUFBdUM7TUFDckNULE1BQU0sQ0FBQ0csRUFBUCxDQUFVTSxJQUFWLEVBQWdCTSxFQUFFLENBQUNDLElBQUgsQ0FBUWxCLEdBQVIsQ0FBaEI7SUFDRCxDQUZELE1BRU8sSUFBSVcsSUFBSSxLQUFLLE9BQWIsRUFBc0I7TUFDM0JULE1BQU0sQ0FBQ0csRUFBUCxDQUFVTSxJQUFWLEVBQWdCTSxFQUFFLENBQUNDLElBQUgsQ0FBUWxCLEdBQVIsQ0FBaEI7O01BQ0FnQixHQUFHLENBQUNHLElBQUosQ0FBU25CLEdBQVQsRUFBY1csSUFBZCxFQUFvQk0sRUFBcEI7SUFDRCxDQUhNLE1BR0E7TUFDTEQsR0FBRyxDQUFDRyxJQUFKLENBQVNuQixHQUFULEVBQWNXLElBQWQsRUFBb0JNLEVBQXBCO0lBQ0Q7O0lBRUQsT0FBTyxJQUFQO0VBQ0QsQ0FYRDtBQVlELENBdEREIn0= \ No newline at end of file diff --git a/node_modules/superagent/lib/request-base.js b/node_modules/superagent/lib/request-base.js new file mode 100644 index 0000000..c413c3d --- /dev/null +++ b/node_modules/superagent/lib/request-base.js @@ -0,0 +1,793 @@ +"use strict"; + +function _typeof(obj) { "@babel/helpers - typeof"; return _typeof = "function" == typeof Symbol && "symbol" == typeof Symbol.iterator ? function (obj) { return typeof obj; } : function (obj) { return obj && "function" == typeof Symbol && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }, _typeof(obj); } + +var semver = require('semver'); +/** + * Module of mixed-in functions shared between node and client code + */ + + +var _require = require('./utils'), + isObject = _require.isObject, + hasOwn = _require.hasOwn; +/** + * Expose `RequestBase`. + */ + + +module.exports = RequestBase; +/** + * Initialize a new `RequestBase`. + * + * @api public + */ + +function RequestBase() {} +/** + * Clear previous timeout. + * + * @return {Request} for chaining + * @api public + */ + + +RequestBase.prototype.clearTimeout = function () { + clearTimeout(this._timer); + clearTimeout(this._responseTimeoutTimer); + clearTimeout(this._uploadTimeoutTimer); + delete this._timer; + delete this._responseTimeoutTimer; + delete this._uploadTimeoutTimer; + return this; +}; +/** + * Override default response body parser + * + * This function will be called to convert incoming data into request.body + * + * @param {Function} + * @api public + */ + + +RequestBase.prototype.parse = function (fn) { + this._parser = fn; + return this; +}; +/** + * Set format of binary response body. + * In browser valid formats are 'blob' and 'arraybuffer', + * which return Blob and ArrayBuffer, respectively. + * + * In Node all values result in Buffer. 
+ * + * Examples: + * + * req.get('/') + * .responseType('blob') + * .end(callback); + * + * @param {String} val + * @return {Request} for chaining + * @api public + */ + + +RequestBase.prototype.responseType = function (value) { + this._responseType = value; + return this; +}; +/** + * Override default request body serializer + * + * This function will be called to convert data set via .send or .attach into payload to send + * + * @param {Function} + * @api public + */ + + +RequestBase.prototype.serialize = function (fn) { + this._serializer = fn; + return this; +}; +/** + * Set timeouts. + * + * - response timeout is time between sending request and receiving the first byte of the response. Includes DNS and connection time. + * - deadline is the time from start of the request to receiving response body in full. If the deadline is too short large files may not load at all on slow connections. + * - upload is the time since last bit of data was sent or received. This timeout works only if deadline timeout is off + * + * Value of 0 or false means no timeout. + * + * @param {Number|Object} ms or {response, deadline} + * @return {Request} for chaining + * @api public + */ + + +RequestBase.prototype.timeout = function (options) { + if (!options || _typeof(options) !== 'object') { + this._timeout = options; + this._responseTimeout = 0; + this._uploadTimeout = 0; + return this; + } + + for (var option in options) { + if (hasOwn(options, option)) { + switch (option) { + case 'deadline': + this._timeout = options.deadline; + break; + + case 'response': + this._responseTimeout = options.response; + break; + + case 'upload': + this._uploadTimeout = options.upload; + break; + + default: + console.warn('Unknown timeout option', option); + } + } + } + + return this; +}; +/** + * Set number of retry attempts on error. + * + * Failed requests will be retried 'count' times if timeout or err.code >= 500. + * + * @param {Number} count + * @param {Function} [fn] + * @return {Request} for chaining + * @api public + */ + + +RequestBase.prototype.retry = function (count, fn) { + // Default to 1 if no count passed or true + if (arguments.length === 0 || count === true) count = 1; + if (count <= 0) count = 0; + this._maxRetries = count; + this._retries = 0; + this._retryCallback = fn; + return this; +}; // +// NOTE: we do not include ESOCKETTIMEDOUT because that is from `request` package +// +// +// NOTE: we do not include EADDRINFO because it was removed from libuv in 2014 +// +// +// +// +// TODO: expose these as configurable defaults +// + + +var ERROR_CODES = new Set(['ETIMEDOUT', 'ECONNRESET', 'EADDRINUSE', 'ECONNREFUSED', 'EPIPE', 'ENOTFOUND', 'ENETUNREACH', 'EAI_AGAIN']); +var STATUS_CODES = new Set([408, 413, 429, 500, 502, 503, 504, 521, 522, 524]); // TODO: we would need to make this easily configurable before adding it in (e.g. some might want to add POST) +// const METHODS = new Set(['GET', 'PUT', 'HEAD', 'DELETE', 'OPTIONS', 'TRACE']); + +/** + * Determine if a request should be retried. 
+ * (Inspired by https://github.com/sindresorhus/got#retry) + * + * @param {Error} err an error + * @param {Response} [res] response + * @returns {Boolean} if segment should be retried + */ + +RequestBase.prototype._shouldRetry = function (error, res) { + if (!this._maxRetries || this._retries++ >= this._maxRetries) { + return false; + } + + if (this._retryCallback) { + try { + var override = this._retryCallback(error, res); + + if (override === true) return true; + if (override === false) return false; // undefined falls back to defaults + } catch (err) { + console.error(err); + } + } // TODO: we would need to make this easily configurable before adding it in (e.g. some might want to add POST) + + /* + if ( + this.req && + this.req.method && + !METHODS.has(this.req.method.toUpperCase()) + ) + return false; + */ + + + if (res && res.status && STATUS_CODES.has(res.status)) return true; + + if (error) { + if (error.code && ERROR_CODES.has(error.code)) return true; // Superagent timeout + + if (error.timeout && error.code === 'ECONNABORTED') return true; + if (error.crossDomain) return true; + } + + return false; +}; +/** + * Retry request + * + * @return {Request} for chaining + * @api private + */ + + +RequestBase.prototype._retry = function () { + this.clearTimeout(); // node + + if (this.req) { + this.req = null; + this.req = this.request(); + } + + this._aborted = false; + this.timedout = false; + this.timedoutError = null; + return this._end(); +}; +/** + * Promise support + * + * @param {Function} resolve + * @param {Function} [reject] + * @return {Request} + */ + + +RequestBase.prototype.then = function (resolve, reject) { + var _this = this; + + if (!this._fullfilledPromise) { + var self = this; + + if (this._endCalled) { + console.warn('Warning: superagent request was sent twice, because both .end() and .then() were called. Never call .end() if you use promises'); + } + + this._fullfilledPromise = new Promise(function (resolve, reject) { + self.on('abort', function () { + if (_this._maxRetries && _this._maxRetries > _this._retries) { + return; + } + + if (_this.timedout && _this.timedoutError) { + reject(_this.timedoutError); + return; + } + + var error = new Error('Aborted'); + error.code = 'ABORTED'; + error.status = _this.status; + error.method = _this.method; + error.url = _this.url; + reject(error); + }); + self.end(function (error, res) { + if (error) reject(error);else resolve(res); + }); + }); + } + + return this._fullfilledPromise.then(resolve, reject); +}; + +RequestBase.prototype.catch = function (callback) { + return this.then(undefined, callback); +}; +/** + * Allow for extension + */ + + +RequestBase.prototype.use = function (fn) { + fn(this); + return this; +}; + +RequestBase.prototype.ok = function (callback) { + if (typeof callback !== 'function') throw new Error('Callback required'); + this._okCallback = callback; + return this; +}; + +RequestBase.prototype._isResponseOK = function (res) { + if (!res) { + return false; + } + + if (this._okCallback) { + return this._okCallback(res); + } + + return res.status >= 200 && res.status < 300; +}; +/** + * Get request header `field`. + * Case-insensitive. + * + * @param {String} field + * @return {String} + * @api public + */ + + +RequestBase.prototype.get = function (field) { + return this._header[field.toLowerCase()]; +}; +/** + * Get case-insensitive header `field` value. + * This is a deprecated internal API. Use `.get(field)` instead. 
+ * + * (getHeader is no longer used internally by the superagent code base) + * + * @param {String} field + * @return {String} + * @api private + * @deprecated + */ + + +RequestBase.prototype.getHeader = RequestBase.prototype.get; +/** + * Set header `field` to `val`, or multiple fields with one object. + * Case-insensitive. + * + * Examples: + * + * req.get('/') + * .set('Accept', 'application/json') + * .set('X-API-Key', 'foobar') + * .end(callback); + * + * req.get('/') + * .set({ Accept: 'application/json', 'X-API-Key': 'foobar' }) + * .end(callback); + * + * @param {String|Object} field + * @param {String} val + * @return {Request} for chaining + * @api public + */ + +RequestBase.prototype.set = function (field, value) { + if (isObject(field)) { + for (var key in field) { + if (hasOwn(field, key)) this.set(key, field[key]); + } + + return this; + } + + this._header[field.toLowerCase()] = value; + this.header[field] = value; + return this; +}; +/** + * Remove header `field`. + * Case-insensitive. + * + * Example: + * + * req.get('/') + * .unset('User-Agent') + * .end(callback); + * + * @param {String} field field name + */ + + +RequestBase.prototype.unset = function (field) { + delete this._header[field.toLowerCase()]; + delete this.header[field]; + return this; +}; +/** + * Write the field `name` and `val`, or multiple fields with one object + * for "multipart/form-data" request bodies. + * + * ``` js + * request.post('/upload') + * .field('foo', 'bar') + * .end(callback); + * + * request.post('/upload') + * .field({ foo: 'bar', baz: 'qux' }) + * .end(callback); + * ``` + * + * @param {String|Object} name name of field + * @param {String|Blob|File|Buffer|fs.ReadStream} val value of field + * @param {String} options extra options, e.g. 'blob' + * @return {Request} for chaining + * @api public + */ + + +RequestBase.prototype.field = function (name, value, options) { + // name should be either a string or an object. + if (name === null || undefined === name) { + throw new Error('.field(name, val) name can not be empty'); + } + + if (this._data) { + throw new Error(".field() can't be used if .send() is used. Please use only .send() or only .field() & .attach()"); + } + + if (isObject(name)) { + for (var key in name) { + if (hasOwn(name, key)) this.field(key, name[key]); + } + + return this; + } + + if (Array.isArray(value)) { + for (var i in value) { + if (hasOwn(value, i)) this.field(name, value[i]); + } + + return this; + } // val should be defined now + + + if (value === null || undefined === value) { + throw new Error('.field(name, val) val can not be empty'); + } + + if (typeof value === 'boolean') { + value = String(value); + } //fix https://github.com/visionmedia/superagent/issues/1680 + + + if (options) this._getFormData().append(name, value, options);else this._getFormData().append(name, value); + return this; +}; +/** + * Abort the request, and clear potential timeout. 
+ * + * @return {Request} request + * @api public + */ + + +RequestBase.prototype.abort = function () { + if (this._aborted) { + return this; + } + + this._aborted = true; + if (this.xhr) this.xhr.abort(); // browser + + if (this.req) { + // Node v13 has major differences in `abort()` + // https://github.com/nodejs/node/blob/v12.x/lib/internal/streams/end-of-stream.js + // https://github.com/nodejs/node/blob/v13.x/lib/internal/streams/end-of-stream.js + // https://github.com/nodejs/node/blob/v14.x/lib/internal/streams/end-of-stream.js + // (if you run a diff across these you will see the differences) + // + // References: + // + // + // + // Thanks to @shadowgate15 and @niftylettuce + if (semver.gte(process.version, 'v13.0.0') && semver.lt(process.version, 'v14.0.0')) { + // Note that the reason this doesn't work is because in v13 as compared to v14 + // there is no `callback = nop` set in end-of-stream.js above + throw new Error('Superagent does not work in v13 properly with abort() due to Node.js core changes'); + } else if (semver.gte(process.version, 'v14.0.0')) { + // We have to manually set `destroyed` to `true` in order for this to work + // (see core internals of end-of-stream.js above in v14 branch as compared to v12) + this.req.destroyed = true; + } + + this.req.abort(); // node + } + + this.clearTimeout(); + this.emit('abort'); + return this; +}; + +RequestBase.prototype._auth = function (user, pass, options, base64Encoder) { + switch (options.type) { + case 'basic': + this.set('Authorization', "Basic ".concat(base64Encoder("".concat(user, ":").concat(pass)))); + break; + + case 'auto': + this.username = user; + this.password = pass; + break; + + case 'bearer': + // usage would be .auth(accessToken, { type: 'bearer' }) + this.set('Authorization', "Bearer ".concat(user)); + break; + + default: + break; + } + + return this; +}; +/** + * Enable transmission of cookies with x-domain requests. + * + * Note that for this to work the origin must not be + * using "Access-Control-Allow-Origin" with a wildcard, + * and also must set "Access-Control-Allow-Credentials" + * to "true". + * + * @api public + */ + + +RequestBase.prototype.withCredentials = function (on) { + // This is browser-only functionality. Node side is no-op. + if (on === undefined) on = true; + this._withCredentials = on; + return this; +}; +/** + * Set the max redirects to `n`. Does nothing in browser XHR implementation. + * + * @param {Number} n + * @return {Request} for chaining + * @api public + */ + + +RequestBase.prototype.redirects = function (n) { + this._maxRedirects = n; + return this; +}; +/** + * Maximum size of buffered response body, in bytes. Counts uncompressed size. + * Default 200MB. + * + * @param {Number} n number of bytes + * @return {Request} for chaining + */ + + +RequestBase.prototype.maxResponseSize = function (n) { + if (typeof n !== 'number') { + throw new TypeError('Invalid argument'); + } + + this._maxResponseSize = n; + return this; +}; +/** + * Convert to a plain javascript object (not JSON string) of scalar properties. + * Note as this method is designed to return a useful non-this value, + * it cannot be chained. + * + * @return {Object} describing method, url, and data of this request + * @api public + */ + + +RequestBase.prototype.toJSON = function () { + return { + method: this.method, + url: this.url, + data: this._data, + headers: this._header + }; +}; +/** + * Send `data` as the request body, defaulting the `.type()` to "json" when + * an object is given. 
+ * + * Examples: + * + * // manual json + * request.post('/user') + * .type('json') + * .send('{"name":"tj"}') + * .end(callback) + * + * // auto json + * request.post('/user') + * .send({ name: 'tj' }) + * .end(callback) + * + * // manual x-www-form-urlencoded + * request.post('/user') + * .type('form') + * .send('name=tj') + * .end(callback) + * + * // auto x-www-form-urlencoded + * request.post('/user') + * .type('form') + * .send({ name: 'tj' }) + * .end(callback) + * + * // defaults to x-www-form-urlencoded + * request.post('/user') + * .send('name=tobi') + * .send('species=ferret') + * .end(callback) + * + * @param {String|Object} data + * @return {Request} for chaining + * @api public + */ +// eslint-disable-next-line complexity + + +RequestBase.prototype.send = function (data) { + var isObject_ = isObject(data); + var type = this._header['content-type']; + + if (this._formData) { + throw new Error(".send() can't be used if .attach() or .field() is used. Please use only .send() or only .field() & .attach()"); + } + + if (isObject_ && !this._data) { + if (Array.isArray(data)) { + this._data = []; + } else if (!this._isHost(data)) { + this._data = {}; + } + } else if (data && this._data && this._isHost(this._data)) { + throw new Error("Can't merge these send calls"); + } // merge + + + if (isObject_ && isObject(this._data)) { + for (var key in data) { + if (hasOwn(data, key)) this._data[key] = data[key]; + } + } else if (typeof data === 'string') { + // default to x-www-form-urlencoded + if (!type) this.type('form'); + type = this._header['content-type']; + if (type) type = type.toLowerCase().trim(); + + if (type === 'application/x-www-form-urlencoded') { + this._data = this._data ? "".concat(this._data, "&").concat(data) : data; + } else { + this._data = (this._data || '') + data; + } + } else { + this._data = data; + } + + if (!isObject_ || this._isHost(data)) { + return this; + } // default to json + + + if (!type) this.type('json'); + return this; +}; +/** + * Sort `querystring` by the sort function + * + * + * Examples: + * + * // default order + * request.get('/user') + * .query('name=Nick') + * .query('search=Manny') + * .sortQuery() + * .end(callback) + * + * // customized sort function + * request.get('/user') + * .query('name=Nick') + * .query('search=Manny') + * .sortQuery(function(a, b){ + * return a.length - b.length; + * }) + * .end(callback) + * + * + * @param {Function} sort + * @return {Request} for chaining + * @api public + */ + + +RequestBase.prototype.sortQuery = function (sort) { + // _sort default to true but otherwise can be a function or boolean + this._sort = typeof sort === 'undefined' ? true : sort; + return this; +}; +/** + * Compose querystring to append to req.url + * + * @api private + */ + + +RequestBase.prototype._finalizeQueryString = function () { + var query = this._query.join('&'); + + if (query) { + this.url += (this.url.includes('?') ? '&' : '?') + query; + } + + this._query.length = 0; // Makes the call idempotent + + if (this._sort) { + var index = this.url.indexOf('?'); + + if (index >= 0) { + var queryArray = this.url.slice(index + 1).split('&'); + + if (typeof this._sort === 'function') { + queryArray.sort(this._sort); + } else { + queryArray.sort(); + } + + this.url = this.url.slice(0, index) + '?' + queryArray.join('&'); + } + } +}; // For backwards compat only + + +RequestBase.prototype._appendQueryString = function () { + console.warn('Unsupported'); +}; +/** + * Invoke callback with timeout error. 
+ * + * @api private + */ + + +RequestBase.prototype._timeoutError = function (reason, timeout, errno) { + if (this._aborted) { + return; + } + + var error = new Error("".concat(reason + timeout, "ms exceeded")); + error.timeout = timeout; + error.code = 'ECONNABORTED'; + error.errno = errno; + this.timedout = true; + this.timedoutError = error; + this.abort(); + this.callback(error); +}; + +RequestBase.prototype._setTimeouts = function () { + var self = this; // deadline + + if (this._timeout && !this._timer) { + this._timer = setTimeout(function () { + self._timeoutError('Timeout of ', self._timeout, 'ETIME'); + }, this._timeout); + } // response timeout + + + if (this._responseTimeout && !this._responseTimeoutTimer) { + this._responseTimeoutTimer = setTimeout(function () { + self._timeoutError('Response timeout of ', self._responseTimeout, 'ETIMEDOUT'); + }, this._responseTimeout); + } +}; +//# sourceMappingURL=data:application/json;charset=utf-8;base64,{"version":3,"names":["semver","require","isObject","hasOwn","module","exports","RequestBase","prototype","clearTimeout","_timer","_responseTimeoutTimer","_uploadTimeoutTimer","parse","fn","_parser","responseType","value","_responseType","serialize","_serializer","timeout","options","_timeout","_responseTimeout","_uploadTimeout","option","deadline","response","upload","console","warn","retry","count","arguments","length","_maxRetries","_retries","_retryCallback","ERROR_CODES","Set","STATUS_CODES","_shouldRetry","error","res","override","err","status","has","code","crossDomain","_retry","req","request","_aborted","timedout","timedoutError","_end","then","resolve","reject","_fullfilledPromise","self","_endCalled","Promise","on","Error","method","url","end","catch","callback","undefined","use","ok","_okCallback","_isResponseOK","get","field","_header","toLowerCase","getHeader","set","key","header","unset","name","_data","Array","isArray","i","String","_getFormData","append","abort","xhr","gte","process","version","lt","destroyed","emit","_auth","user","pass","base64Encoder","type","username","password","withCredentials","_withCredentials","redirects","n","_maxRedirects","maxResponseSize","TypeError","_maxResponseSize","toJSON","data","headers","send","isObject_","_formData","_isHost","trim","sortQuery","sort","_sort","_finalizeQueryString","query","_query","join","includes","index","indexOf","queryArray","slice","split","_appendQueryString","_timeoutError","reason","errno","_setTimeouts","setTimeout"],"sources":["../src/request-base.js"],"sourcesContent":["const semver = require('semver');\n\n/**\n * Module of mixed-in functions shared between node and client code\n */\nconst { isObject, hasOwn } = require('./utils');\n\n/**\n * Expose `RequestBase`.\n */\n\nmodule.exports = RequestBase;\n\n/**\n * Initialize a new `RequestBase`.\n *\n * @api public\n */\n\nfunction RequestBase() { }\n\n/**\n * Clear previous timeout.\n *\n * @return {Request} for chaining\n * @api public\n */\n\nRequestBase.prototype.clearTimeout = function () {\n  clearTimeout(this._timer);\n  clearTimeout(this._responseTimeoutTimer);\n  clearTimeout(this._uploadTimeoutTimer);\n  delete this._timer;\n  delete this._responseTimeoutTimer;\n  delete this._uploadTimeoutTimer;\n  return this;\n};\n\n/**\n * Override default response body parser\n *\n * This function will be called to convert incoming data into request.body\n *\n * @param {Function}\n * @api public\n */\n\nRequestBase.prototype.parse = function (fn) {\n  this._parser = fn;\n  return this;\n};\n\n/**\n * Set 
format of binary response body.\n * In browser valid formats are 'blob' and 'arraybuffer',\n * which return Blob and ArrayBuffer, respectively.\n *\n * In Node all values result in Buffer.\n *\n * Examples:\n *\n *      req.get('/')\n *        .responseType('blob')\n *        .end(callback);\n *\n * @param {String} val\n * @return {Request} for chaining\n * @api public\n */\n\nRequestBase.prototype.responseType = function (value) {\n  this._responseType = value;\n  return this;\n};\n\n/**\n * Override default request body serializer\n *\n * This function will be called to convert data set via .send or .attach into payload to send\n *\n * @param {Function}\n * @api public\n */\n\nRequestBase.prototype.serialize = function (fn) {\n  this._serializer = fn;\n  return this;\n};\n\n/**\n * Set timeouts.\n *\n * - response timeout is time between sending request and receiving the first byte of the response. Includes DNS and connection time.\n * - deadline is the time from start of the request to receiving response body in full. If the deadline is too short large files may not load at all on slow connections.\n * - upload is the time  since last bit of data was sent or received. This timeout works only if deadline timeout is off\n *\n * Value of 0 or false means no timeout.\n *\n * @param {Number|Object} ms or {response, deadline}\n * @return {Request} for chaining\n * @api public\n */\n\nRequestBase.prototype.timeout = function (options) {\n  if (!options || typeof options !== 'object') {\n    this._timeout = options;\n    this._responseTimeout = 0;\n    this._uploadTimeout = 0;\n    return this;\n  }\n\n  for (const option in options) {\n    if (hasOwn(options, option)) {\n      switch (option) {\n        case 'deadline':\n          this._timeout = options.deadline;\n          break;\n        case 'response':\n          this._responseTimeout = options.response;\n          break;\n        case 'upload':\n          this._uploadTimeout = options.upload;\n          break;\n        default:\n          console.warn('Unknown timeout option', option);\n      }\n    }\n  }\n\n  return this;\n};\n\n/**\n * Set number of retry attempts on error.\n *\n * Failed requests will be retried 'count' times if timeout or err.code >= 500.\n *\n * @param {Number} count\n * @param {Function} [fn]\n * @return {Request} for chaining\n * @api public\n */\n\nRequestBase.prototype.retry = function (count, fn) {\n  // Default to 1 if no count passed or true\n  if (arguments.length === 0 || count === true) count = 1;\n  if (count <= 0) count = 0;\n  this._maxRetries = count;\n  this._retries = 0;\n  this._retryCallback = fn;\n  return this;\n};\n\n//\n// NOTE: we do not include ESOCKETTIMEDOUT because that is from `request` package\n//       <https://github.com/sindresorhus/got/pull/537>\n//\n// NOTE: we do not include EADDRINFO because it was removed from libuv in 2014\n//       <https://github.com/libuv/libuv/commit/02e1ebd40b807be5af46343ea873331b2ee4e9c1>\n//       <https://github.com/request/request/search?q=ESOCKETTIMEDOUT&unscoped_q=ESOCKETTIMEDOUT>\n//\n//\n// TODO: expose these as configurable defaults\n//\nconst ERROR_CODES = new Set([\n  'ETIMEDOUT',\n  'ECONNRESET',\n  'EADDRINUSE',\n  'ECONNREFUSED',\n  'EPIPE',\n  'ENOTFOUND',\n  'ENETUNREACH',\n  'EAI_AGAIN'\n]);\n\nconst STATUS_CODES = new Set([\n  408, 413, 429, 500, 502, 503, 504, 521, 522, 524\n]);\n\n// TODO: we would need to make this easily configurable before adding it in (e.g. 
some might want to add POST)\n// const METHODS = new Set(['GET', 'PUT', 'HEAD', 'DELETE', 'OPTIONS', 'TRACE']);\n\n/**\n * Determine if a request should be retried.\n * (Inspired by https://github.com/sindresorhus/got#retry)\n *\n * @param {Error} err an error\n * @param {Response} [res] response\n * @returns {Boolean} if segment should be retried\n */\nRequestBase.prototype._shouldRetry = function (error, res) {\n  if (!this._maxRetries || this._retries++ >= this._maxRetries) {\n    return false;\n  }\n\n  if (this._retryCallback) {\n    try {\n      const override = this._retryCallback(error, res);\n      if (override === true) return true;\n      if (override === false) return false;\n      // undefined falls back to defaults\n    } catch (err) {\n      console.error(err);\n    }\n  }\n\n  // TODO: we would need to make this easily configurable before adding it in (e.g. some might want to add POST)\n  /*\n  if (\n    this.req &&\n    this.req.method &&\n    !METHODS.has(this.req.method.toUpperCase())\n  )\n    return false;\n  */\n  if (res && res.status && STATUS_CODES.has(res.status)) return true;\n  if (error) {\n    if (error.code && ERROR_CODES.has(error.code)) return true;\n    // Superagent timeout\n    if (error.timeout && error.code === 'ECONNABORTED') return true;\n    if (error.crossDomain) return true;\n  }\n\n  return false;\n};\n\n/**\n * Retry request\n *\n * @return {Request} for chaining\n * @api private\n */\n\nRequestBase.prototype._retry = function () {\n  this.clearTimeout();\n\n  // node\n  if (this.req) {\n    this.req = null;\n    this.req = this.request();\n  }\n\n  this._aborted = false;\n  this.timedout = false;\n  this.timedoutError = null;\n\n  return this._end();\n};\n\n/**\n * Promise support\n *\n * @param {Function} resolve\n * @param {Function} [reject]\n * @return {Request}\n */\n\nRequestBase.prototype.then = function (resolve, reject) {\n  if (!this._fullfilledPromise) {\n    const self = this;\n    if (this._endCalled) {\n      console.warn(\n        'Warning: superagent request was sent twice, because both .end() and .then() were called. 
Never call .end() if you use promises'\n      );\n    }\n\n    this._fullfilledPromise = new Promise((resolve, reject) => {\n      self.on('abort', () => {\n        if (this._maxRetries && this._maxRetries > this._retries) {\n          return;\n        }\n\n        if (this.timedout && this.timedoutError) {\n          reject(this.timedoutError);\n          return;\n        }\n\n        const error = new Error('Aborted');\n        error.code = 'ABORTED';\n        error.status = this.status;\n        error.method = this.method;\n        error.url = this.url;\n        reject(error);\n      });\n      self.end((error, res) => {\n        if (error) reject(error);\n        else resolve(res);\n      });\n    });\n  }\n\n  return this._fullfilledPromise.then(resolve, reject);\n};\n\nRequestBase.prototype.catch = function (callback) {\n  return this.then(undefined, callback);\n};\n\n/**\n * Allow for extension\n */\n\nRequestBase.prototype.use = function (fn) {\n  fn(this);\n  return this;\n};\n\nRequestBase.prototype.ok = function (callback) {\n  if (typeof callback !== 'function') throw new Error('Callback required');\n  this._okCallback = callback;\n  return this;\n};\n\nRequestBase.prototype._isResponseOK = function (res) {\n  if (!res) {\n    return false;\n  }\n\n  if (this._okCallback) {\n    return this._okCallback(res);\n  }\n\n  return res.status >= 200 && res.status < 300;\n};\n\n/**\n * Get request header `field`.\n * Case-insensitive.\n *\n * @param {String} field\n * @return {String}\n * @api public\n */\n\nRequestBase.prototype.get = function (field) {\n  return this._header[field.toLowerCase()];\n};\n\n/**\n * Get case-insensitive header `field` value.\n * This is a deprecated internal API. Use `.get(field)` instead.\n *\n * (getHeader is no longer used internally by the superagent code base)\n *\n * @param {String} field\n * @return {String}\n * @api private\n * @deprecated\n */\n\nRequestBase.prototype.getHeader = RequestBase.prototype.get;\n\n/**\n * Set header `field` to `val`, or multiple fields with one object.\n * Case-insensitive.\n *\n * Examples:\n *\n *      req.get('/')\n *        .set('Accept', 'application/json')\n *        .set('X-API-Key', 'foobar')\n *        .end(callback);\n *\n *      req.get('/')\n *        .set({ Accept: 'application/json', 'X-API-Key': 'foobar' })\n *        .end(callback);\n *\n * @param {String|Object} field\n * @param {String} val\n * @return {Request} for chaining\n * @api public\n */\n\nRequestBase.prototype.set = function (field, value) {\n  if (isObject(field)) {\n    for (const key in field) {\n      if (hasOwn(field, key)) this.set(key, field[key]);\n    }\n\n    return this;\n  }\n\n  this._header[field.toLowerCase()] = value;\n  this.header[field] = value;\n  return this;\n};\n\n/**\n * Remove header `field`.\n * Case-insensitive.\n *\n * Example:\n *\n *      req.get('/')\n *        .unset('User-Agent')\n *        .end(callback);\n *\n * @param {String} field field name\n */\nRequestBase.prototype.unset = function (field) {\n  delete this._header[field.toLowerCase()];\n  delete this.header[field];\n  return this;\n};\n\n/**\n * Write the field `name` and `val`, or multiple fields with one object\n * for \"multipart/form-data\" request bodies.\n *\n * ``` js\n * request.post('/upload')\n *   .field('foo', 'bar')\n *   .end(callback);\n *\n * request.post('/upload')\n *   .field({ foo: 'bar', baz: 'qux' })\n *   .end(callback);\n * ```\n *\n * @param {String|Object} name name of field\n * @param 
{String|Blob|File|Buffer|fs.ReadStream} val value of field\n * @param {String} options extra options, e.g. 'blob'\n * @return {Request} for chaining\n * @api public\n */\nRequestBase.prototype.field = function (name, value, options) {\n  // name should be either a string or an object.\n  if (name === null || undefined === name) {\n    throw new Error('.field(name, val) name can not be empty');\n  }\n\n  if (this._data) {\n    throw new Error(\n      \".field() can't be used if .send() is used. Please use only .send() or only .field() & .attach()\"\n    );\n  }\n\n  if (isObject(name)) {\n    for (const key in name) {\n      if (hasOwn(name, key)) this.field(key, name[key]);\n    }\n\n    return this;\n  }\n\n  if (Array.isArray(value)) {\n    for (const i in value) {\n      if (hasOwn(value, i)) this.field(name, value[i]);\n    }\n\n    return this;\n  }\n\n  // val should be defined now\n  if (value === null || undefined === value) {\n    throw new Error('.field(name, val) val can not be empty');\n  }\n\n  if (typeof value === 'boolean') {\n    value = String(value);\n  }\n\n  //fix https://github.com/visionmedia/superagent/issues/1680\n  if (options) this._getFormData().append(name, value, options);\n  else this._getFormData().append(name, value);\n\n  return this;\n};\n\n/**\n * Abort the request, and clear potential timeout.\n *\n * @return {Request} request\n * @api public\n */\nRequestBase.prototype.abort = function () {\n  if (this._aborted) {\n    return this;\n  }\n\n  this._aborted = true;\n  if (this.xhr) this.xhr.abort(); // browser\n  if (this.req) {\n    // Node v13 has major differences in `abort()`\n    // https://github.com/nodejs/node/blob/v12.x/lib/internal/streams/end-of-stream.js\n    // https://github.com/nodejs/node/blob/v13.x/lib/internal/streams/end-of-stream.js\n    // https://github.com/nodejs/node/blob/v14.x/lib/internal/streams/end-of-stream.js\n    // (if you run a diff across these you will see the differences)\n    //\n    // References:\n    // <https://github.com/nodejs/node/issues/31630>\n    // <https://github.com/visionmedia/superagent/pull/1084/commits/dc18679a7c5ccfc6046d882015e5126888973bc8>\n    //\n    // Thanks to @shadowgate15 and @niftylettuce\n    if (\n      semver.gte(process.version, 'v13.0.0') &&\n      semver.lt(process.version, 'v14.0.0')\n    ) {\n      // Note that the reason this doesn't work is because in v13 as compared to v14\n      // there is no `callback = nop` set in end-of-stream.js above\n      throw new Error(\n        'Superagent does not work in v13 properly with abort() due to Node.js core changes'\n      );\n    } else if (semver.gte(process.version, 'v14.0.0')) {\n      // We have to manually set `destroyed` to `true` in order for this to work\n      // (see core internals of end-of-stream.js above in v14 branch as compared to v12)\n      this.req.destroyed = true;\n    }\n\n    this.req.abort(); // node\n  }\n\n  this.clearTimeout();\n  this.emit('abort');\n  return this;\n};\n\nRequestBase.prototype._auth = function (user, pass, options, base64Encoder) {\n  switch (options.type) {\n    case 'basic':\n      this.set('Authorization', `Basic ${base64Encoder(`${user}:${pass}`)}`);\n      break;\n\n    case 'auto':\n      this.username = user;\n      this.password = pass;\n      break;\n\n    case 'bearer': // usage would be .auth(accessToken, { type: 'bearer' })\n      this.set('Authorization', `Bearer ${user}`);\n      break;\n    default:\n      break;\n  }\n\n  return this;\n};\n\n/**\n * Enable transmission of cookies 
with x-domain requests.\n *\n * Note that for this to work the origin must not be\n * using \"Access-Control-Allow-Origin\" with a wildcard,\n * and also must set \"Access-Control-Allow-Credentials\"\n * to \"true\".\n *\n * @api public\n */\n\nRequestBase.prototype.withCredentials = function (on) {\n  // This is browser-only functionality. Node side is no-op.\n  if (on === undefined) on = true;\n  this._withCredentials = on;\n  return this;\n};\n\n/**\n * Set the max redirects to `n`. Does nothing in browser XHR implementation.\n *\n * @param {Number} n\n * @return {Request} for chaining\n * @api public\n */\n\nRequestBase.prototype.redirects = function (n) {\n  this._maxRedirects = n;\n  return this;\n};\n\n/**\n * Maximum size of buffered response body, in bytes. Counts uncompressed size.\n * Default 200MB.\n *\n * @param {Number} n number of bytes\n * @return {Request} for chaining\n */\nRequestBase.prototype.maxResponseSize = function (n) {\n  if (typeof n !== 'number') {\n    throw new TypeError('Invalid argument');\n  }\n\n  this._maxResponseSize = n;\n  return this;\n};\n\n/**\n * Convert to a plain javascript object (not JSON string) of scalar properties.\n * Note as this method is designed to return a useful non-this value,\n * it cannot be chained.\n *\n * @return {Object} describing method, url, and data of this request\n * @api public\n */\n\nRequestBase.prototype.toJSON = function () {\n  return {\n    method: this.method,\n    url: this.url,\n    data: this._data,\n    headers: this._header\n  };\n};\n\n/**\n * Send `data` as the request body, defaulting the `.type()` to \"json\" when\n * an object is given.\n *\n * Examples:\n *\n *       // manual json\n *       request.post('/user')\n *         .type('json')\n *         .send('{\"name\":\"tj\"}')\n *         .end(callback)\n *\n *       // auto json\n *       request.post('/user')\n *         .send({ name: 'tj' })\n *         .end(callback)\n *\n *       // manual x-www-form-urlencoded\n *       request.post('/user')\n *         .type('form')\n *         .send('name=tj')\n *         .end(callback)\n *\n *       // auto x-www-form-urlencoded\n *       request.post('/user')\n *         .type('form')\n *         .send({ name: 'tj' })\n *         .end(callback)\n *\n *       // defaults to x-www-form-urlencoded\n *      request.post('/user')\n *        .send('name=tobi')\n *        .send('species=ferret')\n *        .end(callback)\n *\n * @param {String|Object} data\n * @return {Request} for chaining\n * @api public\n */\n\n// eslint-disable-next-line complexity\nRequestBase.prototype.send = function (data) {\n  const isObject_ = isObject(data);\n  let type = this._header['content-type'];\n\n  if (this._formData) {\n    throw new Error(\n      \".send() can't be used if .attach() or .field() is used. 
Please use only .send() or only .field() & .attach()\"\n    );\n  }\n\n  if (isObject_ && !this._data) {\n    if (Array.isArray(data)) {\n      this._data = [];\n    } else if (!this._isHost(data)) {\n      this._data = {};\n    }\n  } else if (data && this._data && this._isHost(this._data)) {\n    throw new Error(\"Can't merge these send calls\");\n  }\n\n  // merge\n  if (isObject_ && isObject(this._data)) {\n    for (const key in data) {\n      if (hasOwn(data, key)) this._data[key] = data[key];\n    }\n  } else if (typeof data === 'string') {\n    // default to x-www-form-urlencoded\n    if (!type) this.type('form');\n    type = this._header['content-type'];\n    if (type) type = type.toLowerCase().trim();\n    if (type === 'application/x-www-form-urlencoded') {\n      this._data = this._data ? `${this._data}&${data}` : data;\n    } else {\n      this._data = (this._data || '') + data;\n    }\n  } else {\n    this._data = data;\n  }\n\n  if (!isObject_ || this._isHost(data)) {\n    return this;\n  }\n\n  // default to json\n  if (!type) this.type('json');\n  return this;\n};\n\n/**\n * Sort `querystring` by the sort function\n *\n *\n * Examples:\n *\n *       // default order\n *       request.get('/user')\n *         .query('name=Nick')\n *         .query('search=Manny')\n *         .sortQuery()\n *         .end(callback)\n *\n *       // customized sort function\n *       request.get('/user')\n *         .query('name=Nick')\n *         .query('search=Manny')\n *         .sortQuery(function(a, b){\n *           return a.length - b.length;\n *         })\n *         .end(callback)\n *\n *\n * @param {Function} sort\n * @return {Request} for chaining\n * @api public\n */\n\nRequestBase.prototype.sortQuery = function (sort) {\n  // _sort default to true but otherwise can be a function or boolean\n  this._sort = typeof sort === 'undefined' ? true : sort;\n  return this;\n};\n\n/**\n * Compose querystring to append to req.url\n *\n * @api private\n */\nRequestBase.prototype._finalizeQueryString = function () {\n  const query = this._query.join('&');\n  if (query) {\n    this.url += (this.url.includes('?') ? '&' : '?') + query;\n  }\n\n  this._query.length = 0; // Makes the call idempotent\n\n  if (this._sort) {\n    const index = this.url.indexOf('?');\n    if (index >= 0) {\n      const queryArray = this.url.slice(index + 1).split('&');\n      if (typeof this._sort === 'function') {\n        queryArray.sort(this._sort);\n      } else {\n        queryArray.sort();\n      }\n\n      this.url = this.url.slice(0, index) + '?' 
+ queryArray.join('&');\n    }\n  }\n};\n\n// For backwards compat only\nRequestBase.prototype._appendQueryString = () => {\n  console.warn('Unsupported');\n};\n\n/**\n * Invoke callback with timeout error.\n *\n * @api private\n */\n\nRequestBase.prototype._timeoutError = function (reason, timeout, errno) {\n  if (this._aborted) {\n    return;\n  }\n\n  const error = new Error(`${reason + timeout}ms exceeded`);\n  error.timeout = timeout;\n  error.code = 'ECONNABORTED';\n  error.errno = errno;\n  this.timedout = true;\n  this.timedoutError = error;\n  this.abort();\n  this.callback(error);\n};\n\nRequestBase.prototype._setTimeouts = function () {\n  const self = this;\n\n  // deadline\n  if (this._timeout && !this._timer) {\n    this._timer = setTimeout(() => {\n      self._timeoutError('Timeout of ', self._timeout, 'ETIME');\n    }, this._timeout);\n  }\n\n  // response timeout\n  if (this._responseTimeout && !this._responseTimeoutTimer) {\n    this._responseTimeoutTimer = setTimeout(() => {\n      self._timeoutError(\n        'Response timeout of ',\n        self._responseTimeout,\n        'ETIMEDOUT'\n      );\n    }, this._responseTimeout);\n  }\n};\n"],"mappings":";;;;AAAA,IAAMA,MAAM,GAAGC,OAAO,CAAC,QAAD,CAAtB;AAEA;AACA;AACA;;;AACA,eAA6BA,OAAO,CAAC,SAAD,CAApC;AAAA,IAAQC,QAAR,YAAQA,QAAR;AAAA,IAAkBC,MAAlB,YAAkBA,MAAlB;AAEA;AACA;AACA;;;AAEAC,MAAM,CAACC,OAAP,GAAiBC,WAAjB;AAEA;AACA;AACA;AACA;AACA;;AAEA,SAASA,WAAT,GAAuB,CAAG;AAE1B;AACA;AACA;AACA;AACA;AACA;;;AAEAA,WAAW,CAACC,SAAZ,CAAsBC,YAAtB,GAAqC,YAAY;EAC/CA,YAAY,CAAC,KAAKC,MAAN,CAAZ;EACAD,YAAY,CAAC,KAAKE,qBAAN,CAAZ;EACAF,YAAY,CAAC,KAAKG,mBAAN,CAAZ;EACA,OAAO,KAAKF,MAAZ;EACA,OAAO,KAAKC,qBAAZ;EACA,OAAO,KAAKC,mBAAZ;EACA,OAAO,IAAP;AACD,CARD;AAUA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AAEAL,WAAW,CAACC,SAAZ,CAAsBK,KAAtB,GAA8B,UAAUC,EAAV,EAAc;EAC1C,KAAKC,OAAL,GAAeD,EAAf;EACA,OAAO,IAAP;AACD,CAHD;AAKA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AAEAP,WAAW,CAACC,SAAZ,CAAsBQ,YAAtB,GAAqC,UAAUC,KAAV,EAAiB;EACpD,KAAKC,aAAL,GAAqBD,KAArB;EACA,OAAO,IAAP;AACD,CAHD;AAKA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AAEAV,WAAW,CAACC,SAAZ,CAAsBW,SAAtB,GAAkC,UAAUL,EAAV,EAAc;EAC9C,KAAKM,WAAL,GAAmBN,EAAnB;EACA,OAAO,IAAP;AACD,CAHD;AAKA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AAEAP,WAAW,CAACC,SAAZ,CAAsBa,OAAtB,GAAgC,UAAUC,OAAV,EAAmB;EACjD,IAAI,CAACA,OAAD,IAAY,QAAOA,OAAP,MAAmB,QAAnC,EAA6C;IAC3C,KAAKC,QAAL,GAAgBD,OAAhB;IACA,KAAKE,gBAAL,GAAwB,CAAxB;IACA,KAAKC,cAAL,GAAsB,CAAtB;IACA,OAAO,IAAP;EACD;;EAED,KAAK,IAAMC,MAAX,IAAqBJ,OAArB,EAA8B;IAC5B,IAAIlB,MAAM,CAACkB,OAAD,EAAUI,MAAV,CAAV,EAA6B;MAC3B,QAAQA,MAAR;QACE,KAAK,UAAL;UACE,KAAKH,QAAL,GAAgBD,OAAO,CAACK,QAAxB;UACA;;QACF,KAAK,UAAL;UACE,KAAKH,gBAAL,GAAwBF,OAAO,CAACM,QAAhC;UACA;;QACF,KAAK,QAAL;UACE,KAAKH,cAAL,GAAsBH,OAAO,CAACO,MAA9B;UACA;;QACF;UACEC,OAAO,CAACC,IAAR,CAAa,wBAAb,EAAuCL,MAAvC;MAXJ;IAaD;EACF;;EAED,OAAO,IAAP;AACD,CA3BD;AA6BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AAEAnB,WAAW,CAACC,SAAZ,CAAsBwB,KAAtB,GAA8B,UAAUC,KAAV,EAAiBnB,EAAjB,EAAqB;EACjD;EACA,IAAIoB,SAAS,CAACC,MAAV,KAAqB,CAArB,IAA0BF,KAAK,KAAK,IAAxC,EAA8CA,KAAK,GAAG,CAAR;EAC9C,IAAIA,KAAK,IAAI,CAAb,EAAgBA,KAAK,GAAG,CAAR;EAChB,KAAKG,WAAL,GAAmBH,KAAnB;EACA,KAAKI,QAAL,GAAgB,CAAhB;EACA,KAAKC,cAAL,GAAsBxB,EAAtB;EACA,OAAO,IAAP;AACD,CARD,C,CAUA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AACA,IAAMyB,WAAW,GAAG,IAAIC,GAAJ,CAAQ,CAC1B,WAD0B,EAE1B,YAF0B,EAG1B,YAH0B,EAI1B,cAJ0B,EAK1B,OAL0B,EAM1B,WAN0B,EAO1B,aAP0B,EAQ1B,WAR0B,CAAR,CAApB;AAWA,IAAMC,YAAY,GAAG,IAAID,GAAJ,CAAQ,CAC3B,GAD2B,EACtB,GADsB,EACjB,GADiB,
EACZ,GADY,EACP,GADO,EACF,GADE,EACG,GADH,EACQ,GADR,EACa,GADb,EACkB,GADlB,CAAR,CAArB,C,CAIA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AACAjC,WAAW,CAACC,SAAZ,CAAsBkC,YAAtB,GAAqC,UAAUC,KAAV,EAAiBC,GAAjB,EAAsB;EACzD,IAAI,CAAC,KAAKR,WAAN,IAAqB,KAAKC,QAAL,MAAmB,KAAKD,WAAjD,EAA8D;IAC5D,OAAO,KAAP;EACD;;EAED,IAAI,KAAKE,cAAT,EAAyB;IACvB,IAAI;MACF,IAAMO,QAAQ,GAAG,KAAKP,cAAL,CAAoBK,KAApB,EAA2BC,GAA3B,CAAjB;;MACA,IAAIC,QAAQ,KAAK,IAAjB,EAAuB,OAAO,IAAP;MACvB,IAAIA,QAAQ,KAAK,KAAjB,EAAwB,OAAO,KAAP,CAHtB,CAIF;IACD,CALD,CAKE,OAAOC,GAAP,EAAY;MACZhB,OAAO,CAACa,KAAR,CAAcG,GAAd;IACD;EACF,CAdwD,CAgBzD;;EACA;AACF;AACA;AACA;AACA;AACA;AACA;AACA;;;EACE,IAAIF,GAAG,IAAIA,GAAG,CAACG,MAAX,IAAqBN,YAAY,CAACO,GAAb,CAAiBJ,GAAG,CAACG,MAArB,CAAzB,EAAuD,OAAO,IAAP;;EACvD,IAAIJ,KAAJ,EAAW;IACT,IAAIA,KAAK,CAACM,IAAN,IAAcV,WAAW,CAACS,GAAZ,CAAgBL,KAAK,CAACM,IAAtB,CAAlB,EAA+C,OAAO,IAAP,CADtC,CAET;;IACA,IAAIN,KAAK,CAACtB,OAAN,IAAiBsB,KAAK,CAACM,IAAN,KAAe,cAApC,EAAoD,OAAO,IAAP;IACpD,IAAIN,KAAK,CAACO,WAAV,EAAuB,OAAO,IAAP;EACxB;;EAED,OAAO,KAAP;AACD,CAlCD;AAoCA;AACA;AACA;AACA;AACA;AACA;;;AAEA3C,WAAW,CAACC,SAAZ,CAAsB2C,MAAtB,GAA+B,YAAY;EACzC,KAAK1C,YAAL,GADyC,CAGzC;;EACA,IAAI,KAAK2C,GAAT,EAAc;IACZ,KAAKA,GAAL,GAAW,IAAX;IACA,KAAKA,GAAL,GAAW,KAAKC,OAAL,EAAX;EACD;;EAED,KAAKC,QAAL,GAAgB,KAAhB;EACA,KAAKC,QAAL,GAAgB,KAAhB;EACA,KAAKC,aAAL,GAAqB,IAArB;EAEA,OAAO,KAAKC,IAAL,EAAP;AACD,CAdD;AAgBA;AACA;AACA;AACA;AACA;AACA;AACA;;;AAEAlD,WAAW,CAACC,SAAZ,CAAsBkD,IAAtB,GAA6B,UAAUC,OAAV,EAAmBC,MAAnB,EAA2B;EAAA;;EACtD,IAAI,CAAC,KAAKC,kBAAV,EAA8B;IAC5B,IAAMC,IAAI,GAAG,IAAb;;IACA,IAAI,KAAKC,UAAT,EAAqB;MACnBjC,OAAO,CAACC,IAAR,CACE,gIADF;IAGD;;IAED,KAAK8B,kBAAL,GAA0B,IAAIG,OAAJ,CAAY,UAACL,OAAD,EAAUC,MAAV,EAAqB;MACzDE,IAAI,CAACG,EAAL,CAAQ,OAAR,EAAiB,YAAM;QACrB,IAAI,KAAI,CAAC7B,WAAL,IAAoB,KAAI,CAACA,WAAL,GAAmB,KAAI,CAACC,QAAhD,EAA0D;UACxD;QACD;;QAED,IAAI,KAAI,CAACkB,QAAL,IAAiB,KAAI,CAACC,aAA1B,EAAyC;UACvCI,MAAM,CAAC,KAAI,CAACJ,aAAN,CAAN;UACA;QACD;;QAED,IAAMb,KAAK,GAAG,IAAIuB,KAAJ,CAAU,SAAV,CAAd;QACAvB,KAAK,CAACM,IAAN,GAAa,SAAb;QACAN,KAAK,CAACI,MAAN,GAAe,KAAI,CAACA,MAApB;QACAJ,KAAK,CAACwB,MAAN,GAAe,KAAI,CAACA,MAApB;QACAxB,KAAK,CAACyB,GAAN,GAAY,KAAI,CAACA,GAAjB;QACAR,MAAM,CAACjB,KAAD,CAAN;MACD,CAhBD;MAiBAmB,IAAI,CAACO,GAAL,CAAS,UAAC1B,KAAD,EAAQC,GAAR,EAAgB;QACvB,IAAID,KAAJ,EAAWiB,MAAM,CAACjB,KAAD,CAAN,CAAX,KACKgB,OAAO,CAACf,GAAD,CAAP;MACN,CAHD;IAID,CAtByB,CAA1B;EAuBD;;EAED,OAAO,KAAKiB,kBAAL,CAAwBH,IAAxB,CAA6BC,OAA7B,EAAsCC,MAAtC,CAAP;AACD,CAnCD;;AAqCArD,WAAW,CAACC,SAAZ,CAAsB8D,KAAtB,GAA8B,UAAUC,QAAV,EAAoB;EAChD,OAAO,KAAKb,IAAL,CAAUc,SAAV,EAAqBD,QAArB,CAAP;AACD,CAFD;AAIA;AACA;AACA;;;AAEAhE,WAAW,CAACC,SAAZ,CAAsBiE,GAAtB,GAA4B,UAAU3D,EAAV,EAAc;EACxCA,EAAE,CAAC,IAAD,CAAF;EACA,OAAO,IAAP;AACD,CAHD;;AAKAP,WAAW,CAACC,SAAZ,CAAsBkE,EAAtB,GAA2B,UAAUH,QAAV,EAAoB;EAC7C,IAAI,OAAOA,QAAP,KAAoB,UAAxB,EAAoC,MAAM,IAAIL,KAAJ,CAAU,mBAAV,CAAN;EACpC,KAAKS,WAAL,GAAmBJ,QAAnB;EACA,OAAO,IAAP;AACD,CAJD;;AAMAhE,WAAW,CAACC,SAAZ,CAAsBoE,aAAtB,GAAsC,UAAUhC,GAAV,EAAe;EACnD,IAAI,CAACA,GAAL,EAAU;IACR,OAAO,KAAP;EACD;;EAED,IAAI,KAAK+B,WAAT,EAAsB;IACpB,OAAO,KAAKA,WAAL,CAAiB/B,GAAjB,CAAP;EACD;;EAED,OAAOA,GAAG,CAACG,MAAJ,IAAc,GAAd,IAAqBH,GAAG,CAACG,MAAJ,GAAa,GAAzC;AACD,CAVD;AAYA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AAEAxC,WAAW,CAACC,SAAZ,CAAsBqE,GAAtB,GAA4B,UAAUC,KAAV,EAAiB;EAC3C,OAAO,KAAKC,OAAL,CAAaD,KAAK,CAACE,WAAN,EAAb,CAAP;AACD,CAFD;AAIA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AAEAzE,WAAW,CAACC,SAAZ,CAAsByE,SAAtB,GAAkC1E,WAAW,CAACC,SAAZ,CAAsBqE,GAAxD;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEAtE,WAAW,CAACC,SAAZ,CAAsB0E,GAAtB,GA
A4B,UAAUJ,KAAV,EAAiB7D,KAAjB,EAAwB;EAClD,IAAId,QAAQ,CAAC2E,KAAD,CAAZ,EAAqB;IACnB,KAAK,IAAMK,GAAX,IAAkBL,KAAlB,EAAyB;MACvB,IAAI1E,MAAM,CAAC0E,KAAD,EAAQK,GAAR,CAAV,EAAwB,KAAKD,GAAL,CAASC,GAAT,EAAcL,KAAK,CAACK,GAAD,CAAnB;IACzB;;IAED,OAAO,IAAP;EACD;;EAED,KAAKJ,OAAL,CAAaD,KAAK,CAACE,WAAN,EAAb,IAAoC/D,KAApC;EACA,KAAKmE,MAAL,CAAYN,KAAZ,IAAqB7D,KAArB;EACA,OAAO,IAAP;AACD,CAZD;AAcA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AACAV,WAAW,CAACC,SAAZ,CAAsB6E,KAAtB,GAA8B,UAAUP,KAAV,EAAiB;EAC7C,OAAO,KAAKC,OAAL,CAAaD,KAAK,CAACE,WAAN,EAAb,CAAP;EACA,OAAO,KAAKI,MAAL,CAAYN,KAAZ,CAAP;EACA,OAAO,IAAP;AACD,CAJD;AAMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AACAvE,WAAW,CAACC,SAAZ,CAAsBsE,KAAtB,GAA8B,UAAUQ,IAAV,EAAgBrE,KAAhB,EAAuBK,OAAvB,EAAgC;EAC5D;EACA,IAAIgE,IAAI,KAAK,IAAT,IAAiBd,SAAS,KAAKc,IAAnC,EAAyC;IACvC,MAAM,IAAIpB,KAAJ,CAAU,yCAAV,CAAN;EACD;;EAED,IAAI,KAAKqB,KAAT,EAAgB;IACd,MAAM,IAAIrB,KAAJ,CACJ,iGADI,CAAN;EAGD;;EAED,IAAI/D,QAAQ,CAACmF,IAAD,CAAZ,EAAoB;IAClB,KAAK,IAAMH,GAAX,IAAkBG,IAAlB,EAAwB;MACtB,IAAIlF,MAAM,CAACkF,IAAD,EAAOH,GAAP,CAAV,EAAuB,KAAKL,KAAL,CAAWK,GAAX,EAAgBG,IAAI,CAACH,GAAD,CAApB;IACxB;;IAED,OAAO,IAAP;EACD;;EAED,IAAIK,KAAK,CAACC,OAAN,CAAcxE,KAAd,CAAJ,EAA0B;IACxB,KAAK,IAAMyE,CAAX,IAAgBzE,KAAhB,EAAuB;MACrB,IAAIb,MAAM,CAACa,KAAD,EAAQyE,CAAR,CAAV,EAAsB,KAAKZ,KAAL,CAAWQ,IAAX,EAAiBrE,KAAK,CAACyE,CAAD,CAAtB;IACvB;;IAED,OAAO,IAAP;EACD,CA1B2D,CA4B5D;;;EACA,IAAIzE,KAAK,KAAK,IAAV,IAAkBuD,SAAS,KAAKvD,KAApC,EAA2C;IACzC,MAAM,IAAIiD,KAAJ,CAAU,wCAAV,CAAN;EACD;;EAED,IAAI,OAAOjD,KAAP,KAAiB,SAArB,EAAgC;IAC9BA,KAAK,GAAG0E,MAAM,CAAC1E,KAAD,CAAd;EACD,CAnC2D,CAqC5D;;;EACA,IAAIK,OAAJ,EAAa,KAAKsE,YAAL,GAAoBC,MAApB,CAA2BP,IAA3B,EAAiCrE,KAAjC,EAAwCK,OAAxC,EAAb,KACK,KAAKsE,YAAL,GAAoBC,MAApB,CAA2BP,IAA3B,EAAiCrE,KAAjC;EAEL,OAAO,IAAP;AACD,CA1CD;AA4CA;AACA;AACA;AACA;AACA;AACA;;;AACAV,WAAW,CAACC,SAAZ,CAAsBsF,KAAtB,GAA8B,YAAY;EACxC,IAAI,KAAKxC,QAAT,EAAmB;IACjB,OAAO,IAAP;EACD;;EAED,KAAKA,QAAL,GAAgB,IAAhB;EACA,IAAI,KAAKyC,GAAT,EAAc,KAAKA,GAAL,CAASD,KAAT,GAN0B,CAMR;;EAChC,IAAI,KAAK1C,GAAT,EAAc;IACZ;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA,IACEnD,MAAM,CAAC+F,GAAP,CAAWC,OAAO,CAACC,OAAnB,EAA4B,SAA5B,KACAjG,MAAM,CAACkG,EAAP,CAAUF,OAAO,CAACC,OAAlB,EAA2B,SAA3B,CAFF,EAGE;MACA;MACA;MACA,MAAM,IAAIhC,KAAJ,CACJ,mFADI,CAAN;IAGD,CATD,MASO,IAAIjE,MAAM,CAAC+F,GAAP,CAAWC,OAAO,CAACC,OAAnB,EAA4B,SAA5B,CAAJ,EAA4C;MACjD;MACA;MACA,KAAK9C,GAAL,CAASgD,SAAT,GAAqB,IAArB;IACD;;IAED,KAAKhD,GAAL,CAAS0C,KAAT,GA3BY,CA2BM;EACnB;;EAED,KAAKrF,YAAL;EACA,KAAK4F,IAAL,CAAU,OAAV;EACA,OAAO,IAAP;AACD,CAxCD;;AA0CA9F,WAAW,CAACC,SAAZ,CAAsB8F,KAAtB,GAA8B,UAAUC,IAAV,EAAgBC,IAAhB,EAAsBlF,OAAtB,EAA+BmF,aAA/B,EAA8C;EAC1E,QAAQnF,OAAO,CAACoF,IAAhB;IACE,KAAK,OAAL;MACE,KAAKxB,GAAL,CAAS,eAAT,kBAAmCuB,aAAa,WAAIF,IAAJ,cAAYC,IAAZ,EAAhD;MACA;;IAEF,KAAK,MAAL;MACE,KAAKG,QAAL,GAAgBJ,IAAhB;MACA,KAAKK,QAAL,GAAgBJ,IAAhB;MACA;;IAEF,KAAK,QAAL;MAAe;MACb,KAAKtB,GAAL,CAAS,eAAT,mBAAoCqB,IAApC;MACA;;IACF;MACE;EAdJ;;EAiBA,OAAO,IAAP;AACD,CAnBD;AAqBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AAEAhG,WAAW,CAACC,SAAZ,CAAsBqG,eAAtB,GAAwC,UAAU5C,EAAV,EAAc;EACpD;EACA,IAAIA,EAAE,KAAKO,SAAX,EAAsBP,EAAE,GAAG,IAAL;EACtB,KAAK6C,gBAAL,GAAwB7C,EAAxB;EACA,OAAO,IAAP;AACD,CALD;AAOA;AACA;AACA;AACA;AACA;AACA;AACA;;;AAEA1D,WAAW,CAACC,SAAZ,CAAsBuG,SAAtB,GAAkC,UAAUC,CAAV,EAAa;EAC7C,KAAKC,aAAL,GAAqBD,CAArB;EACA,OAAO,IAAP;AACD,CAHD;AAKA;AACA;AACA;AACA;AACA;AACA;AACA;;;AACAzG,WAAW,CAACC,SAAZ,CAAsB0G,eAAtB,GAAwC,UAAUF,CAAV,EAAa;EACnD,IAAI,OAAOA,CAAP,KAAa,QAAjB,EAA2B;IACzB,MAAM,IAAIG,SAAJ,CAAc,kBAAd,CAAN;EACD;;EAED,KAAKC,gBAAL,GAAwBJ,CAAxB;EA
CA,OAAO,IAAP;AACD,CAPD;AASA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AAEAzG,WAAW,CAACC,SAAZ,CAAsB6G,MAAtB,GAA+B,YAAY;EACzC,OAAO;IACLlD,MAAM,EAAE,KAAKA,MADR;IAELC,GAAG,EAAE,KAAKA,GAFL;IAGLkD,IAAI,EAAE,KAAK/B,KAHN;IAILgC,OAAO,EAAE,KAAKxC;EAJT,CAAP;AAMD,CAPD;AASA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAEA;;;AACAxE,WAAW,CAACC,SAAZ,CAAsBgH,IAAtB,GAA6B,UAAUF,IAAV,EAAgB;EAC3C,IAAMG,SAAS,GAAGtH,QAAQ,CAACmH,IAAD,CAA1B;EACA,IAAIZ,IAAI,GAAG,KAAK3B,OAAL,CAAa,cAAb,CAAX;;EAEA,IAAI,KAAK2C,SAAT,EAAoB;IAClB,MAAM,IAAIxD,KAAJ,CACJ,8GADI,CAAN;EAGD;;EAED,IAAIuD,SAAS,IAAI,CAAC,KAAKlC,KAAvB,EAA8B;IAC5B,IAAIC,KAAK,CAACC,OAAN,CAAc6B,IAAd,CAAJ,EAAyB;MACvB,KAAK/B,KAAL,GAAa,EAAb;IACD,CAFD,MAEO,IAAI,CAAC,KAAKoC,OAAL,CAAaL,IAAb,CAAL,EAAyB;MAC9B,KAAK/B,KAAL,GAAa,EAAb;IACD;EACF,CAND,MAMO,IAAI+B,IAAI,IAAI,KAAK/B,KAAb,IAAsB,KAAKoC,OAAL,CAAa,KAAKpC,KAAlB,CAA1B,EAAoD;IACzD,MAAM,IAAIrB,KAAJ,CAAU,8BAAV,CAAN;EACD,CAlB0C,CAoB3C;;;EACA,IAAIuD,SAAS,IAAItH,QAAQ,CAAC,KAAKoF,KAAN,CAAzB,EAAuC;IACrC,KAAK,IAAMJ,GAAX,IAAkBmC,IAAlB,EAAwB;MACtB,IAAIlH,MAAM,CAACkH,IAAD,EAAOnC,GAAP,CAAV,EAAuB,KAAKI,KAAL,CAAWJ,GAAX,IAAkBmC,IAAI,CAACnC,GAAD,CAAtB;IACxB;EACF,CAJD,MAIO,IAAI,OAAOmC,IAAP,KAAgB,QAApB,EAA8B;IACnC;IACA,IAAI,CAACZ,IAAL,EAAW,KAAKA,IAAL,CAAU,MAAV;IACXA,IAAI,GAAG,KAAK3B,OAAL,CAAa,cAAb,CAAP;IACA,IAAI2B,IAAJ,EAAUA,IAAI,GAAGA,IAAI,CAAC1B,WAAL,GAAmB4C,IAAnB,EAAP;;IACV,IAAIlB,IAAI,KAAK,mCAAb,EAAkD;MAChD,KAAKnB,KAAL,GAAa,KAAKA,KAAL,aAAgB,KAAKA,KAArB,cAA8B+B,IAA9B,IAAuCA,IAApD;IACD,CAFD,MAEO;MACL,KAAK/B,KAAL,GAAa,CAAC,KAAKA,KAAL,IAAc,EAAf,IAAqB+B,IAAlC;IACD;EACF,CAVM,MAUA;IACL,KAAK/B,KAAL,GAAa+B,IAAb;EACD;;EAED,IAAI,CAACG,SAAD,IAAc,KAAKE,OAAL,CAAaL,IAAb,CAAlB,EAAsC;IACpC,OAAO,IAAP;EACD,CAzC0C,CA2C3C;;;EACA,IAAI,CAACZ,IAAL,EAAW,KAAKA,IAAL,CAAU,MAAV;EACX,OAAO,IAAP;AACD,CA9CD;AAgDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AAEAnG,WAAW,CAACC,SAAZ,CAAsBqH,SAAtB,GAAkC,UAAUC,IAAV,EAAgB;EAChD;EACA,KAAKC,KAAL,GAAa,OAAOD,IAAP,KAAgB,WAAhB,GAA8B,IAA9B,GAAqCA,IAAlD;EACA,OAAO,IAAP;AACD,CAJD;AAMA;AACA;AACA;AACA;AACA;;;AACAvH,WAAW,CAACC,SAAZ,CAAsBwH,oBAAtB,GAA6C,YAAY;EACvD,IAAMC,KAAK,GAAG,KAAKC,MAAL,CAAYC,IAAZ,CAAiB,GAAjB,CAAd;;EACA,IAAIF,KAAJ,EAAW;IACT,KAAK7D,GAAL,IAAY,CAAC,KAAKA,GAAL,CAASgE,QAAT,CAAkB,GAAlB,IAAyB,GAAzB,GAA+B,GAAhC,IAAuCH,KAAnD;EACD;;EAED,KAAKC,MAAL,CAAY/F,MAAZ,GAAqB,CAArB,CANuD,CAM/B;;EAExB,IAAI,KAAK4F,KAAT,EAAgB;IACd,IAAMM,KAAK,GAAG,KAAKjE,GAAL,CAASkE,OAAT,CAAiB,GAAjB,CAAd;;IACA,IAAID,KAAK,IAAI,CAAb,EAAgB;MACd,IAAME,UAAU,GAAG,KAAKnE,GAAL,CAASoE,KAAT,CAAeH,KAAK,GAAG,CAAvB,EAA0BI,KAA1B,CAAgC,GAAhC,CAAnB;;MACA,IAAI,OAAO,KAAKV,KAAZ,KAAsB,UAA1B,EAAsC;QACpCQ,UAAU,CAACT,IAAX,CAAgB,KAAKC,KAArB;MACD,CAFD,MAEO;QACLQ,UAAU,CAACT,IAAX;MACD;;MAED,KAAK1D,GAAL,GAAW,KAAKA,GAAL,CAASoE,KAAT,CAAe,CAAf,EAAkBH,KAAlB,IAA2B,GAA3B,GAAiCE,UAAU,CAACJ,IAAX,CAAgB,GAAhB,CAA5C;IACD;EACF;AACF,CArBD,C,CAuBA;;;AACA5H,WAAW,CAACC,SAAZ,CAAsBkI,kBAAtB,GAA2C,YAAM;EAC/C5G,OAAO,CAACC,IAAR,CAAa,aAAb;AACD,CAFD;AAIA;AACA;AACA;AACA;AACA;;;AAEAxB,WAAW,CAACC,SAAZ,CAAsBmI,aAAtB,GAAsC,UAAUC,MAAV,EAAkBvH,OAAlB,EAA2BwH,KAA3B,EAAkC;EACtE,IAAI,KAAKvF,QAAT,EAAmB;IACjB;EACD;;EAED,IAAMX,KAAK,GAAG,IAAIuB,KAAJ,WAAa0E,MAAM,GAAGvH,OAAtB,iBAAd;EACAsB,KAAK,CAACtB,OAAN,GAAgBA,OAAhB;EACAsB,KAAK,CAACM,IAAN,GAAa,cAAb;EACAN,KAAK,CAACkG,KAAN,GAAcA,KAAd;EACA,KAAKtF,QAAL,GAAgB,IAAhB;EACA,KAAKC,aAAL,GAAqBb,KAArB;EACA,KAAKmD,KAAL;EACA,KAAKvB,QAAL,CAAc5B,KAAd;AACD,CAbD;;AAeApC,WA
AW,CAACC,SAAZ,CAAsBsI,YAAtB,GAAqC,YAAY;EAC/C,IAAMhF,IAAI,GAAG,IAAb,CAD+C,CAG/C;;EACA,IAAI,KAAKvC,QAAL,IAAiB,CAAC,KAAKb,MAA3B,EAAmC;IACjC,KAAKA,MAAL,GAAcqI,UAAU,CAAC,YAAM;MAC7BjF,IAAI,CAAC6E,aAAL,CAAmB,aAAnB,EAAkC7E,IAAI,CAACvC,QAAvC,EAAiD,OAAjD;IACD,CAFuB,EAErB,KAAKA,QAFgB,CAAxB;EAGD,CAR8C,CAU/C;;;EACA,IAAI,KAAKC,gBAAL,IAAyB,CAAC,KAAKb,qBAAnC,EAA0D;IACxD,KAAKA,qBAAL,GAA6BoI,UAAU,CAAC,YAAM;MAC5CjF,IAAI,CAAC6E,aAAL,CACE,sBADF,EAEE7E,IAAI,CAACtC,gBAFP,EAGE,WAHF;IAKD,CANsC,EAMpC,KAAKA,gBAN+B,CAAvC;EAOD;AACF,CApBD"} \ No newline at end of file diff --git a/node_modules/superagent/lib/response-base.js b/node_modules/superagent/lib/response-base.js new file mode 100644 index 0000000..015bf81 --- /dev/null +++ b/node_modules/superagent/lib/response-base.js @@ -0,0 +1,113 @@ +"use strict"; + +/** + * Module dependencies. + */ +var utils = require('./utils'); +/** + * Expose `ResponseBase`. + */ + + +module.exports = ResponseBase; +/** + * Initialize a new `ResponseBase`. + * + * @api public + */ + +function ResponseBase() {} +/** + * Get case-insensitive `field` value. + * + * @param {String} field + * @return {String} + * @api public + */ + + +ResponseBase.prototype.get = function (field) { + return this.header[field.toLowerCase()]; +}; +/** + * Set header related properties: + * + * - `.type` the content type without params + * + * A response of "Content-Type: text/plain; charset=utf-8" + * will provide you with a `.type` of "text/plain". + * + * @param {Object} header + * @api private + */ + + +ResponseBase.prototype._setHeaderProperties = function (header) { + // TODO: moar! + // TODO: make this a util + // content-type + var ct = header['content-type'] || ''; + this.type = utils.type(ct); // params + + var parameters = utils.params(ct); + + for (var key in parameters) { + if (Object.prototype.hasOwnProperty.call(parameters, key)) this[key] = parameters[key]; + } + + this.links = {}; // links + + try { + if (header.link) { + this.links = utils.parseLinks(header.link); + } + } catch (_unused) {// ignore + } +}; +/** + * Set flags such as `.ok` based on `status`. + * + * For example a 2xx response will give you a `.ok` of __true__ + * whereas 5xx will be __false__ and `.error` will be __true__. The + * `.clientError` and `.serverError` are also available to be more + * specific, and `.statusType` is the class of error ranging from 1..5 + * sometimes useful for mapping respond colors etc. + * + * "sugar" properties are also defined for common cases. Currently providing: + * + * - .noContent + * - .badRequest + * - .unauthorized + * - .notAcceptable + * - .notFound + * + * @param {Number} status + * @api private + */ + + +ResponseBase.prototype._setStatusProperties = function (status) { + var type = Math.trunc(status / 100); // status / class + + this.statusCode = status; + this.status = this.statusCode; + this.statusType = type; // basics + + this.info = type === 1; + this.ok = type === 2; + this.redirect = type === 3; + this.clientError = type === 4; + this.serverError = type === 5; + this.error = type === 4 || type === 5 ? 
this.toError() : false; // sugar + + this.created = status === 201; + this.accepted = status === 202; + this.noContent = status === 204; + this.badRequest = status === 400; + this.unauthorized = status === 401; + this.notAcceptable = status === 406; + this.forbidden = status === 403; + this.notFound = status === 404; + this.unprocessableEntity = status === 422; +}; +//# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJuYW1lcyI6WyJ1dGlscyIsInJlcXVpcmUiLCJtb2R1bGUiLCJleHBvcnRzIiwiUmVzcG9uc2VCYXNlIiwicHJvdG90eXBlIiwiZ2V0IiwiZmllbGQiLCJoZWFkZXIiLCJ0b0xvd2VyQ2FzZSIsIl9zZXRIZWFkZXJQcm9wZXJ0aWVzIiwiY3QiLCJ0eXBlIiwicGFyYW1ldGVycyIsInBhcmFtcyIsImtleSIsIk9iamVjdCIsImhhc093blByb3BlcnR5IiwiY2FsbCIsImxpbmtzIiwibGluayIsInBhcnNlTGlua3MiLCJfc2V0U3RhdHVzUHJvcGVydGllcyIsInN0YXR1cyIsIk1hdGgiLCJ0cnVuYyIsInN0YXR1c0NvZGUiLCJzdGF0dXNUeXBlIiwiaW5mbyIsIm9rIiwicmVkaXJlY3QiLCJjbGllbnRFcnJvciIsInNlcnZlckVycm9yIiwiZXJyb3IiLCJ0b0Vycm9yIiwiY3JlYXRlZCIsImFjY2VwdGVkIiwibm9Db250ZW50IiwiYmFkUmVxdWVzdCIsInVuYXV0aG9yaXplZCIsIm5vdEFjY2VwdGFibGUiLCJmb3JiaWRkZW4iLCJub3RGb3VuZCIsInVucHJvY2Vzc2FibGVFbnRpdHkiXSwic291cmNlcyI6WyIuLi9zcmMvcmVzcG9uc2UtYmFzZS5qcyJdLCJzb3VyY2VzQ29udGVudCI6WyIvKipcbiAqIE1vZHVsZSBkZXBlbmRlbmNpZXMuXG4gKi9cblxuY29uc3QgdXRpbHMgPSByZXF1aXJlKCcuL3V0aWxzJyk7XG5cbi8qKlxuICogRXhwb3NlIGBSZXNwb25zZUJhc2VgLlxuICovXG5cbm1vZHVsZS5leHBvcnRzID0gUmVzcG9uc2VCYXNlO1xuXG4vKipcbiAqIEluaXRpYWxpemUgYSBuZXcgYFJlc3BvbnNlQmFzZWAuXG4gKlxuICogQGFwaSBwdWJsaWNcbiAqL1xuXG5mdW5jdGlvbiBSZXNwb25zZUJhc2UoKSB7fVxuXG4vKipcbiAqIEdldCBjYXNlLWluc2Vuc2l0aXZlIGBmaWVsZGAgdmFsdWUuXG4gKlxuICogQHBhcmFtIHtTdHJpbmd9IGZpZWxkXG4gKiBAcmV0dXJuIHtTdHJpbmd9XG4gKiBAYXBpIHB1YmxpY1xuICovXG5cblJlc3BvbnNlQmFzZS5wcm90b3R5cGUuZ2V0ID0gZnVuY3Rpb24gKGZpZWxkKSB7XG4gIHJldHVybiB0aGlzLmhlYWRlcltmaWVsZC50b0xvd2VyQ2FzZSgpXTtcbn07XG5cbi8qKlxuICogU2V0IGhlYWRlciByZWxhdGVkIHByb3BlcnRpZXM6XG4gKlxuICogICAtIGAudHlwZWAgdGhlIGNvbnRlbnQgdHlwZSB3aXRob3V0IHBhcmFtc1xuICpcbiAqIEEgcmVzcG9uc2Ugb2YgXCJDb250ZW50LVR5cGU6IHRleHQvcGxhaW47IGNoYXJzZXQ9dXRmLThcIlxuICogd2lsbCBwcm92aWRlIHlvdSB3aXRoIGEgYC50eXBlYCBvZiBcInRleHQvcGxhaW5cIi5cbiAqXG4gKiBAcGFyYW0ge09iamVjdH0gaGVhZGVyXG4gKiBAYXBpIHByaXZhdGVcbiAqL1xuXG5SZXNwb25zZUJhc2UucHJvdG90eXBlLl9zZXRIZWFkZXJQcm9wZXJ0aWVzID0gZnVuY3Rpb24gKGhlYWRlcikge1xuICAvLyBUT0RPOiBtb2FyIVxuICAvLyBUT0RPOiBtYWtlIHRoaXMgYSB1dGlsXG5cbiAgLy8gY29udGVudC10eXBlXG4gIGNvbnN0IGN0ID0gaGVhZGVyWydjb250ZW50LXR5cGUnXSB8fCAnJztcbiAgdGhpcy50eXBlID0gdXRpbHMudHlwZShjdCk7XG5cbiAgLy8gcGFyYW1zXG4gIGNvbnN0IHBhcmFtZXRlcnMgPSB1dGlscy5wYXJhbXMoY3QpO1xuICBmb3IgKGNvbnN0IGtleSBpbiBwYXJhbWV0ZXJzKSB7XG4gICAgaWYgKE9iamVjdC5wcm90b3R5cGUuaGFzT3duUHJvcGVydHkuY2FsbChwYXJhbWV0ZXJzLCBrZXkpKVxuICAgICAgdGhpc1trZXldID0gcGFyYW1ldGVyc1trZXldO1xuICB9XG5cbiAgdGhpcy5saW5rcyA9IHt9O1xuXG4gIC8vIGxpbmtzXG4gIHRyeSB7XG4gICAgaWYgKGhlYWRlci5saW5rKSB7XG4gICAgICB0aGlzLmxpbmtzID0gdXRpbHMucGFyc2VMaW5rcyhoZWFkZXIubGluayk7XG4gICAgfVxuICB9IGNhdGNoIHtcbiAgICAvLyBpZ25vcmVcbiAgfVxufTtcblxuLyoqXG4gKiBTZXQgZmxhZ3Mgc3VjaCBhcyBgLm9rYCBiYXNlZCBvbiBgc3RhdHVzYC5cbiAqXG4gKiBGb3IgZXhhbXBsZSBhIDJ4eCByZXNwb25zZSB3aWxsIGdpdmUgeW91IGEgYC5va2Agb2YgX190cnVlX19cbiAqIHdoZXJlYXMgNXh4IHdpbGwgYmUgX19mYWxzZV9fIGFuZCBgLmVycm9yYCB3aWxsIGJlIF9fdHJ1ZV9fLiBUaGVcbiAqIGAuY2xpZW50RXJyb3JgIGFuZCBgLnNlcnZlckVycm9yYCBhcmUgYWxzbyBhdmFpbGFibGUgdG8gYmUgbW9yZVxuICogc3BlY2lmaWMsIGFuZCBgLnN0YXR1c1R5cGVgIGlzIHRoZSBjbGFzcyBvZiBlcnJvciByYW5naW5nIGZyb20gMS4uNVxuICogc29tZXRpbWVzIHVzZWZ1bCBmb3IgbWFwcGluZyByZXNwb25kIGNvbG9ycyBldGMuXG4gKlxuICogXCJzdWdhclwiIHByb3BlcnRpZXMgYXJlIGFsc28gZGVmaW5lZCBmb3IgY29tbW9uIGNhc2VzLiBDdXJyZW50bHkgcHJvdmlkaW5nOlxuIC
pcbiAqICAgLSAubm9Db250ZW50XG4gKiAgIC0gLmJhZFJlcXVlc3RcbiAqICAgLSAudW5hdXRob3JpemVkXG4gKiAgIC0gLm5vdEFjY2VwdGFibGVcbiAqICAgLSAubm90Rm91bmRcbiAqXG4gKiBAcGFyYW0ge051bWJlcn0gc3RhdHVzXG4gKiBAYXBpIHByaXZhdGVcbiAqL1xuXG5SZXNwb25zZUJhc2UucHJvdG90eXBlLl9zZXRTdGF0dXNQcm9wZXJ0aWVzID0gZnVuY3Rpb24gKHN0YXR1cykge1xuICBjb25zdCB0eXBlID0gTWF0aC50cnVuYyhzdGF0dXMgLyAxMDApO1xuXG4gIC8vIHN0YXR1cyAvIGNsYXNzXG4gIHRoaXMuc3RhdHVzQ29kZSA9IHN0YXR1cztcbiAgdGhpcy5zdGF0dXMgPSB0aGlzLnN0YXR1c0NvZGU7XG4gIHRoaXMuc3RhdHVzVHlwZSA9IHR5cGU7XG5cbiAgLy8gYmFzaWNzXG4gIHRoaXMuaW5mbyA9IHR5cGUgPT09IDE7XG4gIHRoaXMub2sgPSB0eXBlID09PSAyO1xuICB0aGlzLnJlZGlyZWN0ID0gdHlwZSA9PT0gMztcbiAgdGhpcy5jbGllbnRFcnJvciA9IHR5cGUgPT09IDQ7XG4gIHRoaXMuc2VydmVyRXJyb3IgPSB0eXBlID09PSA1O1xuICB0aGlzLmVycm9yID0gdHlwZSA9PT0gNCB8fCB0eXBlID09PSA1ID8gdGhpcy50b0Vycm9yKCkgOiBmYWxzZTtcblxuICAvLyBzdWdhclxuICB0aGlzLmNyZWF0ZWQgPSBzdGF0dXMgPT09IDIwMTtcbiAgdGhpcy5hY2NlcHRlZCA9IHN0YXR1cyA9PT0gMjAyO1xuICB0aGlzLm5vQ29udGVudCA9IHN0YXR1cyA9PT0gMjA0O1xuICB0aGlzLmJhZFJlcXVlc3QgPSBzdGF0dXMgPT09IDQwMDtcbiAgdGhpcy51bmF1dGhvcml6ZWQgPSBzdGF0dXMgPT09IDQwMTtcbiAgdGhpcy5ub3RBY2NlcHRhYmxlID0gc3RhdHVzID09PSA0MDY7XG4gIHRoaXMuZm9yYmlkZGVuID0gc3RhdHVzID09PSA0MDM7XG4gIHRoaXMubm90Rm91bmQgPSBzdGF0dXMgPT09IDQwNDtcbiAgdGhpcy51bnByb2Nlc3NhYmxlRW50aXR5ID0gc3RhdHVzID09PSA0MjI7XG59O1xuIl0sIm1hcHBpbmdzIjoiOztBQUFBO0FBQ0E7QUFDQTtBQUVBLElBQU1BLEtBQUssR0FBR0MsT0FBTyxDQUFDLFNBQUQsQ0FBckI7QUFFQTtBQUNBO0FBQ0E7OztBQUVBQyxNQUFNLENBQUNDLE9BQVAsR0FBaUJDLFlBQWpCO0FBRUE7QUFDQTtBQUNBO0FBQ0E7QUFDQTs7QUFFQSxTQUFTQSxZQUFULEdBQXdCLENBQUU7QUFFMUI7QUFDQTtBQUNBO0FBQ0E7QUFDQTtBQUNBO0FBQ0E7OztBQUVBQSxZQUFZLENBQUNDLFNBQWIsQ0FBdUJDLEdBQXZCLEdBQTZCLFVBQVVDLEtBQVYsRUFBaUI7RUFDNUMsT0FBTyxLQUFLQyxNQUFMLENBQVlELEtBQUssQ0FBQ0UsV0FBTixFQUFaLENBQVA7QUFDRCxDQUZEO0FBSUE7QUFDQTtBQUNBO0FBQ0E7QUFDQTtBQUNBO0FBQ0E7QUFDQTtBQUNBO0FBQ0E7QUFDQTs7O0FBRUFMLFlBQVksQ0FBQ0MsU0FBYixDQUF1Qkssb0JBQXZCLEdBQThDLFVBQVVGLE1BQVYsRUFBa0I7RUFDOUQ7RUFDQTtFQUVBO0VBQ0EsSUFBTUcsRUFBRSxHQUFHSCxNQUFNLENBQUMsY0FBRCxDQUFOLElBQTBCLEVBQXJDO0VBQ0EsS0FBS0ksSUFBTCxHQUFZWixLQUFLLENBQUNZLElBQU4sQ0FBV0QsRUFBWCxDQUFaLENBTjhELENBUTlEOztFQUNBLElBQU1FLFVBQVUsR0FBR2IsS0FBSyxDQUFDYyxNQUFOLENBQWFILEVBQWIsQ0FBbkI7O0VBQ0EsS0FBSyxJQUFNSSxHQUFYLElBQWtCRixVQUFsQixFQUE4QjtJQUM1QixJQUFJRyxNQUFNLENBQUNYLFNBQVAsQ0FBaUJZLGNBQWpCLENBQWdDQyxJQUFoQyxDQUFxQ0wsVUFBckMsRUFBaURFLEdBQWpELENBQUosRUFDRSxLQUFLQSxHQUFMLElBQVlGLFVBQVUsQ0FBQ0UsR0FBRCxDQUF0QjtFQUNIOztFQUVELEtBQUtJLEtBQUwsR0FBYSxFQUFiLENBZjhELENBaUI5RDs7RUFDQSxJQUFJO0lBQ0YsSUFBSVgsTUFBTSxDQUFDWSxJQUFYLEVBQWlCO01BQ2YsS0FBS0QsS0FBTCxHQUFhbkIsS0FBSyxDQUFDcUIsVUFBTixDQUFpQmIsTUFBTSxDQUFDWSxJQUF4QixDQUFiO0lBQ0Q7RUFDRixDQUpELENBSUUsZ0JBQU0sQ0FDTjtFQUNEO0FBQ0YsQ0F6QkQ7QUEyQkE7QUFDQTtBQUNBO0FBQ0E7QUFDQTtBQUNBO0FBQ0E7QUFDQTtBQUNBO0FBQ0E7QUFDQTtBQUNBO0FBQ0E7QUFDQTtBQUNBO0FBQ0E7QUFDQTtBQUNBO0FBQ0E7QUFDQTs7O0FBRUFoQixZQUFZLENBQUNDLFNBQWIsQ0FBdUJpQixvQkFBdkIsR0FBOEMsVUFBVUMsTUFBVixFQUFrQjtFQUM5RCxJQUFNWCxJQUFJLEdBQUdZLElBQUksQ0FBQ0MsS0FBTCxDQUFXRixNQUFNLEdBQUcsR0FBcEIsQ0FBYixDQUQ4RCxDQUc5RDs7RUFDQSxLQUFLRyxVQUFMLEdBQWtCSCxNQUFsQjtFQUNBLEtBQUtBLE1BQUwsR0FBYyxLQUFLRyxVQUFuQjtFQUNBLEtBQUtDLFVBQUwsR0FBa0JmLElBQWxCLENBTjhELENBUTlEOztFQUNBLEtBQUtnQixJQUFMLEdBQVloQixJQUFJLEtBQUssQ0FBckI7RUFDQSxLQUFLaUIsRUFBTCxHQUFVakIsSUFBSSxLQUFLLENBQW5CO0VBQ0EsS0FBS2tCLFFBQUwsR0FBZ0JsQixJQUFJLEtBQUssQ0FBekI7RUFDQSxLQUFLbUIsV0FBTCxHQUFtQm5CLElBQUksS0FBSyxDQUE1QjtFQUNBLEtBQUtvQixXQUFMLEdBQW1CcEIsSUFBSSxLQUFLLENBQTVCO0VBQ0EsS0FBS3FCLEtBQUwsR0FBYXJCLElBQUksS0FBSyxDQUFULElBQWNBLElBQUksS0FBSyxDQUF2QixHQUEyQixLQUFLc0IsT0FBTCxFQUEzQixHQUE0QyxLQUF6RCxDQWQ4RCxDQWdCOUQ7O0VBQ0EsS0FBS0MsT
0FBTCxHQUFlWixNQUFNLEtBQUssR0FBMUI7RUFDQSxLQUFLYSxRQUFMLEdBQWdCYixNQUFNLEtBQUssR0FBM0I7RUFDQSxLQUFLYyxTQUFMLEdBQWlCZCxNQUFNLEtBQUssR0FBNUI7RUFDQSxLQUFLZSxVQUFMLEdBQWtCZixNQUFNLEtBQUssR0FBN0I7RUFDQSxLQUFLZ0IsWUFBTCxHQUFvQmhCLE1BQU0sS0FBSyxHQUEvQjtFQUNBLEtBQUtpQixhQUFMLEdBQXFCakIsTUFBTSxLQUFLLEdBQWhDO0VBQ0EsS0FBS2tCLFNBQUwsR0FBaUJsQixNQUFNLEtBQUssR0FBNUI7RUFDQSxLQUFLbUIsUUFBTCxHQUFnQm5CLE1BQU0sS0FBSyxHQUEzQjtFQUNBLEtBQUtvQixtQkFBTCxHQUEyQnBCLE1BQU0sS0FBSyxHQUF0QztBQUNELENBMUJEIn0= \ No newline at end of file diff --git a/node_modules/superagent/lib/utils.js b/node_modules/superagent/lib/utils.js new file mode 100644 index 0000000..a3919d5 --- /dev/null +++ b/node_modules/superagent/lib/utils.js @@ -0,0 +1,142 @@ +"use strict"; + +function _typeof(obj) { "@babel/helpers - typeof"; return _typeof = "function" == typeof Symbol && "symbol" == typeof Symbol.iterator ? function (obj) { return typeof obj; } : function (obj) { return obj && "function" == typeof Symbol && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }, _typeof(obj); } + +function _createForOfIteratorHelper(o, allowArrayLike) { var it = typeof Symbol !== "undefined" && o[Symbol.iterator] || o["@@iterator"]; if (!it) { if (Array.isArray(o) || (it = _unsupportedIterableToArray(o)) || allowArrayLike && o && typeof o.length === "number") { if (it) o = it; var i = 0; var F = function F() {}; return { s: F, n: function n() { if (i >= o.length) return { done: true }; return { done: false, value: o[i++] }; }, e: function e(_e) { throw _e; }, f: F }; } throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); } var normalCompletion = true, didErr = false, err; return { s: function s() { it = it.call(o); }, n: function n() { var step = it.next(); normalCompletion = step.done; return step; }, e: function e(_e2) { didErr = true; err = _e2; }, f: function f() { try { if (!normalCompletion && it.return != null) it.return(); } finally { if (didErr) throw err; } } }; } + +function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return Array.from(o); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); } + +function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) { arr2[i] = arr[i]; } return arr2; } + +/** + * Return the mime type for the given `str`. + * + * @param {String} str + * @return {String} + * @api private + */ +exports.type = function (string_) { + return string_.split(/ *; */).shift(); +}; +/** + * Return header field parameters. + * + * @param {String} str + * @return {Object} + * @api private + */ + + +exports.params = function (value) { + var object = {}; + + var _iterator = _createForOfIteratorHelper(value.split(/ *; */)), + _step; + + try { + for (_iterator.s(); !(_step = _iterator.n()).done;) { + var string_ = _step.value; + var parts = string_.split(/ *= */); + var key = parts.shift(); + + var _value = parts.shift(); + + if (key && _value) object[key] = _value; + } + } catch (err) { + _iterator.e(err); + } finally { + _iterator.f(); + } + + return object; +}; +/** + * Parse Link header fields. 
+ * + * @param {String} str + * @return {Object} + * @api private + */ + + +exports.parseLinks = function (value) { + var object = {}; + + var _iterator2 = _createForOfIteratorHelper(value.split(/ *, */)), + _step2; + + try { + for (_iterator2.s(); !(_step2 = _iterator2.n()).done;) { + var string_ = _step2.value; + var parts = string_.split(/ *; */); + var url = parts[0].slice(1, -1); + var rel = parts[1].split(/ *= */)[1].slice(1, -1); + object[rel] = url; + } + } catch (err) { + _iterator2.e(err); + } finally { + _iterator2.f(); + } + + return object; +}; +/** + * Strip content related fields from `header`. + * + * @param {Object} header + * @return {Object} header + * @api private + */ + + +exports.cleanHeader = function (header, changesOrigin) { + delete header['content-type']; + delete header['content-length']; + delete header['transfer-encoding']; + delete header.host; // secuirty + + if (changesOrigin) { + delete header.authorization; + delete header.cookie; + } + + return header; +}; +/** + * Check if `obj` is an object. + * + * @param {Object} object + * @return {Boolean} + * @api private + */ + + +exports.isObject = function (object) { + return object !== null && _typeof(object) === 'object'; +}; +/** + * Object.hasOwn fallback/polyfill. + * + * @type {(object: object, property: string) => boolean} object + * @api private + */ + + +exports.hasOwn = Object.hasOwn || function (object, property) { + if (object == null) { + throw new TypeError('Cannot convert undefined or null to object'); + } + + return Object.prototype.hasOwnProperty.call(new Object(object), property); +}; + +exports.mixin = function (target, source) { + for (var key in source) { + if (exports.hasOwn(source, key)) { + target[key] = source[key]; + } + } +}; +//# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJuYW1lcyI6WyJleHBvcnRzIiwidHlwZSIsInN0cmluZ18iLCJzcGxpdCIsInNoaWZ0IiwicGFyYW1zIiwidmFsdWUiLCJvYmplY3QiLCJwYXJ0cyIsImtleSIsInBhcnNlTGlua3MiLCJ1cmwiLCJzbGljZSIsInJlbCIsImNsZWFuSGVhZGVyIiwiaGVhZGVyIiwiY2hhbmdlc09yaWdpbiIsImhvc3QiLCJhdXRob3JpemF0aW9uIiwiY29va2llIiwiaXNPYmplY3QiLCJoYXNPd24iLCJPYmplY3QiLCJwcm9wZXJ0eSIsIlR5cGVFcnJvciIsInByb3RvdHlwZSIsImhhc093blByb3BlcnR5IiwiY2FsbCIsIm1peGluIiwidGFyZ2V0Iiwic291cmNlIl0sInNvdXJjZXMiOlsiLi4vc3JjL3V0aWxzLmpzIl0sInNvdXJjZXNDb250ZW50IjpbIi8qKlxuICogUmV0dXJuIHRoZSBtaW1lIHR5cGUgZm9yIHRoZSBnaXZlbiBgc3RyYC5cbiAqXG4gKiBAcGFyYW0ge1N0cmluZ30gc3RyXG4gKiBAcmV0dXJuIHtTdHJpbmd9XG4gKiBAYXBpIHByaXZhdGVcbiAqL1xuXG5leHBvcnRzLnR5cGUgPSAoc3RyaW5nXykgPT4gc3RyaW5nXy5zcGxpdCgvICo7ICovKS5zaGlmdCgpO1xuXG4vKipcbiAqIFJldHVybiBoZWFkZXIgZmllbGQgcGFyYW1ldGVycy5cbiAqXG4gKiBAcGFyYW0ge1N0cmluZ30gc3RyXG4gKiBAcmV0dXJuIHtPYmplY3R9XG4gKiBAYXBpIHByaXZhdGVcbiAqL1xuXG5leHBvcnRzLnBhcmFtcyA9ICh2YWx1ZSkgPT4ge1xuICBjb25zdCBvYmplY3QgPSB7fTtcbiAgZm9yIChjb25zdCBzdHJpbmdfIG9mIHZhbHVlLnNwbGl0KC8gKjsgKi8pKSB7XG4gICAgY29uc3QgcGFydHMgPSBzdHJpbmdfLnNwbGl0KC8gKj0gKi8pO1xuICAgIGNvbnN0IGtleSA9IHBhcnRzLnNoaWZ0KCk7XG4gICAgY29uc3QgdmFsdWUgPSBwYXJ0cy5zaGlmdCgpO1xuXG4gICAgaWYgKGtleSAmJiB2YWx1ZSkgb2JqZWN0W2tleV0gPSB2YWx1ZTtcbiAgfVxuXG4gIHJldHVybiBvYmplY3Q7XG59O1xuXG4vKipcbiAqIFBhcnNlIExpbmsgaGVhZGVyIGZpZWxkcy5cbiAqXG4gKiBAcGFyYW0ge1N0cmluZ30gc3RyXG4gKiBAcmV0dXJuIHtPYmplY3R9XG4gKiBAYXBpIHByaXZhdGVcbiAqL1xuXG5leHBvcnRzLnBhcnNlTGlua3MgPSAodmFsdWUpID0+IHtcbiAgY29uc3Qgb2JqZWN0ID0ge307XG4gIGZvciAoY29uc3Qgc3RyaW5nXyBvZiB2YWx1ZS5zcGxpdCgvICosICovKSkge1xuICAgIGNvbnN0IHBhcnRzID0gc3RyaW5nXy5zcGxpdCgvICo7ICovKTtcbiAgICBjb25zdCB1cmwgPSBwYXJ0c1swXS5zbGljZSgxLCAtMSk7XG4gICAgY29uc3QgcmVsID0gcGFydHNbMV0uc3BsaXQoLyAqPSAqL
ylbMV0uc2xpY2UoMSwgLTEpO1xuICAgIG9iamVjdFtyZWxdID0gdXJsO1xuICB9XG5cbiAgcmV0dXJuIG9iamVjdDtcbn07XG5cbi8qKlxuICogU3RyaXAgY29udGVudCByZWxhdGVkIGZpZWxkcyBmcm9tIGBoZWFkZXJgLlxuICpcbiAqIEBwYXJhbSB7T2JqZWN0fSBoZWFkZXJcbiAqIEByZXR1cm4ge09iamVjdH0gaGVhZGVyXG4gKiBAYXBpIHByaXZhdGVcbiAqL1xuXG5leHBvcnRzLmNsZWFuSGVhZGVyID0gKGhlYWRlciwgY2hhbmdlc09yaWdpbikgPT4ge1xuICBkZWxldGUgaGVhZGVyWydjb250ZW50LXR5cGUnXTtcbiAgZGVsZXRlIGhlYWRlclsnY29udGVudC1sZW5ndGgnXTtcbiAgZGVsZXRlIGhlYWRlclsndHJhbnNmZXItZW5jb2RpbmcnXTtcbiAgZGVsZXRlIGhlYWRlci5ob3N0O1xuICAvLyBzZWN1aXJ0eVxuICBpZiAoY2hhbmdlc09yaWdpbikge1xuICAgIGRlbGV0ZSBoZWFkZXIuYXV0aG9yaXphdGlvbjtcbiAgICBkZWxldGUgaGVhZGVyLmNvb2tpZTtcbiAgfVxuXG4gIHJldHVybiBoZWFkZXI7XG59O1xuXG4vKipcbiAqIENoZWNrIGlmIGBvYmpgIGlzIGFuIG9iamVjdC5cbiAqXG4gKiBAcGFyYW0ge09iamVjdH0gb2JqZWN0XG4gKiBAcmV0dXJuIHtCb29sZWFufVxuICogQGFwaSBwcml2YXRlXG4gKi9cbmV4cG9ydHMuaXNPYmplY3QgPSAob2JqZWN0KSA9PiB7XG4gIHJldHVybiBvYmplY3QgIT09IG51bGwgJiYgdHlwZW9mIG9iamVjdCA9PT0gJ29iamVjdCc7XG59O1xuXG4vKipcbiAqIE9iamVjdC5oYXNPd24gZmFsbGJhY2svcG9seWZpbGwuXG4gKlxuICogQHR5cGUgeyhvYmplY3Q6IG9iamVjdCwgcHJvcGVydHk6IHN0cmluZykgPT4gYm9vbGVhbn0gb2JqZWN0XG4gKiBAYXBpIHByaXZhdGVcbiAqL1xuZXhwb3J0cy5oYXNPd24gPVxuICBPYmplY3QuaGFzT3duIHx8XG4gIGZ1bmN0aW9uIChvYmplY3QsIHByb3BlcnR5KSB7XG4gICAgaWYgKG9iamVjdCA9PSBudWxsKSB7XG4gICAgICB0aHJvdyBuZXcgVHlwZUVycm9yKCdDYW5ub3QgY29udmVydCB1bmRlZmluZWQgb3IgbnVsbCB0byBvYmplY3QnKTtcbiAgICB9XG5cbiAgICByZXR1cm4gT2JqZWN0LnByb3RvdHlwZS5oYXNPd25Qcm9wZXJ0eS5jYWxsKG5ldyBPYmplY3Qob2JqZWN0KSwgcHJvcGVydHkpO1xuICB9O1xuXG5leHBvcnRzLm1peGluID0gKHRhcmdldCwgc291cmNlKSA9PiB7XG4gIGZvciAoY29uc3Qga2V5IGluIHNvdXJjZSkge1xuICAgIGlmIChleHBvcnRzLmhhc093bihzb3VyY2UsIGtleSkpIHtcbiAgICAgIHRhcmdldFtrZXldID0gc291cmNlW2tleV07XG4gICAgfVxuICB9XG59O1xuIl0sIm1hcHBpbmdzIjoiOzs7Ozs7Ozs7O0FBQUE7QUFDQTtBQUNBO0FBQ0E7QUFDQTtBQUNBO0FBQ0E7QUFFQUEsT0FBTyxDQUFDQyxJQUFSLEdBQWUsVUFBQ0MsT0FBRDtFQUFBLE9BQWFBLE9BQU8sQ0FBQ0MsS0FBUixDQUFjLE9BQWQsRUFBdUJDLEtBQXZCLEVBQWI7QUFBQSxDQUFmO0FBRUE7QUFDQTtBQUNBO0FBQ0E7QUFDQTtBQUNBO0FBQ0E7OztBQUVBSixPQUFPLENBQUNLLE1BQVIsR0FBaUIsVUFBQ0MsS0FBRCxFQUFXO0VBQzFCLElBQU1DLE1BQU0sR0FBRyxFQUFmOztFQUQwQiwyQ0FFSkQsS0FBSyxDQUFDSCxLQUFOLENBQVksT0FBWixDQUZJO0VBQUE7O0VBQUE7SUFFMUIsb0RBQTRDO01BQUEsSUFBakNELE9BQWlDO01BQzFDLElBQU1NLEtBQUssR0FBR04sT0FBTyxDQUFDQyxLQUFSLENBQWMsT0FBZCxDQUFkO01BQ0EsSUFBTU0sR0FBRyxHQUFHRCxLQUFLLENBQUNKLEtBQU4sRUFBWjs7TUFDQSxJQUFNRSxNQUFLLEdBQUdFLEtBQUssQ0FBQ0osS0FBTixFQUFkOztNQUVBLElBQUlLLEdBQUcsSUFBSUgsTUFBWCxFQUFrQkMsTUFBTSxDQUFDRSxHQUFELENBQU4sR0FBY0gsTUFBZDtJQUNuQjtFQVJ5QjtJQUFBO0VBQUE7SUFBQTtFQUFBOztFQVUxQixPQUFPQyxNQUFQO0FBQ0QsQ0FYRDtBQWFBO0FBQ0E7QUFDQTtBQUNBO0FBQ0E7QUFDQTtBQUNBOzs7QUFFQVAsT0FBTyxDQUFDVSxVQUFSLEdBQXFCLFVBQUNKLEtBQUQsRUFBVztFQUM5QixJQUFNQyxNQUFNLEdBQUcsRUFBZjs7RUFEOEIsNENBRVJELEtBQUssQ0FBQ0gsS0FBTixDQUFZLE9BQVosQ0FGUTtFQUFBOztFQUFBO0lBRTlCLHVEQUE0QztNQUFBLElBQWpDRCxPQUFpQztNQUMxQyxJQUFNTSxLQUFLLEdBQUdOLE9BQU8sQ0FBQ0MsS0FBUixDQUFjLE9BQWQsQ0FBZDtNQUNBLElBQU1RLEdBQUcsR0FBR0gsS0FBSyxDQUFDLENBQUQsQ0FBTCxDQUFTSSxLQUFULENBQWUsQ0FBZixFQUFrQixDQUFDLENBQW5CLENBQVo7TUFDQSxJQUFNQyxHQUFHLEdBQUdMLEtBQUssQ0FBQyxDQUFELENBQUwsQ0FBU0wsS0FBVCxDQUFlLE9BQWYsRUFBd0IsQ0FBeEIsRUFBMkJTLEtBQTNCLENBQWlDLENBQWpDLEVBQW9DLENBQUMsQ0FBckMsQ0FBWjtNQUNBTCxNQUFNLENBQUNNLEdBQUQsQ0FBTixHQUFjRixHQUFkO0lBQ0Q7RUFQNkI7SUFBQTtFQUFBO0lBQUE7RUFBQTs7RUFTOUIsT0FBT0osTUFBUDtBQUNELENBVkQ7QUFZQTtBQUNBO0FBQ0E7QUFDQTtBQUNBO0FBQ0E7QUFDQTs7O0FBRUFQLE9BQU8sQ0FBQ2MsV0FBUixHQUFzQixVQUFDQyxNQUFELEVBQVNDLGFBQVQsRUFBMkI7RUFDL0MsT0FBT0QsTUFBTSxDQUFDLGNBQUQsQ0FBYjtFQUNBLE9BQU9BLE1BQU0sQ0FBQyxnQkFBRCxDQUFiO0VBQ0EsT0FBT0EsTUFBTSxDQUFDLG1CQUFELENBQWI7
RUFDQSxPQUFPQSxNQUFNLENBQUNFLElBQWQsQ0FKK0MsQ0FLL0M7O0VBQ0EsSUFBSUQsYUFBSixFQUFtQjtJQUNqQixPQUFPRCxNQUFNLENBQUNHLGFBQWQ7SUFDQSxPQUFPSCxNQUFNLENBQUNJLE1BQWQ7RUFDRDs7RUFFRCxPQUFPSixNQUFQO0FBQ0QsQ0FaRDtBQWNBO0FBQ0E7QUFDQTtBQUNBO0FBQ0E7QUFDQTtBQUNBOzs7QUFDQWYsT0FBTyxDQUFDb0IsUUFBUixHQUFtQixVQUFDYixNQUFELEVBQVk7RUFDN0IsT0FBT0EsTUFBTSxLQUFLLElBQVgsSUFBbUIsUUFBT0EsTUFBUCxNQUFrQixRQUE1QztBQUNELENBRkQ7QUFJQTtBQUNBO0FBQ0E7QUFDQTtBQUNBO0FBQ0E7OztBQUNBUCxPQUFPLENBQUNxQixNQUFSLEdBQ0VDLE1BQU0sQ0FBQ0QsTUFBUCxJQUNBLFVBQVVkLE1BQVYsRUFBa0JnQixRQUFsQixFQUE0QjtFQUMxQixJQUFJaEIsTUFBTSxJQUFJLElBQWQsRUFBb0I7SUFDbEIsTUFBTSxJQUFJaUIsU0FBSixDQUFjLDRDQUFkLENBQU47RUFDRDs7RUFFRCxPQUFPRixNQUFNLENBQUNHLFNBQVAsQ0FBaUJDLGNBQWpCLENBQWdDQyxJQUFoQyxDQUFxQyxJQUFJTCxNQUFKLENBQVdmLE1BQVgsQ0FBckMsRUFBeURnQixRQUF6RCxDQUFQO0FBQ0QsQ0FSSDs7QUFVQXZCLE9BQU8sQ0FBQzRCLEtBQVIsR0FBZ0IsVUFBQ0MsTUFBRCxFQUFTQyxNQUFULEVBQW9CO0VBQ2xDLEtBQUssSUFBTXJCLEdBQVgsSUFBa0JxQixNQUFsQixFQUEwQjtJQUN4QixJQUFJOUIsT0FBTyxDQUFDcUIsTUFBUixDQUFlUyxNQUFmLEVBQXVCckIsR0FBdkIsQ0FBSixFQUFpQztNQUMvQm9CLE1BQU0sQ0FBQ3BCLEdBQUQsQ0FBTixHQUFjcUIsTUFBTSxDQUFDckIsR0FBRCxDQUFwQjtJQUNEO0VBQ0Y7QUFDRixDQU5EIn0= \ No newline at end of file diff --git a/node_modules/superagent/node_modules/debug/LICENSE b/node_modules/superagent/node_modules/debug/LICENSE new file mode 100644 index 0000000..1a9820e --- /dev/null +++ b/node_modules/superagent/node_modules/debug/LICENSE @@ -0,0 +1,20 @@ +(The MIT License) + +Copyright (c) 2014-2017 TJ Holowaychuk +Copyright (c) 2018-2021 Josh Junon + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software +and associated documentation files (the 'Software'), to deal in the Software without restriction, +including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial +portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT +LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + diff --git a/node_modules/superagent/node_modules/debug/README.md b/node_modules/superagent/node_modules/debug/README.md new file mode 100644 index 0000000..e9c3e04 --- /dev/null +++ b/node_modules/superagent/node_modules/debug/README.md @@ -0,0 +1,481 @@ +# debug +[![Build Status](https://travis-ci.org/debug-js/debug.svg?branch=master)](https://travis-ci.org/debug-js/debug) [![Coverage Status](https://coveralls.io/repos/github/debug-js/debug/badge.svg?branch=master)](https://coveralls.io/github/debug-js/debug?branch=master) [![Slack](https://visionmedia-community-slackin.now.sh/badge.svg)](https://visionmedia-community-slackin.now.sh/) [![OpenCollective](https://opencollective.com/debug/backers/badge.svg)](#backers) +[![OpenCollective](https://opencollective.com/debug/sponsors/badge.svg)](#sponsors) + + + +A tiny JavaScript debugging utility modelled after Node.js core's debugging +technique. Works in Node.js and web browsers. 
+ +## Installation + +```bash +$ npm install debug +``` + +## Usage + +`debug` exposes a function; simply pass this function the name of your module, and it will return a decorated version of `console.error` for you to pass debug statements to. This will allow you to toggle the debug output for different parts of your module as well as the module as a whole. + +Example [_app.js_](./examples/node/app.js): + +```js +var debug = require('debug')('http') + , http = require('http') + , name = 'My App'; + +// fake app + +debug('booting %o', name); + +http.createServer(function(req, res){ + debug(req.method + ' ' + req.url); + res.end('hello\n'); +}).listen(3000, function(){ + debug('listening'); +}); + +// fake worker of some kind + +require('./worker'); +``` + +Example [_worker.js_](./examples/node/worker.js): + +```js +var a = require('debug')('worker:a') + , b = require('debug')('worker:b'); + +function work() { + a('doing lots of uninteresting work'); + setTimeout(work, Math.random() * 1000); +} + +work(); + +function workb() { + b('doing some work'); + setTimeout(workb, Math.random() * 2000); +} + +workb(); +``` + +The `DEBUG` environment variable is then used to enable these based on space or +comma-delimited names. + +Here are some examples: + +screen shot 2017-08-08 at 12 53 04 pm +screen shot 2017-08-08 at 12 53 38 pm +screen shot 2017-08-08 at 12 53 25 pm + +#### Windows command prompt notes + +##### CMD + +On Windows the environment variable is set using the `set` command. + +```cmd +set DEBUG=*,-not_this +``` + +Example: + +```cmd +set DEBUG=* & node app.js +``` + +##### PowerShell (VS Code default) + +PowerShell uses different syntax to set environment variables. + +```cmd +$env:DEBUG = "*,-not_this" +``` + +Example: + +```cmd +$env:DEBUG='app';node app.js +``` + +Then, run the program to be debugged as usual. + +npm script example: +```js + "windowsDebug": "@powershell -Command $env:DEBUG='*';node app.js", +``` + +## Namespace Colors + +Every debug instance has a color generated for it based on its namespace name. +This helps when visually parsing the debug output to identify which debug instance +a debug line belongs to. + +#### Node.js + +In Node.js, colors are enabled when stderr is a TTY. You also _should_ install +the [`supports-color`](https://npmjs.org/supports-color) module alongside debug, +otherwise debug will only use a small handful of basic colors. + + + +#### Web Browser + +Colors are also enabled on "Web Inspectors" that understand the `%c` formatting +option. These are WebKit web inspectors, Firefox ([since version +31](https://hacks.mozilla.org/2014/05/editable-box-model-multiple-selection-sublime-text-keys-much-more-firefox-developer-tools-episode-31/)) +and the Firebug plugin for Firefox (any version). + + + + +## Millisecond diff + +When actively developing an application it can be useful to see when the time spent between one `debug()` call and the next. Suppose for example you invoke `debug()` before requesting a resource, and after as well, the "+NNNms" will show you how much time was spent between calls. + + + +When stdout is not a TTY, `Date#toISOString()` is used, making it more useful for logging the debug information as shown below: + + + + +## Conventions + +If you're using this in one or more of your libraries, you _should_ use the name of your library so that developers may toggle debugging as desired without guessing names. If you have more than one debuggers you _should_ prefix them with your library name and use ":" to separate features. 
For example "bodyParser" from Connect would then be "connect:bodyParser". If you append a "*" to the end of your name, it will always be enabled regardless of the setting of the DEBUG environment variable. You can then use it for normal output as well as debug output. + +## Wildcards + +The `*` character may be used as a wildcard. Suppose for example your library has +debuggers named "connect:bodyParser", "connect:compress", "connect:session", +instead of listing all three with +`DEBUG=connect:bodyParser,connect:compress,connect:session`, you may simply do +`DEBUG=connect:*`, or to run everything using this module simply use `DEBUG=*`. + +You can also exclude specific debuggers by prefixing them with a "-" character. +For example, `DEBUG=*,-connect:*` would include all debuggers except those +starting with "connect:". + +## Environment Variables + +When running through Node.js, you can set a few environment variables that will +change the behavior of the debug logging: + +| Name | Purpose | +|-----------|-------------------------------------------------| +| `DEBUG` | Enables/disables specific debugging namespaces. | +| `DEBUG_HIDE_DATE` | Hide date from debug output (non-TTY). | +| `DEBUG_COLORS`| Whether or not to use colors in the debug output. | +| `DEBUG_DEPTH` | Object inspection depth. | +| `DEBUG_SHOW_HIDDEN` | Shows hidden properties on inspected objects. | + + +__Note:__ The environment variables beginning with `DEBUG_` end up being +converted into an Options object that gets used with `%o`/`%O` formatters. +See the Node.js documentation for +[`util.inspect()`](https://nodejs.org/api/util.html#util_util_inspect_object_options) +for the complete list. + +## Formatters + +Debug uses [printf-style](https://wikipedia.org/wiki/Printf_format_string) formatting. +Below are the officially supported formatters: + +| Formatter | Representation | +|-----------|----------------| +| `%O` | Pretty-print an Object on multiple lines. | +| `%o` | Pretty-print an Object all on a single line. | +| `%s` | String. | +| `%d` | Number (both integer and float). | +| `%j` | JSON. Replaced with the string '[Circular]' if the argument contains circular references. | +| `%%` | Single percent sign ('%'). This does not consume an argument. | + + +### Custom formatters + +You can add custom formatters by extending the `debug.formatters` object. +For example, if you wanted to add support for rendering a Buffer as hex with +`%h`, you could do something like: + +```js +const createDebug = require('debug') +createDebug.formatters.h = (v) => { + return v.toString('hex') +} + +// …elsewhere +const debug = createDebug('foo') +debug('this is hex: %h', new Buffer('hello world')) +// foo this is hex: 68656c6c6f20776f726c6421 +0ms +``` + + +## Browser Support + +You can build a browser-ready script using [browserify](https://github.com/substack/node-browserify), +or just use the [browserify-as-a-service](https://wzrd.in/) [build](https://wzrd.in/standalone/debug@latest), +if you don't want to build it yourself. + +Debug's enable state is currently persisted by `localStorage`. +Consider the situation shown below where you have `worker:a` and `worker:b`, +and wish to debug both. You can enable this using `localStorage.debug`: + +```js +localStorage.debug = 'worker:*' +``` + +And then refresh the page. + +```js +a = debug('worker:a'); +b = debug('worker:b'); + +setInterval(function(){ + a('doing some work'); +}, 1000); + +setInterval(function(){ + b('doing some work'); +}, 1200); +``` + +In Chromium-based web browsers (e.g. 
Brave, Chrome, and Electron), the JavaScript console will—by default—only show messages logged by `debug` if the "Verbose" log level is _enabled_. + + + +## Output streams + + By default `debug` will log to stderr, however this can be configured per-namespace by overriding the `log` method: + +Example [_stdout.js_](./examples/node/stdout.js): + +```js +var debug = require('debug'); +var error = debug('app:error'); + +// by default stderr is used +error('goes to stderr!'); + +var log = debug('app:log'); +// set this namespace to log via console.log +log.log = console.log.bind(console); // don't forget to bind to console! +log('goes to stdout'); +error('still goes to stderr!'); + +// set all output to go via console.info +// overrides all per-namespace log settings +debug.log = console.info.bind(console); +error('now goes to stdout via console.info'); +log('still goes to stdout, but via console.info now'); +``` + +## Extend +You can simply extend debugger +```js +const log = require('debug')('auth'); + +//creates new debug instance with extended namespace +const logSign = log.extend('sign'); +const logLogin = log.extend('login'); + +log('hello'); // auth hello +logSign('hello'); //auth:sign hello +logLogin('hello'); //auth:login hello +``` + +## Set dynamically + +You can also enable debug dynamically by calling the `enable()` method : + +```js +let debug = require('debug'); + +console.log(1, debug.enabled('test')); + +debug.enable('test'); +console.log(2, debug.enabled('test')); + +debug.disable(); +console.log(3, debug.enabled('test')); + +``` + +print : +``` +1 false +2 true +3 false +``` + +Usage : +`enable(namespaces)` +`namespaces` can include modes separated by a colon and wildcards. + +Note that calling `enable()` completely overrides previously set DEBUG variable : + +``` +$ DEBUG=foo node -e 'var dbg = require("debug"); dbg.enable("bar"); console.log(dbg.enabled("foo"))' +=> false +``` + +`disable()` + +Will disable all namespaces. The functions returns the namespaces currently +enabled (and skipped). This can be useful if you want to disable debugging +temporarily without knowing what was enabled to begin with. + +For example: + +```js +let debug = require('debug'); +debug.enable('foo:*,-foo:bar'); +let namespaces = debug.disable(); +debug.enable(namespaces); +``` + +Note: There is no guarantee that the string will be identical to the initial +enable string, but semantically they will be identical. + +## Checking whether a debug target is enabled + +After you've created a debug instance, you can determine whether or not it is +enabled by checking the `enabled` property: + +```javascript +const debug = require('debug')('http'); + +if (debug.enabled) { + // do stuff... +} +``` + +You can also manually toggle this property to force the debug instance to be +enabled or disabled. + +## Usage in child processes + +Due to the way `debug` detects if the output is a TTY or not, colors are not shown in child processes when `stderr` is piped. A solution is to pass the `DEBUG_COLORS=1` environment variable to the child process. 
+For example: + +```javascript +worker = fork(WORKER_WRAP_PATH, [workerPath], { + stdio: [ + /* stdin: */ 0, + /* stdout: */ 'pipe', + /* stderr: */ 'pipe', + 'ipc', + ], + env: Object.assign({}, process.env, { + DEBUG_COLORS: 1 // without this settings, colors won't be shown + }), +}); + +worker.stderr.pipe(process.stderr, { end: false }); +``` + + +## Authors + + - TJ Holowaychuk + - Nathan Rajlich + - Andrew Rhyne + - Josh Junon + +## Backers + +Support us with a monthly donation and help us continue our activities. [[Become a backer](https://opencollective.com/debug#backer)] + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +## Sponsors + +Become a sponsor and get your logo on our README on Github with a link to your site. [[Become a sponsor](https://opencollective.com/debug#sponsor)] + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +## License + +(The MIT License) + +Copyright (c) 2014-2017 TJ Holowaychuk <tj@vision-media.ca> +Copyright (c) 2018-2021 Josh Junon + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/superagent/node_modules/debug/package.json b/node_modules/superagent/node_modules/debug/package.json new file mode 100644 index 0000000..3bcdc24 --- /dev/null +++ b/node_modules/superagent/node_modules/debug/package.json @@ -0,0 +1,59 @@ +{ + "name": "debug", + "version": "4.3.4", + "repository": { + "type": "git", + "url": "git://github.com/debug-js/debug.git" + }, + "description": "Lightweight debugging utility for Node.js and the browser", + "keywords": [ + "debug", + "log", + "debugger" + ], + "files": [ + "src", + "LICENSE", + "README.md" + ], + "author": "Josh Junon ", + "contributors": [ + "TJ Holowaychuk ", + "Nathan Rajlich (http://n8.io)", + "Andrew Rhyne " + ], + "license": "MIT", + "scripts": { + "lint": "xo", + "test": "npm run test:node && npm run test:browser && npm run lint", + "test:node": "istanbul cover _mocha -- test.js", + "test:browser": "karma start --single-run", + "test:coverage": "cat ./coverage/lcov.info | coveralls" + }, + "dependencies": { + "ms": "2.1.2" + }, + "devDependencies": { + "brfs": "^2.0.1", + "browserify": "^16.2.3", + "coveralls": "^3.0.2", + "istanbul": "^0.4.5", + "karma": "^3.1.4", + "karma-browserify": "^6.0.0", + "karma-chrome-launcher": "^2.2.0", + "karma-mocha": "^1.3.0", + "mocha": "^5.2.0", + "mocha-lcov-reporter": "^1.2.0", + "xo": "^0.23.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + }, + "main": "./src/index.js", + "browser": "./src/browser.js", + "engines": { + "node": ">=6.0" + } +} diff --git a/node_modules/superagent/node_modules/debug/src/browser.js b/node_modules/superagent/node_modules/debug/src/browser.js new file mode 100644 index 0000000..cd0fc35 --- /dev/null +++ b/node_modules/superagent/node_modules/debug/src/browser.js @@ -0,0 +1,269 @@ +/* eslint-env browser */ + +/** + * This is the web browser implementation of `debug()`. + */ + +exports.formatArgs = formatArgs; +exports.save = save; +exports.load = load; +exports.useColors = useColors; +exports.storage = localstorage(); +exports.destroy = (() => { + let warned = false; + + return () => { + if (!warned) { + warned = true; + console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.'); + } + }; +})(); + +/** + * Colors. + */ + +exports.colors = [ + '#0000CC', + '#0000FF', + '#0033CC', + '#0033FF', + '#0066CC', + '#0066FF', + '#0099CC', + '#0099FF', + '#00CC00', + '#00CC33', + '#00CC66', + '#00CC99', + '#00CCCC', + '#00CCFF', + '#3300CC', + '#3300FF', + '#3333CC', + '#3333FF', + '#3366CC', + '#3366FF', + '#3399CC', + '#3399FF', + '#33CC00', + '#33CC33', + '#33CC66', + '#33CC99', + '#33CCCC', + '#33CCFF', + '#6600CC', + '#6600FF', + '#6633CC', + '#6633FF', + '#66CC00', + '#66CC33', + '#9900CC', + '#9900FF', + '#9933CC', + '#9933FF', + '#99CC00', + '#99CC33', + '#CC0000', + '#CC0033', + '#CC0066', + '#CC0099', + '#CC00CC', + '#CC00FF', + '#CC3300', + '#CC3333', + '#CC3366', + '#CC3399', + '#CC33CC', + '#CC33FF', + '#CC6600', + '#CC6633', + '#CC9900', + '#CC9933', + '#CCCC00', + '#CCCC33', + '#FF0000', + '#FF0033', + '#FF0066', + '#FF0099', + '#FF00CC', + '#FF00FF', + '#FF3300', + '#FF3333', + '#FF3366', + '#FF3399', + '#FF33CC', + '#FF33FF', + '#FF6600', + '#FF6633', + '#FF9900', + '#FF9933', + '#FFCC00', + '#FFCC33' +]; + +/** + * Currently only WebKit-based Web Inspectors, Firefox >= v31, + * and the Firebug extension (any Firefox version) are known + * to support "%c" CSS customizations. 
+ * + * TODO: add a `localStorage` variable to explicitly enable/disable colors + */ + +// eslint-disable-next-line complexity +function useColors() { + // NB: In an Electron preload script, document will be defined but not fully + // initialized. Since we know we're in Chrome, we'll just detect this case + // explicitly + if (typeof window !== 'undefined' && window.process && (window.process.type === 'renderer' || window.process.__nwjs)) { + return true; + } + + // Internet Explorer and Edge do not support colors. + if (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\/(\d+)/)) { + return false; + } + + // Is webkit? http://stackoverflow.com/a/16459606/376773 + // document is undefined in react-native: https://github.com/facebook/react-native/pull/1632 + return (typeof document !== 'undefined' && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance) || + // Is firebug? http://stackoverflow.com/a/398120/376773 + (typeof window !== 'undefined' && window.console && (window.console.firebug || (window.console.exception && window.console.table))) || + // Is firefox >= v31? + // https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages + (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/) && parseInt(RegExp.$1, 10) >= 31) || + // Double check webkit in userAgent just in case we are in a worker + (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/)); +} + +/** + * Colorize log arguments if enabled. + * + * @api public + */ + +function formatArgs(args) { + args[0] = (this.useColors ? '%c' : '') + + this.namespace + + (this.useColors ? ' %c' : ' ') + + args[0] + + (this.useColors ? '%c ' : ' ') + + '+' + module.exports.humanize(this.diff); + + if (!this.useColors) { + return; + } + + const c = 'color: ' + this.color; + args.splice(1, 0, c, 'color: inherit'); + + // The final "%c" is somewhat tricky, because there could be other + // arguments passed either before or after the %c, so we need to + // figure out the correct index to insert the CSS into + let index = 0; + let lastC = 0; + args[0].replace(/%[a-zA-Z%]/g, match => { + if (match === '%%') { + return; + } + index++; + if (match === '%c') { + // We only are interested in the *last* %c + // (the user may have provided their own) + lastC = index; + } + }); + + args.splice(lastC, 0, c); +} + +/** + * Invokes `console.debug()` when available. + * No-op when `console.debug` is not a "function". + * If `console.debug` is not available, falls back + * to `console.log`. + * + * @api public + */ +exports.log = console.debug || console.log || (() => {}); + +/** + * Save `namespaces`. + * + * @param {String} namespaces + * @api private + */ +function save(namespaces) { + try { + if (namespaces) { + exports.storage.setItem('debug', namespaces); + } else { + exports.storage.removeItem('debug'); + } + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } +} + +/** + * Load `namespaces`. + * + * @return {String} returns the previously persisted debug modes + * @api private + */ +function load() { + let r; + try { + r = exports.storage.getItem('debug'); + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? 
+ } + + // If debug isn't set in LS, and we're in Electron, try to load $DEBUG + if (!r && typeof process !== 'undefined' && 'env' in process) { + r = process.env.DEBUG; + } + + return r; +} + +/** + * Localstorage attempts to return the localstorage. + * + * This is necessary because safari throws + * when a user disables cookies/localstorage + * and you attempt to access it. + * + * @return {LocalStorage} + * @api private + */ + +function localstorage() { + try { + // TVMLKit (Apple TV JS Runtime) does not have a window object, just localStorage in the global context + // The Browser also has localStorage in the global context. + return localStorage; + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } +} + +module.exports = require('./common')(exports); + +const {formatters} = module.exports; + +/** + * Map %j to `JSON.stringify()`, since no Web Inspectors do that by default. + */ + +formatters.j = function (v) { + try { + return JSON.stringify(v); + } catch (error) { + return '[UnexpectedJSONParseError]: ' + error.message; + } +}; diff --git a/node_modules/superagent/node_modules/debug/src/common.js b/node_modules/superagent/node_modules/debug/src/common.js new file mode 100644 index 0000000..e3291b2 --- /dev/null +++ b/node_modules/superagent/node_modules/debug/src/common.js @@ -0,0 +1,274 @@ + +/** + * This is the common logic for both the Node.js and web browser + * implementations of `debug()`. + */ + +function setup(env) { + createDebug.debug = createDebug; + createDebug.default = createDebug; + createDebug.coerce = coerce; + createDebug.disable = disable; + createDebug.enable = enable; + createDebug.enabled = enabled; + createDebug.humanize = require('ms'); + createDebug.destroy = destroy; + + Object.keys(env).forEach(key => { + createDebug[key] = env[key]; + }); + + /** + * The currently active debug mode names, and names to skip. + */ + + createDebug.names = []; + createDebug.skips = []; + + /** + * Map of special "%n" handling functions, for the debug "format" argument. + * + * Valid key names are a single, lower or upper-case letter, i.e. "n" and "N". + */ + createDebug.formatters = {}; + + /** + * Selects a color for a debug namespace + * @param {String} namespace The namespace string for the debug instance to be colored + * @return {Number|String} An ANSI color code for the given namespace + * @api private + */ + function selectColor(namespace) { + let hash = 0; + + for (let i = 0; i < namespace.length; i++) { + hash = ((hash << 5) - hash) + namespace.charCodeAt(i); + hash |= 0; // Convert to 32bit integer + } + + return createDebug.colors[Math.abs(hash) % createDebug.colors.length]; + } + createDebug.selectColor = selectColor; + + /** + * Create a debugger with the given `namespace`. + * + * @param {String} namespace + * @return {Function} + * @api public + */ + function createDebug(namespace) { + let prevTime; + let enableOverride = null; + let namespacesCache; + let enabledCache; + + function debug(...args) { + // Disabled? 
+ if (!debug.enabled) { + return; + } + + const self = debug; + + // Set `diff` timestamp + const curr = Number(new Date()); + const ms = curr - (prevTime || curr); + self.diff = ms; + self.prev = prevTime; + self.curr = curr; + prevTime = curr; + + args[0] = createDebug.coerce(args[0]); + + if (typeof args[0] !== 'string') { + // Anything else let's inspect with %O + args.unshift('%O'); + } + + // Apply any `formatters` transformations + let index = 0; + args[0] = args[0].replace(/%([a-zA-Z%])/g, (match, format) => { + // If we encounter an escaped % then don't increase the array index + if (match === '%%') { + return '%'; + } + index++; + const formatter = createDebug.formatters[format]; + if (typeof formatter === 'function') { + const val = args[index]; + match = formatter.call(self, val); + + // Now we need to remove `args[index]` since it's inlined in the `format` + args.splice(index, 1); + index--; + } + return match; + }); + + // Apply env-specific formatting (colors, etc.) + createDebug.formatArgs.call(self, args); + + const logFn = self.log || createDebug.log; + logFn.apply(self, args); + } + + debug.namespace = namespace; + debug.useColors = createDebug.useColors(); + debug.color = createDebug.selectColor(namespace); + debug.extend = extend; + debug.destroy = createDebug.destroy; // XXX Temporary. Will be removed in the next major release. + + Object.defineProperty(debug, 'enabled', { + enumerable: true, + configurable: false, + get: () => { + if (enableOverride !== null) { + return enableOverride; + } + if (namespacesCache !== createDebug.namespaces) { + namespacesCache = createDebug.namespaces; + enabledCache = createDebug.enabled(namespace); + } + + return enabledCache; + }, + set: v => { + enableOverride = v; + } + }); + + // Env-specific initialization logic for debug instances + if (typeof createDebug.init === 'function') { + createDebug.init(debug); + } + + return debug; + } + + function extend(namespace, delimiter) { + const newDebug = createDebug(this.namespace + (typeof delimiter === 'undefined' ? ':' : delimiter) + namespace); + newDebug.log = this.log; + return newDebug; + } + + /** + * Enables a debug mode by namespaces. This can include modes + * separated by a colon and wildcards. + * + * @param {String} namespaces + * @api public + */ + function enable(namespaces) { + createDebug.save(namespaces); + createDebug.namespaces = namespaces; + + createDebug.names = []; + createDebug.skips = []; + + let i; + const split = (typeof namespaces === 'string' ? namespaces : '').split(/[\s,]+/); + const len = split.length; + + for (i = 0; i < len; i++) { + if (!split[i]) { + // ignore empty strings + continue; + } + + namespaces = split[i].replace(/\*/g, '.*?'); + + if (namespaces[0] === '-') { + createDebug.skips.push(new RegExp('^' + namespaces.slice(1) + '$')); + } else { + createDebug.names.push(new RegExp('^' + namespaces + '$')); + } + } + } + + /** + * Disable debug output. + * + * @return {String} namespaces + * @api public + */ + function disable() { + const namespaces = [ + ...createDebug.names.map(toNamespace), + ...createDebug.skips.map(toNamespace).map(namespace => '-' + namespace) + ].join(','); + createDebug.enable(''); + return namespaces; + } + + /** + * Returns true if the given mode name is enabled, false otherwise. 
+ * + * @param {String} name + * @return {Boolean} + * @api public + */ + function enabled(name) { + if (name[name.length - 1] === '*') { + return true; + } + + let i; + let len; + + for (i = 0, len = createDebug.skips.length; i < len; i++) { + if (createDebug.skips[i].test(name)) { + return false; + } + } + + for (i = 0, len = createDebug.names.length; i < len; i++) { + if (createDebug.names[i].test(name)) { + return true; + } + } + + return false; + } + + /** + * Convert regexp to namespace + * + * @param {RegExp} regxep + * @return {String} namespace + * @api private + */ + function toNamespace(regexp) { + return regexp.toString() + .substring(2, regexp.toString().length - 2) + .replace(/\.\*\?$/, '*'); + } + + /** + * Coerce `val`. + * + * @param {Mixed} val + * @return {Mixed} + * @api private + */ + function coerce(val) { + if (val instanceof Error) { + return val.stack || val.message; + } + return val; + } + + /** + * XXX DO NOT USE. This is a temporary stub function. + * XXX It WILL be removed in the next major release. + */ + function destroy() { + console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.'); + } + + createDebug.enable(createDebug.load()); + + return createDebug; +} + +module.exports = setup; diff --git a/node_modules/superagent/node_modules/debug/src/index.js b/node_modules/superagent/node_modules/debug/src/index.js new file mode 100644 index 0000000..bf4c57f --- /dev/null +++ b/node_modules/superagent/node_modules/debug/src/index.js @@ -0,0 +1,10 @@ +/** + * Detect Electron renderer / nwjs process, which is node, but we should + * treat as a browser. + */ + +if (typeof process === 'undefined' || process.type === 'renderer' || process.browser === true || process.__nwjs) { + module.exports = require('./browser.js'); +} else { + module.exports = require('./node.js'); +} diff --git a/node_modules/superagent/node_modules/debug/src/node.js b/node_modules/superagent/node_modules/debug/src/node.js new file mode 100644 index 0000000..79bc085 --- /dev/null +++ b/node_modules/superagent/node_modules/debug/src/node.js @@ -0,0 +1,263 @@ +/** + * Module dependencies. + */ + +const tty = require('tty'); +const util = require('util'); + +/** + * This is the Node.js implementation of `debug()`. + */ + +exports.init = init; +exports.log = log; +exports.formatArgs = formatArgs; +exports.save = save; +exports.load = load; +exports.useColors = useColors; +exports.destroy = util.deprecate( + () => {}, + 'Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.' +); + +/** + * Colors. 
+ */ + +exports.colors = [6, 2, 3, 4, 5, 1]; + +try { + // Optional dependency (as in, doesn't need to be installed, NOT like optionalDependencies in package.json) + // eslint-disable-next-line import/no-extraneous-dependencies + const supportsColor = require('supports-color'); + + if (supportsColor && (supportsColor.stderr || supportsColor).level >= 2) { + exports.colors = [ + 20, + 21, + 26, + 27, + 32, + 33, + 38, + 39, + 40, + 41, + 42, + 43, + 44, + 45, + 56, + 57, + 62, + 63, + 68, + 69, + 74, + 75, + 76, + 77, + 78, + 79, + 80, + 81, + 92, + 93, + 98, + 99, + 112, + 113, + 128, + 129, + 134, + 135, + 148, + 149, + 160, + 161, + 162, + 163, + 164, + 165, + 166, + 167, + 168, + 169, + 170, + 171, + 172, + 173, + 178, + 179, + 184, + 185, + 196, + 197, + 198, + 199, + 200, + 201, + 202, + 203, + 204, + 205, + 206, + 207, + 208, + 209, + 214, + 215, + 220, + 221 + ]; + } +} catch (error) { + // Swallow - we only care if `supports-color` is available; it doesn't have to be. +} + +/** + * Build up the default `inspectOpts` object from the environment variables. + * + * $ DEBUG_COLORS=no DEBUG_DEPTH=10 DEBUG_SHOW_HIDDEN=enabled node script.js + */ + +exports.inspectOpts = Object.keys(process.env).filter(key => { + return /^debug_/i.test(key); +}).reduce((obj, key) => { + // Camel-case + const prop = key + .substring(6) + .toLowerCase() + .replace(/_([a-z])/g, (_, k) => { + return k.toUpperCase(); + }); + + // Coerce string value into JS value + let val = process.env[key]; + if (/^(yes|on|true|enabled)$/i.test(val)) { + val = true; + } else if (/^(no|off|false|disabled)$/i.test(val)) { + val = false; + } else if (val === 'null') { + val = null; + } else { + val = Number(val); + } + + obj[prop] = val; + return obj; +}, {}); + +/** + * Is stdout a TTY? Colored output is enabled when `true`. + */ + +function useColors() { + return 'colors' in exports.inspectOpts ? + Boolean(exports.inspectOpts.colors) : + tty.isatty(process.stderr.fd); +} + +/** + * Adds ANSI color escape codes if enabled. + * + * @api public + */ + +function formatArgs(args) { + const {namespace: name, useColors} = this; + + if (useColors) { + const c = this.color; + const colorCode = '\u001B[3' + (c < 8 ? c : '8;5;' + c); + const prefix = ` ${colorCode};1m${name} \u001B[0m`; + + args[0] = prefix + args[0].split('\n').join('\n' + prefix); + args.push(colorCode + 'm+' + module.exports.humanize(this.diff) + '\u001B[0m'); + } else { + args[0] = getDate() + name + ' ' + args[0]; + } +} + +function getDate() { + if (exports.inspectOpts.hideDate) { + return ''; + } + return new Date().toISOString() + ' '; +} + +/** + * Invokes `util.format()` with the specified arguments and writes to stderr. + */ + +function log(...args) { + return process.stderr.write(util.format(...args) + '\n'); +} + +/** + * Save `namespaces`. + * + * @param {String} namespaces + * @api private + */ +function save(namespaces) { + if (namespaces) { + process.env.DEBUG = namespaces; + } else { + // If you set a process.env field to null or undefined, it gets cast to the + // string 'null' or 'undefined'. Just delete instead. + delete process.env.DEBUG; + } +} + +/** + * Load `namespaces`. + * + * @return {String} returns the previously persisted debug modes + * @api private + */ + +function load() { + return process.env.DEBUG; +} + +/** + * Init logic for `debug` instances. + * + * Create a new `inspectOpts` object in case `useColors` is set + * differently for a particular `debug` instance. 
+ */ + +function init(debug) { + debug.inspectOpts = {}; + + const keys = Object.keys(exports.inspectOpts); + for (let i = 0; i < keys.length; i++) { + debug.inspectOpts[keys[i]] = exports.inspectOpts[keys[i]]; + } +} + +module.exports = require('./common')(exports); + +const {formatters} = module.exports; + +/** + * Map %o to `util.inspect()`, all on a single line. + */ + +formatters.o = function (v) { + this.inspectOpts.colors = this.useColors; + return util.inspect(v, this.inspectOpts) + .split('\n') + .map(str => str.trim()) + .join(' '); +}; + +/** + * Map %O to `util.inspect()`, allowing multiple lines if needed. + */ + +formatters.O = function (v) { + this.inspectOpts.colors = this.useColors; + return util.inspect(v, this.inspectOpts); +}; diff --git a/node_modules/superagent/node_modules/ms/index.js b/node_modules/superagent/node_modules/ms/index.js new file mode 100644 index 0000000..c4498bc --- /dev/null +++ b/node_modules/superagent/node_modules/ms/index.js @@ -0,0 +1,162 @@ +/** + * Helpers. + */ + +var s = 1000; +var m = s * 60; +var h = m * 60; +var d = h * 24; +var w = d * 7; +var y = d * 365.25; + +/** + * Parse or format the given `val`. + * + * Options: + * + * - `long` verbose formatting [false] + * + * @param {String|Number} val + * @param {Object} [options] + * @throws {Error} throw an error if val is not a non-empty string or a number + * @return {String|Number} + * @api public + */ + +module.exports = function(val, options) { + options = options || {}; + var type = typeof val; + if (type === 'string' && val.length > 0) { + return parse(val); + } else if (type === 'number' && isFinite(val)) { + return options.long ? fmtLong(val) : fmtShort(val); + } + throw new Error( + 'val is not a non-empty string or a valid number. val=' + + JSON.stringify(val) + ); +}; + +/** + * Parse the given `str` and return milliseconds. + * + * @param {String} str + * @return {Number} + * @api private + */ + +function parse(str) { + str = String(str); + if (str.length > 100) { + return; + } + var match = /^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec( + str + ); + if (!match) { + return; + } + var n = parseFloat(match[1]); + var type = (match[2] || 'ms').toLowerCase(); + switch (type) { + case 'years': + case 'year': + case 'yrs': + case 'yr': + case 'y': + return n * y; + case 'weeks': + case 'week': + case 'w': + return n * w; + case 'days': + case 'day': + case 'd': + return n * d; + case 'hours': + case 'hour': + case 'hrs': + case 'hr': + case 'h': + return n * h; + case 'minutes': + case 'minute': + case 'mins': + case 'min': + case 'm': + return n * m; + case 'seconds': + case 'second': + case 'secs': + case 'sec': + case 's': + return n * s; + case 'milliseconds': + case 'millisecond': + case 'msecs': + case 'msec': + case 'ms': + return n; + default: + return undefined; + } +} + +/** + * Short format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private + */ + +function fmtShort(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return Math.round(ms / d) + 'd'; + } + if (msAbs >= h) { + return Math.round(ms / h) + 'h'; + } + if (msAbs >= m) { + return Math.round(ms / m) + 'm'; + } + if (msAbs >= s) { + return Math.round(ms / s) + 's'; + } + return ms + 'ms'; +} + +/** + * Long format for `ms`. 
+ * + * @param {Number} ms + * @return {String} + * @api private + */ + +function fmtLong(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return plural(ms, msAbs, d, 'day'); + } + if (msAbs >= h) { + return plural(ms, msAbs, h, 'hour'); + } + if (msAbs >= m) { + return plural(ms, msAbs, m, 'minute'); + } + if (msAbs >= s) { + return plural(ms, msAbs, s, 'second'); + } + return ms + ' ms'; +} + +/** + * Pluralization helper. + */ + +function plural(ms, msAbs, n, name) { + var isPlural = msAbs >= n * 1.5; + return Math.round(ms / n) + ' ' + name + (isPlural ? 's' : ''); +} diff --git a/node_modules/superagent/node_modules/ms/license.md b/node_modules/superagent/node_modules/ms/license.md new file mode 100644 index 0000000..69b6125 --- /dev/null +++ b/node_modules/superagent/node_modules/ms/license.md @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2016 Zeit, Inc. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/superagent/node_modules/ms/package.json b/node_modules/superagent/node_modules/ms/package.json new file mode 100644 index 0000000..eea666e --- /dev/null +++ b/node_modules/superagent/node_modules/ms/package.json @@ -0,0 +1,37 @@ +{ + "name": "ms", + "version": "2.1.2", + "description": "Tiny millisecond conversion utility", + "repository": "zeit/ms", + "main": "./index", + "files": [ + "index.js" + ], + "scripts": { + "precommit": "lint-staged", + "lint": "eslint lib/* bin/*", + "test": "mocha tests.js" + }, + "eslintConfig": { + "extends": "eslint:recommended", + "env": { + "node": true, + "es6": true + } + }, + "lint-staged": { + "*.js": [ + "npm run lint", + "prettier --single-quote --write", + "git add" + ] + }, + "license": "MIT", + "devDependencies": { + "eslint": "4.12.1", + "expect.js": "0.3.1", + "husky": "0.14.3", + "lint-staged": "5.0.0", + "mocha": "4.0.1" + } +} diff --git a/node_modules/superagent/node_modules/ms/readme.md b/node_modules/superagent/node_modules/ms/readme.md new file mode 100644 index 0000000..9a1996b --- /dev/null +++ b/node_modules/superagent/node_modules/ms/readme.md @@ -0,0 +1,60 @@ +# ms + +[![Build Status](https://travis-ci.org/zeit/ms.svg?branch=master)](https://travis-ci.org/zeit/ms) +[![Join the community on Spectrum](https://withspectrum.github.io/badge/badge.svg)](https://spectrum.chat/zeit) + +Use this package to easily convert various time formats to milliseconds. 
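(Editor's note, not part of the upstream patch or README: since the `ms` API above is symmetric, a short sketch of typical Node usage may help; it assumes the package is installed as `ms`, and the values in the comments follow the conversion rules in the `index.js` hunk above.)

```js
const ms = require('ms');

// A string is parsed to milliseconds; a number is formatted back to a string.
const timeout = ms('2h');                  // 7200000
const label = ms(timeout, { long: true }); // "2 hours"

// Unrecognized unit strings return undefined rather than throwing...
ms('2 lightyears');                        // undefined

// ...but non-string/non-number input throws, so guard untrusted values.
try {
  ms({});
} catch (err) {
  console.error(err.message); // "val is not a non-empty string or a valid number. val={}"
}
```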
+ +## Examples + +```js +ms('2 days') // 172800000 +ms('1d') // 86400000 +ms('10h') // 36000000 +ms('2.5 hrs') // 9000000 +ms('2h') // 7200000 +ms('1m') // 60000 +ms('5s') // 5000 +ms('1y') // 31557600000 +ms('100') // 100 +ms('-3 days') // -259200000 +ms('-1h') // -3600000 +ms('-200') // -200 +``` + +### Convert from Milliseconds + +```js +ms(60000) // "1m" +ms(2 * 60000) // "2m" +ms(-3 * 60000) // "-3m" +ms(ms('10 hours')) // "10h" +``` + +### Time Format Written-Out + +```js +ms(60000, { long: true }) // "1 minute" +ms(2 * 60000, { long: true }) // "2 minutes" +ms(-3 * 60000, { long: true }) // "-3 minutes" +ms(ms('10 hours'), { long: true }) // "10 hours" +``` + +## Features + +- Works both in [Node.js](https://nodejs.org) and in the browser +- If a number is supplied to `ms`, a string with a unit is returned +- If a string that contains the number is supplied, it returns it as a number (e.g.: it returns `100` for `'100'`) +- If you pass a string with a number and a valid unit, the number of equivalent milliseconds is returned + +## Related Packages + +- [ms.macro](https://github.com/knpwrs/ms.macro) - Run `ms` as a macro at build-time. + +## Caught a Bug? + +1. [Fork](https://help.github.com/articles/fork-a-repo/) this repository to your own GitHub account and then [clone](https://help.github.com/articles/cloning-a-repository/) it to your local device +2. Link the package to the global module directory: `npm link` +3. Within the module you want to test your local development instance of ms, just link it to the dependencies: `npm link ms`. Instead of the default one from npm, Node.js will now use your clone of ms! + +As always, you can run the tests using: `npm test` diff --git a/node_modules/superagent/package.json b/node_modules/superagent/package.json new file mode 100644 index 0000000..b65762b --- /dev/null +++ b/node_modules/superagent/package.json @@ -0,0 +1,136 @@ +{ + "name": "superagent", + "description": "elegant & feature rich browser / node HTTP with a fluent API", + "version": "7.1.5", + "author": "TJ Holowaychuk ", + "browser": { + "./src/node/index.js": "./src/client.js", + "./lib/node/index.js": "./lib/client.js", + "./test/support/server.js": "./test/support/blank.js", + "semver": false + }, + "bugs": { + "url": "https://github.com/visionmedia/superagent/issues" + }, + "contributors": [ + "Kornel Lesiński ", + "Peter Lyons ", + "Hunter Loftis ", + "Nick Baugh " + ], + "dependencies": { + "component-emitter": "^1.3.0", + "cookiejar": "^2.1.3", + "debug": "^4.3.4", + "fast-safe-stringify": "^2.1.1", + "form-data": "^4.0.0", + "formidable": "^2.0.1", + "methods": "^1.1.2", + "mime": "^2.5.0", + "qs": "^6.10.3", + "readable-stream": "^3.6.0", + "semver": "^7.3.7" + }, + "devDependencies": { + "@babel/cli": "^7.17.10", + "@babel/core": "^7.18.2", + "@babel/plugin-transform-runtime": "^7.18.2", + "@babel/preset-env": "^7.18.2", + "@babel/runtime": "^7.18.3", + "@commitlint/cli": "^17.0.1", + "@commitlint/config-conventional": "^17.0.0", + "Base64": "^1.1.0", + "babelify": "^10.0.0", + "basic-auth-connect": "^1.0.0", + "body-parser": "^1.20.0", + "browserify": "^17.0.0", + "codecov": "^3.8.3", + "cookie-parser": "^1.4.6", + "cross-env": "^7.0.3", + "eslint": "^8.16.0", + "eslint-config-xo-lass": "^1.0.6", + "eslint-plugin-compat": "^4.0.2", + "eslint-plugin-node": "^11.1.0", + "express": "^4.18.1", + "express-session": "^1.17.3", + "fixpack": "^4.0.0", + "get-port": "4.2.0", + "husky": "^8.0.1", + "lint-staged": "^12.5.0", + "marked": "^2.0.0", + "mocha": "6.2.2", + "multer": 
"^1.4.4", + "nyc": "^15.1.0", + "remark-cli": "^10.0.1", + "remark-preset-github": "^4.0.1", + "rimraf": "^3.0.2", + "should": "^13.2.3", + "should-http": "^0.1.1", + "tinyify": "^3.0.0", + "xo": "^0.49.0", + "zuul": "^3.12.0" + }, + "engines": { + "node": ">=6.4.0 <13 || >=14" + }, + "files": [ + "dist/*.js", + "lib/**/*.js" + ], + "homepage": "https://github.com/visionmedia/superagent", + "jsdelivr": "dist/superagent.min.js", + "keywords": [ + "agent", + "ajax", + "ajax", + "api", + "async", + "await", + "axios", + "cancel", + "client", + "frisbee", + "got", + "http", + "http", + "https", + "ky", + "promise", + "promise", + "promises", + "request", + "request", + "requests", + "response", + "rest", + "retry", + "super", + "superagent", + "timeout", + "transform", + "xhr", + "xmlhttprequest" + ], + "license": "MIT", + "main": "lib/node/index.js", + "repository": { + "type": "git", + "url": "git://github.com/visionmedia/superagent.git" + }, + "scripts": { + "browserify": "browserify src/node/index.js -o dist/superagent.js -s superagent -g [ babelify --configFile ./.dist.babelrc ]", + "build": "npm run build:clean && npm run build:lib && npm run build:dist", + "build:clean": "rimraf lib dist", + "build:dist": "npm run browserify && npm run minify", + "build:lib": "babel --config-file ./.lib.babelrc src --out-dir lib", + "build:test": "babel --config-file ./.test.babelrc test --out-dir lib/node/test", + "coverage": "nyc report --reporter=text-lcov > coverage.lcov && codecov", + "lint": "eslint -c .eslintrc src test && remark . -qfo && eslint -c .lib.eslintrc lib/**/*.js && eslint -c .dist.eslintrc dist/**/*.js", + "minify": "cross-env NODE_ENV=production browserify src/node/index.js -o dist/superagent.min.js -s superagent -g [ babelify --configFile ./.dist.babelrc ] -p tinyify", + "nyc": "cross-env NODE_ENV=test nyc ava", + "prepare": "husky install", + "test": "npm run build && npm run lint && make test", + "test-http2": "npm run build && npm run lint && make test-node-http2" + }, + "unpkg": "dist/superagent.min.js" +} diff --git a/node_modules/util-deprecate/History.md b/node_modules/util-deprecate/History.md new file mode 100644 index 0000000..acc8675 --- /dev/null +++ b/node_modules/util-deprecate/History.md @@ -0,0 +1,16 @@ + +1.0.2 / 2015-10-07 +================== + + * use try/catch when checking `localStorage` (#3, @kumavis) + +1.0.1 / 2014-11-25 +================== + + * browser: use `console.warn()` for deprecation calls + * browser: more jsdocs + +1.0.0 / 2014-04-30 +================== + + * initial commit diff --git a/node_modules/util-deprecate/LICENSE b/node_modules/util-deprecate/LICENSE new file mode 100644 index 0000000..6a60e8c --- /dev/null +++ b/node_modules/util-deprecate/LICENSE @@ -0,0 +1,24 @@ +(The MIT License) + +Copyright (c) 2014 Nathan Rajlich + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/util-deprecate/README.md b/node_modules/util-deprecate/README.md new file mode 100644 index 0000000..75622fa --- /dev/null +++ b/node_modules/util-deprecate/README.md @@ -0,0 +1,53 @@ +util-deprecate +============== +### The Node.js `util.deprecate()` function with browser support + +In Node.js, this module simply re-exports the `util.deprecate()` function. + +In the web browser (i.e. via browserify), a browser-specific implementation +of the `util.deprecate()` function is used. + + +## API + +A `deprecate()` function is the only thing exposed by this module. + +``` javascript +// setup: +exports.foo = deprecate(foo, 'foo() is deprecated, use bar() instead'); + + +// users see: +foo(); +// foo() is deprecated, use bar() instead +foo(); +foo(); +``` + + +## License + +(The MIT License) + +Copyright (c) 2014 Nathan Rajlich + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/util-deprecate/browser.js b/node_modules/util-deprecate/browser.js new file mode 100644 index 0000000..549ae2f --- /dev/null +++ b/node_modules/util-deprecate/browser.js @@ -0,0 +1,67 @@ + +/** + * Module exports. + */ + +module.exports = deprecate; + +/** + * Mark that a method should not be used. + * Returns a modified function which warns once by default. + * + * If `localStorage.noDeprecation = true` is set, then it is a no-op. + * + * If `localStorage.throwDeprecation = true` is set, then deprecated functions + * will throw an Error when invoked. + * + * If `localStorage.traceDeprecation = true` is set, then deprecated functions + * will invoke `console.trace()` instead of `console.error()`. 
+ * + * @param {Function} fn - the function to deprecate + * @param {String} msg - the string to print to the console when `fn` is invoked + * @returns {Function} a new "deprecated" version of `fn` + * @api public + */ + +function deprecate (fn, msg) { + if (config('noDeprecation')) { + return fn; + } + + var warned = false; + function deprecated() { + if (!warned) { + if (config('throwDeprecation')) { + throw new Error(msg); + } else if (config('traceDeprecation')) { + console.trace(msg); + } else { + console.warn(msg); + } + warned = true; + } + return fn.apply(this, arguments); + } + + return deprecated; +} + +/** + * Checks `localStorage` for boolean values for the given `name`. + * + * @param {String} name + * @returns {Boolean} + * @api private + */ + +function config (name) { + // accessing global.localStorage can trigger a DOMException in sandboxed iframes + try { + if (!global.localStorage) return false; + } catch (_) { + return false; + } + var val = global.localStorage[name]; + if (null == val) return false; + return String(val).toLowerCase() === 'true'; +} diff --git a/node_modules/util-deprecate/node.js b/node_modules/util-deprecate/node.js new file mode 100644 index 0000000..5e6fcff --- /dev/null +++ b/node_modules/util-deprecate/node.js @@ -0,0 +1,6 @@ + +/** + * For Node.js, simply re-export the core `util.deprecate` function. + */ + +module.exports = require('util').deprecate; diff --git a/node_modules/util-deprecate/package.json b/node_modules/util-deprecate/package.json new file mode 100644 index 0000000..2e79f89 --- /dev/null +++ b/node_modules/util-deprecate/package.json @@ -0,0 +1,27 @@ +{ + "name": "util-deprecate", + "version": "1.0.2", + "description": "The Node.js `util.deprecate()` function with browser support", + "main": "node.js", + "browser": "browser.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "repository": { + "type": "git", + "url": "git://github.com/TooTallNate/util-deprecate.git" + }, + "keywords": [ + "util", + "deprecate", + "browserify", + "browser", + "node" + ], + "author": "Nathan Rajlich (http://n8.io/)", + "license": "MIT", + "bugs": { + "url": "https://github.com/TooTallNate/util-deprecate/issues" + }, + "homepage": "https://github.com/TooTallNate/util-deprecate" +} diff --git a/node_modules/validator/LICENSE b/node_modules/validator/LICENSE new file mode 100644 index 0000000..4e49a38 --- /dev/null +++ b/node_modules/validator/LICENSE @@ -0,0 +1,20 @@ +Copyright (c) 2018 Chris O'Hara + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
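(Editor's note, not part of the patch: the `util-deprecate` browser shim added above is driven entirely by `localStorage` flags, which is easy to miss in the diff. The sketch below is illustrative only; `oldSum` and `add` are made-up names, and the comments describe the behaviour documented in `browser.js` above.)

```js
const deprecate = require('util-deprecate');

// Hypothetical legacy helper being phased out in favour of add().
function oldSum(a, b) { return a + b; }
const sum = deprecate(oldSum, 'oldSum() is deprecated, use add() instead');

sum(1, 2); // warns once (console.warn in the browser build, util.deprecate in Node)
sum(3, 4); // silent: the wrapper only reports the first call

// In the browser build, localStorage flags change the behaviour:
//   localStorage.noDeprecation = 'true'    -> deprecate() returns fn unchanged
//   localStorage.throwDeprecation = 'true' -> the first call throws instead of warning
//   localStorage.traceDeprecation = 'true' -> console.trace() is used for the message
```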
diff --git a/node_modules/validator/README.md b/node_modules/validator/README.md new file mode 100644 index 0000000..2cf6505 --- /dev/null +++ b/node_modules/validator/README.md @@ -0,0 +1,274 @@ +# validator.js + +[![NPM version][npm-image]][npm-url] +[![CI][ci-image]][ci-url] +[![Coverage][codecov-image]][codecov-url] +[![Downloads][downloads-image]][npm-url] +[![Backers on Open Collective](https://opencollective.com/validatorjs/backers/badge.svg)](#backers) +[![Sponsors on Open Collective](https://opencollective.com/validatorjs/sponsors/badge.svg)](#sponsors) +[![Gitter](https://badges.gitter.im/validatorjs/community.svg)](https://gitter.im/validatorjs/community) + +A library of string validators and sanitizers. + +## Strings only + +**This library validates and sanitizes strings only.** + +If you're not sure if your input is a string, coerce it using `input + ''`. +Passing anything other than a string will result in an error. + +## Installation and Usage + +### Server-side usage + +Install the library with `npm install validator` + +#### No ES6 + +```javascript +var validator = require('validator'); + +validator.isEmail('foo@bar.com'); //=> true +``` + +#### ES6 + +```javascript +import validator from 'validator'; +``` + +Or, import only a subset of the library: + +```javascript +import isEmail from 'validator/lib/isEmail'; +``` + +#### Tree-shakeable ES imports + +```javascript +import isEmail from 'validator/es/lib/isEmail'; +``` + +### Client-side usage + +The library can be loaded either as a standalone script, or through an [AMD][amd]-compatible loader + +```html + + +``` + +The library can also be installed through [bower][bower] + +```bash +$ bower install validator-js +``` + +CDN + +```html + +``` + +## Contributors + +[Become a backer](https://opencollective.com/validatorjs#backer) + +[Become a sponsor](https://opencollective.com/validatorjs#sponsor) + +Thank you to the people who have already contributed: + + + +## Validators + +Here is a list of the validators currently available. + +Validator | Description +--------------------------------------- | -------------------------------------- +**contains(str, seed [, options ])** | check if the string contains the seed.

`options` is an object that defaults to `{ ignoreCase: false, minOccurrences: 1 }`.
Options:
`ignoreCase`: Ignore case when doing comparison, default false
`minOccurrences`: Minimum number of occurrences for the seed in the string. Defaults to 1. +**equals(str, comparison)** | check if the string matches the comparison. +**isAfter(str [, date])** | check if the string is a date that's after the specified date (defaults to now). +**isAlpha(str [, locale, options])** | check if the string contains only letters (a-zA-Z).&#13;

Locale is one of `['ar', 'ar-AE', 'ar-BH', 'ar-DZ', 'ar-EG', 'ar-IQ', 'ar-JO', 'ar-KW', 'ar-LB', 'ar-LY', 'ar-MA', 'ar-QA', 'ar-QM', 'ar-SA', 'ar-SD', 'ar-SY', 'ar-TN', 'ar-YE', 'bg-BG', 'cs-CZ', 'da-DK', 'de-DE', 'el-GR', 'en-AU', 'en-GB', 'en-HK', 'en-IN', 'en-NZ', 'en-US', 'en-ZA', 'en-ZM', 'es-ES', 'fa-IR', 'fi-FI', 'fr-CA', 'fr-FR', 'he', 'hi-IN', 'hu-HU', 'it-IT', 'ku-IQ', 'nb-NO', 'nl-NL', 'nn-NO', 'pl-PL', 'pt-BR', 'pt-PT', 'ru-RU', 'sl-SI', 'sk-SK', 'sr-RS', 'sr-RS@latin', 'sv-SE', 'tr-TR', 'uk-UA']`) and defaults to `en-US`. Locale list is `validator.isAlphaLocales`. options is an optional object that can be supplied with the following key(s): ignore which can either be a String or RegExp of characters to be ignored e.g. " -" will ignore spaces and -'s. +**isAlphanumeric(str [, locale, options])** | check if the string contains only letters and numbers (a-zA-Z0-9).

Locale is one of `['ar', 'ar-AE', 'ar-BH', 'ar-DZ', 'ar-EG', 'ar-IQ', 'ar-JO', 'ar-KW', 'ar-LB', 'ar-LY', 'ar-MA', 'ar-QA', 'ar-QM', 'ar-SA', 'ar-SD', 'ar-SY', 'ar-TN', 'ar-YE', 'bg-BG', 'cs-CZ', 'da-DK', 'de-DE', 'el-GR', 'en-AU', 'en-GB', 'en-HK', 'en-IN', 'en-NZ', 'en-US', 'en-ZA', 'en-ZM', 'es-ES', 'fa-IR', 'fi-FI', 'fr-CA', 'fr-FR', 'he', 'hi-IN', 'hu-HU', 'it-IT', 'ku-IQ', 'nb-NO', 'nl-NL', 'nn-NO', 'pl-PL', 'pt-BR', 'pt-PT', 'ru-RU', 'sl-SI', 'sk-SK', 'sr-RS', 'sr-RS@latin', 'sv-SE', 'tr-TR', 'uk-UA']`) and defaults to `en-US`. Locale list is `validator.isAlphanumericLocales`. options is an optional object that can be supplied with the following key(s): ignore which can either be a String or RegExp of characters to be ignored e.g. " -" will ignore spaces and -'s. +**isAscii(str)** | check if the string contains ASCII chars only. +**isBase32(str)** | check if a string is base32 encoded. +**isBase58(str)** | check if a string is base58 encoded. +**isBase64(str [, options])** | check if a string is base64 encoded. options is optional and defaults to `{urlSafe: false}`
when `urlSafe` is true it tests the given base64 encoded string is [url safe](https://base64.guru/standards/base64url) +**isBefore(str [, date])** | check if the string is a date that's before the specified date. +**isBIC(str)** | check if a string is a BIC (Bank Identification Code) or SWIFT code. +**isBoolean(str [, options])** | check if a string is a boolean.
`options` is an object which defaults to `{ loose: false }`. If loose is set to false, the validator will strictly match ['true', 'false', '0', '1']. If loose is set to true, the validator will also match 'yes', 'no', and will match a valid boolean string of any case (e.g. ['true', 'True', 'TRUE']). +**isBtcAddress(str)** | check if the string is a valid BTC address. +**isByteLength(str [, options])** | check if the string's length (in UTF-8 bytes) falls in a range.&#13;

`options` is an object which defaults to `{min:0, max: undefined}`. +**isCreditCard(str)** | check if the string is a credit card. +**isCurrency(str [, options])** | check if the string is a valid currency amount.

`options` is an object which defaults to `{symbol: '$', require_symbol: false, allow_space_after_symbol: false, symbol_after_digits: false, allow_negatives: true, parens_for_negatives: false, negative_sign_before_digits: false, negative_sign_after_digits: false, allow_negative_sign_placeholder: false, thousands_separator: ',', decimal_separator: '.', allow_decimal: true, require_decimal: false, digits_after_decimal: [2], allow_space_after_digits: false}`.
**Note:** The array `digits_after_decimal` is filled with the exact number of digits allowed, not a range; for example, a range of 1 to 3 will be given as [1, 2, 3]. +**isDataURI(str)** | check if the string is a [data uri format](https://developer.mozilla.org/en-US/docs/Web/HTTP/data_URIs). +**isDate(input [, options])** | Check if the input is a valid date. e.g. [`2002-07-15`, new Date()].&#13;

`options` is an object which can contain the keys `format`, `strictMode` and/or `delimiters`

`format` is a string and defaults to `YYYY/MM/DD`.

`strictMode` is a boolean and defaults to `false`. If `strictMode` is set to true, the validator will reject inputs different from `format`.

`delimiters` is an array of allowed date delimiters and defaults to `['/', '-']`. +**isDecimal(str [, options])** | check if the string represents a decimal number, such as 0.1, .3, 1.1, 1.00003, 4.0, etc.

`options` is an object which defaults to `{force_decimal: false, decimal_digits: '1,', locale: 'en-US'}`

`locale` determine the decimal separator and is one of `['ar', 'ar-AE', 'ar-BH', 'ar-DZ', 'ar-EG', 'ar-IQ', 'ar-JO', 'ar-KW', 'ar-LB', 'ar-LY', 'ar-MA', 'ar-QA', 'ar-QM', 'ar-SA', 'ar-SD', 'ar-SY', 'ar-TN', 'ar-YE', 'bg-BG', 'cs-CZ', 'da-DK', 'de-DE', 'el-GR', 'en-AU', 'en-GB', 'en-HK', 'en-IN', 'en-NZ', 'en-US', 'en-ZA', 'en-ZM', 'es-ES', 'fa', 'fa-AF', 'fa-IR', 'fr-FR', 'fr-CA', 'hu-HU', 'id-ID', 'it-IT', 'ku-IQ', 'nb-NO', 'nl-NL', 'nn-NO', 'pl-PL', 'pl-Pl', 'pt-BR', 'pt-PT', 'ru-RU', 'sl-SI', 'sr-RS', 'sr-RS@latin', 'sv-SE', 'tr-TR', 'uk-UA', 'vi-VN']`.
**Note:** `decimal_digits` is given as a range like '1,3', a specific value like '3' or min like '1,'. +**isDivisibleBy(str, number)** | check if the string is a number that's divisible by another. +**isEAN(str)** | check if the string is an EAN (European Article Number). +**isEmail(str [, options])** | check if the string is an email.

`options` is an object which defaults to `{ allow_display_name: false, require_display_name: false, allow_utf8_local_part: true, require_tld: true, allow_ip_domain: false, domain_specific_validation: false, blacklisted_chars: '', host_blacklist: [] }`. If `allow_display_name` is set to true, the validator will also match `Display Name `. If `require_display_name` is set to true, the validator will reject strings without the format `Display Name `. If `allow_utf8_local_part` is set to false, the validator will not allow any non-English UTF8 character in email address' local part. If `require_tld` is set to false, e-mail addresses without having TLD in their domain will also be matched. If `ignore_max_length` is set to true, the validator will not check for the standard max length of an email. If `allow_ip_domain` is set to true, the validator will allow IP addresses in the host part. If `domain_specific_validation` is true, some additional validation will be enabled, e.g. disallowing certain syntactically valid email addresses that are rejected by GMail. If `blacklisted_chars` receives a string, then the validator will reject emails that include any of the characters in the string, in the name part. If `host_blacklist` is set to an array of strings and the part of the email after the `@` symbol matches one of the strings defined in it, the validation fails. +**isEmpty(str [, options])** | check if the string has a length of zero.

`options` is an object which defaults to `{ ignore_whitespace:false }`. +**isEthereumAddress(str)** | check if the string is an [Ethereum](https://ethereum.org/) address using basic regex. Does not validate address checksums. +**isFloat(str [, options])** | check if the string is a float.

`options` is an object which can contain the keys `min`, `max`, `gt`, and/or `lt` to validate the float is within boundaries (e.g. `{ min: 7.22, max: 9.55 }`); it also has `locale` as an option.&#13;

`min` and `max` are equivalent to 'greater or equal' and 'less or equal', respectively while `gt` and `lt` are their strict counterparts.

`locale` determine the decimal separator and is one of `['ar', 'ar-AE', 'ar-BH', 'ar-DZ', 'ar-EG', 'ar-IQ', 'ar-JO', 'ar-KW', 'ar-LB', 'ar-LY', 'ar-MA', 'ar-QA', 'ar-QM', 'ar-SA', 'ar-SD', 'ar-SY', 'ar-TN', 'ar-YE', 'bg-BG', 'cs-CZ', 'da-DK', 'de-DE', 'en-AU', 'en-GB', 'en-HK', 'en-IN', 'en-NZ', 'en-US', 'en-ZA', 'en-ZM', 'es-ES', 'fr-CA', 'fr-FR', 'hu-HU', 'it-IT', 'nb-NO', 'nl-NL', 'nn-NO', 'pl-PL', 'pt-BR', 'pt-PT', 'ru-RU', 'sl-SI', 'sr-RS', 'sr-RS@latin', 'sv-SE', 'tr-TR', 'uk-UA']`. Locale list is `validator.isFloatLocales`. +**isFQDN(str [, options])** | check if the string is a fully qualified domain name (e.g. domain.com).

`options` is an object which defaults to `{ require_tld: true, allow_underscores: false, allow_trailing_dot: false, allow_numeric_tld: false, allow_wildcard: false }`. If `allow_wildcard` is set to true, the validator will allow domain starting with `*.` (e.g. `*.example.com` or `*.shop.example.com`). +**isFullWidth(str)** | check if the string contains any full-width chars. +**isHalfWidth(str)** | check if the string contains any half-width chars. +**isHash(str, algorithm)** | check if the string is a hash of type algorithm.

Algorithm is one of `['md4', 'md5', 'sha1', 'sha256', 'sha384', 'sha512', 'ripemd128', 'ripemd160', 'tiger128', 'tiger160', 'tiger192', 'crc32', 'crc32b']` +**isHexadecimal(str)** | check if the string is a hexadecimal number. +**isHexColor(str)** | check if the string is a hexadecimal color. +**isHSL(str)** | check if the string is an HSL (hue, saturation, lightness, optional alpha) color based on [CSS Colors Level 4 specification](https://developer.mozilla.org/en-US/docs/Web/CSS/color_value).

Comma-separated format supported. Space-separated format supported with the exception of a few edge cases (e.g. `hsl(200grad+.1%62%/1)`). +**isIBAN(str)** | check if a string is an IBAN (International Bank Account Number). +**isIdentityCard(str [, locale])** | check if the string is a valid identity card code.&#13;

`locale` is one of `['LK', 'PL', 'ES', 'FI', 'IN', 'IT', 'IR', 'MZ', 'NO', 'TH', 'zh-TW', 'he-IL', 'ar-LY', 'ar-TN', 'zh-CN']` OR `'any'`. If 'any' is used, the function will check if any of the locales match.&#13;

Defaults to 'any'. +**isIMEI(str [, options])** | check if the string is a valid IMEI number. The IMEI should be in the format `###############` or `##-######-######-#`.&#13;

`options` is an object which can contain the key `allow_hyphens`. Defaults to the first format. If `allow_hyphens` is set to true, the validator will validate the second format. +**isIn(str, values)** | check if the string is in an array of allowed values. +**isInt(str [, options])** | check if the string is an integer.&#13;

`options` is an object which can contain the keys `min` and/or `max` to check the integer is within boundaries (e.g. `{ min: 10, max: 99 }`). `options` can also contain the key `allow_leading_zeroes`, which when set to false will disallow integer values with leading zeroes (e.g. `{ allow_leading_zeroes: false }`). Finally, `options` can contain the keys `gt` and/or `lt` which will enforce integers being greater than or less than, respectively, the value provided (e.g. `{gt: 1, lt: 4}` for a number between 1 and 4). +**isIP(str [, version])** | check if the string is an IP (version 4 or 6). +**isIPRange(str [, version])** | check if the string is an IP Range (version 4 or 6). +**isISBN(str [, version])** | check if the string is an ISBN (version 10 or 13). +**isISIN(str)** | check if the string is an [ISIN][ISIN] (stock/security identifier). +**isISO8601(str [, options])** | check if the string is a valid [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) date.&#13;
`options` is an object which defaults to `{ strict: false, strictSeparator: false }`. If `strict` is true, date strings with invalid dates like `2009-02-29` will be invalid. If `strictSeparator` is true, date strings with date and time separated by anything other than a T will be invalid. +**isISO31661Alpha2(str)** | check if the string is a valid [ISO 3166-1 alpha-2](https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2) officially assigned country code. +**isISO31661Alpha3(str)** | check if the string is a valid [ISO 3166-1 alpha-3](https://en.wikipedia.org/wiki/ISO_3166-1_alpha-3) officially assigned country code. +**isISO4217(str)** | check if the string is a valid [ISO 4217](https://en.wikipedia.org/wiki/ISO_4217) officially assigned currency code. +**isISRC(str)** | check if the string is a [ISRC](https://en.wikipedia.org/wiki/International_Standard_Recording_Code). +**isISSN(str [, options])** | check if the string is an [ISSN](https://en.wikipedia.org/wiki/International_Standard_Serial_Number).

`options` is an object which defaults to `{ case_sensitive: false, require_hyphen: false }`. If `case_sensitive` is true, ISSNs with a lowercase `'x'` as the check digit are rejected. +**isJSON(str [, options])** | check if the string is valid JSON (note: uses JSON.parse).

`options` is an object which defaults to `{ allow_primitives: false }`. If `allow_primitives` is true, the primitives 'true', 'false' and 'null' are accepted as valid JSON values. +**isJWT(str)** | check if the string is valid JWT token. +**isLatLong(str [, options])** | check if the string is a valid latitude-longitude coordinate in the format `lat,long` or `lat, long`.

`options` is an object that defaults to `{ checkDMS: false }`. Pass `checkDMS` as `true` to validate DMS(degrees, minutes, and seconds) latitude-longitude format. +**isLength(str [, options])** | check if the string's length falls in a range.

`options` is an object which defaults to `{min:0, max: undefined}`. Note: this function takes into account surrogate pairs. +**isLicensePlate(str [, locale])** | check if string matches the format of a country's license plate.

(locale is one of `['cs-CZ', 'de-DE', 'de-LI', 'fi-FI', 'pt-PT', 'sq-AL', 'pt-BR']` or `any`) +**isLocale(str)** | check if the string is a locale. +**isLowercase(str)** | check if the string is lowercase. +**isMACAddress(str [, options])** | check if the string is a MAC address.&#13;

`options` is an object which defaults to `{no_separators: false}`. If `no_separators` is true, the validator will allow MAC addresses without separators. Also, it allows the use of hyphens, spaces or dots, e.g. '01 02 03 04 05 ab', '01-02-03-04-05-ab' or '0102.0304.05ab'. +**isMagnetURI(str)** | check if the string is a [magnet uri format](https://en.wikipedia.org/wiki/Magnet_URI_scheme). +**isMD5(str)** | check if the string is an MD5 hash.&#13;

Please note that you can also use the `isHash(str, 'md5')` function. Keep in mind that MD5 has some collision weaknesses compared to other algorithms (e.g., SHA). +**isMimeType(str)** | check if the string matches a valid [MIME type](https://en.wikipedia.org/wiki/Media_type) format. +**isMobilePhone(str [, locale [, options]])** | check if the string is a mobile phone number,&#13;

(locale is either an array of locales (e.g. `['sk-SK', 'sr-RS']`) OR one of `['am-Am', 'ar-AE', 'ar-BH', 'ar-DZ', 'ar-EG', 'ar-IQ', 'ar-JO', 'ar-KW', 'ar-PS', 'ar-SA', 'ar-SY', 'ar-TN', 'az-AZ', 'az-LY', 'az-LB', 'bs-BA', 'be-BY', 'bg-BG', 'bn-BD', 'ca-AD', 'cs-CZ', 'da-DK', 'de-DE', 'de-AT', 'de-CH', 'de-LU', 'dv-MV', 'el-GR', 'en-AU', 'en-BM', 'en-BW', 'en-CA', 'en-GB', 'en-GG', 'en-GH', 'en-GY', 'en-HK', 'en-MO', 'en-IE', 'en-IN', 'en-KE', 'en-KI', 'en-MT', 'en-MU', 'en-NG', 'en-NZ', 'en-PK', 'en-PH', 'en-RW', 'en-SG', 'en-SL', 'en-UG', 'en-US', 'en-TZ', 'en-ZA', 'en-ZM', 'en-ZW', 'es-AR', 'es-BO', 'es-CL', 'es-CO', 'es-CR', 'es-CU', 'es-DO', 'es-HN', 'es-PE', 'es-EC', 'es-ES', 'es-MX', 'es-PA', 'es-PY', 'es-SV', 'es-UY', 'es-VE', 'et-EE', 'fa-IR', 'fi-FI', 'fj-FJ', 'fo-FO', 'fr-BE', 'fr-BF', 'fr-FR', 'fr-GF', 'fr-GP', 'fr-MQ', 'fr-PF', 'fr-RE', 'ga-IE', 'he-IL', 'hu-HU', 'id-ID', 'it-IT', 'it-SM', 'ja-JP', 'ka-GE', 'kk-KZ', 'kl-GL', 'ko-KR', 'lt-LT', 'ms-MY', 'mz-MZ', 'nb-NO', 'ne-NP', 'nl-BE', 'nl-NL', 'nn-NO', 'pl-PL', 'pt-BR', 'pt-PT', 'pt-AO', 'ro-RO', 'ru-RU', 'si-LK', 'sl-SI', 'sk-SK', 'sq-AL', 'sr-RS', 'sv-SE', 'tg-TJ', 'th-TH', 'tk-TM', 'tr-TR', 'uk-UA', 'uz-UZ', 'vi-VN', 'zh-CN', 'zh-HK', 'zh-MO', 'zh-TW', 'dz-BT']` OR defaults to 'any'. If 'any' or a falsy value is used, the function will check if any of the locales match).&#13;

`options` is an optional object that can be supplied with the following keys: `strictMode`, if this is set to `true`, the mobile phone number must be supplied with the country code and therefore must start with `+`. Locale list is `validator.isMobilePhoneLocales`. +**isMongoId(str)** | check if the string is a valid hex-encoded representation of a [MongoDB ObjectId][mongoid]. +**isMultibyte(str)** | check if the string contains one or more multibyte chars. +**isNumeric(str [, options])** | check if the string contains only numbers.

`options` is an object which defaults to `{no_symbols: false}`; it also has `locale` as an option. If `no_symbols` is true, the validator will reject numeric strings that feature a symbol (e.g. `+`, `-`, or `.`).&#13;

`locale` determine the decimal separator and is one of `['ar', 'ar-AE', 'ar-BH', 'ar-DZ', 'ar-EG', 'ar-IQ', 'ar-JO', 'ar-KW', 'ar-LB', 'ar-LY', 'ar-MA', 'ar-QA', 'ar-QM', 'ar-SA', 'ar-SD', 'ar-SY', 'ar-TN', 'ar-YE', 'bg-BG', 'cs-CZ', 'da-DK', 'de-DE', 'en-AU', 'en-GB', 'en-HK', 'en-IN', 'en-NZ', 'en-US', 'en-ZA', 'en-ZM', 'es-ES', 'fr-FR', 'fr-CA', 'hu-HU', 'it-IT', 'nb-NO', 'nl-NL', 'nn-NO', 'pl-PL', 'pt-BR', 'pt-PT', 'ru-RU', 'sl-SI', 'sr-RS', 'sr-RS@latin', 'sv-SE', 'tr-TR', 'uk-UA']`. +**isOctal(str)** | check if the string is a valid octal number. +**isPassportNumber(str, countryCode)** | check if the string is a valid passport number.

(countryCode is one of `[ 'AM', 'AR', 'AT', 'AU', 'BE', 'BG', 'BY', 'BR', 'CA', 'CH', 'CN', 'CY', 'CZ', 'DE', 'DK', 'DZ', 'EE', 'ES', 'FI', 'FR', 'GB', 'GR', 'HR', 'HU', 'IE', 'IN', 'IR', 'ID', 'IS', 'IT', 'JP', 'KR', 'LT', 'LU', 'LV', 'LY', 'MT', 'MY', 'MZ', 'NL', 'PL', 'PT', 'RO', 'RU', 'SE', 'SL', 'SK', 'TR', 'UA', 'US' ]`). +**isPort(str)** | check if the string is a valid port number. +**isPostalCode(str, locale)** | check if the string is a postal code,&#13;

(locale is one of `[ 'AD', 'AT', 'AU', 'AZ', 'BE', 'BG', 'BR', 'BY', 'CA', 'CH', 'CN', 'CZ', 'DE', 'DK', 'DO', 'DZ', 'EE', 'ES', 'FI', 'FR', 'GB', 'GR', 'HR', 'HT', 'HU', 'ID', 'IE', 'IL', 'IN', 'IR', 'IS', 'IT', 'JP', 'KE', 'KR', 'LI', 'LK', 'LT', 'LU', 'LV', 'MT', 'MX', 'MY', 'NL', 'NO', 'NP', 'NZ', 'PL', 'PR', 'PT', 'RO', 'RU', 'SA', 'SE', 'SG', 'SI', 'TH', 'TN', 'TW', 'UA', 'US', 'ZA', 'ZM' ]` OR 'any'. If 'any' is used, the function will check if any of the locales match. Locale list is `validator.isPostalCodeLocales`.). +**isRFC3339(str)** | check if the string is a valid [RFC 3339](https://tools.ietf.org/html/rfc3339) date. +**isRgbColor(str [, includePercentValues])** | check if the string is an rgb or rgba color.&#13;

`includePercentValues` defaults to `true`. If you don't want to allow `rgb` or `rgba` values with percents, like `rgb(5%,5%,5%)` or `rgba(90%,90%,90%,.3)`, then set it to false. +**isSemVer(str)** | check if the string is a Semantic Versioning Specification (SemVer). +**isSurrogatePair(str)** | check if the string contains any surrogate-pair chars. +**isUppercase(str)** | check if the string is uppercase. +**isSlug** | Check if the string is of type slug; a single hyphen is allowed between segments, e.g. [`cn-cn`, `cn-c-c`]. +**isStrongPassword(str [, options])** | Check if a password is strong or not. Allows for custom requirements or scoring rules. If `returnScore` is true, then the function returns an integer score for the password rather than a boolean.&#13;
Default options:
`{ minLength: 8, minLowercase: 1, minUppercase: 1, minNumbers: 1, minSymbols: 1, returnScore: false, pointsPerUnique: 1, pointsPerRepeat: 0.5, pointsForContainingLower: 10, pointsForContainingUpper: 10, pointsForContainingNumber: 10, pointsForContainingSymbol: 10 }` +**isTaxID(str, locale)** | Check if the given value is a valid Tax Identification Number. Default locale is `en-US`.

More info about exact TIN support can be found in `src/lib/isTaxID.js`

Supported locales: `[ 'bg-BG', 'cs-CZ', 'de-AT', 'de-DE', 'dk-DK', 'el-CY', 'el-GR', 'en-GB', 'en-IE', 'en-US', 'es-ES', 'et-EE', 'fi-FI', 'fr-BE', 'fr-FR', 'fr-LU', 'hr-HR', 'hu-HU', 'it-IT', 'lb-LU', 'lt-LT', 'lv-LV' 'mt-MT', 'nl-BE', 'nl-NL', 'pl-PL', 'pt-BR', 'pt-PT', 'ro-RO', 'sk-SK', 'sl-SI', 'sv-SE' ]` +**isURL(str [, options])** | check if the string is an URL.

`options` is an object which defaults to `{ protocols: ['http','https','ftp'], require_tld: true, require_protocol: false, require_host: true, require_port: false, require_valid_protocol: true, allow_underscores: false, host_whitelist: false, host_blacklist: false, allow_trailing_dot: false, allow_protocol_relative_urls: false, allow_fragments: true, allow_query_components: true, disallow_auth: false, validate_length: true }`.

require_protocol - if set as true isURL will return false if protocol is not present in the URL.
require_valid_protocol - isURL will check if the URL's protocol is present in the protocols option.
protocols - valid protocols can be modified with this option.
require_host - if set as false isURL will not check if host is present in the URL.
require_port - if set as true isURL will check if port is present in the URL.
allow_protocol_relative_urls - if set as true protocol relative URLs will be allowed.
allow_fragments - if set as false isURL will return false if fragments are present.
allow_query_components - if set as false isURL will return false if query components are present.
validate_length - if set as false isURL will skip string length validation (2083 characters is IE max URL length). +**isUUID(str [, version])** | check if the string is a UUID (version 1, 2, 3, 4 or 5). +**isVariableWidth(str)** | check if the string contains a mixture of full and half-width chars. +**isVAT(str, countryCode)** | checks that the string is a [valid VAT number](https://en.wikipedia.org/wiki/VAT_identification_number) if validation is available for the given country code matching [ISO 3166-1 alpha-2](https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2).

Available country codes: `[ 'GB', 'IT', 'NL' ]`. +**isWhitelisted(str, chars)** | check if all the characters in the string appear in the whitelist. +**matches(str, pattern [, modifiers])** | check if the string matches the pattern.&#13;

Either `matches('foo', /foo/i)` or `matches('foo', 'foo', 'i')`. + +## Sanitizers + +Here is a list of the sanitizers currently available. + +Sanitizer | Description +-------------------------------------- | ------------------------------- +**blacklist(input, chars)** | remove characters that appear in the blacklist. The characters are used in a RegExp and so you will need to escape some chars, e.g. `blacklist(input, '\\[\\]')`. +**escape(input)** | replace `<`, `>`, `&`, `'`, `"` and `/` with HTML entities. +**ltrim(input [, chars])** | trim characters from the left-side of the input. +**normalizeEmail(email [, options])** | canonicalizes an email address. (This doesn't validate that the input is an email, if you want to validate the email use isEmail beforehand)

`options` is an object with the following keys and default values:
  • *all_lowercase: true* - Transforms the local part (before the @ symbol) of all email addresses to lowercase. Please note that this may violate RFC 5321, which gives providers the possibility to treat the local part of email addresses in a case sensitive way (although in practice most - yet not all - providers don't). The domain part of the email address is always lowercased, as it's case insensitive per RFC 1035.
  • *gmail_lowercase: true* - GMail addresses are known to be case-insensitive, so this switch allows lowercasing them even when *all_lowercase* is set to false. Please note that when *all_lowercase* is true, GMail addresses are lowercased regardless of the value of this setting.
  • *gmail_remove_dots: true*: Removes dots from the local part of the email address, as GMail ignores them (e.g. "john.doe" and "johndoe" are considered equal).
  • *gmail_remove_subaddress: true*: Normalizes addresses by removing "sub-addresses", which is the part following a "+" sign (e.g. "foo+bar@gmail.com" becomes "foo@gmail.com").
  • *gmail_convert_googlemaildotcom: true*: Converts addresses with domain @googlemail.com to @gmail.com, as they're equivalent.
  • *outlookdotcom_lowercase: true* - Outlook.com addresses (including Windows Live and Hotmail) are known to be case-insensitive, so this switch allows lowercasing them even when *all_lowercase* is set to false. Please note that when *all_lowercase* is true, Outlook.com addresses are lowercased regardless of the value of this setting.
  • *outlookdotcom_remove_subaddress: true*: Normalizes addresses by removing "sub-addresses", which is the part following a "+" sign (e.g. "foo+bar@outlook.com" becomes "foo@outlook.com").
  • *yahoo_lowercase: true* - Yahoo Mail addresses are known to be case-insensitive, so this switch allows lowercasing them even when *all_lowercase* is set to false. Please note that when *all_lowercase* is true, Yahoo Mail addresses are lowercased regardless of the value of this setting.
  • *yahoo_remove_subaddress: true*: Normalizes addresses by removing "sub-addresses", which is the part following a "-" sign (e.g. "foo-bar@yahoo.com" becomes "foo@yahoo.com").
  • *icloud_lowercase: true* - iCloud addresses (including MobileMe) are known to be case-insensitive, so this switch allows lowercasing them even when *all_lowercase* is set to false. Please note that when *all_lowercase* is true, iCloud addresses are lowercased regardless of the value of this setting.
  • *icloud_remove_subaddress: true*: Normalizes addresses by removing "sub-addresses", which is the part following a "+" sign (e.g. "foo+bar@icloud.com" becomes "foo@icloud.com").
+**rtrim(input [, chars])** | trim characters from the right-side of the input. +**stripLow(input [, keep_new_lines])** | remove characters with a numerical value < 32 and 127, mostly control characters. If `keep_new_lines` is `true`, newline characters are preserved (`\n` and `\r`, hex `0xA` and `0xD`). Unicode-safe in JavaScript. +**toBoolean(input [, strict])** | convert the input string to a boolean. Everything except for `'0'`, `'false'` and `''` returns `true`. In strict mode only `'1'` and `'true'` return `true`. +**toDate(input)** | convert the input string to a date, or `null` if the input is not a date. +**toFloat(input)** | convert the input string to a float, or `NaN` if the input is not a float. +**toInt(input [, radix])** | convert the input string to an integer, or `NaN` if the input is not an integer. +**trim(input [, chars])** | trim characters (whitespace by default) from both sides of the input. +**unescape(input)** | replaces HTML encoded entities with `<`, `>`, `&`, `'`, `"` and `/`. +**whitelist(input, chars)** | remove characters that do not appear in the whitelist. The characters are used in a RegExp and so you will need to escape some chars, e.g. `whitelist(input, '\\[\\]')`. + +### XSS Sanitization + +XSS sanitization was removed from the library in [2d5d6999](https://github.com/validatorjs/validator.js/commit/2d5d6999541add350fb396ef02dc42ca3215049e). + +For an alternative, have a look at Yahoo's [xss-filters library](https://github.com/yahoo/xss-filters) or at [DOMPurify](https://github.com/cure53/DOMPurify). + +## Contributing + +In general, we follow the "fork-and-pull" Git workflow. + +1. Fork the repo on GitHub +2. Clone the project to your own machine +3. Work on your fork + 1. Make your changes and additions + - Most of your changes should be focused on `src/` and `test/` folders and/or `README.md`. + - Files such as `validator.js`, `validator.min.js` and files in `lib/` folder are autogenerated when running tests (`npm test`) and need not to be changed **manually**. + 2. Change or add tests if needed + 3. Run tests and make sure they pass + 4. Add changes to README.md if needed +4. Commit changes to your own branch +5. **Make sure** you merge the latest from "upstream" and resolve conflicts if there is any +6. Repeat step 3(3) above +7. Push your work back up to your fork +8. Submit a Pull request so that we can review your changes + +## Tests + +Tests are using mocha, to run the tests use: + +```sh +$ npm test +``` + +## Maintainers + +- [chriso](https://github.com/chriso) - **Chris O'Hara** (author) +- [profnandaa](https://github.com/profnandaa) - **Anthony Nandaa** + +## Reading + +Remember, validating can be troublesome sometimes. See [A list of articles about programming assumptions commonly made that aren't true](https://github.com/jameslk/awesome-falsehoods). + +## License (MIT) + +``` +Copyright (c) 2018 Chris O'Hara + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +``` + +[downloads-image]: http://img.shields.io/npm/dm/validator.svg + +[npm-url]: https://npmjs.org/package/validator +[npm-image]: http://img.shields.io/npm/v/validator.svg + +[codecov-url]: https://codecov.io/gh/validatorjs/validator.js +[codecov-image]: https://codecov.io/gh/validatorjs/validator.js/branch/master/graph/badge.svg + +[ci-url]: https://github.com/validatorjs/validator.js/actions?query=workflow%3ACI +[ci-image]: https://github.com/validatorjs/validator.js/workflows/CI/badge.svg?branch=master + +[amd]: http://requirejs.org/docs/whyamd.html +[bower]: http://bower.io/ + +[mongoid]: http://docs.mongodb.org/manual/reference/object-id/ +[ISIN]: https://en.wikipedia.org/wiki/International_Securities_Identification_Number diff --git a/node_modules/validator/es/index.js b/node_modules/validator/es/index.js new file mode 100644 index 0000000..ed4eb79 --- /dev/null +++ b/node_modules/validator/es/index.js @@ -0,0 +1,198 @@ +import toDate from './lib/toDate'; +import toFloat from './lib/toFloat'; +import toInt from './lib/toInt'; +import toBoolean from './lib/toBoolean'; +import equals from './lib/equals'; +import contains from './lib/contains'; +import matches from './lib/matches'; +import isEmail from './lib/isEmail'; +import isURL from './lib/isURL'; +import isMACAddress from './lib/isMACAddress'; +import isIP from './lib/isIP'; +import isIPRange from './lib/isIPRange'; +import isFQDN from './lib/isFQDN'; +import isDate from './lib/isDate'; +import isBoolean from './lib/isBoolean'; +import isLocale from './lib/isLocale'; +import isAlpha, { locales as isAlphaLocales } from './lib/isAlpha'; +import isAlphanumeric, { locales as isAlphanumericLocales } from './lib/isAlphanumeric'; +import isNumeric from './lib/isNumeric'; +import isPassportNumber from './lib/isPassportNumber'; +import isPort from './lib/isPort'; +import isLowercase from './lib/isLowercase'; +import isUppercase from './lib/isUppercase'; +import isIMEI from './lib/isIMEI'; +import isAscii from './lib/isAscii'; +import isFullWidth from './lib/isFullWidth'; +import isHalfWidth from './lib/isHalfWidth'; +import isVariableWidth from './lib/isVariableWidth'; +import isMultibyte from './lib/isMultibyte'; +import isSemVer from './lib/isSemVer'; +import isSurrogatePair from './lib/isSurrogatePair'; +import isInt from './lib/isInt'; +import isFloat, { locales as isFloatLocales } from './lib/isFloat'; +import isDecimal from './lib/isDecimal'; +import isHexadecimal from './lib/isHexadecimal'; +import isOctal from './lib/isOctal'; +import isDivisibleBy from './lib/isDivisibleBy'; +import isHexColor from './lib/isHexColor'; +import isRgbColor from './lib/isRgbColor'; +import isHSL from './lib/isHSL'; +import isISRC from './lib/isISRC'; +import isIBAN, { locales as ibanLocales } from './lib/isIBAN'; +import isBIC from './lib/isBIC'; +import isMD5 from './lib/isMD5'; +import isHash from './lib/isHash'; +import isJWT from './lib/isJWT'; +import isJSON from './lib/isJSON'; +import isEmpty from './lib/isEmpty'; +import isLength from './lib/isLength'; +import isByteLength from 
'./lib/isByteLength'; +import isUUID from './lib/isUUID'; +import isMongoId from './lib/isMongoId'; +import isAfter from './lib/isAfter'; +import isBefore from './lib/isBefore'; +import isIn from './lib/isIn'; +import isCreditCard from './lib/isCreditCard'; +import isIdentityCard from './lib/isIdentityCard'; +import isEAN from './lib/isEAN'; +import isISIN from './lib/isISIN'; +import isISBN from './lib/isISBN'; +import isISSN from './lib/isISSN'; +import isTaxID from './lib/isTaxID'; +import isMobilePhone, { locales as isMobilePhoneLocales } from './lib/isMobilePhone'; +import isEthereumAddress from './lib/isEthereumAddress'; +import isCurrency from './lib/isCurrency'; +import isBtcAddress from './lib/isBtcAddress'; +import isISO8601 from './lib/isISO8601'; +import isRFC3339 from './lib/isRFC3339'; +import isISO31661Alpha2 from './lib/isISO31661Alpha2'; +import isISO31661Alpha3 from './lib/isISO31661Alpha3'; +import isISO4217 from './lib/isISO4217'; +import isBase32 from './lib/isBase32'; +import isBase58 from './lib/isBase58'; +import isBase64 from './lib/isBase64'; +import isDataURI from './lib/isDataURI'; +import isMagnetURI from './lib/isMagnetURI'; +import isMimeType from './lib/isMimeType'; +import isLatLong from './lib/isLatLong'; +import isPostalCode, { locales as isPostalCodeLocales } from './lib/isPostalCode'; +import ltrim from './lib/ltrim'; +import rtrim from './lib/rtrim'; +import trim from './lib/trim'; +import escape from './lib/escape'; +import unescape from './lib/unescape'; +import stripLow from './lib/stripLow'; +import whitelist from './lib/whitelist'; +import blacklist from './lib/blacklist'; +import isWhitelisted from './lib/isWhitelisted'; +import normalizeEmail from './lib/normalizeEmail'; +import isSlug from './lib/isSlug'; +import isLicensePlate from './lib/isLicensePlate'; +import isStrongPassword from './lib/isStrongPassword'; +import isVAT from './lib/isVAT'; +var version = '13.7.0'; +var validator = { + version: version, + toDate: toDate, + toFloat: toFloat, + toInt: toInt, + toBoolean: toBoolean, + equals: equals, + contains: contains, + matches: matches, + isEmail: isEmail, + isURL: isURL, + isMACAddress: isMACAddress, + isIP: isIP, + isIPRange: isIPRange, + isFQDN: isFQDN, + isBoolean: isBoolean, + isIBAN: isIBAN, + isBIC: isBIC, + isAlpha: isAlpha, + isAlphaLocales: isAlphaLocales, + isAlphanumeric: isAlphanumeric, + isAlphanumericLocales: isAlphanumericLocales, + isNumeric: isNumeric, + isPassportNumber: isPassportNumber, + isPort: isPort, + isLowercase: isLowercase, + isUppercase: isUppercase, + isAscii: isAscii, + isFullWidth: isFullWidth, + isHalfWidth: isHalfWidth, + isVariableWidth: isVariableWidth, + isMultibyte: isMultibyte, + isSemVer: isSemVer, + isSurrogatePair: isSurrogatePair, + isInt: isInt, + isIMEI: isIMEI, + isFloat: isFloat, + isFloatLocales: isFloatLocales, + isDecimal: isDecimal, + isHexadecimal: isHexadecimal, + isOctal: isOctal, + isDivisibleBy: isDivisibleBy, + isHexColor: isHexColor, + isRgbColor: isRgbColor, + isHSL: isHSL, + isISRC: isISRC, + isMD5: isMD5, + isHash: isHash, + isJWT: isJWT, + isJSON: isJSON, + isEmpty: isEmpty, + isLength: isLength, + isLocale: isLocale, + isByteLength: isByteLength, + isUUID: isUUID, + isMongoId: isMongoId, + isAfter: isAfter, + isBefore: isBefore, + isIn: isIn, + isCreditCard: isCreditCard, + isIdentityCard: isIdentityCard, + isEAN: isEAN, + isISIN: isISIN, + isISBN: isISBN, + isISSN: isISSN, + isMobilePhone: isMobilePhone, + isMobilePhoneLocales: isMobilePhoneLocales, + isPostalCode: 
isPostalCode, + isPostalCodeLocales: isPostalCodeLocales, + isEthereumAddress: isEthereumAddress, + isCurrency: isCurrency, + isBtcAddress: isBtcAddress, + isISO8601: isISO8601, + isRFC3339: isRFC3339, + isISO31661Alpha2: isISO31661Alpha2, + isISO31661Alpha3: isISO31661Alpha3, + isISO4217: isISO4217, + isBase32: isBase32, + isBase58: isBase58, + isBase64: isBase64, + isDataURI: isDataURI, + isMagnetURI: isMagnetURI, + isMimeType: isMimeType, + isLatLong: isLatLong, + ltrim: ltrim, + rtrim: rtrim, + trim: trim, + escape: escape, + unescape: unescape, + stripLow: stripLow, + whitelist: whitelist, + blacklist: blacklist, + isWhitelisted: isWhitelisted, + normalizeEmail: normalizeEmail, + toString: toString, + isSlug: isSlug, + isStrongPassword: isStrongPassword, + isTaxID: isTaxID, + isDate: isDate, + isLicensePlate: isLicensePlate, + isVAT: isVAT, + ibanLocales: ibanLocales +}; +export default validator; \ No newline at end of file diff --git a/node_modules/validator/es/lib/alpha.js b/node_modules/validator/es/lib/alpha.js new file mode 100644 index 0000000..a1a9a76 --- /dev/null +++ b/node_modules/validator/es/lib/alpha.js @@ -0,0 +1,123 @@ +export var alpha = { + 'en-US': /^[A-Z]+$/i, + 'az-AZ': /^[A-VXYZÇƏĞİıÖŞÜ]+$/i, + 'bg-BG': /^[А-Я]+$/i, + 'cs-CZ': /^[A-ZÁČĎÉĚÍŇÓŘŠŤÚŮÝŽ]+$/i, + 'da-DK': /^[A-ZÆØÅ]+$/i, + 'de-DE': /^[A-ZÄÖÜß]+$/i, + 'el-GR': /^[Α-ώ]+$/i, + 'es-ES': /^[A-ZÁÉÍÑÓÚÜ]+$/i, + 'fa-IR': /^[ابپتثجچحخدذرزژسشصضطظعغفقکگلمنوهی]+$/i, + 'fi-FI': /^[A-ZÅÄÖ]+$/i, + 'fr-FR': /^[A-ZÀÂÆÇÉÈÊËÏÎÔŒÙÛÜŸ]+$/i, + 'it-IT': /^[A-ZÀÉÈÌÎÓÒÙ]+$/i, + 'nb-NO': /^[A-ZÆØÅ]+$/i, + 'nl-NL': /^[A-ZÁÉËÏÓÖÜÚ]+$/i, + 'nn-NO': /^[A-ZÆØÅ]+$/i, + 'hu-HU': /^[A-ZÁÉÍÓÖŐÚÜŰ]+$/i, + 'pl-PL': /^[A-ZĄĆĘŚŁŃÓŻŹ]+$/i, + 'pt-PT': /^[A-ZÃÁÀÂÄÇÉÊËÍÏÕÓÔÖÚÜ]+$/i, + 'ru-RU': /^[А-ЯЁ]+$/i, + 'sl-SI': /^[A-ZČĆĐŠŽ]+$/i, + 'sk-SK': /^[A-ZÁČĎÉÍŇÓŠŤÚÝŽĹŔĽÄÔ]+$/i, + 'sr-RS@latin': /^[A-ZČĆŽŠĐ]+$/i, + 'sr-RS': /^[А-ЯЂЈЉЊЋЏ]+$/i, + 'sv-SE': /^[A-ZÅÄÖ]+$/i, + 'th-TH': /^[ก-๐\s]+$/i, + 'tr-TR': /^[A-ZÇĞİıÖŞÜ]+$/i, + 'uk-UA': /^[А-ЩЬЮЯЄIЇҐі]+$/i, + 'vi-VN': /^[A-ZÀÁẠẢÃÂẦẤẬẨẪĂẰẮẶẲẴĐÈÉẸẺẼÊỀẾỆỂỄÌÍỊỈĨÒÓỌỎÕÔỒỐỘỔỖƠỜỚỢỞỠÙÚỤỦŨƯỪỨỰỬỮỲÝỴỶỸ]+$/i, + 'ku-IQ': /^[ئابپتجچحخدرڕزژسشعغفڤقکگلڵمنوۆھەیێيطؤثآإأكضصةظذ]+$/i, + ar: /^[ءآأؤإئابةتثجحخدذرزسشصضطظعغفقكلمنهوىيًٌٍَُِّْٰ]+$/, + he: /^[א-ת]+$/, + fa: /^['آاءأؤئبپتثجچحخدذرزژسشصضطظعغفقکگلمنوهةی']+$/i, + 'hi-IN': /^[\u0900-\u0961]+[\u0972-\u097F]*$/i +}; +export var alphanumeric = { + 'en-US': /^[0-9A-Z]+$/i, + 'az-AZ': /^[0-9A-VXYZÇƏĞİıÖŞÜ]+$/i, + 'bg-BG': /^[0-9А-Я]+$/i, + 'cs-CZ': /^[0-9A-ZÁČĎÉĚÍŇÓŘŠŤÚŮÝŽ]+$/i, + 'da-DK': /^[0-9A-ZÆØÅ]+$/i, + 'de-DE': /^[0-9A-ZÄÖÜß]+$/i, + 'el-GR': /^[0-9Α-ω]+$/i, + 'es-ES': /^[0-9A-ZÁÉÍÑÓÚÜ]+$/i, + 'fi-FI': /^[0-9A-ZÅÄÖ]+$/i, + 'fr-FR': /^[0-9A-ZÀÂÆÇÉÈÊËÏÎÔŒÙÛÜŸ]+$/i, + 'it-IT': /^[0-9A-ZÀÉÈÌÎÓÒÙ]+$/i, + 'hu-HU': /^[0-9A-ZÁÉÍÓÖŐÚÜŰ]+$/i, + 'nb-NO': /^[0-9A-ZÆØÅ]+$/i, + 'nl-NL': /^[0-9A-ZÁÉËÏÓÖÜÚ]+$/i, + 'nn-NO': /^[0-9A-ZÆØÅ]+$/i, + 'pl-PL': /^[0-9A-ZĄĆĘŚŁŃÓŻŹ]+$/i, + 'pt-PT': /^[0-9A-ZÃÁÀÂÄÇÉÊËÍÏÕÓÔÖÚÜ]+$/i, + 'ru-RU': /^[0-9А-ЯЁ]+$/i, + 'sl-SI': /^[0-9A-ZČĆĐŠŽ]+$/i, + 'sk-SK': /^[0-9A-ZÁČĎÉÍŇÓŠŤÚÝŽĹŔĽÄÔ]+$/i, + 'sr-RS@latin': /^[0-9A-ZČĆŽŠĐ]+$/i, + 'sr-RS': /^[0-9А-ЯЂЈЉЊЋЏ]+$/i, + 'sv-SE': /^[0-9A-ZÅÄÖ]+$/i, + 'th-TH': /^[ก-๙\s]+$/i, + 'tr-TR': /^[0-9A-ZÇĞİıÖŞÜ]+$/i, + 'uk-UA': /^[0-9А-ЩЬЮЯЄIЇҐі]+$/i, + 'ku-IQ': /^[٠١٢٣٤٥٦٧٨٩0-9ئابپتجچحخدرڕزژسشعغفڤقکگلڵمنوۆھەیێيطؤثآإأكضصةظذ]+$/i, + 'vi-VN': /^[0-9A-ZÀÁẠẢÃÂẦẤẬẨẪĂẰẮẶẲẴĐÈÉẸẺẼÊỀẾỆỂỄÌÍỊỈĨÒÓỌỎÕÔỒỐỘỔỖƠỜỚỢỞỠÙÚỤỦŨƯỪỨỰỬỮỲÝỴỶỸ]+$/i, + ar: /^[٠١٢٣٤٥٦٧٨٩0-9ءآأؤإئابةتثجحخدذرزسشصضطظعغفقكلمنهوىيًٌٍَُِّْٰ]+$/, + he: /^[0-9א-ת]+$/, + 
fa: /^['0-9آاءأؤئبپتثجچحخدذرزژسشصضطظعغفقکگلمنوهةی۱۲۳۴۵۶۷۸۹۰']+$/i, + 'hi-IN': /^[\u0900-\u0963]+[\u0966-\u097F]*$/i +}; +export var decimal = { + 'en-US': '.', + ar: '٫' +}; +export var englishLocales = ['AU', 'GB', 'HK', 'IN', 'NZ', 'ZA', 'ZM']; + +for (var locale, i = 0; i < englishLocales.length; i++) { + locale = "en-".concat(englishLocales[i]); + alpha[locale] = alpha['en-US']; + alphanumeric[locale] = alphanumeric['en-US']; + decimal[locale] = decimal['en-US']; +} // Source: http://www.localeplanet.com/java/ + + +export var arabicLocales = ['AE', 'BH', 'DZ', 'EG', 'IQ', 'JO', 'KW', 'LB', 'LY', 'MA', 'QM', 'QA', 'SA', 'SD', 'SY', 'TN', 'YE']; + +for (var _locale, _i = 0; _i < arabicLocales.length; _i++) { + _locale = "ar-".concat(arabicLocales[_i]); + alpha[_locale] = alpha.ar; + alphanumeric[_locale] = alphanumeric.ar; + decimal[_locale] = decimal.ar; +} + +export var farsiLocales = ['IR', 'AF']; + +for (var _locale2, _i2 = 0; _i2 < farsiLocales.length; _i2++) { + _locale2 = "fa-".concat(farsiLocales[_i2]); + alphanumeric[_locale2] = alphanumeric.fa; + decimal[_locale2] = decimal.ar; +} // Source: https://en.wikipedia.org/wiki/Decimal_mark + + +export var dotDecimal = ['ar-EG', 'ar-LB', 'ar-LY']; +export var commaDecimal = ['bg-BG', 'cs-CZ', 'da-DK', 'de-DE', 'el-GR', 'en-ZM', 'es-ES', 'fr-CA', 'fr-FR', 'id-ID', 'it-IT', 'ku-IQ', 'hi-IN', 'hu-HU', 'nb-NO', 'nn-NO', 'nl-NL', 'pl-PL', 'pt-PT', 'ru-RU', 'sl-SI', 'sr-RS@latin', 'sr-RS', 'sv-SE', 'tr-TR', 'uk-UA', 'vi-VN']; + +for (var _i3 = 0; _i3 < dotDecimal.length; _i3++) { + decimal[dotDecimal[_i3]] = decimal['en-US']; +} + +for (var _i4 = 0; _i4 < commaDecimal.length; _i4++) { + decimal[commaDecimal[_i4]] = ','; +} + +alpha['fr-CA'] = alpha['fr-FR']; +alphanumeric['fr-CA'] = alphanumeric['fr-FR']; +alpha['pt-BR'] = alpha['pt-PT']; +alphanumeric['pt-BR'] = alphanumeric['pt-PT']; +decimal['pt-BR'] = decimal['pt-PT']; // see #862 + +alpha['pl-Pl'] = alpha['pl-PL']; +alphanumeric['pl-Pl'] = alphanumeric['pl-PL']; +decimal['pl-Pl'] = decimal['pl-PL']; // see #1455 + +alpha['fa-AF'] = alpha.fa; \ No newline at end of file diff --git a/node_modules/validator/es/lib/blacklist.js b/node_modules/validator/es/lib/blacklist.js new file mode 100644 index 0000000..77c0e5c --- /dev/null +++ b/node_modules/validator/es/lib/blacklist.js @@ -0,0 +1,5 @@ +import assertString from './util/assertString'; +export default function blacklist(str, chars) { + assertString(str); + return str.replace(new RegExp("[".concat(chars, "]+"), 'g'), ''); +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/contains.js b/node_modules/validator/es/lib/contains.js new file mode 100644 index 0000000..bf654b8 --- /dev/null +++ b/node_modules/validator/es/lib/contains.js @@ -0,0 +1,17 @@ +import assertString from './util/assertString'; +import toString from './util/toString'; +import merge from './util/merge'; +var defaulContainsOptions = { + ignoreCase: false, + minOccurrences: 1 +}; +export default function contains(str, elem, options) { + assertString(str); + options = merge(options, defaulContainsOptions); + + if (options.ignoreCase) { + return str.toLowerCase().split(toString(elem).toLowerCase()).length > options.minOccurrences; + } + + return str.split(toString(elem)).length > options.minOccurrences; +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/equals.js b/node_modules/validator/es/lib/equals.js new file mode 100644 index 0000000..87a9ded --- /dev/null +++ b/node_modules/validator/es/lib/equals.js @@ -0,0 +1,5 @@ 
+import assertString from './util/assertString'; +export default function equals(str, comparison) { + assertString(str); + return str === comparison; +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/escape.js b/node_modules/validator/es/lib/escape.js new file mode 100644 index 0000000..e9bb6de --- /dev/null +++ b/node_modules/validator/es/lib/escape.js @@ -0,0 +1,5 @@ +import assertString from './util/assertString'; +export default function escape(str) { + assertString(str); + return str.replace(/&/g, '&amp;').replace(/"/g, '&quot;').replace(/'/g, '&#x27;').replace(/</g, '&lt;').replace(/>/g, '&gt;').replace(/\//g, '&#x2F;').replace(/\\/g, '&#x5C;').replace(/`/g, '&#96;'); +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isAfter.js b/node_modules/validator/es/lib/isAfter.js new file mode 100644 index 0000000..b99cfa4 --- /dev/null +++ b/node_modules/validator/es/lib/isAfter.js @@ -0,0 +1,9 @@ +import assertString from './util/assertString'; +import toDate from './toDate'; +export default function isAfter(str) { + var date = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : String(new Date()); + assertString(str); + var comparison = toDate(date); + var original = toDate(str); + return !!(original && comparison && original > comparison); +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isAlpha.js b/node_modules/validator/es/lib/isAlpha.js new file mode 100644 index 0000000..5214b74 --- /dev/null +++ b/node_modules/validator/es/lib/isAlpha.js @@ -0,0 +1,26 @@ +import assertString from './util/assertString'; +import { alpha } from './alpha'; +export default function isAlpha(_str) { + var locale = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'en-US'; + var options = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {}; + assertString(_str); + var str = _str; + var ignore = options.ignore; + + if (ignore) { + if (ignore instanceof RegExp) { + str = str.replace(ignore, ''); + } else if (typeof ignore === 'string') { + str = str.replace(new RegExp("[".concat(ignore.replace(/[-[\]{}()*+?.,\\^$|#\\s]/g, '\\$&'), "]"), 'g'), ''); // escape regex for ignore + } else { + throw new Error('ignore should be instance of a String or RegExp'); + } + } + + if (locale in alpha) { + return alpha[locale].test(str); + } + + throw new Error("Invalid locale '".concat(locale, "'")); +} +export var locales = Object.keys(alpha); \ No newline at end of file diff --git a/node_modules/validator/es/lib/isAlphanumeric.js b/node_modules/validator/es/lib/isAlphanumeric.js new file mode 100644 index 0000000..b0556e5 --- /dev/null +++ b/node_modules/validator/es/lib/isAlphanumeric.js @@ -0,0 +1,26 @@ +import assertString from './util/assertString'; +import { alphanumeric } from './alpha'; +export default function isAlphanumeric(_str) { + var locale = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'en-US'; + var options = arguments.length > 2 && arguments[2] !== undefined ?
arguments[2] : {}; + assertString(_str); + var str = _str; + var ignore = options.ignore; + + if (ignore) { + if (ignore instanceof RegExp) { + str = str.replace(ignore, ''); + } else if (typeof ignore === 'string') { + str = str.replace(new RegExp("[".concat(ignore.replace(/[-[\]{}()*+?.,\\^$|#\\s]/g, '\\$&'), "]"), 'g'), ''); // escape regex for ignore + } else { + throw new Error('ignore should be instance of a String or RegExp'); + } + } + + if (locale in alphanumeric) { + return alphanumeric[locale].test(str); + } + + throw new Error("Invalid locale '".concat(locale, "'")); +} +export var locales = Object.keys(alphanumeric); \ No newline at end of file diff --git a/node_modules/validator/es/lib/isAscii.js b/node_modules/validator/es/lib/isAscii.js new file mode 100644 index 0000000..e322121 --- /dev/null +++ b/node_modules/validator/es/lib/isAscii.js @@ -0,0 +1,10 @@ +import assertString from './util/assertString'; +/* eslint-disable no-control-regex */ + +var ascii = /^[\x00-\x7F]+$/; +/* eslint-enable no-control-regex */ + +export default function isAscii(str) { + assertString(str); + return ascii.test(str); +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isBIC.js b/node_modules/validator/es/lib/isBIC.js new file mode 100644 index 0000000..8434a71 --- /dev/null +++ b/node_modules/validator/es/lib/isBIC.js @@ -0,0 +1,14 @@ +import assertString from './util/assertString'; +import { CountryCodes } from './isISO31661Alpha2'; // https://en.wikipedia.org/wiki/ISO_9362 + +var isBICReg = /^[A-Za-z]{6}[A-Za-z0-9]{2}([A-Za-z0-9]{3})?$/; +export default function isBIC(str) { + assertString(str); // toUpperCase() should be removed when a new major version goes out that changes + // the regex to [A-Z] (per the spec). + + if (!CountryCodes.has(str.slice(4, 6).toUpperCase())) { + return false; + } + + return isBICReg.test(str); +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isBase32.js b/node_modules/validator/es/lib/isBase32.js new file mode 100644 index 0000000..d018f80 --- /dev/null +++ b/node_modules/validator/es/lib/isBase32.js @@ -0,0 +1,12 @@ +import assertString from './util/assertString'; +var base32 = /^[A-Z2-7]+=*$/; +export default function isBase32(str) { + assertString(str); + var len = str.length; + + if (len % 8 === 0 && base32.test(str)) { + return true; + } + + return false; +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isBase58.js b/node_modules/validator/es/lib/isBase58.js new file mode 100644 index 0000000..863bbe9 --- /dev/null +++ b/node_modules/validator/es/lib/isBase58.js @@ -0,0 +1,12 @@ +import assertString from './util/assertString'; // Accepted chars - 123456789ABCDEFGH JKLMN PQRSTUVWXYZabcdefghijk mnopqrstuvwxyz + +var base58Reg = /^[A-HJ-NP-Za-km-z1-9]*$/; +export default function isBase58(str) { + assertString(str); + + if (base58Reg.test(str)) { + return true; + } + + return false; +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isBase64.js b/node_modules/validator/es/lib/isBase64.js new file mode 100644 index 0000000..04467fc --- /dev/null +++ b/node_modules/validator/es/lib/isBase64.js @@ -0,0 +1,23 @@ +import assertString from './util/assertString'; +import merge from './util/merge'; +var notBase64 = /[^A-Z0-9+\/=]/i; +var urlSafeBase64 = /^[A-Z0-9_\-]*$/i; +var defaultBase64Options = { + urlSafe: false +}; +export default function isBase64(str, options) { + assertString(str); + options = merge(options, defaultBase64Options); + var len = str.length; + + if 
(options.urlSafe) { + return urlSafeBase64.test(str); + } + + if (len % 4 !== 0 || notBase64.test(str)) { + return false; + } + + var firstPaddingChar = str.indexOf('='); + return firstPaddingChar === -1 || firstPaddingChar === len - 1 || firstPaddingChar === len - 2 && str[len - 1] === '='; +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isBefore.js b/node_modules/validator/es/lib/isBefore.js new file mode 100644 index 0000000..794dbab --- /dev/null +++ b/node_modules/validator/es/lib/isBefore.js @@ -0,0 +1,9 @@ +import assertString from './util/assertString'; +import toDate from './toDate'; +export default function isBefore(str) { + var date = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : String(new Date()); + assertString(str); + var comparison = toDate(date); + var original = toDate(str); + return !!(original && comparison && original < comparison); +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isBoolean.js b/node_modules/validator/es/lib/isBoolean.js new file mode 100644 index 0000000..c0be07a --- /dev/null +++ b/node_modules/validator/es/lib/isBoolean.js @@ -0,0 +1,16 @@ +import assertString from './util/assertString'; +var defaultOptions = { + loose: false +}; +var strictBooleans = ['true', 'false', '1', '0']; +var looseBooleans = [].concat(strictBooleans, ['yes', 'no']); +export default function isBoolean(str) { + var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : defaultOptions; + assertString(str); + + if (options.loose) { + return looseBooleans.includes(str.toLowerCase()); + } + + return strictBooleans.includes(str); +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isBtcAddress.js b/node_modules/validator/es/lib/isBtcAddress.js new file mode 100644 index 0000000..e29c912 --- /dev/null +++ b/node_modules/validator/es/lib/isBtcAddress.js @@ -0,0 +1,13 @@ +import assertString from './util/assertString'; // supports Bech32 addresses + +var bech32 = /^(bc1)[a-z0-9]{25,39}$/; +var base58 = /^(1|3)[A-HJ-NP-Za-km-z1-9]{25,39}$/; +export default function isBtcAddress(str) { + assertString(str); // check for bech32 + + if (str.startsWith('bc1')) { + return bech32.test(str); + } + + return base58.test(str); +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isByteLength.js b/node_modules/validator/es/lib/isByteLength.js new file mode 100644 index 0000000..eee2543 --- /dev/null +++ b/node_modules/validator/es/lib/isByteLength.js @@ -0,0 +1,22 @@ +function _typeof(obj) { "@babel/helpers - typeof"; if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? 
"symbol" : typeof obj; }; } return _typeof(obj); } + +import assertString from './util/assertString'; +/* eslint-disable prefer-rest-params */ + +export default function isByteLength(str, options) { + assertString(str); + var min; + var max; + + if (_typeof(options) === 'object') { + min = options.min || 0; + max = options.max; + } else { + // backwards compatibility: isByteLength(str, min [, max]) + min = arguments[1]; + max = arguments[2]; + } + + var len = encodeURI(str).split(/%..|./).length - 1; + return len >= min && (typeof max === 'undefined' || len <= max); +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isCreditCard.js b/node_modules/validator/es/lib/isCreditCard.js new file mode 100644 index 0000000..f1beb2d --- /dev/null +++ b/node_modules/validator/es/lib/isCreditCard.js @@ -0,0 +1,40 @@ +import assertString from './util/assertString'; +/* eslint-disable max-len */ + +var creditCard = /^(?:4[0-9]{12}(?:[0-9]{3,6})?|5[1-5][0-9]{14}|(222[1-9]|22[3-9][0-9]|2[3-6][0-9]{2}|27[01][0-9]|2720)[0-9]{12}|6(?:011|5[0-9][0-9])[0-9]{12,15}|3[47][0-9]{13}|3(?:0[0-5]|[68][0-9])[0-9]{11}|(?:2131|1800|35\d{3})\d{11}|6[27][0-9]{14}|^(81[0-9]{14,17}))$/; +/* eslint-enable max-len */ + +export default function isCreditCard(str) { + assertString(str); + var sanitized = str.replace(/[- ]+/g, ''); + + if (!creditCard.test(sanitized)) { + return false; + } + + var sum = 0; + var digit; + var tmpNum; + var shouldDouble; + + for (var i = sanitized.length - 1; i >= 0; i--) { + digit = sanitized.substring(i, i + 1); + tmpNum = parseInt(digit, 10); + + if (shouldDouble) { + tmpNum *= 2; + + if (tmpNum >= 10) { + sum += tmpNum % 10 + 1; + } else { + sum += tmpNum; + } + } else { + sum += tmpNum; + } + + shouldDouble = !shouldDouble; + } + + return !!(sum % 10 === 0 ? sanitized : false); +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isCurrency.js b/node_modules/validator/es/lib/isCurrency.js new file mode 100755 index 0000000..519ee19 --- /dev/null +++ b/node_modules/validator/es/lib/isCurrency.js @@ -0,0 +1,77 @@ +import merge from './util/merge'; +import assertString from './util/assertString'; + +function currencyRegex(options) { + var decimal_digits = "\\d{".concat(options.digits_after_decimal[0], "}"); + options.digits_after_decimal.forEach(function (digit, index) { + if (index !== 0) decimal_digits = "".concat(decimal_digits, "|\\d{").concat(digit, "}"); + }); + var symbol = "(".concat(options.symbol.replace(/\W/, function (m) { + return "\\".concat(m); + }), ")").concat(options.require_symbol ? '' : '?'), + negative = '-?', + whole_dollar_amount_without_sep = '[1-9]\\d*', + whole_dollar_amount_with_sep = "[1-9]\\d{0,2}(\\".concat(options.thousands_separator, "\\d{3})*"), + valid_whole_dollar_amounts = ['0', whole_dollar_amount_without_sep, whole_dollar_amount_with_sep], + whole_dollar_amount = "(".concat(valid_whole_dollar_amounts.join('|'), ")?"), + decimal_amount = "(\\".concat(options.decimal_separator, "(").concat(decimal_digits, "))").concat(options.require_decimal ? '' : '?'); + var pattern = whole_dollar_amount + (options.allow_decimal || options.require_decimal ? 
decimal_amount : ''); // default is negative sign before symbol, but there are two other options (besides parens) + + if (options.allow_negatives && !options.parens_for_negatives) { + if (options.negative_sign_after_digits) { + pattern += negative; + } else if (options.negative_sign_before_digits) { + pattern = negative + pattern; + } + } // South African Rand, for example, uses R 123 (space) and R-123 (no space) + + + if (options.allow_negative_sign_placeholder) { + pattern = "( (?!\\-))?".concat(pattern); + } else if (options.allow_space_after_symbol) { + pattern = " ?".concat(pattern); + } else if (options.allow_space_after_digits) { + pattern += '( (?!$))?'; + } + + if (options.symbol_after_digits) { + pattern += symbol; + } else { + pattern = symbol + pattern; + } + + if (options.allow_negatives) { + if (options.parens_for_negatives) { + pattern = "(\\(".concat(pattern, "\\)|").concat(pattern, ")"); + } else if (!(options.negative_sign_before_digits || options.negative_sign_after_digits)) { + pattern = negative + pattern; + } + } // ensure there's a dollar and/or decimal amount, and that + // it doesn't start with a space or a negative sign followed by a space + + + return new RegExp("^(?!-? )(?=.*\\d)".concat(pattern, "$")); +} + +var default_currency_options = { + symbol: '$', + require_symbol: false, + allow_space_after_symbol: false, + symbol_after_digits: false, + allow_negatives: true, + parens_for_negatives: false, + negative_sign_before_digits: false, + negative_sign_after_digits: false, + allow_negative_sign_placeholder: false, + thousands_separator: ',', + decimal_separator: '.', + allow_decimal: true, + require_decimal: false, + digits_after_decimal: [2], + allow_space_after_digits: false +}; +export default function isCurrency(str, options) { + assertString(str); + options = merge(options, default_currency_options); + return currencyRegex(options).test(str); +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isDataURI.js b/node_modules/validator/es/lib/isDataURI.js new file mode 100644 index 0000000..a32d05e --- /dev/null +++ b/node_modules/validator/es/lib/isDataURI.js @@ -0,0 +1,39 @@ +import assertString from './util/assertString'; +var validMediaType = /^[a-z]+\/[a-z0-9\-\+]+$/i; +var validAttribute = /^[a-z\-]+=[a-z0-9\-]+$/i; +var validData = /^[a-z0-9!\$&'\(\)\*\+,;=\-\._~:@\/\?%\s]*$/i; +export default function isDataURI(str) { + assertString(str); + var data = str.split(','); + + if (data.length < 2) { + return false; + } + + var attributes = data.shift().trim().split(';'); + var schemeAndMediaType = attributes.shift(); + + if (schemeAndMediaType.substr(0, 5) !== 'data:') { + return false; + } + + var mediaType = schemeAndMediaType.substr(5); + + if (mediaType !== '' && !validMediaType.test(mediaType)) { + return false; + } + + for (var i = 0; i < attributes.length; i++) { + if (!(i === attributes.length - 1 && attributes[i].toLowerCase() === 'base64') && !validAttribute.test(attributes[i])) { + return false; + } + } + + for (var _i = 0; _i < data.length; _i++) { + if (!validData.test(data[_i])) { + return false; + } + } + + return true; +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isDate.js b/node_modules/validator/es/lib/isDate.js new file mode 100644 index 0000000..c4d0d08 --- /dev/null +++ b/node_modules/validator/es/lib/isDate.js @@ -0,0 +1,86 @@ +function _slicedToArray(arr, i) { return _arrayWithHoles(arr) || _iterableToArrayLimit(arr, i) || _unsupportedIterableToArray(arr, i) || _nonIterableRest(); } + 
+function _nonIterableRest() { throw new TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); } + +function _iterableToArrayLimit(arr, i) { if (typeof Symbol === "undefined" || !(Symbol.iterator in Object(arr))) return; var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i["return"] != null) _i["return"](); } finally { if (_d) throw _e; } } return _arr; } + +function _arrayWithHoles(arr) { if (Array.isArray(arr)) return arr; } + +function _createForOfIteratorHelper(o, allowArrayLike) { var it; if (typeof Symbol === "undefined" || o[Symbol.iterator] == null) { if (Array.isArray(o) || (it = _unsupportedIterableToArray(o)) || allowArrayLike && o && typeof o.length === "number") { if (it) o = it; var i = 0; var F = function F() {}; return { s: F, n: function n() { if (i >= o.length) return { done: true }; return { done: false, value: o[i++] }; }, e: function e(_e2) { throw _e2; }, f: F }; } throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); } var normalCompletion = true, didErr = false, err; return { s: function s() { it = o[Symbol.iterator](); }, n: function n() { var step = it.next(); normalCompletion = step.done; return step; }, e: function e(_e3) { didErr = true; err = _e3; }, f: function f() { try { if (!normalCompletion && it["return"] != null) it["return"](); } finally { if (didErr) throw err; } } }; } + +function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return Array.from(o); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); } + +function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) { arr2[i] = arr[i]; } return arr2; } + +import merge from './util/merge'; +var default_date_options = { + format: 'YYYY/MM/DD', + delimiters: ['/', '-'], + strictMode: false +}; + +function isValidFormat(format) { + return /(^(y{4}|y{2})[.\/-](m{1,2})[.\/-](d{1,2})$)|(^(m{1,2})[.\/-](d{1,2})[.\/-]((y{4}|y{2})$))|(^(d{1,2})[.\/-](m{1,2})[.\/-]((y{4}|y{2})$))/gi.test(format); +} + +function zip(date, format) { + var zippedArr = [], + len = Math.min(date.length, format.length); + + for (var i = 0; i < len; i++) { + zippedArr.push([date[i], format[i]]); + } + + return zippedArr; +} + +export default function isDate(input, options) { + if (typeof options === 'string') { + // Allow backward compatbility for old format isDate(input [, format]) + options = merge({ + format: options + }, default_date_options); + } else { + options = merge(options, default_date_options); + } + + if (typeof input === 'string' && isValidFormat(options.format)) { + var formatDelimiter = options.delimiters.find(function (delimiter) { + return options.format.indexOf(delimiter) !== -1; + }); + var dateDelimiter = options.strictMode ? 
formatDelimiter : options.delimiters.find(function (delimiter) { + return input.indexOf(delimiter) !== -1; + }); + var dateAndFormat = zip(input.split(dateDelimiter), options.format.toLowerCase().split(formatDelimiter)); + var dateObj = {}; + + var _iterator = _createForOfIteratorHelper(dateAndFormat), + _step; + + try { + for (_iterator.s(); !(_step = _iterator.n()).done;) { + var _step$value = _slicedToArray(_step.value, 2), + dateWord = _step$value[0], + formatWord = _step$value[1]; + + if (dateWord.length !== formatWord.length) { + return false; + } + + dateObj[formatWord.charAt(0)] = dateWord; + } + } catch (err) { + _iterator.e(err); + } finally { + _iterator.f(); + } + + return new Date("".concat(dateObj.m, "/").concat(dateObj.d, "/").concat(dateObj.y)).getDate() === +dateObj.d; + } + + if (!options.strictMode) { + return Object.prototype.toString.call(input) === '[object Date]' && isFinite(input); + } + + return false; +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isDecimal.js b/node_modules/validator/es/lib/isDecimal.js new file mode 100644 index 0000000..597f42c --- /dev/null +++ b/node_modules/validator/es/lib/isDecimal.js @@ -0,0 +1,26 @@ +import merge from './util/merge'; +import assertString from './util/assertString'; +import includes from './util/includes'; +import { decimal } from './alpha'; + +function decimalRegExp(options) { + var regExp = new RegExp("^[-+]?([0-9]+)?(\\".concat(decimal[options.locale], "[0-9]{").concat(options.decimal_digits, "})").concat(options.force_decimal ? '' : '?', "$")); + return regExp; +} + +var default_decimal_options = { + force_decimal: false, + decimal_digits: '1,', + locale: 'en-US' +}; +var blacklist = ['', '-', '+']; +export default function isDecimal(str, options) { + assertString(str); + options = merge(options, default_decimal_options); + + if (options.locale in decimal) { + return !includes(blacklist, str.replace(/ /g, '')) && decimalRegExp(options).test(str); + } + + throw new Error("Invalid locale '".concat(options.locale, "'")); +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isDivisibleBy.js b/node_modules/validator/es/lib/isDivisibleBy.js new file mode 100644 index 0000000..f71d5f4 --- /dev/null +++ b/node_modules/validator/es/lib/isDivisibleBy.js @@ -0,0 +1,6 @@ +import assertString from './util/assertString'; +import toFloat from './toFloat'; +export default function isDivisibleBy(str, num) { + assertString(str); + return toFloat(str) % parseInt(num, 10) === 0; +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isEAN.js b/node_modules/validator/es/lib/isEAN.js new file mode 100644 index 0000000..246db93 --- /dev/null +++ b/node_modules/validator/es/lib/isEAN.js @@ -0,0 +1,72 @@ +/** + * The most commonly used EAN standard is + * the thirteen-digit EAN-13, while the + * less commonly used 8-digit EAN-8 barcode was + * introduced for use on small packages. + * Also EAN/UCC-14 is used for Grouping of individual + * trade items above unit level(Intermediate, Carton or Pallet). 
+ * For more info about EAN-14 checkout: https://www.gtin.info/itf-14-barcodes/ + * EAN consists of: + * GS1 prefix, manufacturer code, product code and check digit + * Reference: https://en.wikipedia.org/wiki/International_Article_Number + * Reference: https://www.gtin.info/ + */ +import assertString from './util/assertString'; +/** + * Define EAN Lenghts; 8 for EAN-8; 13 for EAN-13; 14 for EAN-14 + * and Regular Expression for valid EANs (EAN-8, EAN-13, EAN-14), + * with exact numberic matching of 8 or 13 or 14 digits [0-9] + */ + +var LENGTH_EAN_8 = 8; +var LENGTH_EAN_14 = 14; +var validEanRegex = /^(\d{8}|\d{13}|\d{14})$/; +/** + * Get position weight given: + * EAN length and digit index/position + * + * @param {number} length + * @param {number} index + * @return {number} + */ + +function getPositionWeightThroughLengthAndIndex(length, index) { + if (length === LENGTH_EAN_8 || length === LENGTH_EAN_14) { + return index % 2 === 0 ? 3 : 1; + } + + return index % 2 === 0 ? 1 : 3; +} +/** + * Calculate EAN Check Digit + * Reference: https://en.wikipedia.org/wiki/International_Article_Number#Calculation_of_checksum_digit + * + * @param {string} ean + * @return {number} + */ + + +function calculateCheckDigit(ean) { + var checksum = ean.slice(0, -1).split('').map(function (_char, index) { + return Number(_char) * getPositionWeightThroughLengthAndIndex(ean.length, index); + }).reduce(function (acc, partialSum) { + return acc + partialSum; + }, 0); + var remainder = 10 - checksum % 10; + return remainder < 10 ? remainder : 0; +} +/** + * Check if string is valid EAN: + * Matches EAN-8/EAN-13/EAN-14 regex + * Has valid check digit. + * + * @param {string} str + * @return {boolean} + */ + + +export default function isEAN(str) { + assertString(str); + var actualCheckDigit = Number(str.slice(-1)); + return validEanRegex.test(str) && actualCheckDigit === calculateCheckDigit(str); +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isEmail.js b/node_modules/validator/es/lib/isEmail.js new file mode 100644 index 0000000..d44c1ac --- /dev/null +++ b/node_modules/validator/es/lib/isEmail.js @@ -0,0 +1,180 @@ +import assertString from './util/assertString'; +import merge from './util/merge'; +import isByteLength from './isByteLength'; +import isFQDN from './isFQDN'; +import isIP from './isIP'; +var default_email_options = { + allow_display_name: false, + require_display_name: false, + allow_utf8_local_part: true, + require_tld: true, + blacklisted_chars: '', + ignore_max_length: false, + host_blacklist: [] +}; +/* eslint-disable max-len */ + +/* eslint-disable no-control-regex */ + +var splitNameAddress = /^([^\x00-\x1F\x7F-\x9F\cX]+)]/.test(display_name_without_quotes); + + if (contains_illegal) { + // if contains illegal characters, + // must to be enclosed in double-quotes, otherwise it's not a valid display name + if (display_name_without_quotes === display_name) { + return false; + } // the quotes in display name must start with character symbol \ + + + var all_start_with_back_slash = display_name_without_quotes.split('"').length === display_name_without_quotes.split('\\"').length; + + if (!all_start_with_back_slash) { + return false; + } + } + + return true; +} + +export default function isEmail(str, options) { + assertString(str); + options = merge(options, default_email_options); + + if (options.require_display_name || options.allow_display_name) { + var display_email = str.match(splitNameAddress); + + if (display_email) { + var display_name = display_email[1]; // Remove 
display name and angle brackets to get email address + // Can be done in the regex but will introduce a ReDOS (See #1597 for more info) + + str = str.replace(display_name, '').replace(/(^<|>$)/g, ''); // sometimes need to trim the last space to get the display name + // because there may be a space between display name and email address + // eg. myname + // the display name is `myname` instead of `myname `, so need to trim the last space + + if (display_name.endsWith(' ')) { + display_name = display_name.substr(0, display_name.length - 1); + } + + if (!validateDisplayName(display_name)) { + return false; + } + } else if (options.require_display_name) { + return false; + } + } + + if (!options.ignore_max_length && str.length > defaultMaxEmailLength) { + return false; + } + + var parts = str.split('@'); + var domain = parts.pop(); + var lower_domain = domain.toLowerCase(); + + if (options.host_blacklist.includes(lower_domain)) { + return false; + } + + var user = parts.join('@'); + + if (options.domain_specific_validation && (lower_domain === 'gmail.com' || lower_domain === 'googlemail.com')) { + /* + Previously we removed dots for gmail addresses before validating. + This was removed because it allows `multiple..dots@gmail.com` + to be reported as valid, but it is not. + Gmail only normalizes single dots, removing them from here is pointless, + should be done in normalizeEmail + */ + user = user.toLowerCase(); // Removing sub-address from username before gmail validation + + var username = user.split('+')[0]; // Dots are not included in gmail length restriction + + if (!isByteLength(username.replace(/\./g, ''), { + min: 6, + max: 30 + })) { + return false; + } + + var _user_parts = username.split('.'); + + for (var i = 0; i < _user_parts.length; i++) { + if (!gmailUserPart.test(_user_parts[i])) { + return false; + } + } + } + + if (options.ignore_max_length === false && (!isByteLength(user, { + max: 64 + }) || !isByteLength(domain, { + max: 254 + }))) { + return false; + } + + if (!isFQDN(domain, { + require_tld: options.require_tld + })) { + if (!options.allow_ip_domain) { + return false; + } + + if (!isIP(domain)) { + if (!domain.startsWith('[') || !domain.endsWith(']')) { + return false; + } + + var noBracketdomain = domain.substr(1, domain.length - 2); + + if (noBracketdomain.length === 0 || !isIP(noBracketdomain)) { + return false; + } + } + } + + if (user[0] === '"') { + user = user.slice(1, user.length - 1); + return options.allow_utf8_local_part ? quotedEmailUserUtf8.test(user) : quotedEmailUser.test(user); + } + + var pattern = options.allow_utf8_local_part ? emailUserUtf8Part : emailUserPart; + var user_parts = user.split('.'); + + for (var _i = 0; _i < user_parts.length; _i++) { + if (!pattern.test(user_parts[_i])) { + return false; + } + } + + if (options.blacklisted_chars) { + if (user.search(new RegExp("[".concat(options.blacklisted_chars, "]+"), 'g')) !== -1) return false; + } + + return true; +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isEmpty.js b/node_modules/validator/es/lib/isEmpty.js new file mode 100644 index 0000000..79e29f7 --- /dev/null +++ b/node_modules/validator/es/lib/isEmpty.js @@ -0,0 +1,10 @@ +import assertString from './util/assertString'; +import merge from './util/merge'; +var default_is_empty_options = { + ignore_whitespace: false +}; +export default function isEmpty(str, options) { + assertString(str); + options = merge(options, default_is_empty_options); + return (options.ignore_whitespace ? 
str.trim().length : str.length) === 0; +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isEthereumAddress.js b/node_modules/validator/es/lib/isEthereumAddress.js new file mode 100644 index 0000000..9c70f64 --- /dev/null +++ b/node_modules/validator/es/lib/isEthereumAddress.js @@ -0,0 +1,6 @@ +import assertString from './util/assertString'; +var eth = /^(0x)[0-9a-f]{40}$/i; +export default function isEthereumAddress(str) { + assertString(str); + return eth.test(str); +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isFQDN.js b/node_modules/validator/es/lib/isFQDN.js new file mode 100644 index 0000000..b63f737 --- /dev/null +++ b/node_modules/validator/es/lib/isFQDN.js @@ -0,0 +1,74 @@ +import assertString from './util/assertString'; +import merge from './util/merge'; +var default_fqdn_options = { + require_tld: true, + allow_underscores: false, + allow_trailing_dot: false, + allow_numeric_tld: false, + allow_wildcard: false +}; +export default function isFQDN(str, options) { + assertString(str); + options = merge(options, default_fqdn_options); + /* Remove the optional trailing dot before checking validity */ + + if (options.allow_trailing_dot && str[str.length - 1] === '.') { + str = str.substring(0, str.length - 1); + } + /* Remove the optional wildcard before checking validity */ + + + if (options.allow_wildcard === true && str.indexOf('*.') === 0) { + str = str.substring(2); + } + + var parts = str.split('.'); + var tld = parts[parts.length - 1]; + + if (options.require_tld) { + // disallow fqdns without tld + if (parts.length < 2) { + return false; + } + + if (!/^([a-z\u00A1-\u00A8\u00AA-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF]{2,}|xn[a-z0-9-]{2,})$/i.test(tld)) { + return false; + } // disallow spaces + + + if (/\s/.test(tld)) { + return false; + } + } // reject numeric TLDs + + + if (!options.allow_numeric_tld && /^\d+$/.test(tld)) { + return false; + } + + return parts.every(function (part) { + if (part.length > 63) { + return false; + } + + if (!/^[a-z_\u00a1-\uffff0-9-]+$/i.test(part)) { + return false; + } // disallow full-width chars + + + if (/[\uff01-\uff5e]/.test(part)) { + return false; + } // disallow parts starting or ending with hyphen + + + if (/^-|-$/.test(part)) { + return false; + } + + if (!options.allow_underscores && /_/.test(part)) { + return false; + } + + return true; + }); +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isFloat.js b/node_modules/validator/es/lib/isFloat.js new file mode 100644 index 0000000..069873b --- /dev/null +++ b/node_modules/validator/es/lib/isFloat.js @@ -0,0 +1,16 @@ +import assertString from './util/assertString'; +import { decimal } from './alpha'; +export default function isFloat(str, options) { + assertString(str); + options = options || {}; + + var _float = new RegExp("^(?:[-+])?(?:[0-9]+)?(?:\\".concat(options.locale ? decimal[options.locale] : '.', "[0-9]*)?(?:[eE][\\+\\-]?(?:[0-9]+))?$")); + + if (str === '' || str === '.' 
|| str === '-' || str === '+') { + return false; + } + + var value = parseFloat(str.replace(',', '.')); + return _float.test(str) && (!options.hasOwnProperty('min') || value >= options.min) && (!options.hasOwnProperty('max') || value <= options.max) && (!options.hasOwnProperty('lt') || value < options.lt) && (!options.hasOwnProperty('gt') || value > options.gt); +} +export var locales = Object.keys(decimal); \ No newline at end of file diff --git a/node_modules/validator/es/lib/isFullWidth.js b/node_modules/validator/es/lib/isFullWidth.js new file mode 100644 index 0000000..ae54629 --- /dev/null +++ b/node_modules/validator/es/lib/isFullWidth.js @@ -0,0 +1,6 @@ +import assertString from './util/assertString'; +export var fullWidth = /[^\u0020-\u007E\uFF61-\uFF9F\uFFA0-\uFFDC\uFFE8-\uFFEE0-9a-zA-Z]/; +export default function isFullWidth(str) { + assertString(str); + return fullWidth.test(str); +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isHSL.js b/node_modules/validator/es/lib/isHSL.js new file mode 100644 index 0000000..7efe8e9 --- /dev/null +++ b/node_modules/validator/es/lib/isHSL.js @@ -0,0 +1,14 @@ +import assertString from './util/assertString'; +var hslComma = /^hsla?\(((\+|\-)?([0-9]+(\.[0-9]+)?(e(\+|\-)?[0-9]+)?|\.[0-9]+(e(\+|\-)?[0-9]+)?))(deg|grad|rad|turn)?(,(\+|\-)?([0-9]+(\.[0-9]+)?(e(\+|\-)?[0-9]+)?|\.[0-9]+(e(\+|\-)?[0-9]+)?)%){2}(,((\+|\-)?([0-9]+(\.[0-9]+)?(e(\+|\-)?[0-9]+)?|\.[0-9]+(e(\+|\-)?[0-9]+)?)%?))?\)$/i; +var hslSpace = /^hsla?\(((\+|\-)?([0-9]+(\.[0-9]+)?(e(\+|\-)?[0-9]+)?|\.[0-9]+(e(\+|\-)?[0-9]+)?))(deg|grad|rad|turn)?(\s(\+|\-)?([0-9]+(\.[0-9]+)?(e(\+|\-)?[0-9]+)?|\.[0-9]+(e(\+|\-)?[0-9]+)?)%){2}\s?(\/\s((\+|\-)?([0-9]+(\.[0-9]+)?(e(\+|\-)?[0-9]+)?|\.[0-9]+(e(\+|\-)?[0-9]+)?)%?)\s?)?\)$/i; +export default function isHSL(str) { + assertString(str); // Strip duplicate spaces before calling the validation regex (See #1598 for more info) + + var strippedStr = str.replace(/\s+/g, ' ').replace(/\s?(hsla?\(|\)|,)\s?/ig, '$1'); + + if (strippedStr.indexOf(',') !== -1) { + return hslComma.test(strippedStr); + } + + return hslSpace.test(strippedStr); +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isHalfWidth.js b/node_modules/validator/es/lib/isHalfWidth.js new file mode 100644 index 0000000..b0c8795 --- /dev/null +++ b/node_modules/validator/es/lib/isHalfWidth.js @@ -0,0 +1,6 @@ +import assertString from './util/assertString'; +export var halfWidth = /[\u0020-\u007E\uFF61-\uFF9F\uFFA0-\uFFDC\uFFE8-\uFFEE0-9a-zA-Z]/; +export default function isHalfWidth(str) { + assertString(str); + return halfWidth.test(str); +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isHash.js b/node_modules/validator/es/lib/isHash.js new file mode 100644 index 0000000..3495486 --- /dev/null +++ b/node_modules/validator/es/lib/isHash.js @@ -0,0 +1,21 @@ +import assertString from './util/assertString'; +var lengths = { + md5: 32, + md4: 32, + sha1: 40, + sha256: 64, + sha384: 96, + sha512: 128, + ripemd128: 32, + ripemd160: 40, + tiger128: 32, + tiger160: 40, + tiger192: 48, + crc32: 8, + crc32b: 8 +}; +export default function isHash(str, algorithm) { + assertString(str); + var hash = new RegExp("^[a-fA-F0-9]{".concat(lengths[algorithm], "}$")); + return hash.test(str); +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isHexColor.js b/node_modules/validator/es/lib/isHexColor.js new file mode 100644 index 0000000..72eab2c --- /dev/null +++ b/node_modules/validator/es/lib/isHexColor.js @@ -0,0 
+1,6 @@ +import assertString from './util/assertString'; +var hexcolor = /^#?([0-9A-F]{3}|[0-9A-F]{4}|[0-9A-F]{6}|[0-9A-F]{8})$/i; +export default function isHexColor(str) { + assertString(str); + return hexcolor.test(str); +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isHexadecimal.js b/node_modules/validator/es/lib/isHexadecimal.js new file mode 100644 index 0000000..6654de4 --- /dev/null +++ b/node_modules/validator/es/lib/isHexadecimal.js @@ -0,0 +1,6 @@ +import assertString from './util/assertString'; +var hexadecimal = /^(0x|0h)?[0-9A-F]+$/i; +export default function isHexadecimal(str) { + assertString(str); + return hexadecimal.test(str); +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isIBAN.js b/node_modules/validator/es/lib/isIBAN.js new file mode 100644 index 0000000..0c97a01 --- /dev/null +++ b/node_modules/validator/es/lib/isIBAN.js @@ -0,0 +1,138 @@ +import assertString from './util/assertString'; +/** + * List of country codes with + * corresponding IBAN regular expression + * Reference: https://en.wikipedia.org/wiki/International_Bank_Account_Number + */ + +var ibanRegexThroughCountryCode = { + AD: /^(AD[0-9]{2})\d{8}[A-Z0-9]{12}$/, + AE: /^(AE[0-9]{2})\d{3}\d{16}$/, + AL: /^(AL[0-9]{2})\d{8}[A-Z0-9]{16}$/, + AT: /^(AT[0-9]{2})\d{16}$/, + AZ: /^(AZ[0-9]{2})[A-Z0-9]{4}\d{20}$/, + BA: /^(BA[0-9]{2})\d{16}$/, + BE: /^(BE[0-9]{2})\d{12}$/, + BG: /^(BG[0-9]{2})[A-Z]{4}\d{6}[A-Z0-9]{8}$/, + BH: /^(BH[0-9]{2})[A-Z]{4}[A-Z0-9]{14}$/, + BR: /^(BR[0-9]{2})\d{23}[A-Z]{1}[A-Z0-9]{1}$/, + BY: /^(BY[0-9]{2})[A-Z0-9]{4}\d{20}$/, + CH: /^(CH[0-9]{2})\d{5}[A-Z0-9]{12}$/, + CR: /^(CR[0-9]{2})\d{18}$/, + CY: /^(CY[0-9]{2})\d{8}[A-Z0-9]{16}$/, + CZ: /^(CZ[0-9]{2})\d{20}$/, + DE: /^(DE[0-9]{2})\d{18}$/, + DK: /^(DK[0-9]{2})\d{14}$/, + DO: /^(DO[0-9]{2})[A-Z]{4}\d{20}$/, + EE: /^(EE[0-9]{2})\d{16}$/, + EG: /^(EG[0-9]{2})\d{25}$/, + ES: /^(ES[0-9]{2})\d{20}$/, + FI: /^(FI[0-9]{2})\d{14}$/, + FO: /^(FO[0-9]{2})\d{14}$/, + FR: /^(FR[0-9]{2})\d{10}[A-Z0-9]{11}\d{2}$/, + GB: /^(GB[0-9]{2})[A-Z]{4}\d{14}$/, + GE: /^(GE[0-9]{2})[A-Z0-9]{2}\d{16}$/, + GI: /^(GI[0-9]{2})[A-Z]{4}[A-Z0-9]{15}$/, + GL: /^(GL[0-9]{2})\d{14}$/, + GR: /^(GR[0-9]{2})\d{7}[A-Z0-9]{16}$/, + GT: /^(GT[0-9]{2})[A-Z0-9]{4}[A-Z0-9]{20}$/, + HR: /^(HR[0-9]{2})\d{17}$/, + HU: /^(HU[0-9]{2})\d{24}$/, + IE: /^(IE[0-9]{2})[A-Z0-9]{4}\d{14}$/, + IL: /^(IL[0-9]{2})\d{19}$/, + IQ: /^(IQ[0-9]{2})[A-Z]{4}\d{15}$/, + IR: /^(IR[0-9]{2})0\d{2}0\d{18}$/, + IS: /^(IS[0-9]{2})\d{22}$/, + IT: /^(IT[0-9]{2})[A-Z]{1}\d{10}[A-Z0-9]{12}$/, + JO: /^(JO[0-9]{2})[A-Z]{4}\d{22}$/, + KW: /^(KW[0-9]{2})[A-Z]{4}[A-Z0-9]{22}$/, + KZ: /^(KZ[0-9]{2})\d{3}[A-Z0-9]{13}$/, + LB: /^(LB[0-9]{2})\d{4}[A-Z0-9]{20}$/, + LC: /^(LC[0-9]{2})[A-Z]{4}[A-Z0-9]{24}$/, + LI: /^(LI[0-9]{2})\d{5}[A-Z0-9]{12}$/, + LT: /^(LT[0-9]{2})\d{16}$/, + LU: /^(LU[0-9]{2})\d{3}[A-Z0-9]{13}$/, + LV: /^(LV[0-9]{2})[A-Z]{4}[A-Z0-9]{13}$/, + MC: /^(MC[0-9]{2})\d{10}[A-Z0-9]{11}\d{2}$/, + MD: /^(MD[0-9]{2})[A-Z0-9]{20}$/, + ME: /^(ME[0-9]{2})\d{18}$/, + MK: /^(MK[0-9]{2})\d{3}[A-Z0-9]{10}\d{2}$/, + MR: /^(MR[0-9]{2})\d{23}$/, + MT: /^(MT[0-9]{2})[A-Z]{4}\d{5}[A-Z0-9]{18}$/, + MU: /^(MU[0-9]{2})[A-Z]{4}\d{19}[A-Z]{3}$/, + MZ: /^(MZ[0-9]{2})\d{21}$/, + NL: /^(NL[0-9]{2})[A-Z]{4}\d{10}$/, + NO: /^(NO[0-9]{2})\d{11}$/, + PK: /^(PK[0-9]{2})[A-Z0-9]{4}\d{16}$/, + PL: /^(PL[0-9]{2})\d{24}$/, + PS: /^(PS[0-9]{2})[A-Z0-9]{4}\d{21}$/, + PT: /^(PT[0-9]{2})\d{21}$/, + QA: /^(QA[0-9]{2})[A-Z]{4}[A-Z0-9]{21}$/, + RO: /^(RO[0-9]{2})[A-Z]{4}[A-Z0-9]{16}$/, + 
RS: /^(RS[0-9]{2})\d{18}$/, + SA: /^(SA[0-9]{2})\d{2}[A-Z0-9]{18}$/, + SC: /^(SC[0-9]{2})[A-Z]{4}\d{20}[A-Z]{3}$/, + SE: /^(SE[0-9]{2})\d{20}$/, + SI: /^(SI[0-9]{2})\d{15}$/, + SK: /^(SK[0-9]{2})\d{20}$/, + SM: /^(SM[0-9]{2})[A-Z]{1}\d{10}[A-Z0-9]{12}$/, + SV: /^(SV[0-9]{2})[A-Z0-9]{4}\d{20}$/, + TL: /^(TL[0-9]{2})\d{19}$/, + TN: /^(TN[0-9]{2})\d{20}$/, + TR: /^(TR[0-9]{2})\d{5}[A-Z0-9]{17}$/, + UA: /^(UA[0-9]{2})\d{6}[A-Z0-9]{19}$/, + VA: /^(VA[0-9]{2})\d{18}$/, + VG: /^(VG[0-9]{2})[A-Z0-9]{4}\d{16}$/, + XK: /^(XK[0-9]{2})\d{16}$/ +}; +/** + * Check whether string has correct universal IBAN format + * The IBAN consists of up to 34 alphanumeric characters, as follows: + * Country Code using ISO 3166-1 alpha-2, two letters + * check digits, two digits and + * Basic Bank Account Number (BBAN), up to 30 alphanumeric characters. + * NOTE: Permitted IBAN characters are: digits [0-9] and the 26 latin alphabetic [A-Z] + * + * @param {string} str - string under validation + * @return {boolean} + */ + +function hasValidIbanFormat(str) { + // Strip white spaces and hyphens + var strippedStr = str.replace(/[\s\-]+/gi, '').toUpperCase(); + var isoCountryCode = strippedStr.slice(0, 2).toUpperCase(); + return isoCountryCode in ibanRegexThroughCountryCode && ibanRegexThroughCountryCode[isoCountryCode].test(strippedStr); +} +/** + * Check whether string has valid IBAN Checksum + * by performing basic mod-97 operation and + * the remainder should equal 1 + * -- Start by rearranging the IBAN by moving the four initial characters to the end of the string + * -- Replace each letter in the string with two digits, A -> 10, B = 11, Z = 35 + * -- Interpret the string as a decimal integer and + * -- compute the remainder on division by 97 (mod 97) + * Reference: https://en.wikipedia.org/wiki/International_Bank_Account_Number + * + * @param {string} str + * @return {boolean} + */ + + +function hasValidIbanChecksum(str) { + var strippedStr = str.replace(/[^A-Z0-9]+/gi, '').toUpperCase(); // Keep only digits and A-Z latin alphabetic + + var rearranged = strippedStr.slice(4) + strippedStr.slice(0, 4); + var alphaCapsReplacedWithDigits = rearranged.replace(/[A-Z]/g, function (_char) { + return _char.charCodeAt(0) - 55; + }); + var remainder = alphaCapsReplacedWithDigits.match(/\d{1,7}/g).reduce(function (acc, value) { + return Number(acc + value) % 97; + }, ''); + return remainder === 1; +} + +export default function isIBAN(str) { + assertString(str); + return hasValidIbanFormat(str) && hasValidIbanChecksum(str); +} +export var locales = Object.keys(ibanRegexThroughCountryCode); \ No newline at end of file diff --git a/node_modules/validator/es/lib/isIMEI.js b/node_modules/validator/es/lib/isIMEI.js new file mode 100644 index 0000000..27bd09d --- /dev/null +++ b/node_modules/validator/es/lib/isIMEI.js @@ -0,0 +1,47 @@ +import assertString from './util/assertString'; +var imeiRegexWithoutHypens = /^[0-9]{15}$/; +var imeiRegexWithHypens = /^\d{2}-\d{6}-\d{6}-\d{1}$/; +export default function isIMEI(str, options) { + assertString(str); + options = options || {}; // default regex for checking imei is the one without hyphens + + var imeiRegex = imeiRegexWithoutHypens; + + if (options.allow_hyphens) { + imeiRegex = imeiRegexWithHypens; + } + + if (!imeiRegex.test(str)) { + return false; + } + + str = str.replace(/-/g, ''); + var sum = 0, + mul = 2, + l = 14; + + for (var i = 0; i < l; i++) { + var digit = str.substring(l - i - 1, l - i); + var tp = parseInt(digit, 10) * mul; + + if (tp >= 10) { + sum += tp % 10 + 1; + } else 
{ + sum += tp; + } + + if (mul === 1) { + mul += 1; + } else { + mul -= 1; + } + } + + var chk = (10 - sum % 10) % 10; + + if (chk !== parseInt(str.substring(14, 15), 10)) { + return false; + } + + return true; +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isIP.js b/node_modules/validator/es/lib/isIP.js new file mode 100644 index 0000000..5cf3fd2 --- /dev/null +++ b/node_modules/validator/es/lib/isIP.js @@ -0,0 +1,62 @@ +import assertString from './util/assertString'; +/** +11.3. Examples + + The following addresses + + fe80::1234 (on the 1st link of the node) + ff02::5678 (on the 5th link of the node) + ff08::9abc (on the 10th organization of the node) + + would be represented as follows: + + fe80::1234%1 + ff02::5678%5 + ff08::9abc%10 + + (Here we assume a natural translation from a zone index to the + part, where the Nth zone of any scope is translated into + "N".) + + If we use interface names as , those addresses could also be + represented as follows: + + fe80::1234%ne0 + ff02::5678%pvc1.3 + ff08::9abc%interface10 + + where the interface "ne0" belongs to the 1st link, "pvc1.3" belongs + to the 5th link, and "interface10" belongs to the 10th organization. + * * */ + +var IPv4SegmentFormat = '(?:[0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])'; +var IPv4AddressFormat = "(".concat(IPv4SegmentFormat, "[.]){3}").concat(IPv4SegmentFormat); +var IPv4AddressRegExp = new RegExp("^".concat(IPv4AddressFormat, "$")); +var IPv6SegmentFormat = '(?:[0-9a-fA-F]{1,4})'; +var IPv6AddressRegExp = new RegExp('^(' + "(?:".concat(IPv6SegmentFormat, ":){7}(?:").concat(IPv6SegmentFormat, "|:)|") + "(?:".concat(IPv6SegmentFormat, ":){6}(?:").concat(IPv4AddressFormat, "|:").concat(IPv6SegmentFormat, "|:)|") + "(?:".concat(IPv6SegmentFormat, ":){5}(?::").concat(IPv4AddressFormat, "|(:").concat(IPv6SegmentFormat, "){1,2}|:)|") + "(?:".concat(IPv6SegmentFormat, ":){4}(?:(:").concat(IPv6SegmentFormat, "){0,1}:").concat(IPv4AddressFormat, "|(:").concat(IPv6SegmentFormat, "){1,3}|:)|") + "(?:".concat(IPv6SegmentFormat, ":){3}(?:(:").concat(IPv6SegmentFormat, "){0,2}:").concat(IPv4AddressFormat, "|(:").concat(IPv6SegmentFormat, "){1,4}|:)|") + "(?:".concat(IPv6SegmentFormat, ":){2}(?:(:").concat(IPv6SegmentFormat, "){0,3}:").concat(IPv4AddressFormat, "|(:").concat(IPv6SegmentFormat, "){1,5}|:)|") + "(?:".concat(IPv6SegmentFormat, ":){1}(?:(:").concat(IPv6SegmentFormat, "){0,4}:").concat(IPv4AddressFormat, "|(:").concat(IPv6SegmentFormat, "){1,6}|:)|") + "(?::((?::".concat(IPv6SegmentFormat, "){0,5}:").concat(IPv4AddressFormat, "|(?::").concat(IPv6SegmentFormat, "){1,7}|:))") + ')(%[0-9a-zA-Z-.:]{1,})?$'); +export default function isIP(str) { + var version = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : ''; + assertString(str); + version = String(version); + + if (!version) { + return isIP(str, 4) || isIP(str, 6); + } + + if (version === '4') { + if (!IPv4AddressRegExp.test(str)) { + return false; + } + + var parts = str.split('.').sort(function (a, b) { + return a - b; + }); + return parts[3] <= 255; + } + + if (version === '6') { + return !!IPv6AddressRegExp.test(str); + } + + return false; +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isIPRange.js b/node_modules/validator/es/lib/isIPRange.js new file mode 100644 index 0000000..209efdd --- /dev/null +++ b/node_modules/validator/es/lib/isIPRange.js @@ -0,0 +1,47 @@ +import assertString from './util/assertString'; +import isIP from './isIP'; +var subnetMaybe = /^\d{1,3}$/; +var v4Subnet = 32; +var v6Subnet = 128; +export default function isIPRange(str) { + var version = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : ''; + assertString(str); + var parts = str.split('/'); // parts[0] -> ip, parts[1] -> subnet + + if (parts.length !== 2) { + return false; + } + + if (!subnetMaybe.test(parts[1])) { + return false; + } // Disallow preceding 0 i.e. 01, 02, ... + + + if (parts[1].length > 1 && parts[1].startsWith('0')) { + return false; + } + + var isValidIP = isIP(parts[0], version); + + if (!isValidIP) { + return false; + } // Define valid subnet according to IP's version + + + var expectedSubnet = null; + + switch (String(version)) { + case '4': + expectedSubnet = v4Subnet; + break; + + case '6': + expectedSubnet = v6Subnet; + break; + + default: + expectedSubnet = isIP(parts[0], '6') ? v6Subnet : v4Subnet; + } + + return parts[1] <= expectedSubnet && parts[1] >= 0; +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isISBN.js b/node_modules/validator/es/lib/isISBN.js new file mode 100644 index 0000000..da2c435 --- /dev/null +++ b/node_modules/validator/es/lib/isISBN.js @@ -0,0 +1,51 @@ +import assertString from './util/assertString'; +var isbn10Maybe = /^(?:[0-9]{9}X|[0-9]{10})$/; +var isbn13Maybe = /^(?:[0-9]{13})$/; +var factor = [1, 3]; +export default function isISBN(str) { + var version = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : ''; + assertString(str); + version = String(version); + + if (!version) { + return isISBN(str, 10) || isISBN(str, 13); + } + + var sanitized = str.replace(/[\s-]+/g, ''); + var checksum = 0; + var i; + + if (version === '10') { + if (!isbn10Maybe.test(sanitized)) { + return false; + } + + for (i = 0; i < 9; i++) { + checksum += (i + 1) * sanitized.charAt(i); + } + + if (sanitized.charAt(9) === 'X') { + checksum += 10 * 10; + } else { + checksum += 10 * sanitized.charAt(9); + } + + if (checksum % 11 === 0) { + return !!sanitized; + } + } else if (version === '13') { + if (!isbn13Maybe.test(sanitized)) { + return false; + } + + for (i = 0; i < 12; i++) { + checksum += factor[i % 2] * sanitized.charAt(i); + } + + if (sanitized.charAt(12) - (10 - checksum % 10) % 10 === 0) { + return !!sanitized; + } + } + + return false; +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isISIN.js b/node_modules/validator/es/lib/isISIN.js new file mode 100644 index 0000000..8e5b826 --- /dev/null +++ b/node_modules/validator/es/lib/isISIN.js @@ -0,0 +1,60 @@ +import assertString from './util/assertString'; +var isin = /^[A-Z]{2}[0-9A-Z]{9}[0-9]$/; // this link details how the check digit is calculated: +// https://www.isin.org/isin-format/. 
it is a little bit +// odd in that it works with digits, not numbers. in order +// to make only one pass through the ISIN characters, the +// each alpha character is handled as 2 characters within +// the loop. + +export default function isISIN(str) { + assertString(str); + + if (!isin.test(str)) { + return false; + } + + var _double = true; + var sum = 0; // convert values + + for (var i = str.length - 2; i >= 0; i--) { + if (str[i] >= 'A' && str[i] <= 'Z') { + var value = str[i].charCodeAt(0) - 55; + var lo = value % 10; + var hi = Math.trunc(value / 10); // letters have two digits, so handle the low order + // and high order digits separately. + + for (var _i = 0, _arr = [lo, hi]; _i < _arr.length; _i++) { + var digit = _arr[_i]; + + if (_double) { + if (digit >= 5) { + sum += 1 + (digit - 5) * 2; + } else { + sum += digit * 2; + } + } else { + sum += digit; + } + + _double = !_double; + } + } else { + var _digit = str[i].charCodeAt(0) - '0'.charCodeAt(0); + + if (_double) { + if (_digit >= 5) { + sum += 1 + (_digit - 5) * 2; + } else { + sum += _digit * 2; + } + } else { + sum += _digit; + } + + _double = !_double; + } + } + + var check = Math.trunc((sum + 9) / 10) * 10 - sum; + return +str[str.length - 1] === check; +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isISO31661Alpha2.js b/node_modules/validator/es/lib/isISO31661Alpha2.js new file mode 100644 index 0000000..e8db93e --- /dev/null +++ b/node_modules/validator/es/lib/isISO31661Alpha2.js @@ -0,0 +1,8 @@ +import assertString from './util/assertString'; // from https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2 + +var validISO31661Alpha2CountriesCodes = new Set(['AD', 'AE', 'AF', 'AG', 'AI', 'AL', 'AM', 'AO', 'AQ', 'AR', 'AS', 'AT', 'AU', 'AW', 'AX', 'AZ', 'BA', 'BB', 'BD', 'BE', 'BF', 'BG', 'BH', 'BI', 'BJ', 'BL', 'BM', 'BN', 'BO', 'BQ', 'BR', 'BS', 'BT', 'BV', 'BW', 'BY', 'BZ', 'CA', 'CC', 'CD', 'CF', 'CG', 'CH', 'CI', 'CK', 'CL', 'CM', 'CN', 'CO', 'CR', 'CU', 'CV', 'CW', 'CX', 'CY', 'CZ', 'DE', 'DJ', 'DK', 'DM', 'DO', 'DZ', 'EC', 'EE', 'EG', 'EH', 'ER', 'ES', 'ET', 'FI', 'FJ', 'FK', 'FM', 'FO', 'FR', 'GA', 'GB', 'GD', 'GE', 'GF', 'GG', 'GH', 'GI', 'GL', 'GM', 'GN', 'GP', 'GQ', 'GR', 'GS', 'GT', 'GU', 'GW', 'GY', 'HK', 'HM', 'HN', 'HR', 'HT', 'HU', 'ID', 'IE', 'IL', 'IM', 'IN', 'IO', 'IQ', 'IR', 'IS', 'IT', 'JE', 'JM', 'JO', 'JP', 'KE', 'KG', 'KH', 'KI', 'KM', 'KN', 'KP', 'KR', 'KW', 'KY', 'KZ', 'LA', 'LB', 'LC', 'LI', 'LK', 'LR', 'LS', 'LT', 'LU', 'LV', 'LY', 'MA', 'MC', 'MD', 'ME', 'MF', 'MG', 'MH', 'MK', 'ML', 'MM', 'MN', 'MO', 'MP', 'MQ', 'MR', 'MS', 'MT', 'MU', 'MV', 'MW', 'MX', 'MY', 'MZ', 'NA', 'NC', 'NE', 'NF', 'NG', 'NI', 'NL', 'NO', 'NP', 'NR', 'NU', 'NZ', 'OM', 'PA', 'PE', 'PF', 'PG', 'PH', 'PK', 'PL', 'PM', 'PN', 'PR', 'PS', 'PT', 'PW', 'PY', 'QA', 'RE', 'RO', 'RS', 'RU', 'RW', 'SA', 'SB', 'SC', 'SD', 'SE', 'SG', 'SH', 'SI', 'SJ', 'SK', 'SL', 'SM', 'SN', 'SO', 'SR', 'SS', 'ST', 'SV', 'SX', 'SY', 'SZ', 'TC', 'TD', 'TF', 'TG', 'TH', 'TJ', 'TK', 'TL', 'TM', 'TN', 'TO', 'TR', 'TT', 'TV', 'TW', 'TZ', 'UA', 'UG', 'UM', 'US', 'UY', 'UZ', 'VA', 'VC', 'VE', 'VG', 'VI', 'VN', 'VU', 'WF', 'WS', 'YE', 'YT', 'ZA', 'ZM', 'ZW']); +export default function isISO31661Alpha2(str) { + assertString(str); + return validISO31661Alpha2CountriesCodes.has(str.toUpperCase()); +} +export var CountryCodes = validISO31661Alpha2CountriesCodes; \ No newline at end of file diff --git a/node_modules/validator/es/lib/isISO31661Alpha3.js b/node_modules/validator/es/lib/isISO31661Alpha3.js new file mode 100644 index 
0000000..41a615b --- /dev/null +++ b/node_modules/validator/es/lib/isISO31661Alpha3.js @@ -0,0 +1,7 @@ +import assertString from './util/assertString'; // from https://en.wikipedia.org/wiki/ISO_3166-1_alpha-3 + +var validISO31661Alpha3CountriesCodes = new Set(['AFG', 'ALA', 'ALB', 'DZA', 'ASM', 'AND', 'AGO', 'AIA', 'ATA', 'ATG', 'ARG', 'ARM', 'ABW', 'AUS', 'AUT', 'AZE', 'BHS', 'BHR', 'BGD', 'BRB', 'BLR', 'BEL', 'BLZ', 'BEN', 'BMU', 'BTN', 'BOL', 'BES', 'BIH', 'BWA', 'BVT', 'BRA', 'IOT', 'BRN', 'BGR', 'BFA', 'BDI', 'KHM', 'CMR', 'CAN', 'CPV', 'CYM', 'CAF', 'TCD', 'CHL', 'CHN', 'CXR', 'CCK', 'COL', 'COM', 'COG', 'COD', 'COK', 'CRI', 'CIV', 'HRV', 'CUB', 'CUW', 'CYP', 'CZE', 'DNK', 'DJI', 'DMA', 'DOM', 'ECU', 'EGY', 'SLV', 'GNQ', 'ERI', 'EST', 'ETH', 'FLK', 'FRO', 'FJI', 'FIN', 'FRA', 'GUF', 'PYF', 'ATF', 'GAB', 'GMB', 'GEO', 'DEU', 'GHA', 'GIB', 'GRC', 'GRL', 'GRD', 'GLP', 'GUM', 'GTM', 'GGY', 'GIN', 'GNB', 'GUY', 'HTI', 'HMD', 'VAT', 'HND', 'HKG', 'HUN', 'ISL', 'IND', 'IDN', 'IRN', 'IRQ', 'IRL', 'IMN', 'ISR', 'ITA', 'JAM', 'JPN', 'JEY', 'JOR', 'KAZ', 'KEN', 'KIR', 'PRK', 'KOR', 'KWT', 'KGZ', 'LAO', 'LVA', 'LBN', 'LSO', 'LBR', 'LBY', 'LIE', 'LTU', 'LUX', 'MAC', 'MKD', 'MDG', 'MWI', 'MYS', 'MDV', 'MLI', 'MLT', 'MHL', 'MTQ', 'MRT', 'MUS', 'MYT', 'MEX', 'FSM', 'MDA', 'MCO', 'MNG', 'MNE', 'MSR', 'MAR', 'MOZ', 'MMR', 'NAM', 'NRU', 'NPL', 'NLD', 'NCL', 'NZL', 'NIC', 'NER', 'NGA', 'NIU', 'NFK', 'MNP', 'NOR', 'OMN', 'PAK', 'PLW', 'PSE', 'PAN', 'PNG', 'PRY', 'PER', 'PHL', 'PCN', 'POL', 'PRT', 'PRI', 'QAT', 'REU', 'ROU', 'RUS', 'RWA', 'BLM', 'SHN', 'KNA', 'LCA', 'MAF', 'SPM', 'VCT', 'WSM', 'SMR', 'STP', 'SAU', 'SEN', 'SRB', 'SYC', 'SLE', 'SGP', 'SXM', 'SVK', 'SVN', 'SLB', 'SOM', 'ZAF', 'SGS', 'SSD', 'ESP', 'LKA', 'SDN', 'SUR', 'SJM', 'SWZ', 'SWE', 'CHE', 'SYR', 'TWN', 'TJK', 'TZA', 'THA', 'TLS', 'TGO', 'TKL', 'TON', 'TTO', 'TUN', 'TUR', 'TKM', 'TCA', 'TUV', 'UGA', 'UKR', 'ARE', 'GBR', 'USA', 'UMI', 'URY', 'UZB', 'VUT', 'VEN', 'VNM', 'VGB', 'VIR', 'WLF', 'ESH', 'YEM', 'ZMB', 'ZWE']); +export default function isISO31661Alpha3(str) { + assertString(str); + return validISO31661Alpha3CountriesCodes.has(str.toUpperCase()); +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isISO4217.js b/node_modules/validator/es/lib/isISO4217.js new file mode 100644 index 0000000..3033a04 --- /dev/null +++ b/node_modules/validator/es/lib/isISO4217.js @@ -0,0 +1,8 @@ +import assertString from './util/assertString'; // from https://en.wikipedia.org/wiki/ISO_4217 + +var validISO4217CurrencyCodes = new Set(['AED', 'AFN', 'ALL', 'AMD', 'ANG', 'AOA', 'ARS', 'AUD', 'AWG', 'AZN', 'BAM', 'BBD', 'BDT', 'BGN', 'BHD', 'BIF', 'BMD', 'BND', 'BOB', 'BOV', 'BRL', 'BSD', 'BTN', 'BWP', 'BYN', 'BZD', 'CAD', 'CDF', 'CHE', 'CHF', 'CHW', 'CLF', 'CLP', 'CNY', 'COP', 'COU', 'CRC', 'CUC', 'CUP', 'CVE', 'CZK', 'DJF', 'DKK', 'DOP', 'DZD', 'EGP', 'ERN', 'ETB', 'EUR', 'FJD', 'FKP', 'GBP', 'GEL', 'GHS', 'GIP', 'GMD', 'GNF', 'GTQ', 'GYD', 'HKD', 'HNL', 'HRK', 'HTG', 'HUF', 'IDR', 'ILS', 'INR', 'IQD', 'IRR', 'ISK', 'JMD', 'JOD', 'JPY', 'KES', 'KGS', 'KHR', 'KMF', 'KPW', 'KRW', 'KWD', 'KYD', 'KZT', 'LAK', 'LBP', 'LKR', 'LRD', 'LSL', 'LYD', 'MAD', 'MDL', 'MGA', 'MKD', 'MMK', 'MNT', 'MOP', 'MRU', 'MUR', 'MVR', 'MWK', 'MXN', 'MXV', 'MYR', 'MZN', 'NAD', 'NGN', 'NIO', 'NOK', 'NPR', 'NZD', 'OMR', 'PAB', 'PEN', 'PGK', 'PHP', 'PKR', 'PLN', 'PYG', 'QAR', 'RON', 'RSD', 'RUB', 'RWF', 'SAR', 'SBD', 'SCR', 'SDG', 'SEK', 'SGD', 'SHP', 'SLL', 'SOS', 'SRD', 'SSP', 'STN', 'SVC', 'SYP', 'SZL', 'THB', 'TJS', 'TMT', 'TND', 'TOP', 'TRY', 'TTD', 'TWD', 
'TZS', 'UAH', 'UGX', 'USD', 'USN', 'UYI', 'UYU', 'UYW', 'UZS', 'VES', 'VND', 'VUV', 'WST', 'XAF', 'XAG', 'XAU', 'XBA', 'XBB', 'XBC', 'XBD', 'XCD', 'XDR', 'XOF', 'XPD', 'XPF', 'XPT', 'XSU', 'XTS', 'XUA', 'XXX', 'YER', 'ZAR', 'ZMW', 'ZWL']); +export default function isISO4217(str) { + assertString(str); + return validISO4217CurrencyCodes.has(str.toUpperCase()); +} +export var CurrencyCodes = validISO4217CurrencyCodes; \ No newline at end of file diff --git a/node_modules/validator/es/lib/isISO8601.js b/node_modules/validator/es/lib/isISO8601.js new file mode 100644 index 0000000..8e71215 --- /dev/null +++ b/node_modules/validator/es/lib/isISO8601.js @@ -0,0 +1,47 @@ +import assertString from './util/assertString'; +/* eslint-disable max-len */ +// from http://goo.gl/0ejHHW + +var iso8601 = /^([\+-]?\d{4}(?!\d{2}\b))((-?)((0[1-9]|1[0-2])(\3([12]\d|0[1-9]|3[01]))?|W([0-4]\d|5[0-3])(-?[1-7])?|(00[1-9]|0[1-9]\d|[12]\d{2}|3([0-5]\d|6[1-6])))([T\s]((([01]\d|2[0-3])((:?)[0-5]\d)?|24:?00)([\.,]\d+(?!:))?)?(\17[0-5]\d([\.,]\d+)?)?([zZ]|([\+-])([01]\d|2[0-3]):?([0-5]\d)?)?)?)?$/; // same as above, except with a strict 'T' separator between date and time + +var iso8601StrictSeparator = /^([\+-]?\d{4}(?!\d{2}\b))((-?)((0[1-9]|1[0-2])(\3([12]\d|0[1-9]|3[01]))?|W([0-4]\d|5[0-3])(-?[1-7])?|(00[1-9]|0[1-9]\d|[12]\d{2}|3([0-5]\d|6[1-6])))([T]((([01]\d|2[0-3])((:?)[0-5]\d)?|24:?00)([\.,]\d+(?!:))?)?(\17[0-5]\d([\.,]\d+)?)?([zZ]|([\+-])([01]\d|2[0-3]):?([0-5]\d)?)?)?)?$/; +/* eslint-enable max-len */ + +var isValidDate = function isValidDate(str) { + // str must have passed the ISO8601 check + // this check is meant to catch invalid dates + // like 2009-02-31 + // first check for ordinal dates + var ordinalMatch = str.match(/^(\d{4})-?(\d{3})([ T]{1}\.*|$)/); + + if (ordinalMatch) { + var oYear = Number(ordinalMatch[1]); + var oDay = Number(ordinalMatch[2]); // if is leap year + + if (oYear % 4 === 0 && oYear % 100 !== 0 || oYear % 400 === 0) return oDay <= 366; + return oDay <= 365; + } + + var match = str.match(/(\d{4})-?(\d{0,2})-?(\d*)/).map(Number); + var year = match[1]; + var month = match[2]; + var day = match[3]; + var monthString = month ? "0".concat(month).slice(-2) : month; + var dayString = day ? "0".concat(day).slice(-2) : day; // create a date object and compare + + var d = new Date("".concat(year, "-").concat(monthString || '01', "-").concat(dayString || '01')); + + if (month && day) { + return d.getUTCFullYear() === year && d.getUTCMonth() + 1 === month && d.getUTCDate() === day; + } + + return true; +}; + +export default function isISO8601(str) { + var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; + assertString(str); + var check = options.strictSeparator ? 
iso8601StrictSeparator.test(str) : iso8601.test(str); + if (check && options.strict) return isValidDate(str); + return check; +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isISRC.js b/node_modules/validator/es/lib/isISRC.js new file mode 100644 index 0000000..275c10a --- /dev/null +++ b/node_modules/validator/es/lib/isISRC.js @@ -0,0 +1,7 @@ +import assertString from './util/assertString'; // see http://isrc.ifpi.org/en/isrc-standard/code-syntax + +var isrc = /^[A-Z]{2}[0-9A-Z]{3}\d{2}\d{5}$/; +export default function isISRC(str) { + assertString(str); + return isrc.test(str); +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isISSN.js b/node_modules/validator/es/lib/isISSN.js new file mode 100644 index 0000000..bebfa9e --- /dev/null +++ b/node_modules/validator/es/lib/isISSN.js @@ -0,0 +1,23 @@ +import assertString from './util/assertString'; +var issn = '^\\d{4}-?\\d{3}[\\dX]$'; +export default function isISSN(str) { + var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; + assertString(str); + var testIssn = issn; + testIssn = options.require_hyphen ? testIssn.replace('?', '') : testIssn; + testIssn = options.case_sensitive ? new RegExp(testIssn) : new RegExp(testIssn, 'i'); + + if (!testIssn.test(str)) { + return false; + } + + var digits = str.replace('-', '').toUpperCase(); + var checksum = 0; + + for (var i = 0; i < digits.length; i++) { + var digit = digits[i]; + checksum += (digit === 'X' ? 10 : +digit) * (8 - i); + } + + return checksum % 11 === 0; +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isIdentityCard.js b/node_modules/validator/es/lib/isIdentityCard.js new file mode 100644 index 0000000..3be4a97 --- /dev/null +++ b/node_modules/validator/es/lib/isIdentityCard.js @@ -0,0 +1,369 @@ +import assertString from './util/assertString'; +import isInt from './isInt'; +var validators = { + PL: function PL(str) { + assertString(str); + var weightOfDigits = { + 1: 1, + 2: 3, + 3: 7, + 4: 9, + 5: 1, + 6: 3, + 7: 7, + 8: 9, + 9: 1, + 10: 3, + 11: 0 + }; + + if (str != null && str.length === 11 && isInt(str, { + allow_leading_zeroes: true + })) { + var digits = str.split('').slice(0, -1); + var sum = digits.reduce(function (acc, digit, index) { + return acc + Number(digit) * weightOfDigits[index + 1]; + }, 0); + var modulo = sum % 10; + var lastDigit = Number(str.charAt(str.length - 1)); + + if (modulo === 0 && lastDigit === 0 || lastDigit === 10 - modulo) { + return true; + } + } + + return false; + }, + ES: function ES(str) { + assertString(str); + var DNI = /^[0-9X-Z][0-9]{7}[TRWAGMYFPDXBNJZSQVHLCKE]$/; + var charsValue = { + X: 0, + Y: 1, + Z: 2 + }; + var controlDigits = ['T', 'R', 'W', 'A', 'G', 'M', 'Y', 'F', 'P', 'D', 'X', 'B', 'N', 'J', 'Z', 'S', 'Q', 'V', 'H', 'L', 'C', 'K', 'E']; // sanitize user input + + var sanitized = str.trim().toUpperCase(); // validate the data structure + + if (!DNI.test(sanitized)) { + return false; + } // validate the control digit + + + var number = sanitized.slice(0, -1).replace(/[X,Y,Z]/g, function (_char) { + return charsValue[_char]; + }); + return sanitized.endsWith(controlDigits[number % 23]); + }, + FI: function FI(str) { + // https://dvv.fi/en/personal-identity-code#:~:text=control%20character%20for%20a-,personal,-identity%20code%20calculated + assertString(str); + + if (str.length !== 11) { + return false; + } + + if (!str.match(/^\d{6}[\-A\+]\d{3}[0-9ABCDEFHJKLMNPRSTUVWXY]{1}$/)) { + return false; + } + + var checkDigits = 
'0123456789ABCDEFHJKLMNPRSTUVWXY'; + var idAsNumber = parseInt(str.slice(0, 6), 10) * 1000 + parseInt(str.slice(7, 10), 10); + var remainder = idAsNumber % 31; + var checkDigit = checkDigits[remainder]; + return checkDigit === str.slice(10, 11); + }, + IN: function IN(str) { + var DNI = /^[1-9]\d{3}\s?\d{4}\s?\d{4}$/; // multiplication table + + var d = [[0, 1, 2, 3, 4, 5, 6, 7, 8, 9], [1, 2, 3, 4, 0, 6, 7, 8, 9, 5], [2, 3, 4, 0, 1, 7, 8, 9, 5, 6], [3, 4, 0, 1, 2, 8, 9, 5, 6, 7], [4, 0, 1, 2, 3, 9, 5, 6, 7, 8], [5, 9, 8, 7, 6, 0, 4, 3, 2, 1], [6, 5, 9, 8, 7, 1, 0, 4, 3, 2], [7, 6, 5, 9, 8, 2, 1, 0, 4, 3], [8, 7, 6, 5, 9, 3, 2, 1, 0, 4], [9, 8, 7, 6, 5, 4, 3, 2, 1, 0]]; // permutation table + + var p = [[0, 1, 2, 3, 4, 5, 6, 7, 8, 9], [1, 5, 7, 6, 2, 8, 3, 0, 9, 4], [5, 8, 0, 3, 7, 9, 6, 1, 4, 2], [8, 9, 1, 6, 0, 4, 3, 5, 2, 7], [9, 4, 5, 3, 1, 2, 6, 8, 7, 0], [4, 2, 8, 6, 5, 7, 3, 9, 0, 1], [2, 7, 9, 3, 8, 0, 6, 4, 1, 5], [7, 0, 4, 6, 9, 1, 3, 2, 5, 8]]; // sanitize user input + + var sanitized = str.trim(); // validate the data structure + + if (!DNI.test(sanitized)) { + return false; + } + + var c = 0; + var invertedArray = sanitized.replace(/\s/g, '').split('').map(Number).reverse(); + invertedArray.forEach(function (val, i) { + c = d[c][p[i % 8][val]]; + }); + return c === 0; + }, + IR: function IR(str) { + if (!str.match(/^\d{10}$/)) return false; + str = "0000".concat(str).substr(str.length - 6); + if (parseInt(str.substr(3, 6), 10) === 0) return false; + var lastNumber = parseInt(str.substr(9, 1), 10); + var sum = 0; + + for (var i = 0; i < 9; i++) { + sum += parseInt(str.substr(i, 1), 10) * (10 - i); + } + + sum %= 11; + return sum < 2 && lastNumber === sum || sum >= 2 && lastNumber === 11 - sum; + }, + IT: function IT(str) { + if (str.length !== 9) return false; + if (str === 'CA00000AA') return false; // https://it.wikipedia.org/wiki/Carta_d%27identit%C3%A0_elettronica_italiana + + return str.search(/C[A-Z][0-9]{5}[A-Z]{2}/i) > -1; + }, + NO: function NO(str) { + var sanitized = str.trim(); + if (isNaN(Number(sanitized))) return false; + if (sanitized.length !== 11) return false; + if (sanitized === '00000000000') return false; // https://no.wikipedia.org/wiki/F%C3%B8dselsnummer + + var f = sanitized.split('').map(Number); + var k1 = (11 - (3 * f[0] + 7 * f[1] + 6 * f[2] + 1 * f[3] + 8 * f[4] + 9 * f[5] + 4 * f[6] + 5 * f[7] + 2 * f[8]) % 11) % 11; + var k2 = (11 - (5 * f[0] + 4 * f[1] + 3 * f[2] + 2 * f[3] + 7 * f[4] + 6 * f[5] + 5 * f[6] + 4 * f[7] + 3 * f[8] + 2 * k1) % 11) % 11; + if (k1 !== f[9] || k2 !== f[10]) return false; + return true; + }, + TH: function TH(str) { + if (!str.match(/^[1-8]\d{12}$/)) return false; // validate check digit + + var sum = 0; + + for (var i = 0; i < 12; i++) { + sum += parseInt(str[i], 10) * (13 - i); + } + + return str[12] === ((11 - sum % 11) % 10).toString(); + }, + LK: function LK(str) { + var old_nic = /^[1-9]\d{8}[vx]$/i; + var new_nic = /^[1-9]\d{11}$/i; + if (str.length === 10 && old_nic.test(str)) return true;else if (str.length === 12 && new_nic.test(str)) return true; + return false; + }, + 'he-IL': function heIL(str) { + var DNI = /^\d{9}$/; // sanitize user input + + var sanitized = str.trim(); // validate the data structure + + if (!DNI.test(sanitized)) { + return false; + } + + var id = sanitized; + var sum = 0, + incNum; + + for (var i = 0; i < id.length; i++) { + incNum = Number(id[i]) * (i % 2 + 1); // Multiply number by 1 or 2 + + sum += incNum > 9 ? 
incNum - 9 : incNum; // Sum the digits up and add to total + } + + return sum % 10 === 0; + }, + 'ar-LY': function arLY(str) { + // Libya National Identity Number NIN is 12 digits, the first digit is either 1 or 2 + var NIN = /^(1|2)\d{11}$/; // sanitize user input + + var sanitized = str.trim(); // validate the data structure + + if (!NIN.test(sanitized)) { + return false; + } + + return true; + }, + 'ar-TN': function arTN(str) { + var DNI = /^\d{8}$/; // sanitize user input + + var sanitized = str.trim(); // validate the data structure + + if (!DNI.test(sanitized)) { + return false; + } + + return true; + }, + 'zh-CN': function zhCN(str) { + var provincesAndCities = ['11', // 北京 + '12', // 天津 + '13', // 河北 + '14', // 山西 + '15', // 内蒙古 + '21', // 辽宁 + '22', // 吉林 + '23', // 黑龙江 + '31', // 上海 + '32', // 江苏 + '33', // 浙江 + '34', // 安徽 + '35', // 福建 + '36', // 江西 + '37', // 山东 + '41', // 河南 + '42', // 湖北 + '43', // 湖南 + '44', // 广东 + '45', // 广西 + '46', // 海南 + '50', // 重庆 + '51', // 四川 + '52', // 贵州 + '53', // 云南 + '54', // 西藏 + '61', // 陕西 + '62', // 甘肃 + '63', // 青海 + '64', // 宁夏 + '65', // 新疆 + '71', // 台湾 + '81', // 香港 + '82', // 澳门 + '91' // 国外 + ]; + var powers = ['7', '9', '10', '5', '8', '4', '2', '1', '6', '3', '7', '9', '10', '5', '8', '4', '2']; + var parityBit = ['1', '0', 'X', '9', '8', '7', '6', '5', '4', '3', '2']; + + var checkAddressCode = function checkAddressCode(addressCode) { + return provincesAndCities.includes(addressCode); + }; + + var checkBirthDayCode = function checkBirthDayCode(birDayCode) { + var yyyy = parseInt(birDayCode.substring(0, 4), 10); + var mm = parseInt(birDayCode.substring(4, 6), 10); + var dd = parseInt(birDayCode.substring(6), 10); + var xdata = new Date(yyyy, mm - 1, dd); + + if (xdata > new Date()) { + return false; // eslint-disable-next-line max-len + } else if (xdata.getFullYear() === yyyy && xdata.getMonth() === mm - 1 && xdata.getDate() === dd) { + return true; + } + + return false; + }; + + var getParityBit = function getParityBit(idCardNo) { + var id17 = idCardNo.substring(0, 17); + var power = 0; + + for (var i = 0; i < 17; i++) { + power += parseInt(id17.charAt(i), 10) * parseInt(powers[i], 10); + } + + var mod = power % 11; + return parityBit[mod]; + }; + + var checkParityBit = function checkParityBit(idCardNo) { + return getParityBit(idCardNo) === idCardNo.charAt(17).toUpperCase(); + }; + + var check15IdCardNo = function check15IdCardNo(idCardNo) { + var check = /^[1-9]\d{7}((0[1-9])|(1[0-2]))((0[1-9])|([1-2][0-9])|(3[0-1]))\d{3}$/.test(idCardNo); + if (!check) return false; + var addressCode = idCardNo.substring(0, 2); + check = checkAddressCode(addressCode); + if (!check) return false; + var birDayCode = "19".concat(idCardNo.substring(6, 12)); + check = checkBirthDayCode(birDayCode); + if (!check) return false; + return true; + }; + + var check18IdCardNo = function check18IdCardNo(idCardNo) { + var check = /^[1-9]\d{5}[1-9]\d{3}((0[1-9])|(1[0-2]))((0[1-9])|([1-2][0-9])|(3[0-1]))\d{3}(\d|x|X)$/.test(idCardNo); + if (!check) return false; + var addressCode = idCardNo.substring(0, 2); + check = checkAddressCode(addressCode); + if (!check) return false; + var birDayCode = idCardNo.substring(6, 14); + check = checkBirthDayCode(birDayCode); + if (!check) return false; + return checkParityBit(idCardNo); + }; + + var checkIdCardNo = function checkIdCardNo(idCardNo) { + var check = /^\d{15}|(\d{17}(\d|x|X))$/.test(idCardNo); + if (!check) return false; + + if (idCardNo.length === 15) { + return check15IdCardNo(idCardNo); + } + + return 
check18IdCardNo(idCardNo); + }; + + return checkIdCardNo(str); + }, + 'zh-TW': function zhTW(str) { + var ALPHABET_CODES = { + A: 10, + B: 11, + C: 12, + D: 13, + E: 14, + F: 15, + G: 16, + H: 17, + I: 34, + J: 18, + K: 19, + L: 20, + M: 21, + N: 22, + O: 35, + P: 23, + Q: 24, + R: 25, + S: 26, + T: 27, + U: 28, + V: 29, + W: 32, + X: 30, + Y: 31, + Z: 33 + }; + var sanitized = str.trim().toUpperCase(); + if (!/^[A-Z][0-9]{9}$/.test(sanitized)) return false; + return Array.from(sanitized).reduce(function (sum, number, index) { + if (index === 0) { + var code = ALPHABET_CODES[number]; + return code % 10 * 9 + Math.floor(code / 10); + } + + if (index === 9) { + return (10 - sum % 10 - Number(number)) % 10 === 0; + } + + return sum + Number(number) * (9 - index); + }, 0); + } +}; +export default function isIdentityCard(str, locale) { + assertString(str); + + if (locale in validators) { + return validators[locale](str); + } else if (locale === 'any') { + for (var key in validators) { + // https://github.com/gotwarlost/istanbul/blob/master/ignoring-code-for-coverage.md#ignoring-code-for-coverage-purposes + // istanbul ignore else + if (validators.hasOwnProperty(key)) { + var validator = validators[key]; + + if (validator(str)) { + return true; + } + } + } + + return false; + } + + throw new Error("Invalid locale '".concat(locale, "'")); +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isIn.js b/node_modules/validator/es/lib/isIn.js new file mode 100644 index 0000000..452429d --- /dev/null +++ b/node_modules/validator/es/lib/isIn.js @@ -0,0 +1,28 @@ +function _typeof(obj) { "@babel/helpers - typeof"; if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); } + +import assertString from './util/assertString'; +import toString from './util/toString'; +export default function isIn(str, options) { + assertString(str); + var i; + + if (Object.prototype.toString.call(options) === '[object Array]') { + var array = []; + + for (i in options) { + // https://github.com/gotwarlost/istanbul/blob/master/ignoring-code-for-coverage.md#ignoring-code-for-coverage-purposes + // istanbul ignore else + if ({}.hasOwnProperty.call(options, i)) { + array[i] = toString(options[i]); + } + } + + return array.indexOf(str) >= 0; + } else if (_typeof(options) === 'object') { + return options.hasOwnProperty(str); + } else if (options && typeof options.indexOf === 'function') { + return options.indexOf(str) >= 0; + } + + return false; +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isInt.js b/node_modules/validator/es/lib/isInt.js new file mode 100644 index 0000000..b58dab4 --- /dev/null +++ b/node_modules/validator/es/lib/isInt.js @@ -0,0 +1,16 @@ +import assertString from './util/assertString'; +var _int = /^(?:[-+]?(?:0|[1-9][0-9]*))$/; +var intLeadingZeroes = /^[-+]?[0-9]+$/; +export default function isInt(str, options) { + assertString(str); + options = options || {}; // Get the regex to use for testing, based on whether + // leading zeroes are allowed or not. + + var regex = options.hasOwnProperty('allow_leading_zeroes') && !options.allow_leading_zeroes ? 
_int : intLeadingZeroes; // Check min/max/lt/gt + + var minCheckPassed = !options.hasOwnProperty('min') || str >= options.min; + var maxCheckPassed = !options.hasOwnProperty('max') || str <= options.max; + var ltCheckPassed = !options.hasOwnProperty('lt') || str < options.lt; + var gtCheckPassed = !options.hasOwnProperty('gt') || str > options.gt; + return regex.test(str) && minCheckPassed && maxCheckPassed && ltCheckPassed && gtCheckPassed; +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isJSON.js b/node_modules/validator/es/lib/isJSON.js new file mode 100644 index 0000000..bd110cc --- /dev/null +++ b/node_modules/validator/es/lib/isJSON.js @@ -0,0 +1,26 @@ +function _typeof(obj) { "@babel/helpers - typeof"; if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); } + +import assertString from './util/assertString'; +import merge from './util/merge'; +var default_json_options = { + allow_primitives: false +}; +export default function isJSON(str, options) { + assertString(str); + + try { + options = merge(options, default_json_options); + var primitives = []; + + if (options.allow_primitives) { + primitives = [null, false, true]; + } + + var obj = JSON.parse(str); + return primitives.includes(obj) || !!obj && _typeof(obj) === 'object'; + } catch (e) { + /* ignore */ + } + + return false; +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isJWT.js b/node_modules/validator/es/lib/isJWT.js new file mode 100644 index 0000000..c1afb7b --- /dev/null +++ b/node_modules/validator/es/lib/isJWT.js @@ -0,0 +1,17 @@ +import assertString from './util/assertString'; +import isBase64 from './isBase64'; +export default function isJWT(str) { + assertString(str); + var dotSplit = str.split('.'); + var len = dotSplit.length; + + if (len > 3 || len < 2) { + return false; + } + + return dotSplit.reduce(function (acc, currElem) { + return acc && isBase64(currElem, { + urlSafe: true + }); + }, true); +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isLatLong.js b/node_modules/validator/es/lib/isLatLong.js new file mode 100644 index 0000000..7362c1d --- /dev/null +++ b/node_modules/validator/es/lib/isLatLong.js @@ -0,0 +1,22 @@ +import assertString from './util/assertString'; +import merge from './util/merge'; +var lat = /^\(?[+-]?(90(\.0+)?|[1-8]?\d(\.\d+)?)$/; +var _long = /^\s?[+-]?(180(\.0+)?|1[0-7]\d(\.\d+)?|\d{1,2}(\.\d+)?)\)?$/; +var latDMS = /^(([1-8]?\d)\D+([1-5]?\d|60)\D+([1-5]?\d|60)(\.\d+)?|90\D+0\D+0)\D+[NSns]?$/i; +var longDMS = /^\s*([1-7]?\d{1,2}\D+([1-5]?\d|60)\D+([1-5]?\d|60)(\.\d+)?|180\D+0\D+0)\D+[EWew]?$/i; +var defaultLatLongOptions = { + checkDMS: false +}; +export default function isLatLong(str, options) { + assertString(str); + options = merge(options, defaultLatLongOptions); + if (!str.includes(',')) return false; + var pair = str.split(','); + if (pair[0].startsWith('(') && !pair[1].endsWith(')') || pair[1].endsWith(')') && !pair[0].startsWith('(')) return false; + + if (options.checkDMS) { + return latDMS.test(pair[0]) && longDMS.test(pair[1]); + } + + return lat.test(pair[0]) && _long.test(pair[1]); +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isLength.js b/node_modules/validator/es/lib/isLength.js new file mode 100644 
index 0000000..3b1a5fd --- /dev/null +++ b/node_modules/validator/es/lib/isLength.js @@ -0,0 +1,23 @@ +function _typeof(obj) { "@babel/helpers - typeof"; if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); } + +import assertString from './util/assertString'; +/* eslint-disable prefer-rest-params */ + +export default function isLength(str, options) { + assertString(str); + var min; + var max; + + if (_typeof(options) === 'object') { + min = options.min || 0; + max = options.max; + } else { + // backwards compatibility: isLength(str, min [, max]) + min = arguments[1] || 0; + max = arguments[2]; + } + + var surrogatePairs = str.match(/[\uD800-\uDBFF][\uDC00-\uDFFF]/g) || []; + var len = str.length - surrogatePairs.length; + return len >= min && (typeof max === 'undefined' || len <= max); +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isLicensePlate.js b/node_modules/validator/es/lib/isLicensePlate.js new file mode 100644 index 0000000..c2a26bf --- /dev/null +++ b/node_modules/validator/es/lib/isLicensePlate.js @@ -0,0 +1,44 @@ +import assertString from './util/assertString'; +var validators = { + 'cs-CZ': function csCZ(str) { + return /^(([ABCDEFHKIJKLMNPRSTUVXYZ]|[0-9])-?){5,8}$/.test(str); + }, + 'de-DE': function deDE(str) { + return /^((AW|UL|AK|GA|AÖ|LF|AZ|AM|AS|ZE|AN|AB|A|KG|KH|BA|EW|BZ|HY|KM|BT|HP|B|BC|BI|BO|FN|TT|ÜB|BN|AH|BS|FR|HB|ZZ|BB|BK|BÖ|OC|OK|CW|CE|C|CO|LH|CB|KW|LC|LN|DA|DI|DE|DH|SY|NÖ|DO|DD|DU|DN|D|EI|EA|EE|FI|EM|EL|EN|PF|ED|EF|ER|AU|ZP|E|ES|NT|EU|FL|FO|FT|FF|F|FS|FD|FÜ|GE|G|GI|GF|GS|ZR|GG|GP|GR|NY|ZI|GÖ|GZ|GT|HA|HH|HM|HU|WL|HZ|WR|RN|HK|HD|HN|HS|GK|HE|HF|RZ|HI|HG|HO|HX|IK|IL|IN|J|JL|KL|KA|KS|KF|KE|KI|KT|KO|KN|KR|KC|KU|K|LD|LL|LA|L|OP|LM|LI|LB|LU|LÖ|HL|LG|MD|GN|MZ|MA|ML|MR|MY|AT|DM|MC|NZ|RM|RG|MM|ME|MB|MI|FG|DL|HC|MW|RL|MK|MG|MÜ|WS|MH|M|MS|NU|NB|ND|NM|NK|NW|NR|NI|NF|DZ|EB|OZ|TG|TO|N|OA|GM|OB|CA|EH|FW|OF|OL|OE|OG|BH|LR|OS|AA|GD|OH|KY|NP|WK|PB|PA|PE|PI|PS|P|PM|PR|RA|RV|RE|R|H|SB|WN|RS|RD|RT|BM|NE|GV|RP|SU|GL|RO|GÜ|RH|EG|RW|PN|SK|MQ|RU|SZ|RI|SL|SM|SC|HR|FZ|VS|SW|SN|CR|SE|SI|SO|LP|SG|NH|SP|IZ|ST|BF|TE|HV|OD|SR|S|AC|DW|ZW|TF|TS|TR|TÜ|UM|PZ|TP|UE|UN|UH|MN|KK|VB|V|AE|PL|RC|VG|GW|PW|VR|VK|KB|WA|WT|BE|WM|WE|AP|MO|WW|FB|WZ|WI|WB|JE|WF|WO|W|WÜ|BL|Z|GC)[- ]?[A-Z]{1,2}[- 
]?\d{1,4}|(AIC|FDB|ABG|SLN|SAW|KLZ|BUL|ESB|NAB|SUL|WST|ABI|AZE|BTF|KÖT|DKB|FEU|ROT|ALZ|SMÜ|WER|AUR|NOR|DÜW|BRK|HAB|TÖL|WOR|BAD|BAR|BER|BIW|EBS|KEM|MÜB|PEG|BGL|BGD|REI|WIL|BKS|BIR|WAT|BOR|BOH|BOT|BRB|BLK|HHM|NEB|NMB|WSF|LEO|HDL|WMS|WZL|BÜS|CHA|KÖZ|ROD|WÜM|CLP|NEC|COC|ZEL|COE|CUX|DAH|LDS|DEG|DEL|RSL|DLG|DGF|LAN|HEI|MED|DON|KIB|ROK|JÜL|MON|SLE|EBE|EIC|HIG|WBS|BIT|PRÜ|LIB|EMD|WIT|ERH|HÖS|ERZ|ANA|ASZ|MAB|MEK|STL|SZB|FDS|HCH|HOR|WOL|FRG|GRA|WOS|FRI|FFB|GAP|GER|BRL|CLZ|GTH|NOH|HGW|GRZ|LÖB|NOL|WSW|DUD|HMÜ|OHA|KRU|HAL|HAM|HBS|QLB|HVL|NAU|HAS|EBN|GEO|HOH|HDH|ERK|HER|WAN|HEF|ROF|HBN|ALF|HSK|USI|NAI|REH|SAN|KÜN|ÖHR|HOL|WAR|ARN|BRG|GNT|HOG|WOH|KEH|MAI|PAR|RID|ROL|KLE|GEL|KUS|KYF|ART|SDH|LDK|DIL|MAL|VIB|LER|BNA|GHA|GRM|MTL|WUR|LEV|LIF|STE|WEL|LIP|VAI|LUP|HGN|LBZ|LWL|PCH|STB|DAN|MKK|SLÜ|MSP|TBB|MGH|MTK|BIN|MSH|EIL|HET|SGH|BID|MYK|MSE|MST|MÜR|WRN|MEI|GRH|RIE|MZG|MIL|OBB|BED|FLÖ|MOL|FRW|SEE|SRB|AIB|MOS|BCH|ILL|SOB|NMS|NEA|SEF|UFF|NEW|VOH|NDH|TDO|NWM|GDB|GVM|WIS|NOM|EIN|GAN|LAU|HEB|OHV|OSL|SFB|ERB|LOS|BSK|KEL|BSB|MEL|WTL|OAL|FÜS|MOD|OHZ|OPR|BÜR|PAF|PLÖ|CAS|GLA|REG|VIT|ECK|SIM|GOA|EMS|DIZ|GOH|RÜD|SWA|NES|KÖN|MET|LRO|BÜZ|DBR|ROS|TET|HRO|ROW|BRV|HIP|PAN|GRI|SHK|EIS|SRO|SOK|LBS|SCZ|MER|QFT|SLF|SLS|HOM|SLK|ASL|BBG|SBK|SFT|SHG|MGN|MEG|ZIG|SAD|NEN|OVI|SHA|BLB|SIG|SON|SPN|FOR|GUB|SPB|IGB|WND|STD|STA|SDL|OBG|HST|BOG|SHL|PIR|FTL|SEB|SÖM|SÜW|TIR|SAB|TUT|ANG|SDT|LÜN|LSZ|MHL|VEC|VER|VIE|OVL|ANK|OVP|SBG|UEM|UER|WLG|GMN|NVP|RDG|RÜG|DAU|FKB|WAF|WAK|SLZ|WEN|SOG|APD|WUG|GUN|ESW|WIZ|WES|DIN|BRA|BÜD|WHV|HWI|GHC|WTM|WOB|WUN|MAK|SEL|OCH|HOT|WDA)[- ]?(([A-Z][- ]?\d{1,4})|([A-Z]{2}[- ]?\d{1,3})))[- ]?(E|H)?$/.test(str); + }, + 'de-LI': function deLI(str) { + return /^FL[- ]?\d{1,5}[UZ]?$/.test(str); + }, + 'fi-FI': function fiFI(str) { + return /^(?=.{4,7})(([A-Z]{1,3}|[0-9]{1,3})[\s-]?([A-Z]{1,3}|[0-9]{1,5}))$/.test(str); + }, + 'pt-PT': function ptPT(str) { + return /^([A-Z]{2}|[0-9]{2})[ -·]?([A-Z]{2}|[0-9]{2})[ -·]?([A-Z]{2}|[0-9]{2})$/.test(str); + }, + 'sq-AL': function sqAL(str) { + return /^[A-Z]{2}[- ]?((\d{3}[- ]?(([A-Z]{2})|T))|(R[- ]?\d{3}))$/.test(str); + }, + 'pt-BR': function ptBR(str) { + return /^[A-Z]{3}[ -]?[0-9][A-Z][0-9]{2}|[A-Z]{3}[ -]?[0-9]{4}$/.test(str); + } +}; +export default function isLicensePlate(str, locale) { + assertString(str); + + if (locale in validators) { + return validators[locale](str); + } else if (locale === 'any') { + for (var key in validators) { + /* eslint guard-for-in: 0 */ + var validator = validators[key]; + + if (validator(str)) { + return true; + } + } + + return false; + } + + throw new Error("Invalid locale '".concat(locale, "'")); +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isLocale.js b/node_modules/validator/es/lib/isLocale.js new file mode 100644 index 0000000..5033392 --- /dev/null +++ b/node_modules/validator/es/lib/isLocale.js @@ -0,0 +1,11 @@ +import assertString from './util/assertString'; +var localeReg = /^[A-Za-z]{2,4}([_-]([A-Za-z]{4}|[\d]{3}))?([_-]([A-Za-z]{2}|[\d]{3}))?$/; +export default function isLocale(str) { + assertString(str); + + if (str === 'en_US_POSIX' || str === 'ca_ES_VALENCIA') { + return true; + } + + return localeReg.test(str); +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isLowercase.js b/node_modules/validator/es/lib/isLowercase.js new file mode 100644 index 0000000..0347856 --- /dev/null +++ b/node_modules/validator/es/lib/isLowercase.js @@ -0,0 +1,5 @@ +import assertString from './util/assertString'; +export default function isLowercase(str) { + assertString(str); + return str === 
str.toLowerCase(); +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isMACAddress.js b/node_modules/validator/es/lib/isMACAddress.js new file mode 100644 index 0000000..ac23503 --- /dev/null +++ b/node_modules/validator/es/lib/isMACAddress.js @@ -0,0 +1,16 @@ +import assertString from './util/assertString'; +var macAddress = /^(?:[0-9a-fA-F]{2}([-:\s]))([0-9a-fA-F]{2}\1){4}([0-9a-fA-F]{2})$/; +var macAddressNoSeparators = /^([0-9a-fA-F]){12}$/; +var macAddressWithDots = /^([0-9a-fA-F]{4}\.){2}([0-9a-fA-F]{4})$/; +export default function isMACAddress(str, options) { + assertString(str); + /** + * @deprecated `no_colons` TODO: remove it in the next major + */ + + if (options && (options.no_colons || options.no_separators)) { + return macAddressNoSeparators.test(str); + } + + return macAddress.test(str) || macAddressWithDots.test(str); +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isMD5.js b/node_modules/validator/es/lib/isMD5.js new file mode 100644 index 0000000..701ed7b --- /dev/null +++ b/node_modules/validator/es/lib/isMD5.js @@ -0,0 +1,6 @@ +import assertString from './util/assertString'; +var md5 = /^[a-f0-9]{32}$/; +export default function isMD5(str) { + assertString(str); + return md5.test(str); +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isMagnetURI.js b/node_modules/validator/es/lib/isMagnetURI.js new file mode 100644 index 0000000..ee4c120 --- /dev/null +++ b/node_modules/validator/es/lib/isMagnetURI.js @@ -0,0 +1,6 @@ +import assertString from './util/assertString'; +var magnetURI = /^magnet:\?xt(?:\.1)?=urn:(?:aich|bitprint|btih|ed2k|ed2khash|kzhash|md5|sha1|tree:tiger):[a-z0-9]{32}(?:[a-z0-9]{8})?($|&)/i; +export default function isMagnetURI(url) { + assertString(url); + return magnetURI.test(url.trim()); +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isMimeType.js b/node_modules/validator/es/lib/isMimeType.js new file mode 100644 index 0000000..cddaf00 --- /dev/null +++ b/node_modules/validator/es/lib/isMimeType.js @@ -0,0 +1,39 @@ +import assertString from './util/assertString'; +/* + Checks if the provided string matches to a correct Media type format (MIME type) + + This function only checks is the string format follows the + etablished rules by the according RFC specifications. + This function supports 'charset' in textual media types + (https://tools.ietf.org/html/rfc6657). + + This function does not check against all the media types listed + by the IANA (https://www.iana.org/assignments/media-types/media-types.xhtml) + because of lightness purposes : it would require to include + all these MIME types in this librairy, which would weigh it + significantly. This kind of effort maybe is not worth for the use that + this function has in this entire librairy. + + More informations in the RFC specifications : + - https://tools.ietf.org/html/rfc2045 + - https://tools.ietf.org/html/rfc2046 + - https://tools.ietf.org/html/rfc7231#section-3.1.1.1 + - https://tools.ietf.org/html/rfc7231#section-3.1.1.5 +*/ +// Match simple MIME types +// NB : +// Subtype length must not exceed 100 characters. +// This rule does not comply to the RFC specs (what is the max length ?). 
+ +var mimeTypeSimple = /^(application|audio|font|image|message|model|multipart|text|video)\/[a-zA-Z0-9\.\-\+]{1,100}$/i; // eslint-disable-line max-len +// Handle "charset" in "text/*" + +var mimeTypeText = /^text\/[a-zA-Z0-9\.\-\+]{1,100};\s?charset=("[a-zA-Z0-9\.\-\+\s]{0,70}"|[a-zA-Z0-9\.\-\+]{0,70})(\s?\([a-zA-Z0-9\.\-\+\s]{1,20}\))?$/i; // eslint-disable-line max-len +// Handle "boundary" in "multipart/*" + +var mimeTypeMultipart = /^multipart\/[a-zA-Z0-9\.\-\+]{1,100}(;\s?(boundary|charset)=("[a-zA-Z0-9\.\-\+\s]{0,70}"|[a-zA-Z0-9\.\-\+]{0,70})(\s?\([a-zA-Z0-9\.\-\+\s]{1,20}\))?){0,2}$/i; // eslint-disable-line max-len + +export default function isMimeType(str) { + assertString(str); + return mimeTypeSimple.test(str) || mimeTypeText.test(str) || mimeTypeMultipart.test(str); +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isMobilePhone.js b/node_modules/validator/es/lib/isMobilePhone.js new file mode 100644 index 0000000..ae0f04f --- /dev/null +++ b/node_modules/validator/es/lib/isMobilePhone.js @@ -0,0 +1,187 @@ +import assertString from './util/assertString'; +/* eslint-disable max-len */ + +var phones = { + 'am-AM': /^(\+?374|0)((10|[9|7][0-9])\d{6}$|[2-4]\d{7}$)/, + 'ar-AE': /^((\+?971)|0)?5[024568]\d{7}$/, + 'ar-BH': /^(\+?973)?(3|6)\d{7}$/, + 'ar-DZ': /^(\+?213|0)(5|6|7)\d{8}$/, + 'ar-LB': /^(\+?961)?((3|81)\d{6}|7\d{7})$/, + 'ar-EG': /^((\+?20)|0)?1[0125]\d{8}$/, + 'ar-IQ': /^(\+?964|0)?7[0-9]\d{8}$/, + 'ar-JO': /^(\+?962|0)?7[789]\d{7}$/, + 'ar-KW': /^(\+?965)[569]\d{7}$/, + 'ar-LY': /^((\+?218)|0)?(9[1-6]\d{7}|[1-8]\d{7,9})$/, + 'ar-MA': /^(?:(?:\+|00)212|0)[5-7]\d{8}$/, + 'ar-OM': /^((\+|00)968)?(9[1-9])\d{6}$/, + 'ar-PS': /^(\+?970|0)5[6|9](\d{7})$/, + 'ar-SA': /^(!?(\+?966)|0)?5\d{8}$/, + 'ar-SY': /^(!?(\+?963)|0)?9\d{8}$/, + 'ar-TN': /^(\+?216)?[2459]\d{7}$/, + 'az-AZ': /^(\+994|0)(5[015]|7[07]|99)\d{7}$/, + 'bs-BA': /^((((\+|00)3876)|06))((([0-3]|[5-6])\d{6})|(4\d{7}))$/, + 'be-BY': /^(\+?375)?(24|25|29|33|44)\d{7}$/, + 'bg-BG': /^(\+?359|0)?8[789]\d{7}$/, + 'bn-BD': /^(\+?880|0)1[13456789][0-9]{8}$/, + 'ca-AD': /^(\+376)?[346]\d{5}$/, + 'cs-CZ': /^(\+?420)? 
?[1-9][0-9]{2} ?[0-9]{3} ?[0-9]{3}$/, + 'da-DK': /^(\+?45)?\s?\d{2}\s?\d{2}\s?\d{2}\s?\d{2}$/, + 'de-DE': /^((\+49|0)[1|3])([0|5][0-45-9]\d|6([23]|0\d?)|7([0-57-9]|6\d))\d{7,9}$/, + 'de-AT': /^(\+43|0)\d{1,4}\d{3,12}$/, + 'de-CH': /^(\+41|0)([1-9])\d{1,9}$/, + 'de-LU': /^(\+352)?((6\d1)\d{6})$/, + 'dv-MV': /^(\+?960)?(7[2-9]|91|9[3-9])\d{7}$/, + 'el-GR': /^(\+?30|0)?(69\d{8})$/, + 'en-AU': /^(\+?61|0)4\d{8}$/, + 'en-BM': /^(\+?1)?441(((3|7)\d{6}$)|(5[0-3][0-9]\d{4}$)|(59\d{5}))/, + 'en-GB': /^(\+?44|0)7\d{9}$/, + 'en-GG': /^(\+?44|0)1481\d{6}$/, + 'en-GH': /^(\+233|0)(20|50|24|54|27|57|26|56|23|28|55|59)\d{7}$/, + 'en-GY': /^(\+592|0)6\d{6}$/, + 'en-HK': /^(\+?852[-\s]?)?[456789]\d{3}[-\s]?\d{4}$/, + 'en-MO': /^(\+?853[-\s]?)?[6]\d{3}[-\s]?\d{4}$/, + 'en-IE': /^(\+?353|0)8[356789]\d{7}$/, + 'en-IN': /^(\+?91|0)?[6789]\d{9}$/, + 'en-KE': /^(\+?254|0)(7|1)\d{8}$/, + 'en-KI': /^((\+686|686)?)?( )?((6|7)(2|3|8)[0-9]{6})$/, + 'en-MT': /^(\+?356|0)?(99|79|77|21|27|22|25)[0-9]{6}$/, + 'en-MU': /^(\+?230|0)?\d{8}$/, + 'en-NA': /^(\+?264|0)(6|8)\d{7}$/, + 'en-NG': /^(\+?234|0)?[789]\d{9}$/, + 'en-NZ': /^(\+?64|0)[28]\d{7,9}$/, + 'en-PK': /^((00|\+)?92|0)3[0-6]\d{8}$/, + 'en-PH': /^(09|\+639)\d{9}$/, + 'en-RW': /^(\+?250|0)?[7]\d{8}$/, + 'en-SG': /^(\+65)?[3689]\d{7}$/, + 'en-SL': /^(\+?232|0)\d{8}$/, + 'en-TZ': /^(\+?255|0)?[67]\d{8}$/, + 'en-UG': /^(\+?256|0)?[7]\d{8}$/, + 'en-US': /^((\+1|1)?( |-)?)?(\([2-9][0-9]{2}\)|[2-9][0-9]{2})( |-)?([2-9][0-9]{2}( |-)?[0-9]{4})$/, + 'en-ZA': /^(\+?27|0)\d{9}$/, + 'en-ZM': /^(\+?26)?09[567]\d{7}$/, + 'en-ZW': /^(\+263)[0-9]{9}$/, + 'en-BW': /^(\+?267)?(7[1-8]{1})\d{6}$/, + 'es-AR': /^\+?549(11|[2368]\d)\d{8}$/, + 'es-BO': /^(\+?591)?(6|7)\d{7}$/, + 'es-CO': /^(\+?57)?3(0(0|1|2|4|5)|1\d|2[0-4]|5(0|1))\d{7}$/, + 'es-CL': /^(\+?56|0)[2-9]\d{1}\d{7}$/, + 'es-CR': /^(\+506)?[2-8]\d{7}$/, + 'es-CU': /^(\+53|0053)?5\d{7}/, + 'es-DO': /^(\+?1)?8[024]9\d{7}$/, + 'es-HN': /^(\+?504)?[9|8]\d{7}$/, + 'es-EC': /^(\+?593|0)([2-7]|9[2-9])\d{7}$/, + 'es-ES': /^(\+?34)?[6|7]\d{8}$/, + 'es-PE': /^(\+?51)?9\d{8}$/, + 'es-MX': /^(\+?52)?(1|01)?\d{10,11}$/, + 'es-PA': /^(\+?507)\d{7,8}$/, + 'es-PY': /^(\+?595|0)9[9876]\d{7}$/, + 'es-SV': /^(\+?503)?[67]\d{7}$/, + 'es-UY': /^(\+598|0)9[1-9][\d]{6}$/, + 'es-VE': /^(\+?58)?(2|4)\d{9}$/, + 'et-EE': /^(\+?372)?\s?(5|8[1-4])\s?([0-9]\s?){6,7}$/, + 'fa-IR': /^(\+?98[\-\s]?|0)9[0-39]\d[\-\s]?\d{3}[\-\s]?\d{4}$/, + 'fi-FI': /^(\+?358|0)\s?(4(0|1|2|4|5|6)?|50)\s?(\d\s?){4,8}\d$/, + 'fj-FJ': /^(\+?679)?\s?\d{3}\s?\d{4}$/, + 'fo-FO': /^(\+?298)?\s?\d{2}\s?\d{2}\s?\d{2}$/, + 'fr-BF': /^(\+226|0)[67]\d{7}$/, + 'fr-CM': /^(\+?237)6[0-9]{8}$/, + 'fr-FR': /^(\+?33|0)[67]\d{8}$/, + 'fr-GF': /^(\+?594|0|00594)[67]\d{8}$/, + 'fr-GP': /^(\+?590|0|00590)[67]\d{8}$/, + 'fr-MQ': /^(\+?596|0|00596)[67]\d{8}$/, + 'fr-PF': /^(\+?689)?8[789]\d{6}$/, + 'fr-RE': /^(\+?262|0|00262)[67]\d{8}$/, + 'he-IL': /^(\+972|0)([23489]|5[012345689]|77)[1-9]\d{6}$/, + 'hu-HU': /^(\+?36|06)(20|30|31|50|70)\d{7}$/, + 'id-ID': /^(\+?62|0)8(1[123456789]|2[1238]|3[1238]|5[12356789]|7[78]|9[56789]|8[123456789])([\s?|\d]{5,11})$/, + 'it-IT': /^(\+?39)?\s?3\d{2} ?\d{6,7}$/, + 'it-SM': /^((\+378)|(0549)|(\+390549)|(\+3780549))?6\d{5,9}$/, + 'ja-JP': /^(\+81[ \-]?(\(0\))?|0)[6789]0[ \-]?\d{4}[ \-]?\d{4}$/, + 'ka-GE': /^(\+?995)?(5|79)\d{7}$/, + 'kk-KZ': /^(\+?7|8)?7\d{9}$/, + 'kl-GL': /^(\+?299)?\s?\d{2}\s?\d{2}\s?\d{2}$/, + 'ko-KR': /^((\+?82)[ \-]?)?0?1([0|1|6|7|8|9]{1})[ \-]?\d{3,4}[ \-]?\d{4}$/, + 'lt-LT': /^(\+370|8)\d{8}$/, + 'lv-LV': /^(\+?371)2\d{7}$/, + 'ms-MY': 
/^(\+?6?01){1}(([0145]{1}(\-|\s)?\d{7,8})|([236789]{1}(\s|\-)?\d{7}))$/, + 'mz-MZ': /^(\+?258)?8[234567]\d{7}$/, + 'nb-NO': /^(\+?47)?[49]\d{7}$/, + 'ne-NP': /^(\+?977)?9[78]\d{8}$/, + 'nl-BE': /^(\+?32|0)4\d{8}$/, + 'nl-NL': /^(((\+|00)?31\(0\))|((\+|00)?31)|0)6{1}\d{8}$/, + 'nn-NO': /^(\+?47)?[49]\d{7}$/, + 'pl-PL': /^(\+?48)? ?[5-8]\d ?\d{3} ?\d{2} ?\d{2}$/, + 'pt-BR': /^((\+?55\ ?[1-9]{2}\ ?)|(\+?55\ ?\([1-9]{2}\)\ ?)|(0[1-9]{2}\ ?)|(\([1-9]{2}\)\ ?)|([1-9]{2}\ ?))((\d{4}\-?\d{4})|(9[2-9]{1}\d{3}\-?\d{4}))$/, + 'pt-PT': /^(\+?351)?9[1236]\d{7}$/, + 'pt-AO': /^(\+244)\d{9}$/, + 'ro-RO': /^(\+?4?0)\s?7\d{2}(\/|\s|\.|\-)?\d{3}(\s|\.|\-)?\d{3}$/, + 'ru-RU': /^(\+?7|8)?9\d{9}$/, + 'si-LK': /^(?:0|94|\+94)?(7(0|1|2|4|5|6|7|8)( |-)?)\d{7}$/, + 'sl-SI': /^(\+386\s?|0)(\d{1}\s?\d{3}\s?\d{2}\s?\d{2}|\d{2}\s?\d{3}\s?\d{3})$/, + 'sk-SK': /^(\+?421)? ?[1-9][0-9]{2} ?[0-9]{3} ?[0-9]{3}$/, + 'sq-AL': /^(\+355|0)6[789]\d{6}$/, + 'sr-RS': /^(\+3816|06)[- \d]{5,9}$/, + 'sv-SE': /^(\+?46|0)[\s\-]?7[\s\-]?[02369]([\s\-]?\d){7}$/, + 'tg-TJ': /^(\+?992)?[5][5]\d{7}$/, + 'th-TH': /^(\+66|66|0)\d{9}$/, + 'tr-TR': /^(\+?90|0)?5\d{9}$/, + 'tk-TM': /^(\+993|993|8)\d{8}$/, + 'uk-UA': /^(\+?38|8)?0\d{9}$/, + 'uz-UZ': /^(\+?998)?(6[125-79]|7[1-69]|88|9\d)\d{7}$/, + 'vi-VN': /^((\+?84)|0)((3([2-9]))|(5([25689]))|(7([0|6-9]))|(8([1-9]))|(9([0-9])))([0-9]{7})$/, + 'zh-CN': /^((\+|00)86)?(1[3-9]|9[28])\d{9}$/, + 'zh-TW': /^(\+?886\-?|0)?9\d{8}$/, + 'dz-BT': /^(\+?975|0)?(17|16|77|02)\d{6}$/ +}; +/* eslint-enable max-len */ +// aliases + +phones['en-CA'] = phones['en-US']; +phones['fr-CA'] = phones['en-CA']; +phones['fr-BE'] = phones['nl-BE']; +phones['zh-HK'] = phones['en-HK']; +phones['zh-MO'] = phones['en-MO']; +phones['ga-IE'] = phones['en-IE']; +phones['fr-CH'] = phones['de-CH']; +phones['it-CH'] = phones['fr-CH']; +export default function isMobilePhone(str, locale, options) { + assertString(str); + + if (options && options.strictMode && !str.startsWith('+')) { + return false; + } + + if (Array.isArray(locale)) { + return locale.some(function (key) { + // https://github.com/gotwarlost/istanbul/blob/master/ignoring-code-for-coverage.md#ignoring-code-for-coverage-purposes + // istanbul ignore else + if (phones.hasOwnProperty(key)) { + var phone = phones[key]; + + if (phone.test(str)) { + return true; + } + } + + return false; + }); + } else if (locale in phones) { + return phones[locale].test(str); // alias falsey locale as 'any' + } else if (!locale || locale === 'any') { + for (var key in phones) { + // istanbul ignore else + if (phones.hasOwnProperty(key)) { + var phone = phones[key]; + + if (phone.test(str)) { + return true; + } + } + } + + return false; + } + + throw new Error("Invalid locale '".concat(locale, "'")); +} +export var locales = Object.keys(phones); \ No newline at end of file diff --git a/node_modules/validator/es/lib/isMongoId.js b/node_modules/validator/es/lib/isMongoId.js new file mode 100644 index 0000000..fc87b89 --- /dev/null +++ b/node_modules/validator/es/lib/isMongoId.js @@ -0,0 +1,6 @@ +import assertString from './util/assertString'; +import isHexadecimal from './isHexadecimal'; +export default function isMongoId(str) { + assertString(str); + return isHexadecimal(str) && str.length === 24; +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isMultibyte.js b/node_modules/validator/es/lib/isMultibyte.js new file mode 100644 index 0000000..7a13857 --- /dev/null +++ b/node_modules/validator/es/lib/isMultibyte.js @@ -0,0 +1,10 @@ +import assertString from 
'./util/assertString'; +/* eslint-disable no-control-regex */ + +var multibyte = /[^\x00-\x7F]/; +/* eslint-enable no-control-regex */ + +export default function isMultibyte(str) { + assertString(str); + return multibyte.test(str); +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isNumeric.js b/node_modules/validator/es/lib/isNumeric.js new file mode 100644 index 0000000..398fb82 --- /dev/null +++ b/node_modules/validator/es/lib/isNumeric.js @@ -0,0 +1,12 @@ +import assertString from './util/assertString'; +import { decimal } from './alpha'; +var numericNoSymbols = /^[0-9]+$/; +export default function isNumeric(str, options) { + assertString(str); + + if (options && options.no_symbols) { + return numericNoSymbols.test(str); + } + + return new RegExp("^[+-]?([0-9]*[".concat((options || {}).locale ? decimal[options.locale] : '.', "])?[0-9]+$")).test(str); +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isOctal.js b/node_modules/validator/es/lib/isOctal.js new file mode 100644 index 0000000..3ec51dd --- /dev/null +++ b/node_modules/validator/es/lib/isOctal.js @@ -0,0 +1,6 @@ +import assertString from './util/assertString'; +var octal = /^(0o)?[0-7]+$/i; +export default function isOctal(str) { + assertString(str); + return octal.test(str); +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isPassportNumber.js b/node_modules/validator/es/lib/isPassportNumber.js new file mode 100644 index 0000000..f626e84 --- /dev/null +++ b/node_modules/validator/es/lib/isPassportNumber.js @@ -0,0 +1,126 @@ +import assertString from './util/assertString'; +/** + * Reference: + * https://en.wikipedia.org/ -- Wikipedia + * https://docs.microsoft.com/en-us/microsoft-365/compliance/eu-passport-number -- EU Passport Number + * https://countrycode.org/ -- Country Codes + */ + +var passportRegexByCountryCode = { + AM: /^[A-Z]{2}\d{7}$/, + // ARMENIA + AR: /^[A-Z]{3}\d{6}$/, + // ARGENTINA + AT: /^[A-Z]\d{7}$/, + // AUSTRIA + AU: /^[A-Z]\d{7}$/, + // AUSTRALIA + BE: /^[A-Z]{2}\d{6}$/, + // BELGIUM + BG: /^\d{9}$/, + // BULGARIA + BR: /^[A-Z]{2}\d{6}$/, + // BRAZIL + BY: /^[A-Z]{2}\d{7}$/, + // BELARUS + CA: /^[A-Z]{2}\d{6}$/, + // CANADA + CH: /^[A-Z]\d{7}$/, + // SWITZERLAND + CN: /^G\d{8}$|^E(?![IO])[A-Z0-9]\d{7}$/, + // CHINA [G=Ordinary, E=Electronic] followed by 8-digits, or E followed by any UPPERCASE letter (except I and O) followed by 7 digits + CY: /^[A-Z](\d{6}|\d{8})$/, + // CYPRUS + CZ: /^\d{8}$/, + // CZECH REPUBLIC + DE: /^[CFGHJKLMNPRTVWXYZ0-9]{9}$/, + // GERMANY + DK: /^\d{9}$/, + // DENMARK + DZ: /^\d{9}$/, + // ALGERIA + EE: /^([A-Z]\d{7}|[A-Z]{2}\d{7})$/, + // ESTONIA (K followed by 7-digits), e-passports have 2 UPPERCASE followed by 7 digits + ES: /^[A-Z0-9]{2}([A-Z0-9]?)\d{6}$/, + // SPAIN + FI: /^[A-Z]{2}\d{7}$/, + // FINLAND + FR: /^\d{2}[A-Z]{2}\d{5}$/, + // FRANCE + GB: /^\d{9}$/, + // UNITED KINGDOM + GR: /^[A-Z]{2}\d{7}$/, + // GREECE + HR: /^\d{9}$/, + // CROATIA + HU: /^[A-Z]{2}(\d{6}|\d{7})$/, + // HUNGARY + IE: /^[A-Z0-9]{2}\d{7}$/, + // IRELAND + IN: /^[A-Z]{1}-?\d{7}$/, + // INDIA + ID: /^[A-C]\d{7}$/, + // INDONESIA + IR: /^[A-Z]\d{8}$/, + // IRAN + IS: /^(A)\d{7}$/, + // ICELAND + IT: /^[A-Z0-9]{2}\d{7}$/, + // ITALY + JP: /^[A-Z]{2}\d{7}$/, + // JAPAN + KR: /^[MS]\d{8}$/, + // SOUTH KOREA, REPUBLIC OF KOREA, [S=PS Passports, M=PM Passports] + LT: /^[A-Z0-9]{8}$/, + // LITHUANIA + LU: /^[A-Z0-9]{8}$/, + // LUXEMBURG + LV: /^[A-Z0-9]{2}\d{7}$/, + // LATVIA + LY: /^[A-Z0-9]{8}$/, + // LIBYA + MT: /^\d{7}$/, + // 
MALTA + MZ: /^([A-Z]{2}\d{7})|(\d{2}[A-Z]{2}\d{5})$/, + // MOZAMBIQUE + MY: /^[AHK]\d{8}$/, + // MALAYSIA + NL: /^[A-Z]{2}[A-Z0-9]{6}\d$/, + // NETHERLANDS + PL: /^[A-Z]{2}\d{7}$/, + // POLAND + PT: /^[A-Z]\d{6}$/, + // PORTUGAL + RO: /^\d{8,9}$/, + // ROMANIA + RU: /^\d{9}$/, + // RUSSIAN FEDERATION + SE: /^\d{8}$/, + // SWEDEN + SL: /^(P)[A-Z]\d{7}$/, + // SLOVANIA + SK: /^[0-9A-Z]\d{7}$/, + // SLOVAKIA + TR: /^[A-Z]\d{8}$/, + // TURKEY + UA: /^[A-Z]{2}\d{6}$/, + // UKRAINE + US: /^\d{9}$/ // UNITED STATES + +}; +/** + * Check if str is a valid passport number + * relative to provided ISO Country Code. + * + * @param {string} str + * @param {string} countryCode + * @return {boolean} + */ + +export default function isPassportNumber(str, countryCode) { + assertString(str); + /** Remove All Whitespaces, Convert to UPPERCASE */ + + var normalizedStr = str.replace(/\s/g, '').toUpperCase(); + return countryCode.toUpperCase() in passportRegexByCountryCode && passportRegexByCountryCode[countryCode].test(normalizedStr); +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isPort.js b/node_modules/validator/es/lib/isPort.js new file mode 100644 index 0000000..6490cad --- /dev/null +++ b/node_modules/validator/es/lib/isPort.js @@ -0,0 +1,7 @@ +import isInt from './isInt'; +export default function isPort(str) { + return isInt(str, { + min: 0, + max: 65535 + }); +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isPostalCode.js b/node_modules/validator/es/lib/isPostalCode.js new file mode 100644 index 0000000..359fa8f --- /dev/null +++ b/node_modules/validator/es/lib/isPostalCode.js @@ -0,0 +1,96 @@ +import assertString from './util/assertString'; // common patterns + +var threeDigit = /^\d{3}$/; +var fourDigit = /^\d{4}$/; +var fiveDigit = /^\d{5}$/; +var sixDigit = /^\d{6}$/; +var patterns = { + AD: /^AD\d{3}$/, + AT: fourDigit, + AU: fourDigit, + AZ: /^AZ\d{4}$/, + BE: fourDigit, + BG: fourDigit, + BR: /^\d{5}-\d{3}$/, + BY: /2[1-4]{1}\d{4}$/, + CA: /^[ABCEGHJKLMNPRSTVXY]\d[ABCEGHJ-NPRSTV-Z][\s\-]?\d[ABCEGHJ-NPRSTV-Z]\d$/i, + CH: fourDigit, + CN: /^(0[1-7]|1[012356]|2[0-7]|3[0-6]|4[0-7]|5[1-7]|6[1-7]|7[1-5]|8[1345]|9[09])\d{4}$/, + CZ: /^\d{3}\s?\d{2}$/, + DE: fiveDigit, + DK: fourDigit, + DO: fiveDigit, + DZ: fiveDigit, + EE: fiveDigit, + ES: /^(5[0-2]{1}|[0-4]{1}\d{1})\d{3}$/, + FI: fiveDigit, + FR: /^\d{2}\s?\d{3}$/, + GB: /^(gir\s?0aa|[a-z]{1,2}\d[\da-z]?\s?(\d[a-z]{2})?)$/i, + GR: /^\d{3}\s?\d{2}$/, + HR: /^([1-5]\d{4}$)/, + HT: /^HT\d{4}$/, + HU: fourDigit, + ID: fiveDigit, + IE: /^(?!.*(?:o))[A-Za-z]\d[\dw]\s\w{4}$/i, + IL: /^(\d{5}|\d{7})$/, + IN: /^((?!10|29|35|54|55|65|66|86|87|88|89)[1-9][0-9]{5})$/, + IR: /\b(?!(\d)\1{3})[13-9]{4}[1346-9][013-9]{5}\b/, + IS: threeDigit, + IT: fiveDigit, + JP: /^\d{3}\-\d{4}$/, + KE: fiveDigit, + KR: /^(\d{5}|\d{6})$/, + LI: /^(948[5-9]|949[0-7])$/, + LT: /^LT\-\d{5}$/, + LU: fourDigit, + LV: /^LV\-\d{4}$/, + LK: fiveDigit, + MX: fiveDigit, + MT: /^[A-Za-z]{3}\s{0,1}\d{4}$/, + MY: fiveDigit, + NL: /^\d{4}\s?[a-z]{2}$/i, + NO: fourDigit, + NP: /^(10|21|22|32|33|34|44|45|56|57)\d{3}$|^(977)$/i, + NZ: fourDigit, + PL: /^\d{2}\-\d{3}$/, + PR: /^00[679]\d{2}([ -]\d{4})?$/, + PT: /^\d{4}\-\d{3}?$/, + RO: sixDigit, + RU: sixDigit, + SA: fiveDigit, + SE: /^[1-9]\d{2}\s?\d{2}$/, + SG: sixDigit, + SI: fourDigit, + SK: /^\d{3}\s?\d{2}$/, + TH: fiveDigit, + TN: fourDigit, + TW: /^\d{3}(\d{2})?$/, + UA: fiveDigit, + US: /^\d{5}(-\d{4})?$/, + ZA: fourDigit, + ZM: fiveDigit +}; +export var locales = Object.keys(patterns); 
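For context on how the pattern table above is typically consumed, here is a minimal usage sketch; the import path mirrors the file added by this diff and is an assumption about the consuming project rather than part of the patch.

    import isPostalCode from 'validator/es/lib/isPostalCode'; // assumed consumer-side import path

    console.log(isPostalCode('90210', 'US'));       // true  - five digits
    console.log(isPostalCode('90210-1234', 'US'));  // true  - ZIP+4 form allowed by the US pattern
    console.log(isPostalCode('K1A 0B1', 'CA'));     // true  - Canadian format, matched case-insensitively
    console.log(isPostalCode('ABC', 'US'));         // false - does not match the US pattern
    console.log(isPostalCode('90210', 'any'));      // true  - accepted if any locale's pattern matches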
+export default function isPostalCode(str, locale) { + assertString(str); + + if (locale in patterns) { + return patterns[locale].test(str); + } else if (locale === 'any') { + for (var key in patterns) { + // https://github.com/gotwarlost/istanbul/blob/master/ignoring-code-for-coverage.md#ignoring-code-for-coverage-purposes + // istanbul ignore else + if (patterns.hasOwnProperty(key)) { + var pattern = patterns[key]; + + if (pattern.test(str)) { + return true; + } + } + } + + return false; + } + + throw new Error("Invalid locale '".concat(locale, "'")); +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isRFC3339.js b/node_modules/validator/es/lib/isRFC3339.js new file mode 100644 index 0000000..b9926ed --- /dev/null +++ b/node_modules/validator/es/lib/isRFC3339.js @@ -0,0 +1,20 @@ +import assertString from './util/assertString'; +/* Based on https://tools.ietf.org/html/rfc3339#section-5.6 */ + +var dateFullYear = /[0-9]{4}/; +var dateMonth = /(0[1-9]|1[0-2])/; +var dateMDay = /([12]\d|0[1-9]|3[01])/; +var timeHour = /([01][0-9]|2[0-3])/; +var timeMinute = /[0-5][0-9]/; +var timeSecond = /([0-5][0-9]|60)/; +var timeSecFrac = /(\.[0-9]+)?/; +var timeNumOffset = new RegExp("[-+]".concat(timeHour.source, ":").concat(timeMinute.source)); +var timeOffset = new RegExp("([zZ]|".concat(timeNumOffset.source, ")")); +var partialTime = new RegExp("".concat(timeHour.source, ":").concat(timeMinute.source, ":").concat(timeSecond.source).concat(timeSecFrac.source)); +var fullDate = new RegExp("".concat(dateFullYear.source, "-").concat(dateMonth.source, "-").concat(dateMDay.source)); +var fullTime = new RegExp("".concat(partialTime.source).concat(timeOffset.source)); +var rfc3339 = new RegExp("^".concat(fullDate.source, "[ tT]").concat(fullTime.source, "$")); +export default function isRFC3339(str) { + assertString(str); + return rfc3339.test(str); +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isRgbColor.js b/node_modules/validator/es/lib/isRgbColor.js new file mode 100644 index 0000000..2fe2fbb --- /dev/null +++ b/node_modules/validator/es/lib/isRgbColor.js @@ -0,0 +1,15 @@ +import assertString from './util/assertString'; +var rgbColor = /^rgb\((([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5]),){2}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\)$/; +var rgbaColor = /^rgba\((([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5]),){3}(0?\.\d|1(\.0)?|0(\.0)?)\)$/; +var rgbColorPercent = /^rgb\((([0-9]%|[1-9][0-9]%|100%),){2}([0-9]%|[1-9][0-9]%|100%)\)/; +var rgbaColorPercent = /^rgba\((([0-9]%|[1-9][0-9]%|100%),){3}(0?\.\d|1(\.0)?|0(\.0)?)\)/; +export default function isRgbColor(str) { + var includePercentValues = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : true; + assertString(str); + + if (!includePercentValues) { + return rgbColor.test(str) || rgbaColor.test(str); + } + + return rgbColor.test(str) || rgbaColor.test(str) || rgbColorPercent.test(str) || rgbaColorPercent.test(str); +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isSemVer.js b/node_modules/validator/es/lib/isSemVer.js new file mode 100644 index 0000000..f6dd9bd --- /dev/null +++ b/node_modules/validator/es/lib/isSemVer.js @@ -0,0 +1,14 @@ +import assertString from './util/assertString'; +import multilineRegexp from './util/multilineRegex'; +/** + * Regular Expression to match + * semantic versioning (SemVer) + * built from multi-line, multi-parts regexp + * Reference: https://semver.org/ + */ + +var semanticVersioningRegex = multilineRegexp(['^(0|[1-9]\\d*)\\.(0|[1-9]\\d*)\\.(0|[1-9]\\d*)', '(?:-((?:0|[1-9]\\d*|\\d*[a-z-][0-9a-z-]*)(?:\\.(?:0|[1-9]\\d*|\\d*[a-z-][0-9a-z-]*))*))', '?(?:\\+([0-9a-z-]+(?:\\.[0-9a-z-]+)*))?$'], 'i'); +export default function isSemVer(str) { + assertString(str); + return semanticVersioningRegex.test(str); +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isSlug.js b/node_modules/validator/es/lib/isSlug.js new file mode 100644 index 0000000..b9659ef --- /dev/null +++ b/node_modules/validator/es/lib/isSlug.js @@ -0,0 +1,6 @@ +import assertString from './util/assertString'; +var charsetRegex = /^[^\s-_](?!.*?[-_]{2,})[a-z0-9-\\][^\s]*[^-_\s]$/; +export default function isSlug(str) { + assertString(str); + return charsetRegex.test(str); +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isStrongPassword.js b/node_modules/validator/es/lib/isStrongPassword.js new file mode 100644 index 0000000..0d55633 --- /dev/null +++ b/node_modules/validator/es/lib/isStrongPassword.js @@ -0,0 +1,101 @@ +import merge from './util/merge'; +import assertString from './util/assertString'; +var upperCaseRegex = /^[A-Z]$/; +var lowerCaseRegex = /^[a-z]$/; +var numberRegex = /^[0-9]$/; +var symbolRegex = /^[-#!$@%^&*()_+|~=`{}\[\]:";'<>?,.\/ ]$/; +var defaultOptions = { + minLength: 8, + minLowercase: 1, + minUppercase: 1, + minNumbers: 1, + minSymbols: 1, + returnScore: false, + pointsPerUnique: 1, + pointsPerRepeat: 0.5, + pointsForContainingLower: 10, + pointsForContainingUpper: 10, + pointsForContainingNumber: 10, + pointsForContainingSymbol: 10 +}; +/* Counts number of occurrences of each char in a string + * could be moved to util/ ? 
+*/ + +function countChars(str) { + var result = {}; + Array.from(str).forEach(function (_char) { + var curVal = result[_char]; + + if (curVal) { + result[_char] += 1; + } else { + result[_char] = 1; + } + }); + return result; +} +/* Return information about a password */ + + +function analyzePassword(password) { + var charMap = countChars(password); + var analysis = { + length: password.length, + uniqueChars: Object.keys(charMap).length, + uppercaseCount: 0, + lowercaseCount: 0, + numberCount: 0, + symbolCount: 0 + }; + Object.keys(charMap).forEach(function (_char2) { + /* istanbul ignore else */ + if (upperCaseRegex.test(_char2)) { + analysis.uppercaseCount += charMap[_char2]; + } else if (lowerCaseRegex.test(_char2)) { + analysis.lowercaseCount += charMap[_char2]; + } else if (numberRegex.test(_char2)) { + analysis.numberCount += charMap[_char2]; + } else if (symbolRegex.test(_char2)) { + analysis.symbolCount += charMap[_char2]; + } + }); + return analysis; +} + +function scorePassword(analysis, scoringOptions) { + var points = 0; + points += analysis.uniqueChars * scoringOptions.pointsPerUnique; + points += (analysis.length - analysis.uniqueChars) * scoringOptions.pointsPerRepeat; + + if (analysis.lowercaseCount > 0) { + points += scoringOptions.pointsForContainingLower; + } + + if (analysis.uppercaseCount > 0) { + points += scoringOptions.pointsForContainingUpper; + } + + if (analysis.numberCount > 0) { + points += scoringOptions.pointsForContainingNumber; + } + + if (analysis.symbolCount > 0) { + points += scoringOptions.pointsForContainingSymbol; + } + + return points; +} + +export default function isStrongPassword(str) { + var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : null; + assertString(str); + var analysis = analyzePassword(str); + options = merge(options || {}, defaultOptions); + + if (options.returnScore) { + return scorePassword(analysis, options); + } + + return analysis.length >= options.minLength && analysis.lowercaseCount >= options.minLowercase && analysis.uppercaseCount >= options.minUppercase && analysis.numberCount >= options.minNumbers && analysis.symbolCount >= options.minSymbols; +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isSurrogatePair.js b/node_modules/validator/es/lib/isSurrogatePair.js new file mode 100644 index 0000000..1e0efb2 --- /dev/null +++ b/node_modules/validator/es/lib/isSurrogatePair.js @@ -0,0 +1,6 @@ +import assertString from './util/assertString'; +var surrogatePair = /[\uD800-\uDBFF][\uDC00-\uDFFF]/; +export default function isSurrogatePair(str) { + assertString(str); + return surrogatePair.test(str); +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isTaxID.js b/node_modules/validator/es/lib/isTaxID.js new file mode 100644 index 0000000..1107cc0 --- /dev/null +++ b/node_modules/validator/es/lib/isTaxID.js @@ -0,0 +1,1508 @@ +function _toConsumableArray(arr) { return _arrayWithoutHoles(arr) || _iterableToArray(arr) || _unsupportedIterableToArray(arr) || _nonIterableSpread(); } + +function _nonIterableSpread() { throw new TypeError("Invalid attempt to spread non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); } + +function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return 
Array.from(o); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); } + +function _iterableToArray(iter) { if (typeof Symbol !== "undefined" && Symbol.iterator in Object(iter)) return Array.from(iter); } + +function _arrayWithoutHoles(arr) { if (Array.isArray(arr)) return _arrayLikeToArray(arr); } + +function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) { arr2[i] = arr[i]; } return arr2; } + +import assertString from './util/assertString'; +import * as algorithms from './util/algorithms'; +import isDate from './isDate'; +/** + * TIN Validation + * Validates Tax Identification Numbers (TINs) from the US, EU member states and the United Kingdom. + * + * EU-UK: + * National TIN validity is calculated using public algorithms as made available by DG TAXUD. + * + * See `https://ec.europa.eu/taxation_customs/tin/specs/FS-TIN%20Algorithms-Public.docx` for more information. + * + * US: + * An Employer Identification Number (EIN), also known as a Federal Tax Identification Number, + * is used to identify a business entity. + * + * NOTES: + * - Prefix 47 is being reserved for future use + * - Prefixes 26, 27, 45, 46 and 47 were previously assigned by the Philadelphia campus. + * + * See `http://www.irs.gov/Businesses/Small-Businesses-&-Self-Employed/How-EINs-are-Assigned-and-Valid-EIN-Prefixes` + * for more information. + */ +// Locale functions + +/* + * bg-BG validation function + * (Edinen graždanski nomer (EGN/ЕГН), persons only) + * Checks if birth date (first six digits) is valid and calculates check (last) digit + */ + +function bgBgCheck(tin) { + // Extract full year, normalize month and check birth date validity + var century_year = tin.slice(0, 2); + var month = parseInt(tin.slice(2, 4), 10); + + if (month > 40) { + month -= 40; + century_year = "20".concat(century_year); + } else if (month > 20) { + month -= 20; + century_year = "18".concat(century_year); + } else { + century_year = "19".concat(century_year); + } + + if (month < 10) { + month = "0".concat(month); + } + + var date = "".concat(century_year, "/").concat(month, "/").concat(tin.slice(4, 6)); + + if (!isDate(date, 'YYYY/MM/DD')) { + return false; + } // split digits into an array for further processing + + + var digits = tin.split('').map(function (a) { + return parseInt(a, 10); + }); // Calculate checksum by multiplying digits with fixed values + + var multip_lookup = [2, 4, 8, 5, 10, 9, 7, 3, 6]; + var checksum = 0; + + for (var i = 0; i < multip_lookup.length; i++) { + checksum += digits[i] * multip_lookup[i]; + } + + checksum = checksum % 11 === 10 ? 
0 : checksum % 11; + return checksum === digits[9]; +} +/* + * cs-CZ validation function + * (Rodné číslo (RČ), persons only) + * Checks if birth date (first six digits) is valid and divisibility by 11 + * Material not in DG TAXUD document sourced from: + * -`https://lorenc.info/3MA381/overeni-spravnosti-rodneho-cisla.htm` + * -`https://www.mvcr.cz/clanek/rady-a-sluzby-dokumenty-rodne-cislo.aspx` + */ + + +function csCzCheck(tin) { + tin = tin.replace(/\W/, ''); // Extract full year from TIN length + + var full_year = parseInt(tin.slice(0, 2), 10); + + if (tin.length === 10) { + if (full_year < 54) { + full_year = "20".concat(full_year); + } else { + full_year = "19".concat(full_year); + } + } else { + if (tin.slice(6) === '000') { + return false; + } // Three-zero serial not assigned before 1954 + + + if (full_year < 54) { + full_year = "19".concat(full_year); + } else { + return false; // No 18XX years seen in any of the resources + } + } // Add missing zero if needed + + + if (full_year.length === 3) { + full_year = [full_year.slice(0, 2), '0', full_year.slice(2)].join(''); + } // Extract month from TIN and normalize + + + var month = parseInt(tin.slice(2, 4), 10); + + if (month > 50) { + month -= 50; + } + + if (month > 20) { + // Month-plus-twenty was only introduced in 2004 + if (parseInt(full_year, 10) < 2004) { + return false; + } + + month -= 20; + } + + if (month < 10) { + month = "0".concat(month); + } // Check date validity + + + var date = "".concat(full_year, "/").concat(month, "/").concat(tin.slice(4, 6)); + + if (!isDate(date, 'YYYY/MM/DD')) { + return false; + } // Verify divisibility by 11 + + + if (tin.length === 10) { + if (parseInt(tin, 10) % 11 !== 0) { + // Some numbers up to and including 1985 are still valid if + // check (last) digit equals 0 and modulo of first 9 digits equals 10 + var checkdigit = parseInt(tin.slice(0, 9), 10) % 11; + + if (parseInt(full_year, 10) < 1986 && checkdigit === 10) { + if (parseInt(tin.slice(9), 10) !== 0) { + return false; + } + } else { + return false; + } + } + } + + return true; +} +/* + * de-AT validation function + * (Abgabenkontonummer, persons/entities) + * Verify TIN validity by calling luhnCheck() + */ + + +function deAtCheck(tin) { + return algorithms.luhnCheck(tin); +} +/* + * de-DE validation function + * (Steueridentifikationsnummer (Steuer-IdNr.), persons only) + * Tests for single duplicate/triplicate value, then calculates ISO 7064 check (last) digit + * Partial implementation of spec (same result with both algorithms always) + */ + + +function deDeCheck(tin) { + // Split digits into an array for further processing + var digits = tin.split('').map(function (a) { + return parseInt(a, 10); + }); // Fill array with strings of number positions + + var occurences = []; + + for (var i = 0; i < digits.length - 1; i++) { + occurences.push(''); + + for (var j = 0; j < digits.length - 1; j++) { + if (digits[i] === digits[j]) { + occurences[i] += j; + } + } + } // Remove digits with one occurence and test for only one duplicate/triplicate + + + occurences = occurences.filter(function (a) { + return a.length > 1; + }); + + if (occurences.length !== 2 && occurences.length !== 3) { + return false; + } // In case of triplicate value only two digits are allowed next to each other + + + if (occurences[0].length === 3) { + var trip_locations = occurences[0].split('').map(function (a) { + return parseInt(a, 10); + }); + var recurrent = 0; // Amount of neighbour occurences + + for (var _i = 0; _i < trip_locations.length - 1; _i++) { + if 
(trip_locations[_i] + 1 === trip_locations[_i + 1]) { + recurrent += 1; + } + } + + if (recurrent === 2) { + return false; + } + } + + return algorithms.iso7064Check(tin); +} +/* + * dk-DK validation function + * (CPR-nummer (personnummer), persons only) + * Checks if birth date (first six digits) is valid and assigned to century (seventh) digit, + * and calculates check (last) digit + */ + + +function dkDkCheck(tin) { + tin = tin.replace(/\W/, ''); // Extract year, check if valid for given century digit and add century + + var year = parseInt(tin.slice(4, 6), 10); + var century_digit = tin.slice(6, 7); + + switch (century_digit) { + case '0': + case '1': + case '2': + case '3': + year = "19".concat(year); + break; + + case '4': + case '9': + if (year < 37) { + year = "20".concat(year); + } else { + year = "19".concat(year); + } + + break; + + default: + if (year < 37) { + year = "20".concat(year); + } else if (year > 58) { + year = "18".concat(year); + } else { + return false; + } + + break; + } // Add missing zero if needed + + + if (year.length === 3) { + year = [year.slice(0, 2), '0', year.slice(2)].join(''); + } // Check date validity + + + var date = "".concat(year, "/").concat(tin.slice(2, 4), "/").concat(tin.slice(0, 2)); + + if (!isDate(date, 'YYYY/MM/DD')) { + return false; + } // Split digits into an array for further processing + + + var digits = tin.split('').map(function (a) { + return parseInt(a, 10); + }); + var checksum = 0; + var weight = 4; // Multiply by weight and add to checksum + + for (var i = 0; i < 9; i++) { + checksum += digits[i] * weight; + weight -= 1; + + if (weight === 1) { + weight = 7; + } + } + + checksum %= 11; + + if (checksum === 1) { + return false; + } + + return checksum === 0 ? digits[9] === 0 : digits[9] === 11 - checksum; +} +/* + * el-CY validation function + * (Arithmos Forologikou Mitroou (AFM/ΑΦΜ), persons only) + * Verify TIN validity by calculating ASCII value of check (last) character + */ + + +function elCyCheck(tin) { + // split digits into an array for further processing + var digits = tin.slice(0, 8).split('').map(function (a) { + return parseInt(a, 10); + }); + var checksum = 0; // add digits in even places + + for (var i = 1; i < digits.length; i += 2) { + checksum += digits[i]; + } // add digits in odd places + + + for (var _i2 = 0; _i2 < digits.length; _i2 += 2) { + if (digits[_i2] < 2) { + checksum += 1 - digits[_i2]; + } else { + checksum += 2 * (digits[_i2] - 2) + 5; + + if (digits[_i2] > 4) { + checksum += 2; + } + } + } + + return String.fromCharCode(checksum % 26 + 65) === tin.charAt(8); +} +/* + * el-GR validation function + * (Arithmos Forologikou Mitroou (AFM/ΑΦΜ), persons/entities) + * Verify TIN validity by calculating check (last) digit + * Algorithm not in DG TAXUD document- sourced from: + * - `http://epixeirisi.gr/%CE%9A%CE%A1%CE%99%CE%A3%CE%99%CE%9C%CE%91-%CE%98%CE%95%CE%9C%CE%91%CE%A4%CE%91-%CE%A6%CE%9F%CE%A1%CE%9F%CE%9B%CE%9F%CE%93%CE%99%CE%91%CE%A3-%CE%9A%CE%91%CE%99-%CE%9B%CE%9F%CE%93%CE%99%CE%A3%CE%A4%CE%99%CE%9A%CE%97%CE%A3/23791/%CE%91%CF%81%CE%B9%CE%B8%CE%BC%CF%8C%CF%82-%CE%A6%CE%BF%CF%81%CE%BF%CE%BB%CE%BF%CE%B3%CE%B9%CE%BA%CE%BF%CF%8D-%CE%9C%CE%B7%CF%84%CF%81%CF%8E%CE%BF%CF%85` + */ + + +function elGrCheck(tin) { + // split digits into an array for further processing + var digits = tin.split('').map(function (a) { + return parseInt(a, 10); + }); + var checksum = 0; + + for (var i = 0; i < 8; i++) { + checksum += digits[i] * Math.pow(2, 8 - i); + } + + return checksum % 11 % 10 === digits[8]; +} +/* + * 
en-GB validation function (should go here if needed) + * (National Insurance Number (NINO) or Unique Taxpayer Reference (UTR), + * persons/entities respectively) + */ + +/* + * en-IE validation function + * (Personal Public Service Number (PPS No), persons only) + * Verify TIN validity by calculating check (second to last) character + */ + + +function enIeCheck(tin) { + var checksum = algorithms.reverseMultiplyAndSum(tin.split('').slice(0, 7).map(function (a) { + return parseInt(a, 10); + }), 8); + + if (tin.length === 9 && tin[8] !== 'W') { + checksum += (tin[8].charCodeAt(0) - 64) * 9; + } + + checksum %= 23; + + if (checksum === 0) { + return tin[7].toUpperCase() === 'W'; + } + + return tin[7].toUpperCase() === String.fromCharCode(64 + checksum); +} // Valid US IRS campus prefixes + + +var enUsCampusPrefix = { + andover: ['10', '12'], + atlanta: ['60', '67'], + austin: ['50', '53'], + brookhaven: ['01', '02', '03', '04', '05', '06', '11', '13', '14', '16', '21', '22', '23', '25', '34', '51', '52', '54', '55', '56', '57', '58', '59', '65'], + cincinnati: ['30', '32', '35', '36', '37', '38', '61'], + fresno: ['15', '24'], + internet: ['20', '26', '27', '45', '46', '47'], + kansas: ['40', '44'], + memphis: ['94', '95'], + ogden: ['80', '90'], + philadelphia: ['33', '39', '41', '42', '43', '46', '48', '62', '63', '64', '66', '68', '71', '72', '73', '74', '75', '76', '77', '81', '82', '83', '84', '85', '86', '87', '88', '91', '92', '93', '98', '99'], + sba: ['31'] +}; // Return an array of all US IRS campus prefixes + +function enUsGetPrefixes() { + var prefixes = []; + + for (var location in enUsCampusPrefix) { + // https://github.com/gotwarlost/istanbul/blob/master/ignoring-code-for-coverage.md#ignoring-code-for-coverage-purposes + // istanbul ignore else + if (enUsCampusPrefix.hasOwnProperty(location)) { + prefixes.push.apply(prefixes, _toConsumableArray(enUsCampusPrefix[location])); + } + } + + return prefixes; +} +/* + * en-US validation function + * Verify that the TIN starts with a valid IRS campus prefix + */ + + +function enUsCheck(tin) { + return enUsGetPrefixes().indexOf(tin.substr(0, 2)) !== -1; +} +/* + * es-ES validation function + * (Documento Nacional de Identidad (DNI) + * or Número de Identificación de Extranjero (NIE), persons only) + * Verify TIN validity by calculating check (last) character + */ + + +function esEsCheck(tin) { + // Split characters into an array for further processing + var chars = tin.toUpperCase().split(''); // Replace initial letter if needed + + if (isNaN(parseInt(chars[0], 10)) && chars.length > 1) { + var lead_replace = 0; + + switch (chars[0]) { + case 'Y': + lead_replace = 1; + break; + + case 'Z': + lead_replace = 2; + break; + + default: + } + + chars.splice(0, 1, lead_replace); // Fill with zeros if smaller than proper + } else { + while (chars.length < 9) { + chars.unshift(0); + } + } // Calculate checksum and check according to lookup + + + var lookup = ['T', 'R', 'W', 'A', 'G', 'M', 'Y', 'F', 'P', 'D', 'X', 'B', 'N', 'J', 'Z', 'S', 'Q', 'V', 'H', 'L', 'C', 'K', 'E']; + chars = chars.join(''); + var checksum = parseInt(chars.slice(0, 8), 10) % 23; + return chars[8] === lookup[checksum]; +} +/* + * et-EE validation function + * (Isikukood (IK), persons only) + * Checks if birth date (century digit and six following) is valid and calculates check (last) digit + * Material not in DG TAXUD document sourced from: + * - `https://www.oecd.org/tax/automatic-exchange/crs-implementation-and-assistance/tax-identification-numbers/Estonia-TIN.pdf` + */ + 
+ +function etEeCheck(tin) { + // Extract year and add century + var full_year = tin.slice(1, 3); + var century_digit = tin.slice(0, 1); + + switch (century_digit) { + case '1': + case '2': + full_year = "18".concat(full_year); + break; + + case '3': + case '4': + full_year = "19".concat(full_year); + break; + + default: + full_year = "20".concat(full_year); + break; + } // Check date validity + + + var date = "".concat(full_year, "/").concat(tin.slice(3, 5), "/").concat(tin.slice(5, 7)); + + if (!isDate(date, 'YYYY/MM/DD')) { + return false; + } // Split digits into an array for further processing + + + var digits = tin.split('').map(function (a) { + return parseInt(a, 10); + }); + var checksum = 0; + var weight = 1; // Multiply by weight and add to checksum + + for (var i = 0; i < 10; i++) { + checksum += digits[i] * weight; + weight += 1; + + if (weight === 10) { + weight = 1; + } + } // Do again if modulo 11 of checksum is 10 + + + if (checksum % 11 === 10) { + checksum = 0; + weight = 3; + + for (var _i3 = 0; _i3 < 10; _i3++) { + checksum += digits[_i3] * weight; + weight += 1; + + if (weight === 10) { + weight = 1; + } + } + + if (checksum % 11 === 10) { + return digits[10] === 0; + } + } + + return checksum % 11 === digits[10]; +} +/* + * fi-FI validation function + * (Henkilötunnus (HETU), persons only) + * Checks if birth date (first six digits plus century symbol) is valid + * and calculates check (last) digit + */ + + +function fiFiCheck(tin) { + // Extract year and add century + var full_year = tin.slice(4, 6); + var century_symbol = tin.slice(6, 7); + + switch (century_symbol) { + case '+': + full_year = "18".concat(full_year); + break; + + case '-': + full_year = "19".concat(full_year); + break; + + default: + full_year = "20".concat(full_year); + break; + } // Check date validity + + + var date = "".concat(full_year, "/").concat(tin.slice(2, 4), "/").concat(tin.slice(0, 2)); + + if (!isDate(date, 'YYYY/MM/DD')) { + return false; + } // Calculate check character + + + var checksum = parseInt(tin.slice(0, 6) + tin.slice(7, 10), 10) % 31; + + if (checksum < 10) { + return checksum === parseInt(tin.slice(10), 10); + } + + checksum -= 10; + var letters_lookup = ['A', 'B', 'C', 'D', 'E', 'F', 'H', 'J', 'K', 'L', 'M', 'N', 'P', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y']; + return letters_lookup[checksum] === tin.slice(10); +} +/* + * fr/nl-BE validation function + * (Numéro national (N.N.), persons only) + * Checks if birth date (first six digits) is valid and calculates check (last two) digits + */ + + +function frBeCheck(tin) { + // Zero month/day value is acceptable + if (tin.slice(2, 4) !== '00' || tin.slice(4, 6) !== '00') { + // Extract date from first six digits of TIN + var date = "".concat(tin.slice(0, 2), "/").concat(tin.slice(2, 4), "/").concat(tin.slice(4, 6)); + + if (!isDate(date, 'YY/MM/DD')) { + return false; + } + } + + var checksum = 97 - parseInt(tin.slice(0, 9), 10) % 97; + var checkdigits = parseInt(tin.slice(9, 11), 10); + + if (checksum !== checkdigits) { + checksum = 97 - parseInt("2".concat(tin.slice(0, 9)), 10) % 97; + + if (checksum !== checkdigits) { + return false; + } + } + + return true; +} +/* + * fr-FR validation function + * (Numéro fiscal de référence (numéro SPI), persons only) + * Verify TIN validity by calculating check (last three) digits + */ + + +function frFrCheck(tin) { + tin = tin.replace(/\s/g, ''); + var checksum = parseInt(tin.slice(0, 10), 10) % 511; + var checkdigits = parseInt(tin.slice(10, 13), 10); + return checksum === checkdigits; 
+} +/* + * fr/lb-LU validation function + * (numéro d’identification personnelle, persons only) + * Verify birth date validity and run Luhn and Verhoeff checks + */ + + +function frLuCheck(tin) { + // Extract date and check validity + var date = "".concat(tin.slice(0, 4), "/").concat(tin.slice(4, 6), "/").concat(tin.slice(6, 8)); + + if (!isDate(date, 'YYYY/MM/DD')) { + return false; + } // Run Luhn check + + + if (!algorithms.luhnCheck(tin.slice(0, 12))) { + return false; + } // Remove Luhn check digit and run Verhoeff check + + + return algorithms.verhoeffCheck("".concat(tin.slice(0, 11)).concat(tin[12])); +} +/* + * hr-HR validation function + * (Osobni identifikacijski broj (OIB), persons/entities) + * Verify TIN validity by calling iso7064Check(digits) + */ + + +function hrHrCheck(tin) { + return algorithms.iso7064Check(tin); +} +/* + * hu-HU validation function + * (Adóazonosító jel, persons only) + * Verify TIN validity by calculating check (last) digit + */ + + +function huHuCheck(tin) { + // split digits into an array for further processing + var digits = tin.split('').map(function (a) { + return parseInt(a, 10); + }); + var checksum = 8; + + for (var i = 1; i < 9; i++) { + checksum += digits[i] * (i + 1); + } + + return checksum % 11 === digits[9]; +} +/* + * lt-LT validation function (should go here if needed) + * (Asmens kodas, persons/entities respectively) + * Current validation check is alias of etEeCheck- same format applies + */ + +/* + * it-IT first/last name validity check + * Accepts it-IT TIN-encoded names as a three-element character array and checks their validity + * Due to lack of clarity between resources ("Are only Italian consonants used? + * What happens if a person has X in their name?" etc.) only two test conditions + * have been implemented: + * Vowels may only be followed by other vowels or an X character + * and X characters after vowels may only be followed by other X characters. + */ + + +function itItNameCheck(name) { + // true at the first occurence of a vowel + var vowelflag = false; // true at the first occurence of an X AFTER vowel + // (to properly handle last names with X as consonant) + + var xflag = false; + + for (var i = 0; i < 3; i++) { + if (!vowelflag && /[AEIOU]/.test(name[i])) { + vowelflag = true; + } else if (!xflag && vowelflag && name[i] === 'X') { + xflag = true; + } else if (i > 0) { + if (vowelflag && !xflag) { + if (!/[AEIOU]/.test(name[i])) { + return false; + } + } + + if (xflag) { + if (!/X/.test(name[i])) { + return false; + } + } + } + } + + return true; +} +/* + * it-IT validation function + * (Codice fiscale (TIN-IT), persons only) + * Verify name, birth date and codice catastale validity + * and calculate check character. 
+ * Material not in DG-TAXUD document sourced from: + * `https://en.wikipedia.org/wiki/Italian_fiscal_code` + */ + + +function itItCheck(tin) { + // Capitalize and split characters into an array for further processing + var chars = tin.toUpperCase().split(''); // Check first and last name validity calling itItNameCheck() + + if (!itItNameCheck(chars.slice(0, 3))) { + return false; + } + + if (!itItNameCheck(chars.slice(3, 6))) { + return false; + } // Convert letters in number spaces back to numbers if any + + + var number_locations = [6, 7, 9, 10, 12, 13, 14]; + var number_replace = { + L: '0', + M: '1', + N: '2', + P: '3', + Q: '4', + R: '5', + S: '6', + T: '7', + U: '8', + V: '9' + }; + + for (var _i4 = 0, _number_locations = number_locations; _i4 < _number_locations.length; _i4++) { + var i = _number_locations[_i4]; + + if (chars[i] in number_replace) { + chars.splice(i, 1, number_replace[chars[i]]); + } + } // Extract month and day, and check date validity + + + var month_replace = { + A: '01', + B: '02', + C: '03', + D: '04', + E: '05', + H: '06', + L: '07', + M: '08', + P: '09', + R: '10', + S: '11', + T: '12' + }; + var month = month_replace[chars[8]]; + var day = parseInt(chars[9] + chars[10], 10); + + if (day > 40) { + day -= 40; + } + + if (day < 10) { + day = "0".concat(day); + } + + var date = "".concat(chars[6]).concat(chars[7], "/").concat(month, "/").concat(day); + + if (!isDate(date, 'YY/MM/DD')) { + return false; + } // Calculate check character by adding up even and odd characters as numbers + + + var checksum = 0; + + for (var _i5 = 1; _i5 < chars.length - 1; _i5 += 2) { + var char_to_int = parseInt(chars[_i5], 10); + + if (isNaN(char_to_int)) { + char_to_int = chars[_i5].charCodeAt(0) - 65; + } + + checksum += char_to_int; + } + + var odd_convert = { + // Maps of characters at odd places + A: 1, + B: 0, + C: 5, + D: 7, + E: 9, + F: 13, + G: 15, + H: 17, + I: 19, + J: 21, + K: 2, + L: 4, + M: 18, + N: 20, + O: 11, + P: 3, + Q: 6, + R: 8, + S: 12, + T: 14, + U: 16, + V: 10, + W: 22, + X: 25, + Y: 24, + Z: 23, + 0: 1, + 1: 0 + }; + + for (var _i6 = 0; _i6 < chars.length - 1; _i6 += 2) { + var _char_to_int = 0; + + if (chars[_i6] in odd_convert) { + _char_to_int = odd_convert[chars[_i6]]; + } else { + var multiplier = parseInt(chars[_i6], 10); + _char_to_int = 2 * multiplier + 1; + + if (multiplier > 4) { + _char_to_int += 2; + } + } + + checksum += _char_to_int; + } + + if (String.fromCharCode(65 + checksum % 26) !== chars[15]) { + return false; + } + + return true; +} +/* + * lv-LV validation function + * (Personas kods (PK), persons only) + * Check validity of birth date and calculate check (last) digit + * Support only for old format numbers (not starting with '32', issued before 2017/07/01) + * Material not in DG TAXUD document sourced from: + * `https://boot.ritakafija.lv/forums/index.php?/topic/88314-personas-koda-algoritms-%C4%8Deksumma/` + */ + + +function lvLvCheck(tin) { + tin = tin.replace(/\W/, ''); // Extract date from TIN + + var day = tin.slice(0, 2); + + if (day !== '32') { + // No date/checksum check if new format + var month = tin.slice(2, 4); + + if (month !== '00') { + // No date check if unknown month + var full_year = tin.slice(4, 6); + + switch (tin[6]) { + case '0': + full_year = "18".concat(full_year); + break; + + case '1': + full_year = "19".concat(full_year); + break; + + default: + full_year = "20".concat(full_year); + break; + } // Check date validity + + + var date = "".concat(full_year, "/").concat(tin.slice(2, 4), "/").concat(day); + + if 
(!isDate(date, 'YYYY/MM/DD')) { + return false; + } + } // Calculate check digit + + + var checksum = 1101; + var multip_lookup = [1, 6, 3, 7, 9, 10, 5, 8, 4, 2]; + + for (var i = 0; i < tin.length - 1; i++) { + checksum -= parseInt(tin[i], 10) * multip_lookup[i]; + } + + return parseInt(tin[10], 10) === checksum % 11; + } + + return true; +} +/* + * mt-MT validation function + * (Identity Card Number or Unique Taxpayer Reference, persons/entities) + * Verify Identity Card Number structure (no other tests found) + */ + + +function mtMtCheck(tin) { + if (tin.length !== 9) { + // No tests for UTR + var chars = tin.toUpperCase().split(''); // Fill with zeros if smaller than proper + + while (chars.length < 8) { + chars.unshift(0); + } // Validate format according to last character + + + switch (tin[7]) { + case 'A': + case 'P': + if (parseInt(chars[6], 10) === 0) { + return false; + } + + break; + + default: + { + var first_part = parseInt(chars.join('').slice(0, 5), 10); + + if (first_part > 32000) { + return false; + } + + var second_part = parseInt(chars.join('').slice(5, 7), 10); + + if (first_part === second_part) { + return false; + } + } + } + } + + return true; +} +/* + * nl-NL validation function + * (Burgerservicenummer (BSN) or Rechtspersonen Samenwerkingsverbanden Informatie Nummer (RSIN), + * persons/entities respectively) + * Verify TIN validity by calculating check (last) digit (variant of MOD 11) + */ + + +function nlNlCheck(tin) { + return algorithms.reverseMultiplyAndSum(tin.split('').slice(0, 8).map(function (a) { + return parseInt(a, 10); + }), 9) % 11 === parseInt(tin[8], 10); +} +/* + * pl-PL validation function + * (Powszechny Elektroniczny System Ewidencji Ludności (PESEL) + * or Numer identyfikacji podatkowej (NIP), persons/entities) + * Verify TIN validity by validating birth date (PESEL) and calculating check (last) digit + */ + + +function plPlCheck(tin) { + // NIP + if (tin.length === 10) { + // Calculate last digit by multiplying with lookup + var lookup = [6, 5, 7, 2, 3, 4, 5, 6, 7]; + var _checksum = 0; + + for (var i = 0; i < lookup.length; i++) { + _checksum += parseInt(tin[i], 10) * lookup[i]; + } + + _checksum %= 11; + + if (_checksum === 10) { + return false; + } + + return _checksum === parseInt(tin[9], 10); + } // PESEL + // Extract full year using month + + + var full_year = tin.slice(0, 2); + var month = parseInt(tin.slice(2, 4), 10); + + if (month > 80) { + full_year = "18".concat(full_year); + month -= 80; + } else if (month > 60) { + full_year = "22".concat(full_year); + month -= 60; + } else if (month > 40) { + full_year = "21".concat(full_year); + month -= 40; + } else if (month > 20) { + full_year = "20".concat(full_year); + month -= 20; + } else { + full_year = "19".concat(full_year); + } // Add leading zero to month if needed + + + if (month < 10) { + month = "0".concat(month); + } // Check date validity + + + var date = "".concat(full_year, "/").concat(month, "/").concat(tin.slice(4, 6)); + + if (!isDate(date, 'YYYY/MM/DD')) { + return false; + } // Calculate last digit by mulitplying with odd one-digit numbers except 5 + + + var checksum = 0; + var multiplier = 1; + + for (var _i7 = 0; _i7 < tin.length - 1; _i7++) { + checksum += parseInt(tin[_i7], 10) * multiplier % 10; + multiplier += 2; + + if (multiplier > 10) { + multiplier = 1; + } else if (multiplier === 5) { + multiplier += 2; + } + } + + checksum = 10 - checksum % 10; + return checksum === parseInt(tin[10], 10); +} +/* +* pt-BR validation function +* (Cadastro de Pessoas Físicas 
(CPF, persons) +* Cadastro Nacional de Pessoas Jurídicas (CNPJ, entities) +* Both inputs will be validated +*/ + + +function ptBrCheck(tin) { + if (tin.length === 11) { + var _sum; + + var remainder; + _sum = 0; + if ( // Reject known invalid CPFs + tin === '11111111111' || tin === '22222222222' || tin === '33333333333' || tin === '44444444444' || tin === '55555555555' || tin === '66666666666' || tin === '77777777777' || tin === '88888888888' || tin === '99999999999' || tin === '00000000000') return false; + + for (var i = 1; i <= 9; i++) { + _sum += parseInt(tin.substring(i - 1, i), 10) * (11 - i); + } + + remainder = _sum * 10 % 11; + if (remainder === 10) remainder = 0; + if (remainder !== parseInt(tin.substring(9, 10), 10)) return false; + _sum = 0; + + for (var _i8 = 1; _i8 <= 10; _i8++) { + _sum += parseInt(tin.substring(_i8 - 1, _i8), 10) * (12 - _i8); + } + + remainder = _sum * 10 % 11; + if (remainder === 10) remainder = 0; + if (remainder !== parseInt(tin.substring(10, 11), 10)) return false; + return true; + } + + if ( // Reject know invalid CNPJs + tin === '00000000000000' || tin === '11111111111111' || tin === '22222222222222' || tin === '33333333333333' || tin === '44444444444444' || tin === '55555555555555' || tin === '66666666666666' || tin === '77777777777777' || tin === '88888888888888' || tin === '99999999999999') { + return false; + } + + var length = tin.length - 2; + var identifiers = tin.substring(0, length); + var verificators = tin.substring(length); + var sum = 0; + var pos = length - 7; + + for (var _i9 = length; _i9 >= 1; _i9--) { + sum += identifiers.charAt(length - _i9) * pos; + pos -= 1; + + if (pos < 2) { + pos = 9; + } + } + + var result = sum % 11 < 2 ? 0 : 11 - sum % 11; + + if (result !== parseInt(verificators.charAt(0), 10)) { + return false; + } + + length += 1; + identifiers = tin.substring(0, length); + sum = 0; + pos = length - 7; + + for (var _i10 = length; _i10 >= 1; _i10--) { + sum += identifiers.charAt(length - _i10) * pos; + pos -= 1; + + if (pos < 2) { + pos = 9; + } + } + + result = sum % 11 < 2 ? 
0 : 11 - sum % 11; + + if (result !== parseInt(verificators.charAt(1), 10)) { + return false; + } + + return true; +} +/* + * pt-PT validation function + * (Número de identificação fiscal (NIF), persons/entities) + * Verify TIN validity by calculating check (last) digit (variant of MOD 11) + */ + + +function ptPtCheck(tin) { + var checksum = 11 - algorithms.reverseMultiplyAndSum(tin.split('').slice(0, 8).map(function (a) { + return parseInt(a, 10); + }), 9) % 11; + + if (checksum > 9) { + return parseInt(tin[8], 10) === 0; + } + + return checksum === parseInt(tin[8], 10); +} +/* + * ro-RO validation function + * (Cod Numeric Personal (CNP) or Cod de înregistrare fiscală (CIF), + * persons only) + * Verify CNP validity by calculating check (last) digit (test not found for CIF) + * Material not in DG TAXUD document sourced from: + * `https://en.wikipedia.org/wiki/National_identification_number#Romania` + */ + + +function roRoCheck(tin) { + if (tin.slice(0, 4) !== '9000') { + // No test found for this format + // Extract full year using century digit if possible + var full_year = tin.slice(1, 3); + + switch (tin[0]) { + case '1': + case '2': + full_year = "19".concat(full_year); + break; + + case '3': + case '4': + full_year = "18".concat(full_year); + break; + + case '5': + case '6': + full_year = "20".concat(full_year); + break; + + default: + } // Check date validity + + + var date = "".concat(full_year, "/").concat(tin.slice(3, 5), "/").concat(tin.slice(5, 7)); + + if (date.length === 8) { + if (!isDate(date, 'YY/MM/DD')) { + return false; + } + } else if (!isDate(date, 'YYYY/MM/DD')) { + return false; + } // Calculate check digit + + + var digits = tin.split('').map(function (a) { + return parseInt(a, 10); + }); + var multipliers = [2, 7, 9, 1, 4, 6, 3, 5, 8, 2, 7, 9]; + var checksum = 0; + + for (var i = 0; i < multipliers.length; i++) { + checksum += digits[i] * multipliers[i]; + } + + if (checksum % 11 === 10) { + return digits[12] === 1; + } + + return digits[12] === checksum % 11; + } + + return true; +} +/* + * sk-SK validation function + * (Rodné číslo (RČ) or bezvýznamové identifikačné číslo (BIČ), persons only) + * Checks validity of pre-1954 birth numbers (rodné číslo) only + * Due to the introduction of the pseudo-random BIČ it is not possible to test + * post-1954 birth numbers without knowing whether they are BIČ or RČ beforehand + */ + + +function skSkCheck(tin) { + if (tin.length === 9) { + tin = tin.replace(/\W/, ''); + + if (tin.slice(6) === '000') { + return false; + } // Three-zero serial not assigned before 1954 + // Extract full year from TIN length + + + var full_year = parseInt(tin.slice(0, 2), 10); + + if (full_year > 53) { + return false; + } + + if (full_year < 10) { + full_year = "190".concat(full_year); + } else { + full_year = "19".concat(full_year); + } // Extract month from TIN and normalize + + + var month = parseInt(tin.slice(2, 4), 10); + + if (month > 50) { + month -= 50; + } + + if (month < 10) { + month = "0".concat(month); + } // Check date validity + + + var date = "".concat(full_year, "/").concat(month, "/").concat(tin.slice(4, 6)); + + if (!isDate(date, 'YYYY/MM/DD')) { + return false; + } + } + + return true; +} +/* + * sl-SI validation function + * (Davčna številka, persons/entities) + * Verify TIN validity by calculating check (last) digit (variant of MOD 11) + */ + + +function slSiCheck(tin) { + var checksum = 11 - algorithms.reverseMultiplyAndSum(tin.split('').slice(0, 7).map(function (a) { + return parseInt(a, 10); + }), 8) % 11; + + if 
(checksum === 10) { + return parseInt(tin[7], 10) === 0; + } + + return checksum === parseInt(tin[7], 10); +} +/* + * sv-SE validation function + * (Personnummer or samordningsnummer, persons only) + * Checks validity of birth date and calls luhnCheck() to validate check (last) digit + */ + + +function svSeCheck(tin) { + // Make copy of TIN and normalize to two-digit year form + var tin_copy = tin.slice(0); + + if (tin.length > 11) { + tin_copy = tin_copy.slice(2); + } // Extract date of birth + + + var full_year = ''; + var month = tin_copy.slice(2, 4); + var day = parseInt(tin_copy.slice(4, 6), 10); + + if (tin.length > 11) { + full_year = tin.slice(0, 4); + } else { + full_year = tin.slice(0, 2); + + if (tin.length === 11 && day < 60) { + // Extract full year from centenarian symbol + // Should work just fine until year 10000 or so + var current_year = new Date().getFullYear().toString(); + var current_century = parseInt(current_year.slice(0, 2), 10); + current_year = parseInt(current_year, 10); + + if (tin[6] === '-') { + if (parseInt("".concat(current_century).concat(full_year), 10) > current_year) { + full_year = "".concat(current_century - 1).concat(full_year); + } else { + full_year = "".concat(current_century).concat(full_year); + } + } else { + full_year = "".concat(current_century - 1).concat(full_year); + + if (current_year - parseInt(full_year, 10) < 100) { + return false; + } + } + } + } // Normalize day and check date validity + + + if (day > 60) { + day -= 60; + } + + if (day < 10) { + day = "0".concat(day); + } + + var date = "".concat(full_year, "/").concat(month, "/").concat(day); + + if (date.length === 8) { + if (!isDate(date, 'YY/MM/DD')) { + return false; + } + } else if (!isDate(date, 'YYYY/MM/DD')) { + return false; + } + + return algorithms.luhnCheck(tin.replace(/\W/, '')); +} // Locale lookup objects + +/* + * Tax id regex formats for various locales + * + * Where not explicitly specified in DG-TAXUD document both + * uppercase and lowercase letters are acceptable. 
+ */ + + +var taxIdFormat = { + 'bg-BG': /^\d{10}$/, + 'cs-CZ': /^\d{6}\/{0,1}\d{3,4}$/, + 'de-AT': /^\d{9}$/, + 'de-DE': /^[1-9]\d{10}$/, + 'dk-DK': /^\d{6}-{0,1}\d{4}$/, + 'el-CY': /^[09]\d{7}[A-Z]$/, + 'el-GR': /^([0-4]|[7-9])\d{8}$/, + 'en-GB': /^\d{10}$|^(?!GB|NK|TN|ZZ)(?![DFIQUV])[A-Z](?![DFIQUVO])[A-Z]\d{6}[ABCD ]$/i, + 'en-IE': /^\d{7}[A-W][A-IW]{0,1}$/i, + 'en-US': /^\d{2}[- ]{0,1}\d{7}$/, + 'es-ES': /^(\d{0,8}|[XYZKLM]\d{7})[A-HJ-NP-TV-Z]$/i, + 'et-EE': /^[1-6]\d{6}(00[1-9]|0[1-9][0-9]|[1-6][0-9]{2}|70[0-9]|710)\d$/, + 'fi-FI': /^\d{6}[-+A]\d{3}[0-9A-FHJ-NPR-Y]$/i, + 'fr-BE': /^\d{11}$/, + 'fr-FR': /^[0-3]\d{12}$|^[0-3]\d\s\d{2}(\s\d{3}){3}$/, + // Conforms both to official spec and provided example + 'fr-LU': /^\d{13}$/, + 'hr-HR': /^\d{11}$/, + 'hu-HU': /^8\d{9}$/, + 'it-IT': /^[A-Z]{6}[L-NP-V0-9]{2}[A-EHLMPRST][L-NP-V0-9]{2}[A-ILMZ][L-NP-V0-9]{3}[A-Z]$/i, + 'lv-LV': /^\d{6}-{0,1}\d{5}$/, + // Conforms both to DG TAXUD spec and original research + 'mt-MT': /^\d{3,7}[APMGLHBZ]$|^([1-8])\1\d{7}$/i, + 'nl-NL': /^\d{9}$/, + 'pl-PL': /^\d{10,11}$/, + 'pt-BR': /(?:^\d{11}$)|(?:^\d{14}$)/, + 'pt-PT': /^\d{9}$/, + 'ro-RO': /^\d{13}$/, + 'sk-SK': /^\d{6}\/{0,1}\d{3,4}$/, + 'sl-SI': /^[1-9]\d{7}$/, + 'sv-SE': /^(\d{6}[-+]{0,1}\d{4}|(18|19|20)\d{6}[-+]{0,1}\d{4})$/ +}; // taxIdFormat locale aliases + +taxIdFormat['lb-LU'] = taxIdFormat['fr-LU']; +taxIdFormat['lt-LT'] = taxIdFormat['et-EE']; +taxIdFormat['nl-BE'] = taxIdFormat['fr-BE']; // Algorithmic tax id check functions for various locales + +var taxIdCheck = { + 'bg-BG': bgBgCheck, + 'cs-CZ': csCzCheck, + 'de-AT': deAtCheck, + 'de-DE': deDeCheck, + 'dk-DK': dkDkCheck, + 'el-CY': elCyCheck, + 'el-GR': elGrCheck, + 'en-IE': enIeCheck, + 'en-US': enUsCheck, + 'es-ES': esEsCheck, + 'et-EE': etEeCheck, + 'fi-FI': fiFiCheck, + 'fr-BE': frBeCheck, + 'fr-FR': frFrCheck, + 'fr-LU': frLuCheck, + 'hr-HR': hrHrCheck, + 'hu-HU': huHuCheck, + 'it-IT': itItCheck, + 'lv-LV': lvLvCheck, + 'mt-MT': mtMtCheck, + 'nl-NL': nlNlCheck, + 'pl-PL': plPlCheck, + 'pt-BR': ptBrCheck, + 'pt-PT': ptPtCheck, + 'ro-RO': roRoCheck, + 'sk-SK': skSkCheck, + 'sl-SI': slSiCheck, + 'sv-SE': svSeCheck +}; // taxIdCheck locale aliases + +taxIdCheck['lb-LU'] = taxIdCheck['fr-LU']; +taxIdCheck['lt-LT'] = taxIdCheck['et-EE']; +taxIdCheck['nl-BE'] = taxIdCheck['fr-BE']; // Regexes for locales where characters should be omitted before checking format + +var allsymbols = /[-\\\/!@#$%\^&\*\(\)\+\=\[\]]+/g; +var sanitizeRegexes = { + 'de-AT': allsymbols, + 'de-DE': /[\/\\]/g, + 'fr-BE': allsymbols +}; // sanitizeRegexes locale aliases + +sanitizeRegexes['nl-BE'] = sanitizeRegexes['fr-BE']; +/* + * Validator function + * Return true if the passed string is a valid tax identification number + * for the specified locale. + * Throw an error exception if the locale is not supported. + */ + +export default function isTaxID(str) { + var locale = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : 'en-US'; + assertString(str); // Copy TIN to avoid replacement if sanitized + + var strcopy = str.slice(0); + + if (locale in taxIdFormat) { + if (locale in sanitizeRegexes) { + strcopy = strcopy.replace(sanitizeRegexes[locale], ''); + } + + if (!taxIdFormat[locale].test(strcopy)) { + return false; + } + + if (locale in taxIdCheck) { + return taxIdCheck[locale](strcopy); + } // Fallthrough; not all locales have algorithmic checks + + + return true; + } + + throw new Error("Invalid locale '".concat(locale, "'")); +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isURL.js b/node_modules/validator/es/lib/isURL.js new file mode 100644 index 0000000..e73d25d --- /dev/null +++ b/node_modules/validator/es/lib/isURL.js @@ -0,0 +1,193 @@ +function _slicedToArray(arr, i) { return _arrayWithHoles(arr) || _iterableToArrayLimit(arr, i) || _unsupportedIterableToArray(arr, i) || _nonIterableRest(); } + +function _nonIterableRest() { throw new TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); } + +function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return Array.from(o); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); } + +function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) { arr2[i] = arr[i]; } return arr2; } + +function _iterableToArrayLimit(arr, i) { if (typeof Symbol === "undefined" || !(Symbol.iterator in Object(arr))) return; var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i["return"] != null) _i["return"](); } finally { if (_d) throw _e; } } return _arr; } + +function _arrayWithHoles(arr) { if (Array.isArray(arr)) return arr; } + +import assertString from './util/assertString'; +import isFQDN from './isFQDN'; +import isIP from './isIP'; +import merge from './util/merge'; +/* +options for isURL method + +require_protocol - if set as true isURL will return false if protocol is not present in the URL +require_valid_protocol - isURL will check if the URL's protocol is present in the protocols option +protocols - valid protocols can be modified with this option +require_host - if set as false isURL will not check if host is present in the URL +require_port - if set as true isURL will check if port is present in the URL +allow_protocol_relative_urls - if set as true protocol relative URLs will be allowed +validate_length - if set as false isURL will skip string length validation (IE maximum is 2083) + +*/ + +var default_url_options = { + protocols: ['http', 'https', 'ftp'], + require_tld: true, + require_protocol: false, + require_host: true, + require_port: false, + require_valid_protocol: true, + allow_underscores: false, + allow_trailing_dot: false, + allow_protocol_relative_urls: false, + allow_fragments: true, + allow_query_components: true, + validate_length: true +}; +var wrapped_ipv6 = /^\[([^\]]+)\](?::([0-9]+))?$/; + +function isRegExp(obj) { + 
return Object.prototype.toString.call(obj) === '[object RegExp]'; +} + +function checkHost(host, matches) { + for (var i = 0; i < matches.length; i++) { + var match = matches[i]; + + if (host === match || isRegExp(match) && match.test(host)) { + return true; + } + } + + return false; +} + +export default function isURL(url, options) { + assertString(url); + + if (!url || /[\s<>]/.test(url)) { + return false; + } + + if (url.indexOf('mailto:') === 0) { + return false; + } + + options = merge(options, default_url_options); + + if (options.validate_length && url.length >= 2083) { + return false; + } + + if (!options.allow_fragments && url.includes('#')) { + return false; + } + + if (!options.allow_query_components && (url.includes('?') || url.includes('&'))) { + return false; + } + + var protocol, auth, host, hostname, port, port_str, split, ipv6; + split = url.split('#'); + url = split.shift(); + split = url.split('?'); + url = split.shift(); + split = url.split('://'); + + if (split.length > 1) { + protocol = split.shift().toLowerCase(); + + if (options.require_valid_protocol && options.protocols.indexOf(protocol) === -1) { + return false; + } + } else if (options.require_protocol) { + return false; + } else if (url.substr(0, 2) === '//') { + if (!options.allow_protocol_relative_urls) { + return false; + } + + split[0] = url.substr(2); + } + + url = split.join('://'); + + if (url === '') { + return false; + } + + split = url.split('/'); + url = split.shift(); + + if (url === '' && !options.require_host) { + return true; + } + + split = url.split('@'); + + if (split.length > 1) { + if (options.disallow_auth) { + return false; + } + + if (split[0] === '') { + return false; + } + + auth = split.shift(); + + if (auth.indexOf(':') >= 0 && auth.split(':').length > 2) { + return false; + } + + var _auth$split = auth.split(':'), + _auth$split2 = _slicedToArray(_auth$split, 2), + user = _auth$split2[0], + password = _auth$split2[1]; + + if (user === '' && password === '') { + return false; + } + } + + hostname = split.join('@'); + port_str = null; + ipv6 = null; + var ipv6_match = hostname.match(wrapped_ipv6); + + if (ipv6_match) { + host = ''; + ipv6 = ipv6_match[1]; + port_str = ipv6_match[2] || null; + } else { + split = hostname.split(':'); + host = split.shift(); + + if (split.length) { + port_str = split.join(':'); + } + } + + if (port_str !== null && port_str.length > 0) { + port = parseInt(port_str, 10); + + if (!/^[0-9]+$/.test(port_str) || port <= 0 || port > 65535) { + return false; + } + } else if (options.require_port) { + return false; + } + + if (options.host_whitelist) { + return checkHost(host, options.host_whitelist); + } + + if (!isIP(host) && !isFQDN(host, options) && (!ipv6 || !isIP(ipv6, 6))) { + return false; + } + + host = host || ipv6; + + if (options.host_blacklist && checkHost(host, options.host_blacklist)) { + return false; + } + + return true; +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isUUID.js b/node_modules/validator/es/lib/isUUID.js new file mode 100644 index 0000000..33ea146 --- /dev/null +++ b/node_modules/validator/es/lib/isUUID.js @@ -0,0 +1,14 @@ +import assertString from './util/assertString'; +var uuid = { + 1: /^[0-9A-F]{8}-[0-9A-F]{4}-1[0-9A-F]{3}-[0-9A-F]{4}-[0-9A-F]{12}$/i, + 2: /^[0-9A-F]{8}-[0-9A-F]{4}-2[0-9A-F]{3}-[0-9A-F]{4}-[0-9A-F]{12}$/i, + 3: /^[0-9A-F]{8}-[0-9A-F]{4}-3[0-9A-F]{3}-[0-9A-F]{4}-[0-9A-F]{12}$/i, + 4: /^[0-9A-F]{8}-[0-9A-F]{4}-4[0-9A-F]{3}-[89AB][0-9A-F]{3}-[0-9A-F]{12}$/i, + 5: 
/^[0-9A-F]{8}-[0-9A-F]{4}-5[0-9A-F]{3}-[89AB][0-9A-F]{3}-[0-9A-F]{12}$/i, + all: /^[0-9A-F]{8}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{12}$/i +}; +export default function isUUID(str, version) { + assertString(str); + var pattern = uuid[![undefined, null].includes(version) ? version : 'all']; + return !!pattern && pattern.test(str); +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isUppercase.js b/node_modules/validator/es/lib/isUppercase.js new file mode 100644 index 0000000..fca8790 --- /dev/null +++ b/node_modules/validator/es/lib/isUppercase.js @@ -0,0 +1,5 @@ +import assertString from './util/assertString'; +export default function isUppercase(str) { + assertString(str); + return str === str.toUpperCase(); +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isVAT.js b/node_modules/validator/es/lib/isVAT.js new file mode 100644 index 0000000..a211274 --- /dev/null +++ b/node_modules/validator/es/lib/isVAT.js @@ -0,0 +1,16 @@ +import assertString from './util/assertString'; +export var vatMatchers = { + GB: /^GB((\d{3} \d{4} ([0-8][0-9]|9[0-6]))|(\d{9} \d{3})|(((GD[0-4])|(HA[5-9]))[0-9]{2}))$/, + IT: /^(IT)?[0-9]{11}$/, + NL: /^(NL)?[0-9]{9}B[0-9]{2}$/ +}; +export default function isVAT(str, countryCode) { + assertString(str); + assertString(countryCode); + + if (countryCode in vatMatchers) { + return vatMatchers[countryCode].test(str); + } + + throw new Error("Invalid country code: '".concat(countryCode, "'")); +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isVariableWidth.js b/node_modules/validator/es/lib/isVariableWidth.js new file mode 100644 index 0000000..890119e --- /dev/null +++ b/node_modules/validator/es/lib/isVariableWidth.js @@ -0,0 +1,7 @@ +import assertString from './util/assertString'; +import { fullWidth } from './isFullWidth'; +import { halfWidth } from './isHalfWidth'; +export default function isVariableWidth(str) { + assertString(str); + return fullWidth.test(str) && halfWidth.test(str); +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/isWhitelisted.js b/node_modules/validator/es/lib/isWhitelisted.js new file mode 100644 index 0000000..2cbb95c --- /dev/null +++ b/node_modules/validator/es/lib/isWhitelisted.js @@ -0,0 +1,12 @@ +import assertString from './util/assertString'; +export default function isWhitelisted(str, chars) { + assertString(str); + + for (var i = str.length - 1; i >= 0; i--) { + if (chars.indexOf(str[i]) === -1) { + return false; + } + } + + return true; +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/ltrim.js b/node_modules/validator/es/lib/ltrim.js new file mode 100644 index 0000000..0ca3abb --- /dev/null +++ b/node_modules/validator/es/lib/ltrim.js @@ -0,0 +1,7 @@ +import assertString from './util/assertString'; +export default function ltrim(str, chars) { + assertString(str); // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions#Escaping + + var pattern = chars ? 
new RegExp("^[".concat(chars.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'), "]+"), 'g') : /^\s+/g; + return str.replace(pattern, ''); +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/matches.js b/node_modules/validator/es/lib/matches.js new file mode 100644 index 0000000..7840b1c --- /dev/null +++ b/node_modules/validator/es/lib/matches.js @@ -0,0 +1,10 @@ +import assertString from './util/assertString'; +export default function matches(str, pattern, modifiers) { + assertString(str); + + if (Object.prototype.toString.call(pattern) !== '[object RegExp]') { + pattern = new RegExp(pattern, modifiers); + } + + return pattern.test(str); +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/normalizeEmail.js b/node_modules/validator/es/lib/normalizeEmail.js new file mode 100644 index 0000000..3563982 --- /dev/null +++ b/node_modules/validator/es/lib/normalizeEmail.js @@ -0,0 +1,138 @@ +import merge from './util/merge'; +var default_normalize_email_options = { + // The following options apply to all email addresses + // Lowercases the local part of the email address. + // Please note this may violate RFC 5321 as per http://stackoverflow.com/a/9808332/192024). + // The domain is always lowercased, as per RFC 1035 + all_lowercase: true, + // The following conversions are specific to GMail + // Lowercases the local part of the GMail address (known to be case-insensitive) + gmail_lowercase: true, + // Removes dots from the local part of the email address, as that's ignored by GMail + gmail_remove_dots: true, + // Removes the subaddress (e.g. "+foo") from the email address + gmail_remove_subaddress: true, + // Conversts the googlemail.com domain to gmail.com + gmail_convert_googlemaildotcom: true, + // The following conversions are specific to Outlook.com / Windows Live / Hotmail + // Lowercases the local part of the Outlook.com address (known to be case-insensitive) + outlookdotcom_lowercase: true, + // Removes the subaddress (e.g. "+foo") from the email address + outlookdotcom_remove_subaddress: true, + // The following conversions are specific to Yahoo + // Lowercases the local part of the Yahoo address (known to be case-insensitive) + yahoo_lowercase: true, + // Removes the subaddress (e.g. "-foo") from the email address + yahoo_remove_subaddress: true, + // The following conversions are specific to Yandex + // Lowercases the local part of the Yandex address (known to be case-insensitive) + yandex_lowercase: true, + // The following conversions are specific to iCloud + // Lowercases the local part of the iCloud address (known to be case-insensitive) + icloud_lowercase: true, + // Removes the subaddress (e.g. "+foo") from the email address + icloud_remove_subaddress: true +}; // List of domains used by iCloud + +var icloud_domains = ['icloud.com', 'me.com']; // List of domains used by Outlook.com and its predecessors +// This list is likely incomplete. 
+// Partial reference: +// https://blogs.office.com/2013/04/17/outlook-com-gets-two-step-verification-sign-in-by-alias-and-new-international-domains/ + +var outlookdotcom_domains = ['hotmail.at', 'hotmail.be', 'hotmail.ca', 'hotmail.cl', 'hotmail.co.il', 'hotmail.co.nz', 'hotmail.co.th', 'hotmail.co.uk', 'hotmail.com', 'hotmail.com.ar', 'hotmail.com.au', 'hotmail.com.br', 'hotmail.com.gr', 'hotmail.com.mx', 'hotmail.com.pe', 'hotmail.com.tr', 'hotmail.com.vn', 'hotmail.cz', 'hotmail.de', 'hotmail.dk', 'hotmail.es', 'hotmail.fr', 'hotmail.hu', 'hotmail.id', 'hotmail.ie', 'hotmail.in', 'hotmail.it', 'hotmail.jp', 'hotmail.kr', 'hotmail.lv', 'hotmail.my', 'hotmail.ph', 'hotmail.pt', 'hotmail.sa', 'hotmail.sg', 'hotmail.sk', 'live.be', 'live.co.uk', 'live.com', 'live.com.ar', 'live.com.mx', 'live.de', 'live.es', 'live.eu', 'live.fr', 'live.it', 'live.nl', 'msn.com', 'outlook.at', 'outlook.be', 'outlook.cl', 'outlook.co.il', 'outlook.co.nz', 'outlook.co.th', 'outlook.com', 'outlook.com.ar', 'outlook.com.au', 'outlook.com.br', 'outlook.com.gr', 'outlook.com.pe', 'outlook.com.tr', 'outlook.com.vn', 'outlook.cz', 'outlook.de', 'outlook.dk', 'outlook.es', 'outlook.fr', 'outlook.hu', 'outlook.id', 'outlook.ie', 'outlook.in', 'outlook.it', 'outlook.jp', 'outlook.kr', 'outlook.lv', 'outlook.my', 'outlook.ph', 'outlook.pt', 'outlook.sa', 'outlook.sg', 'outlook.sk', 'passport.com']; // List of domains used by Yahoo Mail +// This list is likely incomplete + +var yahoo_domains = ['rocketmail.com', 'yahoo.ca', 'yahoo.co.uk', 'yahoo.com', 'yahoo.de', 'yahoo.fr', 'yahoo.in', 'yahoo.it', 'ymail.com']; // List of domains used by yandex.ru + +var yandex_domains = ['yandex.ru', 'yandex.ua', 'yandex.kz', 'yandex.com', 'yandex.by', 'ya.ru']; // replace single dots, but not multiple consecutive dots + +function dotsReplacer(match) { + if (match.length > 1) { + return match; + } + + return ''; +} + +export default function normalizeEmail(email, options) { + options = merge(options, default_normalize_email_options); + var raw_parts = email.split('@'); + var domain = raw_parts.pop(); + var user = raw_parts.join('@'); + var parts = [user, domain]; // The domain is always lowercased, as it's case-insensitive per RFC 1035 + + parts[1] = parts[1].toLowerCase(); + + if (parts[1] === 'gmail.com' || parts[1] === 'googlemail.com') { + // Address is GMail + if (options.gmail_remove_subaddress) { + parts[0] = parts[0].split('+')[0]; + } + + if (options.gmail_remove_dots) { + // this does not replace consecutive dots like example..email@gmail.com + parts[0] = parts[0].replace(/\.+/g, dotsReplacer); + } + + if (!parts[0].length) { + return false; + } + + if (options.all_lowercase || options.gmail_lowercase) { + parts[0] = parts[0].toLowerCase(); + } + + parts[1] = options.gmail_convert_googlemaildotcom ? 
'gmail.com' : parts[1]; + } else if (icloud_domains.indexOf(parts[1]) >= 0) { + // Address is iCloud + if (options.icloud_remove_subaddress) { + parts[0] = parts[0].split('+')[0]; + } + + if (!parts[0].length) { + return false; + } + + if (options.all_lowercase || options.icloud_lowercase) { + parts[0] = parts[0].toLowerCase(); + } + } else if (outlookdotcom_domains.indexOf(parts[1]) >= 0) { + // Address is Outlook.com + if (options.outlookdotcom_remove_subaddress) { + parts[0] = parts[0].split('+')[0]; + } + + if (!parts[0].length) { + return false; + } + + if (options.all_lowercase || options.outlookdotcom_lowercase) { + parts[0] = parts[0].toLowerCase(); + } + } else if (yahoo_domains.indexOf(parts[1]) >= 0) { + // Address is Yahoo + if (options.yahoo_remove_subaddress) { + var components = parts[0].split('-'); + parts[0] = components.length > 1 ? components.slice(0, -1).join('-') : components[0]; + } + + if (!parts[0].length) { + return false; + } + + if (options.all_lowercase || options.yahoo_lowercase) { + parts[0] = parts[0].toLowerCase(); + } + } else if (yandex_domains.indexOf(parts[1]) >= 0) { + if (options.all_lowercase || options.yandex_lowercase) { + parts[0] = parts[0].toLowerCase(); + } + + parts[1] = 'yandex.ru'; // all yandex domains are equal, 1st preferred + } else if (options.all_lowercase) { + // Any other address + parts[0] = parts[0].toLowerCase(); + } + + return parts.join('@'); +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/rtrim.js b/node_modules/validator/es/lib/rtrim.js new file mode 100644 index 0000000..a02f5c4 --- /dev/null +++ b/node_modules/validator/es/lib/rtrim.js @@ -0,0 +1,19 @@ +import assertString from './util/assertString'; +export default function rtrim(str, chars) { + assertString(str); + + if (chars) { + // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions#Escaping + var pattern = new RegExp("[".concat(chars.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'), "]+$"), 'g'); + return str.replace(pattern, ''); + } // Use a faster and more safe than regex trim method https://blog.stevenlevithan.com/archives/faster-trim-javascript + + + var strIndex = str.length - 1; + + while (/\s/.test(str.charAt(strIndex))) { + strIndex -= 1; + } + + return str.slice(0, strIndex + 1); +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/stripLow.js b/node_modules/validator/es/lib/stripLow.js new file mode 100644 index 0000000..c798425 --- /dev/null +++ b/node_modules/validator/es/lib/stripLow.js @@ -0,0 +1,7 @@ +import assertString from './util/assertString'; +import blacklist from './blacklist'; +export default function stripLow(str, keep_new_lines) { + assertString(str); + var chars = keep_new_lines ? 
'\\x00-\\x09\\x0B\\x0C\\x0E-\\x1F\\x7F' : '\\x00-\\x1F\\x7F'; + return blacklist(str, chars); +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/toBoolean.js b/node_modules/validator/es/lib/toBoolean.js new file mode 100644 index 0000000..25e1eac --- /dev/null +++ b/node_modules/validator/es/lib/toBoolean.js @@ -0,0 +1,10 @@ +import assertString from './util/assertString'; +export default function toBoolean(str, strict) { + assertString(str); + + if (strict) { + return str === '1' || /^true$/i.test(str); + } + + return str !== '0' && !/^false$/i.test(str) && str !== ''; +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/toDate.js b/node_modules/validator/es/lib/toDate.js new file mode 100644 index 0000000..62422a3 --- /dev/null +++ b/node_modules/validator/es/lib/toDate.js @@ -0,0 +1,6 @@ +import assertString from './util/assertString'; +export default function toDate(date) { + assertString(date); + date = Date.parse(date); + return !isNaN(date) ? new Date(date) : null; +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/toFloat.js b/node_modules/validator/es/lib/toFloat.js new file mode 100644 index 0000000..f21163d --- /dev/null +++ b/node_modules/validator/es/lib/toFloat.js @@ -0,0 +1,5 @@ +import isFloat from './isFloat'; +export default function toFloat(str) { + if (!isFloat(str)) return NaN; + return parseFloat(str); +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/toInt.js b/node_modules/validator/es/lib/toInt.js new file mode 100644 index 0000000..22d566e --- /dev/null +++ b/node_modules/validator/es/lib/toInt.js @@ -0,0 +1,5 @@ +import assertString from './util/assertString'; +export default function toInt(str, radix) { + assertString(str); + return parseInt(str, radix || 10); +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/trim.js b/node_modules/validator/es/lib/trim.js new file mode 100644 index 0000000..b9b8fa0 --- /dev/null +++ b/node_modules/validator/es/lib/trim.js @@ -0,0 +1,5 @@ +import rtrim from './rtrim'; +import ltrim from './ltrim'; +export default function trim(str, chars) { + return rtrim(ltrim(str, chars), chars); +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/unescape.js b/node_modules/validator/es/lib/unescape.js new file mode 100644 index 0000000..8ff45f5 --- /dev/null +++ b/node_modules/validator/es/lib/unescape.js @@ -0,0 +1,7 @@ +import assertString from './util/assertString'; +export default function unescape(str) { + assertString(str); + return str.replace(/&quot;/g, '"').replace(/&#x27;/g, "'").replace(/&lt;/g, '<').replace(/&gt;/g, '>').replace(/&#x2F;/g, '/').replace(/&#x5C;/g, '\\').replace(/&#96;/g, '`').replace(/&amp;/g, '&'); // & replacement has to be the last one to prevent + // bugs with intermediate strings containing escape sequences + // See: https://github.com/validatorjs/validator.js/issues/1827 +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/util/algorithms.js b/node_modules/validator/es/lib/util/algorithms.js new file mode 100644 index 0000000..eca8d6d --- /dev/null +++ b/node_modules/validator/es/lib/util/algorithms.js @@ -0,0 +1,88 @@ +/** + * Algorithmic validation functions + * May be used as is or implemented in the workflow of other validators. + */ + +/* + * ISO 7064 validation function + * Called with a string of numbers (incl. check digit) + * to validate according to ISO 7064 (MOD 11, 10).
+ */ +export function iso7064Check(str) { + var checkvalue = 10; + + for (var i = 0; i < str.length - 1; i++) { + checkvalue = (parseInt(str[i], 10) + checkvalue) % 10 === 0 ? 10 * 2 % 11 : (parseInt(str[i], 10) + checkvalue) % 10 * 2 % 11; + } + + checkvalue = checkvalue === 1 ? 0 : 11 - checkvalue; + return checkvalue === parseInt(str[10], 10); +} +/* + * Luhn (mod 10) validation function + * Called with a string of numbers (incl. check digit) + * to validate according to the Luhn algorithm. + */ + +export function luhnCheck(str) { + var checksum = 0; + var second = false; + + for (var i = str.length - 1; i >= 0; i--) { + if (second) { + var product = parseInt(str[i], 10) * 2; + + if (product > 9) { + // sum digits of product and add to checksum + checksum += product.toString().split('').map(function (a) { + return parseInt(a, 10); + }).reduce(function (a, b) { + return a + b; + }, 0); + } else { + checksum += product; + } + } else { + checksum += parseInt(str[i], 10); + } + + second = !second; + } + + return checksum % 10 === 0; +} +/* + * Reverse TIN multiplication and summation helper function + * Called with an array of single-digit integers and a base multiplier + * to calculate the sum of the digits multiplied in reverse. + * Normally used in variations of MOD 11 algorithmic checks. + */ + +export function reverseMultiplyAndSum(digits, base) { + var total = 0; + + for (var i = 0; i < digits.length; i++) { + total += digits[i] * (base - i); + } + + return total; +} +/* + * Verhoeff validation helper function + * Called with a string of numbers + * to validate according to the Verhoeff algorithm. + */ + +export function verhoeffCheck(str) { + var d_table = [[0, 1, 2, 3, 4, 5, 6, 7, 8, 9], [1, 2, 3, 4, 0, 6, 7, 8, 9, 5], [2, 3, 4, 0, 1, 7, 8, 9, 5, 6], [3, 4, 0, 1, 2, 8, 9, 5, 6, 7], [4, 0, 1, 2, 3, 9, 5, 6, 7, 8], [5, 9, 8, 7, 6, 0, 4, 3, 2, 1], [6, 5, 9, 8, 7, 1, 0, 4, 3, 2], [7, 6, 5, 9, 8, 2, 1, 0, 4, 3], [8, 7, 6, 5, 9, 3, 2, 1, 0, 4], [9, 8, 7, 6, 5, 4, 3, 2, 1, 0]]; + var p_table = [[0, 1, 2, 3, 4, 5, 6, 7, 8, 9], [1, 5, 7, 6, 2, 8, 3, 0, 9, 4], [5, 8, 0, 3, 7, 9, 6, 1, 4, 2], [8, 9, 1, 6, 0, 4, 3, 5, 2, 7], [9, 4, 5, 3, 1, 2, 6, 8, 7, 0], [4, 2, 8, 6, 5, 7, 3, 9, 0, 1], [2, 7, 9, 3, 8, 0, 6, 4, 1, 5], [7, 0, 4, 6, 9, 1, 3, 2, 5, 8]]; // Copy (to prevent replacement) and reverse + + var str_copy = str.split('').reverse().join(''); + var checksum = 0; + + for (var i = 0; i < str_copy.length; i++) { + checksum = d_table[checksum][p_table[i % 8][parseInt(str_copy[i], 10)]]; + } + + return checksum === 0; +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/util/assertString.js b/node_modules/validator/es/lib/util/assertString.js new file mode 100644 index 0000000..f7eced5 --- /dev/null +++ b/node_modules/validator/es/lib/util/assertString.js @@ -0,0 +1,12 @@ +function _typeof(obj) { "@babel/helpers - typeof"; if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? 
"symbol" : typeof obj; }; } return _typeof(obj); } + +export default function assertString(input) { + var isString = typeof input === 'string' || input instanceof String; + + if (!isString) { + var invalidType = _typeof(input); + + if (input === null) invalidType = 'null';else if (invalidType === 'object') invalidType = input.constructor.name; + throw new TypeError("Expected a string but received a ".concat(invalidType)); + } +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/util/includes.js b/node_modules/validator/es/lib/util/includes.js new file mode 100644 index 0000000..b01c692 --- /dev/null +++ b/node_modules/validator/es/lib/util/includes.js @@ -0,0 +1,7 @@ +var includes = function includes(arr, val) { + return arr.some(function (arrVal) { + return val === arrVal; + }); +}; + +export default includes; \ No newline at end of file diff --git a/node_modules/validator/es/lib/util/merge.js b/node_modules/validator/es/lib/util/merge.js new file mode 100644 index 0000000..0d1f699 --- /dev/null +++ b/node_modules/validator/es/lib/util/merge.js @@ -0,0 +1,12 @@ +export default function merge() { + var obj = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {}; + var defaults = arguments.length > 1 ? arguments[1] : undefined; + + for (var key in defaults) { + if (typeof obj[key] === 'undefined') { + obj[key] = defaults[key]; + } + } + + return obj; +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/util/multilineRegex.js b/node_modules/validator/es/lib/util/multilineRegex.js new file mode 100644 index 0000000..fed25a3 --- /dev/null +++ b/node_modules/validator/es/lib/util/multilineRegex.js @@ -0,0 +1,12 @@ +/** + * Build RegExp object from an array + * of multiple/multi-line regexp parts + * + * @param {string[]} parts + * @param {string} flags + * @return {object} - RegExp object + */ +export default function multilineRegexp(parts, flags) { + var regexpAsStringLiteral = parts.join(''); + return new RegExp(regexpAsStringLiteral, flags); +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/util/toString.js b/node_modules/validator/es/lib/util/toString.js new file mode 100644 index 0000000..f483fa4 --- /dev/null +++ b/node_modules/validator/es/lib/util/toString.js @@ -0,0 +1,15 @@ +function _typeof(obj) { "@babel/helpers - typeof"; if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? 
"symbol" : typeof obj; }; } return _typeof(obj); } + +export default function toString(input) { + if (_typeof(input) === 'object' && input !== null) { + if (typeof input.toString === 'function') { + input = input.toString(); + } else { + input = '[object Object]'; + } + } else if (input === null || typeof input === 'undefined' || isNaN(input) && !input.length) { + input = ''; + } + + return String(input); +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/util/typeOf.js b/node_modules/validator/es/lib/util/typeOf.js new file mode 100644 index 0000000..b2ce41a --- /dev/null +++ b/node_modules/validator/es/lib/util/typeOf.js @@ -0,0 +1,10 @@ +/** + * Better way to handle type checking + * null, {}, array and date are objects, which confuses + */ +export default function typeOf(input) { + var rawObject = Object.prototype.toString.call(input).toLowerCase(); + var typeOfRegex = /\[object (.*)]/g; + var type = typeOfRegex.exec(rawObject)[1]; + return type; +} \ No newline at end of file diff --git a/node_modules/validator/es/lib/whitelist.js b/node_modules/validator/es/lib/whitelist.js new file mode 100644 index 0000000..244881b --- /dev/null +++ b/node_modules/validator/es/lib/whitelist.js @@ -0,0 +1,5 @@ +import assertString from './util/assertString'; +export default function whitelist(str, chars) { + assertString(str); + return str.replace(new RegExp("[^".concat(chars, "]+"), 'g'), ''); +} \ No newline at end of file diff --git a/node_modules/validator/index.js b/node_modules/validator/index.js new file mode 100644 index 0000000..36e33a0 --- /dev/null +++ b/node_modules/validator/index.js @@ -0,0 +1,309 @@ +"use strict"; + +function _typeof(obj) { "@babel/helpers - typeof"; if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? 
"symbol" : typeof obj; }; } return _typeof(obj); } + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _toDate = _interopRequireDefault(require("./lib/toDate")); + +var _toFloat = _interopRequireDefault(require("./lib/toFloat")); + +var _toInt = _interopRequireDefault(require("./lib/toInt")); + +var _toBoolean = _interopRequireDefault(require("./lib/toBoolean")); + +var _equals = _interopRequireDefault(require("./lib/equals")); + +var _contains = _interopRequireDefault(require("./lib/contains")); + +var _matches = _interopRequireDefault(require("./lib/matches")); + +var _isEmail = _interopRequireDefault(require("./lib/isEmail")); + +var _isURL = _interopRequireDefault(require("./lib/isURL")); + +var _isMACAddress = _interopRequireDefault(require("./lib/isMACAddress")); + +var _isIP = _interopRequireDefault(require("./lib/isIP")); + +var _isIPRange = _interopRequireDefault(require("./lib/isIPRange")); + +var _isFQDN = _interopRequireDefault(require("./lib/isFQDN")); + +var _isDate = _interopRequireDefault(require("./lib/isDate")); + +var _isBoolean = _interopRequireDefault(require("./lib/isBoolean")); + +var _isLocale = _interopRequireDefault(require("./lib/isLocale")); + +var _isAlpha = _interopRequireWildcard(require("./lib/isAlpha")); + +var _isAlphanumeric = _interopRequireWildcard(require("./lib/isAlphanumeric")); + +var _isNumeric = _interopRequireDefault(require("./lib/isNumeric")); + +var _isPassportNumber = _interopRequireDefault(require("./lib/isPassportNumber")); + +var _isPort = _interopRequireDefault(require("./lib/isPort")); + +var _isLowercase = _interopRequireDefault(require("./lib/isLowercase")); + +var _isUppercase = _interopRequireDefault(require("./lib/isUppercase")); + +var _isIMEI = _interopRequireDefault(require("./lib/isIMEI")); + +var _isAscii = _interopRequireDefault(require("./lib/isAscii")); + +var _isFullWidth = _interopRequireDefault(require("./lib/isFullWidth")); + +var _isHalfWidth = _interopRequireDefault(require("./lib/isHalfWidth")); + +var _isVariableWidth = _interopRequireDefault(require("./lib/isVariableWidth")); + +var _isMultibyte = _interopRequireDefault(require("./lib/isMultibyte")); + +var _isSemVer = _interopRequireDefault(require("./lib/isSemVer")); + +var _isSurrogatePair = _interopRequireDefault(require("./lib/isSurrogatePair")); + +var _isInt = _interopRequireDefault(require("./lib/isInt")); + +var _isFloat = _interopRequireWildcard(require("./lib/isFloat")); + +var _isDecimal = _interopRequireDefault(require("./lib/isDecimal")); + +var _isHexadecimal = _interopRequireDefault(require("./lib/isHexadecimal")); + +var _isOctal = _interopRequireDefault(require("./lib/isOctal")); + +var _isDivisibleBy = _interopRequireDefault(require("./lib/isDivisibleBy")); + +var _isHexColor = _interopRequireDefault(require("./lib/isHexColor")); + +var _isRgbColor = _interopRequireDefault(require("./lib/isRgbColor")); + +var _isHSL = _interopRequireDefault(require("./lib/isHSL")); + +var _isISRC = _interopRequireDefault(require("./lib/isISRC")); + +var _isIBAN = _interopRequireWildcard(require("./lib/isIBAN")); + +var _isBIC = _interopRequireDefault(require("./lib/isBIC")); + +var _isMD = _interopRequireDefault(require("./lib/isMD5")); + +var _isHash = _interopRequireDefault(require("./lib/isHash")); + +var _isJWT = _interopRequireDefault(require("./lib/isJWT")); + +var _isJSON = _interopRequireDefault(require("./lib/isJSON")); + +var _isEmpty = _interopRequireDefault(require("./lib/isEmpty")); + +var _isLength = 
_interopRequireDefault(require("./lib/isLength")); + +var _isByteLength = _interopRequireDefault(require("./lib/isByteLength")); + +var _isUUID = _interopRequireDefault(require("./lib/isUUID")); + +var _isMongoId = _interopRequireDefault(require("./lib/isMongoId")); + +var _isAfter = _interopRequireDefault(require("./lib/isAfter")); + +var _isBefore = _interopRequireDefault(require("./lib/isBefore")); + +var _isIn = _interopRequireDefault(require("./lib/isIn")); + +var _isCreditCard = _interopRequireDefault(require("./lib/isCreditCard")); + +var _isIdentityCard = _interopRequireDefault(require("./lib/isIdentityCard")); + +var _isEAN = _interopRequireDefault(require("./lib/isEAN")); + +var _isISIN = _interopRequireDefault(require("./lib/isISIN")); + +var _isISBN = _interopRequireDefault(require("./lib/isISBN")); + +var _isISSN = _interopRequireDefault(require("./lib/isISSN")); + +var _isTaxID = _interopRequireDefault(require("./lib/isTaxID")); + +var _isMobilePhone = _interopRequireWildcard(require("./lib/isMobilePhone")); + +var _isEthereumAddress = _interopRequireDefault(require("./lib/isEthereumAddress")); + +var _isCurrency = _interopRequireDefault(require("./lib/isCurrency")); + +var _isBtcAddress = _interopRequireDefault(require("./lib/isBtcAddress")); + +var _isISO = _interopRequireDefault(require("./lib/isISO8601")); + +var _isRFC = _interopRequireDefault(require("./lib/isRFC3339")); + +var _isISO31661Alpha = _interopRequireDefault(require("./lib/isISO31661Alpha2")); + +var _isISO31661Alpha2 = _interopRequireDefault(require("./lib/isISO31661Alpha3")); + +var _isISO2 = _interopRequireDefault(require("./lib/isISO4217")); + +var _isBase = _interopRequireDefault(require("./lib/isBase32")); + +var _isBase2 = _interopRequireDefault(require("./lib/isBase58")); + +var _isBase3 = _interopRequireDefault(require("./lib/isBase64")); + +var _isDataURI = _interopRequireDefault(require("./lib/isDataURI")); + +var _isMagnetURI = _interopRequireDefault(require("./lib/isMagnetURI")); + +var _isMimeType = _interopRequireDefault(require("./lib/isMimeType")); + +var _isLatLong = _interopRequireDefault(require("./lib/isLatLong")); + +var _isPostalCode = _interopRequireWildcard(require("./lib/isPostalCode")); + +var _ltrim = _interopRequireDefault(require("./lib/ltrim")); + +var _rtrim = _interopRequireDefault(require("./lib/rtrim")); + +var _trim = _interopRequireDefault(require("./lib/trim")); + +var _escape = _interopRequireDefault(require("./lib/escape")); + +var _unescape = _interopRequireDefault(require("./lib/unescape")); + +var _stripLow = _interopRequireDefault(require("./lib/stripLow")); + +var _whitelist = _interopRequireDefault(require("./lib/whitelist")); + +var _blacklist = _interopRequireDefault(require("./lib/blacklist")); + +var _isWhitelisted = _interopRequireDefault(require("./lib/isWhitelisted")); + +var _normalizeEmail = _interopRequireDefault(require("./lib/normalizeEmail")); + +var _isSlug = _interopRequireDefault(require("./lib/isSlug")); + +var _isLicensePlate = _interopRequireDefault(require("./lib/isLicensePlate")); + +var _isStrongPassword = _interopRequireDefault(require("./lib/isStrongPassword")); + +var _isVAT = _interopRequireDefault(require("./lib/isVAT")); + +function _getRequireWildcardCache() { if (typeof WeakMap !== "function") return null; var cache = new WeakMap(); _getRequireWildcardCache = function _getRequireWildcardCache() { return cache; }; return cache; } + +function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } if (obj === null || 
_typeof(obj) !== "object" && typeof obj !== "function") { return { default: obj }; } var cache = _getRequireWildcardCache(); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } newObj.default = obj; if (cache) { cache.set(obj, newObj); } return newObj; } + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +var version = '13.7.0'; +var validator = { + version: version, + toDate: _toDate.default, + toFloat: _toFloat.default, + toInt: _toInt.default, + toBoolean: _toBoolean.default, + equals: _equals.default, + contains: _contains.default, + matches: _matches.default, + isEmail: _isEmail.default, + isURL: _isURL.default, + isMACAddress: _isMACAddress.default, + isIP: _isIP.default, + isIPRange: _isIPRange.default, + isFQDN: _isFQDN.default, + isBoolean: _isBoolean.default, + isIBAN: _isIBAN.default, + isBIC: _isBIC.default, + isAlpha: _isAlpha.default, + isAlphaLocales: _isAlpha.locales, + isAlphanumeric: _isAlphanumeric.default, + isAlphanumericLocales: _isAlphanumeric.locales, + isNumeric: _isNumeric.default, + isPassportNumber: _isPassportNumber.default, + isPort: _isPort.default, + isLowercase: _isLowercase.default, + isUppercase: _isUppercase.default, + isAscii: _isAscii.default, + isFullWidth: _isFullWidth.default, + isHalfWidth: _isHalfWidth.default, + isVariableWidth: _isVariableWidth.default, + isMultibyte: _isMultibyte.default, + isSemVer: _isSemVer.default, + isSurrogatePair: _isSurrogatePair.default, + isInt: _isInt.default, + isIMEI: _isIMEI.default, + isFloat: _isFloat.default, + isFloatLocales: _isFloat.locales, + isDecimal: _isDecimal.default, + isHexadecimal: _isHexadecimal.default, + isOctal: _isOctal.default, + isDivisibleBy: _isDivisibleBy.default, + isHexColor: _isHexColor.default, + isRgbColor: _isRgbColor.default, + isHSL: _isHSL.default, + isISRC: _isISRC.default, + isMD5: _isMD.default, + isHash: _isHash.default, + isJWT: _isJWT.default, + isJSON: _isJSON.default, + isEmpty: _isEmpty.default, + isLength: _isLength.default, + isLocale: _isLocale.default, + isByteLength: _isByteLength.default, + isUUID: _isUUID.default, + isMongoId: _isMongoId.default, + isAfter: _isAfter.default, + isBefore: _isBefore.default, + isIn: _isIn.default, + isCreditCard: _isCreditCard.default, + isIdentityCard: _isIdentityCard.default, + isEAN: _isEAN.default, + isISIN: _isISIN.default, + isISBN: _isISBN.default, + isISSN: _isISSN.default, + isMobilePhone: _isMobilePhone.default, + isMobilePhoneLocales: _isMobilePhone.locales, + isPostalCode: _isPostalCode.default, + isPostalCodeLocales: _isPostalCode.locales, + isEthereumAddress: _isEthereumAddress.default, + isCurrency: _isCurrency.default, + isBtcAddress: _isBtcAddress.default, + isISO8601: _isISO.default, + isRFC3339: _isRFC.default, + isISO31661Alpha2: _isISO31661Alpha.default, + isISO31661Alpha3: _isISO31661Alpha2.default, + isISO4217: _isISO2.default, + isBase32: _isBase.default, + isBase58: _isBase2.default, + isBase64: _isBase3.default, + isDataURI: _isDataURI.default, + isMagnetURI: _isMagnetURI.default, + isMimeType: _isMimeType.default, + isLatLong: _isLatLong.default, + ltrim: _ltrim.default, + 
rtrim: _rtrim.default, + trim: _trim.default, + escape: _escape.default, + unescape: _unescape.default, + stripLow: _stripLow.default, + whitelist: _whitelist.default, + blacklist: _blacklist.default, + isWhitelisted: _isWhitelisted.default, + normalizeEmail: _normalizeEmail.default, + toString: toString, + isSlug: _isSlug.default, + isStrongPassword: _isStrongPassword.default, + isTaxID: _isTaxID.default, + isDate: _isDate.default, + isLicensePlate: _isLicensePlate.default, + isVAT: _isVAT.default, + ibanLocales: _isIBAN.locales +}; +var _default = validator; +exports.default = _default; +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/alpha.js b/node_modules/validator/lib/alpha.js new file mode 100644 index 0000000..7dacc73 --- /dev/null +++ b/node_modules/validator/lib/alpha.js @@ -0,0 +1,137 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.commaDecimal = exports.dotDecimal = exports.farsiLocales = exports.arabicLocales = exports.englishLocales = exports.decimal = exports.alphanumeric = exports.alpha = void 0; +var alpha = { + 'en-US': /^[A-Z]+$/i, + 'az-AZ': /^[A-VXYZÇƏĞİıÖŞÜ]+$/i, + 'bg-BG': /^[А-Я]+$/i, + 'cs-CZ': /^[A-ZÁČĎÉĚÍŇÓŘŠŤÚŮÝŽ]+$/i, + 'da-DK': /^[A-ZÆØÅ]+$/i, + 'de-DE': /^[A-ZÄÖÜß]+$/i, + 'el-GR': /^[Α-ώ]+$/i, + 'es-ES': /^[A-ZÁÉÍÑÓÚÜ]+$/i, + 'fa-IR': /^[ابپتثجچحخدذرزژسشصضطظعغفقکگلمنوهی]+$/i, + 'fi-FI': /^[A-ZÅÄÖ]+$/i, + 'fr-FR': /^[A-ZÀÂÆÇÉÈÊËÏÎÔŒÙÛÜŸ]+$/i, + 'it-IT': /^[A-ZÀÉÈÌÎÓÒÙ]+$/i, + 'nb-NO': /^[A-ZÆØÅ]+$/i, + 'nl-NL': /^[A-ZÁÉËÏÓÖÜÚ]+$/i, + 'nn-NO': /^[A-ZÆØÅ]+$/i, + 'hu-HU': /^[A-ZÁÉÍÓÖŐÚÜŰ]+$/i, + 'pl-PL': /^[A-ZĄĆĘŚŁŃÓŻŹ]+$/i, + 'pt-PT': /^[A-ZÃÁÀÂÄÇÉÊËÍÏÕÓÔÖÚÜ]+$/i, + 'ru-RU': /^[А-ЯЁ]+$/i, + 'sl-SI': /^[A-ZČĆĐŠŽ]+$/i, + 'sk-SK': /^[A-ZÁČĎÉÍŇÓŠŤÚÝŽĹŔĽÄÔ]+$/i, + 'sr-RS@latin': /^[A-ZČĆŽŠĐ]+$/i, + 'sr-RS': /^[А-ЯЂЈЉЊЋЏ]+$/i, + 'sv-SE': /^[A-ZÅÄÖ]+$/i, + 'th-TH': /^[ก-๐\s]+$/i, + 'tr-TR': /^[A-ZÇĞİıÖŞÜ]+$/i, + 'uk-UA': /^[А-ЩЬЮЯЄIЇҐі]+$/i, + 'vi-VN': /^[A-ZÀÁẠẢÃÂẦẤẬẨẪĂẰẮẶẲẴĐÈÉẸẺẼÊỀẾỆỂỄÌÍỊỈĨÒÓỌỎÕÔỒỐỘỔỖƠỜỚỢỞỠÙÚỤỦŨƯỪỨỰỬỮỲÝỴỶỸ]+$/i, + 'ku-IQ': /^[ئابپتجچحخدرڕزژسشعغفڤقکگلڵمنوۆھەیێيطؤثآإأكضصةظذ]+$/i, + ar: /^[ءآأؤإئابةتثجحخدذرزسشصضطظعغفقكلمنهوىيًٌٍَُِّْٰ]+$/, + he: /^[א-ת]+$/, + fa: /^['آاءأؤئبپتثجچحخدذرزژسشصضطظعغفقکگلمنوهةی']+$/i, + 'hi-IN': /^[\u0900-\u0961]+[\u0972-\u097F]*$/i +}; +exports.alpha = alpha; +var alphanumeric = { + 'en-US': /^[0-9A-Z]+$/i, + 'az-AZ': /^[0-9A-VXYZÇƏĞİıÖŞÜ]+$/i, + 'bg-BG': /^[0-9А-Я]+$/i, + 'cs-CZ': /^[0-9A-ZÁČĎÉĚÍŇÓŘŠŤÚŮÝŽ]+$/i, + 'da-DK': /^[0-9A-ZÆØÅ]+$/i, + 'de-DE': /^[0-9A-ZÄÖÜß]+$/i, + 'el-GR': /^[0-9Α-ω]+$/i, + 'es-ES': /^[0-9A-ZÁÉÍÑÓÚÜ]+$/i, + 'fi-FI': /^[0-9A-ZÅÄÖ]+$/i, + 'fr-FR': /^[0-9A-ZÀÂÆÇÉÈÊËÏÎÔŒÙÛÜŸ]+$/i, + 'it-IT': /^[0-9A-ZÀÉÈÌÎÓÒÙ]+$/i, + 'hu-HU': /^[0-9A-ZÁÉÍÓÖŐÚÜŰ]+$/i, + 'nb-NO': /^[0-9A-ZÆØÅ]+$/i, + 'nl-NL': /^[0-9A-ZÁÉËÏÓÖÜÚ]+$/i, + 'nn-NO': /^[0-9A-ZÆØÅ]+$/i, + 'pl-PL': /^[0-9A-ZĄĆĘŚŁŃÓŻŹ]+$/i, + 'pt-PT': /^[0-9A-ZÃÁÀÂÄÇÉÊËÍÏÕÓÔÖÚÜ]+$/i, + 'ru-RU': /^[0-9А-ЯЁ]+$/i, + 'sl-SI': /^[0-9A-ZČĆĐŠŽ]+$/i, + 'sk-SK': /^[0-9A-ZÁČĎÉÍŇÓŠŤÚÝŽĹŔĽÄÔ]+$/i, + 'sr-RS@latin': /^[0-9A-ZČĆŽŠĐ]+$/i, + 'sr-RS': /^[0-9А-ЯЂЈЉЊЋЏ]+$/i, + 'sv-SE': /^[0-9A-ZÅÄÖ]+$/i, + 'th-TH': /^[ก-๙\s]+$/i, + 'tr-TR': /^[0-9A-ZÇĞİıÖŞÜ]+$/i, + 'uk-UA': /^[0-9А-ЩЬЮЯЄIЇҐі]+$/i, + 'ku-IQ': /^[٠١٢٣٤٥٦٧٨٩0-9ئابپتجچحخدرڕزژسشعغفڤقکگلڵمنوۆھەیێيطؤثآإأكضصةظذ]+$/i, + 'vi-VN': /^[0-9A-ZÀÁẠẢÃÂẦẤẬẨẪĂẰẮẶẲẴĐÈÉẸẺẼÊỀẾỆỂỄÌÍỊỈĨÒÓỌỎÕÔỒỐỘỔỖƠỜỚỢỞỠÙÚỤỦŨƯỪỨỰỬỮỲÝỴỶỸ]+$/i, + ar: /^[٠١٢٣٤٥٦٧٨٩0-9ءآأؤإئابةتثجحخدذرزسشصضطظعغفقكلمنهوىيًٌٍَُِّْٰ]+$/, 
+ he: /^[0-9א-ת]+$/, + fa: /^['0-9آاءأؤئبپتثجچحخدذرزژسشصضطظعغفقکگلمنوهةی۱۲۳۴۵۶۷۸۹۰']+$/i, + 'hi-IN': /^[\u0900-\u0963]+[\u0966-\u097F]*$/i +}; +exports.alphanumeric = alphanumeric; +var decimal = { + 'en-US': '.', + ar: '٫' +}; +exports.decimal = decimal; +var englishLocales = ['AU', 'GB', 'HK', 'IN', 'NZ', 'ZA', 'ZM']; +exports.englishLocales = englishLocales; + +for (var locale, i = 0; i < englishLocales.length; i++) { + locale = "en-".concat(englishLocales[i]); + alpha[locale] = alpha['en-US']; + alphanumeric[locale] = alphanumeric['en-US']; + decimal[locale] = decimal['en-US']; +} // Source: http://www.localeplanet.com/java/ + + +var arabicLocales = ['AE', 'BH', 'DZ', 'EG', 'IQ', 'JO', 'KW', 'LB', 'LY', 'MA', 'QM', 'QA', 'SA', 'SD', 'SY', 'TN', 'YE']; +exports.arabicLocales = arabicLocales; + +for (var _locale, _i = 0; _i < arabicLocales.length; _i++) { + _locale = "ar-".concat(arabicLocales[_i]); + alpha[_locale] = alpha.ar; + alphanumeric[_locale] = alphanumeric.ar; + decimal[_locale] = decimal.ar; +} + +var farsiLocales = ['IR', 'AF']; +exports.farsiLocales = farsiLocales; + +for (var _locale2, _i2 = 0; _i2 < farsiLocales.length; _i2++) { + _locale2 = "fa-".concat(farsiLocales[_i2]); + alphanumeric[_locale2] = alphanumeric.fa; + decimal[_locale2] = decimal.ar; +} // Source: https://en.wikipedia.org/wiki/Decimal_mark + + +var dotDecimal = ['ar-EG', 'ar-LB', 'ar-LY']; +exports.dotDecimal = dotDecimal; +var commaDecimal = ['bg-BG', 'cs-CZ', 'da-DK', 'de-DE', 'el-GR', 'en-ZM', 'es-ES', 'fr-CA', 'fr-FR', 'id-ID', 'it-IT', 'ku-IQ', 'hi-IN', 'hu-HU', 'nb-NO', 'nn-NO', 'nl-NL', 'pl-PL', 'pt-PT', 'ru-RU', 'sl-SI', 'sr-RS@latin', 'sr-RS', 'sv-SE', 'tr-TR', 'uk-UA', 'vi-VN']; +exports.commaDecimal = commaDecimal; + +for (var _i3 = 0; _i3 < dotDecimal.length; _i3++) { + decimal[dotDecimal[_i3]] = decimal['en-US']; +} + +for (var _i4 = 0; _i4 < commaDecimal.length; _i4++) { + decimal[commaDecimal[_i4]] = ','; +} + +alpha['fr-CA'] = alpha['fr-FR']; +alphanumeric['fr-CA'] = alphanumeric['fr-FR']; +alpha['pt-BR'] = alpha['pt-PT']; +alphanumeric['pt-BR'] = alphanumeric['pt-PT']; +decimal['pt-BR'] = decimal['pt-PT']; // see #862 + +alpha['pl-Pl'] = alpha['pl-PL']; +alphanumeric['pl-Pl'] = alphanumeric['pl-PL']; +decimal['pl-Pl'] = decimal['pl-PL']; // see #1455 + +alpha['fa-AF'] = alpha.fa; \ No newline at end of file diff --git a/node_modules/validator/lib/blacklist.js b/node_modules/validator/lib/blacklist.js new file mode 100644 index 0000000..5dd42ed --- /dev/null +++ b/node_modules/validator/lib/blacklist.js @@ -0,0 +1,18 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = blacklist; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function blacklist(str, chars) { + (0, _assertString.default)(str); + return str.replace(new RegExp("[".concat(chars, "]+"), 'g'), ''); +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/contains.js b/node_modules/validator/lib/contains.js new file mode 100644 index 0000000..ee3843b --- /dev/null +++ b/node_modules/validator/lib/contains.js @@ -0,0 +1,33 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = contains; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +var _toString = _interopRequireDefault(require("./util/toString")); + +var _merge = _interopRequireDefault(require("./util/merge")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +var defaulContainsOptions = { + ignoreCase: false, + minOccurrences: 1 +}; + +function contains(str, elem, options) { + (0, _assertString.default)(str); + options = (0, _merge.default)(options, defaulContainsOptions); + + if (options.ignoreCase) { + return str.toLowerCase().split((0, _toString.default)(elem).toLowerCase()).length > options.minOccurrences; + } + + return str.split((0, _toString.default)(elem)).length > options.minOccurrences; +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/equals.js b/node_modules/validator/lib/equals.js new file mode 100644 index 0000000..a33c5ab --- /dev/null +++ b/node_modules/validator/lib/equals.js @@ -0,0 +1,18 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = equals; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function equals(str, comparison) { + (0, _assertString.default)(str); + return str === comparison; +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/escape.js b/node_modules/validator/lib/escape.js new file mode 100644 index 0000000..05e4220 --- /dev/null +++ b/node_modules/validator/lib/escape.js @@ -0,0 +1,18 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = escape; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function escape(str) { + (0, _assertString.default)(str); + return str.replace(/&/g, '&amp;').replace(/"/g, '&quot;').replace(/'/g, '&#x27;').replace(/</g, '&lt;').replace(/>/g, '&gt;').replace(/\//g, '&#x2F;').replace(/\\/g, '&#x5C;').replace(/`/g, '&#96;'); +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isAfter.js b/node_modules/validator/lib/isAfter.js new file mode 100644 index 0000000..1fa18ad --- /dev/null +++ b/node_modules/validator/lib/isAfter.js @@ -0,0 +1,23 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isAfter; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +var _toDate = _interopRequireDefault(require("./toDate")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ?
obj : { default: obj }; } + +function isAfter(str) { + var date = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : String(new Date()); + (0, _assertString.default)(str); + var comparison = (0, _toDate.default)(date); + var original = (0, _toDate.default)(str); + return !!(original && comparison && original > comparison); +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isAlpha.js b/node_modules/validator/lib/isAlpha.js new file mode 100644 index 0000000..bfb0c17 --- /dev/null +++ b/node_modules/validator/lib/isAlpha.js @@ -0,0 +1,40 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isAlpha; +exports.locales = void 0; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +var _alpha = require("./alpha"); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function isAlpha(_str) { + var locale = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'en-US'; + var options = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {}; + (0, _assertString.default)(_str); + var str = _str; + var ignore = options.ignore; + + if (ignore) { + if (ignore instanceof RegExp) { + str = str.replace(ignore, ''); + } else if (typeof ignore === 'string') { + str = str.replace(new RegExp("[".concat(ignore.replace(/[-[\]{}()*+?.,\\^$|#\\s]/g, '\\$&'), "]"), 'g'), ''); // escape regex for ignore + } else { + throw new Error('ignore should be instance of a String or RegExp'); + } + } + + if (locale in _alpha.alpha) { + return _alpha.alpha[locale].test(str); + } + + throw new Error("Invalid locale '".concat(locale, "'")); +} + +var locales = Object.keys(_alpha.alpha); +exports.locales = locales; \ No newline at end of file diff --git a/node_modules/validator/lib/isAlphanumeric.js b/node_modules/validator/lib/isAlphanumeric.js new file mode 100644 index 0000000..e4d1d44 --- /dev/null +++ b/node_modules/validator/lib/isAlphanumeric.js @@ -0,0 +1,40 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isAlphanumeric; +exports.locales = void 0; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +var _alpha = require("./alpha"); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function isAlphanumeric(_str) { + var locale = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'en-US'; + var options = arguments.length > 2 && arguments[2] !== undefined ? 
arguments[2] : {}; + (0, _assertString.default)(_str); + var str = _str; + var ignore = options.ignore; + + if (ignore) { + if (ignore instanceof RegExp) { + str = str.replace(ignore, ''); + } else if (typeof ignore === 'string') { + str = str.replace(new RegExp("[".concat(ignore.replace(/[-[\]{}()*+?.,\\^$|#\\s]/g, '\\$&'), "]"), 'g'), ''); // escape regex for ignore + } else { + throw new Error('ignore should be instance of a String or RegExp'); + } + } + + if (locale in _alpha.alphanumeric) { + return _alpha.alphanumeric[locale].test(str); + } + + throw new Error("Invalid locale '".concat(locale, "'")); +} + +var locales = Object.keys(_alpha.alphanumeric); +exports.locales = locales; \ No newline at end of file diff --git a/node_modules/validator/lib/isAscii.js b/node_modules/validator/lib/isAscii.js new file mode 100644 index 0000000..3c62271 --- /dev/null +++ b/node_modules/validator/lib/isAscii.js @@ -0,0 +1,22 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isAscii; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/* eslint-disable no-control-regex */ +var ascii = /^[\x00-\x7F]+$/; +/* eslint-enable no-control-regex */ + +function isAscii(str) { + (0, _assertString.default)(str); + return ascii.test(str); +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isBIC.js b/node_modules/validator/lib/isBIC.js new file mode 100644 index 0000000..a776707 --- /dev/null +++ b/node_modules/validator/lib/isBIC.js @@ -0,0 +1,29 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isBIC; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +var _isISO31661Alpha = require("./isISO31661Alpha2"); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +// https://en.wikipedia.org/wiki/ISO_9362 +var isBICReg = /^[A-Za-z]{6}[A-Za-z0-9]{2}([A-Za-z0-9]{3})?$/; + +function isBIC(str) { + (0, _assertString.default)(str); // toUpperCase() should be removed when a new major version goes out that changes + // the regex to [A-Z] (per the spec). + + if (!_isISO31661Alpha.CountryCodes.has(str.slice(4, 6).toUpperCase())) { + return false; + } + + return isBICReg.test(str); +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isBase32.js b/node_modules/validator/lib/isBase32.js new file mode 100644 index 0000000..293449f --- /dev/null +++ b/node_modules/validator/lib/isBase32.js @@ -0,0 +1,26 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isBase32; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +var base32 = /^[A-Z2-7]+=*$/; + +function isBase32(str) { + (0, _assertString.default)(str); + var len = str.length; + + if (len % 8 === 0 && base32.test(str)) { + return true; + } + + return false; +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isBase58.js b/node_modules/validator/lib/isBase58.js new file mode 100644 index 0000000..a98c82c --- /dev/null +++ b/node_modules/validator/lib/isBase58.js @@ -0,0 +1,26 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isBase58; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +// Accepted chars - 123456789ABCDEFGH JKLMN PQRSTUVWXYZabcdefghijk mnopqrstuvwxyz +var base58Reg = /^[A-HJ-NP-Za-km-z1-9]*$/; + +function isBase58(str) { + (0, _assertString.default)(str); + + if (base58Reg.test(str)) { + return true; + } + + return false; +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isBase64.js b/node_modules/validator/lib/isBase64.js new file mode 100644 index 0000000..6863683 --- /dev/null +++ b/node_modules/validator/lib/isBase64.js @@ -0,0 +1,38 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isBase64; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +var _merge = _interopRequireDefault(require("./util/merge")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +var notBase64 = /[^A-Z0-9+\/=]/i; +var urlSafeBase64 = /^[A-Z0-9_\-]*$/i; +var defaultBase64Options = { + urlSafe: false +}; + +function isBase64(str, options) { + (0, _assertString.default)(str); + options = (0, _merge.default)(options, defaultBase64Options); + var len = str.length; + + if (options.urlSafe) { + return urlSafeBase64.test(str); + } + + if (len % 4 !== 0 || notBase64.test(str)) { + return false; + } + + var firstPaddingChar = str.indexOf('='); + return firstPaddingChar === -1 || firstPaddingChar === len - 1 || firstPaddingChar === len - 2 && str[len - 1] === '='; +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isBefore.js b/node_modules/validator/lib/isBefore.js new file mode 100644 index 0000000..a54eda8 --- /dev/null +++ b/node_modules/validator/lib/isBefore.js @@ -0,0 +1,23 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isBefore; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +var _toDate = _interopRequireDefault(require("./toDate")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function isBefore(str) { + var date = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : String(new Date()); + (0, _assertString.default)(str); + var comparison = (0, _toDate.default)(date); + var original = (0, _toDate.default)(str); + return !!(original && comparison && original < comparison); +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isBoolean.js b/node_modules/validator/lib/isBoolean.js new file mode 100644 index 0000000..e89a0e4 --- /dev/null +++ b/node_modules/validator/lib/isBoolean.js @@ -0,0 +1,30 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isBoolean; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +var defaultOptions = { + loose: false +}; +var strictBooleans = ['true', 'false', '1', '0']; +var looseBooleans = [].concat(strictBooleans, ['yes', 'no']); + +function isBoolean(str) { + var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : defaultOptions; + (0, _assertString.default)(str); + + if (options.loose) { + return looseBooleans.includes(str.toLowerCase()); + } + + return strictBooleans.includes(str); +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isBtcAddress.js b/node_modules/validator/lib/isBtcAddress.js new file mode 100644 index 0000000..d1cb45c --- /dev/null +++ b/node_modules/validator/lib/isBtcAddress.js @@ -0,0 +1,27 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isBtcAddress; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +// supports Bech32 addresses +var bech32 = /^(bc1)[a-z0-9]{25,39}$/; +var base58 = /^(1|3)[A-HJ-NP-Za-km-z1-9]{25,39}$/; + +function isBtcAddress(str) { + (0, _assertString.default)(str); // check for bech32 + + if (str.startsWith('bc1')) { + return bech32.test(str); + } + + return base58.test(str); +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isByteLength.js b/node_modules/validator/lib/isByteLength.js new file mode 100644 index 0000000..c1370ea --- /dev/null +++ b/node_modules/validator/lib/isByteLength.js @@ -0,0 +1,34 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isByteLength; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function _typeof(obj) { "@babel/helpers - typeof"; if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? 
"symbol" : typeof obj; }; } return _typeof(obj); } + +/* eslint-disable prefer-rest-params */ +function isByteLength(str, options) { + (0, _assertString.default)(str); + var min; + var max; + + if (_typeof(options) === 'object') { + min = options.min || 0; + max = options.max; + } else { + // backwards compatibility: isByteLength(str, min [, max]) + min = arguments[1]; + max = arguments[2]; + } + + var len = encodeURI(str).split(/%..|./).length - 1; + return len >= min && (typeof max === 'undefined' || len <= max); +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isCreditCard.js b/node_modules/validator/lib/isCreditCard.js new file mode 100644 index 0000000..c7ab315 --- /dev/null +++ b/node_modules/validator/lib/isCreditCard.js @@ -0,0 +1,52 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isCreditCard; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/* eslint-disable max-len */ +var creditCard = /^(?:4[0-9]{12}(?:[0-9]{3,6})?|5[1-5][0-9]{14}|(222[1-9]|22[3-9][0-9]|2[3-6][0-9]{2}|27[01][0-9]|2720)[0-9]{12}|6(?:011|5[0-9][0-9])[0-9]{12,15}|3[47][0-9]{13}|3(?:0[0-5]|[68][0-9])[0-9]{11}|(?:2131|1800|35\d{3})\d{11}|6[27][0-9]{14}|^(81[0-9]{14,17}))$/; +/* eslint-enable max-len */ + +function isCreditCard(str) { + (0, _assertString.default)(str); + var sanitized = str.replace(/[- ]+/g, ''); + + if (!creditCard.test(sanitized)) { + return false; + } + + var sum = 0; + var digit; + var tmpNum; + var shouldDouble; + + for (var i = sanitized.length - 1; i >= 0; i--) { + digit = sanitized.substring(i, i + 1); + tmpNum = parseInt(digit, 10); + + if (shouldDouble) { + tmpNum *= 2; + + if (tmpNum >= 10) { + sum += tmpNum % 10 + 1; + } else { + sum += tmpNum; + } + } else { + sum += tmpNum; + } + + shouldDouble = !shouldDouble; + } + + return !!(sum % 10 === 0 ? sanitized : false); +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isCurrency.js b/node_modules/validator/lib/isCurrency.js new file mode 100755 index 0000000..cd7def6 --- /dev/null +++ b/node_modules/validator/lib/isCurrency.js @@ -0,0 +1,91 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isCurrency; + +var _merge = _interopRequireDefault(require("./util/merge")); + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function currencyRegex(options) { + var decimal_digits = "\\d{".concat(options.digits_after_decimal[0], "}"); + options.digits_after_decimal.forEach(function (digit, index) { + if (index !== 0) decimal_digits = "".concat(decimal_digits, "|\\d{").concat(digit, "}"); + }); + var symbol = "(".concat(options.symbol.replace(/\W/, function (m) { + return "\\".concat(m); + }), ")").concat(options.require_symbol ? 
'' : '?'), + negative = '-?', + whole_dollar_amount_without_sep = '[1-9]\\d*', + whole_dollar_amount_with_sep = "[1-9]\\d{0,2}(\\".concat(options.thousands_separator, "\\d{3})*"), + valid_whole_dollar_amounts = ['0', whole_dollar_amount_without_sep, whole_dollar_amount_with_sep], + whole_dollar_amount = "(".concat(valid_whole_dollar_amounts.join('|'), ")?"), + decimal_amount = "(\\".concat(options.decimal_separator, "(").concat(decimal_digits, "))").concat(options.require_decimal ? '' : '?'); + var pattern = whole_dollar_amount + (options.allow_decimal || options.require_decimal ? decimal_amount : ''); // default is negative sign before symbol, but there are two other options (besides parens) + + if (options.allow_negatives && !options.parens_for_negatives) { + if (options.negative_sign_after_digits) { + pattern += negative; + } else if (options.negative_sign_before_digits) { + pattern = negative + pattern; + } + } // South African Rand, for example, uses R 123 (space) and R-123 (no space) + + + if (options.allow_negative_sign_placeholder) { + pattern = "( (?!\\-))?".concat(pattern); + } else if (options.allow_space_after_symbol) { + pattern = " ?".concat(pattern); + } else if (options.allow_space_after_digits) { + pattern += '( (?!$))?'; + } + + if (options.symbol_after_digits) { + pattern += symbol; + } else { + pattern = symbol + pattern; + } + + if (options.allow_negatives) { + if (options.parens_for_negatives) { + pattern = "(\\(".concat(pattern, "\\)|").concat(pattern, ")"); + } else if (!(options.negative_sign_before_digits || options.negative_sign_after_digits)) { + pattern = negative + pattern; + } + } // ensure there's a dollar and/or decimal amount, and that + // it doesn't start with a space or a negative sign followed by a space + + + return new RegExp("^(?!-? )(?=.*\\d)".concat(pattern, "$")); +} + +var default_currency_options = { + symbol: '$', + require_symbol: false, + allow_space_after_symbol: false, + symbol_after_digits: false, + allow_negatives: true, + parens_for_negatives: false, + negative_sign_before_digits: false, + negative_sign_after_digits: false, + allow_negative_sign_placeholder: false, + thousands_separator: ',', + decimal_separator: '.', + allow_decimal: true, + require_decimal: false, + digits_after_decimal: [2], + allow_space_after_digits: false +}; + +function isCurrency(str, options) { + (0, _assertString.default)(str); + options = (0, _merge.default)(options, default_currency_options); + return currencyRegex(options).test(str); +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isDataURI.js b/node_modules/validator/lib/isDataURI.js new file mode 100644 index 0000000..78dc309 --- /dev/null +++ b/node_modules/validator/lib/isDataURI.js @@ -0,0 +1,53 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isDataURI; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +var validMediaType = /^[a-z]+\/[a-z0-9\-\+]+$/i; +var validAttribute = /^[a-z\-]+=[a-z0-9\-]+$/i; +var validData = /^[a-z0-9!\$&'\(\)\*\+,;=\-\._~:@\/\?%\s]*$/i; + +function isDataURI(str) { + (0, _assertString.default)(str); + var data = str.split(','); + + if (data.length < 2) { + return false; + } + + var attributes = data.shift().trim().split(';'); + var schemeAndMediaType = attributes.shift(); + + if (schemeAndMediaType.substr(0, 5) !== 'data:') { + return false; + } + + var mediaType = schemeAndMediaType.substr(5); + + if (mediaType !== '' && !validMediaType.test(mediaType)) { + return false; + } + + for (var i = 0; i < attributes.length; i++) { + if (!(i === attributes.length - 1 && attributes[i].toLowerCase() === 'base64') && !validAttribute.test(attributes[i])) { + return false; + } + } + + for (var _i = 0; _i < data.length; _i++) { + if (!validData.test(data[_i])) { + return false; + } + } + + return true; +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isDate.js b/node_modules/validator/lib/isDate.js new file mode 100644 index 0000000..b3d8010 --- /dev/null +++ b/node_modules/validator/lib/isDate.js @@ -0,0 +1,99 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isDate; + +var _merge = _interopRequireDefault(require("./util/merge")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function _slicedToArray(arr, i) { return _arrayWithHoles(arr) || _iterableToArrayLimit(arr, i) || _unsupportedIterableToArray(arr, i) || _nonIterableRest(); } + +function _nonIterableRest() { throw new TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); } + +function _iterableToArrayLimit(arr, i) { if (typeof Symbol === "undefined" || !(Symbol.iterator in Object(arr))) return; var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i["return"] != null) _i["return"](); } finally { if (_d) throw _e; } } return _arr; } + +function _arrayWithHoles(arr) { if (Array.isArray(arr)) return arr; } + +function _createForOfIteratorHelper(o, allowArrayLike) { var it; if (typeof Symbol === "undefined" || o[Symbol.iterator] == null) { if (Array.isArray(o) || (it = _unsupportedIterableToArray(o)) || allowArrayLike && o && typeof o.length === "number") { if (it) o = it; var i = 0; var F = function F() {}; return { s: F, n: function n() { if (i >= o.length) return { done: true }; return { done: false, value: o[i++] }; }, e: function e(_e2) { throw _e2; }, f: F }; } throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); } var normalCompletion = true, didErr = false, err; return { s: function s() { it = o[Symbol.iterator](); }, n: function n() { var step = it.next(); normalCompletion = step.done; return step; }, e: function e(_e3) { didErr = true; err = _e3; }, f: function f() { try { if (!normalCompletion && it.return != null) it.return(); } finally { if (didErr) throw err; } } }; } + +function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof 
o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return Array.from(o); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); } + +function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) { arr2[i] = arr[i]; } return arr2; } + +var default_date_options = { + format: 'YYYY/MM/DD', + delimiters: ['/', '-'], + strictMode: false +}; + +function isValidFormat(format) { + return /(^(y{4}|y{2})[.\/-](m{1,2})[.\/-](d{1,2})$)|(^(m{1,2})[.\/-](d{1,2})[.\/-]((y{4}|y{2})$))|(^(d{1,2})[.\/-](m{1,2})[.\/-]((y{4}|y{2})$))/gi.test(format); +} + +function zip(date, format) { + var zippedArr = [], + len = Math.min(date.length, format.length); + + for (var i = 0; i < len; i++) { + zippedArr.push([date[i], format[i]]); + } + + return zippedArr; +} + +function isDate(input, options) { + if (typeof options === 'string') { + // Allow backward compatbility for old format isDate(input [, format]) + options = (0, _merge.default)({ + format: options + }, default_date_options); + } else { + options = (0, _merge.default)(options, default_date_options); + } + + if (typeof input === 'string' && isValidFormat(options.format)) { + var formatDelimiter = options.delimiters.find(function (delimiter) { + return options.format.indexOf(delimiter) !== -1; + }); + var dateDelimiter = options.strictMode ? formatDelimiter : options.delimiters.find(function (delimiter) { + return input.indexOf(delimiter) !== -1; + }); + var dateAndFormat = zip(input.split(dateDelimiter), options.format.toLowerCase().split(formatDelimiter)); + var dateObj = {}; + + var _iterator = _createForOfIteratorHelper(dateAndFormat), + _step; + + try { + for (_iterator.s(); !(_step = _iterator.n()).done;) { + var _step$value = _slicedToArray(_step.value, 2), + dateWord = _step$value[0], + formatWord = _step$value[1]; + + if (dateWord.length !== formatWord.length) { + return false; + } + + dateObj[formatWord.charAt(0)] = dateWord; + } + } catch (err) { + _iterator.e(err); + } finally { + _iterator.f(); + } + + return new Date("".concat(dateObj.m, "/").concat(dateObj.d, "/").concat(dateObj.y)).getDate() === +dateObj.d; + } + + if (!options.strictMode) { + return Object.prototype.toString.call(input) === '[object Date]' && isFinite(input); + } + + return false; +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isDecimal.js b/node_modules/validator/lib/isDecimal.js new file mode 100644 index 0000000..d45b05f --- /dev/null +++ b/node_modules/validator/lib/isDecimal.js @@ -0,0 +1,42 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isDecimal; + +var _merge = _interopRequireDefault(require("./util/merge")); + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +var _includes = _interopRequireDefault(require("./util/includes")); + +var _alpha = require("./alpha"); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function decimalRegExp(options) { + var regExp = new RegExp("^[-+]?([0-9]+)?(\\".concat(_alpha.decimal[options.locale], "[0-9]{").concat(options.decimal_digits, "})").concat(options.force_decimal ? 
'' : '?', "$")); + return regExp; +} + +var default_decimal_options = { + force_decimal: false, + decimal_digits: '1,', + locale: 'en-US' +}; +var blacklist = ['', '-', '+']; + +function isDecimal(str, options) { + (0, _assertString.default)(str); + options = (0, _merge.default)(options, default_decimal_options); + + if (options.locale in _alpha.decimal) { + return !(0, _includes.default)(blacklist, str.replace(/ /g, '')) && decimalRegExp(options).test(str); + } + + throw new Error("Invalid locale '".concat(options.locale, "'")); +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isDivisibleBy.js b/node_modules/validator/lib/isDivisibleBy.js new file mode 100644 index 0000000..02408b3 --- /dev/null +++ b/node_modules/validator/lib/isDivisibleBy.js @@ -0,0 +1,20 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isDivisibleBy; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +var _toFloat = _interopRequireDefault(require("./toFloat")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function isDivisibleBy(str, num) { + (0, _assertString.default)(str); + return (0, _toFloat.default)(str) % parseInt(num, 10) === 0; +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isEAN.js b/node_modules/validator/lib/isEAN.js new file mode 100644 index 0000000..d08d78b --- /dev/null +++ b/node_modules/validator/lib/isEAN.js @@ -0,0 +1,85 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isEAN; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The most commonly used EAN standard is + * the thirteen-digit EAN-13, while the + * less commonly used 8-digit EAN-8 barcode was + * introduced for use on small packages. + * Also EAN/UCC-14 is used for Grouping of individual + * trade items above unit level(Intermediate, Carton or Pallet). + * For more info about EAN-14 checkout: https://www.gtin.info/itf-14-barcodes/ + * EAN consists of: + * GS1 prefix, manufacturer code, product code and check digit + * Reference: https://en.wikipedia.org/wiki/International_Article_Number + * Reference: https://www.gtin.info/ + */ + +/** + * Define EAN Lenghts; 8 for EAN-8; 13 for EAN-13; 14 for EAN-14 + * and Regular Expression for valid EANs (EAN-8, EAN-13, EAN-14), + * with exact numberic matching of 8 or 13 or 14 digits [0-9] + */ +var LENGTH_EAN_8 = 8; +var LENGTH_EAN_14 = 14; +var validEanRegex = /^(\d{8}|\d{13}|\d{14})$/; +/** + * Get position weight given: + * EAN length and digit index/position + * + * @param {number} length + * @param {number} index + * @return {number} + */ + +function getPositionWeightThroughLengthAndIndex(length, index) { + if (length === LENGTH_EAN_8 || length === LENGTH_EAN_14) { + return index % 2 === 0 ? 3 : 1; + } + + return index % 2 === 0 ? 
1 : 3; +} +/** + * Calculate EAN Check Digit + * Reference: https://en.wikipedia.org/wiki/International_Article_Number#Calculation_of_checksum_digit + * + * @param {string} ean + * @return {number} + */ + + +function calculateCheckDigit(ean) { + var checksum = ean.slice(0, -1).split('').map(function (char, index) { + return Number(char) * getPositionWeightThroughLengthAndIndex(ean.length, index); + }).reduce(function (acc, partialSum) { + return acc + partialSum; + }, 0); + var remainder = 10 - checksum % 10; + return remainder < 10 ? remainder : 0; +} +/** + * Check if string is valid EAN: + * Matches EAN-8/EAN-13/EAN-14 regex + * Has valid check digit. + * + * @param {string} str + * @return {boolean} + */ + + +function isEAN(str) { + (0, _assertString.default)(str); + var actualCheckDigit = Number(str.slice(-1)); + return validEanRegex.test(str) && actualCheckDigit === calculateCheckDigit(str); +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isEmail.js b/node_modules/validator/lib/isEmail.js new file mode 100644 index 0000000..bf08492 --- /dev/null +++ b/node_modules/validator/lib/isEmail.js @@ -0,0 +1,197 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isEmail; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +var _merge = _interopRequireDefault(require("./util/merge")); + +var _isByteLength = _interopRequireDefault(require("./isByteLength")); + +var _isFQDN = _interopRequireDefault(require("./isFQDN")); + +var _isIP = _interopRequireDefault(require("./isIP")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +var default_email_options = { + allow_display_name: false, + require_display_name: false, + allow_utf8_local_part: true, + require_tld: true, + blacklisted_chars: '', + ignore_max_length: false, + host_blacklist: [] +}; +/* eslint-disable max-len */ + +/* eslint-disable no-control-regex */ + +var splitNameAddress = /^([^\x00-\x1F\x7F-\x9F\cX]+)]/.test(display_name_without_quotes); + + if (contains_illegal) { + // if contains illegal characters, + // must to be enclosed in double-quotes, otherwise it's not a valid display name + if (display_name_without_quotes === display_name) { + return false; + } // the quotes in display name must start with character symbol \ + + + var all_start_with_back_slash = display_name_without_quotes.split('"').length === display_name_without_quotes.split('\\"').length; + + if (!all_start_with_back_slash) { + return false; + } + } + + return true; +} + +function isEmail(str, options) { + (0, _assertString.default)(str); + options = (0, _merge.default)(options, default_email_options); + + if (options.require_display_name || options.allow_display_name) { + var display_email = str.match(splitNameAddress); + + if (display_email) { + var display_name = display_email[1]; // Remove display name and angle brackets to get email address + // Can be done in the regex but will introduce a ReDOS (See #1597 for more info) + + str = str.replace(display_name, '').replace(/(^<|>$)/g, ''); // sometimes need to trim the last space to get the display name + // because there may be a space between display name and email address + // eg. 
myname + // the display name is `myname` instead of `myname `, so need to trim the last space + + if (display_name.endsWith(' ')) { + display_name = display_name.substr(0, display_name.length - 1); + } + + if (!validateDisplayName(display_name)) { + return false; + } + } else if (options.require_display_name) { + return false; + } + } + + if (!options.ignore_max_length && str.length > defaultMaxEmailLength) { + return false; + } + + var parts = str.split('@'); + var domain = parts.pop(); + var lower_domain = domain.toLowerCase(); + + if (options.host_blacklist.includes(lower_domain)) { + return false; + } + + var user = parts.join('@'); + + if (options.domain_specific_validation && (lower_domain === 'gmail.com' || lower_domain === 'googlemail.com')) { + /* + Previously we removed dots for gmail addresses before validating. + This was removed because it allows `multiple..dots@gmail.com` + to be reported as valid, but it is not. + Gmail only normalizes single dots, removing them from here is pointless, + should be done in normalizeEmail + */ + user = user.toLowerCase(); // Removing sub-address from username before gmail validation + + var username = user.split('+')[0]; // Dots are not included in gmail length restriction + + if (!(0, _isByteLength.default)(username.replace(/\./g, ''), { + min: 6, + max: 30 + })) { + return false; + } + + var _user_parts = username.split('.'); + + for (var i = 0; i < _user_parts.length; i++) { + if (!gmailUserPart.test(_user_parts[i])) { + return false; + } + } + } + + if (options.ignore_max_length === false && (!(0, _isByteLength.default)(user, { + max: 64 + }) || !(0, _isByteLength.default)(domain, { + max: 254 + }))) { + return false; + } + + if (!(0, _isFQDN.default)(domain, { + require_tld: options.require_tld + })) { + if (!options.allow_ip_domain) { + return false; + } + + if (!(0, _isIP.default)(domain)) { + if (!domain.startsWith('[') || !domain.endsWith(']')) { + return false; + } + + var noBracketdomain = domain.substr(1, domain.length - 2); + + if (noBracketdomain.length === 0 || !(0, _isIP.default)(noBracketdomain)) { + return false; + } + } + } + + if (user[0] === '"') { + user = user.slice(1, user.length - 1); + return options.allow_utf8_local_part ? quotedEmailUserUtf8.test(user) : quotedEmailUser.test(user); + } + + var pattern = options.allow_utf8_local_part ? emailUserUtf8Part : emailUserPart; + var user_parts = user.split('.'); + + for (var _i = 0; _i < user_parts.length; _i++) { + if (!pattern.test(user_parts[_i])) { + return false; + } + } + + if (options.blacklisted_chars) { + if (user.search(new RegExp("[".concat(options.blacklisted_chars, "]+"), 'g')) !== -1) return false; + } + + return true; +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isEmpty.js b/node_modules/validator/lib/isEmpty.js new file mode 100644 index 0000000..26766d5 --- /dev/null +++ b/node_modules/validator/lib/isEmpty.js @@ -0,0 +1,25 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isEmpty; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +var _merge = _interopRequireDefault(require("./util/merge")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +var default_is_empty_options = { + ignore_whitespace: false +}; + +function isEmpty(str, options) { + (0, _assertString.default)(str); + options = (0, _merge.default)(options, default_is_empty_options); + return (options.ignore_whitespace ? str.trim().length : str.length) === 0; +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isEthereumAddress.js b/node_modules/validator/lib/isEthereumAddress.js new file mode 100644 index 0000000..e6999b9 --- /dev/null +++ b/node_modules/validator/lib/isEthereumAddress.js @@ -0,0 +1,20 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isEthereumAddress; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +var eth = /^(0x)[0-9a-f]{40}$/i; + +function isEthereumAddress(str) { + (0, _assertString.default)(str); + return eth.test(str); +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isFQDN.js b/node_modules/validator/lib/isFQDN.js new file mode 100644 index 0000000..73ce134 --- /dev/null +++ b/node_modules/validator/lib/isFQDN.js @@ -0,0 +1,89 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isFQDN; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +var _merge = _interopRequireDefault(require("./util/merge")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +var default_fqdn_options = { + require_tld: true, + allow_underscores: false, + allow_trailing_dot: false, + allow_numeric_tld: false, + allow_wildcard: false +}; + +function isFQDN(str, options) { + (0, _assertString.default)(str); + options = (0, _merge.default)(options, default_fqdn_options); + /* Remove the optional trailing dot before checking validity */ + + if (options.allow_trailing_dot && str[str.length - 1] === '.') { + str = str.substring(0, str.length - 1); + } + /* Remove the optional wildcard before checking validity */ + + + if (options.allow_wildcard === true && str.indexOf('*.') === 0) { + str = str.substring(2); + } + + var parts = str.split('.'); + var tld = parts[parts.length - 1]; + + if (options.require_tld) { + // disallow fqdns without tld + if (parts.length < 2) { + return false; + } + + if (!/^([a-z\u00A1-\u00A8\u00AA-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF]{2,}|xn[a-z0-9-]{2,})$/i.test(tld)) { + return false; + } // disallow spaces + + + if (/\s/.test(tld)) { + return false; + } + } // reject numeric TLDs + + + if (!options.allow_numeric_tld && /^\d+$/.test(tld)) { + return false; + } + + return parts.every(function (part) { + if (part.length > 63) { + return false; + } + + if (!/^[a-z_\u00a1-\uffff0-9-]+$/i.test(part)) { + return false; + } // disallow full-width chars + + + if (/[\uff01-\uff5e]/.test(part)) { + return false; + } // disallow parts starting or ending with hyphen + + + if (/^-|-$/.test(part)) { + return false; + } + + if (!options.allow_underscores && /_/.test(part)) { + return false; + } + + return true; + }); +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isFloat.js b/node_modules/validator/lib/isFloat.js new file 
mode 100644 index 0000000..3fdab86 --- /dev/null +++ b/node_modules/validator/lib/isFloat.js @@ -0,0 +1,29 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isFloat; +exports.locales = void 0; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +var _alpha = require("./alpha"); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function isFloat(str, options) { + (0, _assertString.default)(str); + options = options || {}; + var float = new RegExp("^(?:[-+])?(?:[0-9]+)?(?:\\".concat(options.locale ? _alpha.decimal[options.locale] : '.', "[0-9]*)?(?:[eE][\\+\\-]?(?:[0-9]+))?$")); + + if (str === '' || str === '.' || str === '-' || str === '+') { + return false; + } + + var value = parseFloat(str.replace(',', '.')); + return float.test(str) && (!options.hasOwnProperty('min') || value >= options.min) && (!options.hasOwnProperty('max') || value <= options.max) && (!options.hasOwnProperty('lt') || value < options.lt) && (!options.hasOwnProperty('gt') || value > options.gt); +} + +var locales = Object.keys(_alpha.decimal); +exports.locales = locales; \ No newline at end of file diff --git a/node_modules/validator/lib/isFullWidth.js b/node_modules/validator/lib/isFullWidth.js new file mode 100644 index 0000000..1960f13 --- /dev/null +++ b/node_modules/validator/lib/isFullWidth.js @@ -0,0 +1,19 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isFullWidth; +exports.fullWidth = void 0; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +var fullWidth = /[^\u0020-\u007E\uFF61-\uFF9F\uFFA0-\uFFDC\uFFE8-\uFFEE0-9a-zA-Z]/; +exports.fullWidth = fullWidth; + +function isFullWidth(str) { + (0, _assertString.default)(str); + return fullWidth.test(str); +} \ No newline at end of file diff --git a/node_modules/validator/lib/isHSL.js b/node_modules/validator/lib/isHSL.js new file mode 100644 index 0000000..0590b3e --- /dev/null +++ b/node_modules/validator/lib/isHSL.js @@ -0,0 +1,28 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isHSL; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +var hslComma = /^hsla?\(((\+|\-)?([0-9]+(\.[0-9]+)?(e(\+|\-)?[0-9]+)?|\.[0-9]+(e(\+|\-)?[0-9]+)?))(deg|grad|rad|turn)?(,(\+|\-)?([0-9]+(\.[0-9]+)?(e(\+|\-)?[0-9]+)?|\.[0-9]+(e(\+|\-)?[0-9]+)?)%){2}(,((\+|\-)?([0-9]+(\.[0-9]+)?(e(\+|\-)?[0-9]+)?|\.[0-9]+(e(\+|\-)?[0-9]+)?)%?))?\)$/i; +var hslSpace = /^hsla?\(((\+|\-)?([0-9]+(\.[0-9]+)?(e(\+|\-)?[0-9]+)?|\.[0-9]+(e(\+|\-)?[0-9]+)?))(deg|grad|rad|turn)?(\s(\+|\-)?([0-9]+(\.[0-9]+)?(e(\+|\-)?[0-9]+)?|\.[0-9]+(e(\+|\-)?[0-9]+)?)%){2}\s?(\/\s((\+|\-)?([0-9]+(\.[0-9]+)?(e(\+|\-)?[0-9]+)?|\.[0-9]+(e(\+|\-)?[0-9]+)?)%?)\s?)?\)$/i; + +function isHSL(str) { + (0, _assertString.default)(str); // Strip duplicate spaces before calling the validation regex (See #1598 for more info) + + var strippedStr = str.replace(/\s+/g, ' ').replace(/\s?(hsla?\(|\)|,)\s?/ig, '$1'); + + if (strippedStr.indexOf(',') !== -1) { + return hslComma.test(strippedStr); + } + + return hslSpace.test(strippedStr); +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isHalfWidth.js b/node_modules/validator/lib/isHalfWidth.js new file mode 100644 index 0000000..55a9e1a --- /dev/null +++ b/node_modules/validator/lib/isHalfWidth.js @@ -0,0 +1,19 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isHalfWidth; +exports.halfWidth = void 0; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +var halfWidth = /[\u0020-\u007E\uFF61-\uFF9F\uFFA0-\uFFDC\uFFE8-\uFFEE0-9a-zA-Z]/; +exports.halfWidth = halfWidth; + +function isHalfWidth(str) { + (0, _assertString.default)(str); + return halfWidth.test(str); +} \ No newline at end of file diff --git a/node_modules/validator/lib/isHash.js b/node_modules/validator/lib/isHash.js new file mode 100644 index 0000000..1083966 --- /dev/null +++ b/node_modules/validator/lib/isHash.js @@ -0,0 +1,35 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isHash; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +var lengths = { + md5: 32, + md4: 32, + sha1: 40, + sha256: 64, + sha384: 96, + sha512: 128, + ripemd128: 32, + ripemd160: 40, + tiger128: 32, + tiger160: 40, + tiger192: 48, + crc32: 8, + crc32b: 8 +}; + +function isHash(str, algorithm) { + (0, _assertString.default)(str); + var hash = new RegExp("^[a-fA-F0-9]{".concat(lengths[algorithm], "}$")); + return hash.test(str); +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isHexColor.js b/node_modules/validator/lib/isHexColor.js new file mode 100644 index 0000000..7af3889 --- /dev/null +++ b/node_modules/validator/lib/isHexColor.js @@ -0,0 +1,20 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isHexColor; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +var hexcolor = /^#?([0-9A-F]{3}|[0-9A-F]{4}|[0-9A-F]{6}|[0-9A-F]{8})$/i; + +function isHexColor(str) { + (0, _assertString.default)(str); + return hexcolor.test(str); +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isHexadecimal.js b/node_modules/validator/lib/isHexadecimal.js new file mode 100644 index 0000000..a1cf738 --- /dev/null +++ b/node_modules/validator/lib/isHexadecimal.js @@ -0,0 +1,20 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isHexadecimal; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +var hexadecimal = /^(0x|0h)?[0-9A-F]+$/i; + +function isHexadecimal(str) { + (0, _assertString.default)(str); + return hexadecimal.test(str); +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isIBAN.js b/node_modules/validator/lib/isIBAN.js new file mode 100644 index 0000000..ef804f2 --- /dev/null +++ b/node_modules/validator/lib/isIBAN.js @@ -0,0 +1,150 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isIBAN; +exports.locales = void 0; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * List of country codes with + * corresponding IBAN regular expression + * Reference: https://en.wikipedia.org/wiki/International_Bank_Account_Number + */ +var ibanRegexThroughCountryCode = { + AD: /^(AD[0-9]{2})\d{8}[A-Z0-9]{12}$/, + AE: /^(AE[0-9]{2})\d{3}\d{16}$/, + AL: /^(AL[0-9]{2})\d{8}[A-Z0-9]{16}$/, + AT: /^(AT[0-9]{2})\d{16}$/, + AZ: /^(AZ[0-9]{2})[A-Z0-9]{4}\d{20}$/, + BA: /^(BA[0-9]{2})\d{16}$/, + BE: /^(BE[0-9]{2})\d{12}$/, + BG: /^(BG[0-9]{2})[A-Z]{4}\d{6}[A-Z0-9]{8}$/, + BH: /^(BH[0-9]{2})[A-Z]{4}[A-Z0-9]{14}$/, + BR: /^(BR[0-9]{2})\d{23}[A-Z]{1}[A-Z0-9]{1}$/, + BY: /^(BY[0-9]{2})[A-Z0-9]{4}\d{20}$/, + CH: /^(CH[0-9]{2})\d{5}[A-Z0-9]{12}$/, + CR: /^(CR[0-9]{2})\d{18}$/, + CY: /^(CY[0-9]{2})\d{8}[A-Z0-9]{16}$/, + CZ: /^(CZ[0-9]{2})\d{20}$/, + DE: /^(DE[0-9]{2})\d{18}$/, + DK: /^(DK[0-9]{2})\d{14}$/, + DO: /^(DO[0-9]{2})[A-Z]{4}\d{20}$/, + EE: /^(EE[0-9]{2})\d{16}$/, + EG: /^(EG[0-9]{2})\d{25}$/, + ES: /^(ES[0-9]{2})\d{20}$/, + FI: /^(FI[0-9]{2})\d{14}$/, + FO: /^(FO[0-9]{2})\d{14}$/, + FR: /^(FR[0-9]{2})\d{10}[A-Z0-9]{11}\d{2}$/, + GB: /^(GB[0-9]{2})[A-Z]{4}\d{14}$/, + GE: /^(GE[0-9]{2})[A-Z0-9]{2}\d{16}$/, + GI: /^(GI[0-9]{2})[A-Z]{4}[A-Z0-9]{15}$/, + GL: /^(GL[0-9]{2})\d{14}$/, + GR: /^(GR[0-9]{2})\d{7}[A-Z0-9]{16}$/, + GT: /^(GT[0-9]{2})[A-Z0-9]{4}[A-Z0-9]{20}$/, + HR: /^(HR[0-9]{2})\d{17}$/, + HU: /^(HU[0-9]{2})\d{24}$/, + IE: /^(IE[0-9]{2})[A-Z0-9]{4}\d{14}$/, + IL: /^(IL[0-9]{2})\d{19}$/, + IQ: /^(IQ[0-9]{2})[A-Z]{4}\d{15}$/, + IR: /^(IR[0-9]{2})0\d{2}0\d{18}$/, + IS: /^(IS[0-9]{2})\d{22}$/, + IT: /^(IT[0-9]{2})[A-Z]{1}\d{10}[A-Z0-9]{12}$/, + JO: /^(JO[0-9]{2})[A-Z]{4}\d{22}$/, + KW: /^(KW[0-9]{2})[A-Z]{4}[A-Z0-9]{22}$/, + KZ: /^(KZ[0-9]{2})\d{3}[A-Z0-9]{13}$/, + LB: /^(LB[0-9]{2})\d{4}[A-Z0-9]{20}$/, + LC: /^(LC[0-9]{2})[A-Z]{4}[A-Z0-9]{24}$/, + LI: /^(LI[0-9]{2})\d{5}[A-Z0-9]{12}$/, + LT: /^(LT[0-9]{2})\d{16}$/, + LU: /^(LU[0-9]{2})\d{3}[A-Z0-9]{13}$/, + LV: 
/^(LV[0-9]{2})[A-Z]{4}[A-Z0-9]{13}$/, + MC: /^(MC[0-9]{2})\d{10}[A-Z0-9]{11}\d{2}$/, + MD: /^(MD[0-9]{2})[A-Z0-9]{20}$/, + ME: /^(ME[0-9]{2})\d{18}$/, + MK: /^(MK[0-9]{2})\d{3}[A-Z0-9]{10}\d{2}$/, + MR: /^(MR[0-9]{2})\d{23}$/, + MT: /^(MT[0-9]{2})[A-Z]{4}\d{5}[A-Z0-9]{18}$/, + MU: /^(MU[0-9]{2})[A-Z]{4}\d{19}[A-Z]{3}$/, + MZ: /^(MZ[0-9]{2})\d{21}$/, + NL: /^(NL[0-9]{2})[A-Z]{4}\d{10}$/, + NO: /^(NO[0-9]{2})\d{11}$/, + PK: /^(PK[0-9]{2})[A-Z0-9]{4}\d{16}$/, + PL: /^(PL[0-9]{2})\d{24}$/, + PS: /^(PS[0-9]{2})[A-Z0-9]{4}\d{21}$/, + PT: /^(PT[0-9]{2})\d{21}$/, + QA: /^(QA[0-9]{2})[A-Z]{4}[A-Z0-9]{21}$/, + RO: /^(RO[0-9]{2})[A-Z]{4}[A-Z0-9]{16}$/, + RS: /^(RS[0-9]{2})\d{18}$/, + SA: /^(SA[0-9]{2})\d{2}[A-Z0-9]{18}$/, + SC: /^(SC[0-9]{2})[A-Z]{4}\d{20}[A-Z]{3}$/, + SE: /^(SE[0-9]{2})\d{20}$/, + SI: /^(SI[0-9]{2})\d{15}$/, + SK: /^(SK[0-9]{2})\d{20}$/, + SM: /^(SM[0-9]{2})[A-Z]{1}\d{10}[A-Z0-9]{12}$/, + SV: /^(SV[0-9]{2})[A-Z0-9]{4}\d{20}$/, + TL: /^(TL[0-9]{2})\d{19}$/, + TN: /^(TN[0-9]{2})\d{20}$/, + TR: /^(TR[0-9]{2})\d{5}[A-Z0-9]{17}$/, + UA: /^(UA[0-9]{2})\d{6}[A-Z0-9]{19}$/, + VA: /^(VA[0-9]{2})\d{18}$/, + VG: /^(VG[0-9]{2})[A-Z0-9]{4}\d{16}$/, + XK: /^(XK[0-9]{2})\d{16}$/ +}; +/** + * Check whether string has correct universal IBAN format + * The IBAN consists of up to 34 alphanumeric characters, as follows: + * Country Code using ISO 3166-1 alpha-2, two letters + * check digits, two digits and + * Basic Bank Account Number (BBAN), up to 30 alphanumeric characters. + * NOTE: Permitted IBAN characters are: digits [0-9] and the 26 latin alphabetic [A-Z] + * + * @param {string} str - string under validation + * @return {boolean} + */ + +function hasValidIbanFormat(str) { + // Strip white spaces and hyphens + var strippedStr = str.replace(/[\s\-]+/gi, '').toUpperCase(); + var isoCountryCode = strippedStr.slice(0, 2).toUpperCase(); + return isoCountryCode in ibanRegexThroughCountryCode && ibanRegexThroughCountryCode[isoCountryCode].test(strippedStr); +} +/** + * Check whether string has valid IBAN Checksum + * by performing basic mod-97 operation and + * the remainder should equal 1 + * -- Start by rearranging the IBAN by moving the four initial characters to the end of the string + * -- Replace each letter in the string with two digits, A -> 10, B = 11, Z = 35 + * -- Interpret the string as a decimal integer and + * -- compute the remainder on division by 97 (mod 97) + * Reference: https://en.wikipedia.org/wiki/International_Bank_Account_Number + * + * @param {string} str + * @return {boolean} + */ + + +function hasValidIbanChecksum(str) { + var strippedStr = str.replace(/[^A-Z0-9]+/gi, '').toUpperCase(); // Keep only digits and A-Z latin alphabetic + + var rearranged = strippedStr.slice(4) + strippedStr.slice(0, 4); + var alphaCapsReplacedWithDigits = rearranged.replace(/[A-Z]/g, function (char) { + return char.charCodeAt(0) - 55; + }); + var remainder = alphaCapsReplacedWithDigits.match(/\d{1,7}/g).reduce(function (acc, value) { + return Number(acc + value) % 97; + }, ''); + return remainder === 1; +} + +function isIBAN(str) { + (0, _assertString.default)(str); + return hasValidIbanFormat(str) && hasValidIbanChecksum(str); +} + +var locales = Object.keys(ibanRegexThroughCountryCode); +exports.locales = locales; \ No newline at end of file diff --git a/node_modules/validator/lib/isIMEI.js b/node_modules/validator/lib/isIMEI.js new file mode 100644 index 0000000..aa05178 --- /dev/null +++ b/node_modules/validator/lib/isIMEI.js @@ -0,0 +1,61 @@ +"use strict"; + +Object.defineProperty(exports, 
"__esModule", { + value: true +}); +exports.default = isIMEI; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +var imeiRegexWithoutHypens = /^[0-9]{15}$/; +var imeiRegexWithHypens = /^\d{2}-\d{6}-\d{6}-\d{1}$/; + +function isIMEI(str, options) { + (0, _assertString.default)(str); + options = options || {}; // default regex for checking imei is the one without hyphens + + var imeiRegex = imeiRegexWithoutHypens; + + if (options.allow_hyphens) { + imeiRegex = imeiRegexWithHypens; + } + + if (!imeiRegex.test(str)) { + return false; + } + + str = str.replace(/-/g, ''); + var sum = 0, + mul = 2, + l = 14; + + for (var i = 0; i < l; i++) { + var digit = str.substring(l - i - 1, l - i); + var tp = parseInt(digit, 10) * mul; + + if (tp >= 10) { + sum += tp % 10 + 1; + } else { + sum += tp; + } + + if (mul === 1) { + mul += 1; + } else { + mul -= 1; + } + } + + var chk = (10 - sum % 10) % 10; + + if (chk !== parseInt(str.substring(14, 15), 10)) { + return false; + } + + return true; +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isIP.js b/node_modules/validator/lib/isIP.js new file mode 100644 index 0000000..2b470af --- /dev/null +++ b/node_modules/validator/lib/isIP.js @@ -0,0 +1,75 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isIP; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** +11.3. Examples + + The following addresses + + fe80::1234 (on the 1st link of the node) + ff02::5678 (on the 5th link of the node) + ff08::9abc (on the 10th organization of the node) + + would be represented as follows: + + fe80::1234%1 + ff02::5678%5 + ff08::9abc%10 + + (Here we assume a natural translation from a zone index to the + part, where the Nth zone of any scope is translated into + "N".) + + If we use interface names as , those addresses could also be + represented as follows: + + fe80::1234%ne0 + ff02::5678%pvc1.3 + ff08::9abc%interface10 + + where the interface "ne0" belongs to the 1st link, "pvc1.3" belongs + to the 5th link, and "interface10" belongs to the 10th organization. 
+ * * */ +var IPv4SegmentFormat = '(?:[0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])'; +var IPv4AddressFormat = "(".concat(IPv4SegmentFormat, "[.]){3}").concat(IPv4SegmentFormat); +var IPv4AddressRegExp = new RegExp("^".concat(IPv4AddressFormat, "$")); +var IPv6SegmentFormat = '(?:[0-9a-fA-F]{1,4})'; +var IPv6AddressRegExp = new RegExp('^(' + "(?:".concat(IPv6SegmentFormat, ":){7}(?:").concat(IPv6SegmentFormat, "|:)|") + "(?:".concat(IPv6SegmentFormat, ":){6}(?:").concat(IPv4AddressFormat, "|:").concat(IPv6SegmentFormat, "|:)|") + "(?:".concat(IPv6SegmentFormat, ":){5}(?::").concat(IPv4AddressFormat, "|(:").concat(IPv6SegmentFormat, "){1,2}|:)|") + "(?:".concat(IPv6SegmentFormat, ":){4}(?:(:").concat(IPv6SegmentFormat, "){0,1}:").concat(IPv4AddressFormat, "|(:").concat(IPv6SegmentFormat, "){1,3}|:)|") + "(?:".concat(IPv6SegmentFormat, ":){3}(?:(:").concat(IPv6SegmentFormat, "){0,2}:").concat(IPv4AddressFormat, "|(:").concat(IPv6SegmentFormat, "){1,4}|:)|") + "(?:".concat(IPv6SegmentFormat, ":){2}(?:(:").concat(IPv6SegmentFormat, "){0,3}:").concat(IPv4AddressFormat, "|(:").concat(IPv6SegmentFormat, "){1,5}|:)|") + "(?:".concat(IPv6SegmentFormat, ":){1}(?:(:").concat(IPv6SegmentFormat, "){0,4}:").concat(IPv4AddressFormat, "|(:").concat(IPv6SegmentFormat, "){1,6}|:)|") + "(?::((?::".concat(IPv6SegmentFormat, "){0,5}:").concat(IPv4AddressFormat, "|(?::").concat(IPv6SegmentFormat, "){1,7}|:))") + ')(%[0-9a-zA-Z-.:]{1,})?$'); + +function isIP(str) { + var version = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : ''; + (0, _assertString.default)(str); + version = String(version); + + if (!version) { + return isIP(str, 4) || isIP(str, 6); + } + + if (version === '4') { + if (!IPv4AddressRegExp.test(str)) { + return false; + } + + var parts = str.split('.').sort(function (a, b) { + return a - b; + }); + return parts[3] <= 255; + } + + if (version === '6') { + return !!IPv6AddressRegExp.test(str); + } + + return false; +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isIPRange.js b/node_modules/validator/lib/isIPRange.js new file mode 100644 index 0000000..293a1a2 --- /dev/null +++ b/node_modules/validator/lib/isIPRange.js @@ -0,0 +1,62 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isIPRange; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +var _isIP = _interopRequireDefault(require("./isIP")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +var subnetMaybe = /^\d{1,3}$/; +var v4Subnet = 32; +var v6Subnet = 128; + +function isIPRange(str) { + var version = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : ''; + (0, _assertString.default)(str); + var parts = str.split('/'); // parts[0] -> ip, parts[1] -> subnet + + if (parts.length !== 2) { + return false; + } + + if (!subnetMaybe.test(parts[1])) { + return false; + } // Disallow preceding 0 i.e. 01, 02, ... + + + if (parts[1].length > 1 && parts[1].startsWith('0')) { + return false; + } + + var isValidIP = (0, _isIP.default)(parts[0], version); + + if (!isValidIP) { + return false; + } // Define valid subnet according to IP's version + + + var expectedSubnet = null; + + switch (String(version)) { + case '4': + expectedSubnet = v4Subnet; + break; + + case '6': + expectedSubnet = v6Subnet; + break; + + default: + expectedSubnet = (0, _isIP.default)(parts[0], '6') ? 
v6Subnet : v4Subnet; + } + + return parts[1] <= expectedSubnet && parts[1] >= 0; +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isISBN.js b/node_modules/validator/lib/isISBN.js new file mode 100644 index 0000000..f00bb7a --- /dev/null +++ b/node_modules/validator/lib/isISBN.js @@ -0,0 +1,65 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isISBN; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +var isbn10Maybe = /^(?:[0-9]{9}X|[0-9]{10})$/; +var isbn13Maybe = /^(?:[0-9]{13})$/; +var factor = [1, 3]; + +function isISBN(str) { + var version = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : ''; + (0, _assertString.default)(str); + version = String(version); + + if (!version) { + return isISBN(str, 10) || isISBN(str, 13); + } + + var sanitized = str.replace(/[\s-]+/g, ''); + var checksum = 0; + var i; + + if (version === '10') { + if (!isbn10Maybe.test(sanitized)) { + return false; + } + + for (i = 0; i < 9; i++) { + checksum += (i + 1) * sanitized.charAt(i); + } + + if (sanitized.charAt(9) === 'X') { + checksum += 10 * 10; + } else { + checksum += 10 * sanitized.charAt(9); + } + + if (checksum % 11 === 0) { + return !!sanitized; + } + } else if (version === '13') { + if (!isbn13Maybe.test(sanitized)) { + return false; + } + + for (i = 0; i < 12; i++) { + checksum += factor[i % 2] * sanitized.charAt(i); + } + + if (sanitized.charAt(12) - (10 - checksum % 10) % 10 === 0) { + return !!sanitized; + } + } + + return false; +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isISIN.js b/node_modules/validator/lib/isISIN.js new file mode 100644 index 0000000..3943890 --- /dev/null +++ b/node_modules/validator/lib/isISIN.js @@ -0,0 +1,73 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isISIN; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +var isin = /^[A-Z]{2}[0-9A-Z]{9}[0-9]$/; // this link details how the check digit is calculated: +// https://www.isin.org/isin-format/. it is a little bit +// odd in that it works with digits, not numbers. in order +// to make only one pass through the ISIN characters, the +// each alpha character is handled as 2 characters within +// the loop. + +function isISIN(str) { + (0, _assertString.default)(str); + + if (!isin.test(str)) { + return false; + } + + var double = true; + var sum = 0; // convert values + + for (var i = str.length - 2; i >= 0; i--) { + if (str[i] >= 'A' && str[i] <= 'Z') { + var value = str[i].charCodeAt(0) - 55; + var lo = value % 10; + var hi = Math.trunc(value / 10); // letters have two digits, so handle the low order + // and high order digits separately. 
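+      // (Luhn-style doubling: when 2 * digit has two digits, its digit sum
+      // equals 1 + (digit - 5) * 2, e.g. 7 -> 14 -> 1 + 4 = 5, which is why
+      // digits of 5 or more are handled by that expression below.)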
+ + for (var _i = 0, _arr = [lo, hi]; _i < _arr.length; _i++) { + var digit = _arr[_i]; + + if (double) { + if (digit >= 5) { + sum += 1 + (digit - 5) * 2; + } else { + sum += digit * 2; + } + } else { + sum += digit; + } + + double = !double; + } + } else { + var _digit = str[i].charCodeAt(0) - '0'.charCodeAt(0); + + if (double) { + if (_digit >= 5) { + sum += 1 + (_digit - 5) * 2; + } else { + sum += _digit * 2; + } + } else { + sum += _digit; + } + + double = !double; + } + } + + var check = Math.trunc((sum + 9) / 10) * 10 - sum; + return +str[str.length - 1] === check; +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isISO31661Alpha2.js b/node_modules/validator/lib/isISO31661Alpha2.js new file mode 100644 index 0000000..d15bb5b --- /dev/null +++ b/node_modules/validator/lib/isISO31661Alpha2.js @@ -0,0 +1,22 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isISO31661Alpha2; +exports.CountryCodes = void 0; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +// from https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2 +var validISO31661Alpha2CountriesCodes = new Set(['AD', 'AE', 'AF', 'AG', 'AI', 'AL', 'AM', 'AO', 'AQ', 'AR', 'AS', 'AT', 'AU', 'AW', 'AX', 'AZ', 'BA', 'BB', 'BD', 'BE', 'BF', 'BG', 'BH', 'BI', 'BJ', 'BL', 'BM', 'BN', 'BO', 'BQ', 'BR', 'BS', 'BT', 'BV', 'BW', 'BY', 'BZ', 'CA', 'CC', 'CD', 'CF', 'CG', 'CH', 'CI', 'CK', 'CL', 'CM', 'CN', 'CO', 'CR', 'CU', 'CV', 'CW', 'CX', 'CY', 'CZ', 'DE', 'DJ', 'DK', 'DM', 'DO', 'DZ', 'EC', 'EE', 'EG', 'EH', 'ER', 'ES', 'ET', 'FI', 'FJ', 'FK', 'FM', 'FO', 'FR', 'GA', 'GB', 'GD', 'GE', 'GF', 'GG', 'GH', 'GI', 'GL', 'GM', 'GN', 'GP', 'GQ', 'GR', 'GS', 'GT', 'GU', 'GW', 'GY', 'HK', 'HM', 'HN', 'HR', 'HT', 'HU', 'ID', 'IE', 'IL', 'IM', 'IN', 'IO', 'IQ', 'IR', 'IS', 'IT', 'JE', 'JM', 'JO', 'JP', 'KE', 'KG', 'KH', 'KI', 'KM', 'KN', 'KP', 'KR', 'KW', 'KY', 'KZ', 'LA', 'LB', 'LC', 'LI', 'LK', 'LR', 'LS', 'LT', 'LU', 'LV', 'LY', 'MA', 'MC', 'MD', 'ME', 'MF', 'MG', 'MH', 'MK', 'ML', 'MM', 'MN', 'MO', 'MP', 'MQ', 'MR', 'MS', 'MT', 'MU', 'MV', 'MW', 'MX', 'MY', 'MZ', 'NA', 'NC', 'NE', 'NF', 'NG', 'NI', 'NL', 'NO', 'NP', 'NR', 'NU', 'NZ', 'OM', 'PA', 'PE', 'PF', 'PG', 'PH', 'PK', 'PL', 'PM', 'PN', 'PR', 'PS', 'PT', 'PW', 'PY', 'QA', 'RE', 'RO', 'RS', 'RU', 'RW', 'SA', 'SB', 'SC', 'SD', 'SE', 'SG', 'SH', 'SI', 'SJ', 'SK', 'SL', 'SM', 'SN', 'SO', 'SR', 'SS', 'ST', 'SV', 'SX', 'SY', 'SZ', 'TC', 'TD', 'TF', 'TG', 'TH', 'TJ', 'TK', 'TL', 'TM', 'TN', 'TO', 'TR', 'TT', 'TV', 'TW', 'TZ', 'UA', 'UG', 'UM', 'US', 'UY', 'UZ', 'VA', 'VC', 'VE', 'VG', 'VI', 'VN', 'VU', 'WF', 'WS', 'YE', 'YT', 'ZA', 'ZM', 'ZW']); + +function isISO31661Alpha2(str) { + (0, _assertString.default)(str); + return validISO31661Alpha2CountriesCodes.has(str.toUpperCase()); +} + +var CountryCodes = validISO31661Alpha2CountriesCodes; +exports.CountryCodes = CountryCodes; \ No newline at end of file diff --git a/node_modules/validator/lib/isISO31661Alpha3.js b/node_modules/validator/lib/isISO31661Alpha3.js new file mode 100644 index 0000000..cdb774f --- /dev/null +++ b/node_modules/validator/lib/isISO31661Alpha3.js @@ -0,0 +1,21 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isISO31661Alpha3; + +var _assertString = 
_interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +// from https://en.wikipedia.org/wiki/ISO_3166-1_alpha-3 +var validISO31661Alpha3CountriesCodes = new Set(['AFG', 'ALA', 'ALB', 'DZA', 'ASM', 'AND', 'AGO', 'AIA', 'ATA', 'ATG', 'ARG', 'ARM', 'ABW', 'AUS', 'AUT', 'AZE', 'BHS', 'BHR', 'BGD', 'BRB', 'BLR', 'BEL', 'BLZ', 'BEN', 'BMU', 'BTN', 'BOL', 'BES', 'BIH', 'BWA', 'BVT', 'BRA', 'IOT', 'BRN', 'BGR', 'BFA', 'BDI', 'KHM', 'CMR', 'CAN', 'CPV', 'CYM', 'CAF', 'TCD', 'CHL', 'CHN', 'CXR', 'CCK', 'COL', 'COM', 'COG', 'COD', 'COK', 'CRI', 'CIV', 'HRV', 'CUB', 'CUW', 'CYP', 'CZE', 'DNK', 'DJI', 'DMA', 'DOM', 'ECU', 'EGY', 'SLV', 'GNQ', 'ERI', 'EST', 'ETH', 'FLK', 'FRO', 'FJI', 'FIN', 'FRA', 'GUF', 'PYF', 'ATF', 'GAB', 'GMB', 'GEO', 'DEU', 'GHA', 'GIB', 'GRC', 'GRL', 'GRD', 'GLP', 'GUM', 'GTM', 'GGY', 'GIN', 'GNB', 'GUY', 'HTI', 'HMD', 'VAT', 'HND', 'HKG', 'HUN', 'ISL', 'IND', 'IDN', 'IRN', 'IRQ', 'IRL', 'IMN', 'ISR', 'ITA', 'JAM', 'JPN', 'JEY', 'JOR', 'KAZ', 'KEN', 'KIR', 'PRK', 'KOR', 'KWT', 'KGZ', 'LAO', 'LVA', 'LBN', 'LSO', 'LBR', 'LBY', 'LIE', 'LTU', 'LUX', 'MAC', 'MKD', 'MDG', 'MWI', 'MYS', 'MDV', 'MLI', 'MLT', 'MHL', 'MTQ', 'MRT', 'MUS', 'MYT', 'MEX', 'FSM', 'MDA', 'MCO', 'MNG', 'MNE', 'MSR', 'MAR', 'MOZ', 'MMR', 'NAM', 'NRU', 'NPL', 'NLD', 'NCL', 'NZL', 'NIC', 'NER', 'NGA', 'NIU', 'NFK', 'MNP', 'NOR', 'OMN', 'PAK', 'PLW', 'PSE', 'PAN', 'PNG', 'PRY', 'PER', 'PHL', 'PCN', 'POL', 'PRT', 'PRI', 'QAT', 'REU', 'ROU', 'RUS', 'RWA', 'BLM', 'SHN', 'KNA', 'LCA', 'MAF', 'SPM', 'VCT', 'WSM', 'SMR', 'STP', 'SAU', 'SEN', 'SRB', 'SYC', 'SLE', 'SGP', 'SXM', 'SVK', 'SVN', 'SLB', 'SOM', 'ZAF', 'SGS', 'SSD', 'ESP', 'LKA', 'SDN', 'SUR', 'SJM', 'SWZ', 'SWE', 'CHE', 'SYR', 'TWN', 'TJK', 'TZA', 'THA', 'TLS', 'TGO', 'TKL', 'TON', 'TTO', 'TUN', 'TUR', 'TKM', 'TCA', 'TUV', 'UGA', 'UKR', 'ARE', 'GBR', 'USA', 'UMI', 'URY', 'UZB', 'VUT', 'VEN', 'VNM', 'VGB', 'VIR', 'WLF', 'ESH', 'YEM', 'ZMB', 'ZWE']); + +function isISO31661Alpha3(str) { + (0, _assertString.default)(str); + return validISO31661Alpha3CountriesCodes.has(str.toUpperCase()); +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isISO4217.js b/node_modules/validator/lib/isISO4217.js new file mode 100644 index 0000000..fd052f3 --- /dev/null +++ b/node_modules/validator/lib/isISO4217.js @@ -0,0 +1,22 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isISO4217; +exports.CurrencyCodes = void 0; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +// from https://en.wikipedia.org/wiki/ISO_4217 +var validISO4217CurrencyCodes = new Set(['AED', 'AFN', 'ALL', 'AMD', 'ANG', 'AOA', 'ARS', 'AUD', 'AWG', 'AZN', 'BAM', 'BBD', 'BDT', 'BGN', 'BHD', 'BIF', 'BMD', 'BND', 'BOB', 'BOV', 'BRL', 'BSD', 'BTN', 'BWP', 'BYN', 'BZD', 'CAD', 'CDF', 'CHE', 'CHF', 'CHW', 'CLF', 'CLP', 'CNY', 'COP', 'COU', 'CRC', 'CUC', 'CUP', 'CVE', 'CZK', 'DJF', 'DKK', 'DOP', 'DZD', 'EGP', 'ERN', 'ETB', 'EUR', 'FJD', 'FKP', 'GBP', 'GEL', 'GHS', 'GIP', 'GMD', 'GNF', 'GTQ', 'GYD', 'HKD', 'HNL', 'HRK', 'HTG', 'HUF', 'IDR', 'ILS', 'INR', 'IQD', 'IRR', 'ISK', 'JMD', 'JOD', 'JPY', 'KES', 'KGS', 'KHR', 'KMF', 'KPW', 'KRW', 'KWD', 'KYD', 'KZT', 'LAK', 'LBP', 'LKR', 'LRD', 'LSL', 'LYD', 'MAD', 'MDL', 'MGA', 'MKD', 'MMK', 'MNT', 'MOP', 'MRU', 'MUR', 'MVR', 'MWK', 'MXN', 'MXV', 'MYR', 'MZN', 'NAD', 'NGN', 'NIO', 'NOK', 'NPR', 'NZD', 'OMR', 'PAB', 'PEN', 'PGK', 'PHP', 'PKR', 'PLN', 'PYG', 'QAR', 'RON', 'RSD', 'RUB', 'RWF', 'SAR', 'SBD', 'SCR', 'SDG', 'SEK', 'SGD', 'SHP', 'SLL', 'SOS', 'SRD', 'SSP', 'STN', 'SVC', 'SYP', 'SZL', 'THB', 'TJS', 'TMT', 'TND', 'TOP', 'TRY', 'TTD', 'TWD', 'TZS', 'UAH', 'UGX', 'USD', 'USN', 'UYI', 'UYU', 'UYW', 'UZS', 'VES', 'VND', 'VUV', 'WST', 'XAF', 'XAG', 'XAU', 'XBA', 'XBB', 'XBC', 'XBD', 'XCD', 'XDR', 'XOF', 'XPD', 'XPF', 'XPT', 'XSU', 'XTS', 'XUA', 'XXX', 'YER', 'ZAR', 'ZMW', 'ZWL']); + +function isISO4217(str) { + (0, _assertString.default)(str); + return validISO4217CurrencyCodes.has(str.toUpperCase()); +} + +var CurrencyCodes = validISO4217CurrencyCodes; +exports.CurrencyCodes = CurrencyCodes; \ No newline at end of file diff --git a/node_modules/validator/lib/isISO8601.js b/node_modules/validator/lib/isISO8601.js new file mode 100644 index 0000000..b739606 --- /dev/null +++ b/node_modules/validator/lib/isISO8601.js @@ -0,0 +1,59 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isISO8601; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +/* eslint-disable max-len */ +// from http://goo.gl/0ejHHW +var iso8601 = /^([\+-]?\d{4}(?!\d{2}\b))((-?)((0[1-9]|1[0-2])(\3([12]\d|0[1-9]|3[01]))?|W([0-4]\d|5[0-3])(-?[1-7])?|(00[1-9]|0[1-9]\d|[12]\d{2}|3([0-5]\d|6[1-6])))([T\s]((([01]\d|2[0-3])((:?)[0-5]\d)?|24:?00)([\.,]\d+(?!:))?)?(\17[0-5]\d([\.,]\d+)?)?([zZ]|([\+-])([01]\d|2[0-3]):?([0-5]\d)?)?)?)?$/; // same as above, except with a strict 'T' separator between date and time + +var iso8601StrictSeparator = /^([\+-]?\d{4}(?!\d{2}\b))((-?)((0[1-9]|1[0-2])(\3([12]\d|0[1-9]|3[01]))?|W([0-4]\d|5[0-3])(-?[1-7])?|(00[1-9]|0[1-9]\d|[12]\d{2}|3([0-5]\d|6[1-6])))([T]((([01]\d|2[0-3])((:?)[0-5]\d)?|24:?00)([\.,]\d+(?!:))?)?(\17[0-5]\d([\.,]\d+)?)?([zZ]|([\+-])([01]\d|2[0-3]):?([0-5]\d)?)?)?)?$/; +/* eslint-enable max-len */ + +var isValidDate = function isValidDate(str) { + // str must have passed the ISO8601 check + // this check is meant to catch invalid dates + // like 2009-02-31 + // first check for ordinal dates + var ordinalMatch = str.match(/^(\d{4})-?(\d{3})([ T]{1}\.*|$)/); + + if (ordinalMatch) { + var oYear = Number(ordinalMatch[1]); + var oDay = Number(ordinalMatch[2]); // if is leap year + + if (oYear % 4 === 0 && oYear % 100 !== 0 || oYear % 400 === 0) return oDay <= 366; + return oDay <= 365; + } + + var match = str.match(/(\d{4})-?(\d{0,2})-?(\d*)/).map(Number); + var year = match[1]; + var month = match[2]; + var day = match[3]; + var monthString = month ? "0".concat(month).slice(-2) : month; + var dayString = day ? "0".concat(day).slice(-2) : day; // create a date object and compare + + var d = new Date("".concat(year, "-").concat(monthString || '01', "-").concat(dayString || '01')); + + if (month && day) { + return d.getUTCFullYear() === year && d.getUTCMonth() + 1 === month && d.getUTCDate() === day; + } + + return true; +}; + +function isISO8601(str) { + var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; + (0, _assertString.default)(str); + var check = options.strictSeparator ? iso8601StrictSeparator.test(str) : iso8601.test(str); + if (check && options.strict) return isValidDate(str); + return check; +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isISRC.js b/node_modules/validator/lib/isISRC.js new file mode 100644 index 0000000..c5ce1e2 --- /dev/null +++ b/node_modules/validator/lib/isISRC.js @@ -0,0 +1,21 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isISRC; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +// see http://isrc.ifpi.org/en/isrc-standard/code-syntax +var isrc = /^[A-Z]{2}[0-9A-Z]{3}\d{2}\d{5}$/; + +function isISRC(str) { + (0, _assertString.default)(str); + return isrc.test(str); +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isISSN.js b/node_modules/validator/lib/isISSN.js new file mode 100644 index 0000000..eee87b3 --- /dev/null +++ b/node_modules/validator/lib/isISSN.js @@ -0,0 +1,37 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isISSN; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +var issn = '^\\d{4}-?\\d{3}[\\dX]$'; + +function isISSN(str) { + var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; + (0, _assertString.default)(str); + var testIssn = issn; + testIssn = options.require_hyphen ? testIssn.replace('?', '') : testIssn; + testIssn = options.case_sensitive ? new RegExp(testIssn) : new RegExp(testIssn, 'i'); + + if (!testIssn.test(str)) { + return false; + } + + var digits = str.replace('-', '').toUpperCase(); + var checksum = 0; + + for (var i = 0; i < digits.length; i++) { + var digit = digits[i]; + checksum += (digit === 'X' ? 10 : +digit) * (8 - i); + } + + return checksum % 11 === 0; +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isIdentityCard.js b/node_modules/validator/lib/isIdentityCard.js new file mode 100644 index 0000000..993f2ad --- /dev/null +++ b/node_modules/validator/lib/isIdentityCard.js @@ -0,0 +1,384 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isIdentityCard; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +var _isInt = _interopRequireDefault(require("./isInt")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +var validators = { + PL: function PL(str) { + (0, _assertString.default)(str); + var weightOfDigits = { + 1: 1, + 2: 3, + 3: 7, + 4: 9, + 5: 1, + 6: 3, + 7: 7, + 8: 9, + 9: 1, + 10: 3, + 11: 0 + }; + + if (str != null && str.length === 11 && (0, _isInt.default)(str, { + allow_leading_zeroes: true + })) { + var digits = str.split('').slice(0, -1); + var sum = digits.reduce(function (acc, digit, index) { + return acc + Number(digit) * weightOfDigits[index + 1]; + }, 0); + var modulo = sum % 10; + var lastDigit = Number(str.charAt(str.length - 1)); + + if (modulo === 0 && lastDigit === 0 || lastDigit === 10 - modulo) { + return true; + } + } + + return false; + }, + ES: function ES(str) { + (0, _assertString.default)(str); + var DNI = /^[0-9X-Z][0-9]{7}[TRWAGMYFPDXBNJZSQVHLCKE]$/; + var charsValue = { + X: 0, + Y: 1, + Z: 2 + }; + var controlDigits = ['T', 'R', 'W', 'A', 'G', 'M', 'Y', 'F', 'P', 'D', 'X', 'B', 'N', 'J', 'Z', 'S', 'Q', 'V', 'H', 'L', 'C', 'K', 'E']; // sanitize user input + + var sanitized = str.trim().toUpperCase(); // validate the data structure + + if (!DNI.test(sanitized)) { + return false; + } // validate the control digit + + + var number = sanitized.slice(0, -1).replace(/[X,Y,Z]/g, function (char) { + return charsValue[char]; + }); + return sanitized.endsWith(controlDigits[number % 23]); + }, + FI: function FI(str) { + // https://dvv.fi/en/personal-identity-code#:~:text=control%20character%20for%20a-,personal,-identity%20code%20calculated + (0, _assertString.default)(str); + + if (str.length !== 11) { + return false; + } + + if (!str.match(/^\d{6}[\-A\+]\d{3}[0-9ABCDEFHJKLMNPRSTUVWXY]{1}$/)) { + return false; + } + + var checkDigits = '0123456789ABCDEFHJKLMNPRSTUVWXY'; + var idAsNumber = parseInt(str.slice(0, 6), 10) * 1000 + parseInt(str.slice(7, 10), 10); + var remainder = idAsNumber % 31; + var checkDigit = checkDigits[remainder]; + return checkDigit === str.slice(10, 11); + }, + IN: function IN(str) { + var DNI = /^[1-9]\d{3}\s?\d{4}\s?\d{4}$/; // multiplication table + + var d = [[0, 1, 2, 3, 4, 5, 6, 7, 8, 9], [1, 2, 3, 4, 0, 6, 7, 8, 9, 5], [2, 3, 4, 0, 1, 7, 8, 9, 5, 6], [3, 4, 0, 1, 2, 8, 9, 5, 6, 
7], [4, 0, 1, 2, 3, 9, 5, 6, 7, 8], [5, 9, 8, 7, 6, 0, 4, 3, 2, 1], [6, 5, 9, 8, 7, 1, 0, 4, 3, 2], [7, 6, 5, 9, 8, 2, 1, 0, 4, 3], [8, 7, 6, 5, 9, 3, 2, 1, 0, 4], [9, 8, 7, 6, 5, 4, 3, 2, 1, 0]]; // permutation table + + var p = [[0, 1, 2, 3, 4, 5, 6, 7, 8, 9], [1, 5, 7, 6, 2, 8, 3, 0, 9, 4], [5, 8, 0, 3, 7, 9, 6, 1, 4, 2], [8, 9, 1, 6, 0, 4, 3, 5, 2, 7], [9, 4, 5, 3, 1, 2, 6, 8, 7, 0], [4, 2, 8, 6, 5, 7, 3, 9, 0, 1], [2, 7, 9, 3, 8, 0, 6, 4, 1, 5], [7, 0, 4, 6, 9, 1, 3, 2, 5, 8]]; // sanitize user input + + var sanitized = str.trim(); // validate the data structure + + if (!DNI.test(sanitized)) { + return false; + } + + var c = 0; + var invertedArray = sanitized.replace(/\s/g, '').split('').map(Number).reverse(); + invertedArray.forEach(function (val, i) { + c = d[c][p[i % 8][val]]; + }); + return c === 0; + }, + IR: function IR(str) { + if (!str.match(/^\d{10}$/)) return false; + str = "0000".concat(str).substr(str.length - 6); + if (parseInt(str.substr(3, 6), 10) === 0) return false; + var lastNumber = parseInt(str.substr(9, 1), 10); + var sum = 0; + + for (var i = 0; i < 9; i++) { + sum += parseInt(str.substr(i, 1), 10) * (10 - i); + } + + sum %= 11; + return sum < 2 && lastNumber === sum || sum >= 2 && lastNumber === 11 - sum; + }, + IT: function IT(str) { + if (str.length !== 9) return false; + if (str === 'CA00000AA') return false; // https://it.wikipedia.org/wiki/Carta_d%27identit%C3%A0_elettronica_italiana + + return str.search(/C[A-Z][0-9]{5}[A-Z]{2}/i) > -1; + }, + NO: function NO(str) { + var sanitized = str.trim(); + if (isNaN(Number(sanitized))) return false; + if (sanitized.length !== 11) return false; + if (sanitized === '00000000000') return false; // https://no.wikipedia.org/wiki/F%C3%B8dselsnummer + + var f = sanitized.split('').map(Number); + var k1 = (11 - (3 * f[0] + 7 * f[1] + 6 * f[2] + 1 * f[3] + 8 * f[4] + 9 * f[5] + 4 * f[6] + 5 * f[7] + 2 * f[8]) % 11) % 11; + var k2 = (11 - (5 * f[0] + 4 * f[1] + 3 * f[2] + 2 * f[3] + 7 * f[4] + 6 * f[5] + 5 * f[6] + 4 * f[7] + 3 * f[8] + 2 * k1) % 11) % 11; + if (k1 !== f[9] || k2 !== f[10]) return false; + return true; + }, + TH: function TH(str) { + if (!str.match(/^[1-8]\d{12}$/)) return false; // validate check digit + + var sum = 0; + + for (var i = 0; i < 12; i++) { + sum += parseInt(str[i], 10) * (13 - i); + } + + return str[12] === ((11 - sum % 11) % 10).toString(); + }, + LK: function LK(str) { + var old_nic = /^[1-9]\d{8}[vx]$/i; + var new_nic = /^[1-9]\d{11}$/i; + if (str.length === 10 && old_nic.test(str)) return true;else if (str.length === 12 && new_nic.test(str)) return true; + return false; + }, + 'he-IL': function heIL(str) { + var DNI = /^\d{9}$/; // sanitize user input + + var sanitized = str.trim(); // validate the data structure + + if (!DNI.test(sanitized)) { + return false; + } + + var id = sanitized; + var sum = 0, + incNum; + + for (var i = 0; i < id.length; i++) { + incNum = Number(id[i]) * (i % 2 + 1); // Multiply number by 1 or 2 + + sum += incNum > 9 ? 
incNum - 9 : incNum; // Sum the digits up and add to total + } + + return sum % 10 === 0; + }, + 'ar-LY': function arLY(str) { + // Libya National Identity Number NIN is 12 digits, the first digit is either 1 or 2 + var NIN = /^(1|2)\d{11}$/; // sanitize user input + + var sanitized = str.trim(); // validate the data structure + + if (!NIN.test(sanitized)) { + return false; + } + + return true; + }, + 'ar-TN': function arTN(str) { + var DNI = /^\d{8}$/; // sanitize user input + + var sanitized = str.trim(); // validate the data structure + + if (!DNI.test(sanitized)) { + return false; + } + + return true; + }, + 'zh-CN': function zhCN(str) { + var provincesAndCities = ['11', // 北京 + '12', // 天津 + '13', // 河北 + '14', // 山西 + '15', // 内蒙古 + '21', // 辽宁 + '22', // 吉林 + '23', // 黑龙江 + '31', // 上海 + '32', // 江苏 + '33', // 浙江 + '34', // 安徽 + '35', // 福建 + '36', // 江西 + '37', // 山东 + '41', // 河南 + '42', // 湖北 + '43', // 湖南 + '44', // 广东 + '45', // 广西 + '46', // 海南 + '50', // 重庆 + '51', // 四川 + '52', // 贵州 + '53', // 云南 + '54', // 西藏 + '61', // 陕西 + '62', // 甘肃 + '63', // 青海 + '64', // 宁夏 + '65', // 新疆 + '71', // 台湾 + '81', // 香港 + '82', // 澳门 + '91' // 国外 + ]; + var powers = ['7', '9', '10', '5', '8', '4', '2', '1', '6', '3', '7', '9', '10', '5', '8', '4', '2']; + var parityBit = ['1', '0', 'X', '9', '8', '7', '6', '5', '4', '3', '2']; + + var checkAddressCode = function checkAddressCode(addressCode) { + return provincesAndCities.includes(addressCode); + }; + + var checkBirthDayCode = function checkBirthDayCode(birDayCode) { + var yyyy = parseInt(birDayCode.substring(0, 4), 10); + var mm = parseInt(birDayCode.substring(4, 6), 10); + var dd = parseInt(birDayCode.substring(6), 10); + var xdata = new Date(yyyy, mm - 1, dd); + + if (xdata > new Date()) { + return false; // eslint-disable-next-line max-len + } else if (xdata.getFullYear() === yyyy && xdata.getMonth() === mm - 1 && xdata.getDate() === dd) { + return true; + } + + return false; + }; + + var getParityBit = function getParityBit(idCardNo) { + var id17 = idCardNo.substring(0, 17); + var power = 0; + + for (var i = 0; i < 17; i++) { + power += parseInt(id17.charAt(i), 10) * parseInt(powers[i], 10); + } + + var mod = power % 11; + return parityBit[mod]; + }; + + var checkParityBit = function checkParityBit(idCardNo) { + return getParityBit(idCardNo) === idCardNo.charAt(17).toUpperCase(); + }; + + var check15IdCardNo = function check15IdCardNo(idCardNo) { + var check = /^[1-9]\d{7}((0[1-9])|(1[0-2]))((0[1-9])|([1-2][0-9])|(3[0-1]))\d{3}$/.test(idCardNo); + if (!check) return false; + var addressCode = idCardNo.substring(0, 2); + check = checkAddressCode(addressCode); + if (!check) return false; + var birDayCode = "19".concat(idCardNo.substring(6, 12)); + check = checkBirthDayCode(birDayCode); + if (!check) return false; + return true; + }; + + var check18IdCardNo = function check18IdCardNo(idCardNo) { + var check = /^[1-9]\d{5}[1-9]\d{3}((0[1-9])|(1[0-2]))((0[1-9])|([1-2][0-9])|(3[0-1]))\d{3}(\d|x|X)$/.test(idCardNo); + if (!check) return false; + var addressCode = idCardNo.substring(0, 2); + check = checkAddressCode(addressCode); + if (!check) return false; + var birDayCode = idCardNo.substring(6, 14); + check = checkBirthDayCode(birDayCode); + if (!check) return false; + return checkParityBit(idCardNo); + }; + + var checkIdCardNo = function checkIdCardNo(idCardNo) { + var check = /^\d{15}|(\d{17}(\d|x|X))$/.test(idCardNo); + if (!check) return false; + + if (idCardNo.length === 15) { + return check15IdCardNo(idCardNo); + } + + return 
check18IdCardNo(idCardNo); + }; + + return checkIdCardNo(str); + }, + 'zh-TW': function zhTW(str) { + var ALPHABET_CODES = { + A: 10, + B: 11, + C: 12, + D: 13, + E: 14, + F: 15, + G: 16, + H: 17, + I: 34, + J: 18, + K: 19, + L: 20, + M: 21, + N: 22, + O: 35, + P: 23, + Q: 24, + R: 25, + S: 26, + T: 27, + U: 28, + V: 29, + W: 32, + X: 30, + Y: 31, + Z: 33 + }; + var sanitized = str.trim().toUpperCase(); + if (!/^[A-Z][0-9]{9}$/.test(sanitized)) return false; + return Array.from(sanitized).reduce(function (sum, number, index) { + if (index === 0) { + var code = ALPHABET_CODES[number]; + return code % 10 * 9 + Math.floor(code / 10); + } + + if (index === 9) { + return (10 - sum % 10 - Number(number)) % 10 === 0; + } + + return sum + Number(number) * (9 - index); + }, 0); + } +}; + +function isIdentityCard(str, locale) { + (0, _assertString.default)(str); + + if (locale in validators) { + return validators[locale](str); + } else if (locale === 'any') { + for (var key in validators) { + // https://github.com/gotwarlost/istanbul/blob/master/ignoring-code-for-coverage.md#ignoring-code-for-coverage-purposes + // istanbul ignore else + if (validators.hasOwnProperty(key)) { + var validator = validators[key]; + + if (validator(str)) { + return true; + } + } + } + + return false; + } + + throw new Error("Invalid locale '".concat(locale, "'")); +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isIn.js b/node_modules/validator/lib/isIn.js new file mode 100644 index 0000000..62c5a4d --- /dev/null +++ b/node_modules/validator/lib/isIn.js @@ -0,0 +1,42 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isIn; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +var _toString = _interopRequireDefault(require("./util/toString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function _typeof(obj) { "@babel/helpers - typeof"; if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? 
"symbol" : typeof obj; }; } return _typeof(obj); } + +function isIn(str, options) { + (0, _assertString.default)(str); + var i; + + if (Object.prototype.toString.call(options) === '[object Array]') { + var array = []; + + for (i in options) { + // https://github.com/gotwarlost/istanbul/blob/master/ignoring-code-for-coverage.md#ignoring-code-for-coverage-purposes + // istanbul ignore else + if ({}.hasOwnProperty.call(options, i)) { + array[i] = (0, _toString.default)(options[i]); + } + } + + return array.indexOf(str) >= 0; + } else if (_typeof(options) === 'object') { + return options.hasOwnProperty(str); + } else if (options && typeof options.indexOf === 'function') { + return options.indexOf(str) >= 0; + } + + return false; +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isInt.js b/node_modules/validator/lib/isInt.js new file mode 100644 index 0000000..40f776c --- /dev/null +++ b/node_modules/validator/lib/isInt.js @@ -0,0 +1,30 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isInt; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +var int = /^(?:[-+]?(?:0|[1-9][0-9]*))$/; +var intLeadingZeroes = /^[-+]?[0-9]+$/; + +function isInt(str, options) { + (0, _assertString.default)(str); + options = options || {}; // Get the regex to use for testing, based on whether + // leading zeroes are allowed or not. + + var regex = options.hasOwnProperty('allow_leading_zeroes') && !options.allow_leading_zeroes ? int : intLeadingZeroes; // Check min/max/lt/gt + + var minCheckPassed = !options.hasOwnProperty('min') || str >= options.min; + var maxCheckPassed = !options.hasOwnProperty('max') || str <= options.max; + var ltCheckPassed = !options.hasOwnProperty('lt') || str < options.lt; + var gtCheckPassed = !options.hasOwnProperty('gt') || str > options.gt; + return regex.test(str) && minCheckPassed && maxCheckPassed && ltCheckPassed && gtCheckPassed; +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isJSON.js b/node_modules/validator/lib/isJSON.js new file mode 100644 index 0000000..78c09ef --- /dev/null +++ b/node_modules/validator/lib/isJSON.js @@ -0,0 +1,41 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isJSON; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +var _merge = _interopRequireDefault(require("./util/merge")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function _typeof(obj) { "@babel/helpers - typeof"; if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? 
"symbol" : typeof obj; }; } return _typeof(obj); } + +var default_json_options = { + allow_primitives: false +}; + +function isJSON(str, options) { + (0, _assertString.default)(str); + + try { + options = (0, _merge.default)(options, default_json_options); + var primitives = []; + + if (options.allow_primitives) { + primitives = [null, false, true]; + } + + var obj = JSON.parse(str); + return primitives.includes(obj) || !!obj && _typeof(obj) === 'object'; + } catch (e) { + /* ignore */ + } + + return false; +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isJWT.js b/node_modules/validator/lib/isJWT.js new file mode 100644 index 0000000..65d89fc --- /dev/null +++ b/node_modules/validator/lib/isJWT.js @@ -0,0 +1,31 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isJWT; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +var _isBase = _interopRequireDefault(require("./isBase64")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function isJWT(str) { + (0, _assertString.default)(str); + var dotSplit = str.split('.'); + var len = dotSplit.length; + + if (len > 3 || len < 2) { + return false; + } + + return dotSplit.reduce(function (acc, currElem) { + return acc && (0, _isBase.default)(currElem, { + urlSafe: true + }); + }, true); +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isLatLong.js b/node_modules/validator/lib/isLatLong.js new file mode 100644 index 0000000..e288812 --- /dev/null +++ b/node_modules/validator/lib/isLatLong.js @@ -0,0 +1,37 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isLatLong; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +var _merge = _interopRequireDefault(require("./util/merge")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +var lat = /^\(?[+-]?(90(\.0+)?|[1-8]?\d(\.\d+)?)$/; +var long = /^\s?[+-]?(180(\.0+)?|1[0-7]\d(\.\d+)?|\d{1,2}(\.\d+)?)\)?$/; +var latDMS = /^(([1-8]?\d)\D+([1-5]?\d|60)\D+([1-5]?\d|60)(\.\d+)?|90\D+0\D+0)\D+[NSns]?$/i; +var longDMS = /^\s*([1-7]?\d{1,2}\D+([1-5]?\d|60)\D+([1-5]?\d|60)(\.\d+)?|180\D+0\D+0)\D+[EWew]?$/i; +var defaultLatLongOptions = { + checkDMS: false +}; + +function isLatLong(str, options) { + (0, _assertString.default)(str); + options = (0, _merge.default)(options, defaultLatLongOptions); + if (!str.includes(',')) return false; + var pair = str.split(','); + if (pair[0].startsWith('(') && !pair[1].endsWith(')') || pair[1].endsWith(')') && !pair[0].startsWith('(')) return false; + + if (options.checkDMS) { + return latDMS.test(pair[0]) && longDMS.test(pair[1]); + } + + return lat.test(pair[0]) && long.test(pair[1]); +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isLength.js b/node_modules/validator/lib/isLength.js new file mode 100644 index 0000000..39b7597 --- /dev/null +++ b/node_modules/validator/lib/isLength.js @@ -0,0 +1,35 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isLength; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function _typeof(obj) { "@babel/helpers - typeof"; if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); } + +/* eslint-disable prefer-rest-params */ +function isLength(str, options) { + (0, _assertString.default)(str); + var min; + var max; + + if (_typeof(options) === 'object') { + min = options.min || 0; + max = options.max; + } else { + // backwards compatibility: isLength(str, min [, max]) + min = arguments[1] || 0; + max = arguments[2]; + } + + var surrogatePairs = str.match(/[\uD800-\uDBFF][\uDC00-\uDFFF]/g) || []; + var len = str.length - surrogatePairs.length; + return len >= min && (typeof max === 'undefined' || len <= max); +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isLicensePlate.js b/node_modules/validator/lib/isLicensePlate.js new file mode 100644 index 0000000..e65428e --- /dev/null +++ b/node_modules/validator/lib/isLicensePlate.js @@ -0,0 +1,58 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isLicensePlate; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +var validators = { + 'cs-CZ': function csCZ(str) { + return /^(([ABCDEFHKIJKLMNPRSTUVXYZ]|[0-9])-?){5,8}$/.test(str); + }, + 'de-DE': function deDE(str) { + return /^((AW|UL|AK|GA|AÖ|LF|AZ|AM|AS|ZE|AN|AB|A|KG|KH|BA|EW|BZ|HY|KM|BT|HP|B|BC|BI|BO|FN|TT|ÜB|BN|AH|BS|FR|HB|ZZ|BB|BK|BÖ|OC|OK|CW|CE|C|CO|LH|CB|KW|LC|LN|DA|DI|DE|DH|SY|NÖ|DO|DD|DU|DN|D|EI|EA|EE|FI|EM|EL|EN|PF|ED|EF|ER|AU|ZP|E|ES|NT|EU|FL|FO|FT|FF|F|FS|FD|FÜ|GE|G|GI|GF|GS|ZR|GG|GP|GR|NY|ZI|GÖ|GZ|GT|HA|HH|HM|HU|WL|HZ|WR|RN|HK|HD|HN|HS|GK|HE|HF|RZ|HI|HG|HO|HX|IK|IL|IN|J|JL|KL|KA|KS|KF|KE|KI|KT|KO|KN|KR|KC|KU|K|LD|LL|LA|L|OP|LM|LI|LB|LU|LÖ|HL|LG|MD|GN|MZ|MA|ML|MR|MY|AT|DM|MC|NZ|RM|RG|MM|ME|MB|MI|FG|DL|HC|MW|RL|MK|MG|MÜ|WS|MH|M|MS|NU|NB|ND|NM|NK|NW|NR|NI|NF|DZ|EB|OZ|TG|TO|N|OA|GM|OB|CA|EH|FW|OF|OL|OE|OG|BH|LR|OS|AA|GD|OH|KY|NP|WK|PB|PA|PE|PI|PS|P|PM|PR|RA|RV|RE|R|H|SB|WN|RS|RD|RT|BM|NE|GV|RP|SU|GL|RO|GÜ|RH|EG|RW|PN|SK|MQ|RU|SZ|RI|SL|SM|SC|HR|FZ|VS|SW|SN|CR|SE|SI|SO|LP|SG|NH|SP|IZ|ST|BF|TE|HV|OD|SR|S|AC|DW|ZW|TF|TS|TR|TÜ|UM|PZ|TP|UE|UN|UH|MN|KK|VB|V|AE|PL|RC|VG|GW|PW|VR|VK|KB|WA|WT|BE|WM|WE|AP|MO|WW|FB|WZ|WI|WB|JE|WF|WO|W|WÜ|BL|Z|GC)[- ]?[A-Z]{1,2}[- ]?\d{1,4}|(AIC|FDB|ABG|SLN|SAW|KLZ|BUL|ESB|NAB|SUL|WST|ABI|AZE|BTF|KÖT|DKB|FEU|ROT|ALZ|SMÜ|WER|AUR|NOR|DÜW|BRK|HAB|TÖL|WOR|BAD|BAR|BER|BIW|EBS|KEM|MÜB|PEG|BGL|BGD|REI|WIL|BKS|BIR|WAT|BOR|BOH|BOT|BRB|BLK|HHM|NEB|NMB|WSF|LEO|HDL|WMS|WZL|BÜS|CHA|KÖZ|ROD|WÜM|CLP|NEC|COC|ZEL|COE|CUX|DAH|LDS|DEG|DEL|RSL|DLG|DGF|LAN|HEI|MED|DON|KIB|ROK|JÜL|MON|SLE|EBE|EIC|HIG|WBS|BIT|PRÜ|LIB|EMD|WIT|ERH|HÖS|ERZ|ANA|ASZ|MAB|MEK|STL|SZB|FDS|HCH|HOR|WOL|FRG|GRA|WOS|FRI|FFB|GAP|GER|BRL|CLZ|GTH|NOH|HGW|GRZ|LÖB|NOL|WSW|DUD|HMÜ|OHA|KRU|HAL|HAM|HBS|QLB|HVL|NAU|HAS|EBN|GEO|HOH|HDH|ERK|HER|WAN|HEF|ROF|HBN|ALF|HSK|USI|NAI|REH|SAN|KÜN|ÖHR|HOL|WAR|ARN|BRG|GNT|HOG|WOH|KEH|MAI|PAR|RID|ROL|KLE|GEL|KUS|KYF|ART|SDH|LDK|DIL|MAL|VIB|LER|BNA|GHA|GRM|MTL|WUR|LEV|LIF|STE|WEL|LIP|VAI|LUP|HGN|LBZ|LWL|PCH|STB|DAN|MKK|SLÜ|MSP|TBB|MGH|MTK|BIN|MSH|EIL|HET|SGH|BID|MYK|MSE|MST|MÜR|WRN|MEI|GRH|RIE|MZG|MIL|OBB|BED|FLÖ|MOL|FRW|SEE|SRB|AIB|MOS|BCH|ILL|SOB|NMS|NEA|SEF|UFF|NEW|VOH|NDH|TDO|NWM|GDB|GVM|WIS|NOM|EIN|GAN|LAU|HEB|OHV|OSL|SFB|ERB|LOS|BSK|KEL|BSB|MEL|WTL|OAL|FÜS|MOD|OHZ|OPR|BÜR|PAF|PLÖ|CAS|GLA|REG|VIT|ECK|SIM|GOA|EMS|DIZ|GOH|RÜD|SWA|NES|KÖN|MET|LRO|BÜZ|DBR|ROS|TET|HRO|ROW|BRV|HIP|PAN|GRI|SHK|EIS|SRO|SOK|LBS|SCZ|MER|QFT|SLF|SLS|HOM|SLK|ASL|BBG|SBK|SFT|SHG|MGN|MEG|ZIG|SAD|NEN|OVI|SHA|BLB|SIG|SON|SPN|FOR|GUB|SPB|IGB|WND|STD|STA|SDL|OBG|HST|BOG|SHL|PIR|FTL|SEB|SÖM|SÜW|TIR|SAB|TUT|ANG|SDT|LÜN|LSZ|MHL|VEC|VER|VIE|OVL|ANK|OVP|SBG|UEM|UER|WLG|GMN|NVP|RDG|RÜG|DAU|FKB|WAF|WAK|SLZ|WEN|SOG|APD|WUG|GUN|ESW|WIZ|WES|DIN|BRA|BÜD|WHV|HWI|GHC|WTM|WOB|WUN|MAK|SEL|OCH|HOT|WDA)[- ]?(([A-Z][- ]?\d{1,4})|([A-Z]{2}[- ]?\d{1,3})))[- ]?(E|H)?$/.test(str); + }, + 'de-LI': function deLI(str) { + return /^FL[- ]?\d{1,5}[UZ]?$/.test(str); + }, + 'fi-FI': function fiFI(str) { + return /^(?=.{4,7})(([A-Z]{1,3}|[0-9]{1,3})[\s-]?([A-Z]{1,3}|[0-9]{1,5}))$/.test(str); + }, + 'pt-PT': function ptPT(str) { + return /^([A-Z]{2}|[0-9]{2})[ -·]?([A-Z]{2}|[0-9]{2})[ -·]?([A-Z]{2}|[0-9]{2})$/.test(str); + }, + 'sq-AL': function sqAL(str) { + return /^[A-Z]{2}[- ]?((\d{3}[- ]?(([A-Z]{2})|T))|(R[- ]?\d{3}))$/.test(str); + }, + 'pt-BR': function ptBR(str) { + return /^[A-Z]{3}[ -]?[0-9][A-Z][0-9]{2}|[A-Z]{3}[ -]?[0-9]{4}$/.test(str); + } +}; + +function isLicensePlate(str, locale) { + (0, _assertString.default)(str); + + if (locale in validators) { + return validators[locale](str); + } else if (locale === 'any') { + for (var key in validators) { + /* eslint 
guard-for-in: 0 */ + var validator = validators[key]; + + if (validator(str)) { + return true; + } + } + + return false; + } + + throw new Error("Invalid locale '".concat(locale, "'")); +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isLocale.js b/node_modules/validator/lib/isLocale.js new file mode 100644 index 0000000..1a4f609 --- /dev/null +++ b/node_modules/validator/lib/isLocale.js @@ -0,0 +1,25 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isLocale; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +var localeReg = /^[A-Za-z]{2,4}([_-]([A-Za-z]{4}|[\d]{3}))?([_-]([A-Za-z]{2}|[\d]{3}))?$/; + +function isLocale(str) { + (0, _assertString.default)(str); + + if (str === 'en_US_POSIX' || str === 'ca_ES_VALENCIA') { + return true; + } + + return localeReg.test(str); +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isLowercase.js b/node_modules/validator/lib/isLowercase.js new file mode 100644 index 0000000..7f412d9 --- /dev/null +++ b/node_modules/validator/lib/isLowercase.js @@ -0,0 +1,18 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isLowercase; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function isLowercase(str) { + (0, _assertString.default)(str); + return str === str.toLowerCase(); +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isMACAddress.js b/node_modules/validator/lib/isMACAddress.js new file mode 100644 index 0000000..125ea80 --- /dev/null +++ b/node_modules/validator/lib/isMACAddress.js @@ -0,0 +1,30 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isMACAddress; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +var macAddress = /^(?:[0-9a-fA-F]{2}([-:\s]))([0-9a-fA-F]{2}\1){4}([0-9a-fA-F]{2})$/; +var macAddressNoSeparators = /^([0-9a-fA-F]){12}$/; +var macAddressWithDots = /^([0-9a-fA-F]{4}\.){2}([0-9a-fA-F]{4})$/; + +function isMACAddress(str, options) { + (0, _assertString.default)(str); + /** + * @deprecated `no_colons` TODO: remove it in the next major + */ + + if (options && (options.no_colons || options.no_separators)) { + return macAddressNoSeparators.test(str); + } + + return macAddress.test(str) || macAddressWithDots.test(str); +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isMD5.js b/node_modules/validator/lib/isMD5.js new file mode 100644 index 0000000..57f2b0e --- /dev/null +++ b/node_modules/validator/lib/isMD5.js @@ -0,0 +1,20 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isMD5; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +var md5 = /^[a-f0-9]{32}$/; + +function isMD5(str) { + (0, _assertString.default)(str); + return md5.test(str); +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isMagnetURI.js b/node_modules/validator/lib/isMagnetURI.js new file mode 100644 index 0000000..7477e4c --- /dev/null +++ b/node_modules/validator/lib/isMagnetURI.js @@ -0,0 +1,20 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isMagnetURI; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +var magnetURI = /^magnet:\?xt(?:\.1)?=urn:(?:aich|bitprint|btih|ed2k|ed2khash|kzhash|md5|sha1|tree:tiger):[a-z0-9]{32}(?:[a-z0-9]{8})?($|&)/i; + +function isMagnetURI(url) { + (0, _assertString.default)(url); + return magnetURI.test(url.trim()); +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isMimeType.js b/node_modules/validator/lib/isMimeType.js new file mode 100644 index 0000000..917aef2 --- /dev/null +++ b/node_modules/validator/lib/isMimeType.js @@ -0,0 +1,51 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isMimeType; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/* + Checks if the provided string matches to a correct Media type format (MIME type) + + This function only checks is the string format follows the + etablished rules by the according RFC specifications. + This function supports 'charset' in textual media types + (https://tools.ietf.org/html/rfc6657). + + This function does not check against all the media types listed + by the IANA (https://www.iana.org/assignments/media-types/media-types.xhtml) + because of lightness purposes : it would require to include + all these MIME types in this librairy, which would weigh it + significantly. This kind of effort maybe is not worth for the use that + this function has in this entire librairy. + + More informations in the RFC specifications : + - https://tools.ietf.org/html/rfc2045 + - https://tools.ietf.org/html/rfc2046 + - https://tools.ietf.org/html/rfc7231#section-3.1.1.1 + - https://tools.ietf.org/html/rfc7231#section-3.1.1.5 +*/ +// Match simple MIME types +// NB : +// Subtype length must not exceed 100 characters. +// This rule does not comply to the RFC specs (what is the max length ?). 
+var mimeTypeSimple = /^(application|audio|font|image|message|model|multipart|text|video)\/[a-zA-Z0-9\.\-\+]{1,100}$/i; // eslint-disable-line max-len +// Handle "charset" in "text/*" + +var mimeTypeText = /^text\/[a-zA-Z0-9\.\-\+]{1,100};\s?charset=("[a-zA-Z0-9\.\-\+\s]{0,70}"|[a-zA-Z0-9\.\-\+]{0,70})(\s?\([a-zA-Z0-9\.\-\+\s]{1,20}\))?$/i; // eslint-disable-line max-len +// Handle "boundary" in "multipart/*" + +var mimeTypeMultipart = /^multipart\/[a-zA-Z0-9\.\-\+]{1,100}(;\s?(boundary|charset)=("[a-zA-Z0-9\.\-\+\s]{0,70}"|[a-zA-Z0-9\.\-\+]{0,70})(\s?\([a-zA-Z0-9\.\-\+\s]{1,20}\))?){0,2}$/i; // eslint-disable-line max-len + +function isMimeType(str) { + (0, _assertString.default)(str); + return mimeTypeSimple.test(str) || mimeTypeText.test(str) || mimeTypeMultipart.test(str); +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isMobilePhone.js b/node_modules/validator/lib/isMobilePhone.js new file mode 100644 index 0000000..a349f06 --- /dev/null +++ b/node_modules/validator/lib/isMobilePhone.js @@ -0,0 +1,200 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isMobilePhone; +exports.locales = void 0; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/* eslint-disable max-len */ +var phones = { + 'am-AM': /^(\+?374|0)((10|[9|7][0-9])\d{6}$|[2-4]\d{7}$)/, + 'ar-AE': /^((\+?971)|0)?5[024568]\d{7}$/, + 'ar-BH': /^(\+?973)?(3|6)\d{7}$/, + 'ar-DZ': /^(\+?213|0)(5|6|7)\d{8}$/, + 'ar-LB': /^(\+?961)?((3|81)\d{6}|7\d{7})$/, + 'ar-EG': /^((\+?20)|0)?1[0125]\d{8}$/, + 'ar-IQ': /^(\+?964|0)?7[0-9]\d{8}$/, + 'ar-JO': /^(\+?962|0)?7[789]\d{7}$/, + 'ar-KW': /^(\+?965)[569]\d{7}$/, + 'ar-LY': /^((\+?218)|0)?(9[1-6]\d{7}|[1-8]\d{7,9})$/, + 'ar-MA': /^(?:(?:\+|00)212|0)[5-7]\d{8}$/, + 'ar-OM': /^((\+|00)968)?(9[1-9])\d{6}$/, + 'ar-PS': /^(\+?970|0)5[6|9](\d{7})$/, + 'ar-SA': /^(!?(\+?966)|0)?5\d{8}$/, + 'ar-SY': /^(!?(\+?963)|0)?9\d{8}$/, + 'ar-TN': /^(\+?216)?[2459]\d{7}$/, + 'az-AZ': /^(\+994|0)(5[015]|7[07]|99)\d{7}$/, + 'bs-BA': /^((((\+|00)3876)|06))((([0-3]|[5-6])\d{6})|(4\d{7}))$/, + 'be-BY': /^(\+?375)?(24|25|29|33|44)\d{7}$/, + 'bg-BG': /^(\+?359|0)?8[789]\d{7}$/, + 'bn-BD': /^(\+?880|0)1[13456789][0-9]{8}$/, + 'ca-AD': /^(\+376)?[346]\d{5}$/, + 'cs-CZ': /^(\+?420)? 
?[1-9][0-9]{2} ?[0-9]{3} ?[0-9]{3}$/, + 'da-DK': /^(\+?45)?\s?\d{2}\s?\d{2}\s?\d{2}\s?\d{2}$/, + 'de-DE': /^((\+49|0)[1|3])([0|5][0-45-9]\d|6([23]|0\d?)|7([0-57-9]|6\d))\d{7,9}$/, + 'de-AT': /^(\+43|0)\d{1,4}\d{3,12}$/, + 'de-CH': /^(\+41|0)([1-9])\d{1,9}$/, + 'de-LU': /^(\+352)?((6\d1)\d{6})$/, + 'dv-MV': /^(\+?960)?(7[2-9]|91|9[3-9])\d{7}$/, + 'el-GR': /^(\+?30|0)?(69\d{8})$/, + 'en-AU': /^(\+?61|0)4\d{8}$/, + 'en-BM': /^(\+?1)?441(((3|7)\d{6}$)|(5[0-3][0-9]\d{4}$)|(59\d{5}))/, + 'en-GB': /^(\+?44|0)7\d{9}$/, + 'en-GG': /^(\+?44|0)1481\d{6}$/, + 'en-GH': /^(\+233|0)(20|50|24|54|27|57|26|56|23|28|55|59)\d{7}$/, + 'en-GY': /^(\+592|0)6\d{6}$/, + 'en-HK': /^(\+?852[-\s]?)?[456789]\d{3}[-\s]?\d{4}$/, + 'en-MO': /^(\+?853[-\s]?)?[6]\d{3}[-\s]?\d{4}$/, + 'en-IE': /^(\+?353|0)8[356789]\d{7}$/, + 'en-IN': /^(\+?91|0)?[6789]\d{9}$/, + 'en-KE': /^(\+?254|0)(7|1)\d{8}$/, + 'en-KI': /^((\+686|686)?)?( )?((6|7)(2|3|8)[0-9]{6})$/, + 'en-MT': /^(\+?356|0)?(99|79|77|21|27|22|25)[0-9]{6}$/, + 'en-MU': /^(\+?230|0)?\d{8}$/, + 'en-NA': /^(\+?264|0)(6|8)\d{7}$/, + 'en-NG': /^(\+?234|0)?[789]\d{9}$/, + 'en-NZ': /^(\+?64|0)[28]\d{7,9}$/, + 'en-PK': /^((00|\+)?92|0)3[0-6]\d{8}$/, + 'en-PH': /^(09|\+639)\d{9}$/, + 'en-RW': /^(\+?250|0)?[7]\d{8}$/, + 'en-SG': /^(\+65)?[3689]\d{7}$/, + 'en-SL': /^(\+?232|0)\d{8}$/, + 'en-TZ': /^(\+?255|0)?[67]\d{8}$/, + 'en-UG': /^(\+?256|0)?[7]\d{8}$/, + 'en-US': /^((\+1|1)?( |-)?)?(\([2-9][0-9]{2}\)|[2-9][0-9]{2})( |-)?([2-9][0-9]{2}( |-)?[0-9]{4})$/, + 'en-ZA': /^(\+?27|0)\d{9}$/, + 'en-ZM': /^(\+?26)?09[567]\d{7}$/, + 'en-ZW': /^(\+263)[0-9]{9}$/, + 'en-BW': /^(\+?267)?(7[1-8]{1})\d{6}$/, + 'es-AR': /^\+?549(11|[2368]\d)\d{8}$/, + 'es-BO': /^(\+?591)?(6|7)\d{7}$/, + 'es-CO': /^(\+?57)?3(0(0|1|2|4|5)|1\d|2[0-4]|5(0|1))\d{7}$/, + 'es-CL': /^(\+?56|0)[2-9]\d{1}\d{7}$/, + 'es-CR': /^(\+506)?[2-8]\d{7}$/, + 'es-CU': /^(\+53|0053)?5\d{7}/, + 'es-DO': /^(\+?1)?8[024]9\d{7}$/, + 'es-HN': /^(\+?504)?[9|8]\d{7}$/, + 'es-EC': /^(\+?593|0)([2-7]|9[2-9])\d{7}$/, + 'es-ES': /^(\+?34)?[6|7]\d{8}$/, + 'es-PE': /^(\+?51)?9\d{8}$/, + 'es-MX': /^(\+?52)?(1|01)?\d{10,11}$/, + 'es-PA': /^(\+?507)\d{7,8}$/, + 'es-PY': /^(\+?595|0)9[9876]\d{7}$/, + 'es-SV': /^(\+?503)?[67]\d{7}$/, + 'es-UY': /^(\+598|0)9[1-9][\d]{6}$/, + 'es-VE': /^(\+?58)?(2|4)\d{9}$/, + 'et-EE': /^(\+?372)?\s?(5|8[1-4])\s?([0-9]\s?){6,7}$/, + 'fa-IR': /^(\+?98[\-\s]?|0)9[0-39]\d[\-\s]?\d{3}[\-\s]?\d{4}$/, + 'fi-FI': /^(\+?358|0)\s?(4(0|1|2|4|5|6)?|50)\s?(\d\s?){4,8}\d$/, + 'fj-FJ': /^(\+?679)?\s?\d{3}\s?\d{4}$/, + 'fo-FO': /^(\+?298)?\s?\d{2}\s?\d{2}\s?\d{2}$/, + 'fr-BF': /^(\+226|0)[67]\d{7}$/, + 'fr-CM': /^(\+?237)6[0-9]{8}$/, + 'fr-FR': /^(\+?33|0)[67]\d{8}$/, + 'fr-GF': /^(\+?594|0|00594)[67]\d{8}$/, + 'fr-GP': /^(\+?590|0|00590)[67]\d{8}$/, + 'fr-MQ': /^(\+?596|0|00596)[67]\d{8}$/, + 'fr-PF': /^(\+?689)?8[789]\d{6}$/, + 'fr-RE': /^(\+?262|0|00262)[67]\d{8}$/, + 'he-IL': /^(\+972|0)([23489]|5[012345689]|77)[1-9]\d{6}$/, + 'hu-HU': /^(\+?36|06)(20|30|31|50|70)\d{7}$/, + 'id-ID': /^(\+?62|0)8(1[123456789]|2[1238]|3[1238]|5[12356789]|7[78]|9[56789]|8[123456789])([\s?|\d]{5,11})$/, + 'it-IT': /^(\+?39)?\s?3\d{2} ?\d{6,7}$/, + 'it-SM': /^((\+378)|(0549)|(\+390549)|(\+3780549))?6\d{5,9}$/, + 'ja-JP': /^(\+81[ \-]?(\(0\))?|0)[6789]0[ \-]?\d{4}[ \-]?\d{4}$/, + 'ka-GE': /^(\+?995)?(5|79)\d{7}$/, + 'kk-KZ': /^(\+?7|8)?7\d{9}$/, + 'kl-GL': /^(\+?299)?\s?\d{2}\s?\d{2}\s?\d{2}$/, + 'ko-KR': /^((\+?82)[ \-]?)?0?1([0|1|6|7|8|9]{1})[ \-]?\d{3,4}[ \-]?\d{4}$/, + 'lt-LT': /^(\+370|8)\d{8}$/, + 'lv-LV': /^(\+?371)2\d{7}$/, + 'ms-MY': 
/^(\+?6?01){1}(([0145]{1}(\-|\s)?\d{7,8})|([236789]{1}(\s|\-)?\d{7}))$/, + 'mz-MZ': /^(\+?258)?8[234567]\d{7}$/, + 'nb-NO': /^(\+?47)?[49]\d{7}$/, + 'ne-NP': /^(\+?977)?9[78]\d{8}$/, + 'nl-BE': /^(\+?32|0)4\d{8}$/, + 'nl-NL': /^(((\+|00)?31\(0\))|((\+|00)?31)|0)6{1}\d{8}$/, + 'nn-NO': /^(\+?47)?[49]\d{7}$/, + 'pl-PL': /^(\+?48)? ?[5-8]\d ?\d{3} ?\d{2} ?\d{2}$/, + 'pt-BR': /^((\+?55\ ?[1-9]{2}\ ?)|(\+?55\ ?\([1-9]{2}\)\ ?)|(0[1-9]{2}\ ?)|(\([1-9]{2}\)\ ?)|([1-9]{2}\ ?))((\d{4}\-?\d{4})|(9[2-9]{1}\d{3}\-?\d{4}))$/, + 'pt-PT': /^(\+?351)?9[1236]\d{7}$/, + 'pt-AO': /^(\+244)\d{9}$/, + 'ro-RO': /^(\+?4?0)\s?7\d{2}(\/|\s|\.|\-)?\d{3}(\s|\.|\-)?\d{3}$/, + 'ru-RU': /^(\+?7|8)?9\d{9}$/, + 'si-LK': /^(?:0|94|\+94)?(7(0|1|2|4|5|6|7|8)( |-)?)\d{7}$/, + 'sl-SI': /^(\+386\s?|0)(\d{1}\s?\d{3}\s?\d{2}\s?\d{2}|\d{2}\s?\d{3}\s?\d{3})$/, + 'sk-SK': /^(\+?421)? ?[1-9][0-9]{2} ?[0-9]{3} ?[0-9]{3}$/, + 'sq-AL': /^(\+355|0)6[789]\d{6}$/, + 'sr-RS': /^(\+3816|06)[- \d]{5,9}$/, + 'sv-SE': /^(\+?46|0)[\s\-]?7[\s\-]?[02369]([\s\-]?\d){7}$/, + 'tg-TJ': /^(\+?992)?[5][5]\d{7}$/, + 'th-TH': /^(\+66|66|0)\d{9}$/, + 'tr-TR': /^(\+?90|0)?5\d{9}$/, + 'tk-TM': /^(\+993|993|8)\d{8}$/, + 'uk-UA': /^(\+?38|8)?0\d{9}$/, + 'uz-UZ': /^(\+?998)?(6[125-79]|7[1-69]|88|9\d)\d{7}$/, + 'vi-VN': /^((\+?84)|0)((3([2-9]))|(5([25689]))|(7([0|6-9]))|(8([1-9]))|(9([0-9])))([0-9]{7})$/, + 'zh-CN': /^((\+|00)86)?(1[3-9]|9[28])\d{9}$/, + 'zh-TW': /^(\+?886\-?|0)?9\d{8}$/, + 'dz-BT': /^(\+?975|0)?(17|16|77|02)\d{6}$/ +}; +/* eslint-enable max-len */ +// aliases + +phones['en-CA'] = phones['en-US']; +phones['fr-CA'] = phones['en-CA']; +phones['fr-BE'] = phones['nl-BE']; +phones['zh-HK'] = phones['en-HK']; +phones['zh-MO'] = phones['en-MO']; +phones['ga-IE'] = phones['en-IE']; +phones['fr-CH'] = phones['de-CH']; +phones['it-CH'] = phones['fr-CH']; + +function isMobilePhone(str, locale, options) { + (0, _assertString.default)(str); + + if (options && options.strictMode && !str.startsWith('+')) { + return false; + } + + if (Array.isArray(locale)) { + return locale.some(function (key) { + // https://github.com/gotwarlost/istanbul/blob/master/ignoring-code-for-coverage.md#ignoring-code-for-coverage-purposes + // istanbul ignore else + if (phones.hasOwnProperty(key)) { + var phone = phones[key]; + + if (phone.test(str)) { + return true; + } + } + + return false; + }); + } else if (locale in phones) { + return phones[locale].test(str); // alias falsey locale as 'any' + } else if (!locale || locale === 'any') { + for (var key in phones) { + // istanbul ignore else + if (phones.hasOwnProperty(key)) { + var phone = phones[key]; + + if (phone.test(str)) { + return true; + } + } + } + + return false; + } + + throw new Error("Invalid locale '".concat(locale, "'")); +} + +var locales = Object.keys(phones); +exports.locales = locales; \ No newline at end of file diff --git a/node_modules/validator/lib/isMongoId.js b/node_modules/validator/lib/isMongoId.js new file mode 100644 index 0000000..2e9884d --- /dev/null +++ b/node_modules/validator/lib/isMongoId.js @@ -0,0 +1,20 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isMongoId; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +var _isHexadecimal = _interopRequireDefault(require("./isHexadecimal")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function isMongoId(str) { + (0, _assertString.default)(str); + return (0, _isHexadecimal.default)(str) && str.length === 24; +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isMultibyte.js b/node_modules/validator/lib/isMultibyte.js new file mode 100644 index 0000000..3b4477e --- /dev/null +++ b/node_modules/validator/lib/isMultibyte.js @@ -0,0 +1,22 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isMultibyte; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/* eslint-disable no-control-regex */ +var multibyte = /[^\x00-\x7F]/; +/* eslint-enable no-control-regex */ + +function isMultibyte(str) { + (0, _assertString.default)(str); + return multibyte.test(str); +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isNumeric.js b/node_modules/validator/lib/isNumeric.js new file mode 100644 index 0000000..441f30f --- /dev/null +++ b/node_modules/validator/lib/isNumeric.js @@ -0,0 +1,27 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isNumeric; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +var _alpha = require("./alpha"); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +var numericNoSymbols = /^[0-9]+$/; + +function isNumeric(str, options) { + (0, _assertString.default)(str); + + if (options && options.no_symbols) { + return numericNoSymbols.test(str); + } + + return new RegExp("^[+-]?([0-9]*[".concat((options || {}).locale ? _alpha.decimal[options.locale] : '.', "])?[0-9]+$")).test(str); +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isOctal.js b/node_modules/validator/lib/isOctal.js new file mode 100644 index 0000000..8d3a1c7 --- /dev/null +++ b/node_modules/validator/lib/isOctal.js @@ -0,0 +1,20 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isOctal; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +var octal = /^(0o)?[0-7]+$/i; + +function isOctal(str) { + (0, _assertString.default)(str); + return octal.test(str); +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isPassportNumber.js b/node_modules/validator/lib/isPassportNumber.js new file mode 100644 index 0000000..192414e --- /dev/null +++ b/node_modules/validator/lib/isPassportNumber.js @@ -0,0 +1,138 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isPassportNumber; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +/** + * Reference: + * https://en.wikipedia.org/ -- Wikipedia + * https://docs.microsoft.com/en-us/microsoft-365/compliance/eu-passport-number -- EU Passport Number + * https://countrycode.org/ -- Country Codes + */ +var passportRegexByCountryCode = { + AM: /^[A-Z]{2}\d{7}$/, + // ARMENIA + AR: /^[A-Z]{3}\d{6}$/, + // ARGENTINA + AT: /^[A-Z]\d{7}$/, + // AUSTRIA + AU: /^[A-Z]\d{7}$/, + // AUSTRALIA + BE: /^[A-Z]{2}\d{6}$/, + // BELGIUM + BG: /^\d{9}$/, + // BULGARIA + BR: /^[A-Z]{2}\d{6}$/, + // BRAZIL + BY: /^[A-Z]{2}\d{7}$/, + // BELARUS + CA: /^[A-Z]{2}\d{6}$/, + // CANADA + CH: /^[A-Z]\d{7}$/, + // SWITZERLAND + CN: /^G\d{8}$|^E(?![IO])[A-Z0-9]\d{7}$/, + // CHINA [G=Ordinary, E=Electronic] followed by 8-digits, or E followed by any UPPERCASE letter (except I and O) followed by 7 digits + CY: /^[A-Z](\d{6}|\d{8})$/, + // CYPRUS + CZ: /^\d{8}$/, + // CZECH REPUBLIC + DE: /^[CFGHJKLMNPRTVWXYZ0-9]{9}$/, + // GERMANY + DK: /^\d{9}$/, + // DENMARK + DZ: /^\d{9}$/, + // ALGERIA + EE: /^([A-Z]\d{7}|[A-Z]{2}\d{7})$/, + // ESTONIA (K followed by 7-digits), e-passports have 2 UPPERCASE followed by 7 digits + ES: /^[A-Z0-9]{2}([A-Z0-9]?)\d{6}$/, + // SPAIN + FI: /^[A-Z]{2}\d{7}$/, + // FINLAND + FR: /^\d{2}[A-Z]{2}\d{5}$/, + // FRANCE + GB: /^\d{9}$/, + // UNITED KINGDOM + GR: /^[A-Z]{2}\d{7}$/, + // GREECE + HR: /^\d{9}$/, + // CROATIA + HU: /^[A-Z]{2}(\d{6}|\d{7})$/, + // HUNGARY + IE: /^[A-Z0-9]{2}\d{7}$/, + // IRELAND + IN: /^[A-Z]{1}-?\d{7}$/, + // INDIA + ID: /^[A-C]\d{7}$/, + // INDONESIA + IR: /^[A-Z]\d{8}$/, + // IRAN + IS: /^(A)\d{7}$/, + // ICELAND + IT: /^[A-Z0-9]{2}\d{7}$/, + // ITALY + JP: /^[A-Z]{2}\d{7}$/, + // JAPAN + KR: /^[MS]\d{8}$/, + // SOUTH KOREA, REPUBLIC OF KOREA, [S=PS Passports, M=PM Passports] + LT: /^[A-Z0-9]{8}$/, + // LITHUANIA + LU: /^[A-Z0-9]{8}$/, + // LUXEMBURG + LV: /^[A-Z0-9]{2}\d{7}$/, + // LATVIA + LY: /^[A-Z0-9]{8}$/, + // LIBYA + MT: /^\d{7}$/, + // MALTA + MZ: /^([A-Z]{2}\d{7})|(\d{2}[A-Z]{2}\d{5})$/, + // MOZAMBIQUE + MY: /^[AHK]\d{8}$/, + // MALAYSIA + NL: /^[A-Z]{2}[A-Z0-9]{6}\d$/, + // NETHERLANDS + PL: /^[A-Z]{2}\d{7}$/, + // POLAND + PT: /^[A-Z]\d{6}$/, + // PORTUGAL + RO: /^\d{8,9}$/, + // ROMANIA + RU: /^\d{9}$/, + // RUSSIAN FEDERATION + SE: /^\d{8}$/, + // SWEDEN + SL: /^(P)[A-Z]\d{7}$/, + // SLOVANIA + SK: /^[0-9A-Z]\d{7}$/, + // SLOVAKIA + TR: /^[A-Z]\d{8}$/, + // TURKEY + UA: /^[A-Z]{2}\d{6}$/, + // UKRAINE + US: /^\d{9}$/ // UNITED STATES + +}; +/** + * Check if str is a valid passport number + * relative to provided ISO Country Code. + * + * @param {string} str + * @param {string} countryCode + * @return {boolean} + */ + +function isPassportNumber(str, countryCode) { + (0, _assertString.default)(str); + /** Remove All Whitespaces, Convert to UPPERCASE */ + + var normalizedStr = str.replace(/\s/g, '').toUpperCase(); + return countryCode.toUpperCase() in passportRegexByCountryCode && passportRegexByCountryCode[countryCode].test(normalizedStr); +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isPort.js b/node_modules/validator/lib/isPort.js new file mode 100644 index 0000000..9274a4c --- /dev/null +++ b/node_modules/validator/lib/isPort.js @@ -0,0 +1,20 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isPort; + +var _isInt = _interopRequireDefault(require("./isInt")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function isPort(str) { + return (0, _isInt.default)(str, { + min: 0, + max: 65535 + }); +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isPostalCode.js b/node_modules/validator/lib/isPostalCode.js new file mode 100644 index 0000000..17c5318 --- /dev/null +++ b/node_modules/validator/lib/isPostalCode.js @@ -0,0 +1,109 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isPostalCode; +exports.locales = void 0; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +// common patterns +var threeDigit = /^\d{3}$/; +var fourDigit = /^\d{4}$/; +var fiveDigit = /^\d{5}$/; +var sixDigit = /^\d{6}$/; +var patterns = { + AD: /^AD\d{3}$/, + AT: fourDigit, + AU: fourDigit, + AZ: /^AZ\d{4}$/, + BE: fourDigit, + BG: fourDigit, + BR: /^\d{5}-\d{3}$/, + BY: /2[1-4]{1}\d{4}$/, + CA: /^[ABCEGHJKLMNPRSTVXY]\d[ABCEGHJ-NPRSTV-Z][\s\-]?\d[ABCEGHJ-NPRSTV-Z]\d$/i, + CH: fourDigit, + CN: /^(0[1-7]|1[012356]|2[0-7]|3[0-6]|4[0-7]|5[1-7]|6[1-7]|7[1-5]|8[1345]|9[09])\d{4}$/, + CZ: /^\d{3}\s?\d{2}$/, + DE: fiveDigit, + DK: fourDigit, + DO: fiveDigit, + DZ: fiveDigit, + EE: fiveDigit, + ES: /^(5[0-2]{1}|[0-4]{1}\d{1})\d{3}$/, + FI: fiveDigit, + FR: /^\d{2}\s?\d{3}$/, + GB: /^(gir\s?0aa|[a-z]{1,2}\d[\da-z]?\s?(\d[a-z]{2})?)$/i, + GR: /^\d{3}\s?\d{2}$/, + HR: /^([1-5]\d{4}$)/, + HT: /^HT\d{4}$/, + HU: fourDigit, + ID: fiveDigit, + IE: /^(?!.*(?:o))[A-Za-z]\d[\dw]\s\w{4}$/i, + IL: /^(\d{5}|\d{7})$/, + IN: /^((?!10|29|35|54|55|65|66|86|87|88|89)[1-9][0-9]{5})$/, + IR: /\b(?!(\d)\1{3})[13-9]{4}[1346-9][013-9]{5}\b/, + IS: threeDigit, + IT: fiveDigit, + JP: /^\d{3}\-\d{4}$/, + KE: fiveDigit, + KR: /^(\d{5}|\d{6})$/, + LI: /^(948[5-9]|949[0-7])$/, + LT: /^LT\-\d{5}$/, + LU: fourDigit, + LV: /^LV\-\d{4}$/, + LK: fiveDigit, + MX: fiveDigit, + MT: /^[A-Za-z]{3}\s{0,1}\d{4}$/, + MY: fiveDigit, + NL: /^\d{4}\s?[a-z]{2}$/i, + NO: fourDigit, + NP: /^(10|21|22|32|33|34|44|45|56|57)\d{3}$|^(977)$/i, + NZ: fourDigit, + PL: /^\d{2}\-\d{3}$/, + PR: /^00[679]\d{2}([ -]\d{4})?$/, + PT: /^\d{4}\-\d{3}?$/, + RO: sixDigit, + RU: sixDigit, + SA: fiveDigit, + SE: /^[1-9]\d{2}\s?\d{2}$/, + SG: sixDigit, + SI: fourDigit, + SK: /^\d{3}\s?\d{2}$/, + TH: fiveDigit, + TN: fourDigit, + TW: /^\d{3}(\d{2})?$/, + UA: fiveDigit, + US: /^\d{5}(-\d{4})?$/, + ZA: fourDigit, + ZM: fiveDigit +}; +var locales = Object.keys(patterns); +exports.locales = locales; + +function isPostalCode(str, locale) { + (0, _assertString.default)(str); + + if (locale in patterns) { + return patterns[locale].test(str); + } else if (locale === 'any') { + for (var key in patterns) { + // https://github.com/gotwarlost/istanbul/blob/master/ignoring-code-for-coverage.md#ignoring-code-for-coverage-purposes + // istanbul ignore else + if (patterns.hasOwnProperty(key)) { + var pattern = patterns[key]; + + if (pattern.test(str)) { + return true; + } + } + } + + return false; + } + + throw new Error("Invalid locale '".concat(locale, "'")); +} \ No newline at end of file diff --git a/node_modules/validator/lib/isRFC3339.js b/node_modules/validator/lib/isRFC3339.js new file mode 100644 index 0000000..da10790 --- /dev/null +++ b/node_modules/validator/lib/isRFC3339.js @@ -0,0 +1,33 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = 
isRFC3339; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/* Based on https://tools.ietf.org/html/rfc3339#section-5.6 */ +var dateFullYear = /[0-9]{4}/; +var dateMonth = /(0[1-9]|1[0-2])/; +var dateMDay = /([12]\d|0[1-9]|3[01])/; +var timeHour = /([01][0-9]|2[0-3])/; +var timeMinute = /[0-5][0-9]/; +var timeSecond = /([0-5][0-9]|60)/; +var timeSecFrac = /(\.[0-9]+)?/; +var timeNumOffset = new RegExp("[-+]".concat(timeHour.source, ":").concat(timeMinute.source)); +var timeOffset = new RegExp("([zZ]|".concat(timeNumOffset.source, ")")); +var partialTime = new RegExp("".concat(timeHour.source, ":").concat(timeMinute.source, ":").concat(timeSecond.source).concat(timeSecFrac.source)); +var fullDate = new RegExp("".concat(dateFullYear.source, "-").concat(dateMonth.source, "-").concat(dateMDay.source)); +var fullTime = new RegExp("".concat(partialTime.source).concat(timeOffset.source)); +var rfc3339 = new RegExp("^".concat(fullDate.source, "[ tT]").concat(fullTime.source, "$")); + +function isRFC3339(str) { + (0, _assertString.default)(str); + return rfc3339.test(str); +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isRgbColor.js b/node_modules/validator/lib/isRgbColor.js new file mode 100644 index 0000000..9622291 --- /dev/null +++ b/node_modules/validator/lib/isRgbColor.js @@ -0,0 +1,29 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isRgbColor; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +var rgbColor = /^rgb\((([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5]),){2}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\)$/; +var rgbaColor = /^rgba\((([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5]),){3}(0?\.\d|1(\.0)?|0(\.0)?)\)$/; +var rgbColorPercent = /^rgb\((([0-9]%|[1-9][0-9]%|100%),){2}([0-9]%|[1-9][0-9]%|100%)\)/; +var rgbaColorPercent = /^rgba\((([0-9]%|[1-9][0-9]%|100%),){3}(0?\.\d|1(\.0)?|0(\.0)?)\)/; + +function isRgbColor(str) { + var includePercentValues = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : true; + (0, _assertString.default)(str); + + if (!includePercentValues) { + return rgbColor.test(str) || rgbaColor.test(str); + } + + return rgbColor.test(str) || rgbaColor.test(str) || rgbColorPercent.test(str) || rgbaColorPercent.test(str); +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isSemVer.js b/node_modules/validator/lib/isSemVer.js new file mode 100644 index 0000000..27355cd --- /dev/null +++ b/node_modules/validator/lib/isSemVer.js @@ -0,0 +1,28 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isSemVer; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +var _multilineRegex = _interopRequireDefault(require("./util/multilineRegex")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +/** + * Regular Expression to match + * semantic versioning (SemVer) + * built from multi-line, multi-parts regexp + * Reference: https://semver.org/ + */ +var semanticVersioningRegex = (0, _multilineRegex.default)(['^(0|[1-9]\\d*)\\.(0|[1-9]\\d*)\\.(0|[1-9]\\d*)', '(?:-((?:0|[1-9]\\d*|\\d*[a-z-][0-9a-z-]*)(?:\\.(?:0|[1-9]\\d*|\\d*[a-z-][0-9a-z-]*))*))', '?(?:\\+([0-9a-z-]+(?:\\.[0-9a-z-]+)*))?$'], 'i'); + +function isSemVer(str) { + (0, _assertString.default)(str); + return semanticVersioningRegex.test(str); +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isSlug.js b/node_modules/validator/lib/isSlug.js new file mode 100644 index 0000000..c40e6ad --- /dev/null +++ b/node_modules/validator/lib/isSlug.js @@ -0,0 +1,20 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isSlug; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +var charsetRegex = /^[^\s-_](?!.*?[-_]{2,})[a-z0-9-\\][^\s]*[^-_\s]$/; + +function isSlug(str) { + (0, _assertString.default)(str); + return charsetRegex.test(str); +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isStrongPassword.js b/node_modules/validator/lib/isStrongPassword.js new file mode 100644 index 0000000..a6a5a33 --- /dev/null +++ b/node_modules/validator/lib/isStrongPassword.js @@ -0,0 +1,115 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isStrongPassword; + +var _merge = _interopRequireDefault(require("./util/merge")); + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +var upperCaseRegex = /^[A-Z]$/; +var lowerCaseRegex = /^[a-z]$/; +var numberRegex = /^[0-9]$/; +var symbolRegex = /^[-#!$@%^&*()_+|~=`{}\[\]:";'<>?,.\/ ]$/; +var defaultOptions = { + minLength: 8, + minLowercase: 1, + minUppercase: 1, + minNumbers: 1, + minSymbols: 1, + returnScore: false, + pointsPerUnique: 1, + pointsPerRepeat: 0.5, + pointsForContainingLower: 10, + pointsForContainingUpper: 10, + pointsForContainingNumber: 10, + pointsForContainingSymbol: 10 +}; +/* Counts number of occurrences of each char in a string + * could be moved to util/ ? 
+*/ + +function countChars(str) { + var result = {}; + Array.from(str).forEach(function (char) { + var curVal = result[char]; + + if (curVal) { + result[char] += 1; + } else { + result[char] = 1; + } + }); + return result; +} +/* Return information about a password */ + + +function analyzePassword(password) { + var charMap = countChars(password); + var analysis = { + length: password.length, + uniqueChars: Object.keys(charMap).length, + uppercaseCount: 0, + lowercaseCount: 0, + numberCount: 0, + symbolCount: 0 + }; + Object.keys(charMap).forEach(function (char) { + /* istanbul ignore else */ + if (upperCaseRegex.test(char)) { + analysis.uppercaseCount += charMap[char]; + } else if (lowerCaseRegex.test(char)) { + analysis.lowercaseCount += charMap[char]; + } else if (numberRegex.test(char)) { + analysis.numberCount += charMap[char]; + } else if (symbolRegex.test(char)) { + analysis.symbolCount += charMap[char]; + } + }); + return analysis; +} + +function scorePassword(analysis, scoringOptions) { + var points = 0; + points += analysis.uniqueChars * scoringOptions.pointsPerUnique; + points += (analysis.length - analysis.uniqueChars) * scoringOptions.pointsPerRepeat; + + if (analysis.lowercaseCount > 0) { + points += scoringOptions.pointsForContainingLower; + } + + if (analysis.uppercaseCount > 0) { + points += scoringOptions.pointsForContainingUpper; + } + + if (analysis.numberCount > 0) { + points += scoringOptions.pointsForContainingNumber; + } + + if (analysis.symbolCount > 0) { + points += scoringOptions.pointsForContainingSymbol; + } + + return points; +} + +function isStrongPassword(str) { + var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : null; + (0, _assertString.default)(str); + var analysis = analyzePassword(str); + options = (0, _merge.default)(options || {}, defaultOptions); + + if (options.returnScore) { + return scorePassword(analysis, options); + } + + return analysis.length >= options.minLength && analysis.lowercaseCount >= options.minLowercase && analysis.uppercaseCount >= options.minUppercase && analysis.numberCount >= options.minNumbers && analysis.symbolCount >= options.minSymbols; +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isSurrogatePair.js b/node_modules/validator/lib/isSurrogatePair.js new file mode 100644 index 0000000..ee5678b --- /dev/null +++ b/node_modules/validator/lib/isSurrogatePair.js @@ -0,0 +1,20 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isSurrogatePair; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +var surrogatePair = /[\uD800-\uDBFF][\uDC00-\uDFFF]/; + +function isSurrogatePair(str) { + (0, _assertString.default)(str); + return surrogatePair.test(str); +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isTaxID.js b/node_modules/validator/lib/isTaxID.js new file mode 100644 index 0000000..2a893b7 --- /dev/null +++ b/node_modules/validator/lib/isTaxID.js @@ -0,0 +1,1528 @@ +"use strict"; + +function _typeof(obj) { "@babel/helpers - typeof"; if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); } + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isTaxID; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +var algorithms = _interopRequireWildcard(require("./util/algorithms")); + +var _isDate = _interopRequireDefault(require("./isDate")); + +function _getRequireWildcardCache() { if (typeof WeakMap !== "function") return null; var cache = new WeakMap(); _getRequireWildcardCache = function _getRequireWildcardCache() { return cache; }; return cache; } + +function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } if (obj === null || _typeof(obj) !== "object" && typeof obj !== "function") { return { default: obj }; } var cache = _getRequireWildcardCache(); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } newObj.default = obj; if (cache) { cache.set(obj, newObj); } return newObj; } + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function _toConsumableArray(arr) { return _arrayWithoutHoles(arr) || _iterableToArray(arr) || _unsupportedIterableToArray(arr) || _nonIterableSpread(); } + +function _nonIterableSpread() { throw new TypeError("Invalid attempt to spread non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); } + +function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return Array.from(o); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); } + +function _iterableToArray(iter) { if (typeof Symbol !== "undefined" && Symbol.iterator in Object(iter)) return Array.from(iter); } + +function _arrayWithoutHoles(arr) { if (Array.isArray(arr)) return _arrayLikeToArray(arr); } + +function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) { arr2[i] = arr[i]; } return arr2; } + +/** + * TIN Validation + * Validates Tax Identification Numbers (TINs) from the US, EU member states and the United Kingdom. + * + * EU-UK: + * National TIN validity is calculated using public algorithms as made available by DG TAXUD. + * + * See `https://ec.europa.eu/taxation_customs/tin/specs/FS-TIN%20Algorithms-Public.docx` for more information. + * + * US: + * An Employer Identification Number (EIN), also known as a Federal Tax Identification Number, + * is used to identify a business entity. + * + * NOTES: + * - Prefix 47 is being reserved for future use + * - Prefixes 26, 27, 45, 46 and 47 were previously assigned by the Philadelphia campus. + * + * See `http://www.irs.gov/Businesses/Small-Businesses-&-Self-Employed/How-EINs-are-Assigned-and-Valid-EIN-Prefixes` + * for more information. + */ +// Locale functions + +/* + * bg-BG validation function + * (Edinen graždanski nomer (EGN/ЕГН), persons only) + * Checks if birth date (first six digits) is valid and calculates check (last) digit + */ +function bgBgCheck(tin) { + // Extract full year, normalize month and check birth date validity + var century_year = tin.slice(0, 2); + var month = parseInt(tin.slice(2, 4), 10); + + if (month > 40) { + month -= 40; + century_year = "20".concat(century_year); + } else if (month > 20) { + month -= 20; + century_year = "18".concat(century_year); + } else { + century_year = "19".concat(century_year); + } + + if (month < 10) { + month = "0".concat(month); + } + + var date = "".concat(century_year, "/").concat(month, "/").concat(tin.slice(4, 6)); + + if (!(0, _isDate.default)(date, 'YYYY/MM/DD')) { + return false; + } // split digits into an array for further processing + + + var digits = tin.split('').map(function (a) { + return parseInt(a, 10); + }); // Calculate checksum by multiplying digits with fixed values + + var multip_lookup = [2, 4, 8, 5, 10, 9, 7, 3, 6]; + var checksum = 0; + + for (var i = 0; i < multip_lookup.length; i++) { + checksum += digits[i] * multip_lookup[i]; + } + + checksum = checksum % 11 === 10 ? 
0 : checksum % 11; + return checksum === digits[9]; +} +/* + * cs-CZ validation function + * (Rodné číslo (RČ), persons only) + * Checks if birth date (first six digits) is valid and divisibility by 11 + * Material not in DG TAXUD document sourced from: + * -`https://lorenc.info/3MA381/overeni-spravnosti-rodneho-cisla.htm` + * -`https://www.mvcr.cz/clanek/rady-a-sluzby-dokumenty-rodne-cislo.aspx` + */ + + +function csCzCheck(tin) { + tin = tin.replace(/\W/, ''); // Extract full year from TIN length + + var full_year = parseInt(tin.slice(0, 2), 10); + + if (tin.length === 10) { + if (full_year < 54) { + full_year = "20".concat(full_year); + } else { + full_year = "19".concat(full_year); + } + } else { + if (tin.slice(6) === '000') { + return false; + } // Three-zero serial not assigned before 1954 + + + if (full_year < 54) { + full_year = "19".concat(full_year); + } else { + return false; // No 18XX years seen in any of the resources + } + } // Add missing zero if needed + + + if (full_year.length === 3) { + full_year = [full_year.slice(0, 2), '0', full_year.slice(2)].join(''); + } // Extract month from TIN and normalize + + + var month = parseInt(tin.slice(2, 4), 10); + + if (month > 50) { + month -= 50; + } + + if (month > 20) { + // Month-plus-twenty was only introduced in 2004 + if (parseInt(full_year, 10) < 2004) { + return false; + } + + month -= 20; + } + + if (month < 10) { + month = "0".concat(month); + } // Check date validity + + + var date = "".concat(full_year, "/").concat(month, "/").concat(tin.slice(4, 6)); + + if (!(0, _isDate.default)(date, 'YYYY/MM/DD')) { + return false; + } // Verify divisibility by 11 + + + if (tin.length === 10) { + if (parseInt(tin, 10) % 11 !== 0) { + // Some numbers up to and including 1985 are still valid if + // check (last) digit equals 0 and modulo of first 9 digits equals 10 + var checkdigit = parseInt(tin.slice(0, 9), 10) % 11; + + if (parseInt(full_year, 10) < 1986 && checkdigit === 10) { + if (parseInt(tin.slice(9), 10) !== 0) { + return false; + } + } else { + return false; + } + } + } + + return true; +} +/* + * de-AT validation function + * (Abgabenkontonummer, persons/entities) + * Verify TIN validity by calling luhnCheck() + */ + + +function deAtCheck(tin) { + return algorithms.luhnCheck(tin); +} +/* + * de-DE validation function + * (Steueridentifikationsnummer (Steuer-IdNr.), persons only) + * Tests for single duplicate/triplicate value, then calculates ISO 7064 check (last) digit + * Partial implementation of spec (same result with both algorithms always) + */ + + +function deDeCheck(tin) { + // Split digits into an array for further processing + var digits = tin.split('').map(function (a) { + return parseInt(a, 10); + }); // Fill array with strings of number positions + + var occurences = []; + + for (var i = 0; i < digits.length - 1; i++) { + occurences.push(''); + + for (var j = 0; j < digits.length - 1; j++) { + if (digits[i] === digits[j]) { + occurences[i] += j; + } + } + } // Remove digits with one occurence and test for only one duplicate/triplicate + + + occurences = occurences.filter(function (a) { + return a.length > 1; + }); + + if (occurences.length !== 2 && occurences.length !== 3) { + return false; + } // In case of triplicate value only two digits are allowed next to each other + + + if (occurences[0].length === 3) { + var trip_locations = occurences[0].split('').map(function (a) { + return parseInt(a, 10); + }); + var recurrent = 0; // Amount of neighbour occurences + + for (var _i = 0; _i < trip_locations.length - 
1; _i++) { + if (trip_locations[_i] + 1 === trip_locations[_i + 1]) { + recurrent += 1; + } + } + + if (recurrent === 2) { + return false; + } + } + + return algorithms.iso7064Check(tin); +} +/* + * dk-DK validation function + * (CPR-nummer (personnummer), persons only) + * Checks if birth date (first six digits) is valid and assigned to century (seventh) digit, + * and calculates check (last) digit + */ + + +function dkDkCheck(tin) { + tin = tin.replace(/\W/, ''); // Extract year, check if valid for given century digit and add century + + var year = parseInt(tin.slice(4, 6), 10); + var century_digit = tin.slice(6, 7); + + switch (century_digit) { + case '0': + case '1': + case '2': + case '3': + year = "19".concat(year); + break; + + case '4': + case '9': + if (year < 37) { + year = "20".concat(year); + } else { + year = "19".concat(year); + } + + break; + + default: + if (year < 37) { + year = "20".concat(year); + } else if (year > 58) { + year = "18".concat(year); + } else { + return false; + } + + break; + } // Add missing zero if needed + + + if (year.length === 3) { + year = [year.slice(0, 2), '0', year.slice(2)].join(''); + } // Check date validity + + + var date = "".concat(year, "/").concat(tin.slice(2, 4), "/").concat(tin.slice(0, 2)); + + if (!(0, _isDate.default)(date, 'YYYY/MM/DD')) { + return false; + } // Split digits into an array for further processing + + + var digits = tin.split('').map(function (a) { + return parseInt(a, 10); + }); + var checksum = 0; + var weight = 4; // Multiply by weight and add to checksum + + for (var i = 0; i < 9; i++) { + checksum += digits[i] * weight; + weight -= 1; + + if (weight === 1) { + weight = 7; + } + } + + checksum %= 11; + + if (checksum === 1) { + return false; + } + + return checksum === 0 ? 
digits[9] === 0 : digits[9] === 11 - checksum; +} +/* + * el-CY validation function + * (Arithmos Forologikou Mitroou (AFM/ΑΦΜ), persons only) + * Verify TIN validity by calculating ASCII value of check (last) character + */ + + +function elCyCheck(tin) { + // split digits into an array for further processing + var digits = tin.slice(0, 8).split('').map(function (a) { + return parseInt(a, 10); + }); + var checksum = 0; // add digits in even places + + for (var i = 1; i < digits.length; i += 2) { + checksum += digits[i]; + } // add digits in odd places + + + for (var _i2 = 0; _i2 < digits.length; _i2 += 2) { + if (digits[_i2] < 2) { + checksum += 1 - digits[_i2]; + } else { + checksum += 2 * (digits[_i2] - 2) + 5; + + if (digits[_i2] > 4) { + checksum += 2; + } + } + } + + return String.fromCharCode(checksum % 26 + 65) === tin.charAt(8); +} +/* + * el-GR validation function + * (Arithmos Forologikou Mitroou (AFM/ΑΦΜ), persons/entities) + * Verify TIN validity by calculating check (last) digit + * Algorithm not in DG TAXUD document- sourced from: + * - `http://epixeirisi.gr/%CE%9A%CE%A1%CE%99%CE%A3%CE%99%CE%9C%CE%91-%CE%98%CE%95%CE%9C%CE%91%CE%A4%CE%91-%CE%A6%CE%9F%CE%A1%CE%9F%CE%9B%CE%9F%CE%93%CE%99%CE%91%CE%A3-%CE%9A%CE%91%CE%99-%CE%9B%CE%9F%CE%93%CE%99%CE%A3%CE%A4%CE%99%CE%9A%CE%97%CE%A3/23791/%CE%91%CF%81%CE%B9%CE%B8%CE%BC%CF%8C%CF%82-%CE%A6%CE%BF%CF%81%CE%BF%CE%BB%CE%BF%CE%B3%CE%B9%CE%BA%CE%BF%CF%8D-%CE%9C%CE%B7%CF%84%CF%81%CF%8E%CE%BF%CF%85` + */ + + +function elGrCheck(tin) { + // split digits into an array for further processing + var digits = tin.split('').map(function (a) { + return parseInt(a, 10); + }); + var checksum = 0; + + for (var i = 0; i < 8; i++) { + checksum += digits[i] * Math.pow(2, 8 - i); + } + + return checksum % 11 % 10 === digits[8]; +} +/* + * en-GB validation function (should go here if needed) + * (National Insurance Number (NINO) or Unique Taxpayer Reference (UTR), + * persons/entities respectively) + */ + +/* + * en-IE validation function + * (Personal Public Service Number (PPS No), persons only) + * Verify TIN validity by calculating check (second to last) character + */ + + +function enIeCheck(tin) { + var checksum = algorithms.reverseMultiplyAndSum(tin.split('').slice(0, 7).map(function (a) { + return parseInt(a, 10); + }), 8); + + if (tin.length === 9 && tin[8] !== 'W') { + checksum += (tin[8].charCodeAt(0) - 64) * 9; + } + + checksum %= 23; + + if (checksum === 0) { + return tin[7].toUpperCase() === 'W'; + } + + return tin[7].toUpperCase() === String.fromCharCode(64 + checksum); +} // Valid US IRS campus prefixes + + +var enUsCampusPrefix = { + andover: ['10', '12'], + atlanta: ['60', '67'], + austin: ['50', '53'], + brookhaven: ['01', '02', '03', '04', '05', '06', '11', '13', '14', '16', '21', '22', '23', '25', '34', '51', '52', '54', '55', '56', '57', '58', '59', '65'], + cincinnati: ['30', '32', '35', '36', '37', '38', '61'], + fresno: ['15', '24'], + internet: ['20', '26', '27', '45', '46', '47'], + kansas: ['40', '44'], + memphis: ['94', '95'], + ogden: ['80', '90'], + philadelphia: ['33', '39', '41', '42', '43', '46', '48', '62', '63', '64', '66', '68', '71', '72', '73', '74', '75', '76', '77', '81', '82', '83', '84', '85', '86', '87', '88', '91', '92', '93', '98', '99'], + sba: ['31'] +}; // Return an array of all US IRS campus prefixes + +function enUsGetPrefixes() { + var prefixes = []; + + for (var location in enUsCampusPrefix) { + // 
https://github.com/gotwarlost/istanbul/blob/master/ignoring-code-for-coverage.md#ignoring-code-for-coverage-purposes + // istanbul ignore else + if (enUsCampusPrefix.hasOwnProperty(location)) { + prefixes.push.apply(prefixes, _toConsumableArray(enUsCampusPrefix[location])); + } + } + + return prefixes; +} +/* + * en-US validation function + * Verify that the TIN starts with a valid IRS campus prefix + */ + + +function enUsCheck(tin) { + return enUsGetPrefixes().indexOf(tin.substr(0, 2)) !== -1; +} +/* + * es-ES validation function + * (Documento Nacional de Identidad (DNI) + * or Número de Identificación de Extranjero (NIE), persons only) + * Verify TIN validity by calculating check (last) character + */ + + +function esEsCheck(tin) { + // Split characters into an array for further processing + var chars = tin.toUpperCase().split(''); // Replace initial letter if needed + + if (isNaN(parseInt(chars[0], 10)) && chars.length > 1) { + var lead_replace = 0; + + switch (chars[0]) { + case 'Y': + lead_replace = 1; + break; + + case 'Z': + lead_replace = 2; + break; + + default: + } + + chars.splice(0, 1, lead_replace); // Fill with zeros if smaller than proper + } else { + while (chars.length < 9) { + chars.unshift(0); + } + } // Calculate checksum and check according to lookup + + + var lookup = ['T', 'R', 'W', 'A', 'G', 'M', 'Y', 'F', 'P', 'D', 'X', 'B', 'N', 'J', 'Z', 'S', 'Q', 'V', 'H', 'L', 'C', 'K', 'E']; + chars = chars.join(''); + var checksum = parseInt(chars.slice(0, 8), 10) % 23; + return chars[8] === lookup[checksum]; +} +/* + * et-EE validation function + * (Isikukood (IK), persons only) + * Checks if birth date (century digit and six following) is valid and calculates check (last) digit + * Material not in DG TAXUD document sourced from: + * - `https://www.oecd.org/tax/automatic-exchange/crs-implementation-and-assistance/tax-identification-numbers/Estonia-TIN.pdf` + */ + + +function etEeCheck(tin) { + // Extract year and add century + var full_year = tin.slice(1, 3); + var century_digit = tin.slice(0, 1); + + switch (century_digit) { + case '1': + case '2': + full_year = "18".concat(full_year); + break; + + case '3': + case '4': + full_year = "19".concat(full_year); + break; + + default: + full_year = "20".concat(full_year); + break; + } // Check date validity + + + var date = "".concat(full_year, "/").concat(tin.slice(3, 5), "/").concat(tin.slice(5, 7)); + + if (!(0, _isDate.default)(date, 'YYYY/MM/DD')) { + return false; + } // Split digits into an array for further processing + + + var digits = tin.split('').map(function (a) { + return parseInt(a, 10); + }); + var checksum = 0; + var weight = 1; // Multiply by weight and add to checksum + + for (var i = 0; i < 10; i++) { + checksum += digits[i] * weight; + weight += 1; + + if (weight === 10) { + weight = 1; + } + } // Do again if modulo 11 of checksum is 10 + + + if (checksum % 11 === 10) { + checksum = 0; + weight = 3; + + for (var _i3 = 0; _i3 < 10; _i3++) { + checksum += digits[_i3] * weight; + weight += 1; + + if (weight === 10) { + weight = 1; + } + } + + if (checksum % 11 === 10) { + return digits[10] === 0; + } + } + + return checksum % 11 === digits[10]; +} +/* + * fi-FI validation function + * (Henkilötunnus (HETU), persons only) + * Checks if birth date (first six digits plus century symbol) is valid + * and calculates check (last) digit + */ + + +function fiFiCheck(tin) { + // Extract year and add century + var full_year = tin.slice(4, 6); + var century_symbol = tin.slice(6, 7); + + switch (century_symbol) { + case 
'+': + full_year = "18".concat(full_year); + break; + + case '-': + full_year = "19".concat(full_year); + break; + + default: + full_year = "20".concat(full_year); + break; + } // Check date validity + + + var date = "".concat(full_year, "/").concat(tin.slice(2, 4), "/").concat(tin.slice(0, 2)); + + if (!(0, _isDate.default)(date, 'YYYY/MM/DD')) { + return false; + } // Calculate check character + + + var checksum = parseInt(tin.slice(0, 6) + tin.slice(7, 10), 10) % 31; + + if (checksum < 10) { + return checksum === parseInt(tin.slice(10), 10); + } + + checksum -= 10; + var letters_lookup = ['A', 'B', 'C', 'D', 'E', 'F', 'H', 'J', 'K', 'L', 'M', 'N', 'P', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y']; + return letters_lookup[checksum] === tin.slice(10); +} +/* + * fr/nl-BE validation function + * (Numéro national (N.N.), persons only) + * Checks if birth date (first six digits) is valid and calculates check (last two) digits + */ + + +function frBeCheck(tin) { + // Zero month/day value is acceptable + if (tin.slice(2, 4) !== '00' || tin.slice(4, 6) !== '00') { + // Extract date from first six digits of TIN + var date = "".concat(tin.slice(0, 2), "/").concat(tin.slice(2, 4), "/").concat(tin.slice(4, 6)); + + if (!(0, _isDate.default)(date, 'YY/MM/DD')) { + return false; + } + } + + var checksum = 97 - parseInt(tin.slice(0, 9), 10) % 97; + var checkdigits = parseInt(tin.slice(9, 11), 10); + + if (checksum !== checkdigits) { + checksum = 97 - parseInt("2".concat(tin.slice(0, 9)), 10) % 97; + + if (checksum !== checkdigits) { + return false; + } + } + + return true; +} +/* + * fr-FR validation function + * (Numéro fiscal de référence (numéro SPI), persons only) + * Verify TIN validity by calculating check (last three) digits + */ + + +function frFrCheck(tin) { + tin = tin.replace(/\s/g, ''); + var checksum = parseInt(tin.slice(0, 10), 10) % 511; + var checkdigits = parseInt(tin.slice(10, 13), 10); + return checksum === checkdigits; +} +/* + * fr/lb-LU validation function + * (numéro d’identification personnelle, persons only) + * Verify birth date validity and run Luhn and Verhoeff checks + */ + + +function frLuCheck(tin) { + // Extract date and check validity + var date = "".concat(tin.slice(0, 4), "/").concat(tin.slice(4, 6), "/").concat(tin.slice(6, 8)); + + if (!(0, _isDate.default)(date, 'YYYY/MM/DD')) { + return false; + } // Run Luhn check + + + if (!algorithms.luhnCheck(tin.slice(0, 12))) { + return false; + } // Remove Luhn check digit and run Verhoeff check + + + return algorithms.verhoeffCheck("".concat(tin.slice(0, 11)).concat(tin[12])); +} +/* + * hr-HR validation function + * (Osobni identifikacijski broj (OIB), persons/entities) + * Verify TIN validity by calling iso7064Check(digits) + */ + + +function hrHrCheck(tin) { + return algorithms.iso7064Check(tin); +} +/* + * hu-HU validation function + * (Adóazonosító jel, persons only) + * Verify TIN validity by calculating check (last) digit + */ + + +function huHuCheck(tin) { + // split digits into an array for further processing + var digits = tin.split('').map(function (a) { + return parseInt(a, 10); + }); + var checksum = 8; + + for (var i = 1; i < 9; i++) { + checksum += digits[i] * (i + 1); + } + + return checksum % 11 === digits[9]; +} +/* + * lt-LT validation function (should go here if needed) + * (Asmens kodas, persons/entities respectively) + * Current validation check is alias of etEeCheck- same format applies + */ + +/* + * it-IT first/last name validity check + * Accepts it-IT TIN-encoded names as a three-element character 
array and checks their validity + * Due to lack of clarity between resources ("Are only Italian consonants used? + * What happens if a person has X in their name?" etc.) only two test conditions + * have been implemented: + * Vowels may only be followed by other vowels or an X character + * and X characters after vowels may only be followed by other X characters. + */ + + +function itItNameCheck(name) { + // true at the first occurence of a vowel + var vowelflag = false; // true at the first occurence of an X AFTER vowel + // (to properly handle last names with X as consonant) + + var xflag = false; + + for (var i = 0; i < 3; i++) { + if (!vowelflag && /[AEIOU]/.test(name[i])) { + vowelflag = true; + } else if (!xflag && vowelflag && name[i] === 'X') { + xflag = true; + } else if (i > 0) { + if (vowelflag && !xflag) { + if (!/[AEIOU]/.test(name[i])) { + return false; + } + } + + if (xflag) { + if (!/X/.test(name[i])) { + return false; + } + } + } + } + + return true; +} +/* + * it-IT validation function + * (Codice fiscale (TIN-IT), persons only) + * Verify name, birth date and codice catastale validity + * and calculate check character. + * Material not in DG-TAXUD document sourced from: + * `https://en.wikipedia.org/wiki/Italian_fiscal_code` + */ + + +function itItCheck(tin) { + // Capitalize and split characters into an array for further processing + var chars = tin.toUpperCase().split(''); // Check first and last name validity calling itItNameCheck() + + if (!itItNameCheck(chars.slice(0, 3))) { + return false; + } + + if (!itItNameCheck(chars.slice(3, 6))) { + return false; + } // Convert letters in number spaces back to numbers if any + + + var number_locations = [6, 7, 9, 10, 12, 13, 14]; + var number_replace = { + L: '0', + M: '1', + N: '2', + P: '3', + Q: '4', + R: '5', + S: '6', + T: '7', + U: '8', + V: '9' + }; + + for (var _i4 = 0, _number_locations = number_locations; _i4 < _number_locations.length; _i4++) { + var i = _number_locations[_i4]; + + if (chars[i] in number_replace) { + chars.splice(i, 1, number_replace[chars[i]]); + } + } // Extract month and day, and check date validity + + + var month_replace = { + A: '01', + B: '02', + C: '03', + D: '04', + E: '05', + H: '06', + L: '07', + M: '08', + P: '09', + R: '10', + S: '11', + T: '12' + }; + var month = month_replace[chars[8]]; + var day = parseInt(chars[9] + chars[10], 10); + + if (day > 40) { + day -= 40; + } + + if (day < 10) { + day = "0".concat(day); + } + + var date = "".concat(chars[6]).concat(chars[7], "/").concat(month, "/").concat(day); + + if (!(0, _isDate.default)(date, 'YY/MM/DD')) { + return false; + } // Calculate check character by adding up even and odd characters as numbers + + + var checksum = 0; + + for (var _i5 = 1; _i5 < chars.length - 1; _i5 += 2) { + var char_to_int = parseInt(chars[_i5], 10); + + if (isNaN(char_to_int)) { + char_to_int = chars[_i5].charCodeAt(0) - 65; + } + + checksum += char_to_int; + } + + var odd_convert = { + // Maps of characters at odd places + A: 1, + B: 0, + C: 5, + D: 7, + E: 9, + F: 13, + G: 15, + H: 17, + I: 19, + J: 21, + K: 2, + L: 4, + M: 18, + N: 20, + O: 11, + P: 3, + Q: 6, + R: 8, + S: 12, + T: 14, + U: 16, + V: 10, + W: 22, + X: 25, + Y: 24, + Z: 23, + 0: 1, + 1: 0 + }; + + for (var _i6 = 0; _i6 < chars.length - 1; _i6 += 2) { + var _char_to_int = 0; + + if (chars[_i6] in odd_convert) { + _char_to_int = odd_convert[chars[_i6]]; + } else { + var multiplier = parseInt(chars[_i6], 10); + _char_to_int = 2 * multiplier + 1; + + if (multiplier > 4) { + _char_to_int += 2; 
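+ // The extra +2 for digits above 4 keeps 2 * d + 1 in line with the official odd-position table (2->5, 3->7, 4->9, 5->13 ... 9->21).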
+ } + } + + checksum += _char_to_int; + } + + if (String.fromCharCode(65 + checksum % 26) !== chars[15]) { + return false; + } + + return true; +} +/* + * lv-LV validation function + * (Personas kods (PK), persons only) + * Check validity of birth date and calculate check (last) digit + * Support only for old format numbers (not starting with '32', issued before 2017/07/01) + * Material not in DG TAXUD document sourced from: + * `https://boot.ritakafija.lv/forums/index.php?/topic/88314-personas-koda-algoritms-%C4%8Deksumma/` + */ + + +function lvLvCheck(tin) { + tin = tin.replace(/\W/, ''); // Extract date from TIN + + var day = tin.slice(0, 2); + + if (day !== '32') { + // No date/checksum check if new format + var month = tin.slice(2, 4); + + if (month !== '00') { + // No date check if unknown month + var full_year = tin.slice(4, 6); + + switch (tin[6]) { + case '0': + full_year = "18".concat(full_year); + break; + + case '1': + full_year = "19".concat(full_year); + break; + + default: + full_year = "20".concat(full_year); + break; + } // Check date validity + + + var date = "".concat(full_year, "/").concat(tin.slice(2, 4), "/").concat(day); + + if (!(0, _isDate.default)(date, 'YYYY/MM/DD')) { + return false; + } + } // Calculate check digit + + + var checksum = 1101; + var multip_lookup = [1, 6, 3, 7, 9, 10, 5, 8, 4, 2]; + + for (var i = 0; i < tin.length - 1; i++) { + checksum -= parseInt(tin[i], 10) * multip_lookup[i]; + } + + return parseInt(tin[10], 10) === checksum % 11; + } + + return true; +} +/* + * mt-MT validation function + * (Identity Card Number or Unique Taxpayer Reference, persons/entities) + * Verify Identity Card Number structure (no other tests found) + */ + + +function mtMtCheck(tin) { + if (tin.length !== 9) { + // No tests for UTR + var chars = tin.toUpperCase().split(''); // Fill with zeros if smaller than proper + + while (chars.length < 8) { + chars.unshift(0); + } // Validate format according to last character + + + switch (tin[7]) { + case 'A': + case 'P': + if (parseInt(chars[6], 10) === 0) { + return false; + } + + break; + + default: + { + var first_part = parseInt(chars.join('').slice(0, 5), 10); + + if (first_part > 32000) { + return false; + } + + var second_part = parseInt(chars.join('').slice(5, 7), 10); + + if (first_part === second_part) { + return false; + } + } + } + } + + return true; +} +/* + * nl-NL validation function + * (Burgerservicenummer (BSN) or Rechtspersonen Samenwerkingsverbanden Informatie Nummer (RSIN), + * persons/entities respectively) + * Verify TIN validity by calculating check (last) digit (variant of MOD 11) + */ + + +function nlNlCheck(tin) { + return algorithms.reverseMultiplyAndSum(tin.split('').slice(0, 8).map(function (a) { + return parseInt(a, 10); + }), 9) % 11 === parseInt(tin[8], 10); +} +/* + * pl-PL validation function + * (Powszechny Elektroniczny System Ewidencji Ludności (PESEL) + * or Numer identyfikacji podatkowej (NIP), persons/entities) + * Verify TIN validity by validating birth date (PESEL) and calculating check (last) digit + */ + + +function plPlCheck(tin) { + // NIP + if (tin.length === 10) { + // Calculate last digit by multiplying with lookup + var lookup = [6, 5, 7, 2, 3, 4, 5, 6, 7]; + var _checksum = 0; + + for (var i = 0; i < lookup.length; i++) { + _checksum += parseInt(tin[i], 10) * lookup[i]; + } + + _checksum %= 11; + + if (_checksum === 10) { + return false; + } + + return _checksum === parseInt(tin[9], 10); + } // PESEL + // Extract full year using month + + + var full_year = tin.slice(0, 2); + 
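// In PESEL the century is encoded in the month field: an offset of +80 marks the 1800s, +60 the 2200s, +40 the 2100s, +20 the 2000s and no offset the 1900s. +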
var month = parseInt(tin.slice(2, 4), 10); + + if (month > 80) { + full_year = "18".concat(full_year); + month -= 80; + } else if (month > 60) { + full_year = "22".concat(full_year); + month -= 60; + } else if (month > 40) { + full_year = "21".concat(full_year); + month -= 40; + } else if (month > 20) { + full_year = "20".concat(full_year); + month -= 20; + } else { + full_year = "19".concat(full_year); + } // Add leading zero to month if needed + + + if (month < 10) { + month = "0".concat(month); + } // Check date validity + + + var date = "".concat(full_year, "/").concat(month, "/").concat(tin.slice(4, 6)); + + if (!(0, _isDate.default)(date, 'YYYY/MM/DD')) { + return false; + } // Calculate last digit by mulitplying with odd one-digit numbers except 5 + + + var checksum = 0; + var multiplier = 1; + + for (var _i7 = 0; _i7 < tin.length - 1; _i7++) { + checksum += parseInt(tin[_i7], 10) * multiplier % 10; + multiplier += 2; + + if (multiplier > 10) { + multiplier = 1; + } else if (multiplier === 5) { + multiplier += 2; + } + } + + checksum = 10 - checksum % 10; + return checksum === parseInt(tin[10], 10); +} +/* +* pt-BR validation function +* (Cadastro de Pessoas Físicas (CPF, persons) +* Cadastro Nacional de Pessoas Jurídicas (CNPJ, entities) +* Both inputs will be validated +*/ + + +function ptBrCheck(tin) { + if (tin.length === 11) { + var _sum; + + var remainder; + _sum = 0; + if ( // Reject known invalid CPFs + tin === '11111111111' || tin === '22222222222' || tin === '33333333333' || tin === '44444444444' || tin === '55555555555' || tin === '66666666666' || tin === '77777777777' || tin === '88888888888' || tin === '99999999999' || tin === '00000000000') return false; + + for (var i = 1; i <= 9; i++) { + _sum += parseInt(tin.substring(i - 1, i), 10) * (11 - i); + } + + remainder = _sum * 10 % 11; + if (remainder === 10) remainder = 0; + if (remainder !== parseInt(tin.substring(9, 10), 10)) return false; + _sum = 0; + + for (var _i8 = 1; _i8 <= 10; _i8++) { + _sum += parseInt(tin.substring(_i8 - 1, _i8), 10) * (12 - _i8); + } + + remainder = _sum * 10 % 11; + if (remainder === 10) remainder = 0; + if (remainder !== parseInt(tin.substring(10, 11), 10)) return false; + return true; + } + + if ( // Reject know invalid CNPJs + tin === '00000000000000' || tin === '11111111111111' || tin === '22222222222222' || tin === '33333333333333' || tin === '44444444444444' || tin === '55555555555555' || tin === '66666666666666' || tin === '77777777777777' || tin === '88888888888888' || tin === '99999999999999') { + return false; + } + + var length = tin.length - 2; + var identifiers = tin.substring(0, length); + var verificators = tin.substring(length); + var sum = 0; + var pos = length - 7; + + for (var _i9 = length; _i9 >= 1; _i9--) { + sum += identifiers.charAt(length - _i9) * pos; + pos -= 1; + + if (pos < 2) { + pos = 9; + } + } + + var result = sum % 11 < 2 ? 0 : 11 - sum % 11; + + if (result !== parseInt(verificators.charAt(0), 10)) { + return false; + } + + length += 1; + identifiers = tin.substring(0, length); + sum = 0; + pos = length - 7; + + for (var _i10 = length; _i10 >= 1; _i10--) { + sum += identifiers.charAt(length - _i10) * pos; + pos -= 1; + + if (pos < 2) { + pos = 9; + } + } + + result = sum % 11 < 2 ? 
0 : 11 - sum % 11; + + if (result !== parseInt(verificators.charAt(1), 10)) { + return false; + } + + return true; +} +/* + * pt-PT validation function + * (Número de identificação fiscal (NIF), persons/entities) + * Verify TIN validity by calculating check (last) digit (variant of MOD 11) + */ + + +function ptPtCheck(tin) { + var checksum = 11 - algorithms.reverseMultiplyAndSum(tin.split('').slice(0, 8).map(function (a) { + return parseInt(a, 10); + }), 9) % 11; + + if (checksum > 9) { + return parseInt(tin[8], 10) === 0; + } + + return checksum === parseInt(tin[8], 10); +} +/* + * ro-RO validation function + * (Cod Numeric Personal (CNP) or Cod de înregistrare fiscală (CIF), + * persons only) + * Verify CNP validity by calculating check (last) digit (test not found for CIF) + * Material not in DG TAXUD document sourced from: + * `https://en.wikipedia.org/wiki/National_identification_number#Romania` + */ + + +function roRoCheck(tin) { + if (tin.slice(0, 4) !== '9000') { + // No test found for this format + // Extract full year using century digit if possible + var full_year = tin.slice(1, 3); + + switch (tin[0]) { + case '1': + case '2': + full_year = "19".concat(full_year); + break; + + case '3': + case '4': + full_year = "18".concat(full_year); + break; + + case '5': + case '6': + full_year = "20".concat(full_year); + break; + + default: + } // Check date validity + + + var date = "".concat(full_year, "/").concat(tin.slice(3, 5), "/").concat(tin.slice(5, 7)); + + if (date.length === 8) { + if (!(0, _isDate.default)(date, 'YY/MM/DD')) { + return false; + } + } else if (!(0, _isDate.default)(date, 'YYYY/MM/DD')) { + return false; + } // Calculate check digit + + + var digits = tin.split('').map(function (a) { + return parseInt(a, 10); + }); + var multipliers = [2, 7, 9, 1, 4, 6, 3, 5, 8, 2, 7, 9]; + var checksum = 0; + + for (var i = 0; i < multipliers.length; i++) { + checksum += digits[i] * multipliers[i]; + } + + if (checksum % 11 === 10) { + return digits[12] === 1; + } + + return digits[12] === checksum % 11; + } + + return true; +} +/* + * sk-SK validation function + * (Rodné číslo (RČ) or bezvýznamové identifikačné číslo (BIČ), persons only) + * Checks validity of pre-1954 birth numbers (rodné číslo) only + * Due to the introduction of the pseudo-random BIČ it is not possible to test + * post-1954 birth numbers without knowing whether they are BIČ or RČ beforehand + */ + + +function skSkCheck(tin) { + if (tin.length === 9) { + tin = tin.replace(/\W/, ''); + + if (tin.slice(6) === '000') { + return false; + } // Three-zero serial not assigned before 1954 + // Extract full year from TIN length + + + var full_year = parseInt(tin.slice(0, 2), 10); + + if (full_year > 53) { + return false; + } + + if (full_year < 10) { + full_year = "190".concat(full_year); + } else { + full_year = "19".concat(full_year); + } // Extract month from TIN and normalize + + + var month = parseInt(tin.slice(2, 4), 10); + + if (month > 50) { + month -= 50; + } + + if (month < 10) { + month = "0".concat(month); + } // Check date validity + + + var date = "".concat(full_year, "/").concat(month, "/").concat(tin.slice(4, 6)); + + if (!(0, _isDate.default)(date, 'YYYY/MM/DD')) { + return false; + } + } + + return true; +} +/* + * sl-SI validation function + * (Davčna številka, persons/entities) + * Verify TIN validity by calculating check (last) digit (variant of MOD 11) + */ + + +function slSiCheck(tin) { + var checksum = 11 - algorithms.reverseMultiplyAndSum(tin.split('').slice(0, 7).map(function (a) { + return 
parseInt(a, 10); + }), 8) % 11; + + if (checksum === 10) { + return parseInt(tin[7], 10) === 0; + } + + return checksum === parseInt(tin[7], 10); +} +/* + * sv-SE validation function + * (Personnummer or samordningsnummer, persons only) + * Checks validity of birth date and calls luhnCheck() to validate check (last) digit + */ + + +function svSeCheck(tin) { + // Make copy of TIN and normalize to two-digit year form + var tin_copy = tin.slice(0); + + if (tin.length > 11) { + tin_copy = tin_copy.slice(2); + } // Extract date of birth + + + var full_year = ''; + var month = tin_copy.slice(2, 4); + var day = parseInt(tin_copy.slice(4, 6), 10); + + if (tin.length > 11) { + full_year = tin.slice(0, 4); + } else { + full_year = tin.slice(0, 2); + + if (tin.length === 11 && day < 60) { + // Extract full year from centenarian symbol + // Should work just fine until year 10000 or so + var current_year = new Date().getFullYear().toString(); + var current_century = parseInt(current_year.slice(0, 2), 10); + current_year = parseInt(current_year, 10); + + if (tin[6] === '-') { + if (parseInt("".concat(current_century).concat(full_year), 10) > current_year) { + full_year = "".concat(current_century - 1).concat(full_year); + } else { + full_year = "".concat(current_century).concat(full_year); + } + } else { + full_year = "".concat(current_century - 1).concat(full_year); + + if (current_year - parseInt(full_year, 10) < 100) { + return false; + } + } + } + } // Normalize day and check date validity + + + if (day > 60) { + day -= 60; + } + + if (day < 10) { + day = "0".concat(day); + } + + var date = "".concat(full_year, "/").concat(month, "/").concat(day); + + if (date.length === 8) { + if (!(0, _isDate.default)(date, 'YY/MM/DD')) { + return false; + } + } else if (!(0, _isDate.default)(date, 'YYYY/MM/DD')) { + return false; + } + + return algorithms.luhnCheck(tin.replace(/\W/, '')); +} // Locale lookup objects + +/* + * Tax id regex formats for various locales + * + * Where not explicitly specified in DG-TAXUD document both + * uppercase and lowercase letters are acceptable. 
+ */ + + +var taxIdFormat = { + 'bg-BG': /^\d{10}$/, + 'cs-CZ': /^\d{6}\/{0,1}\d{3,4}$/, + 'de-AT': /^\d{9}$/, + 'de-DE': /^[1-9]\d{10}$/, + 'dk-DK': /^\d{6}-{0,1}\d{4}$/, + 'el-CY': /^[09]\d{7}[A-Z]$/, + 'el-GR': /^([0-4]|[7-9])\d{8}$/, + 'en-GB': /^\d{10}$|^(?!GB|NK|TN|ZZ)(?![DFIQUV])[A-Z](?![DFIQUVO])[A-Z]\d{6}[ABCD ]$/i, + 'en-IE': /^\d{7}[A-W][A-IW]{0,1}$/i, + 'en-US': /^\d{2}[- ]{0,1}\d{7}$/, + 'es-ES': /^(\d{0,8}|[XYZKLM]\d{7})[A-HJ-NP-TV-Z]$/i, + 'et-EE': /^[1-6]\d{6}(00[1-9]|0[1-9][0-9]|[1-6][0-9]{2}|70[0-9]|710)\d$/, + 'fi-FI': /^\d{6}[-+A]\d{3}[0-9A-FHJ-NPR-Y]$/i, + 'fr-BE': /^\d{11}$/, + 'fr-FR': /^[0-3]\d{12}$|^[0-3]\d\s\d{2}(\s\d{3}){3}$/, + // Conforms both to official spec and provided example + 'fr-LU': /^\d{13}$/, + 'hr-HR': /^\d{11}$/, + 'hu-HU': /^8\d{9}$/, + 'it-IT': /^[A-Z]{6}[L-NP-V0-9]{2}[A-EHLMPRST][L-NP-V0-9]{2}[A-ILMZ][L-NP-V0-9]{3}[A-Z]$/i, + 'lv-LV': /^\d{6}-{0,1}\d{5}$/, + // Conforms both to DG TAXUD spec and original research + 'mt-MT': /^\d{3,7}[APMGLHBZ]$|^([1-8])\1\d{7}$/i, + 'nl-NL': /^\d{9}$/, + 'pl-PL': /^\d{10,11}$/, + 'pt-BR': /(?:^\d{11}$)|(?:^\d{14}$)/, + 'pt-PT': /^\d{9}$/, + 'ro-RO': /^\d{13}$/, + 'sk-SK': /^\d{6}\/{0,1}\d{3,4}$/, + 'sl-SI': /^[1-9]\d{7}$/, + 'sv-SE': /^(\d{6}[-+]{0,1}\d{4}|(18|19|20)\d{6}[-+]{0,1}\d{4})$/ +}; // taxIdFormat locale aliases + +taxIdFormat['lb-LU'] = taxIdFormat['fr-LU']; +taxIdFormat['lt-LT'] = taxIdFormat['et-EE']; +taxIdFormat['nl-BE'] = taxIdFormat['fr-BE']; // Algorithmic tax id check functions for various locales + +var taxIdCheck = { + 'bg-BG': bgBgCheck, + 'cs-CZ': csCzCheck, + 'de-AT': deAtCheck, + 'de-DE': deDeCheck, + 'dk-DK': dkDkCheck, + 'el-CY': elCyCheck, + 'el-GR': elGrCheck, + 'en-IE': enIeCheck, + 'en-US': enUsCheck, + 'es-ES': esEsCheck, + 'et-EE': etEeCheck, + 'fi-FI': fiFiCheck, + 'fr-BE': frBeCheck, + 'fr-FR': frFrCheck, + 'fr-LU': frLuCheck, + 'hr-HR': hrHrCheck, + 'hu-HU': huHuCheck, + 'it-IT': itItCheck, + 'lv-LV': lvLvCheck, + 'mt-MT': mtMtCheck, + 'nl-NL': nlNlCheck, + 'pl-PL': plPlCheck, + 'pt-BR': ptBrCheck, + 'pt-PT': ptPtCheck, + 'ro-RO': roRoCheck, + 'sk-SK': skSkCheck, + 'sl-SI': slSiCheck, + 'sv-SE': svSeCheck +}; // taxIdCheck locale aliases + +taxIdCheck['lb-LU'] = taxIdCheck['fr-LU']; +taxIdCheck['lt-LT'] = taxIdCheck['et-EE']; +taxIdCheck['nl-BE'] = taxIdCheck['fr-BE']; // Regexes for locales where characters should be omitted before checking format + +var allsymbols = /[-\\\/!@#$%\^&\*\(\)\+\=\[\]]+/g; +var sanitizeRegexes = { + 'de-AT': allsymbols, + 'de-DE': /[\/\\]/g, + 'fr-BE': allsymbols +}; // sanitizeRegexes locale aliases + +sanitizeRegexes['nl-BE'] = sanitizeRegexes['fr-BE']; +/* + * Validator function + * Return true if the passed string is a valid tax identification number + * for the specified locale. + * Throw an error exception if the locale is not supported. + */ + +function isTaxID(str) { + var locale = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : 'en-US'; + (0, _assertString.default)(str); // Copy TIN to avoid replacement if sanitized + + var strcopy = str.slice(0); + + if (locale in taxIdFormat) { + if (locale in sanitizeRegexes) { + strcopy = strcopy.replace(sanitizeRegexes[locale], ''); + } + + if (!taxIdFormat[locale].test(strcopy)) { + return false; + } + + if (locale in taxIdCheck) { + return taxIdCheck[locale](strcopy); + } // Fallthrough; not all locales have algorithmic checks + + + return true; + } + + throw new Error("Invalid locale '".concat(locale, "'")); +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isURL.js b/node_modules/validator/lib/isURL.js new file mode 100644 index 0000000..b8d5e87 --- /dev/null +++ b/node_modules/validator/lib/isURL.js @@ -0,0 +1,208 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isURL; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +var _isFQDN = _interopRequireDefault(require("./isFQDN")); + +var _isIP = _interopRequireDefault(require("./isIP")); + +var _merge = _interopRequireDefault(require("./util/merge")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function _slicedToArray(arr, i) { return _arrayWithHoles(arr) || _iterableToArrayLimit(arr, i) || _unsupportedIterableToArray(arr, i) || _nonIterableRest(); } + +function _nonIterableRest() { throw new TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); } + +function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return Array.from(o); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); } + +function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) { arr2[i] = arr[i]; } return arr2; } + +function _iterableToArrayLimit(arr, i) { if (typeof Symbol === "undefined" || !(Symbol.iterator in Object(arr))) return; var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i["return"] != null) _i["return"](); } finally { if (_d) throw _e; } } return _arr; } + +function _arrayWithHoles(arr) { if (Array.isArray(arr)) return arr; } + +/* +options for isURL method + +require_protocol - if set as true isURL will return false if protocol is not present in the URL +require_valid_protocol - isURL will check if the URL's protocol is present in the protocols option +protocols - valid protocols can be modified with this option +require_host - if set as false isURL will not check if host is present in the URL +require_port - if set as true isURL will check if port is present in the URL +allow_protocol_relative_urls - if set as true protocol relative URLs will be allowed +validate_length - if set as false isURL will skip string length validation (IE maximum is 2083) + +*/ +var default_url_options = { + protocols: 
['http', 'https', 'ftp'], + require_tld: true, + require_protocol: false, + require_host: true, + require_port: false, + require_valid_protocol: true, + allow_underscores: false, + allow_trailing_dot: false, + allow_protocol_relative_urls: false, + allow_fragments: true, + allow_query_components: true, + validate_length: true +}; +var wrapped_ipv6 = /^\[([^\]]+)\](?::([0-9]+))?$/; + +function isRegExp(obj) { + return Object.prototype.toString.call(obj) === '[object RegExp]'; +} + +function checkHost(host, matches) { + for (var i = 0; i < matches.length; i++) { + var match = matches[i]; + + if (host === match || isRegExp(match) && match.test(host)) { + return true; + } + } + + return false; +} + +function isURL(url, options) { + (0, _assertString.default)(url); + + if (!url || /[\s<>]/.test(url)) { + return false; + } + + if (url.indexOf('mailto:') === 0) { + return false; + } + + options = (0, _merge.default)(options, default_url_options); + + if (options.validate_length && url.length >= 2083) { + return false; + } + + if (!options.allow_fragments && url.includes('#')) { + return false; + } + + if (!options.allow_query_components && (url.includes('?') || url.includes('&'))) { + return false; + } + + var protocol, auth, host, hostname, port, port_str, split, ipv6; + split = url.split('#'); + url = split.shift(); + split = url.split('?'); + url = split.shift(); + split = url.split('://'); + + if (split.length > 1) { + protocol = split.shift().toLowerCase(); + + if (options.require_valid_protocol && options.protocols.indexOf(protocol) === -1) { + return false; + } + } else if (options.require_protocol) { + return false; + } else if (url.substr(0, 2) === '//') { + if (!options.allow_protocol_relative_urls) { + return false; + } + + split[0] = url.substr(2); + } + + url = split.join('://'); + + if (url === '') { + return false; + } + + split = url.split('/'); + url = split.shift(); + + if (url === '' && !options.require_host) { + return true; + } + + split = url.split('@'); + + if (split.length > 1) { + if (options.disallow_auth) { + return false; + } + + if (split[0] === '') { + return false; + } + + auth = split.shift(); + + if (auth.indexOf(':') >= 0 && auth.split(':').length > 2) { + return false; + } + + var _auth$split = auth.split(':'), + _auth$split2 = _slicedToArray(_auth$split, 2), + user = _auth$split2[0], + password = _auth$split2[1]; + + if (user === '' && password === '') { + return false; + } + } + + hostname = split.join('@'); + port_str = null; + ipv6 = null; + var ipv6_match = hostname.match(wrapped_ipv6); + + if (ipv6_match) { + host = ''; + ipv6 = ipv6_match[1]; + port_str = ipv6_match[2] || null; + } else { + split = hostname.split(':'); + host = split.shift(); + + if (split.length) { + port_str = split.join(':'); + } + } + + if (port_str !== null && port_str.length > 0) { + port = parseInt(port_str, 10); + + if (!/^[0-9]+$/.test(port_str) || port <= 0 || port > 65535) { + return false; + } + } else if (options.require_port) { + return false; + } + + if (options.host_whitelist) { + return checkHost(host, options.host_whitelist); + } + + if (!(0, _isIP.default)(host) && !(0, _isFQDN.default)(host, options) && (!ipv6 || !(0, _isIP.default)(ipv6, 6))) { + return false; + } + + host = host || ipv6; + + if (options.host_blacklist && checkHost(host, options.host_blacklist)) { + return false; + } + + return true; +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isUUID.js 
b/node_modules/validator/lib/isUUID.js new file mode 100644 index 0000000..f2fdc79 --- /dev/null +++ b/node_modules/validator/lib/isUUID.js @@ -0,0 +1,28 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isUUID; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +var uuid = { + 1: /^[0-9A-F]{8}-[0-9A-F]{4}-1[0-9A-F]{3}-[0-9A-F]{4}-[0-9A-F]{12}$/i, + 2: /^[0-9A-F]{8}-[0-9A-F]{4}-2[0-9A-F]{3}-[0-9A-F]{4}-[0-9A-F]{12}$/i, + 3: /^[0-9A-F]{8}-[0-9A-F]{4}-3[0-9A-F]{3}-[0-9A-F]{4}-[0-9A-F]{12}$/i, + 4: /^[0-9A-F]{8}-[0-9A-F]{4}-4[0-9A-F]{3}-[89AB][0-9A-F]{3}-[0-9A-F]{12}$/i, + 5: /^[0-9A-F]{8}-[0-9A-F]{4}-5[0-9A-F]{3}-[89AB][0-9A-F]{3}-[0-9A-F]{12}$/i, + all: /^[0-9A-F]{8}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{12}$/i +}; + +function isUUID(str, version) { + (0, _assertString.default)(str); + var pattern = uuid[![undefined, null].includes(version) ? version : 'all']; + return !!pattern && pattern.test(str); +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isUppercase.js b/node_modules/validator/lib/isUppercase.js new file mode 100644 index 0000000..c1c02f9 --- /dev/null +++ b/node_modules/validator/lib/isUppercase.js @@ -0,0 +1,18 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isUppercase; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function isUppercase(str) { + (0, _assertString.default)(str); + return str === str.toUpperCase(); +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isVAT.js b/node_modules/validator/lib/isVAT.js new file mode 100644 index 0000000..bfd7beb --- /dev/null +++ b/node_modules/validator/lib/isVAT.js @@ -0,0 +1,29 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isVAT; +exports.vatMatchers = void 0; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +var vatMatchers = { + GB: /^GB((\d{3} \d{4} ([0-8][0-9]|9[0-6]))|(\d{9} \d{3})|(((GD[0-4])|(HA[5-9]))[0-9]{2}))$/, + IT: /^(IT)?[0-9]{11}$/, + NL: /^(NL)?[0-9]{9}B[0-9]{2}$/ +}; +exports.vatMatchers = vatMatchers; + +function isVAT(str, countryCode) { + (0, _assertString.default)(str); + (0, _assertString.default)(countryCode); + + if (countryCode in vatMatchers) { + return vatMatchers[countryCode].test(str); + } + + throw new Error("Invalid country code: '".concat(countryCode, "'")); +} \ No newline at end of file diff --git a/node_modules/validator/lib/isVariableWidth.js b/node_modules/validator/lib/isVariableWidth.js new file mode 100644 index 0000000..6bf226e --- /dev/null +++ b/node_modules/validator/lib/isVariableWidth.js @@ -0,0 +1,22 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isVariableWidth; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +var _isFullWidth = require("./isFullWidth"); + +var _isHalfWidth = require("./isHalfWidth"); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function isVariableWidth(str) { + (0, _assertString.default)(str); + return _isFullWidth.fullWidth.test(str) && _isHalfWidth.halfWidth.test(str); +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/isWhitelisted.js b/node_modules/validator/lib/isWhitelisted.js new file mode 100644 index 0000000..5a80a1b --- /dev/null +++ b/node_modules/validator/lib/isWhitelisted.js @@ -0,0 +1,25 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isWhitelisted; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function isWhitelisted(str, chars) { + (0, _assertString.default)(str); + + for (var i = str.length - 1; i >= 0; i--) { + if (chars.indexOf(str[i]) === -1) { + return false; + } + } + + return true; +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/ltrim.js b/node_modules/validator/lib/ltrim.js new file mode 100644 index 0000000..fc39160 --- /dev/null +++ b/node_modules/validator/lib/ltrim.js @@ -0,0 +1,20 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = ltrim; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function ltrim(str, chars) { + (0, _assertString.default)(str); // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions#Escaping + + var pattern = chars ? 
new RegExp("^[".concat(chars.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'), "]+"), 'g') : /^\s+/g; + return str.replace(pattern, ''); +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/matches.js b/node_modules/validator/lib/matches.js new file mode 100644 index 0000000..ea01ac1 --- /dev/null +++ b/node_modules/validator/lib/matches.js @@ -0,0 +1,23 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = matches; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function matches(str, pattern, modifiers) { + (0, _assertString.default)(str); + + if (Object.prototype.toString.call(pattern) !== '[object RegExp]') { + pattern = new RegExp(pattern, modifiers); + } + + return pattern.test(str); +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/normalizeEmail.js b/node_modules/validator/lib/normalizeEmail.js new file mode 100644 index 0000000..990f431 --- /dev/null +++ b/node_modules/validator/lib/normalizeEmail.js @@ -0,0 +1,151 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = normalizeEmail; + +var _merge = _interopRequireDefault(require("./util/merge")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +var default_normalize_email_options = { + // The following options apply to all email addresses + // Lowercases the local part of the email address. + // Please note this may violate RFC 5321 as per http://stackoverflow.com/a/9808332/192024). + // The domain is always lowercased, as per RFC 1035 + all_lowercase: true, + // The following conversions are specific to GMail + // Lowercases the local part of the GMail address (known to be case-insensitive) + gmail_lowercase: true, + // Removes dots from the local part of the email address, as that's ignored by GMail + gmail_remove_dots: true, + // Removes the subaddress (e.g. "+foo") from the email address + gmail_remove_subaddress: true, + // Conversts the googlemail.com domain to gmail.com + gmail_convert_googlemaildotcom: true, + // The following conversions are specific to Outlook.com / Windows Live / Hotmail + // Lowercases the local part of the Outlook.com address (known to be case-insensitive) + outlookdotcom_lowercase: true, + // Removes the subaddress (e.g. "+foo") from the email address + outlookdotcom_remove_subaddress: true, + // The following conversions are specific to Yahoo + // Lowercases the local part of the Yahoo address (known to be case-insensitive) + yahoo_lowercase: true, + // Removes the subaddress (e.g. "-foo") from the email address + yahoo_remove_subaddress: true, + // The following conversions are specific to Yandex + // Lowercases the local part of the Yandex address (known to be case-insensitive) + yandex_lowercase: true, + // The following conversions are specific to iCloud + // Lowercases the local part of the iCloud address (known to be case-insensitive) + icloud_lowercase: true, + // Removes the subaddress (e.g. 
"+foo") from the email address + icloud_remove_subaddress: true +}; // List of domains used by iCloud + +var icloud_domains = ['icloud.com', 'me.com']; // List of domains used by Outlook.com and its predecessors +// This list is likely incomplete. +// Partial reference: +// https://blogs.office.com/2013/04/17/outlook-com-gets-two-step-verification-sign-in-by-alias-and-new-international-domains/ + +var outlookdotcom_domains = ['hotmail.at', 'hotmail.be', 'hotmail.ca', 'hotmail.cl', 'hotmail.co.il', 'hotmail.co.nz', 'hotmail.co.th', 'hotmail.co.uk', 'hotmail.com', 'hotmail.com.ar', 'hotmail.com.au', 'hotmail.com.br', 'hotmail.com.gr', 'hotmail.com.mx', 'hotmail.com.pe', 'hotmail.com.tr', 'hotmail.com.vn', 'hotmail.cz', 'hotmail.de', 'hotmail.dk', 'hotmail.es', 'hotmail.fr', 'hotmail.hu', 'hotmail.id', 'hotmail.ie', 'hotmail.in', 'hotmail.it', 'hotmail.jp', 'hotmail.kr', 'hotmail.lv', 'hotmail.my', 'hotmail.ph', 'hotmail.pt', 'hotmail.sa', 'hotmail.sg', 'hotmail.sk', 'live.be', 'live.co.uk', 'live.com', 'live.com.ar', 'live.com.mx', 'live.de', 'live.es', 'live.eu', 'live.fr', 'live.it', 'live.nl', 'msn.com', 'outlook.at', 'outlook.be', 'outlook.cl', 'outlook.co.il', 'outlook.co.nz', 'outlook.co.th', 'outlook.com', 'outlook.com.ar', 'outlook.com.au', 'outlook.com.br', 'outlook.com.gr', 'outlook.com.pe', 'outlook.com.tr', 'outlook.com.vn', 'outlook.cz', 'outlook.de', 'outlook.dk', 'outlook.es', 'outlook.fr', 'outlook.hu', 'outlook.id', 'outlook.ie', 'outlook.in', 'outlook.it', 'outlook.jp', 'outlook.kr', 'outlook.lv', 'outlook.my', 'outlook.ph', 'outlook.pt', 'outlook.sa', 'outlook.sg', 'outlook.sk', 'passport.com']; // List of domains used by Yahoo Mail +// This list is likely incomplete + +var yahoo_domains = ['rocketmail.com', 'yahoo.ca', 'yahoo.co.uk', 'yahoo.com', 'yahoo.de', 'yahoo.fr', 'yahoo.in', 'yahoo.it', 'ymail.com']; // List of domains used by yandex.ru + +var yandex_domains = ['yandex.ru', 'yandex.ua', 'yandex.kz', 'yandex.com', 'yandex.by', 'ya.ru']; // replace single dots, but not multiple consecutive dots + +function dotsReplacer(match) { + if (match.length > 1) { + return match; + } + + return ''; +} + +function normalizeEmail(email, options) { + options = (0, _merge.default)(options, default_normalize_email_options); + var raw_parts = email.split('@'); + var domain = raw_parts.pop(); + var user = raw_parts.join('@'); + var parts = [user, domain]; // The domain is always lowercased, as it's case-insensitive per RFC 1035 + + parts[1] = parts[1].toLowerCase(); + + if (parts[1] === 'gmail.com' || parts[1] === 'googlemail.com') { + // Address is GMail + if (options.gmail_remove_subaddress) { + parts[0] = parts[0].split('+')[0]; + } + + if (options.gmail_remove_dots) { + // this does not replace consecutive dots like example..email@gmail.com + parts[0] = parts[0].replace(/\.+/g, dotsReplacer); + } + + if (!parts[0].length) { + return false; + } + + if (options.all_lowercase || options.gmail_lowercase) { + parts[0] = parts[0].toLowerCase(); + } + + parts[1] = options.gmail_convert_googlemaildotcom ? 
'gmail.com' : parts[1]; + } else if (icloud_domains.indexOf(parts[1]) >= 0) { + // Address is iCloud + if (options.icloud_remove_subaddress) { + parts[0] = parts[0].split('+')[0]; + } + + if (!parts[0].length) { + return false; + } + + if (options.all_lowercase || options.icloud_lowercase) { + parts[0] = parts[0].toLowerCase(); + } + } else if (outlookdotcom_domains.indexOf(parts[1]) >= 0) { + // Address is Outlook.com + if (options.outlookdotcom_remove_subaddress) { + parts[0] = parts[0].split('+')[0]; + } + + if (!parts[0].length) { + return false; + } + + if (options.all_lowercase || options.outlookdotcom_lowercase) { + parts[0] = parts[0].toLowerCase(); + } + } else if (yahoo_domains.indexOf(parts[1]) >= 0) { + // Address is Yahoo + if (options.yahoo_remove_subaddress) { + var components = parts[0].split('-'); + parts[0] = components.length > 1 ? components.slice(0, -1).join('-') : components[0]; + } + + if (!parts[0].length) { + return false; + } + + if (options.all_lowercase || options.yahoo_lowercase) { + parts[0] = parts[0].toLowerCase(); + } + } else if (yandex_domains.indexOf(parts[1]) >= 0) { + if (options.all_lowercase || options.yandex_lowercase) { + parts[0] = parts[0].toLowerCase(); + } + + parts[1] = 'yandex.ru'; // all yandex domains are equal, 1st preferred + } else if (options.all_lowercase) { + // Any other address + parts[0] = parts[0].toLowerCase(); + } + + return parts.join('@'); +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/rtrim.js b/node_modules/validator/lib/rtrim.js new file mode 100644 index 0000000..e056a47 --- /dev/null +++ b/node_modules/validator/lib/rtrim.js @@ -0,0 +1,32 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = rtrim; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function rtrim(str, chars) { + (0, _assertString.default)(str); + + if (chars) { + // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions#Escaping + var pattern = new RegExp("[".concat(chars.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'), "]+$"), 'g'); + return str.replace(pattern, ''); + } // Use a faster and more safe than regex trim method https://blog.stevenlevithan.com/archives/faster-trim-javascript + + + var strIndex = str.length - 1; + + while (/\s/.test(str.charAt(strIndex))) { + strIndex -= 1; + } + + return str.slice(0, strIndex + 1); +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/stripLow.js b/node_modules/validator/lib/stripLow.js new file mode 100644 index 0000000..aec2e0b --- /dev/null +++ b/node_modules/validator/lib/stripLow.js @@ -0,0 +1,21 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = stripLow; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +var _blacklist = _interopRequireDefault(require("./blacklist")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function stripLow(str, keep_new_lines) { + (0, _assertString.default)(str); + var chars = keep_new_lines ? 
'\\x00-\\x09\\x0B\\x0C\\x0E-\\x1F\\x7F' : '\\x00-\\x1F\\x7F'; + return (0, _blacklist.default)(str, chars); +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/toBoolean.js b/node_modules/validator/lib/toBoolean.js new file mode 100644 index 0000000..a1b1fe4 --- /dev/null +++ b/node_modules/validator/lib/toBoolean.js @@ -0,0 +1,23 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = toBoolean; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function toBoolean(str, strict) { + (0, _assertString.default)(str); + + if (strict) { + return str === '1' || /^true$/i.test(str); + } + + return str !== '0' && !/^false$/i.test(str) && str !== ''; +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/toDate.js b/node_modules/validator/lib/toDate.js new file mode 100644 index 0000000..cb0756c --- /dev/null +++ b/node_modules/validator/lib/toDate.js @@ -0,0 +1,19 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = toDate; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function toDate(date) { + (0, _assertString.default)(date); + date = Date.parse(date); + return !isNaN(date) ? new Date(date) : null; +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/toFloat.js b/node_modules/validator/lib/toFloat.js new file mode 100644 index 0000000..96adafd --- /dev/null +++ b/node_modules/validator/lib/toFloat.js @@ -0,0 +1,18 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = toFloat; + +var _isFloat = _interopRequireDefault(require("./isFloat")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function toFloat(str) { + if (!(0, _isFloat.default)(str)) return NaN; + return parseFloat(str); +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/toInt.js b/node_modules/validator/lib/toInt.js new file mode 100644 index 0000000..4c0e7ad --- /dev/null +++ b/node_modules/validator/lib/toInt.js @@ -0,0 +1,18 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = toInt; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function toInt(str, radix) { + (0, _assertString.default)(str); + return parseInt(str, radix || 10); +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/trim.js b/node_modules/validator/lib/trim.js new file mode 100644 index 0000000..497e3c3 --- /dev/null +++ b/node_modules/validator/lib/trim.js @@ -0,0 +1,19 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = trim; + +var _rtrim = _interopRequireDefault(require("./rtrim")); + +var _ltrim = _interopRequireDefault(require("./ltrim")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function trim(str, chars) { + return (0, _rtrim.default)((0, _ltrim.default)(str, chars), chars); +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/unescape.js b/node_modules/validator/lib/unescape.js new file mode 100644 index 0000000..303d20c --- /dev/null +++ b/node_modules/validator/lib/unescape.js @@ -0,0 +1,20 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = unescape; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function unescape(str) { + (0, _assertString.default)(str); + return str.replace(/"/g, '"').replace(/'/g, "'").replace(/</g, '<').replace(/>/g, '>').replace(///g, '/').replace(/\/g, '\\').replace(/`/g, '`').replace(/&/g, '&'); // & replacement has to be the last one to prevent + // bugs with intermediate strings containing escape sequences + // See: https://github.com/validatorjs/validator.js/issues/1827 +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/util/algorithms.js b/node_modules/validator/lib/util/algorithms.js new file mode 100644 index 0000000..cf8e512 --- /dev/null +++ b/node_modules/validator/lib/util/algorithms.js @@ -0,0 +1,101 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.iso7064Check = iso7064Check; +exports.luhnCheck = luhnCheck; +exports.reverseMultiplyAndSum = reverseMultiplyAndSum; +exports.verhoeffCheck = verhoeffCheck; + +/** + * Algorithmic validation functions + * May be used as is or implemented in the workflow of other validators. + */ + +/* + * ISO 7064 validation function + * Called with a string of numbers (incl. check digit) + * to validate according to ISO 7064 (MOD 11, 10). + */ +function iso7064Check(str) { + var checkvalue = 10; + + for (var i = 0; i < str.length - 1; i++) { + checkvalue = (parseInt(str[i], 10) + checkvalue) % 10 === 0 ? 10 * 2 % 11 : (parseInt(str[i], 10) + checkvalue) % 10 * 2 % 11; + } + + checkvalue = checkvalue === 1 ? 0 : 11 - checkvalue; + return checkvalue === parseInt(str[10], 10); +} +/* + * Luhn (mod 10) validation function + * Called with a string of numbers (incl. check digit) + * to validate according to the Luhn algorithm. 
+ */ + + +function luhnCheck(str) { + var checksum = 0; + var second = false; + + for (var i = str.length - 1; i >= 0; i--) { + if (second) { + var product = parseInt(str[i], 10) * 2; + + if (product > 9) { + // sum digits of product and add to checksum + checksum += product.toString().split('').map(function (a) { + return parseInt(a, 10); + }).reduce(function (a, b) { + return a + b; + }, 0); + } else { + checksum += product; + } + } else { + checksum += parseInt(str[i], 10); + } + + second = !second; + } + + return checksum % 10 === 0; +} +/* + * Reverse TIN multiplication and summation helper function + * Called with an array of single-digit integers and a base multiplier + * to calculate the sum of the digits multiplied in reverse. + * Normally used in variations of MOD 11 algorithmic checks. + */ + + +function reverseMultiplyAndSum(digits, base) { + var total = 0; + + for (var i = 0; i < digits.length; i++) { + total += digits[i] * (base - i); + } + + return total; +} +/* + * Verhoeff validation helper function + * Called with a string of numbers + * to validate according to the Verhoeff algorithm. + */ + + +function verhoeffCheck(str) { + var d_table = [[0, 1, 2, 3, 4, 5, 6, 7, 8, 9], [1, 2, 3, 4, 0, 6, 7, 8, 9, 5], [2, 3, 4, 0, 1, 7, 8, 9, 5, 6], [3, 4, 0, 1, 2, 8, 9, 5, 6, 7], [4, 0, 1, 2, 3, 9, 5, 6, 7, 8], [5, 9, 8, 7, 6, 0, 4, 3, 2, 1], [6, 5, 9, 8, 7, 1, 0, 4, 3, 2], [7, 6, 5, 9, 8, 2, 1, 0, 4, 3], [8, 7, 6, 5, 9, 3, 2, 1, 0, 4], [9, 8, 7, 6, 5, 4, 3, 2, 1, 0]]; + var p_table = [[0, 1, 2, 3, 4, 5, 6, 7, 8, 9], [1, 5, 7, 6, 2, 8, 3, 0, 9, 4], [5, 8, 0, 3, 7, 9, 6, 1, 4, 2], [8, 9, 1, 6, 0, 4, 3, 5, 2, 7], [9, 4, 5, 3, 1, 2, 6, 8, 7, 0], [4, 2, 8, 6, 5, 7, 3, 9, 0, 1], [2, 7, 9, 3, 8, 0, 6, 4, 1, 5], [7, 0, 4, 6, 9, 1, 3, 2, 5, 8]]; // Copy (to prevent replacement) and reverse + + var str_copy = str.split('').reverse().join(''); + var checksum = 0; + + for (var i = 0; i < str_copy.length; i++) { + checksum = d_table[checksum][p_table[i % 8][parseInt(str_copy[i], 10)]]; + } + + return checksum === 0; +} \ No newline at end of file diff --git a/node_modules/validator/lib/util/assertString.js b/node_modules/validator/lib/util/assertString.js new file mode 100644 index 0000000..2c508af --- /dev/null +++ b/node_modules/validator/lib/util/assertString.js @@ -0,0 +1,22 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = assertString; + +function _typeof(obj) { "@babel/helpers - typeof"; if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? 
"symbol" : typeof obj; }; } return _typeof(obj); } + +function assertString(input) { + var isString = typeof input === 'string' || input instanceof String; + + if (!isString) { + var invalidType = _typeof(input); + + if (input === null) invalidType = 'null';else if (invalidType === 'object') invalidType = input.constructor.name; + throw new TypeError("Expected a string but received a ".concat(invalidType)); + } +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/util/includes.js b/node_modules/validator/lib/util/includes.js new file mode 100644 index 0000000..e061828 --- /dev/null +++ b/node_modules/validator/lib/util/includes.js @@ -0,0 +1,17 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var includes = function includes(arr, val) { + return arr.some(function (arrVal) { + return val === arrVal; + }); +}; + +var _default = includes; +exports.default = _default; +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/util/merge.js b/node_modules/validator/lib/util/merge.js new file mode 100644 index 0000000..a96c739 --- /dev/null +++ b/node_modules/validator/lib/util/merge.js @@ -0,0 +1,22 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = merge; + +function merge() { + var obj = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {}; + var defaults = arguments.length > 1 ? arguments[1] : undefined; + + for (var key in defaults) { + if (typeof obj[key] === 'undefined') { + obj[key] = defaults[key]; + } + } + + return obj; +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/util/multilineRegex.js b/node_modules/validator/lib/util/multilineRegex.js new file mode 100644 index 0000000..6980d14 --- /dev/null +++ b/node_modules/validator/lib/util/multilineRegex.js @@ -0,0 +1,22 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = multilineRegexp; + +/** + * Build RegExp object from an array + * of multiple/multi-line regexp parts + * + * @param {string[]} parts + * @param {string} flags + * @return {object} - RegExp object + */ +function multilineRegexp(parts, flags) { + var regexpAsStringLiteral = parts.join(''); + return new RegExp(regexpAsStringLiteral, flags); +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/util/toString.js b/node_modules/validator/lib/util/toString.js new file mode 100644 index 0000000..6295192 --- /dev/null +++ b/node_modules/validator/lib/util/toString.js @@ -0,0 +1,25 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = toString; + +function _typeof(obj) { "@babel/helpers - typeof"; if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? 
"symbol" : typeof obj; }; } return _typeof(obj); } + +function toString(input) { + if (_typeof(input) === 'object' && input !== null) { + if (typeof input.toString === 'function') { + input = input.toString(); + } else { + input = '[object Object]'; + } + } else if (input === null || typeof input === 'undefined' || isNaN(input) && !input.length) { + input = ''; + } + + return String(input); +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/util/typeOf.js b/node_modules/validator/lib/util/typeOf.js new file mode 100644 index 0000000..5bfacdb --- /dev/null +++ b/node_modules/validator/lib/util/typeOf.js @@ -0,0 +1,20 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = typeOf; + +/** + * Better way to handle type checking + * null, {}, array and date are objects, which confuses + */ +function typeOf(input) { + var rawObject = Object.prototype.toString.call(input).toLowerCase(); + var typeOfRegex = /\[object (.*)]/g; + var type = typeOfRegex.exec(rawObject)[1]; + return type; +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/lib/whitelist.js b/node_modules/validator/lib/whitelist.js new file mode 100644 index 0000000..7ae624e --- /dev/null +++ b/node_modules/validator/lib/whitelist.js @@ -0,0 +1,18 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = whitelist; + +var _assertString = _interopRequireDefault(require("./util/assertString")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function whitelist(str, chars) { + (0, _assertString.default)(str); + return str.replace(new RegExp("[^".concat(chars, "]+"), 'g'), ''); +} + +module.exports = exports.default; +module.exports.default = exports.default; \ No newline at end of file diff --git a/node_modules/validator/package.json b/node_modules/validator/package.json new file mode 100644 index 0000000..7d50520 --- /dev/null +++ b/node_modules/validator/package.json @@ -0,0 +1,75 @@ +{ + "name": "validator", + "description": "String validation and sanitization", + "version": "13.7.0", + "sideEffects": false, + "homepage": "https://github.com/validatorjs/validator.js", + "files": [ + "index.js", + "es", + "lib", + "README.md", + "LICENCE", + "validator.js", + "validator.min.js" + ], + "keywords": [ + "validator", + "validation", + "validate", + "sanitization", + "sanitize", + "sanitisation", + "sanitise", + "assert" + ], + "author": "Chris O'Hara ", + "contributors": [ + "Anthony Nandaa (https://github.com/profnandaa)" + ], + "main": "index.js", + "bugs": { + "url": "https://github.com/validatorjs/validator.js/issues" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/validatorjs/validator.js.git" + }, + "devDependencies": { + "@babel/cli": "^7.0.0", + "@babel/core": "^7.0.0", + "@babel/preset-env": "^7.0.0", + "@babel/register": "^7.0.0", + "babel-eslint": "^10.0.1", + "babel-plugin-add-module-exports": "^1.0.0", + "eslint": "^4.19.1", + "eslint-config-airbnb-base": "^12.1.0", + "eslint-plugin-import": "^2.11.0", + "mocha": "^6.2.3", + "npm-run-all": "^4.1.5", + "nyc": "^14.1.0", + "rimraf": "^3.0.0", + "rollup": "^0.47.0", + "rollup-plugin-babel": "^4.0.1", + "uglify-js": "^3.0.19" + }, + "scripts": { + "lint": "eslint src test", + "lint:fix": "eslint --fix src test", + 
"clean:node": "rimraf index.js lib", + "clean:es": "rimraf es", + "clean:browser": "rimraf validator*.js", + "clean": "run-p clean:*", + "minify": "uglifyjs validator.js -o validator.min.js --compress --mangle --comments /Copyright/", + "build:browser": "node --require @babel/register build-browser && npm run minify", + "build:es": "babel src -d es --env-name=es", + "build:node": "babel src -d .", + "build": "run-p build:*", + "pretest": "npm run build && npm run lint", + "test": "nyc --reporter=cobertura --reporter=text-summary mocha --require @babel/register --reporter dot" + }, + "engines": { + "node": ">= 0.10" + }, + "license": "MIT" +} diff --git a/node_modules/validator/validator.js b/node_modules/validator/validator.js new file mode 100644 index 0000000..32a8814 --- /dev/null +++ b/node_modules/validator/validator.js @@ -0,0 +1,5094 @@ +/*! + * Copyright (c) 2018 Chris O'Hara + * + * Permission is hereby granted, free of charge, to any person obtaining + * a copy of this software and associated documentation files (the + * "Software"), to deal in the Software without restriction, including + * without limitation the rights to use, copy, modify, merge, publish, + * distribute, sublicense, and/or sell copies of the Software, and to + * permit persons to whom the Software is furnished to do so, subject to + * the following conditions: + * + * The above copyright notice and this permission notice shall be + * included in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE + * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION + * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION + * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + */ +(function (global, factory) { + typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() : + typeof define === 'function' && define.amd ? define(factory) : + (global.validator = factory()); +}(this, (function () { 'use strict'; + +function _typeof(obj) { + "@babel/helpers - typeof"; + + if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { + _typeof = function (obj) { + return typeof obj; + }; + } else { + _typeof = function (obj) { + return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? 
"symbol" : typeof obj; + }; + } + + return _typeof(obj); +} + +function _slicedToArray(arr, i) { + return _arrayWithHoles(arr) || _iterableToArrayLimit(arr, i) || _unsupportedIterableToArray(arr, i) || _nonIterableRest(); +} + +function _toConsumableArray(arr) { + return _arrayWithoutHoles(arr) || _iterableToArray(arr) || _unsupportedIterableToArray(arr) || _nonIterableSpread(); +} + +function _arrayWithoutHoles(arr) { + if (Array.isArray(arr)) return _arrayLikeToArray(arr); +} + +function _arrayWithHoles(arr) { + if (Array.isArray(arr)) return arr; +} + +function _iterableToArray(iter) { + if (typeof Symbol !== "undefined" && Symbol.iterator in Object(iter)) return Array.from(iter); +} + +function _iterableToArrayLimit(arr, i) { + if (typeof Symbol === "undefined" || !(Symbol.iterator in Object(arr))) return; + var _arr = []; + var _n = true; + var _d = false; + var _e = undefined; + + try { + for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { + _arr.push(_s.value); + + if (i && _arr.length === i) break; + } + } catch (err) { + _d = true; + _e = err; + } finally { + try { + if (!_n && _i["return"] != null) _i["return"](); + } finally { + if (_d) throw _e; + } + } + + return _arr; +} + +function _unsupportedIterableToArray(o, minLen) { + if (!o) return; + if (typeof o === "string") return _arrayLikeToArray(o, minLen); + var n = Object.prototype.toString.call(o).slice(8, -1); + if (n === "Object" && o.constructor) n = o.constructor.name; + if (n === "Map" || n === "Set") return Array.from(o); + if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); +} + +function _arrayLikeToArray(arr, len) { + if (len == null || len > arr.length) len = arr.length; + + for (var i = 0, arr2 = new Array(len); i < len; i++) arr2[i] = arr[i]; + + return arr2; +} + +function _nonIterableSpread() { + throw new TypeError("Invalid attempt to spread non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); +} + +function _nonIterableRest() { + throw new TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); +} + +function _createForOfIteratorHelper(o, allowArrayLike) { + var it; + + if (typeof Symbol === "undefined" || o[Symbol.iterator] == null) { + if (Array.isArray(o) || (it = _unsupportedIterableToArray(o)) || allowArrayLike && o && typeof o.length === "number") { + if (it) o = it; + var i = 0; + + var F = function () {}; + + return { + s: F, + n: function () { + if (i >= o.length) return { + done: true + }; + return { + done: false, + value: o[i++] + }; + }, + e: function (e) { + throw e; + }, + f: F + }; + } + + throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); + } + + var normalCompletion = true, + didErr = false, + err; + return { + s: function () { + it = o[Symbol.iterator](); + }, + n: function () { + var step = it.next(); + normalCompletion = step.done; + return step; + }, + e: function (e) { + didErr = true; + err = e; + }, + f: function () { + try { + if (!normalCompletion && it.return != null) it.return(); + } finally { + if (didErr) throw err; + } + } + }; +} + +function assertString(input) { + var isString = typeof input === 'string' || input instanceof String; + + if (!isString) { + var invalidType = _typeof(input); + + if (input === null) invalidType = 
'null';else if (invalidType === 'object') invalidType = input.constructor.name; + throw new TypeError("Expected a string but received a ".concat(invalidType)); + } +} + +function toDate(date) { + assertString(date); + date = Date.parse(date); + return !isNaN(date) ? new Date(date) : null; +} + +var alpha = { + 'en-US': /^[A-Z]+$/i, + 'az-AZ': /^[A-VXYZÇƏĞİıÖŞÜ]+$/i, + 'bg-BG': /^[А-Я]+$/i, + 'cs-CZ': /^[A-ZÁČĎÉĚÍŇÓŘŠŤÚŮÝŽ]+$/i, + 'da-DK': /^[A-ZÆØÅ]+$/i, + 'de-DE': /^[A-ZÄÖÜß]+$/i, + 'el-GR': /^[Α-ώ]+$/i, + 'es-ES': /^[A-ZÁÉÍÑÓÚÜ]+$/i, + 'fa-IR': /^[ابپتثجچحخدذرزژسشصضطظعغفقکگلمنوهی]+$/i, + 'fi-FI': /^[A-ZÅÄÖ]+$/i, + 'fr-FR': /^[A-ZÀÂÆÇÉÈÊËÏÎÔŒÙÛÜŸ]+$/i, + 'it-IT': /^[A-ZÀÉÈÌÎÓÒÙ]+$/i, + 'nb-NO': /^[A-ZÆØÅ]+$/i, + 'nl-NL': /^[A-ZÁÉËÏÓÖÜÚ]+$/i, + 'nn-NO': /^[A-ZÆØÅ]+$/i, + 'hu-HU': /^[A-ZÁÉÍÓÖŐÚÜŰ]+$/i, + 'pl-PL': /^[A-ZĄĆĘŚŁŃÓŻŹ]+$/i, + 'pt-PT': /^[A-ZÃÁÀÂÄÇÉÊËÍÏÕÓÔÖÚÜ]+$/i, + 'ru-RU': /^[А-ЯЁ]+$/i, + 'sl-SI': /^[A-ZČĆĐŠŽ]+$/i, + 'sk-SK': /^[A-ZÁČĎÉÍŇÓŠŤÚÝŽĹŔĽÄÔ]+$/i, + 'sr-RS@latin': /^[A-ZČĆŽŠĐ]+$/i, + 'sr-RS': /^[А-ЯЂЈЉЊЋЏ]+$/i, + 'sv-SE': /^[A-ZÅÄÖ]+$/i, + 'th-TH': /^[ก-๐\s]+$/i, + 'tr-TR': /^[A-ZÇĞİıÖŞÜ]+$/i, + 'uk-UA': /^[А-ЩЬЮЯЄIЇҐі]+$/i, + 'vi-VN': /^[A-ZÀÁẠẢÃÂẦẤẬẨẪĂẰẮẶẲẴĐÈÉẸẺẼÊỀẾỆỂỄÌÍỊỈĨÒÓỌỎÕÔỒỐỘỔỖƠỜỚỢỞỠÙÚỤỦŨƯỪỨỰỬỮỲÝỴỶỸ]+$/i, + 'ku-IQ': /^[ئابپتجچحخدرڕزژسشعغفڤقکگلڵمنوۆھەیێيطؤثآإأكضصةظذ]+$/i, + ar: /^[ءآأؤإئابةتثجحخدذرزسشصضطظعغفقكلمنهوىيًٌٍَُِّْٰ]+$/, + he: /^[א-ת]+$/, + fa: /^['آاءأؤئبپتثجچحخدذرزژسشصضطظعغفقکگلمنوهةی']+$/i, + 'hi-IN': /^[\u0900-\u0961]+[\u0972-\u097F]*$/i +}; +var alphanumeric = { + 'en-US': /^[0-9A-Z]+$/i, + 'az-AZ': /^[0-9A-VXYZÇƏĞİıÖŞÜ]+$/i, + 'bg-BG': /^[0-9А-Я]+$/i, + 'cs-CZ': /^[0-9A-ZÁČĎÉĚÍŇÓŘŠŤÚŮÝŽ]+$/i, + 'da-DK': /^[0-9A-ZÆØÅ]+$/i, + 'de-DE': /^[0-9A-ZÄÖÜß]+$/i, + 'el-GR': /^[0-9Α-ω]+$/i, + 'es-ES': /^[0-9A-ZÁÉÍÑÓÚÜ]+$/i, + 'fi-FI': /^[0-9A-ZÅÄÖ]+$/i, + 'fr-FR': /^[0-9A-ZÀÂÆÇÉÈÊËÏÎÔŒÙÛÜŸ]+$/i, + 'it-IT': /^[0-9A-ZÀÉÈÌÎÓÒÙ]+$/i, + 'hu-HU': /^[0-9A-ZÁÉÍÓÖŐÚÜŰ]+$/i, + 'nb-NO': /^[0-9A-ZÆØÅ]+$/i, + 'nl-NL': /^[0-9A-ZÁÉËÏÓÖÜÚ]+$/i, + 'nn-NO': /^[0-9A-ZÆØÅ]+$/i, + 'pl-PL': /^[0-9A-ZĄĆĘŚŁŃÓŻŹ]+$/i, + 'pt-PT': /^[0-9A-ZÃÁÀÂÄÇÉÊËÍÏÕÓÔÖÚÜ]+$/i, + 'ru-RU': /^[0-9А-ЯЁ]+$/i, + 'sl-SI': /^[0-9A-ZČĆĐŠŽ]+$/i, + 'sk-SK': /^[0-9A-ZÁČĎÉÍŇÓŠŤÚÝŽĹŔĽÄÔ]+$/i, + 'sr-RS@latin': /^[0-9A-ZČĆŽŠĐ]+$/i, + 'sr-RS': /^[0-9А-ЯЂЈЉЊЋЏ]+$/i, + 'sv-SE': /^[0-9A-ZÅÄÖ]+$/i, + 'th-TH': /^[ก-๙\s]+$/i, + 'tr-TR': /^[0-9A-ZÇĞİıÖŞÜ]+$/i, + 'uk-UA': /^[0-9А-ЩЬЮЯЄIЇҐі]+$/i, + 'ku-IQ': /^[٠١٢٣٤٥٦٧٨٩0-9ئابپتجچحخدرڕزژسشعغفڤقکگلڵمنوۆھەیێيطؤثآإأكضصةظذ]+$/i, + 'vi-VN': /^[0-9A-ZÀÁẠẢÃÂẦẤẬẨẪĂẰẮẶẲẴĐÈÉẸẺẼÊỀẾỆỂỄÌÍỊỈĨÒÓỌỎÕÔỒỐỘỔỖƠỜỚỢỞỠÙÚỤỦŨƯỪỨỰỬỮỲÝỴỶỸ]+$/i, + ar: /^[٠١٢٣٤٥٦٧٨٩0-9ءآأؤإئابةتثجحخدذرزسشصضطظعغفقكلمنهوىيًٌٍَُِّْٰ]+$/, + he: /^[0-9א-ת]+$/, + fa: /^['0-9آاءأؤئبپتثجچحخدذرزژسشصضطظعغفقکگلمنوهةی۱۲۳۴۵۶۷۸۹۰']+$/i, + 'hi-IN': /^[\u0900-\u0963]+[\u0966-\u097F]*$/i +}; +var decimal = { + 'en-US': '.', + ar: '٫' +}; +var englishLocales = ['AU', 'GB', 'HK', 'IN', 'NZ', 'ZA', 'ZM']; + +for (var locale, i = 0; i < englishLocales.length; i++) { + locale = "en-".concat(englishLocales[i]); + alpha[locale] = alpha['en-US']; + alphanumeric[locale] = alphanumeric['en-US']; + decimal[locale] = decimal['en-US']; +} // Source: http://www.localeplanet.com/java/ + + +var arabicLocales = ['AE', 'BH', 'DZ', 'EG', 'IQ', 'JO', 'KW', 'LB', 'LY', 'MA', 'QM', 'QA', 'SA', 'SD', 'SY', 'TN', 'YE']; + +for (var _locale, _i = 0; _i < arabicLocales.length; _i++) { + _locale = "ar-".concat(arabicLocales[_i]); + alpha[_locale] = alpha.ar; + alphanumeric[_locale] = alphanumeric.ar; + decimal[_locale] = decimal.ar; +} + +var farsiLocales = 
['IR', 'AF']; + +for (var _locale2, _i2 = 0; _i2 < farsiLocales.length; _i2++) { + _locale2 = "fa-".concat(farsiLocales[_i2]); + alphanumeric[_locale2] = alphanumeric.fa; + decimal[_locale2] = decimal.ar; +} // Source: https://en.wikipedia.org/wiki/Decimal_mark + + +var dotDecimal = ['ar-EG', 'ar-LB', 'ar-LY']; +var commaDecimal = ['bg-BG', 'cs-CZ', 'da-DK', 'de-DE', 'el-GR', 'en-ZM', 'es-ES', 'fr-CA', 'fr-FR', 'id-ID', 'it-IT', 'ku-IQ', 'hi-IN', 'hu-HU', 'nb-NO', 'nn-NO', 'nl-NL', 'pl-PL', 'pt-PT', 'ru-RU', 'sl-SI', 'sr-RS@latin', 'sr-RS', 'sv-SE', 'tr-TR', 'uk-UA', 'vi-VN']; + +for (var _i3 = 0; _i3 < dotDecimal.length; _i3++) { + decimal[dotDecimal[_i3]] = decimal['en-US']; +} + +for (var _i4 = 0; _i4 < commaDecimal.length; _i4++) { + decimal[commaDecimal[_i4]] = ','; +} + +alpha['fr-CA'] = alpha['fr-FR']; +alphanumeric['fr-CA'] = alphanumeric['fr-FR']; +alpha['pt-BR'] = alpha['pt-PT']; +alphanumeric['pt-BR'] = alphanumeric['pt-PT']; +decimal['pt-BR'] = decimal['pt-PT']; // see #862 + +alpha['pl-Pl'] = alpha['pl-PL']; +alphanumeric['pl-Pl'] = alphanumeric['pl-PL']; +decimal['pl-Pl'] = decimal['pl-PL']; // see #1455 + +alpha['fa-AF'] = alpha.fa; + +function isFloat(str, options) { + assertString(str); + options = options || {}; + + var _float = new RegExp("^(?:[-+])?(?:[0-9]+)?(?:\\".concat(options.locale ? decimal[options.locale] : '.', "[0-9]*)?(?:[eE][\\+\\-]?(?:[0-9]+))?$")); + + if (str === '' || str === '.' || str === '-' || str === '+') { + return false; + } + + var value = parseFloat(str.replace(',', '.')); + return _float.test(str) && (!options.hasOwnProperty('min') || value >= options.min) && (!options.hasOwnProperty('max') || value <= options.max) && (!options.hasOwnProperty('lt') || value < options.lt) && (!options.hasOwnProperty('gt') || value > options.gt); +} +var locales = Object.keys(decimal); + +function toFloat(str) { + if (!isFloat(str)) return NaN; + return parseFloat(str); +} + +function toInt(str, radix) { + assertString(str); + return parseInt(str, radix || 10); +} + +function toBoolean(str, strict) { + assertString(str); + + if (strict) { + return str === '1' || /^true$/i.test(str); + } + + return str !== '0' && !/^false$/i.test(str) && str !== ''; +} + +function equals(str, comparison) { + assertString(str); + return str === comparison; +} + +function toString$1(input) { + if (_typeof(input) === 'object' && input !== null) { + if (typeof input.toString === 'function') { + input = input.toString(); + } else { + input = '[object Object]'; + } + } else if (input === null || typeof input === 'undefined' || isNaN(input) && !input.length) { + input = ''; + } + + return String(input); +} + +function merge() { + var obj = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {}; + var defaults = arguments.length > 1 ? 
arguments[1] : undefined; + + for (var key in defaults) { + if (typeof obj[key] === 'undefined') { + obj[key] = defaults[key]; + } + } + + return obj; +} + +var defaulContainsOptions = { + ignoreCase: false, + minOccurrences: 1 +}; +function contains(str, elem, options) { + assertString(str); + options = merge(options, defaulContainsOptions); + + if (options.ignoreCase) { + return str.toLowerCase().split(toString$1(elem).toLowerCase()).length > options.minOccurrences; + } + + return str.split(toString$1(elem)).length > options.minOccurrences; +} + +function matches(str, pattern, modifiers) { + assertString(str); + + if (Object.prototype.toString.call(pattern) !== '[object RegExp]') { + pattern = new RegExp(pattern, modifiers); + } + + return pattern.test(str); +} + +/* eslint-disable prefer-rest-params */ + +function isByteLength(str, options) { + assertString(str); + var min; + var max; + + if (_typeof(options) === 'object') { + min = options.min || 0; + max = options.max; + } else { + // backwards compatibility: isByteLength(str, min [, max]) + min = arguments[1]; + max = arguments[2]; + } + + var len = encodeURI(str).split(/%..|./).length - 1; + return len >= min && (typeof max === 'undefined' || len <= max); +} + +var default_fqdn_options = { + require_tld: true, + allow_underscores: false, + allow_trailing_dot: false, + allow_numeric_tld: false, + allow_wildcard: false +}; +function isFQDN(str, options) { + assertString(str); + options = merge(options, default_fqdn_options); + /* Remove the optional trailing dot before checking validity */ + + if (options.allow_trailing_dot && str[str.length - 1] === '.') { + str = str.substring(0, str.length - 1); + } + /* Remove the optional wildcard before checking validity */ + + + if (options.allow_wildcard === true && str.indexOf('*.') === 0) { + str = str.substring(2); + } + + var parts = str.split('.'); + var tld = parts[parts.length - 1]; + + if (options.require_tld) { + // disallow fqdns without tld + if (parts.length < 2) { + return false; + } + + if (!/^([a-z\u00A1-\u00A8\u00AA-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF]{2,}|xn[a-z0-9-]{2,})$/i.test(tld)) { + return false; + } // disallow spaces + + + if (/\s/.test(tld)) { + return false; + } + } // reject numeric TLDs + + + if (!options.allow_numeric_tld && /^\d+$/.test(tld)) { + return false; + } + + return parts.every(function (part) { + if (part.length > 63) { + return false; + } + + if (!/^[a-z_\u00a1-\uffff0-9-]+$/i.test(part)) { + return false; + } // disallow full-width chars + + + if (/[\uff01-\uff5e]/.test(part)) { + return false; + } // disallow parts starting or ending with hyphen + + + if (/^-|-$/.test(part)) { + return false; + } + + if (!options.allow_underscores && /_/.test(part)) { + return false; + } + + return true; + }); +} + +/** +11.3. Examples + + The following addresses + + fe80::1234 (on the 1st link of the node) + ff02::5678 (on the 5th link of the node) + ff08::9abc (on the 10th organization of the node) + + would be represented as follows: + + fe80::1234%1 + ff02::5678%5 + ff08::9abc%10 + + (Here we assume a natural translation from a zone index to the + part, where the Nth zone of any scope is translated into + "N".) + + If we use interface names as , those addresses could also be + represented as follows: + + fe80::1234%ne0 + ff02::5678%pvc1.3 + ff08::9abc%interface10 + + where the interface "ne0" belongs to the 1st link, "pvc1.3" belongs + to the 5th link, and "interface10" belongs to the 10th organization. 
+ * * */ + +var IPv4SegmentFormat = '(?:[0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])'; +var IPv4AddressFormat = "(".concat(IPv4SegmentFormat, "[.]){3}").concat(IPv4SegmentFormat); +var IPv4AddressRegExp = new RegExp("^".concat(IPv4AddressFormat, "$")); +var IPv6SegmentFormat = '(?:[0-9a-fA-F]{1,4})'; +var IPv6AddressRegExp = new RegExp('^(' + "(?:".concat(IPv6SegmentFormat, ":){7}(?:").concat(IPv6SegmentFormat, "|:)|") + "(?:".concat(IPv6SegmentFormat, ":){6}(?:").concat(IPv4AddressFormat, "|:").concat(IPv6SegmentFormat, "|:)|") + "(?:".concat(IPv6SegmentFormat, ":){5}(?::").concat(IPv4AddressFormat, "|(:").concat(IPv6SegmentFormat, "){1,2}|:)|") + "(?:".concat(IPv6SegmentFormat, ":){4}(?:(:").concat(IPv6SegmentFormat, "){0,1}:").concat(IPv4AddressFormat, "|(:").concat(IPv6SegmentFormat, "){1,3}|:)|") + "(?:".concat(IPv6SegmentFormat, ":){3}(?:(:").concat(IPv6SegmentFormat, "){0,2}:").concat(IPv4AddressFormat, "|(:").concat(IPv6SegmentFormat, "){1,4}|:)|") + "(?:".concat(IPv6SegmentFormat, ":){2}(?:(:").concat(IPv6SegmentFormat, "){0,3}:").concat(IPv4AddressFormat, "|(:").concat(IPv6SegmentFormat, "){1,5}|:)|") + "(?:".concat(IPv6SegmentFormat, ":){1}(?:(:").concat(IPv6SegmentFormat, "){0,4}:").concat(IPv4AddressFormat, "|(:").concat(IPv6SegmentFormat, "){1,6}|:)|") + "(?::((?::".concat(IPv6SegmentFormat, "){0,5}:").concat(IPv4AddressFormat, "|(?::").concat(IPv6SegmentFormat, "){1,7}|:))") + ')(%[0-9a-zA-Z-.:]{1,})?$'); +function isIP(str) { + var version = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : ''; + assertString(str); + version = String(version); + + if (!version) { + return isIP(str, 4) || isIP(str, 6); + } + + if (version === '4') { + if (!IPv4AddressRegExp.test(str)) { + return false; + } + + var parts = str.split('.').sort(function (a, b) { + return a - b; + }); + return parts[3] <= 255; + } + + if (version === '6') { + return !!IPv6AddressRegExp.test(str); + } + + return false; +} + +var default_email_options = { + allow_display_name: false, + require_display_name: false, + allow_utf8_local_part: true, + require_tld: true, + blacklisted_chars: '', + ignore_max_length: false, + host_blacklist: [] +}; +/* eslint-disable max-len */ + +/* eslint-disable no-control-regex */ + +var splitNameAddress = /^([^\x00-\x1F\x7F-\x9F\cX]+)]/.test(display_name_without_quotes); + + if (contains_illegal) { + // if contains illegal characters, + // must to be enclosed in double-quotes, otherwise it's not a valid display name + if (display_name_without_quotes === display_name) { + return false; + } // the quotes in display name must start with character symbol \ + + + var all_start_with_back_slash = display_name_without_quotes.split('"').length === display_name_without_quotes.split('\\"').length; + + if (!all_start_with_back_slash) { + return false; + } + } + + return true; +} + +function isEmail(str, options) { + assertString(str); + options = merge(options, default_email_options); + + if (options.require_display_name || options.allow_display_name) { + var display_email = str.match(splitNameAddress); + + if (display_email) { + var display_name = display_email[1]; // Remove display name and angle brackets to get email address + // Can be done in the regex but will introduce a ReDOS (See #1597 for more info) + + str = str.replace(display_name, '').replace(/(^<|>$)/g, ''); // sometimes need to trim the last space to get the display name + // because there may be a space between display name and email address + // eg. 
myname + // the display name is `myname` instead of `myname `, so need to trim the last space + + if (display_name.endsWith(' ')) { + display_name = display_name.substr(0, display_name.length - 1); + } + + if (!validateDisplayName(display_name)) { + return false; + } + } else if (options.require_display_name) { + return false; + } + } + + if (!options.ignore_max_length && str.length > defaultMaxEmailLength) { + return false; + } + + var parts = str.split('@'); + var domain = parts.pop(); + var lower_domain = domain.toLowerCase(); + + if (options.host_blacklist.includes(lower_domain)) { + return false; + } + + var user = parts.join('@'); + + if (options.domain_specific_validation && (lower_domain === 'gmail.com' || lower_domain === 'googlemail.com')) { + /* + Previously we removed dots for gmail addresses before validating. + This was removed because it allows `multiple..dots@gmail.com` + to be reported as valid, but it is not. + Gmail only normalizes single dots, removing them from here is pointless, + should be done in normalizeEmail + */ + user = user.toLowerCase(); // Removing sub-address from username before gmail validation + + var username = user.split('+')[0]; // Dots are not included in gmail length restriction + + if (!isByteLength(username.replace(/\./g, ''), { + min: 6, + max: 30 + })) { + return false; + } + + var _user_parts = username.split('.'); + + for (var i = 0; i < _user_parts.length; i++) { + if (!gmailUserPart.test(_user_parts[i])) { + return false; + } + } + } + + if (options.ignore_max_length === false && (!isByteLength(user, { + max: 64 + }) || !isByteLength(domain, { + max: 254 + }))) { + return false; + } + + if (!isFQDN(domain, { + require_tld: options.require_tld + })) { + if (!options.allow_ip_domain) { + return false; + } + + if (!isIP(domain)) { + if (!domain.startsWith('[') || !domain.endsWith(']')) { + return false; + } + + var noBracketdomain = domain.substr(1, domain.length - 2); + + if (noBracketdomain.length === 0 || !isIP(noBracketdomain)) { + return false; + } + } + } + + if (user[0] === '"') { + user = user.slice(1, user.length - 1); + return options.allow_utf8_local_part ? quotedEmailUserUtf8.test(user) : quotedEmailUser.test(user); + } + + var pattern = options.allow_utf8_local_part ? 
emailUserUtf8Part : emailUserPart; + var user_parts = user.split('.'); + + for (var _i = 0; _i < user_parts.length; _i++) { + if (!pattern.test(user_parts[_i])) { + return false; + } + } + + if (options.blacklisted_chars) { + if (user.search(new RegExp("[".concat(options.blacklisted_chars, "]+"), 'g')) !== -1) return false; + } + + return true; +} + +/* +options for isURL method + +require_protocol - if set as true isURL will return false if protocol is not present in the URL +require_valid_protocol - isURL will check if the URL's protocol is present in the protocols option +protocols - valid protocols can be modified with this option +require_host - if set as false isURL will not check if host is present in the URL +require_port - if set as true isURL will check if port is present in the URL +allow_protocol_relative_urls - if set as true protocol relative URLs will be allowed +validate_length - if set as false isURL will skip string length validation (IE maximum is 2083) + +*/ + +var default_url_options = { + protocols: ['http', 'https', 'ftp'], + require_tld: true, + require_protocol: false, + require_host: true, + require_port: false, + require_valid_protocol: true, + allow_underscores: false, + allow_trailing_dot: false, + allow_protocol_relative_urls: false, + allow_fragments: true, + allow_query_components: true, + validate_length: true +}; +var wrapped_ipv6 = /^\[([^\]]+)\](?::([0-9]+))?$/; + +function isRegExp(obj) { + return Object.prototype.toString.call(obj) === '[object RegExp]'; +} + +function checkHost(host, matches) { + for (var i = 0; i < matches.length; i++) { + var match = matches[i]; + + if (host === match || isRegExp(match) && match.test(host)) { + return true; + } + } + + return false; +} + +function isURL(url, options) { + assertString(url); + + if (!url || /[\s<>]/.test(url)) { + return false; + } + + if (url.indexOf('mailto:') === 0) { + return false; + } + + options = merge(options, default_url_options); + + if (options.validate_length && url.length >= 2083) { + return false; + } + + if (!options.allow_fragments && url.includes('#')) { + return false; + } + + if (!options.allow_query_components && (url.includes('?') || url.includes('&'))) { + return false; + } + + var protocol, auth, host, hostname, port, port_str, split, ipv6; + split = url.split('#'); + url = split.shift(); + split = url.split('?'); + url = split.shift(); + split = url.split('://'); + + if (split.length > 1) { + protocol = split.shift().toLowerCase(); + + if (options.require_valid_protocol && options.protocols.indexOf(protocol) === -1) { + return false; + } + } else if (options.require_protocol) { + return false; + } else if (url.substr(0, 2) === '//') { + if (!options.allow_protocol_relative_urls) { + return false; + } + + split[0] = url.substr(2); + } + + url = split.join('://'); + + if (url === '') { + return false; + } + + split = url.split('/'); + url = split.shift(); + + if (url === '' && !options.require_host) { + return true; + } + + split = url.split('@'); + + if (split.length > 1) { + if (options.disallow_auth) { + return false; + } + + if (split[0] === '') { + return false; + } + + auth = split.shift(); + + if (auth.indexOf(':') >= 0 && auth.split(':').length > 2) { + return false; + } + + var _auth$split = auth.split(':'), + _auth$split2 = _slicedToArray(_auth$split, 2), + user = _auth$split2[0], + password = _auth$split2[1]; + + if (user === '' && password === '') { + return false; + } + } + + hostname = split.join('@'); + port_str = null; + ipv6 = null; + var ipv6_match = 
hostname.match(wrapped_ipv6); + + if (ipv6_match) { + host = ''; + ipv6 = ipv6_match[1]; + port_str = ipv6_match[2] || null; + } else { + split = hostname.split(':'); + host = split.shift(); + + if (split.length) { + port_str = split.join(':'); + } + } + + if (port_str !== null && port_str.length > 0) { + port = parseInt(port_str, 10); + + if (!/^[0-9]+$/.test(port_str) || port <= 0 || port > 65535) { + return false; + } + } else if (options.require_port) { + return false; + } + + if (options.host_whitelist) { + return checkHost(host, options.host_whitelist); + } + + if (!isIP(host) && !isFQDN(host, options) && (!ipv6 || !isIP(ipv6, 6))) { + return false; + } + + host = host || ipv6; + + if (options.host_blacklist && checkHost(host, options.host_blacklist)) { + return false; + } + + return true; +} + +var macAddress = /^(?:[0-9a-fA-F]{2}([-:\s]))([0-9a-fA-F]{2}\1){4}([0-9a-fA-F]{2})$/; +var macAddressNoSeparators = /^([0-9a-fA-F]){12}$/; +var macAddressWithDots = /^([0-9a-fA-F]{4}\.){2}([0-9a-fA-F]{4})$/; +function isMACAddress(str, options) { + assertString(str); + /** + * @deprecated `no_colons` TODO: remove it in the next major + */ + + if (options && (options.no_colons || options.no_separators)) { + return macAddressNoSeparators.test(str); + } + + return macAddress.test(str) || macAddressWithDots.test(str); +} + +var subnetMaybe = /^\d{1,3}$/; +var v4Subnet = 32; +var v6Subnet = 128; +function isIPRange(str) { + var version = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : ''; + assertString(str); + var parts = str.split('/'); // parts[0] -> ip, parts[1] -> subnet + + if (parts.length !== 2) { + return false; + } + + if (!subnetMaybe.test(parts[1])) { + return false; + } // Disallow preceding 0 i.e. 01, 02, ... + + + if (parts[1].length > 1 && parts[1].startsWith('0')) { + return false; + } + + var isValidIP = isIP(parts[0], version); + + if (!isValidIP) { + return false; + } // Define valid subnet according to IP's version + + + var expectedSubnet = null; + + switch (String(version)) { + case '4': + expectedSubnet = v4Subnet; + break; + + case '6': + expectedSubnet = v6Subnet; + break; + + default: + expectedSubnet = isIP(parts[0], '6') ? v6Subnet : v4Subnet; + } + + return parts[1] <= expectedSubnet && parts[1] >= 0; +} + +var default_date_options = { + format: 'YYYY/MM/DD', + delimiters: ['/', '-'], + strictMode: false +}; + +function isValidFormat(format) { + return /(^(y{4}|y{2})[.\/-](m{1,2})[.\/-](d{1,2})$)|(^(m{1,2})[.\/-](d{1,2})[.\/-]((y{4}|y{2})$))|(^(d{1,2})[.\/-](m{1,2})[.\/-]((y{4}|y{2})$))/gi.test(format); +} + +function zip(date, format) { + var zippedArr = [], + len = Math.min(date.length, format.length); + + for (var i = 0; i < len; i++) { + zippedArr.push([date[i], format[i]]); + } + + return zippedArr; +} + +function isDate(input, options) { + if (typeof options === 'string') { + // Allow backward compatbility for old format isDate(input [, format]) + options = merge({ + format: options + }, default_date_options); + } else { + options = merge(options, default_date_options); + } + + if (typeof input === 'string' && isValidFormat(options.format)) { + var formatDelimiter = options.delimiters.find(function (delimiter) { + return options.format.indexOf(delimiter) !== -1; + }); + var dateDelimiter = options.strictMode ? 
formatDelimiter : options.delimiters.find(function (delimiter) { + return input.indexOf(delimiter) !== -1; + }); + var dateAndFormat = zip(input.split(dateDelimiter), options.format.toLowerCase().split(formatDelimiter)); + var dateObj = {}; + + var _iterator = _createForOfIteratorHelper(dateAndFormat), + _step; + + try { + for (_iterator.s(); !(_step = _iterator.n()).done;) { + var _step$value = _slicedToArray(_step.value, 2), + dateWord = _step$value[0], + formatWord = _step$value[1]; + + if (dateWord.length !== formatWord.length) { + return false; + } + + dateObj[formatWord.charAt(0)] = dateWord; + } + } catch (err) { + _iterator.e(err); + } finally { + _iterator.f(); + } + + return new Date("".concat(dateObj.m, "/").concat(dateObj.d, "/").concat(dateObj.y)).getDate() === +dateObj.d; + } + + if (!options.strictMode) { + return Object.prototype.toString.call(input) === '[object Date]' && isFinite(input); + } + + return false; +} + +var defaultOptions = { + loose: false +}; +var strictBooleans = ['true', 'false', '1', '0']; +var looseBooleans = [].concat(strictBooleans, ['yes', 'no']); +function isBoolean(str) { + var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : defaultOptions; + assertString(str); + + if (options.loose) { + return looseBooleans.includes(str.toLowerCase()); + } + + return strictBooleans.includes(str); +} + +var localeReg = /^[A-Za-z]{2,4}([_-]([A-Za-z]{4}|[\d]{3}))?([_-]([A-Za-z]{2}|[\d]{3}))?$/; +function isLocale(str) { + assertString(str); + + if (str === 'en_US_POSIX' || str === 'ca_ES_VALENCIA') { + return true; + } + + return localeReg.test(str); +} + +function isAlpha(_str) { + var locale = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'en-US'; + var options = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {}; + assertString(_str); + var str = _str; + var ignore = options.ignore; + + if (ignore) { + if (ignore instanceof RegExp) { + str = str.replace(ignore, ''); + } else if (typeof ignore === 'string') { + str = str.replace(new RegExp("[".concat(ignore.replace(/[-[\]{}()*+?.,\\^$|#\\s]/g, '\\$&'), "]"), 'g'), ''); // escape regex for ignore + } else { + throw new Error('ignore should be instance of a String or RegExp'); + } + } + + if (locale in alpha) { + return alpha[locale].test(str); + } + + throw new Error("Invalid locale '".concat(locale, "'")); +} +var locales$1 = Object.keys(alpha); + +function isAlphanumeric(_str) { + var locale = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'en-US'; + var options = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {}; + assertString(_str); + var str = _str; + var ignore = options.ignore; + + if (ignore) { + if (ignore instanceof RegExp) { + str = str.replace(ignore, ''); + } else if (typeof ignore === 'string') { + str = str.replace(new RegExp("[".concat(ignore.replace(/[-[\]{}()*+?.,\\^$|#\\s]/g, '\\$&'), "]"), 'g'), ''); // escape regex for ignore + } else { + throw new Error('ignore should be instance of a String or RegExp'); + } + } + + if (locale in alphanumeric) { + return alphanumeric[locale].test(str); + } + + throw new Error("Invalid locale '".concat(locale, "'")); +} +var locales$2 = Object.keys(alphanumeric); + +var numericNoSymbols = /^[0-9]+$/; +function isNumeric(str, options) { + assertString(str); + + if (options && options.no_symbols) { + return numericNoSymbols.test(str); + } + + return new RegExp("^[+-]?([0-9]*[".concat((options || {}).locale ? 
decimal[options.locale] : '.', "])?[0-9]+$")).test(str); +} + +/** + * Reference: + * https://en.wikipedia.org/ -- Wikipedia + * https://docs.microsoft.com/en-us/microsoft-365/compliance/eu-passport-number -- EU Passport Number + * https://countrycode.org/ -- Country Codes + */ + +var passportRegexByCountryCode = { + AM: /^[A-Z]{2}\d{7}$/, + // ARMENIA + AR: /^[A-Z]{3}\d{6}$/, + // ARGENTINA + AT: /^[A-Z]\d{7}$/, + // AUSTRIA + AU: /^[A-Z]\d{7}$/, + // AUSTRALIA + BE: /^[A-Z]{2}\d{6}$/, + // BELGIUM + BG: /^\d{9}$/, + // BULGARIA + BR: /^[A-Z]{2}\d{6}$/, + // BRAZIL + BY: /^[A-Z]{2}\d{7}$/, + // BELARUS + CA: /^[A-Z]{2}\d{6}$/, + // CANADA + CH: /^[A-Z]\d{7}$/, + // SWITZERLAND + CN: /^G\d{8}$|^E(?![IO])[A-Z0-9]\d{7}$/, + // CHINA [G=Ordinary, E=Electronic] followed by 8-digits, or E followed by any UPPERCASE letter (except I and O) followed by 7 digits + CY: /^[A-Z](\d{6}|\d{8})$/, + // CYPRUS + CZ: /^\d{8}$/, + // CZECH REPUBLIC + DE: /^[CFGHJKLMNPRTVWXYZ0-9]{9}$/, + // GERMANY + DK: /^\d{9}$/, + // DENMARK + DZ: /^\d{9}$/, + // ALGERIA + EE: /^([A-Z]\d{7}|[A-Z]{2}\d{7})$/, + // ESTONIA (K followed by 7-digits), e-passports have 2 UPPERCASE followed by 7 digits + ES: /^[A-Z0-9]{2}([A-Z0-9]?)\d{6}$/, + // SPAIN + FI: /^[A-Z]{2}\d{7}$/, + // FINLAND + FR: /^\d{2}[A-Z]{2}\d{5}$/, + // FRANCE + GB: /^\d{9}$/, + // UNITED KINGDOM + GR: /^[A-Z]{2}\d{7}$/, + // GREECE + HR: /^\d{9}$/, + // CROATIA + HU: /^[A-Z]{2}(\d{6}|\d{7})$/, + // HUNGARY + IE: /^[A-Z0-9]{2}\d{7}$/, + // IRELAND + IN: /^[A-Z]{1}-?\d{7}$/, + // INDIA + ID: /^[A-C]\d{7}$/, + // INDONESIA + IR: /^[A-Z]\d{8}$/, + // IRAN + IS: /^(A)\d{7}$/, + // ICELAND + IT: /^[A-Z0-9]{2}\d{7}$/, + // ITALY + JP: /^[A-Z]{2}\d{7}$/, + // JAPAN + KR: /^[MS]\d{8}$/, + // SOUTH KOREA, REPUBLIC OF KOREA, [S=PS Passports, M=PM Passports] + LT: /^[A-Z0-9]{8}$/, + // LITHUANIA + LU: /^[A-Z0-9]{8}$/, + // LUXEMBURG + LV: /^[A-Z0-9]{2}\d{7}$/, + // LATVIA + LY: /^[A-Z0-9]{8}$/, + // LIBYA + MT: /^\d{7}$/, + // MALTA + MZ: /^([A-Z]{2}\d{7})|(\d{2}[A-Z]{2}\d{5})$/, + // MOZAMBIQUE + MY: /^[AHK]\d{8}$/, + // MALAYSIA + NL: /^[A-Z]{2}[A-Z0-9]{6}\d$/, + // NETHERLANDS + PL: /^[A-Z]{2}\d{7}$/, + // POLAND + PT: /^[A-Z]\d{6}$/, + // PORTUGAL + RO: /^\d{8,9}$/, + // ROMANIA + RU: /^\d{9}$/, + // RUSSIAN FEDERATION + SE: /^\d{8}$/, + // SWEDEN + SL: /^(P)[A-Z]\d{7}$/, + // SLOVANIA + SK: /^[0-9A-Z]\d{7}$/, + // SLOVAKIA + TR: /^[A-Z]\d{8}$/, + // TURKEY + UA: /^[A-Z]{2}\d{6}$/, + // UKRAINE + US: /^\d{9}$/ // UNITED STATES + +}; +/** + * Check if str is a valid passport number + * relative to provided ISO Country Code. + * + * @param {string} str + * @param {string} countryCode + * @return {boolean} + */ + +function isPassportNumber(str, countryCode) { + assertString(str); + /** Remove All Whitespaces, Convert to UPPERCASE */ + + var normalizedStr = str.replace(/\s/g, '').toUpperCase(); + return countryCode.toUpperCase() in passportRegexByCountryCode && passportRegexByCountryCode[countryCode].test(normalizedStr); +} + +var _int = /^(?:[-+]?(?:0|[1-9][0-9]*))$/; +var intLeadingZeroes = /^[-+]?[0-9]+$/; +function isInt(str, options) { + assertString(str); + options = options || {}; // Get the regex to use for testing, based on whether + // leading zeroes are allowed or not. + + var regex = options.hasOwnProperty('allow_leading_zeroes') && !options.allow_leading_zeroes ? 
_int : intLeadingZeroes; // Check min/max/lt/gt + + var minCheckPassed = !options.hasOwnProperty('min') || str >= options.min; + var maxCheckPassed = !options.hasOwnProperty('max') || str <= options.max; + var ltCheckPassed = !options.hasOwnProperty('lt') || str < options.lt; + var gtCheckPassed = !options.hasOwnProperty('gt') || str > options.gt; + return regex.test(str) && minCheckPassed && maxCheckPassed && ltCheckPassed && gtCheckPassed; +} + +function isPort(str) { + return isInt(str, { + min: 0, + max: 65535 + }); +} + +function isLowercase(str) { + assertString(str); + return str === str.toLowerCase(); +} + +function isUppercase(str) { + assertString(str); + return str === str.toUpperCase(); +} + +var imeiRegexWithoutHypens = /^[0-9]{15}$/; +var imeiRegexWithHypens = /^\d{2}-\d{6}-\d{6}-\d{1}$/; +function isIMEI(str, options) { + assertString(str); + options = options || {}; // default regex for checking imei is the one without hyphens + + var imeiRegex = imeiRegexWithoutHypens; + + if (options.allow_hyphens) { + imeiRegex = imeiRegexWithHypens; + } + + if (!imeiRegex.test(str)) { + return false; + } + + str = str.replace(/-/g, ''); + var sum = 0, + mul = 2, + l = 14; + + for (var i = 0; i < l; i++) { + var digit = str.substring(l - i - 1, l - i); + var tp = parseInt(digit, 10) * mul; + + if (tp >= 10) { + sum += tp % 10 + 1; + } else { + sum += tp; + } + + if (mul === 1) { + mul += 1; + } else { + mul -= 1; + } + } + + var chk = (10 - sum % 10) % 10; + + if (chk !== parseInt(str.substring(14, 15), 10)) { + return false; + } + + return true; +} + +/* eslint-disable no-control-regex */ + +var ascii = /^[\x00-\x7F]+$/; +/* eslint-enable no-control-regex */ + +function isAscii(str) { + assertString(str); + return ascii.test(str); +} + +var fullWidth = /[^\u0020-\u007E\uFF61-\uFF9F\uFFA0-\uFFDC\uFFE8-\uFFEE0-9a-zA-Z]/; +function isFullWidth(str) { + assertString(str); + return fullWidth.test(str); +} + +var halfWidth = /[\u0020-\u007E\uFF61-\uFF9F\uFFA0-\uFFDC\uFFE8-\uFFEE0-9a-zA-Z]/; +function isHalfWidth(str) { + assertString(str); + return halfWidth.test(str); +} + +function isVariableWidth(str) { + assertString(str); + return fullWidth.test(str) && halfWidth.test(str); +} + +/* eslint-disable no-control-regex */ + +var multibyte = /[^\x00-\x7F]/; +/* eslint-enable no-control-regex */ + +function isMultibyte(str) { + assertString(str); + return multibyte.test(str); +} + +/** + * Build RegExp object from an array + * of multiple/multi-line regexp parts + * + * @param {string[]} parts + * @param {string} flags + * @return {object} - RegExp object + */ +function multilineRegexp(parts, flags) { + var regexpAsStringLiteral = parts.join(''); + return new RegExp(regexpAsStringLiteral, flags); +} + +/** + * Regular Expression to match + * semantic versioning (SemVer) + * built from multi-line, multi-parts regexp + * Reference: https://semver.org/ + */ + +var semanticVersioningRegex = multilineRegexp(['^(0|[1-9]\\d*)\\.(0|[1-9]\\d*)\\.(0|[1-9]\\d*)', '(?:-((?:0|[1-9]\\d*|\\d*[a-z-][0-9a-z-]*)(?:\\.(?:0|[1-9]\\d*|\\d*[a-z-][0-9a-z-]*))*))', '?(?:\\+([0-9a-z-]+(?:\\.[0-9a-z-]+)*))?$'], 'i'); +function isSemVer(str) { + assertString(str); + return semanticVersioningRegex.test(str); +} + +var surrogatePair = /[\uD800-\uDBFF][\uDC00-\uDFFF]/; +function isSurrogatePair(str) { + assertString(str); + return surrogatePair.test(str); +} + +var includes = function includes(arr, val) { + return arr.some(function (arrVal) { + return val === arrVal; + }); +}; + +function decimalRegExp(options) { + var 
regExp = new RegExp("^[-+]?([0-9]+)?(\\".concat(decimal[options.locale], "[0-9]{").concat(options.decimal_digits, "})").concat(options.force_decimal ? '' : '?', "$")); + return regExp; +} + +var default_decimal_options = { + force_decimal: false, + decimal_digits: '1,', + locale: 'en-US' +}; +var blacklist = ['', '-', '+']; +function isDecimal(str, options) { + assertString(str); + options = merge(options, default_decimal_options); + + if (options.locale in decimal) { + return !includes(blacklist, str.replace(/ /g, '')) && decimalRegExp(options).test(str); + } + + throw new Error("Invalid locale '".concat(options.locale, "'")); +} + +var hexadecimal = /^(0x|0h)?[0-9A-F]+$/i; +function isHexadecimal(str) { + assertString(str); + return hexadecimal.test(str); +} + +var octal = /^(0o)?[0-7]+$/i; +function isOctal(str) { + assertString(str); + return octal.test(str); +} + +function isDivisibleBy(str, num) { + assertString(str); + return toFloat(str) % parseInt(num, 10) === 0; +} + +var hexcolor = /^#?([0-9A-F]{3}|[0-9A-F]{4}|[0-9A-F]{6}|[0-9A-F]{8})$/i; +function isHexColor(str) { + assertString(str); + return hexcolor.test(str); +} + +var rgbColor = /^rgb\((([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5]),){2}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\)$/; +var rgbaColor = /^rgba\((([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5]),){3}(0?\.\d|1(\.0)?|0(\.0)?)\)$/; +var rgbColorPercent = /^rgb\((([0-9]%|[1-9][0-9]%|100%),){2}([0-9]%|[1-9][0-9]%|100%)\)/; +var rgbaColorPercent = /^rgba\((([0-9]%|[1-9][0-9]%|100%),){3}(0?\.\d|1(\.0)?|0(\.0)?)\)/; +function isRgbColor(str) { + var includePercentValues = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : true; + assertString(str); + + if (!includePercentValues) { + return rgbColor.test(str) || rgbaColor.test(str); + } + + return rgbColor.test(str) || rgbaColor.test(str) || rgbColorPercent.test(str) || rgbaColorPercent.test(str); +} + +var hslComma = /^hsla?\(((\+|\-)?([0-9]+(\.[0-9]+)?(e(\+|\-)?[0-9]+)?|\.[0-9]+(e(\+|\-)?[0-9]+)?))(deg|grad|rad|turn)?(,(\+|\-)?([0-9]+(\.[0-9]+)?(e(\+|\-)?[0-9]+)?|\.[0-9]+(e(\+|\-)?[0-9]+)?)%){2}(,((\+|\-)?([0-9]+(\.[0-9]+)?(e(\+|\-)?[0-9]+)?|\.[0-9]+(e(\+|\-)?[0-9]+)?)%?))?\)$/i; +var hslSpace = /^hsla?\(((\+|\-)?([0-9]+(\.[0-9]+)?(e(\+|\-)?[0-9]+)?|\.[0-9]+(e(\+|\-)?[0-9]+)?))(deg|grad|rad|turn)?(\s(\+|\-)?([0-9]+(\.[0-9]+)?(e(\+|\-)?[0-9]+)?|\.[0-9]+(e(\+|\-)?[0-9]+)?)%){2}\s?(\/\s((\+|\-)?([0-9]+(\.[0-9]+)?(e(\+|\-)?[0-9]+)?|\.[0-9]+(e(\+|\-)?[0-9]+)?)%?)\s?)?\)$/i; +function isHSL(str) { + assertString(str); // Strip duplicate spaces before calling the validation regex (See #1598 for more info) + + var strippedStr = str.replace(/\s+/g, ' ').replace(/\s?(hsla?\(|\)|,)\s?/ig, '$1'); + + if (strippedStr.indexOf(',') !== -1) { + return hslComma.test(strippedStr); + } + + return hslSpace.test(strippedStr); +} + +var isrc = /^[A-Z]{2}[0-9A-Z]{3}\d{2}\d{5}$/; +function isISRC(str) { + assertString(str); + return isrc.test(str); +} + +/** + * List of country codes with + * corresponding IBAN regular expression + * Reference: https://en.wikipedia.org/wiki/International_Bank_Account_Number + */ + +var ibanRegexThroughCountryCode = { + AD: /^(AD[0-9]{2})\d{8}[A-Z0-9]{12}$/, + AE: /^(AE[0-9]{2})\d{3}\d{16}$/, + AL: /^(AL[0-9]{2})\d{8}[A-Z0-9]{16}$/, + AT: /^(AT[0-9]{2})\d{16}$/, + AZ: /^(AZ[0-9]{2})[A-Z0-9]{4}\d{20}$/, + BA: /^(BA[0-9]{2})\d{16}$/, + BE: /^(BE[0-9]{2})\d{12}$/, + BG: /^(BG[0-9]{2})[A-Z]{4}\d{6}[A-Z0-9]{8}$/, + BH: /^(BH[0-9]{2})[A-Z]{4}[A-Z0-9]{14}$/, + BR: 
/^(BR[0-9]{2})\d{23}[A-Z]{1}[A-Z0-9]{1}$/, + BY: /^(BY[0-9]{2})[A-Z0-9]{4}\d{20}$/, + CH: /^(CH[0-9]{2})\d{5}[A-Z0-9]{12}$/, + CR: /^(CR[0-9]{2})\d{18}$/, + CY: /^(CY[0-9]{2})\d{8}[A-Z0-9]{16}$/, + CZ: /^(CZ[0-9]{2})\d{20}$/, + DE: /^(DE[0-9]{2})\d{18}$/, + DK: /^(DK[0-9]{2})\d{14}$/, + DO: /^(DO[0-9]{2})[A-Z]{4}\d{20}$/, + EE: /^(EE[0-9]{2})\d{16}$/, + EG: /^(EG[0-9]{2})\d{25}$/, + ES: /^(ES[0-9]{2})\d{20}$/, + FI: /^(FI[0-9]{2})\d{14}$/, + FO: /^(FO[0-9]{2})\d{14}$/, + FR: /^(FR[0-9]{2})\d{10}[A-Z0-9]{11}\d{2}$/, + GB: /^(GB[0-9]{2})[A-Z]{4}\d{14}$/, + GE: /^(GE[0-9]{2})[A-Z0-9]{2}\d{16}$/, + GI: /^(GI[0-9]{2})[A-Z]{4}[A-Z0-9]{15}$/, + GL: /^(GL[0-9]{2})\d{14}$/, + GR: /^(GR[0-9]{2})\d{7}[A-Z0-9]{16}$/, + GT: /^(GT[0-9]{2})[A-Z0-9]{4}[A-Z0-9]{20}$/, + HR: /^(HR[0-9]{2})\d{17}$/, + HU: /^(HU[0-9]{2})\d{24}$/, + IE: /^(IE[0-9]{2})[A-Z0-9]{4}\d{14}$/, + IL: /^(IL[0-9]{2})\d{19}$/, + IQ: /^(IQ[0-9]{2})[A-Z]{4}\d{15}$/, + IR: /^(IR[0-9]{2})0\d{2}0\d{18}$/, + IS: /^(IS[0-9]{2})\d{22}$/, + IT: /^(IT[0-9]{2})[A-Z]{1}\d{10}[A-Z0-9]{12}$/, + JO: /^(JO[0-9]{2})[A-Z]{4}\d{22}$/, + KW: /^(KW[0-9]{2})[A-Z]{4}[A-Z0-9]{22}$/, + KZ: /^(KZ[0-9]{2})\d{3}[A-Z0-9]{13}$/, + LB: /^(LB[0-9]{2})\d{4}[A-Z0-9]{20}$/, + LC: /^(LC[0-9]{2})[A-Z]{4}[A-Z0-9]{24}$/, + LI: /^(LI[0-9]{2})\d{5}[A-Z0-9]{12}$/, + LT: /^(LT[0-9]{2})\d{16}$/, + LU: /^(LU[0-9]{2})\d{3}[A-Z0-9]{13}$/, + LV: /^(LV[0-9]{2})[A-Z]{4}[A-Z0-9]{13}$/, + MC: /^(MC[0-9]{2})\d{10}[A-Z0-9]{11}\d{2}$/, + MD: /^(MD[0-9]{2})[A-Z0-9]{20}$/, + ME: /^(ME[0-9]{2})\d{18}$/, + MK: /^(MK[0-9]{2})\d{3}[A-Z0-9]{10}\d{2}$/, + MR: /^(MR[0-9]{2})\d{23}$/, + MT: /^(MT[0-9]{2})[A-Z]{4}\d{5}[A-Z0-9]{18}$/, + MU: /^(MU[0-9]{2})[A-Z]{4}\d{19}[A-Z]{3}$/, + MZ: /^(MZ[0-9]{2})\d{21}$/, + NL: /^(NL[0-9]{2})[A-Z]{4}\d{10}$/, + NO: /^(NO[0-9]{2})\d{11}$/, + PK: /^(PK[0-9]{2})[A-Z0-9]{4}\d{16}$/, + PL: /^(PL[0-9]{2})\d{24}$/, + PS: /^(PS[0-9]{2})[A-Z0-9]{4}\d{21}$/, + PT: /^(PT[0-9]{2})\d{21}$/, + QA: /^(QA[0-9]{2})[A-Z]{4}[A-Z0-9]{21}$/, + RO: /^(RO[0-9]{2})[A-Z]{4}[A-Z0-9]{16}$/, + RS: /^(RS[0-9]{2})\d{18}$/, + SA: /^(SA[0-9]{2})\d{2}[A-Z0-9]{18}$/, + SC: /^(SC[0-9]{2})[A-Z]{4}\d{20}[A-Z]{3}$/, + SE: /^(SE[0-9]{2})\d{20}$/, + SI: /^(SI[0-9]{2})\d{15}$/, + SK: /^(SK[0-9]{2})\d{20}$/, + SM: /^(SM[0-9]{2})[A-Z]{1}\d{10}[A-Z0-9]{12}$/, + SV: /^(SV[0-9]{2})[A-Z0-9]{4}\d{20}$/, + TL: /^(TL[0-9]{2})\d{19}$/, + TN: /^(TN[0-9]{2})\d{20}$/, + TR: /^(TR[0-9]{2})\d{5}[A-Z0-9]{17}$/, + UA: /^(UA[0-9]{2})\d{6}[A-Z0-9]{19}$/, + VA: /^(VA[0-9]{2})\d{18}$/, + VG: /^(VG[0-9]{2})[A-Z0-9]{4}\d{16}$/, + XK: /^(XK[0-9]{2})\d{16}$/ +}; +/** + * Check whether string has correct universal IBAN format + * The IBAN consists of up to 34 alphanumeric characters, as follows: + * Country Code using ISO 3166-1 alpha-2, two letters + * check digits, two digits and + * Basic Bank Account Number (BBAN), up to 30 alphanumeric characters. 
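+ * For illustration, the widely used test IBAN "GB82 WEST 1234 5698 7654 32"
+ * strips to "GB82WEST12345698765432", matches the GB pattern in the table above
+ * and passes the mod-97 checksum implemented below, so isIBAN() returns true;
+ * changing any single digit breaks the checksum and it returns false.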
+ * NOTE: Permitted IBAN characters are: digits [0-9] and the 26 latin alphabetic [A-Z] + * + * @param {string} str - string under validation + * @return {boolean} + */ + +function hasValidIbanFormat(str) { + // Strip white spaces and hyphens + var strippedStr = str.replace(/[\s\-]+/gi, '').toUpperCase(); + var isoCountryCode = strippedStr.slice(0, 2).toUpperCase(); + return isoCountryCode in ibanRegexThroughCountryCode && ibanRegexThroughCountryCode[isoCountryCode].test(strippedStr); +} +/** + * Check whether string has valid IBAN Checksum + * by performing basic mod-97 operation and + * the remainder should equal 1 + * -- Start by rearranging the IBAN by moving the four initial characters to the end of the string + * -- Replace each letter in the string with two digits, A -> 10, B = 11, Z = 35 + * -- Interpret the string as a decimal integer and + * -- compute the remainder on division by 97 (mod 97) + * Reference: https://en.wikipedia.org/wiki/International_Bank_Account_Number + * + * @param {string} str + * @return {boolean} + */ + + +function hasValidIbanChecksum(str) { + var strippedStr = str.replace(/[^A-Z0-9]+/gi, '').toUpperCase(); // Keep only digits and A-Z latin alphabetic + + var rearranged = strippedStr.slice(4) + strippedStr.slice(0, 4); + var alphaCapsReplacedWithDigits = rearranged.replace(/[A-Z]/g, function (_char) { + return _char.charCodeAt(0) - 55; + }); + var remainder = alphaCapsReplacedWithDigits.match(/\d{1,7}/g).reduce(function (acc, value) { + return Number(acc + value) % 97; + }, ''); + return remainder === 1; +} + +function isIBAN(str) { + assertString(str); + return hasValidIbanFormat(str) && hasValidIbanChecksum(str); +} +var locales$3 = Object.keys(ibanRegexThroughCountryCode); + +var validISO31661Alpha2CountriesCodes = new Set(['AD', 'AE', 'AF', 'AG', 'AI', 'AL', 'AM', 'AO', 'AQ', 'AR', 'AS', 'AT', 'AU', 'AW', 'AX', 'AZ', 'BA', 'BB', 'BD', 'BE', 'BF', 'BG', 'BH', 'BI', 'BJ', 'BL', 'BM', 'BN', 'BO', 'BQ', 'BR', 'BS', 'BT', 'BV', 'BW', 'BY', 'BZ', 'CA', 'CC', 'CD', 'CF', 'CG', 'CH', 'CI', 'CK', 'CL', 'CM', 'CN', 'CO', 'CR', 'CU', 'CV', 'CW', 'CX', 'CY', 'CZ', 'DE', 'DJ', 'DK', 'DM', 'DO', 'DZ', 'EC', 'EE', 'EG', 'EH', 'ER', 'ES', 'ET', 'FI', 'FJ', 'FK', 'FM', 'FO', 'FR', 'GA', 'GB', 'GD', 'GE', 'GF', 'GG', 'GH', 'GI', 'GL', 'GM', 'GN', 'GP', 'GQ', 'GR', 'GS', 'GT', 'GU', 'GW', 'GY', 'HK', 'HM', 'HN', 'HR', 'HT', 'HU', 'ID', 'IE', 'IL', 'IM', 'IN', 'IO', 'IQ', 'IR', 'IS', 'IT', 'JE', 'JM', 'JO', 'JP', 'KE', 'KG', 'KH', 'KI', 'KM', 'KN', 'KP', 'KR', 'KW', 'KY', 'KZ', 'LA', 'LB', 'LC', 'LI', 'LK', 'LR', 'LS', 'LT', 'LU', 'LV', 'LY', 'MA', 'MC', 'MD', 'ME', 'MF', 'MG', 'MH', 'MK', 'ML', 'MM', 'MN', 'MO', 'MP', 'MQ', 'MR', 'MS', 'MT', 'MU', 'MV', 'MW', 'MX', 'MY', 'MZ', 'NA', 'NC', 'NE', 'NF', 'NG', 'NI', 'NL', 'NO', 'NP', 'NR', 'NU', 'NZ', 'OM', 'PA', 'PE', 'PF', 'PG', 'PH', 'PK', 'PL', 'PM', 'PN', 'PR', 'PS', 'PT', 'PW', 'PY', 'QA', 'RE', 'RO', 'RS', 'RU', 'RW', 'SA', 'SB', 'SC', 'SD', 'SE', 'SG', 'SH', 'SI', 'SJ', 'SK', 'SL', 'SM', 'SN', 'SO', 'SR', 'SS', 'ST', 'SV', 'SX', 'SY', 'SZ', 'TC', 'TD', 'TF', 'TG', 'TH', 'TJ', 'TK', 'TL', 'TM', 'TN', 'TO', 'TR', 'TT', 'TV', 'TW', 'TZ', 'UA', 'UG', 'UM', 'US', 'UY', 'UZ', 'VA', 'VC', 'VE', 'VG', 'VI', 'VN', 'VU', 'WF', 'WS', 'YE', 'YT', 'ZA', 'ZM', 'ZW']); +function isISO31661Alpha2(str) { + assertString(str); + return validISO31661Alpha2CountriesCodes.has(str.toUpperCase()); +} +var CountryCodes = validISO31661Alpha2CountriesCodes; + +var isBICReg = /^[A-Za-z]{6}[A-Za-z0-9]{2}([A-Za-z0-9]{3})?$/; +function isBIC(str) 
{ + assertString(str); // toUpperCase() should be removed when a new major version goes out that changes + // the regex to [A-Z] (per the spec). + + if (!CountryCodes.has(str.slice(4, 6).toUpperCase())) { + return false; + } + + return isBICReg.test(str); +} + +var md5 = /^[a-f0-9]{32}$/; +function isMD5(str) { + assertString(str); + return md5.test(str); +} + +var lengths = { + md5: 32, + md4: 32, + sha1: 40, + sha256: 64, + sha384: 96, + sha512: 128, + ripemd128: 32, + ripemd160: 40, + tiger128: 32, + tiger160: 40, + tiger192: 48, + crc32: 8, + crc32b: 8 +}; +function isHash(str, algorithm) { + assertString(str); + var hash = new RegExp("^[a-fA-F0-9]{".concat(lengths[algorithm], "}$")); + return hash.test(str); +} + +var notBase64 = /[^A-Z0-9+\/=]/i; +var urlSafeBase64 = /^[A-Z0-9_\-]*$/i; +var defaultBase64Options = { + urlSafe: false +}; +function isBase64(str, options) { + assertString(str); + options = merge(options, defaultBase64Options); + var len = str.length; + + if (options.urlSafe) { + return urlSafeBase64.test(str); + } + + if (len % 4 !== 0 || notBase64.test(str)) { + return false; + } + + var firstPaddingChar = str.indexOf('='); + return firstPaddingChar === -1 || firstPaddingChar === len - 1 || firstPaddingChar === len - 2 && str[len - 1] === '='; +} + +function isJWT(str) { + assertString(str); + var dotSplit = str.split('.'); + var len = dotSplit.length; + + if (len > 3 || len < 2) { + return false; + } + + return dotSplit.reduce(function (acc, currElem) { + return acc && isBase64(currElem, { + urlSafe: true + }); + }, true); +} + +var default_json_options = { + allow_primitives: false +}; +function isJSON(str, options) { + assertString(str); + + try { + options = merge(options, default_json_options); + var primitives = []; + + if (options.allow_primitives) { + primitives = [null, false, true]; + } + + var obj = JSON.parse(str); + return primitives.includes(obj) || !!obj && _typeof(obj) === 'object'; + } catch (e) { + /* ignore */ + } + + return false; +} + +var default_is_empty_options = { + ignore_whitespace: false +}; +function isEmpty(str, options) { + assertString(str); + options = merge(options, default_is_empty_options); + return (options.ignore_whitespace ? str.trim().length : str.length) === 0; +} + +/* eslint-disable prefer-rest-params */ + +function isLength(str, options) { + assertString(str); + var min; + var max; + + if (_typeof(options) === 'object') { + min = options.min || 0; + max = options.max; + } else { + // backwards compatibility: isLength(str, min [, max]) + min = arguments[1] || 0; + max = arguments[2]; + } + + var surrogatePairs = str.match(/[\uD800-\uDBFF][\uDC00-\uDFFF]/g) || []; + var len = str.length - surrogatePairs.length; + return len >= min && (typeof max === 'undefined' || len <= max); +} + +var uuid = { + 1: /^[0-9A-F]{8}-[0-9A-F]{4}-1[0-9A-F]{3}-[0-9A-F]{4}-[0-9A-F]{12}$/i, + 2: /^[0-9A-F]{8}-[0-9A-F]{4}-2[0-9A-F]{3}-[0-9A-F]{4}-[0-9A-F]{12}$/i, + 3: /^[0-9A-F]{8}-[0-9A-F]{4}-3[0-9A-F]{3}-[0-9A-F]{4}-[0-9A-F]{12}$/i, + 4: /^[0-9A-F]{8}-[0-9A-F]{4}-4[0-9A-F]{3}-[89AB][0-9A-F]{3}-[0-9A-F]{12}$/i, + 5: /^[0-9A-F]{8}-[0-9A-F]{4}-5[0-9A-F]{3}-[89AB][0-9A-F]{3}-[0-9A-F]{12}$/i, + all: /^[0-9A-F]{8}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{12}$/i +}; +function isUUID(str, version) { + assertString(str); + var pattern = uuid[![undefined, null].includes(version) ? 
version : 'all']; + return !!pattern && pattern.test(str); +} + +function isMongoId(str) { + assertString(str); + return isHexadecimal(str) && str.length === 24; +} + +function isAfter(str) { + var date = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : String(new Date()); + assertString(str); + var comparison = toDate(date); + var original = toDate(str); + return !!(original && comparison && original > comparison); +} + +function isBefore(str) { + var date = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : String(new Date()); + assertString(str); + var comparison = toDate(date); + var original = toDate(str); + return !!(original && comparison && original < comparison); +} + +function isIn(str, options) { + assertString(str); + var i; + + if (Object.prototype.toString.call(options) === '[object Array]') { + var array = []; + + for (i in options) { + // https://github.com/gotwarlost/istanbul/blob/master/ignoring-code-for-coverage.md#ignoring-code-for-coverage-purposes + // istanbul ignore else + if ({}.hasOwnProperty.call(options, i)) { + array[i] = toString$1(options[i]); + } + } + + return array.indexOf(str) >= 0; + } else if (_typeof(options) === 'object') { + return options.hasOwnProperty(str); + } else if (options && typeof options.indexOf === 'function') { + return options.indexOf(str) >= 0; + } + + return false; +} + +/* eslint-disable max-len */ + +var creditCard = /^(?:4[0-9]{12}(?:[0-9]{3,6})?|5[1-5][0-9]{14}|(222[1-9]|22[3-9][0-9]|2[3-6][0-9]{2}|27[01][0-9]|2720)[0-9]{12}|6(?:011|5[0-9][0-9])[0-9]{12,15}|3[47][0-9]{13}|3(?:0[0-5]|[68][0-9])[0-9]{11}|(?:2131|1800|35\d{3})\d{11}|6[27][0-9]{14}|^(81[0-9]{14,17}))$/; +/* eslint-enable max-len */ + +function isCreditCard(str) { + assertString(str); + var sanitized = str.replace(/[- ]+/g, ''); + + if (!creditCard.test(sanitized)) { + return false; + } + + var sum = 0; + var digit; + var tmpNum; + var shouldDouble; + + for (var i = sanitized.length - 1; i >= 0; i--) { + digit = sanitized.substring(i, i + 1); + tmpNum = parseInt(digit, 10); + + if (shouldDouble) { + tmpNum *= 2; + + if (tmpNum >= 10) { + sum += tmpNum % 10 + 1; + } else { + sum += tmpNum; + } + } else { + sum += tmpNum; + } + + shouldDouble = !shouldDouble; + } + + return !!(sum % 10 === 0 ? 
sanitized : false); +} + +var validators = { + PL: function PL(str) { + assertString(str); + var weightOfDigits = { + 1: 1, + 2: 3, + 3: 7, + 4: 9, + 5: 1, + 6: 3, + 7: 7, + 8: 9, + 9: 1, + 10: 3, + 11: 0 + }; + + if (str != null && str.length === 11 && isInt(str, { + allow_leading_zeroes: true + })) { + var digits = str.split('').slice(0, -1); + var sum = digits.reduce(function (acc, digit, index) { + return acc + Number(digit) * weightOfDigits[index + 1]; + }, 0); + var modulo = sum % 10; + var lastDigit = Number(str.charAt(str.length - 1)); + + if (modulo === 0 && lastDigit === 0 || lastDigit === 10 - modulo) { + return true; + } + } + + return false; + }, + ES: function ES(str) { + assertString(str); + var DNI = /^[0-9X-Z][0-9]{7}[TRWAGMYFPDXBNJZSQVHLCKE]$/; + var charsValue = { + X: 0, + Y: 1, + Z: 2 + }; + var controlDigits = ['T', 'R', 'W', 'A', 'G', 'M', 'Y', 'F', 'P', 'D', 'X', 'B', 'N', 'J', 'Z', 'S', 'Q', 'V', 'H', 'L', 'C', 'K', 'E']; // sanitize user input + + var sanitized = str.trim().toUpperCase(); // validate the data structure + + if (!DNI.test(sanitized)) { + return false; + } // validate the control digit + + + var number = sanitized.slice(0, -1).replace(/[X,Y,Z]/g, function (_char) { + return charsValue[_char]; + }); + return sanitized.endsWith(controlDigits[number % 23]); + }, + FI: function FI(str) { + // https://dvv.fi/en/personal-identity-code#:~:text=control%20character%20for%20a-,personal,-identity%20code%20calculated + assertString(str); + + if (str.length !== 11) { + return false; + } + + if (!str.match(/^\d{6}[\-A\+]\d{3}[0-9ABCDEFHJKLMNPRSTUVWXY]{1}$/)) { + return false; + } + + var checkDigits = '0123456789ABCDEFHJKLMNPRSTUVWXY'; + var idAsNumber = parseInt(str.slice(0, 6), 10) * 1000 + parseInt(str.slice(7, 10), 10); + var remainder = idAsNumber % 31; + var checkDigit = checkDigits[remainder]; + return checkDigit === str.slice(10, 11); + }, + IN: function IN(str) { + var DNI = /^[1-9]\d{3}\s?\d{4}\s?\d{4}$/; // multiplication table + + var d = [[0, 1, 2, 3, 4, 5, 6, 7, 8, 9], [1, 2, 3, 4, 0, 6, 7, 8, 9, 5], [2, 3, 4, 0, 1, 7, 8, 9, 5, 6], [3, 4, 0, 1, 2, 8, 9, 5, 6, 7], [4, 0, 1, 2, 3, 9, 5, 6, 7, 8], [5, 9, 8, 7, 6, 0, 4, 3, 2, 1], [6, 5, 9, 8, 7, 1, 0, 4, 3, 2], [7, 6, 5, 9, 8, 2, 1, 0, 4, 3], [8, 7, 6, 5, 9, 3, 2, 1, 0, 4], [9, 8, 7, 6, 5, 4, 3, 2, 1, 0]]; // permutation table + + var p = [[0, 1, 2, 3, 4, 5, 6, 7, 8, 9], [1, 5, 7, 6, 2, 8, 3, 0, 9, 4], [5, 8, 0, 3, 7, 9, 6, 1, 4, 2], [8, 9, 1, 6, 0, 4, 3, 5, 2, 7], [9, 4, 5, 3, 1, 2, 6, 8, 7, 0], [4, 2, 8, 6, 5, 7, 3, 9, 0, 1], [2, 7, 9, 3, 8, 0, 6, 4, 1, 5], [7, 0, 4, 6, 9, 1, 3, 2, 5, 8]]; // sanitize user input + + var sanitized = str.trim(); // validate the data structure + + if (!DNI.test(sanitized)) { + return false; + } + + var c = 0; + var invertedArray = sanitized.replace(/\s/g, '').split('').map(Number).reverse(); + invertedArray.forEach(function (val, i) { + c = d[c][p[i % 8][val]]; + }); + return c === 0; + }, + IR: function IR(str) { + if (!str.match(/^\d{10}$/)) return false; + str = "0000".concat(str).substr(str.length - 6); + if (parseInt(str.substr(3, 6), 10) === 0) return false; + var lastNumber = parseInt(str.substr(9, 1), 10); + var sum = 0; + + for (var i = 0; i < 9; i++) { + sum += parseInt(str.substr(i, 1), 10) * (10 - i); + } + + sum %= 11; + return sum < 2 && lastNumber === sum || sum >= 2 && lastNumber === 11 - sum; + }, + IT: function IT(str) { + if (str.length !== 9) return false; + if (str === 'CA00000AA') return false; // 
https://it.wikipedia.org/wiki/Carta_d%27identit%C3%A0_elettronica_italiana + + return str.search(/C[A-Z][0-9]{5}[A-Z]{2}/i) > -1; + }, + NO: function NO(str) { + var sanitized = str.trim(); + if (isNaN(Number(sanitized))) return false; + if (sanitized.length !== 11) return false; + if (sanitized === '00000000000') return false; // https://no.wikipedia.org/wiki/F%C3%B8dselsnummer + + var f = sanitized.split('').map(Number); + var k1 = (11 - (3 * f[0] + 7 * f[1] + 6 * f[2] + 1 * f[3] + 8 * f[4] + 9 * f[5] + 4 * f[6] + 5 * f[7] + 2 * f[8]) % 11) % 11; + var k2 = (11 - (5 * f[0] + 4 * f[1] + 3 * f[2] + 2 * f[3] + 7 * f[4] + 6 * f[5] + 5 * f[6] + 4 * f[7] + 3 * f[8] + 2 * k1) % 11) % 11; + if (k1 !== f[9] || k2 !== f[10]) return false; + return true; + }, + TH: function TH(str) { + if (!str.match(/^[1-8]\d{12}$/)) return false; // validate check digit + + var sum = 0; + + for (var i = 0; i < 12; i++) { + sum += parseInt(str[i], 10) * (13 - i); + } + + return str[12] === ((11 - sum % 11) % 10).toString(); + }, + LK: function LK(str) { + var old_nic = /^[1-9]\d{8}[vx]$/i; + var new_nic = /^[1-9]\d{11}$/i; + if (str.length === 10 && old_nic.test(str)) return true;else if (str.length === 12 && new_nic.test(str)) return true; + return false; + }, + 'he-IL': function heIL(str) { + var DNI = /^\d{9}$/; // sanitize user input + + var sanitized = str.trim(); // validate the data structure + + if (!DNI.test(sanitized)) { + return false; + } + + var id = sanitized; + var sum = 0, + incNum; + + for (var i = 0; i < id.length; i++) { + incNum = Number(id[i]) * (i % 2 + 1); // Multiply number by 1 or 2 + + sum += incNum > 9 ? incNum - 9 : incNum; // Sum the digits up and add to total + } + + return sum % 10 === 0; + }, + 'ar-LY': function arLY(str) { + // Libya National Identity Number NIN is 12 digits, the first digit is either 1 or 2 + var NIN = /^(1|2)\d{11}$/; // sanitize user input + + var sanitized = str.trim(); // validate the data structure + + if (!NIN.test(sanitized)) { + return false; + } + + return true; + }, + 'ar-TN': function arTN(str) { + var DNI = /^\d{8}$/; // sanitize user input + + var sanitized = str.trim(); // validate the data structure + + if (!DNI.test(sanitized)) { + return false; + } + + return true; + }, + 'zh-CN': function zhCN(str) { + var provincesAndCities = ['11', // 北京 + '12', // 天津 + '13', // 河北 + '14', // 山西 + '15', // 内蒙古 + '21', // 辽宁 + '22', // 吉林 + '23', // 黑龙江 + '31', // 上海 + '32', // 江苏 + '33', // 浙江 + '34', // 安徽 + '35', // 福建 + '36', // 江西 + '37', // 山东 + '41', // 河南 + '42', // 湖北 + '43', // 湖南 + '44', // 广东 + '45', // 广西 + '46', // 海南 + '50', // 重庆 + '51', // 四川 + '52', // 贵州 + '53', // 云南 + '54', // 西藏 + '61', // 陕西 + '62', // 甘肃 + '63', // 青海 + '64', // 宁夏 + '65', // 新疆 + '71', // 台湾 + '81', // 香港 + '82', // 澳门 + '91' // 国外 + ]; + var powers = ['7', '9', '10', '5', '8', '4', '2', '1', '6', '3', '7', '9', '10', '5', '8', '4', '2']; + var parityBit = ['1', '0', 'X', '9', '8', '7', '6', '5', '4', '3', '2']; + + var checkAddressCode = function checkAddressCode(addressCode) { + return provincesAndCities.includes(addressCode); + }; + + var checkBirthDayCode = function checkBirthDayCode(birDayCode) { + var yyyy = parseInt(birDayCode.substring(0, 4), 10); + var mm = parseInt(birDayCode.substring(4, 6), 10); + var dd = parseInt(birDayCode.substring(6), 10); + var xdata = new Date(yyyy, mm - 1, dd); + + if (xdata > new Date()) { + return false; // eslint-disable-next-line max-len + } else if (xdata.getFullYear() === yyyy && xdata.getMonth() === mm - 1 && xdata.getDate() 
=== dd) { + return true; + } + + return false; + }; + + var getParityBit = function getParityBit(idCardNo) { + var id17 = idCardNo.substring(0, 17); + var power = 0; + + for (var i = 0; i < 17; i++) { + power += parseInt(id17.charAt(i), 10) * parseInt(powers[i], 10); + } + + var mod = power % 11; + return parityBit[mod]; + }; + + var checkParityBit = function checkParityBit(idCardNo) { + return getParityBit(idCardNo) === idCardNo.charAt(17).toUpperCase(); + }; + + var check15IdCardNo = function check15IdCardNo(idCardNo) { + var check = /^[1-9]\d{7}((0[1-9])|(1[0-2]))((0[1-9])|([1-2][0-9])|(3[0-1]))\d{3}$/.test(idCardNo); + if (!check) return false; + var addressCode = idCardNo.substring(0, 2); + check = checkAddressCode(addressCode); + if (!check) return false; + var birDayCode = "19".concat(idCardNo.substring(6, 12)); + check = checkBirthDayCode(birDayCode); + if (!check) return false; + return true; + }; + + var check18IdCardNo = function check18IdCardNo(idCardNo) { + var check = /^[1-9]\d{5}[1-9]\d{3}((0[1-9])|(1[0-2]))((0[1-9])|([1-2][0-9])|(3[0-1]))\d{3}(\d|x|X)$/.test(idCardNo); + if (!check) return false; + var addressCode = idCardNo.substring(0, 2); + check = checkAddressCode(addressCode); + if (!check) return false; + var birDayCode = idCardNo.substring(6, 14); + check = checkBirthDayCode(birDayCode); + if (!check) return false; + return checkParityBit(idCardNo); + }; + + var checkIdCardNo = function checkIdCardNo(idCardNo) { + var check = /^\d{15}|(\d{17}(\d|x|X))$/.test(idCardNo); + if (!check) return false; + + if (idCardNo.length === 15) { + return check15IdCardNo(idCardNo); + } + + return check18IdCardNo(idCardNo); + }; + + return checkIdCardNo(str); + }, + 'zh-TW': function zhTW(str) { + var ALPHABET_CODES = { + A: 10, + B: 11, + C: 12, + D: 13, + E: 14, + F: 15, + G: 16, + H: 17, + I: 34, + J: 18, + K: 19, + L: 20, + M: 21, + N: 22, + O: 35, + P: 23, + Q: 24, + R: 25, + S: 26, + T: 27, + U: 28, + V: 29, + W: 32, + X: 30, + Y: 31, + Z: 33 + }; + var sanitized = str.trim().toUpperCase(); + if (!/^[A-Z][0-9]{9}$/.test(sanitized)) return false; + return Array.from(sanitized).reduce(function (sum, number, index) { + if (index === 0) { + var code = ALPHABET_CODES[number]; + return code % 10 * 9 + Math.floor(code / 10); + } + + if (index === 9) { + return (10 - sum % 10 - Number(number)) % 10 === 0; + } + + return sum + Number(number) * (9 - index); + }, 0); + } +}; +function isIdentityCard(str, locale) { + assertString(str); + + if (locale in validators) { + return validators[locale](str); + } else if (locale === 'any') { + for (var key in validators) { + // https://github.com/gotwarlost/istanbul/blob/master/ignoring-code-for-coverage.md#ignoring-code-for-coverage-purposes + // istanbul ignore else + if (validators.hasOwnProperty(key)) { + var validator = validators[key]; + + if (validator(str)) { + return true; + } + } + } + + return false; + } + + throw new Error("Invalid locale '".concat(locale, "'")); +} + +/** + * The most commonly used EAN standard is + * the thirteen-digit EAN-13, while the + * less commonly used 8-digit EAN-8 barcode was + * introduced for use on small packages. + * Also EAN/UCC-14 is used for Grouping of individual + * trade items above unit level(Intermediate, Carton or Pallet). 
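+ * For illustration, the EAN-13 "4006381333931" has a weighted sum of
+ * 4+0+0+18+3+24+1+9+3+9+9+9 = 89 over its first twelve digits (weights 1,3,1,3,...),
+ * so the expected check digit is (10 - 89 % 10) % 10 = 1, which matches the final
+ * digit and makes isEAN() below return true.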
+ * For more info about EAN-14 checkout: https://www.gtin.info/itf-14-barcodes/ + * EAN consists of: + * GS1 prefix, manufacturer code, product code and check digit + * Reference: https://en.wikipedia.org/wiki/International_Article_Number + * Reference: https://www.gtin.info/ + */ +/** + * Define EAN Lenghts; 8 for EAN-8; 13 for EAN-13; 14 for EAN-14 + * and Regular Expression for valid EANs (EAN-8, EAN-13, EAN-14), + * with exact numberic matching of 8 or 13 or 14 digits [0-9] + */ + +var LENGTH_EAN_8 = 8; +var LENGTH_EAN_14 = 14; +var validEanRegex = /^(\d{8}|\d{13}|\d{14})$/; +/** + * Get position weight given: + * EAN length and digit index/position + * + * @param {number} length + * @param {number} index + * @return {number} + */ + +function getPositionWeightThroughLengthAndIndex(length, index) { + if (length === LENGTH_EAN_8 || length === LENGTH_EAN_14) { + return index % 2 === 0 ? 3 : 1; + } + + return index % 2 === 0 ? 1 : 3; +} +/** + * Calculate EAN Check Digit + * Reference: https://en.wikipedia.org/wiki/International_Article_Number#Calculation_of_checksum_digit + * + * @param {string} ean + * @return {number} + */ + + +function calculateCheckDigit(ean) { + var checksum = ean.slice(0, -1).split('').map(function (_char, index) { + return Number(_char) * getPositionWeightThroughLengthAndIndex(ean.length, index); + }).reduce(function (acc, partialSum) { + return acc + partialSum; + }, 0); + var remainder = 10 - checksum % 10; + return remainder < 10 ? remainder : 0; +} +/** + * Check if string is valid EAN: + * Matches EAN-8/EAN-13/EAN-14 regex + * Has valid check digit. + * + * @param {string} str + * @return {boolean} + */ + + +function isEAN(str) { + assertString(str); + var actualCheckDigit = Number(str.slice(-1)); + return validEanRegex.test(str) && actualCheckDigit === calculateCheckDigit(str); +} + +var isin = /^[A-Z]{2}[0-9A-Z]{9}[0-9]$/; // this link details how the check digit is calculated: +// https://www.isin.org/isin-format/. it is a little bit +// odd in that it works with digits, not numbers. in order +// to make only one pass through the ISIN characters, the +// each alpha character is handled as 2 characters within +// the loop. + +function isISIN(str) { + assertString(str); + + if (!isin.test(str)) { + return false; + } + + var _double = true; + var sum = 0; // convert values + + for (var i = str.length - 2; i >= 0; i--) { + if (str[i] >= 'A' && str[i] <= 'Z') { + var value = str[i].charCodeAt(0) - 55; + var lo = value % 10; + var hi = Math.trunc(value / 10); // letters have two digits, so handle the low order + // and high order digits separately. + + for (var _i = 0, _arr = [lo, hi]; _i < _arr.length; _i++) { + var digit = _arr[_i]; + + if (_double) { + if (digit >= 5) { + sum += 1 + (digit - 5) * 2; + } else { + sum += digit * 2; + } + } else { + sum += digit; + } + + _double = !_double; + } + } else { + var _digit = str[i].charCodeAt(0) - '0'.charCodeAt(0); + + if (_double) { + if (_digit >= 5) { + sum += 1 + (_digit - 5) * 2; + } else { + sum += _digit * 2; + } + } else { + sum += _digit; + } + + _double = !_double; + } + } + + var check = Math.trunc((sum + 9) / 10) * 10 - sum; + return +str[str.length - 1] === check; +} + +var isbn10Maybe = /^(?:[0-9]{9}X|[0-9]{10})$/; +var isbn13Maybe = /^(?:[0-9]{13})$/; +var factor = [1, 3]; +function isISBN(str) { + var version = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : ''; + assertString(str); + version = String(version); + + if (!version) { + return isISBN(str, 10) || isISBN(str, 13); + } + + var sanitized = str.replace(/[\s-]+/g, ''); + var checksum = 0; + var i; + + if (version === '10') { + if (!isbn10Maybe.test(sanitized)) { + return false; + } + + for (i = 0; i < 9; i++) { + checksum += (i + 1) * sanitized.charAt(i); + } + + if (sanitized.charAt(9) === 'X') { + checksum += 10 * 10; + } else { + checksum += 10 * sanitized.charAt(9); + } + + if (checksum % 11 === 0) { + return !!sanitized; + } + } else if (version === '13') { + if (!isbn13Maybe.test(sanitized)) { + return false; + } + + for (i = 0; i < 12; i++) { + checksum += factor[i % 2] * sanitized.charAt(i); + } + + if (sanitized.charAt(12) - (10 - checksum % 10) % 10 === 0) { + return !!sanitized; + } + } + + return false; +} + +var issn = '^\\d{4}-?\\d{3}[\\dX]$'; +function isISSN(str) { + var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; + assertString(str); + var testIssn = issn; + testIssn = options.require_hyphen ? testIssn.replace('?', '') : testIssn; + testIssn = options.case_sensitive ? new RegExp(testIssn) : new RegExp(testIssn, 'i'); + + if (!testIssn.test(str)) { + return false; + } + + var digits = str.replace('-', '').toUpperCase(); + var checksum = 0; + + for (var i = 0; i < digits.length; i++) { + var digit = digits[i]; + checksum += (digit === 'X' ? 10 : +digit) * (8 - i); + } + + return checksum % 11 === 0; +} + +/** + * Algorithmic validation functions + * May be used as is or implemented in the workflow of other validators. + */ + +/* + * ISO 7064 validation function + * Called with a string of numbers (incl. check digit) + * to validate according to ISO 7064 (MOD 11, 10). + */ +function iso7064Check(str) { + var checkvalue = 10; + + for (var i = 0; i < str.length - 1; i++) { + checkvalue = (parseInt(str[i], 10) + checkvalue) % 10 === 0 ? 10 * 2 % 11 : (parseInt(str[i], 10) + checkvalue) % 10 * 2 % 11; + } + + checkvalue = checkvalue === 1 ? 0 : 11 - checkvalue; + return checkvalue === parseInt(str[10], 10); +} +/* + * Luhn (mod 10) validation function + * Called with a string of numbers (incl. check digit) + * to validate according to the Luhn algorithm. + */ + +function luhnCheck(str) { + var checksum = 0; + var second = false; + + for (var i = str.length - 1; i >= 0; i--) { + if (second) { + var product = parseInt(str[i], 10) * 2; + + if (product > 9) { + // sum digits of product and add to checksum + checksum += product.toString().split('').map(function (a) { + return parseInt(a, 10); + }).reduce(function (a, b) { + return a + b; + }, 0); + } else { + checksum += product; + } + } else { + checksum += parseInt(str[i], 10); + } + + second = !second; + } + + return checksum % 10 === 0; +} +/* + * Reverse TIN multiplication and summation helper function + * Called with an array of single-digit integers and a base multiplier + * to calculate the sum of the digits multiplied in reverse. + * Normally used in variations of MOD 11 algorithmic checks. + */ + +function reverseMultiplyAndSum(digits, base) { + var total = 0; + + for (var i = 0; i < digits.length; i++) { + total += digits[i] * (base - i); + } + + return total; +} +/* + * Verhoeff validation helper function + * Called with a string of numbers + * to validate according to the Verhoeff algorithm. 
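+ * For illustration, "2363" (payload 236 plus Verhoeff check digit 3) walks the
+ * d/p tables below down to 0 and validates, while "2364" ends on a non-zero
+ * value and is rejected.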
+ */ + +function verhoeffCheck(str) { + var d_table = [[0, 1, 2, 3, 4, 5, 6, 7, 8, 9], [1, 2, 3, 4, 0, 6, 7, 8, 9, 5], [2, 3, 4, 0, 1, 7, 8, 9, 5, 6], [3, 4, 0, 1, 2, 8, 9, 5, 6, 7], [4, 0, 1, 2, 3, 9, 5, 6, 7, 8], [5, 9, 8, 7, 6, 0, 4, 3, 2, 1], [6, 5, 9, 8, 7, 1, 0, 4, 3, 2], [7, 6, 5, 9, 8, 2, 1, 0, 4, 3], [8, 7, 6, 5, 9, 3, 2, 1, 0, 4], [9, 8, 7, 6, 5, 4, 3, 2, 1, 0]]; + var p_table = [[0, 1, 2, 3, 4, 5, 6, 7, 8, 9], [1, 5, 7, 6, 2, 8, 3, 0, 9, 4], [5, 8, 0, 3, 7, 9, 6, 1, 4, 2], [8, 9, 1, 6, 0, 4, 3, 5, 2, 7], [9, 4, 5, 3, 1, 2, 6, 8, 7, 0], [4, 2, 8, 6, 5, 7, 3, 9, 0, 1], [2, 7, 9, 3, 8, 0, 6, 4, 1, 5], [7, 0, 4, 6, 9, 1, 3, 2, 5, 8]]; // Copy (to prevent replacement) and reverse + + var str_copy = str.split('').reverse().join(''); + var checksum = 0; + + for (var i = 0; i < str_copy.length; i++) { + checksum = d_table[checksum][p_table[i % 8][parseInt(str_copy[i], 10)]]; + } + + return checksum === 0; +} + +/** + * TIN Validation + * Validates Tax Identification Numbers (TINs) from the US, EU member states and the United Kingdom. + * + * EU-UK: + * National TIN validity is calculated using public algorithms as made available by DG TAXUD. + * + * See `https://ec.europa.eu/taxation_customs/tin/specs/FS-TIN%20Algorithms-Public.docx` for more information. + * + * US: + * An Employer Identification Number (EIN), also known as a Federal Tax Identification Number, + * is used to identify a business entity. + * + * NOTES: + * - Prefix 47 is being reserved for future use + * - Prefixes 26, 27, 45, 46 and 47 were previously assigned by the Philadelphia campus. + * + * See `http://www.irs.gov/Businesses/Small-Businesses-&-Self-Employed/How-EINs-are-Assigned-and-Valid-EIN-Prefixes` + * for more information. + */ +// Locale functions + +/* + * bg-BG validation function + * (Edinen graždanski nomer (EGN/ЕГН), persons only) + * Checks if birth date (first six digits) is valid and calculates check (last) digit + */ + +function bgBgCheck(tin) { + // Extract full year, normalize month and check birth date validity + var century_year = tin.slice(0, 2); + var month = parseInt(tin.slice(2, 4), 10); + + if (month > 40) { + month -= 40; + century_year = "20".concat(century_year); + } else if (month > 20) { + month -= 20; + century_year = "18".concat(century_year); + } else { + century_year = "19".concat(century_year); + } + + if (month < 10) { + month = "0".concat(month); + } + + var date = "".concat(century_year, "/").concat(month, "/").concat(tin.slice(4, 6)); + + if (!isDate(date, 'YYYY/MM/DD')) { + return false; + } // split digits into an array for further processing + + + var digits = tin.split('').map(function (a) { + return parseInt(a, 10); + }); // Calculate checksum by multiplying digits with fixed values + + var multip_lookup = [2, 4, 8, 5, 10, 9, 7, 3, 6]; + var checksum = 0; + + for (var i = 0; i < multip_lookup.length; i++) { + checksum += digits[i] * multip_lookup[i]; + } + + checksum = checksum % 11 === 10 ? 
0 : checksum % 11; + return checksum === digits[9]; +} +/* + * cs-CZ validation function + * (Rodné číslo (RČ), persons only) + * Checks if birth date (first six digits) is valid and divisibility by 11 + * Material not in DG TAXUD document sourced from: + * -`https://lorenc.info/3MA381/overeni-spravnosti-rodneho-cisla.htm` + * -`https://www.mvcr.cz/clanek/rady-a-sluzby-dokumenty-rodne-cislo.aspx` + */ + + +function csCzCheck(tin) { + tin = tin.replace(/\W/, ''); // Extract full year from TIN length + + var full_year = parseInt(tin.slice(0, 2), 10); + + if (tin.length === 10) { + if (full_year < 54) { + full_year = "20".concat(full_year); + } else { + full_year = "19".concat(full_year); + } + } else { + if (tin.slice(6) === '000') { + return false; + } // Three-zero serial not assigned before 1954 + + + if (full_year < 54) { + full_year = "19".concat(full_year); + } else { + return false; // No 18XX years seen in any of the resources + } + } // Add missing zero if needed + + + if (full_year.length === 3) { + full_year = [full_year.slice(0, 2), '0', full_year.slice(2)].join(''); + } // Extract month from TIN and normalize + + + var month = parseInt(tin.slice(2, 4), 10); + + if (month > 50) { + month -= 50; + } + + if (month > 20) { + // Month-plus-twenty was only introduced in 2004 + if (parseInt(full_year, 10) < 2004) { + return false; + } + + month -= 20; + } + + if (month < 10) { + month = "0".concat(month); + } // Check date validity + + + var date = "".concat(full_year, "/").concat(month, "/").concat(tin.slice(4, 6)); + + if (!isDate(date, 'YYYY/MM/DD')) { + return false; + } // Verify divisibility by 11 + + + if (tin.length === 10) { + if (parseInt(tin, 10) % 11 !== 0) { + // Some numbers up to and including 1985 are still valid if + // check (last) digit equals 0 and modulo of first 9 digits equals 10 + var checkdigit = parseInt(tin.slice(0, 9), 10) % 11; + + if (parseInt(full_year, 10) < 1986 && checkdigit === 10) { + if (parseInt(tin.slice(9), 10) !== 0) { + return false; + } + } else { + return false; + } + } + } + + return true; +} +/* + * de-AT validation function + * (Abgabenkontonummer, persons/entities) + * Verify TIN validity by calling luhnCheck() + */ + + +function deAtCheck(tin) { + return luhnCheck(tin); +} +/* + * de-DE validation function + * (Steueridentifikationsnummer (Steuer-IdNr.), persons only) + * Tests for single duplicate/triplicate value, then calculates ISO 7064 check (last) digit + * Partial implementation of spec (same result with both algorithms always) + */ + + +function deDeCheck(tin) { + // Split digits into an array for further processing + var digits = tin.split('').map(function (a) { + return parseInt(a, 10); + }); // Fill array with strings of number positions + + var occurences = []; + + for (var i = 0; i < digits.length - 1; i++) { + occurences.push(''); + + for (var j = 0; j < digits.length - 1; j++) { + if (digits[i] === digits[j]) { + occurences[i] += j; + } + } + } // Remove digits with one occurence and test for only one duplicate/triplicate + + + occurences = occurences.filter(function (a) { + return a.length > 1; + }); + + if (occurences.length !== 2 && occurences.length !== 3) { + return false; + } // In case of triplicate value only two digits are allowed next to each other + + + if (occurences[0].length === 3) { + var trip_locations = occurences[0].split('').map(function (a) { + return parseInt(a, 10); + }); + var recurrent = 0; // Amount of neighbour occurences + + for (var _i = 0; _i < trip_locations.length - 1; _i++) { + if 
(trip_locations[_i] + 1 === trip_locations[_i + 1]) { + recurrent += 1; + } + } + + if (recurrent === 2) { + return false; + } + } + + return iso7064Check(tin); +} +/* + * dk-DK validation function + * (CPR-nummer (personnummer), persons only) + * Checks if birth date (first six digits) is valid and assigned to century (seventh) digit, + * and calculates check (last) digit + */ + + +function dkDkCheck(tin) { + tin = tin.replace(/\W/, ''); // Extract year, check if valid for given century digit and add century + + var year = parseInt(tin.slice(4, 6), 10); + var century_digit = tin.slice(6, 7); + + switch (century_digit) { + case '0': + case '1': + case '2': + case '3': + year = "19".concat(year); + break; + + case '4': + case '9': + if (year < 37) { + year = "20".concat(year); + } else { + year = "19".concat(year); + } + + break; + + default: + if (year < 37) { + year = "20".concat(year); + } else if (year > 58) { + year = "18".concat(year); + } else { + return false; + } + + break; + } // Add missing zero if needed + + + if (year.length === 3) { + year = [year.slice(0, 2), '0', year.slice(2)].join(''); + } // Check date validity + + + var date = "".concat(year, "/").concat(tin.slice(2, 4), "/").concat(tin.slice(0, 2)); + + if (!isDate(date, 'YYYY/MM/DD')) { + return false; + } // Split digits into an array for further processing + + + var digits = tin.split('').map(function (a) { + return parseInt(a, 10); + }); + var checksum = 0; + var weight = 4; // Multiply by weight and add to checksum + + for (var i = 0; i < 9; i++) { + checksum += digits[i] * weight; + weight -= 1; + + if (weight === 1) { + weight = 7; + } + } + + checksum %= 11; + + if (checksum === 1) { + return false; + } + + return checksum === 0 ? digits[9] === 0 : digits[9] === 11 - checksum; +} +/* + * el-CY validation function + * (Arithmos Forologikou Mitroou (AFM/ΑΦΜ), persons only) + * Verify TIN validity by calculating ASCII value of check (last) character + */ + + +function elCyCheck(tin) { + // split digits into an array for further processing + var digits = tin.slice(0, 8).split('').map(function (a) { + return parseInt(a, 10); + }); + var checksum = 0; // add digits in even places + + for (var i = 1; i < digits.length; i += 2) { + checksum += digits[i]; + } // add digits in odd places + + + for (var _i2 = 0; _i2 < digits.length; _i2 += 2) { + if (digits[_i2] < 2) { + checksum += 1 - digits[_i2]; + } else { + checksum += 2 * (digits[_i2] - 2) + 5; + + if (digits[_i2] > 4) { + checksum += 2; + } + } + } + + return String.fromCharCode(checksum % 26 + 65) === tin.charAt(8); +} +/* + * el-GR validation function + * (Arithmos Forologikou Mitroou (AFM/ΑΦΜ), persons/entities) + * Verify TIN validity by calculating check (last) digit + * Algorithm not in DG TAXUD document- sourced from: + * - `http://epixeirisi.gr/%CE%9A%CE%A1%CE%99%CE%A3%CE%99%CE%9C%CE%91-%CE%98%CE%95%CE%9C%CE%91%CE%A4%CE%91-%CE%A6%CE%9F%CE%A1%CE%9F%CE%9B%CE%9F%CE%93%CE%99%CE%91%CE%A3-%CE%9A%CE%91%CE%99-%CE%9B%CE%9F%CE%93%CE%99%CE%A3%CE%A4%CE%99%CE%9A%CE%97%CE%A3/23791/%CE%91%CF%81%CE%B9%CE%B8%CE%BC%CF%8C%CF%82-%CE%A6%CE%BF%CF%81%CE%BF%CE%BB%CE%BF%CE%B3%CE%B9%CE%BA%CE%BF%CF%8D-%CE%9C%CE%B7%CF%84%CF%81%CF%8E%CE%BF%CF%85` + */ + + +function elGrCheck(tin) { + // split digits into an array for further processing + var digits = tin.split('').map(function (a) { + return parseInt(a, 10); + }); + var checksum = 0; + + for (var i = 0; i < 8; i++) { + checksum += digits[i] * Math.pow(2, 8 - i); + } + + return checksum % 11 % 10 === digits[8]; +} +/* + * en-GB 
validation function (should go here if needed) + * (National Insurance Number (NINO) or Unique Taxpayer Reference (UTR), + * persons/entities respectively) + */ + +/* + * en-IE validation function + * (Personal Public Service Number (PPS No), persons only) + * Verify TIN validity by calculating check (second to last) character + */ + + +function enIeCheck(tin) { + var checksum = reverseMultiplyAndSum(tin.split('').slice(0, 7).map(function (a) { + return parseInt(a, 10); + }), 8); + + if (tin.length === 9 && tin[8] !== 'W') { + checksum += (tin[8].charCodeAt(0) - 64) * 9; + } + + checksum %= 23; + + if (checksum === 0) { + return tin[7].toUpperCase() === 'W'; + } + + return tin[7].toUpperCase() === String.fromCharCode(64 + checksum); +} // Valid US IRS campus prefixes + + +var enUsCampusPrefix = { + andover: ['10', '12'], + atlanta: ['60', '67'], + austin: ['50', '53'], + brookhaven: ['01', '02', '03', '04', '05', '06', '11', '13', '14', '16', '21', '22', '23', '25', '34', '51', '52', '54', '55', '56', '57', '58', '59', '65'], + cincinnati: ['30', '32', '35', '36', '37', '38', '61'], + fresno: ['15', '24'], + internet: ['20', '26', '27', '45', '46', '47'], + kansas: ['40', '44'], + memphis: ['94', '95'], + ogden: ['80', '90'], + philadelphia: ['33', '39', '41', '42', '43', '46', '48', '62', '63', '64', '66', '68', '71', '72', '73', '74', '75', '76', '77', '81', '82', '83', '84', '85', '86', '87', '88', '91', '92', '93', '98', '99'], + sba: ['31'] +}; // Return an array of all US IRS campus prefixes + +function enUsGetPrefixes() { + var prefixes = []; + + for (var location in enUsCampusPrefix) { + // https://github.com/gotwarlost/istanbul/blob/master/ignoring-code-for-coverage.md#ignoring-code-for-coverage-purposes + // istanbul ignore else + if (enUsCampusPrefix.hasOwnProperty(location)) { + prefixes.push.apply(prefixes, _toConsumableArray(enUsCampusPrefix[location])); + } + } + + return prefixes; +} +/* + * en-US validation function + * Verify that the TIN starts with a valid IRS campus prefix + */ + + +function enUsCheck(tin) { + return enUsGetPrefixes().indexOf(tin.substr(0, 2)) !== -1; +} +/* + * es-ES validation function + * (Documento Nacional de Identidad (DNI) + * or Número de Identificación de Extranjero (NIE), persons only) + * Verify TIN validity by calculating check (last) character + */ + + +function esEsCheck(tin) { + // Split characters into an array for further processing + var chars = tin.toUpperCase().split(''); // Replace initial letter if needed + + if (isNaN(parseInt(chars[0], 10)) && chars.length > 1) { + var lead_replace = 0; + + switch (chars[0]) { + case 'Y': + lead_replace = 1; + break; + + case 'Z': + lead_replace = 2; + break; + + default: + } + + chars.splice(0, 1, lead_replace); // Fill with zeros if smaller than proper + } else { + while (chars.length < 9) { + chars.unshift(0); + } + } // Calculate checksum and check according to lookup + + + var lookup = ['T', 'R', 'W', 'A', 'G', 'M', 'Y', 'F', 'P', 'D', 'X', 'B', 'N', 'J', 'Z', 'S', 'Q', 'V', 'H', 'L', 'C', 'K', 'E']; + chars = chars.join(''); + var checksum = parseInt(chars.slice(0, 8), 10) % 23; + return chars[8] === lookup[checksum]; +} +/* + * et-EE validation function + * (Isikukood (IK), persons only) + * Checks if birth date (century digit and six following) is valid and calculates check (last) digit + * Material not in DG TAXUD document sourced from: + * - `https://www.oecd.org/tax/automatic-exchange/crs-implementation-and-assistance/tax-identification-numbers/Estonia-TIN.pdf` + */ + + +function 
etEeCheck(tin) { + // Extract year and add century + var full_year = tin.slice(1, 3); + var century_digit = tin.slice(0, 1); + + switch (century_digit) { + case '1': + case '2': + full_year = "18".concat(full_year); + break; + + case '3': + case '4': + full_year = "19".concat(full_year); + break; + + default: + full_year = "20".concat(full_year); + break; + } // Check date validity + + + var date = "".concat(full_year, "/").concat(tin.slice(3, 5), "/").concat(tin.slice(5, 7)); + + if (!isDate(date, 'YYYY/MM/DD')) { + return false; + } // Split digits into an array for further processing + + + var digits = tin.split('').map(function (a) { + return parseInt(a, 10); + }); + var checksum = 0; + var weight = 1; // Multiply by weight and add to checksum + + for (var i = 0; i < 10; i++) { + checksum += digits[i] * weight; + weight += 1; + + if (weight === 10) { + weight = 1; + } + } // Do again if modulo 11 of checksum is 10 + + + if (checksum % 11 === 10) { + checksum = 0; + weight = 3; + + for (var _i3 = 0; _i3 < 10; _i3++) { + checksum += digits[_i3] * weight; + weight += 1; + + if (weight === 10) { + weight = 1; + } + } + + if (checksum % 11 === 10) { + return digits[10] === 0; + } + } + + return checksum % 11 === digits[10]; +} +/* + * fi-FI validation function + * (Henkilötunnus (HETU), persons only) + * Checks if birth date (first six digits plus century symbol) is valid + * and calculates check (last) digit + */ + + +function fiFiCheck(tin) { + // Extract year and add century + var full_year = tin.slice(4, 6); + var century_symbol = tin.slice(6, 7); + + switch (century_symbol) { + case '+': + full_year = "18".concat(full_year); + break; + + case '-': + full_year = "19".concat(full_year); + break; + + default: + full_year = "20".concat(full_year); + break; + } // Check date validity + + + var date = "".concat(full_year, "/").concat(tin.slice(2, 4), "/").concat(tin.slice(0, 2)); + + if (!isDate(date, 'YYYY/MM/DD')) { + return false; + } // Calculate check character + + + var checksum = parseInt(tin.slice(0, 6) + tin.slice(7, 10), 10) % 31; + + if (checksum < 10) { + return checksum === parseInt(tin.slice(10), 10); + } + + checksum -= 10; + var letters_lookup = ['A', 'B', 'C', 'D', 'E', 'F', 'H', 'J', 'K', 'L', 'M', 'N', 'P', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y']; + return letters_lookup[checksum] === tin.slice(10); +} +/* + * fr/nl-BE validation function + * (Numéro national (N.N.), persons only) + * Checks if birth date (first six digits) is valid and calculates check (last two) digits + */ + + +function frBeCheck(tin) { + // Zero month/day value is acceptable + if (tin.slice(2, 4) !== '00' || tin.slice(4, 6) !== '00') { + // Extract date from first six digits of TIN + var date = "".concat(tin.slice(0, 2), "/").concat(tin.slice(2, 4), "/").concat(tin.slice(4, 6)); + + if (!isDate(date, 'YY/MM/DD')) { + return false; + } + } + + var checksum = 97 - parseInt(tin.slice(0, 9), 10) % 97; + var checkdigits = parseInt(tin.slice(9, 11), 10); + + if (checksum !== checkdigits) { + checksum = 97 - parseInt("2".concat(tin.slice(0, 9)), 10) % 97; + + if (checksum !== checkdigits) { + return false; + } + } + + return true; +} +/* + * fr-FR validation function + * (Numéro fiscal de référence (numéro SPI), persons only) + * Verify TIN validity by calculating check (last three) digits + */ + + +function frFrCheck(tin) { + tin = tin.replace(/\s/g, ''); + var checksum = parseInt(tin.slice(0, 10), 10) % 511; + var checkdigits = parseInt(tin.slice(10, 13), 10); + return checksum === checkdigits; +} +/* + * 
fr/lb-LU validation function + * (numéro d’identification personnelle, persons only) + * Verify birth date validity and run Luhn and Verhoeff checks + */ + + +function frLuCheck(tin) { + // Extract date and check validity + var date = "".concat(tin.slice(0, 4), "/").concat(tin.slice(4, 6), "/").concat(tin.slice(6, 8)); + + if (!isDate(date, 'YYYY/MM/DD')) { + return false; + } // Run Luhn check + + + if (!luhnCheck(tin.slice(0, 12))) { + return false; + } // Remove Luhn check digit and run Verhoeff check + + + return verhoeffCheck("".concat(tin.slice(0, 11)).concat(tin[12])); +} +/* + * hr-HR validation function + * (Osobni identifikacijski broj (OIB), persons/entities) + * Verify TIN validity by calling iso7064Check(digits) + */ + + +function hrHrCheck(tin) { + return iso7064Check(tin); +} +/* + * hu-HU validation function + * (Adóazonosító jel, persons only) + * Verify TIN validity by calculating check (last) digit + */ + + +function huHuCheck(tin) { + // split digits into an array for further processing + var digits = tin.split('').map(function (a) { + return parseInt(a, 10); + }); + var checksum = 8; + + for (var i = 1; i < 9; i++) { + checksum += digits[i] * (i + 1); + } + + return checksum % 11 === digits[9]; +} +/* + * lt-LT validation function (should go here if needed) + * (Asmens kodas, persons/entities respectively) + * Current validation check is alias of etEeCheck- same format applies + */ + +/* + * it-IT first/last name validity check + * Accepts it-IT TIN-encoded names as a three-element character array and checks their validity + * Due to lack of clarity between resources ("Are only Italian consonants used? + * What happens if a person has X in their name?" etc.) only two test conditions + * have been implemented: + * Vowels may only be followed by other vowels or an X character + * and X characters after vowels may only be followed by other X characters. + */ + + +function itItNameCheck(name) { + // true at the first occurence of a vowel + var vowelflag = false; // true at the first occurence of an X AFTER vowel + // (to properly handle last names with X as consonant) + + var xflag = false; + + for (var i = 0; i < 3; i++) { + if (!vowelflag && /[AEIOU]/.test(name[i])) { + vowelflag = true; + } else if (!xflag && vowelflag && name[i] === 'X') { + xflag = true; + } else if (i > 0) { + if (vowelflag && !xflag) { + if (!/[AEIOU]/.test(name[i])) { + return false; + } + } + + if (xflag) { + if (!/X/.test(name[i])) { + return false; + } + } + } + } + + return true; +} +/* + * it-IT validation function + * (Codice fiscale (TIN-IT), persons only) + * Verify name, birth date and codice catastale validity + * and calculate check character. 
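+ * For illustration, the 16 characters encode: 3 surname letters, 3 given-name
+ * letters, a 2-digit birth year, 1 month letter (A=01 ... T=12 per month_replace
+ * below), a 2-digit birth day (+40 for women), a 4-character cadastral (place)
+ * code and a final check letter computed from the preceding 15 characters.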
+ * Material not in DG-TAXUD document sourced from: + * `https://en.wikipedia.org/wiki/Italian_fiscal_code` + */ + + +function itItCheck(tin) { + // Capitalize and split characters into an array for further processing + var chars = tin.toUpperCase().split(''); // Check first and last name validity calling itItNameCheck() + + if (!itItNameCheck(chars.slice(0, 3))) { + return false; + } + + if (!itItNameCheck(chars.slice(3, 6))) { + return false; + } // Convert letters in number spaces back to numbers if any + + + var number_locations = [6, 7, 9, 10, 12, 13, 14]; + var number_replace = { + L: '0', + M: '1', + N: '2', + P: '3', + Q: '4', + R: '5', + S: '6', + T: '7', + U: '8', + V: '9' + }; + + for (var _i4 = 0, _number_locations = number_locations; _i4 < _number_locations.length; _i4++) { + var i = _number_locations[_i4]; + + if (chars[i] in number_replace) { + chars.splice(i, 1, number_replace[chars[i]]); + } + } // Extract month and day, and check date validity + + + var month_replace = { + A: '01', + B: '02', + C: '03', + D: '04', + E: '05', + H: '06', + L: '07', + M: '08', + P: '09', + R: '10', + S: '11', + T: '12' + }; + var month = month_replace[chars[8]]; + var day = parseInt(chars[9] + chars[10], 10); + + if (day > 40) { + day -= 40; + } + + if (day < 10) { + day = "0".concat(day); + } + + var date = "".concat(chars[6]).concat(chars[7], "/").concat(month, "/").concat(day); + + if (!isDate(date, 'YY/MM/DD')) { + return false; + } // Calculate check character by adding up even and odd characters as numbers + + + var checksum = 0; + + for (var _i5 = 1; _i5 < chars.length - 1; _i5 += 2) { + var char_to_int = parseInt(chars[_i5], 10); + + if (isNaN(char_to_int)) { + char_to_int = chars[_i5].charCodeAt(0) - 65; + } + + checksum += char_to_int; + } + + var odd_convert = { + // Maps of characters at odd places + A: 1, + B: 0, + C: 5, + D: 7, + E: 9, + F: 13, + G: 15, + H: 17, + I: 19, + J: 21, + K: 2, + L: 4, + M: 18, + N: 20, + O: 11, + P: 3, + Q: 6, + R: 8, + S: 12, + T: 14, + U: 16, + V: 10, + W: 22, + X: 25, + Y: 24, + Z: 23, + 0: 1, + 1: 0 + }; + + for (var _i6 = 0; _i6 < chars.length - 1; _i6 += 2) { + var _char_to_int = 0; + + if (chars[_i6] in odd_convert) { + _char_to_int = odd_convert[chars[_i6]]; + } else { + var multiplier = parseInt(chars[_i6], 10); + _char_to_int = 2 * multiplier + 1; + + if (multiplier > 4) { + _char_to_int += 2; + } + } + + checksum += _char_to_int; + } + + if (String.fromCharCode(65 + checksum % 26) !== chars[15]) { + return false; + } + + return true; +} +/* + * lv-LV validation function + * (Personas kods (PK), persons only) + * Check validity of birth date and calculate check (last) digit + * Support only for old format numbers (not starting with '32', issued before 2017/07/01) + * Material not in DG TAXUD document sourced from: + * `https://boot.ritakafija.lv/forums/index.php?/topic/88314-personas-koda-algoritms-%C4%8Deksumma/` + */ + + +function lvLvCheck(tin) { + tin = tin.replace(/\W/, ''); // Extract date from TIN + + var day = tin.slice(0, 2); + + if (day !== '32') { + // No date/checksum check if new format + var month = tin.slice(2, 4); + + if (month !== '00') { + // No date check if unknown month + var full_year = tin.slice(4, 6); + + switch (tin[6]) { + case '0': + full_year = "18".concat(full_year); + break; + + case '1': + full_year = "19".concat(full_year); + break; + + default: + full_year = "20".concat(full_year); + break; + } // Check date validity + + + var date = "".concat(full_year, "/").concat(tin.slice(2, 4), "/").concat(day); + + if 
(!isDate(date, 'YYYY/MM/DD')) { + return false; + } + } // Calculate check digit + + + var checksum = 1101; + var multip_lookup = [1, 6, 3, 7, 9, 10, 5, 8, 4, 2]; + + for (var i = 0; i < tin.length - 1; i++) { + checksum -= parseInt(tin[i], 10) * multip_lookup[i]; + } + + return parseInt(tin[10], 10) === checksum % 11; + } + + return true; +} +/* + * mt-MT validation function + * (Identity Card Number or Unique Taxpayer Reference, persons/entities) + * Verify Identity Card Number structure (no other tests found) + */ + + +function mtMtCheck(tin) { + if (tin.length !== 9) { + // No tests for UTR + var chars = tin.toUpperCase().split(''); // Fill with zeros if smaller than proper + + while (chars.length < 8) { + chars.unshift(0); + } // Validate format according to last character + + + switch (tin[7]) { + case 'A': + case 'P': + if (parseInt(chars[6], 10) === 0) { + return false; + } + + break; + + default: + { + var first_part = parseInt(chars.join('').slice(0, 5), 10); + + if (first_part > 32000) { + return false; + } + + var second_part = parseInt(chars.join('').slice(5, 7), 10); + + if (first_part === second_part) { + return false; + } + } + } + } + + return true; +} +/* + * nl-NL validation function + * (Burgerservicenummer (BSN) or Rechtspersonen Samenwerkingsverbanden Informatie Nummer (RSIN), + * persons/entities respectively) + * Verify TIN validity by calculating check (last) digit (variant of MOD 11) + */ + + +function nlNlCheck(tin) { + return reverseMultiplyAndSum(tin.split('').slice(0, 8).map(function (a) { + return parseInt(a, 10); + }), 9) % 11 === parseInt(tin[8], 10); +} +/* + * pl-PL validation function + * (Powszechny Elektroniczny System Ewidencji Ludności (PESEL) + * or Numer identyfikacji podatkowej (NIP), persons/entities) + * Verify TIN validity by validating birth date (PESEL) and calculating check (last) digit + */ + + +function plPlCheck(tin) { + // NIP + if (tin.length === 10) { + // Calculate last digit by multiplying with lookup + var lookup = [6, 5, 7, 2, 3, 4, 5, 6, 7]; + var _checksum = 0; + + for (var i = 0; i < lookup.length; i++) { + _checksum += parseInt(tin[i], 10) * lookup[i]; + } + + _checksum %= 11; + + if (_checksum === 10) { + return false; + } + + return _checksum === parseInt(tin[9], 10); + } // PESEL + // Extract full year using month + + + var full_year = tin.slice(0, 2); + var month = parseInt(tin.slice(2, 4), 10); + + if (month > 80) { + full_year = "18".concat(full_year); + month -= 80; + } else if (month > 60) { + full_year = "22".concat(full_year); + month -= 60; + } else if (month > 40) { + full_year = "21".concat(full_year); + month -= 40; + } else if (month > 20) { + full_year = "20".concat(full_year); + month -= 20; + } else { + full_year = "19".concat(full_year); + } // Add leading zero to month if needed + + + if (month < 10) { + month = "0".concat(month); + } // Check date validity + + + var date = "".concat(full_year, "/").concat(month, "/").concat(tin.slice(4, 6)); + + if (!isDate(date, 'YYYY/MM/DD')) { + return false; + } // Calculate last digit by mulitplying with odd one-digit numbers except 5 + + + var checksum = 0; + var multiplier = 1; + + for (var _i7 = 0; _i7 < tin.length - 1; _i7++) { + checksum += parseInt(tin[_i7], 10) * multiplier % 10; + multiplier += 2; + + if (multiplier > 10) { + multiplier = 1; + } else if (multiplier === 5) { + multiplier += 2; + } + } + + checksum = 10 - checksum % 10; + return checksum === parseInt(tin[10], 10); +} +/* +* pt-BR validation function +* (Cadastro de Pessoas Físicas (CPF, persons) 
+* Cadastro Nacional de Pessoas Jurídicas (CNPJ, entities) +* Both inputs will be validated +*/ + + +function ptBrCheck(tin) { + if (tin.length === 11) { + var _sum; + + var remainder; + _sum = 0; + if ( // Reject known invalid CPFs + tin === '11111111111' || tin === '22222222222' || tin === '33333333333' || tin === '44444444444' || tin === '55555555555' || tin === '66666666666' || tin === '77777777777' || tin === '88888888888' || tin === '99999999999' || tin === '00000000000') return false; + + for (var i = 1; i <= 9; i++) { + _sum += parseInt(tin.substring(i - 1, i), 10) * (11 - i); + } + + remainder = _sum * 10 % 11; + if (remainder === 10) remainder = 0; + if (remainder !== parseInt(tin.substring(9, 10), 10)) return false; + _sum = 0; + + for (var _i8 = 1; _i8 <= 10; _i8++) { + _sum += parseInt(tin.substring(_i8 - 1, _i8), 10) * (12 - _i8); + } + + remainder = _sum * 10 % 11; + if (remainder === 10) remainder = 0; + if (remainder !== parseInt(tin.substring(10, 11), 10)) return false; + return true; + } + + if ( // Reject know invalid CNPJs + tin === '00000000000000' || tin === '11111111111111' || tin === '22222222222222' || tin === '33333333333333' || tin === '44444444444444' || tin === '55555555555555' || tin === '66666666666666' || tin === '77777777777777' || tin === '88888888888888' || tin === '99999999999999') { + return false; + } + + var length = tin.length - 2; + var identifiers = tin.substring(0, length); + var verificators = tin.substring(length); + var sum = 0; + var pos = length - 7; + + for (var _i9 = length; _i9 >= 1; _i9--) { + sum += identifiers.charAt(length - _i9) * pos; + pos -= 1; + + if (pos < 2) { + pos = 9; + } + } + + var result = sum % 11 < 2 ? 0 : 11 - sum % 11; + + if (result !== parseInt(verificators.charAt(0), 10)) { + return false; + } + + length += 1; + identifiers = tin.substring(0, length); + sum = 0; + pos = length - 7; + + for (var _i10 = length; _i10 >= 1; _i10--) { + sum += identifiers.charAt(length - _i10) * pos; + pos -= 1; + + if (pos < 2) { + pos = 9; + } + } + + result = sum % 11 < 2 ? 
0 : 11 - sum % 11; + + if (result !== parseInt(verificators.charAt(1), 10)) { + return false; + } + + return true; +} +/* + * pt-PT validation function + * (Número de identificação fiscal (NIF), persons/entities) + * Verify TIN validity by calculating check (last) digit (variant of MOD 11) + */ + + +function ptPtCheck(tin) { + var checksum = 11 - reverseMultiplyAndSum(tin.split('').slice(0, 8).map(function (a) { + return parseInt(a, 10); + }), 9) % 11; + + if (checksum > 9) { + return parseInt(tin[8], 10) === 0; + } + + return checksum === parseInt(tin[8], 10); +} +/* + * ro-RO validation function + * (Cod Numeric Personal (CNP) or Cod de înregistrare fiscală (CIF), + * persons only) + * Verify CNP validity by calculating check (last) digit (test not found for CIF) + * Material not in DG TAXUD document sourced from: + * `https://en.wikipedia.org/wiki/National_identification_number#Romania` + */ + + +function roRoCheck(tin) { + if (tin.slice(0, 4) !== '9000') { + // No test found for this format + // Extract full year using century digit if possible + var full_year = tin.slice(1, 3); + + switch (tin[0]) { + case '1': + case '2': + full_year = "19".concat(full_year); + break; + + case '3': + case '4': + full_year = "18".concat(full_year); + break; + + case '5': + case '6': + full_year = "20".concat(full_year); + break; + + default: + } // Check date validity + + + var date = "".concat(full_year, "/").concat(tin.slice(3, 5), "/").concat(tin.slice(5, 7)); + + if (date.length === 8) { + if (!isDate(date, 'YY/MM/DD')) { + return false; + } + } else if (!isDate(date, 'YYYY/MM/DD')) { + return false; + } // Calculate check digit + + + var digits = tin.split('').map(function (a) { + return parseInt(a, 10); + }); + var multipliers = [2, 7, 9, 1, 4, 6, 3, 5, 8, 2, 7, 9]; + var checksum = 0; + + for (var i = 0; i < multipliers.length; i++) { + checksum += digits[i] * multipliers[i]; + } + + if (checksum % 11 === 10) { + return digits[12] === 1; + } + + return digits[12] === checksum % 11; + } + + return true; +} +/* + * sk-SK validation function + * (Rodné číslo (RČ) or bezvýznamové identifikačné číslo (BIČ), persons only) + * Checks validity of pre-1954 birth numbers (rodné číslo) only + * Due to the introduction of the pseudo-random BIČ it is not possible to test + * post-1954 birth numbers without knowing whether they are BIČ or RČ beforehand + */ + + +function skSkCheck(tin) { + if (tin.length === 9) { + tin = tin.replace(/\W/, ''); + + if (tin.slice(6) === '000') { + return false; + } // Three-zero serial not assigned before 1954 + // Extract full year from TIN length + + + var full_year = parseInt(tin.slice(0, 2), 10); + + if (full_year > 53) { + return false; + } + + if (full_year < 10) { + full_year = "190".concat(full_year); + } else { + full_year = "19".concat(full_year); + } // Extract month from TIN and normalize + + + var month = parseInt(tin.slice(2, 4), 10); + + if (month > 50) { + month -= 50; + } + + if (month < 10) { + month = "0".concat(month); + } // Check date validity + + + var date = "".concat(full_year, "/").concat(month, "/").concat(tin.slice(4, 6)); + + if (!isDate(date, 'YYYY/MM/DD')) { + return false; + } + } + + return true; +} +/* + * sl-SI validation function + * (Davčna številka, persons/entities) + * Verify TIN validity by calculating check (last) digit (variant of MOD 11) + */ + + +function slSiCheck(tin) { + var checksum = 11 - reverseMultiplyAndSum(tin.split('').slice(0, 7).map(function (a) { + return parseInt(a, 10); + }), 8) % 11; + + if (checksum === 10) { + 
return parseInt(tin[7], 10) === 0; + } + + return checksum === parseInt(tin[7], 10); +} +/* + * sv-SE validation function + * (Personnummer or samordningsnummer, persons only) + * Checks validity of birth date and calls luhnCheck() to validate check (last) digit + */ + + +function svSeCheck(tin) { + // Make copy of TIN and normalize to two-digit year form + var tin_copy = tin.slice(0); + + if (tin.length > 11) { + tin_copy = tin_copy.slice(2); + } // Extract date of birth + + + var full_year = ''; + var month = tin_copy.slice(2, 4); + var day = parseInt(tin_copy.slice(4, 6), 10); + + if (tin.length > 11) { + full_year = tin.slice(0, 4); + } else { + full_year = tin.slice(0, 2); + + if (tin.length === 11 && day < 60) { + // Extract full year from centenarian symbol + // Should work just fine until year 10000 or so + var current_year = new Date().getFullYear().toString(); + var current_century = parseInt(current_year.slice(0, 2), 10); + current_year = parseInt(current_year, 10); + + if (tin[6] === '-') { + if (parseInt("".concat(current_century).concat(full_year), 10) > current_year) { + full_year = "".concat(current_century - 1).concat(full_year); + } else { + full_year = "".concat(current_century).concat(full_year); + } + } else { + full_year = "".concat(current_century - 1).concat(full_year); + + if (current_year - parseInt(full_year, 10) < 100) { + return false; + } + } + } + } // Normalize day and check date validity + + + if (day > 60) { + day -= 60; + } + + if (day < 10) { + day = "0".concat(day); + } + + var date = "".concat(full_year, "/").concat(month, "/").concat(day); + + if (date.length === 8) { + if (!isDate(date, 'YY/MM/DD')) { + return false; + } + } else if (!isDate(date, 'YYYY/MM/DD')) { + return false; + } + + return luhnCheck(tin.replace(/\W/, '')); +} // Locale lookup objects + +/* + * Tax id regex formats for various locales + * + * Where not explicitly specified in DG-TAXUD document both + * uppercase and lowercase letters are acceptable. 
+ */ + + +var taxIdFormat = { + 'bg-BG': /^\d{10}$/, + 'cs-CZ': /^\d{6}\/{0,1}\d{3,4}$/, + 'de-AT': /^\d{9}$/, + 'de-DE': /^[1-9]\d{10}$/, + 'dk-DK': /^\d{6}-{0,1}\d{4}$/, + 'el-CY': /^[09]\d{7}[A-Z]$/, + 'el-GR': /^([0-4]|[7-9])\d{8}$/, + 'en-GB': /^\d{10}$|^(?!GB|NK|TN|ZZ)(?![DFIQUV])[A-Z](?![DFIQUVO])[A-Z]\d{6}[ABCD ]$/i, + 'en-IE': /^\d{7}[A-W][A-IW]{0,1}$/i, + 'en-US': /^\d{2}[- ]{0,1}\d{7}$/, + 'es-ES': /^(\d{0,8}|[XYZKLM]\d{7})[A-HJ-NP-TV-Z]$/i, + 'et-EE': /^[1-6]\d{6}(00[1-9]|0[1-9][0-9]|[1-6][0-9]{2}|70[0-9]|710)\d$/, + 'fi-FI': /^\d{6}[-+A]\d{3}[0-9A-FHJ-NPR-Y]$/i, + 'fr-BE': /^\d{11}$/, + 'fr-FR': /^[0-3]\d{12}$|^[0-3]\d\s\d{2}(\s\d{3}){3}$/, + // Conforms both to official spec and provided example + 'fr-LU': /^\d{13}$/, + 'hr-HR': /^\d{11}$/, + 'hu-HU': /^8\d{9}$/, + 'it-IT': /^[A-Z]{6}[L-NP-V0-9]{2}[A-EHLMPRST][L-NP-V0-9]{2}[A-ILMZ][L-NP-V0-9]{3}[A-Z]$/i, + 'lv-LV': /^\d{6}-{0,1}\d{5}$/, + // Conforms both to DG TAXUD spec and original research + 'mt-MT': /^\d{3,7}[APMGLHBZ]$|^([1-8])\1\d{7}$/i, + 'nl-NL': /^\d{9}$/, + 'pl-PL': /^\d{10,11}$/, + 'pt-BR': /(?:^\d{11}$)|(?:^\d{14}$)/, + 'pt-PT': /^\d{9}$/, + 'ro-RO': /^\d{13}$/, + 'sk-SK': /^\d{6}\/{0,1}\d{3,4}$/, + 'sl-SI': /^[1-9]\d{7}$/, + 'sv-SE': /^(\d{6}[-+]{0,1}\d{4}|(18|19|20)\d{6}[-+]{0,1}\d{4})$/ +}; // taxIdFormat locale aliases + +taxIdFormat['lb-LU'] = taxIdFormat['fr-LU']; +taxIdFormat['lt-LT'] = taxIdFormat['et-EE']; +taxIdFormat['nl-BE'] = taxIdFormat['fr-BE']; // Algorithmic tax id check functions for various locales + +var taxIdCheck = { + 'bg-BG': bgBgCheck, + 'cs-CZ': csCzCheck, + 'de-AT': deAtCheck, + 'de-DE': deDeCheck, + 'dk-DK': dkDkCheck, + 'el-CY': elCyCheck, + 'el-GR': elGrCheck, + 'en-IE': enIeCheck, + 'en-US': enUsCheck, + 'es-ES': esEsCheck, + 'et-EE': etEeCheck, + 'fi-FI': fiFiCheck, + 'fr-BE': frBeCheck, + 'fr-FR': frFrCheck, + 'fr-LU': frLuCheck, + 'hr-HR': hrHrCheck, + 'hu-HU': huHuCheck, + 'it-IT': itItCheck, + 'lv-LV': lvLvCheck, + 'mt-MT': mtMtCheck, + 'nl-NL': nlNlCheck, + 'pl-PL': plPlCheck, + 'pt-BR': ptBrCheck, + 'pt-PT': ptPtCheck, + 'ro-RO': roRoCheck, + 'sk-SK': skSkCheck, + 'sl-SI': slSiCheck, + 'sv-SE': svSeCheck +}; // taxIdCheck locale aliases + +taxIdCheck['lb-LU'] = taxIdCheck['fr-LU']; +taxIdCheck['lt-LT'] = taxIdCheck['et-EE']; +taxIdCheck['nl-BE'] = taxIdCheck['fr-BE']; // Regexes for locales where characters should be omitted before checking format + +var allsymbols = /[-\\\/!@#$%\^&\*\(\)\+\=\[\]]+/g; +var sanitizeRegexes = { + 'de-AT': allsymbols, + 'de-DE': /[\/\\]/g, + 'fr-BE': allsymbols +}; // sanitizeRegexes locale aliases + +sanitizeRegexes['nl-BE'] = sanitizeRegexes['fr-BE']; +/* + * Validator function + * Return true if the passed string is a valid tax identification number + * for the specified locale. + * Throw an error exception if the locale is not supported. + */ + +function isTaxID(str) { + var locale = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : 'en-US'; + assertString(str); // Copy TIN to avoid replacement if sanitized + + var strcopy = str.slice(0); + + if (locale in taxIdFormat) { + if (locale in sanitizeRegexes) { + strcopy = strcopy.replace(sanitizeRegexes[locale], ''); + } + + if (!taxIdFormat[locale].test(strcopy)) { + return false; + } + + if (locale in taxIdCheck) { + return taxIdCheck[locale](strcopy); + } // Fallthrough; not all locales have algorithmic checks + + + return true; + } + + throw new Error("Invalid locale '".concat(locale, "'")); +} + +/* eslint-disable max-len */ + +var phones = { + 'am-AM': /^(\+?374|0)((10|[9|7][0-9])\d{6}$|[2-4]\d{7}$)/, + 'ar-AE': /^((\+?971)|0)?5[024568]\d{7}$/, + 'ar-BH': /^(\+?973)?(3|6)\d{7}$/, + 'ar-DZ': /^(\+?213|0)(5|6|7)\d{8}$/, + 'ar-LB': /^(\+?961)?((3|81)\d{6}|7\d{7})$/, + 'ar-EG': /^((\+?20)|0)?1[0125]\d{8}$/, + 'ar-IQ': /^(\+?964|0)?7[0-9]\d{8}$/, + 'ar-JO': /^(\+?962|0)?7[789]\d{7}$/, + 'ar-KW': /^(\+?965)[569]\d{7}$/, + 'ar-LY': /^((\+?218)|0)?(9[1-6]\d{7}|[1-8]\d{7,9})$/, + 'ar-MA': /^(?:(?:\+|00)212|0)[5-7]\d{8}$/, + 'ar-OM': /^((\+|00)968)?(9[1-9])\d{6}$/, + 'ar-PS': /^(\+?970|0)5[6|9](\d{7})$/, + 'ar-SA': /^(!?(\+?966)|0)?5\d{8}$/, + 'ar-SY': /^(!?(\+?963)|0)?9\d{8}$/, + 'ar-TN': /^(\+?216)?[2459]\d{7}$/, + 'az-AZ': /^(\+994|0)(5[015]|7[07]|99)\d{7}$/, + 'bs-BA': /^((((\+|00)3876)|06))((([0-3]|[5-6])\d{6})|(4\d{7}))$/, + 'be-BY': /^(\+?375)?(24|25|29|33|44)\d{7}$/, + 'bg-BG': /^(\+?359|0)?8[789]\d{7}$/, + 'bn-BD': /^(\+?880|0)1[13456789][0-9]{8}$/, + 'ca-AD': /^(\+376)?[346]\d{5}$/, + 'cs-CZ': /^(\+?420)? ?[1-9][0-9]{2} ?[0-9]{3} ?[0-9]{3}$/, + 'da-DK': /^(\+?45)?\s?\d{2}\s?\d{2}\s?\d{2}\s?\d{2}$/, + 'de-DE': /^((\+49|0)[1|3])([0|5][0-45-9]\d|6([23]|0\d?)|7([0-57-9]|6\d))\d{7,9}$/, + 'de-AT': /^(\+43|0)\d{1,4}\d{3,12}$/, + 'de-CH': /^(\+41|0)([1-9])\d{1,9}$/, + 'de-LU': /^(\+352)?((6\d1)\d{6})$/, + 'dv-MV': /^(\+?960)?(7[2-9]|91|9[3-9])\d{7}$/, + 'el-GR': /^(\+?30|0)?(69\d{8})$/, + 'en-AU': /^(\+?61|0)4\d{8}$/, + 'en-BM': /^(\+?1)?441(((3|7)\d{6}$)|(5[0-3][0-9]\d{4}$)|(59\d{5}))/, + 'en-GB': /^(\+?44|0)7\d{9}$/, + 'en-GG': /^(\+?44|0)1481\d{6}$/, + 'en-GH': /^(\+233|0)(20|50|24|54|27|57|26|56|23|28|55|59)\d{7}$/, + 'en-GY': /^(\+592|0)6\d{6}$/, + 'en-HK': /^(\+?852[-\s]?)?[456789]\d{3}[-\s]?\d{4}$/, + 'en-MO': /^(\+?853[-\s]?)?[6]\d{3}[-\s]?\d{4}$/, + 'en-IE': /^(\+?353|0)8[356789]\d{7}$/, + 'en-IN': /^(\+?91|0)?[6789]\d{9}$/, + 'en-KE': /^(\+?254|0)(7|1)\d{8}$/, + 'en-KI': /^((\+686|686)?)?( )?((6|7)(2|3|8)[0-9]{6})$/, + 'en-MT': /^(\+?356|0)?(99|79|77|21|27|22|25)[0-9]{6}$/, + 'en-MU': /^(\+?230|0)?\d{8}$/, + 'en-NA': /^(\+?264|0)(6|8)\d{7}$/, + 'en-NG': /^(\+?234|0)?[789]\d{9}$/, + 'en-NZ': /^(\+?64|0)[28]\d{7,9}$/, + 'en-PK': /^((00|\+)?92|0)3[0-6]\d{8}$/, + 'en-PH': /^(09|\+639)\d{9}$/, + 'en-RW': /^(\+?250|0)?[7]\d{8}$/, + 'en-SG': /^(\+65)?[3689]\d{7}$/, + 'en-SL': /^(\+?232|0)\d{8}$/, + 'en-TZ': /^(\+?255|0)?[67]\d{8}$/, + 'en-UG': /^(\+?256|0)?[7]\d{8}$/, + 'en-US': /^((\+1|1)?( |-)?)?(\([2-9][0-9]{2}\)|[2-9][0-9]{2})( |-)?([2-9][0-9]{2}( |-)?[0-9]{4})$/, + 'en-ZA': /^(\+?27|0)\d{9}$/, + 'en-ZM': /^(\+?26)?09[567]\d{7}$/, + 'en-ZW': /^(\+263)[0-9]{9}$/, + 'en-BW': /^(\+?267)?(7[1-8]{1})\d{6}$/, + 'es-AR': /^\+?549(11|[2368]\d)\d{8}$/, + 'es-BO': /^(\+?591)?(6|7)\d{7}$/, + 'es-CO': /^(\+?57)?3(0(0|1|2|4|5)|1\d|2[0-4]|5(0|1))\d{7}$/, + 'es-CL': /^(\+?56|0)[2-9]\d{1}\d{7}$/, + 'es-CR': /^(\+506)?[2-8]\d{7}$/, + 'es-CU': /^(\+53|0053)?5\d{7}/, + 'es-DO': /^(\+?1)?8[024]9\d{7}$/, + 'es-HN': /^(\+?504)?[9|8]\d{7}$/, + 'es-EC': 
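/*
 * Illustrative calls for isTaxID as defined above, using its lookup tables
 * (taxIdFormat, taxIdCheck, sanitizeRegexes). The sample inputs below are
 * made up for illustration; the expected results follow from the code above.
 *
 *   isTaxID('11111111111', 'pt-BR');  // false – matches the pt-BR format but is rejected by ptBrCheck as a known-invalid CPF
 *   isTaxID('123', 'pt-BR');          // false – fails the pt-BR format regex (11 or 14 digits required)
 *   isTaxID('123456789', 'xx-XX');    // throws Error: Invalid locale 'xx-XX'
 *
 * (The mobile-phone locale table continues below.)
 */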
/^(\+?593|0)([2-7]|9[2-9])\d{7}$/, + 'es-ES': /^(\+?34)?[6|7]\d{8}$/, + 'es-PE': /^(\+?51)?9\d{8}$/, + 'es-MX': /^(\+?52)?(1|01)?\d{10,11}$/, + 'es-PA': /^(\+?507)\d{7,8}$/, + 'es-PY': /^(\+?595|0)9[9876]\d{7}$/, + 'es-SV': /^(\+?503)?[67]\d{7}$/, + 'es-UY': /^(\+598|0)9[1-9][\d]{6}$/, + 'es-VE': /^(\+?58)?(2|4)\d{9}$/, + 'et-EE': /^(\+?372)?\s?(5|8[1-4])\s?([0-9]\s?){6,7}$/, + 'fa-IR': /^(\+?98[\-\s]?|0)9[0-39]\d[\-\s]?\d{3}[\-\s]?\d{4}$/, + 'fi-FI': /^(\+?358|0)\s?(4(0|1|2|4|5|6)?|50)\s?(\d\s?){4,8}\d$/, + 'fj-FJ': /^(\+?679)?\s?\d{3}\s?\d{4}$/, + 'fo-FO': /^(\+?298)?\s?\d{2}\s?\d{2}\s?\d{2}$/, + 'fr-BF': /^(\+226|0)[67]\d{7}$/, + 'fr-CM': /^(\+?237)6[0-9]{8}$/, + 'fr-FR': /^(\+?33|0)[67]\d{8}$/, + 'fr-GF': /^(\+?594|0|00594)[67]\d{8}$/, + 'fr-GP': /^(\+?590|0|00590)[67]\d{8}$/, + 'fr-MQ': /^(\+?596|0|00596)[67]\d{8}$/, + 'fr-PF': /^(\+?689)?8[789]\d{6}$/, + 'fr-RE': /^(\+?262|0|00262)[67]\d{8}$/, + 'he-IL': /^(\+972|0)([23489]|5[012345689]|77)[1-9]\d{6}$/, + 'hu-HU': /^(\+?36|06)(20|30|31|50|70)\d{7}$/, + 'id-ID': /^(\+?62|0)8(1[123456789]|2[1238]|3[1238]|5[12356789]|7[78]|9[56789]|8[123456789])([\s?|\d]{5,11})$/, + 'it-IT': /^(\+?39)?\s?3\d{2} ?\d{6,7}$/, + 'it-SM': /^((\+378)|(0549)|(\+390549)|(\+3780549))?6\d{5,9}$/, + 'ja-JP': /^(\+81[ \-]?(\(0\))?|0)[6789]0[ \-]?\d{4}[ \-]?\d{4}$/, + 'ka-GE': /^(\+?995)?(5|79)\d{7}$/, + 'kk-KZ': /^(\+?7|8)?7\d{9}$/, + 'kl-GL': /^(\+?299)?\s?\d{2}\s?\d{2}\s?\d{2}$/, + 'ko-KR': /^((\+?82)[ \-]?)?0?1([0|1|6|7|8|9]{1})[ \-]?\d{3,4}[ \-]?\d{4}$/, + 'lt-LT': /^(\+370|8)\d{8}$/, + 'lv-LV': /^(\+?371)2\d{7}$/, + 'ms-MY': /^(\+?6?01){1}(([0145]{1}(\-|\s)?\d{7,8})|([236789]{1}(\s|\-)?\d{7}))$/, + 'mz-MZ': /^(\+?258)?8[234567]\d{7}$/, + 'nb-NO': /^(\+?47)?[49]\d{7}$/, + 'ne-NP': /^(\+?977)?9[78]\d{8}$/, + 'nl-BE': /^(\+?32|0)4\d{8}$/, + 'nl-NL': /^(((\+|00)?31\(0\))|((\+|00)?31)|0)6{1}\d{8}$/, + 'nn-NO': /^(\+?47)?[49]\d{7}$/, + 'pl-PL': /^(\+?48)? ?[5-8]\d ?\d{3} ?\d{2} ?\d{2}$/, + 'pt-BR': /^((\+?55\ ?[1-9]{2}\ ?)|(\+?55\ ?\([1-9]{2}\)\ ?)|(0[1-9]{2}\ ?)|(\([1-9]{2}\)\ ?)|([1-9]{2}\ ?))((\d{4}\-?\d{4})|(9[2-9]{1}\d{3}\-?\d{4}))$/, + 'pt-PT': /^(\+?351)?9[1236]\d{7}$/, + 'pt-AO': /^(\+244)\d{9}$/, + 'ro-RO': /^(\+?4?0)\s?7\d{2}(\/|\s|\.|\-)?\d{3}(\s|\.|\-)?\d{3}$/, + 'ru-RU': /^(\+?7|8)?9\d{9}$/, + 'si-LK': /^(?:0|94|\+94)?(7(0|1|2|4|5|6|7|8)( |-)?)\d{7}$/, + 'sl-SI': /^(\+386\s?|0)(\d{1}\s?\d{3}\s?\d{2}\s?\d{2}|\d{2}\s?\d{3}\s?\d{3})$/, + 'sk-SK': /^(\+?421)? 
?[1-9][0-9]{2} ?[0-9]{3} ?[0-9]{3}$/, + 'sq-AL': /^(\+355|0)6[789]\d{6}$/, + 'sr-RS': /^(\+3816|06)[- \d]{5,9}$/, + 'sv-SE': /^(\+?46|0)[\s\-]?7[\s\-]?[02369]([\s\-]?\d){7}$/, + 'tg-TJ': /^(\+?992)?[5][5]\d{7}$/, + 'th-TH': /^(\+66|66|0)\d{9}$/, + 'tr-TR': /^(\+?90|0)?5\d{9}$/, + 'tk-TM': /^(\+993|993|8)\d{8}$/, + 'uk-UA': /^(\+?38|8)?0\d{9}$/, + 'uz-UZ': /^(\+?998)?(6[125-79]|7[1-69]|88|9\d)\d{7}$/, + 'vi-VN': /^((\+?84)|0)((3([2-9]))|(5([25689]))|(7([0|6-9]))|(8([1-9]))|(9([0-9])))([0-9]{7})$/, + 'zh-CN': /^((\+|00)86)?(1[3-9]|9[28])\d{9}$/, + 'zh-TW': /^(\+?886\-?|0)?9\d{8}$/, + 'dz-BT': /^(\+?975|0)?(17|16|77|02)\d{6}$/ +}; +/* eslint-enable max-len */ +// aliases + +phones['en-CA'] = phones['en-US']; +phones['fr-CA'] = phones['en-CA']; +phones['fr-BE'] = phones['nl-BE']; +phones['zh-HK'] = phones['en-HK']; +phones['zh-MO'] = phones['en-MO']; +phones['ga-IE'] = phones['en-IE']; +phones['fr-CH'] = phones['de-CH']; +phones['it-CH'] = phones['fr-CH']; +function isMobilePhone(str, locale, options) { + assertString(str); + + if (options && options.strictMode && !str.startsWith('+')) { + return false; + } + + if (Array.isArray(locale)) { + return locale.some(function (key) { + // https://github.com/gotwarlost/istanbul/blob/master/ignoring-code-for-coverage.md#ignoring-code-for-coverage-purposes + // istanbul ignore else + if (phones.hasOwnProperty(key)) { + var phone = phones[key]; + + if (phone.test(str)) { + return true; + } + } + + return false; + }); + } else if (locale in phones) { + return phones[locale].test(str); // alias falsey locale as 'any' + } else if (!locale || locale === 'any') { + for (var key in phones) { + // istanbul ignore else + if (phones.hasOwnProperty(key)) { + var phone = phones[key]; + + if (phone.test(str)) { + return true; + } + } + } + + return false; + } + + throw new Error("Invalid locale '".concat(locale, "'")); +} +var locales$4 = Object.keys(phones); + +var eth = /^(0x)[0-9a-f]{40}$/i; +function isEthereumAddress(str) { + assertString(str); + return eth.test(str); +} + +function currencyRegex(options) { + var decimal_digits = "\\d{".concat(options.digits_after_decimal[0], "}"); + options.digits_after_decimal.forEach(function (digit, index) { + if (index !== 0) decimal_digits = "".concat(decimal_digits, "|\\d{").concat(digit, "}"); + }); + var symbol = "(".concat(options.symbol.replace(/\W/, function (m) { + return "\\".concat(m); + }), ")").concat(options.require_symbol ? '' : '?'), + negative = '-?', + whole_dollar_amount_without_sep = '[1-9]\\d*', + whole_dollar_amount_with_sep = "[1-9]\\d{0,2}(\\".concat(options.thousands_separator, "\\d{3})*"), + valid_whole_dollar_amounts = ['0', whole_dollar_amount_without_sep, whole_dollar_amount_with_sep], + whole_dollar_amount = "(".concat(valid_whole_dollar_amounts.join('|'), ")?"), + decimal_amount = "(\\".concat(options.decimal_separator, "(").concat(decimal_digits, "))").concat(options.require_decimal ? '' : '?'); + var pattern = whole_dollar_amount + (options.allow_decimal || options.require_decimal ? 
decimal_amount : ''); // default is negative sign before symbol, but there are two other options (besides parens) + + if (options.allow_negatives && !options.parens_for_negatives) { + if (options.negative_sign_after_digits) { + pattern += negative; + } else if (options.negative_sign_before_digits) { + pattern = negative + pattern; + } + } // South African Rand, for example, uses R 123 (space) and R-123 (no space) + + + if (options.allow_negative_sign_placeholder) { + pattern = "( (?!\\-))?".concat(pattern); + } else if (options.allow_space_after_symbol) { + pattern = " ?".concat(pattern); + } else if (options.allow_space_after_digits) { + pattern += '( (?!$))?'; + } + + if (options.symbol_after_digits) { + pattern += symbol; + } else { + pattern = symbol + pattern; + } + + if (options.allow_negatives) { + if (options.parens_for_negatives) { + pattern = "(\\(".concat(pattern, "\\)|").concat(pattern, ")"); + } else if (!(options.negative_sign_before_digits || options.negative_sign_after_digits)) { + pattern = negative + pattern; + } + } // ensure there's a dollar and/or decimal amount, and that + // it doesn't start with a space or a negative sign followed by a space + + + return new RegExp("^(?!-? )(?=.*\\d)".concat(pattern, "$")); +} + +var default_currency_options = { + symbol: '$', + require_symbol: false, + allow_space_after_symbol: false, + symbol_after_digits: false, + allow_negatives: true, + parens_for_negatives: false, + negative_sign_before_digits: false, + negative_sign_after_digits: false, + allow_negative_sign_placeholder: false, + thousands_separator: ',', + decimal_separator: '.', + allow_decimal: true, + require_decimal: false, + digits_after_decimal: [2], + allow_space_after_digits: false +}; +function isCurrency(str, options) { + assertString(str); + options = merge(options, default_currency_options); + return currencyRegex(options).test(str); +} + +var bech32 = /^(bc1)[a-z0-9]{25,39}$/; +var base58 = /^(1|3)[A-HJ-NP-Za-km-z1-9]{25,39}$/; +function isBtcAddress(str) { + assertString(str); // check for bech32 + + if (str.startsWith('bc1')) { + return bech32.test(str); + } + + return base58.test(str); +} + +/* eslint-disable max-len */ +// from http://goo.gl/0ejHHW + +var iso8601 = /^([\+-]?\d{4}(?!\d{2}\b))((-?)((0[1-9]|1[0-2])(\3([12]\d|0[1-9]|3[01]))?|W([0-4]\d|5[0-3])(-?[1-7])?|(00[1-9]|0[1-9]\d|[12]\d{2}|3([0-5]\d|6[1-6])))([T\s]((([01]\d|2[0-3])((:?)[0-5]\d)?|24:?00)([\.,]\d+(?!:))?)?(\17[0-5]\d([\.,]\d+)?)?([zZ]|([\+-])([01]\d|2[0-3]):?([0-5]\d)?)?)?)?$/; // same as above, except with a strict 'T' separator between date and time + +var iso8601StrictSeparator = /^([\+-]?\d{4}(?!\d{2}\b))((-?)((0[1-9]|1[0-2])(\3([12]\d|0[1-9]|3[01]))?|W([0-4]\d|5[0-3])(-?[1-7])?|(00[1-9]|0[1-9]\d|[12]\d{2}|3([0-5]\d|6[1-6])))([T]((([01]\d|2[0-3])((:?)[0-5]\d)?|24:?00)([\.,]\d+(?!:))?)?(\17[0-5]\d([\.,]\d+)?)?([zZ]|([\+-])([01]\d|2[0-3]):?([0-5]\d)?)?)?)?$/; +/* eslint-enable max-len */ + +var isValidDate = function isValidDate(str) { + // str must have passed the ISO8601 check + // this check is meant to catch invalid dates + // like 2009-02-31 + // first check for ordinal dates + var ordinalMatch = str.match(/^(\d{4})-?(\d{3})([ T]{1}\.*|$)/); + + if (ordinalMatch) { + var oYear = Number(ordinalMatch[1]); + var oDay = Number(ordinalMatch[2]); // if is leap year + + if (oYear % 4 === 0 && oYear % 100 !== 0 || oYear % 400 === 0) return oDay <= 366; + return oDay <= 365; + } + + var match = str.match(/(\d{4})-?(\d{0,2})-?(\d*)/).map(Number); + var year = match[1]; + var month = match[2]; 
+ var day = match[3]; + var monthString = month ? "0".concat(month).slice(-2) : month; + var dayString = day ? "0".concat(day).slice(-2) : day; // create a date object and compare + + var d = new Date("".concat(year, "-").concat(monthString || '01', "-").concat(dayString || '01')); + + if (month && day) { + return d.getUTCFullYear() === year && d.getUTCMonth() + 1 === month && d.getUTCDate() === day; + } + + return true; +}; + +function isISO8601(str) { + var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; + assertString(str); + var check = options.strictSeparator ? iso8601StrictSeparator.test(str) : iso8601.test(str); + if (check && options.strict) return isValidDate(str); + return check; +} + +/* Based on https://tools.ietf.org/html/rfc3339#section-5.6 */ + +var dateFullYear = /[0-9]{4}/; +var dateMonth = /(0[1-9]|1[0-2])/; +var dateMDay = /([12]\d|0[1-9]|3[01])/; +var timeHour = /([01][0-9]|2[0-3])/; +var timeMinute = /[0-5][0-9]/; +var timeSecond = /([0-5][0-9]|60)/; +var timeSecFrac = /(\.[0-9]+)?/; +var timeNumOffset = new RegExp("[-+]".concat(timeHour.source, ":").concat(timeMinute.source)); +var timeOffset = new RegExp("([zZ]|".concat(timeNumOffset.source, ")")); +var partialTime = new RegExp("".concat(timeHour.source, ":").concat(timeMinute.source, ":").concat(timeSecond.source).concat(timeSecFrac.source)); +var fullDate = new RegExp("".concat(dateFullYear.source, "-").concat(dateMonth.source, "-").concat(dateMDay.source)); +var fullTime = new RegExp("".concat(partialTime.source).concat(timeOffset.source)); +var rfc3339 = new RegExp("^".concat(fullDate.source, "[ tT]").concat(fullTime.source, "$")); +function isRFC3339(str) { + assertString(str); + return rfc3339.test(str); +} + +var validISO31661Alpha3CountriesCodes = new Set(['AFG', 'ALA', 'ALB', 'DZA', 'ASM', 'AND', 'AGO', 'AIA', 'ATA', 'ATG', 'ARG', 'ARM', 'ABW', 'AUS', 'AUT', 'AZE', 'BHS', 'BHR', 'BGD', 'BRB', 'BLR', 'BEL', 'BLZ', 'BEN', 'BMU', 'BTN', 'BOL', 'BES', 'BIH', 'BWA', 'BVT', 'BRA', 'IOT', 'BRN', 'BGR', 'BFA', 'BDI', 'KHM', 'CMR', 'CAN', 'CPV', 'CYM', 'CAF', 'TCD', 'CHL', 'CHN', 'CXR', 'CCK', 'COL', 'COM', 'COG', 'COD', 'COK', 'CRI', 'CIV', 'HRV', 'CUB', 'CUW', 'CYP', 'CZE', 'DNK', 'DJI', 'DMA', 'DOM', 'ECU', 'EGY', 'SLV', 'GNQ', 'ERI', 'EST', 'ETH', 'FLK', 'FRO', 'FJI', 'FIN', 'FRA', 'GUF', 'PYF', 'ATF', 'GAB', 'GMB', 'GEO', 'DEU', 'GHA', 'GIB', 'GRC', 'GRL', 'GRD', 'GLP', 'GUM', 'GTM', 'GGY', 'GIN', 'GNB', 'GUY', 'HTI', 'HMD', 'VAT', 'HND', 'HKG', 'HUN', 'ISL', 'IND', 'IDN', 'IRN', 'IRQ', 'IRL', 'IMN', 'ISR', 'ITA', 'JAM', 'JPN', 'JEY', 'JOR', 'KAZ', 'KEN', 'KIR', 'PRK', 'KOR', 'KWT', 'KGZ', 'LAO', 'LVA', 'LBN', 'LSO', 'LBR', 'LBY', 'LIE', 'LTU', 'LUX', 'MAC', 'MKD', 'MDG', 'MWI', 'MYS', 'MDV', 'MLI', 'MLT', 'MHL', 'MTQ', 'MRT', 'MUS', 'MYT', 'MEX', 'FSM', 'MDA', 'MCO', 'MNG', 'MNE', 'MSR', 'MAR', 'MOZ', 'MMR', 'NAM', 'NRU', 'NPL', 'NLD', 'NCL', 'NZL', 'NIC', 'NER', 'NGA', 'NIU', 'NFK', 'MNP', 'NOR', 'OMN', 'PAK', 'PLW', 'PSE', 'PAN', 'PNG', 'PRY', 'PER', 'PHL', 'PCN', 'POL', 'PRT', 'PRI', 'QAT', 'REU', 'ROU', 'RUS', 'RWA', 'BLM', 'SHN', 'KNA', 'LCA', 'MAF', 'SPM', 'VCT', 'WSM', 'SMR', 'STP', 'SAU', 'SEN', 'SRB', 'SYC', 'SLE', 'SGP', 'SXM', 'SVK', 'SVN', 'SLB', 'SOM', 'ZAF', 'SGS', 'SSD', 'ESP', 'LKA', 'SDN', 'SUR', 'SJM', 'SWZ', 'SWE', 'CHE', 'SYR', 'TWN', 'TJK', 'TZA', 'THA', 'TLS', 'TGO', 'TKL', 'TON', 'TTO', 'TUN', 'TUR', 'TKM', 'TCA', 'TUV', 'UGA', 'UKR', 'ARE', 'GBR', 'USA', 'UMI', 'URY', 'UZB', 'VUT', 'VEN', 'VNM', 'VGB', 'VIR', 'WLF', 'ESH', 'YEM', 'ZMB', 'ZWE']); 
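/*
 * Quick illustrative checks for the ISO 8601 and RFC 3339 validators defined
 * above; the inputs are examples only.
 */
isISO8601('2020-07-10T15:00:00Z');           // true – matches the ISO 8601 pattern
isISO8601('2009-02-31', { strict: true });   // false – the format matches, but isValidDate rejects the impossible date
isRFC3339('2020-07-10T15:00:00Z');           // true – full date, 'T' separator, full time with offset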
+function isISO31661Alpha3(str) { + assertString(str); + return validISO31661Alpha3CountriesCodes.has(str.toUpperCase()); +} + +var validISO4217CurrencyCodes = new Set(['AED', 'AFN', 'ALL', 'AMD', 'ANG', 'AOA', 'ARS', 'AUD', 'AWG', 'AZN', 'BAM', 'BBD', 'BDT', 'BGN', 'BHD', 'BIF', 'BMD', 'BND', 'BOB', 'BOV', 'BRL', 'BSD', 'BTN', 'BWP', 'BYN', 'BZD', 'CAD', 'CDF', 'CHE', 'CHF', 'CHW', 'CLF', 'CLP', 'CNY', 'COP', 'COU', 'CRC', 'CUC', 'CUP', 'CVE', 'CZK', 'DJF', 'DKK', 'DOP', 'DZD', 'EGP', 'ERN', 'ETB', 'EUR', 'FJD', 'FKP', 'GBP', 'GEL', 'GHS', 'GIP', 'GMD', 'GNF', 'GTQ', 'GYD', 'HKD', 'HNL', 'HRK', 'HTG', 'HUF', 'IDR', 'ILS', 'INR', 'IQD', 'IRR', 'ISK', 'JMD', 'JOD', 'JPY', 'KES', 'KGS', 'KHR', 'KMF', 'KPW', 'KRW', 'KWD', 'KYD', 'KZT', 'LAK', 'LBP', 'LKR', 'LRD', 'LSL', 'LYD', 'MAD', 'MDL', 'MGA', 'MKD', 'MMK', 'MNT', 'MOP', 'MRU', 'MUR', 'MVR', 'MWK', 'MXN', 'MXV', 'MYR', 'MZN', 'NAD', 'NGN', 'NIO', 'NOK', 'NPR', 'NZD', 'OMR', 'PAB', 'PEN', 'PGK', 'PHP', 'PKR', 'PLN', 'PYG', 'QAR', 'RON', 'RSD', 'RUB', 'RWF', 'SAR', 'SBD', 'SCR', 'SDG', 'SEK', 'SGD', 'SHP', 'SLL', 'SOS', 'SRD', 'SSP', 'STN', 'SVC', 'SYP', 'SZL', 'THB', 'TJS', 'TMT', 'TND', 'TOP', 'TRY', 'TTD', 'TWD', 'TZS', 'UAH', 'UGX', 'USD', 'USN', 'UYI', 'UYU', 'UYW', 'UZS', 'VES', 'VND', 'VUV', 'WST', 'XAF', 'XAG', 'XAU', 'XBA', 'XBB', 'XBC', 'XBD', 'XCD', 'XDR', 'XOF', 'XPD', 'XPF', 'XPT', 'XSU', 'XTS', 'XUA', 'XXX', 'YER', 'ZAR', 'ZMW', 'ZWL']); +function isISO4217(str) { + assertString(str); + return validISO4217CurrencyCodes.has(str.toUpperCase()); +} + +var base32 = /^[A-Z2-7]+=*$/; +function isBase32(str) { + assertString(str); + var len = str.length; + + if (len % 8 === 0 && base32.test(str)) { + return true; + } + + return false; +} + +var base58Reg = /^[A-HJ-NP-Za-km-z1-9]*$/; +function isBase58(str) { + assertString(str); + + if (base58Reg.test(str)) { + return true; + } + + return false; +} + +var validMediaType = /^[a-z]+\/[a-z0-9\-\+]+$/i; +var validAttribute = /^[a-z\-]+=[a-z0-9\-]+$/i; +var validData = /^[a-z0-9!\$&'\(\)\*\+,;=\-\._~:@\/\?%\s]*$/i; +function isDataURI(str) { + assertString(str); + var data = str.split(','); + + if (data.length < 2) { + return false; + } + + var attributes = data.shift().trim().split(';'); + var schemeAndMediaType = attributes.shift(); + + if (schemeAndMediaType.substr(0, 5) !== 'data:') { + return false; + } + + var mediaType = schemeAndMediaType.substr(5); + + if (mediaType !== '' && !validMediaType.test(mediaType)) { + return false; + } + + for (var i = 0; i < attributes.length; i++) { + if (!(i === attributes.length - 1 && attributes[i].toLowerCase() === 'base64') && !validAttribute.test(attributes[i])) { + return false; + } + } + + for (var _i = 0; _i < data.length; _i++) { + if (!validData.test(data[_i])) { + return false; + } + } + + return true; +} + +var magnetURI = /^magnet:\?xt(?:\.1)?=urn:(?:aich|bitprint|btih|ed2k|ed2khash|kzhash|md5|sha1|tree:tiger):[a-z0-9]{32}(?:[a-z0-9]{8})?($|&)/i; +function isMagnetURI(url) { + assertString(url); + return magnetURI.test(url.trim()); +} + +/* + Checks if the provided string matches to a correct Media type format (MIME type) + + This function only checks is the string format follows the + etablished rules by the according RFC specifications. + This function supports 'charset' in textual media types + (https://tools.ietf.org/html/rfc6657). 
+ + This function does not check against all the media types listed + by the IANA (https://www.iana.org/assignments/media-types/media-types.xhtml) + because of lightness purposes : it would require to include + all these MIME types in this librairy, which would weigh it + significantly. This kind of effort maybe is not worth for the use that + this function has in this entire librairy. + + More informations in the RFC specifications : + - https://tools.ietf.org/html/rfc2045 + - https://tools.ietf.org/html/rfc2046 + - https://tools.ietf.org/html/rfc7231#section-3.1.1.1 + - https://tools.ietf.org/html/rfc7231#section-3.1.1.5 +*/ +// Match simple MIME types +// NB : +// Subtype length must not exceed 100 characters. +// This rule does not comply to the RFC specs (what is the max length ?). + +var mimeTypeSimple = /^(application|audio|font|image|message|model|multipart|text|video)\/[a-zA-Z0-9\.\-\+]{1,100}$/i; // eslint-disable-line max-len +// Handle "charset" in "text/*" + +var mimeTypeText = /^text\/[a-zA-Z0-9\.\-\+]{1,100};\s?charset=("[a-zA-Z0-9\.\-\+\s]{0,70}"|[a-zA-Z0-9\.\-\+]{0,70})(\s?\([a-zA-Z0-9\.\-\+\s]{1,20}\))?$/i; // eslint-disable-line max-len +// Handle "boundary" in "multipart/*" + +var mimeTypeMultipart = /^multipart\/[a-zA-Z0-9\.\-\+]{1,100}(;\s?(boundary|charset)=("[a-zA-Z0-9\.\-\+\s]{0,70}"|[a-zA-Z0-9\.\-\+]{0,70})(\s?\([a-zA-Z0-9\.\-\+\s]{1,20}\))?){0,2}$/i; // eslint-disable-line max-len + +function isMimeType(str) { + assertString(str); + return mimeTypeSimple.test(str) || mimeTypeText.test(str) || mimeTypeMultipart.test(str); +} + +var lat = /^\(?[+-]?(90(\.0+)?|[1-8]?\d(\.\d+)?)$/; +var _long = /^\s?[+-]?(180(\.0+)?|1[0-7]\d(\.\d+)?|\d{1,2}(\.\d+)?)\)?$/; +var latDMS = /^(([1-8]?\d)\D+([1-5]?\d|60)\D+([1-5]?\d|60)(\.\d+)?|90\D+0\D+0)\D+[NSns]?$/i; +var longDMS = /^\s*([1-7]?\d{1,2}\D+([1-5]?\d|60)\D+([1-5]?\d|60)(\.\d+)?|180\D+0\D+0)\D+[EWew]?$/i; +var defaultLatLongOptions = { + checkDMS: false +}; +function isLatLong(str, options) { + assertString(str); + options = merge(options, defaultLatLongOptions); + if (!str.includes(',')) return false; + var pair = str.split(','); + if (pair[0].startsWith('(') && !pair[1].endsWith(')') || pair[1].endsWith(')') && !pair[0].startsWith('(')) return false; + + if (options.checkDMS) { + return latDMS.test(pair[0]) && longDMS.test(pair[1]); + } + + return lat.test(pair[0]) && _long.test(pair[1]); +} + +var threeDigit = /^\d{3}$/; +var fourDigit = /^\d{4}$/; +var fiveDigit = /^\d{5}$/; +var sixDigit = /^\d{6}$/; +var patterns = { + AD: /^AD\d{3}$/, + AT: fourDigit, + AU: fourDigit, + AZ: /^AZ\d{4}$/, + BE: fourDigit, + BG: fourDigit, + BR: /^\d{5}-\d{3}$/, + BY: /2[1-4]{1}\d{4}$/, + CA: /^[ABCEGHJKLMNPRSTVXY]\d[ABCEGHJ-NPRSTV-Z][\s\-]?\d[ABCEGHJ-NPRSTV-Z]\d$/i, + CH: fourDigit, + CN: /^(0[1-7]|1[012356]|2[0-7]|3[0-6]|4[0-7]|5[1-7]|6[1-7]|7[1-5]|8[1345]|9[09])\d{4}$/, + CZ: /^\d{3}\s?\d{2}$/, + DE: fiveDigit, + DK: fourDigit, + DO: fiveDigit, + DZ: fiveDigit, + EE: fiveDigit, + ES: /^(5[0-2]{1}|[0-4]{1}\d{1})\d{3}$/, + FI: fiveDigit, + FR: /^\d{2}\s?\d{3}$/, + GB: /^(gir\s?0aa|[a-z]{1,2}\d[\da-z]?\s?(\d[a-z]{2})?)$/i, + GR: /^\d{3}\s?\d{2}$/, + HR: /^([1-5]\d{4}$)/, + HT: /^HT\d{4}$/, + HU: fourDigit, + ID: fiveDigit, + IE: /^(?!.*(?:o))[A-Za-z]\d[\dw]\s\w{4}$/i, + IL: /^(\d{5}|\d{7})$/, + IN: /^((?!10|29|35|54|55|65|66|86|87|88|89)[1-9][0-9]{5})$/, + IR: /\b(?!(\d)\1{3})[13-9]{4}[1346-9][013-9]{5}\b/, + IS: threeDigit, + IT: fiveDigit, + JP: /^\d{3}\-\d{4}$/, + KE: fiveDigit, + KR: /^(\d{5}|\d{6})$/, + LI: /^(948[5-9]|949[0-7])$/, 
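/*
 * Example inputs for isMimeType as defined above (illustrative only):
 *
 *   isMimeType('application/json');          // true – plain type/subtype form
 *   isMimeType('text/html; charset=UTF-8');  // true – textual type with a charset parameter
 *   isMimeType('json');                      // false – missing the type/subtype structure
 *
 * (The postal-code patterns continue below.)
 */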
+ LT: /^LT\-\d{5}$/, + LU: fourDigit, + LV: /^LV\-\d{4}$/, + LK: fiveDigit, + MX: fiveDigit, + MT: /^[A-Za-z]{3}\s{0,1}\d{4}$/, + MY: fiveDigit, + NL: /^\d{4}\s?[a-z]{2}$/i, + NO: fourDigit, + NP: /^(10|21|22|32|33|34|44|45|56|57)\d{3}$|^(977)$/i, + NZ: fourDigit, + PL: /^\d{2}\-\d{3}$/, + PR: /^00[679]\d{2}([ -]\d{4})?$/, + PT: /^\d{4}\-\d{3}?$/, + RO: sixDigit, + RU: sixDigit, + SA: fiveDigit, + SE: /^[1-9]\d{2}\s?\d{2}$/, + SG: sixDigit, + SI: fourDigit, + SK: /^\d{3}\s?\d{2}$/, + TH: fiveDigit, + TN: fourDigit, + TW: /^\d{3}(\d{2})?$/, + UA: fiveDigit, + US: /^\d{5}(-\d{4})?$/, + ZA: fourDigit, + ZM: fiveDigit +}; +var locales$5 = Object.keys(patterns); +function isPostalCode(str, locale) { + assertString(str); + + if (locale in patterns) { + return patterns[locale].test(str); + } else if (locale === 'any') { + for (var key in patterns) { + // https://github.com/gotwarlost/istanbul/blob/master/ignoring-code-for-coverage.md#ignoring-code-for-coverage-purposes + // istanbul ignore else + if (patterns.hasOwnProperty(key)) { + var pattern = patterns[key]; + + if (pattern.test(str)) { + return true; + } + } + } + + return false; + } + + throw new Error("Invalid locale '".concat(locale, "'")); +} + +function ltrim(str, chars) { + assertString(str); // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions#Escaping + + var pattern = chars ? new RegExp("^[".concat(chars.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'), "]+"), 'g') : /^\s+/g; + return str.replace(pattern, ''); +} + +function rtrim(str, chars) { + assertString(str); + + if (chars) { + // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions#Escaping + var pattern = new RegExp("[".concat(chars.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'), "]+$"), 'g'); + return str.replace(pattern, ''); + } // Use a faster and more safe than regex trim method https://blog.stevenlevithan.com/archives/faster-trim-javascript + + + var strIndex = str.length - 1; + + while (/\s/.test(str.charAt(strIndex))) { + strIndex -= 1; + } + + return str.slice(0, strIndex + 1); +} + +function trim(str, chars) { + return rtrim(ltrim(str, chars), chars); +} + +function escape(str) { + assertString(str); + return str.replace(/&/g, '&').replace(/"/g, '"').replace(/'/g, ''').replace(//g, '>').replace(/\//g, '/').replace(/\\/g, '\').replace(/`/g, '`'); +} + +function unescape(str) { + assertString(str); + return str.replace(/"/g, '"').replace(/'/g, "'").replace(/</g, '<').replace(/>/g, '>').replace(///g, '/').replace(/\/g, '\\').replace(/`/g, '`').replace(/&/g, '&'); // & replacement has to be the last one to prevent + // bugs with intermediate strings containing escape sequences + // See: https://github.com/validatorjs/validator.js/issues/1827 +} + +function blacklist$1(str, chars) { + assertString(str); + return str.replace(new RegExp("[".concat(chars, "]+"), 'g'), ''); +} + +function stripLow(str, keep_new_lines) { + assertString(str); + var chars = keep_new_lines ? '\\x00-\\x09\\x0B\\x0C\\x0E-\\x1F\\x7F' : '\\x00-\\x1F\\x7F'; + return blacklist$1(str, chars); +} + +function whitelist(str, chars) { + assertString(str); + return str.replace(new RegExp("[^".concat(chars, "]+"), 'g'), ''); +} + +function isWhitelisted(str, chars) { + assertString(str); + + for (var i = str.length - 1; i >= 0; i--) { + if (chars.indexOf(str[i]) === -1) { + return false; + } + } + + return true; +} + +var default_normalize_email_options = { + // The following options apply to all email addresses + // Lowercases the local part of the email address. 
+ // Please note this may violate RFC 5321 as per http://stackoverflow.com/a/9808332/192024). + // The domain is always lowercased, as per RFC 1035 + all_lowercase: true, + // The following conversions are specific to GMail + // Lowercases the local part of the GMail address (known to be case-insensitive) + gmail_lowercase: true, + // Removes dots from the local part of the email address, as that's ignored by GMail + gmail_remove_dots: true, + // Removes the subaddress (e.g. "+foo") from the email address + gmail_remove_subaddress: true, + // Conversts the googlemail.com domain to gmail.com + gmail_convert_googlemaildotcom: true, + // The following conversions are specific to Outlook.com / Windows Live / Hotmail + // Lowercases the local part of the Outlook.com address (known to be case-insensitive) + outlookdotcom_lowercase: true, + // Removes the subaddress (e.g. "+foo") from the email address + outlookdotcom_remove_subaddress: true, + // The following conversions are specific to Yahoo + // Lowercases the local part of the Yahoo address (known to be case-insensitive) + yahoo_lowercase: true, + // Removes the subaddress (e.g. "-foo") from the email address + yahoo_remove_subaddress: true, + // The following conversions are specific to Yandex + // Lowercases the local part of the Yandex address (known to be case-insensitive) + yandex_lowercase: true, + // The following conversions are specific to iCloud + // Lowercases the local part of the iCloud address (known to be case-insensitive) + icloud_lowercase: true, + // Removes the subaddress (e.g. "+foo") from the email address + icloud_remove_subaddress: true +}; // List of domains used by iCloud + +var icloud_domains = ['icloud.com', 'me.com']; // List of domains used by Outlook.com and its predecessors +// This list is likely incomplete. 
+// Partial reference: +// https://blogs.office.com/2013/04/17/outlook-com-gets-two-step-verification-sign-in-by-alias-and-new-international-domains/ + +var outlookdotcom_domains = ['hotmail.at', 'hotmail.be', 'hotmail.ca', 'hotmail.cl', 'hotmail.co.il', 'hotmail.co.nz', 'hotmail.co.th', 'hotmail.co.uk', 'hotmail.com', 'hotmail.com.ar', 'hotmail.com.au', 'hotmail.com.br', 'hotmail.com.gr', 'hotmail.com.mx', 'hotmail.com.pe', 'hotmail.com.tr', 'hotmail.com.vn', 'hotmail.cz', 'hotmail.de', 'hotmail.dk', 'hotmail.es', 'hotmail.fr', 'hotmail.hu', 'hotmail.id', 'hotmail.ie', 'hotmail.in', 'hotmail.it', 'hotmail.jp', 'hotmail.kr', 'hotmail.lv', 'hotmail.my', 'hotmail.ph', 'hotmail.pt', 'hotmail.sa', 'hotmail.sg', 'hotmail.sk', 'live.be', 'live.co.uk', 'live.com', 'live.com.ar', 'live.com.mx', 'live.de', 'live.es', 'live.eu', 'live.fr', 'live.it', 'live.nl', 'msn.com', 'outlook.at', 'outlook.be', 'outlook.cl', 'outlook.co.il', 'outlook.co.nz', 'outlook.co.th', 'outlook.com', 'outlook.com.ar', 'outlook.com.au', 'outlook.com.br', 'outlook.com.gr', 'outlook.com.pe', 'outlook.com.tr', 'outlook.com.vn', 'outlook.cz', 'outlook.de', 'outlook.dk', 'outlook.es', 'outlook.fr', 'outlook.hu', 'outlook.id', 'outlook.ie', 'outlook.in', 'outlook.it', 'outlook.jp', 'outlook.kr', 'outlook.lv', 'outlook.my', 'outlook.ph', 'outlook.pt', 'outlook.sa', 'outlook.sg', 'outlook.sk', 'passport.com']; // List of domains used by Yahoo Mail +// This list is likely incomplete + +var yahoo_domains = ['rocketmail.com', 'yahoo.ca', 'yahoo.co.uk', 'yahoo.com', 'yahoo.de', 'yahoo.fr', 'yahoo.in', 'yahoo.it', 'ymail.com']; // List of domains used by yandex.ru + +var yandex_domains = ['yandex.ru', 'yandex.ua', 'yandex.kz', 'yandex.com', 'yandex.by', 'ya.ru']; // replace single dots, but not multiple consecutive dots + +function dotsReplacer(match) { + if (match.length > 1) { + return match; + } + + return ''; +} + +function normalizeEmail(email, options) { + options = merge(options, default_normalize_email_options); + var raw_parts = email.split('@'); + var domain = raw_parts.pop(); + var user = raw_parts.join('@'); + var parts = [user, domain]; // The domain is always lowercased, as it's case-insensitive per RFC 1035 + + parts[1] = parts[1].toLowerCase(); + + if (parts[1] === 'gmail.com' || parts[1] === 'googlemail.com') { + // Address is GMail + if (options.gmail_remove_subaddress) { + parts[0] = parts[0].split('+')[0]; + } + + if (options.gmail_remove_dots) { + // this does not replace consecutive dots like example..email@gmail.com + parts[0] = parts[0].replace(/\.+/g, dotsReplacer); + } + + if (!parts[0].length) { + return false; + } + + if (options.all_lowercase || options.gmail_lowercase) { + parts[0] = parts[0].toLowerCase(); + } + + parts[1] = options.gmail_convert_googlemaildotcom ? 
'gmail.com' : parts[1]; + } else if (icloud_domains.indexOf(parts[1]) >= 0) { + // Address is iCloud + if (options.icloud_remove_subaddress) { + parts[0] = parts[0].split('+')[0]; + } + + if (!parts[0].length) { + return false; + } + + if (options.all_lowercase || options.icloud_lowercase) { + parts[0] = parts[0].toLowerCase(); + } + } else if (outlookdotcom_domains.indexOf(parts[1]) >= 0) { + // Address is Outlook.com + if (options.outlookdotcom_remove_subaddress) { + parts[0] = parts[0].split('+')[0]; + } + + if (!parts[0].length) { + return false; + } + + if (options.all_lowercase || options.outlookdotcom_lowercase) { + parts[0] = parts[0].toLowerCase(); + } + } else if (yahoo_domains.indexOf(parts[1]) >= 0) { + // Address is Yahoo + if (options.yahoo_remove_subaddress) { + var components = parts[0].split('-'); + parts[0] = components.length > 1 ? components.slice(0, -1).join('-') : components[0]; + } + + if (!parts[0].length) { + return false; + } + + if (options.all_lowercase || options.yahoo_lowercase) { + parts[0] = parts[0].toLowerCase(); + } + } else if (yandex_domains.indexOf(parts[1]) >= 0) { + if (options.all_lowercase || options.yandex_lowercase) { + parts[0] = parts[0].toLowerCase(); + } + + parts[1] = 'yandex.ru'; // all yandex domains are equal, 1st preferred + } else if (options.all_lowercase) { + // Any other address + parts[0] = parts[0].toLowerCase(); + } + + return parts.join('@'); +} + +var charsetRegex = /^[^\s-_](?!.*?[-_]{2,})[a-z0-9-\\][^\s]*[^-_\s]$/; +function isSlug(str) { + assertString(str); + return charsetRegex.test(str); +} + +var validators$1 = { + 'cs-CZ': function csCZ(str) { + return /^(([ABCDEFHKIJKLMNPRSTUVXYZ]|[0-9])-?){5,8}$/.test(str); + }, + 'de-DE': function deDE(str) { + return /^((AW|UL|AK|GA|AÖ|LF|AZ|AM|AS|ZE|AN|AB|A|KG|KH|BA|EW|BZ|HY|KM|BT|HP|B|BC|BI|BO|FN|TT|ÜB|BN|AH|BS|FR|HB|ZZ|BB|BK|BÖ|OC|OK|CW|CE|C|CO|LH|CB|KW|LC|LN|DA|DI|DE|DH|SY|NÖ|DO|DD|DU|DN|D|EI|EA|EE|FI|EM|EL|EN|PF|ED|EF|ER|AU|ZP|E|ES|NT|EU|FL|FO|FT|FF|F|FS|FD|FÜ|GE|G|GI|GF|GS|ZR|GG|GP|GR|NY|ZI|GÖ|GZ|GT|HA|HH|HM|HU|WL|HZ|WR|RN|HK|HD|HN|HS|GK|HE|HF|RZ|HI|HG|HO|HX|IK|IL|IN|J|JL|KL|KA|KS|KF|KE|KI|KT|KO|KN|KR|KC|KU|K|LD|LL|LA|L|OP|LM|LI|LB|LU|LÖ|HL|LG|MD|GN|MZ|MA|ML|MR|MY|AT|DM|MC|NZ|RM|RG|MM|ME|MB|MI|FG|DL|HC|MW|RL|MK|MG|MÜ|WS|MH|M|MS|NU|NB|ND|NM|NK|NW|NR|NI|NF|DZ|EB|OZ|TG|TO|N|OA|GM|OB|CA|EH|FW|OF|OL|OE|OG|BH|LR|OS|AA|GD|OH|KY|NP|WK|PB|PA|PE|PI|PS|P|PM|PR|RA|RV|RE|R|H|SB|WN|RS|RD|RT|BM|NE|GV|RP|SU|GL|RO|GÜ|RH|EG|RW|PN|SK|MQ|RU|SZ|RI|SL|SM|SC|HR|FZ|VS|SW|SN|CR|SE|SI|SO|LP|SG|NH|SP|IZ|ST|BF|TE|HV|OD|SR|S|AC|DW|ZW|TF|TS|TR|TÜ|UM|PZ|TP|UE|UN|UH|MN|KK|VB|V|AE|PL|RC|VG|GW|PW|VR|VK|KB|WA|WT|BE|WM|WE|AP|MO|WW|FB|WZ|WI|WB|JE|WF|WO|W|WÜ|BL|Z|GC)[- ]?[A-Z]{1,2}[- 
]?\d{1,4}|(AIC|FDB|ABG|SLN|SAW|KLZ|BUL|ESB|NAB|SUL|WST|ABI|AZE|BTF|KÖT|DKB|FEU|ROT|ALZ|SMÜ|WER|AUR|NOR|DÜW|BRK|HAB|TÖL|WOR|BAD|BAR|BER|BIW|EBS|KEM|MÜB|PEG|BGL|BGD|REI|WIL|BKS|BIR|WAT|BOR|BOH|BOT|BRB|BLK|HHM|NEB|NMB|WSF|LEO|HDL|WMS|WZL|BÜS|CHA|KÖZ|ROD|WÜM|CLP|NEC|COC|ZEL|COE|CUX|DAH|LDS|DEG|DEL|RSL|DLG|DGF|LAN|HEI|MED|DON|KIB|ROK|JÜL|MON|SLE|EBE|EIC|HIG|WBS|BIT|PRÜ|LIB|EMD|WIT|ERH|HÖS|ERZ|ANA|ASZ|MAB|MEK|STL|SZB|FDS|HCH|HOR|WOL|FRG|GRA|WOS|FRI|FFB|GAP|GER|BRL|CLZ|GTH|NOH|HGW|GRZ|LÖB|NOL|WSW|DUD|HMÜ|OHA|KRU|HAL|HAM|HBS|QLB|HVL|NAU|HAS|EBN|GEO|HOH|HDH|ERK|HER|WAN|HEF|ROF|HBN|ALF|HSK|USI|NAI|REH|SAN|KÜN|ÖHR|HOL|WAR|ARN|BRG|GNT|HOG|WOH|KEH|MAI|PAR|RID|ROL|KLE|GEL|KUS|KYF|ART|SDH|LDK|DIL|MAL|VIB|LER|BNA|GHA|GRM|MTL|WUR|LEV|LIF|STE|WEL|LIP|VAI|LUP|HGN|LBZ|LWL|PCH|STB|DAN|MKK|SLÜ|MSP|TBB|MGH|MTK|BIN|MSH|EIL|HET|SGH|BID|MYK|MSE|MST|MÜR|WRN|MEI|GRH|RIE|MZG|MIL|OBB|BED|FLÖ|MOL|FRW|SEE|SRB|AIB|MOS|BCH|ILL|SOB|NMS|NEA|SEF|UFF|NEW|VOH|NDH|TDO|NWM|GDB|GVM|WIS|NOM|EIN|GAN|LAU|HEB|OHV|OSL|SFB|ERB|LOS|BSK|KEL|BSB|MEL|WTL|OAL|FÜS|MOD|OHZ|OPR|BÜR|PAF|PLÖ|CAS|GLA|REG|VIT|ECK|SIM|GOA|EMS|DIZ|GOH|RÜD|SWA|NES|KÖN|MET|LRO|BÜZ|DBR|ROS|TET|HRO|ROW|BRV|HIP|PAN|GRI|SHK|EIS|SRO|SOK|LBS|SCZ|MER|QFT|SLF|SLS|HOM|SLK|ASL|BBG|SBK|SFT|SHG|MGN|MEG|ZIG|SAD|NEN|OVI|SHA|BLB|SIG|SON|SPN|FOR|GUB|SPB|IGB|WND|STD|STA|SDL|OBG|HST|BOG|SHL|PIR|FTL|SEB|SÖM|SÜW|TIR|SAB|TUT|ANG|SDT|LÜN|LSZ|MHL|VEC|VER|VIE|OVL|ANK|OVP|SBG|UEM|UER|WLG|GMN|NVP|RDG|RÜG|DAU|FKB|WAF|WAK|SLZ|WEN|SOG|APD|WUG|GUN|ESW|WIZ|WES|DIN|BRA|BÜD|WHV|HWI|GHC|WTM|WOB|WUN|MAK|SEL|OCH|HOT|WDA)[- ]?(([A-Z][- ]?\d{1,4})|([A-Z]{2}[- ]?\d{1,3})))[- ]?(E|H)?$/.test(str); + }, + 'de-LI': function deLI(str) { + return /^FL[- ]?\d{1,5}[UZ]?$/.test(str); + }, + 'fi-FI': function fiFI(str) { + return /^(?=.{4,7})(([A-Z]{1,3}|[0-9]{1,3})[\s-]?([A-Z]{1,3}|[0-9]{1,5}))$/.test(str); + }, + 'pt-PT': function ptPT(str) { + return /^([A-Z]{2}|[0-9]{2})[ -·]?([A-Z]{2}|[0-9]{2})[ -·]?([A-Z]{2}|[0-9]{2})$/.test(str); + }, + 'sq-AL': function sqAL(str) { + return /^[A-Z]{2}[- ]?((\d{3}[- ]?(([A-Z]{2})|T))|(R[- ]?\d{3}))$/.test(str); + }, + 'pt-BR': function ptBR(str) { + return /^[A-Z]{3}[ -]?[0-9][A-Z][0-9]{2}|[A-Z]{3}[ -]?[0-9]{4}$/.test(str); + } +}; +function isLicensePlate(str, locale) { + assertString(str); + + if (locale in validators$1) { + return validators$1[locale](str); + } else if (locale === 'any') { + for (var key in validators$1) { + /* eslint guard-for-in: 0 */ + var validator = validators$1[key]; + + if (validator(str)) { + return true; + } + } + + return false; + } + + throw new Error("Invalid locale '".concat(locale, "'")); +} + +var upperCaseRegex = /^[A-Z]$/; +var lowerCaseRegex = /^[a-z]$/; +var numberRegex = /^[0-9]$/; +var symbolRegex = /^[-#!$@%^&*()_+|~=`{}\[\]:";'<>?,.\/ ]$/; +var defaultOptions$1 = { + minLength: 8, + minLowercase: 1, + minUppercase: 1, + minNumbers: 1, + minSymbols: 1, + returnScore: false, + pointsPerUnique: 1, + pointsPerRepeat: 0.5, + pointsForContainingLower: 10, + pointsForContainingUpper: 10, + pointsForContainingNumber: 10, + pointsForContainingSymbol: 10 +}; +/* Counts number of occurrences of each char in a string + * could be moved to util/ ? 
+*/ + +function countChars(str) { + var result = {}; + Array.from(str).forEach(function (_char) { + var curVal = result[_char]; + + if (curVal) { + result[_char] += 1; + } else { + result[_char] = 1; + } + }); + return result; +} +/* Return information about a password */ + + +function analyzePassword(password) { + var charMap = countChars(password); + var analysis = { + length: password.length, + uniqueChars: Object.keys(charMap).length, + uppercaseCount: 0, + lowercaseCount: 0, + numberCount: 0, + symbolCount: 0 + }; + Object.keys(charMap).forEach(function (_char2) { + /* istanbul ignore else */ + if (upperCaseRegex.test(_char2)) { + analysis.uppercaseCount += charMap[_char2]; + } else if (lowerCaseRegex.test(_char2)) { + analysis.lowercaseCount += charMap[_char2]; + } else if (numberRegex.test(_char2)) { + analysis.numberCount += charMap[_char2]; + } else if (symbolRegex.test(_char2)) { + analysis.symbolCount += charMap[_char2]; + } + }); + return analysis; +} + +function scorePassword(analysis, scoringOptions) { + var points = 0; + points += analysis.uniqueChars * scoringOptions.pointsPerUnique; + points += (analysis.length - analysis.uniqueChars) * scoringOptions.pointsPerRepeat; + + if (analysis.lowercaseCount > 0) { + points += scoringOptions.pointsForContainingLower; + } + + if (analysis.uppercaseCount > 0) { + points += scoringOptions.pointsForContainingUpper; + } + + if (analysis.numberCount > 0) { + points += scoringOptions.pointsForContainingNumber; + } + + if (analysis.symbolCount > 0) { + points += scoringOptions.pointsForContainingSymbol; + } + + return points; +} + +function isStrongPassword(str) { + var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : null; + assertString(str); + var analysis = analyzePassword(str); + options = merge(options || {}, defaultOptions$1); + + if (options.returnScore) { + return scorePassword(analysis, options); + } + + return analysis.length >= options.minLength && analysis.lowercaseCount >= options.minLowercase && analysis.uppercaseCount >= options.minUppercase && analysis.numberCount >= options.minNumbers && analysis.symbolCount >= options.minSymbols; +} + +var vatMatchers = { + GB: /^GB((\d{3} \d{4} ([0-8][0-9]|9[0-6]))|(\d{9} \d{3})|(((GD[0-4])|(HA[5-9]))[0-9]{2}))$/, + IT: /^(IT)?[0-9]{11}$/, + NL: /^(NL)?[0-9]{9}B[0-9]{2}$/ +}; +function isVAT(str, countryCode) { + assertString(str); + assertString(countryCode); + + if (countryCode in vatMatchers) { + return vatMatchers[countryCode].test(str); + } + + throw new Error("Invalid country code: '".concat(countryCode, "'")); +} + +var version = '13.7.0'; +var validator = { + version: version, + toDate: toDate, + toFloat: toFloat, + toInt: toInt, + toBoolean: toBoolean, + equals: equals, + contains: contains, + matches: matches, + isEmail: isEmail, + isURL: isURL, + isMACAddress: isMACAddress, + isIP: isIP, + isIPRange: isIPRange, + isFQDN: isFQDN, + isBoolean: isBoolean, + isIBAN: isIBAN, + isBIC: isBIC, + isAlpha: isAlpha, + isAlphaLocales: locales$1, + isAlphanumeric: isAlphanumeric, + isAlphanumericLocales: locales$2, + isNumeric: isNumeric, + isPassportNumber: isPassportNumber, + isPort: isPort, + isLowercase: isLowercase, + isUppercase: isUppercase, + isAscii: isAscii, + isFullWidth: isFullWidth, + isHalfWidth: isHalfWidth, + isVariableWidth: isVariableWidth, + isMultibyte: isMultibyte, + isSemVer: isSemVer, + isSurrogatePair: isSurrogatePair, + isInt: isInt, + isIMEI: isIMEI, + isFloat: isFloat, + isFloatLocales: locales, + isDecimal: isDecimal, + 
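/*
 * Illustrative calls for two of the helpers gathered into the exported
 * validator object here; the passwords and address are made-up examples.
 *
 *   isStrongPassword('Tr0ub4dor&3');                  // true – meets the default minimums (8+ chars, 1 lower, 1 upper, 1 number, 1 symbol)
 *   isStrongPassword('correct horse battery staple'); // false – contains no uppercase letters or digits
 *   isStrongPassword('abc', { returnScore: true });   // 13 – returns a numeric score instead of a boolean
 *
 *   normalizeEmail('Some.Name+tag@googlemail.com');   // 'somename@gmail.com' – subaddress and dots stripped, googlemail.com converted to gmail.com
 *
 * (The exported validator object continues below.)
 */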
isHexadecimal: isHexadecimal, + isOctal: isOctal, + isDivisibleBy: isDivisibleBy, + isHexColor: isHexColor, + isRgbColor: isRgbColor, + isHSL: isHSL, + isISRC: isISRC, + isMD5: isMD5, + isHash: isHash, + isJWT: isJWT, + isJSON: isJSON, + isEmpty: isEmpty, + isLength: isLength, + isLocale: isLocale, + isByteLength: isByteLength, + isUUID: isUUID, + isMongoId: isMongoId, + isAfter: isAfter, + isBefore: isBefore, + isIn: isIn, + isCreditCard: isCreditCard, + isIdentityCard: isIdentityCard, + isEAN: isEAN, + isISIN: isISIN, + isISBN: isISBN, + isISSN: isISSN, + isMobilePhone: isMobilePhone, + isMobilePhoneLocales: locales$4, + isPostalCode: isPostalCode, + isPostalCodeLocales: locales$5, + isEthereumAddress: isEthereumAddress, + isCurrency: isCurrency, + isBtcAddress: isBtcAddress, + isISO8601: isISO8601, + isRFC3339: isRFC3339, + isISO31661Alpha2: isISO31661Alpha2, + isISO31661Alpha3: isISO31661Alpha3, + isISO4217: isISO4217, + isBase32: isBase32, + isBase58: isBase58, + isBase64: isBase64, + isDataURI: isDataURI, + isMagnetURI: isMagnetURI, + isMimeType: isMimeType, + isLatLong: isLatLong, + ltrim: ltrim, + rtrim: rtrim, + trim: trim, + escape: escape, + unescape: unescape, + stripLow: stripLow, + whitelist: whitelist, + blacklist: blacklist$1, + isWhitelisted: isWhitelisted, + normalizeEmail: normalizeEmail, + toString: toString, + isSlug: isSlug, + isStrongPassword: isStrongPassword, + isTaxID: isTaxID, + isDate: isDate, + isLicensePlate: isLicensePlate, + isVAT: isVAT, + ibanLocales: locales$3 +}; + +return validator; + +}))); diff --git a/node_modules/validator/validator.min.js b/node_modules/validator/validator.min.js new file mode 100644 index 0000000..f847fd2 --- /dev/null +++ b/node_modules/validator/validator.min.js @@ -0,0 +1,23 @@ +/*! + * Copyright (c) 2018 Chris O'Hara + * + * Permission is hereby granted, free of charge, to any person obtaining + * a copy of this software and associated documentation files (the + * "Software"), to deal in the Software without restriction, including + * without limitation the rights to use, copy, modify, merge, publish, + * distribute, sublicense, and/or sell copies of the Software, and to + * permit persons to whom the Software is furnished to do so, subject to + * the following conditions: + * + * The above copyright notice and this permission notice shall be + * included in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE + * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION + * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION + * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ */ +!function(t,e){"object"==typeof exports&&"undefined"!=typeof module?module.exports=e():"function"==typeof define&&define.amd?define(e):t.validator=e()}(this,function(){"use strict";function i(t){return(i="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(t){return typeof t}:function(t){return t&&"function"==typeof Symbol&&t.constructor===Symbol&&t!==Symbol.prototype?"symbol":typeof t})(t)}function u(t,e){return function(t){if(Array.isArray(t))return t}(t)||function(t,e){if("undefined"!=typeof Symbol&&Symbol.iterator in Object(t)){var r=[],n=!0,i=!1,a=void 0;try{for(var o,s=t[Symbol.iterator]();!(n=(o=s.next()).done)&&(r.push(o.value),!e||r.length!==e);n=!0);}catch(t){i=!0,a=t}finally{try{n||null==s.return||s.return()}finally{if(i)throw a}}return r}}(t,e)||d(t,e)||function(){throw new TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}()}function r(t){return function(t){if(Array.isArray(t))return n(t)}(t)||function(t){if("undefined"!=typeof Symbol&&Symbol.iterator in Object(t))return Array.from(t)}(t)||d(t)||function(){throw new TypeError("Invalid attempt to spread non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}()}function d(t,e){if(t){if("string"==typeof t)return n(t,e);var r=Object.prototype.toString.call(t).slice(8,-1);return"Map"===(r="Object"===r&&t.constructor?t.constructor.name:r)||"Set"===r?Array.from(t):"Arguments"===r||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(r)?n(t,e):void 0}}function n(t,e){(null==e||e>t.length)&&(e=t.length);for(var r=0,n=new Array(e);r=e.min)&&(!e.hasOwnProperty("max")||n<=e.max)&&(!e.hasOwnProperty("lt")||ne.gt)}o["fr-CA"]=o["fr-FR"],s["fr-CA"]=s["fr-FR"],o["pt-BR"]=o["pt-PT"],s["pt-BR"]=s["pt-PT"],c["pt-BR"]=c["pt-PT"],o["pl-Pl"]=o["pl-PL"],s["pl-Pl"]=s["pl-PL"],c["pl-Pl"]=c["pl-PL"],o["fa-AF"]=o.fa;var m=Object.keys(c);function Z(t){return L(t)?parseFloat(t):NaN}function M(t){return"object"===i(t)&&null!==t?t="function"==typeof t.toString?t.toString():"[object Object]":(null==t||isNaN(t)&&!t.length)&&(t=""),String(t)}function B(t,e){var r,n=0=t.length?{done:!0}:{done:!1,value:t[n++]}},e:function(t){throw t},f:e}}throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}var i,a=!0,o=!1;return{s:function(){r=t[Symbol.iterator]()},n:function(){var t=r.next();return a=t.done,t},e:function(t){o=!0,i=t},f:function(){try{a||null==r.return||r.return()}finally{if(o)throw i}}}}(function(t,e){for(var r=[],n=Math.min(t.length,e.length),i=0;i=e.min,i=!e.hasOwnProperty("max")||t<=e.max,a=!e.hasOwnProperty("lt")||te.gt;return r.test(t)&&n&&i&&a&&e}var dt=/^[0-9]{15}$/,ft=/^\d{2}-\d{6}-\d{6}-\d{1}$/;var At=/^[\x00-\x7F]+$/;var $t=/[^\u0020-\u007E\uFF61-\uFF9F\uFFA0-\uFFDC\uFFE8-\uFFEE0-9a-zA-Z]/;var pt=/[\u0020-\u007E\uFF61-\uFF9F\uFFA0-\uFFDC\uFFE8-\uFFEE0-9a-zA-Z]/;var St=/[^\x00-\x7F]/;var ht,gt,Et=(gt="i",ht=(ht=["^(0|[1-9]\\d*)\\.(0|[1-9]\\d*)\\.(0|[1-9]\\d*)","(?:-((?:0|[1-9]\\d*|\\d*[a-z-][0-9a-z-]*)(?:\\.(?:0|[1-9]\\d*|\\d*[a-z-][0-9a-z-]*))*))","?(?:\\+([0-9a-z-]+(?:\\.[0-9a-z-]+)*))?$"]).join(""),new RegExp(ht,gt));var It=/[\uD800-\uDBFF][\uDC00-\uDFFF]/;var Rt={force_decimal:!1,decimal_digits:"1,",locale:"en-US"},vt=["","-","+"];var Lt=/^(0x|0h)?[0-9A-F]+$/i;function mt(t){return l(t),Lt.test(t)}var Zt=/^(0o)?[0-7]+$/i;var 
Mt=/^#?([0-9A-F]{3}|[0-9A-F]{4}|[0-9A-F]{6}|[0-9A-F]{8})$/i;var Bt=/^rgb\((([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5]),){2}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\)$/,Nt=/^rgba\((([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5]),){3}(0?\.\d|1(\.0)?|0(\.0)?)\)$/,Ft=/^rgb\((([0-9]%|[1-9][0-9]%|100%),){2}([0-9]%|[1-9][0-9]%|100%)\)/,Dt=/^rgba\((([0-9]%|[1-9][0-9]%|100%),){3}(0?\.\d|1(\.0)?|0(\.0)?)\)/;var Ct=/^hsla?\(((\+|\-)?([0-9]+(\.[0-9]+)?(e(\+|\-)?[0-9]+)?|\.[0-9]+(e(\+|\-)?[0-9]+)?))(deg|grad|rad|turn)?(,(\+|\-)?([0-9]+(\.[0-9]+)?(e(\+|\-)?[0-9]+)?|\.[0-9]+(e(\+|\-)?[0-9]+)?)%){2}(,((\+|\-)?([0-9]+(\.[0-9]+)?(e(\+|\-)?[0-9]+)?|\.[0-9]+(e(\+|\-)?[0-9]+)?)%?))?\)$/i,Tt=/^hsla?\(((\+|\-)?([0-9]+(\.[0-9]+)?(e(\+|\-)?[0-9]+)?|\.[0-9]+(e(\+|\-)?[0-9]+)?))(deg|grad|rad|turn)?(\s(\+|\-)?([0-9]+(\.[0-9]+)?(e(\+|\-)?[0-9]+)?|\.[0-9]+(e(\+|\-)?[0-9]+)?)%){2}\s?(\/\s((\+|\-)?([0-9]+(\.[0-9]+)?(e(\+|\-)?[0-9]+)?|\.[0-9]+(e(\+|\-)?[0-9]+)?)%?)\s?)?\)$/i;var Gt=/^[A-Z]{2}[0-9A-Z]{3}\d{2}\d{5}$/;var Ot={AD:/^(AD[0-9]{2})\d{8}[A-Z0-9]{12}$/,AE:/^(AE[0-9]{2})\d{3}\d{16}$/,AL:/^(AL[0-9]{2})\d{8}[A-Z0-9]{16}$/,AT:/^(AT[0-9]{2})\d{16}$/,AZ:/^(AZ[0-9]{2})[A-Z0-9]{4}\d{20}$/,BA:/^(BA[0-9]{2})\d{16}$/,BE:/^(BE[0-9]{2})\d{12}$/,BG:/^(BG[0-9]{2})[A-Z]{4}\d{6}[A-Z0-9]{8}$/,BH:/^(BH[0-9]{2})[A-Z]{4}[A-Z0-9]{14}$/,BR:/^(BR[0-9]{2})\d{23}[A-Z]{1}[A-Z0-9]{1}$/,BY:/^(BY[0-9]{2})[A-Z0-9]{4}\d{20}$/,CH:/^(CH[0-9]{2})\d{5}[A-Z0-9]{12}$/,CR:/^(CR[0-9]{2})\d{18}$/,CY:/^(CY[0-9]{2})\d{8}[A-Z0-9]{16}$/,CZ:/^(CZ[0-9]{2})\d{20}$/,DE:/^(DE[0-9]{2})\d{18}$/,DK:/^(DK[0-9]{2})\d{14}$/,DO:/^(DO[0-9]{2})[A-Z]{4}\d{20}$/,EE:/^(EE[0-9]{2})\d{16}$/,EG:/^(EG[0-9]{2})\d{25}$/,ES:/^(ES[0-9]{2})\d{20}$/,FI:/^(FI[0-9]{2})\d{14}$/,FO:/^(FO[0-9]{2})\d{14}$/,FR:/^(FR[0-9]{2})\d{10}[A-Z0-9]{11}\d{2}$/,GB:/^(GB[0-9]{2})[A-Z]{4}\d{14}$/,GE:/^(GE[0-9]{2})[A-Z0-9]{2}\d{16}$/,GI:/^(GI[0-9]{2})[A-Z]{4}[A-Z0-9]{15}$/,GL:/^(GL[0-9]{2})\d{14}$/,GR:/^(GR[0-9]{2})\d{7}[A-Z0-9]{16}$/,GT:/^(GT[0-9]{2})[A-Z0-9]{4}[A-Z0-9]{20}$/,HR:/^(HR[0-9]{2})\d{17}$/,HU:/^(HU[0-9]{2})\d{24}$/,IE:/^(IE[0-9]{2})[A-Z0-9]{4}\d{14}$/,IL:/^(IL[0-9]{2})\d{19}$/,IQ:/^(IQ[0-9]{2})[A-Z]{4}\d{15}$/,IR:/^(IR[0-9]{2})0\d{2}0\d{18}$/,IS:/^(IS[0-9]{2})\d{22}$/,IT:/^(IT[0-9]{2})[A-Z]{1}\d{10}[A-Z0-9]{12}$/,JO:/^(JO[0-9]{2})[A-Z]{4}\d{22}$/,KW:/^(KW[0-9]{2})[A-Z]{4}[A-Z0-9]{22}$/,KZ:/^(KZ[0-9]{2})\d{3}[A-Z0-9]{13}$/,LB:/^(LB[0-9]{2})\d{4}[A-Z0-9]{20}$/,LC:/^(LC[0-9]{2})[A-Z]{4}[A-Z0-9]{24}$/,LI:/^(LI[0-9]{2})\d{5}[A-Z0-9]{12}$/,LT:/^(LT[0-9]{2})\d{16}$/,LU:/^(LU[0-9]{2})\d{3}[A-Z0-9]{13}$/,LV:/^(LV[0-9]{2})[A-Z]{4}[A-Z0-9]{13}$/,MC:/^(MC[0-9]{2})\d{10}[A-Z0-9]{11}\d{2}$/,MD:/^(MD[0-9]{2})[A-Z0-9]{20}$/,ME:/^(ME[0-9]{2})\d{18}$/,MK:/^(MK[0-9]{2})\d{3}[A-Z0-9]{10}\d{2}$/,MR:/^(MR[0-9]{2})\d{23}$/,MT:/^(MT[0-9]{2})[A-Z]{4}\d{5}[A-Z0-9]{18}$/,MU:/^(MU[0-9]{2})[A-Z]{4}\d{19}[A-Z]{3}$/,MZ:/^(MZ[0-9]{2})\d{21}$/,NL:/^(NL[0-9]{2})[A-Z]{4}\d{10}$/,NO:/^(NO[0-9]{2})\d{11}$/,PK:/^(PK[0-9]{2})[A-Z0-9]{4}\d{16}$/,PL:/^(PL[0-9]{2})\d{24}$/,PS:/^(PS[0-9]{2})[A-Z0-9]{4}\d{21}$/,PT:/^(PT[0-9]{2})\d{21}$/,QA:/^(QA[0-9]{2})[A-Z]{4}[A-Z0-9]{21}$/,RO:/^(RO[0-9]{2})[A-Z]{4}[A-Z0-9]{16}$/,RS:/^(RS[0-9]{2})\d{18}$/,SA:/^(SA[0-9]{2})\d{2}[A-Z0-9]{18}$/,SC:/^(SC[0-9]{2})[A-Z]{4}\d{20}[A-Z]{3}$/,SE:/^(SE[0-9]{2})\d{20}$/,SI:/^(SI[0-9]{2})\d{15}$/,SK:/^(SK[0-9]{2})\d{20}$/,SM:/^(SM[0-9]{2})[A-Z]{1}\d{10}[A-Z0-9]{12}$/,SV:/^(SV[0-9]{2})[A-Z0-9]{4}\d{20}$/,TL:/^(TL[0-9]{2})\d{19}$/,TN:/^(TN[0-9]{2})\d{20}$/,TR:/^(TR[0-9]{2})\d{5}[A-Z0-9]{17}$/,UA:/^(UA[0-9]{2})\d{6}[A-Z0-9]{19}$/,VA:/^(VA[0-9]{2})\d
{18}$/,VG:/^(VG[0-9]{2})[A-Z0-9]{4}\d{16}$/,XK:/^(XK[0-9]{2})\d{16}$/};var _t=Object.keys(Ot),Ht=new Set(["AD","AE","AF","AG","AI","AL","AM","AO","AQ","AR","AS","AT","AU","AW","AX","AZ","BA","BB","BD","BE","BF","BG","BH","BI","BJ","BL","BM","BN","BO","BQ","BR","BS","BT","BV","BW","BY","BZ","CA","CC","CD","CF","CG","CH","CI","CK","CL","CM","CN","CO","CR","CU","CV","CW","CX","CY","CZ","DE","DJ","DK","DM","DO","DZ","EC","EE","EG","EH","ER","ES","ET","FI","FJ","FK","FM","FO","FR","GA","GB","GD","GE","GF","GG","GH","GI","GL","GM","GN","GP","GQ","GR","GS","GT","GU","GW","GY","HK","HM","HN","HR","HT","HU","ID","IE","IL","IM","IN","IO","IQ","IR","IS","IT","JE","JM","JO","JP","KE","KG","KH","KI","KM","KN","KP","KR","KW","KY","KZ","LA","LB","LC","LI","LK","LR","LS","LT","LU","LV","LY","MA","MC","MD","ME","MF","MG","MH","MK","ML","MM","MN","MO","MP","MQ","MR","MS","MT","MU","MV","MW","MX","MY","MZ","NA","NC","NE","NF","NG","NI","NL","NO","NP","NR","NU","NZ","OM","PA","PE","PF","PG","PH","PK","PL","PM","PN","PR","PS","PT","PW","PY","QA","RE","RO","RS","RU","RW","SA","SB","SC","SD","SE","SG","SH","SI","SJ","SK","SL","SM","SN","SO","SR","SS","ST","SV","SX","SY","SZ","TC","TD","TF","TG","TH","TJ","TK","TL","TM","TN","TO","TR","TT","TV","TW","TZ","UA","UG","UM","US","UY","UZ","VA","VC","VE","VG","VI","VN","VU","WF","WS","YE","YT","ZA","ZM","ZW"]);var Pt=Ht,bt=/^[A-Za-z]{6}[A-Za-z0-9]{2}([A-Za-z0-9]{3})?$/;var Ut=/^[a-f0-9]{32}$/;var Kt={md5:32,md4:32,sha1:40,sha256:64,sha384:96,sha512:128,ripemd128:32,ripemd160:40,tiger128:32,tiger160:40,tiger192:48,crc32:8,crc32b:8};var wt=/[^A-Z0-9+\/=]/i,yt=/^[A-Z0-9_\-]*$/i,Wt={urlSafe:!1};function Yt(t,e){l(t),e=B(e,Wt);var r=t.length;if(e.urlSafe)return yt.test(t);if(r%4!=0||wt.test(t))return!1;e=t.indexOf("=");return-1===e||e===r-1||e===r-2&&"="===t[r-1]}var xt={allow_primitives:!1};var Vt={ignore_whitespace:!1};var kt={1:/^[0-9A-F]{8}-[0-9A-F]{4}-1[0-9A-F]{3}-[0-9A-F]{4}-[0-9A-F]{12}$/i,2:/^[0-9A-F]{8}-[0-9A-F]{4}-2[0-9A-F]{3}-[0-9A-F]{4}-[0-9A-F]{12}$/i,3:/^[0-9A-F]{8}-[0-9A-F]{4}-3[0-9A-F]{3}-[0-9A-F]{4}-[0-9A-F]{12}$/i,4:/^[0-9A-F]{8}-[0-9A-F]{4}-4[0-9A-F]{3}-[89AB][0-9A-F]{3}-[0-9A-F]{12}$/i,5:/^[0-9A-F]{8}-[0-9A-F]{4}-5[0-9A-F]{3}-[89AB][0-9A-F]{3}-[0-9A-F]{12}$/i,all:/^[0-9A-F]{8}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{12}$/i};var zt=/^(?:4[0-9]{12}(?:[0-9]{3,6})?|5[1-5][0-9]{14}|(222[1-9]|22[3-9][0-9]|2[3-6][0-9]{2}|27[01][0-9]|2720)[0-9]{12}|6(?:011|5[0-9][0-9])[0-9]{12,15}|3[47][0-9]{13}|3(?:0[0-5]|[68][0-9])[0-9]{11}|(?:2131|1800|35\d{3})\d{11}|6[27][0-9]{14}|^(81[0-9]{14,17}))$/;var Xt={PL:function(t){l(t);var n={1:1,2:3,3:7,4:9,5:1,6:3,7:7,8:9,9:1,10:3,11:0};if(null!=t&&11===t.length&&ut(t,{allow_leading_zeroes:!0})){var e=t.split("").slice(0,-1).reduce(function(t,e,r){return t+Number(e)*n[r+1]},0)%10,t=Number(t.charAt(t.length-1));if(0==e&&0===t||t===10-e)return!0}return!1},ES:function(t){l(t);var e={X:0,Y:1,Z:2},r=t.trim().toUpperCase();if(!/^[0-9X-Z][0-9]{7}[TRWAGMYFPDXBNJZSQVHLCKE]$/.test(r))return!1;t=r.slice(0,-1).replace(/[X,Y,Z]/g,function(t){return e[t]});return r.endsWith(["T","R","W","A","G","M","Y","F","P","D","X","B","N","J","Z","S","Q","V","H","L","C","K","E"][t%23])},FI:function(t){if(l(t),11!==t.length)return!1;if(!t.match(/^\d{6}[\-A\+]\d{3}[0-9ABCDEFHJKLMNPRSTUVWXY]{1}$/))return!1;return"0123456789ABCDEFHJKLMNPRSTUVWXY"[(1e3*parseInt(t.slice(0,6),10)+parseInt(t.slice(7,10),10))%31]===t.slice(10,11)},IN:function(t){var 
r=[[0,1,2,3,4,5,6,7,8,9],[1,2,3,4,0,6,7,8,9,5],[2,3,4,0,1,7,8,9,5,6],[3,4,0,1,2,8,9,5,6,7],[4,0,1,2,3,9,5,6,7,8],[5,9,8,7,6,0,4,3,2,1],[6,5,9,8,7,1,0,4,3,2],[7,6,5,9,8,2,1,0,4,3],[8,7,6,5,9,3,2,1,0,4],[9,8,7,6,5,4,3,2,1,0]],n=[[0,1,2,3,4,5,6,7,8,9],[1,5,7,6,2,8,3,0,9,4],[5,8,0,3,7,9,6,1,4,2],[8,9,1,6,0,4,3,5,2,7],[9,4,5,3,1,2,6,8,7,0],[4,2,8,6,5,7,3,9,0,1],[2,7,9,3,8,0,6,4,1,5],[7,0,4,6,9,1,3,2,5,8]],t=t.trim();if(!/^[1-9]\d{3}\s?\d{4}\s?\d{4}$/.test(t))return!1;var i=0;return t.replace(/\s/g,"").split("").map(Number).reverse().forEach(function(t,e){i=r[i][n[e%8][t]]}),0===i},IR:function(t){if(!t.match(/^\d{10}$/))return!1;if(t="0000".concat(t).substr(t.length-6),0===parseInt(t.substr(3,6),10))return!1;for(var e=parseInt(t.substr(9,1),10),r=0,n=0;n<9;n++)r+=parseInt(t.substr(n,1),10)*(10-n);return(r%=11)<2&&e===r||2<=r&&e===11-r},IT:function(t){return 9===t.length&&("CA00000AA"!==t&&-1new Date)&&(t.getFullYear()===e&&t.getMonth()===r-1&&t.getDate()===n)}function a(t){return function(t){for(var e=t.substring(0,17),r=0,n=0;n<17;n++)r+=parseInt(e.charAt(n),10)*parseInt(o[n],10);return s[r%11]}(t)===t.charAt(17).toUpperCase()}var e,r=["11","12","13","14","15","21","22","23","31","32","33","34","35","36","37","41","42","43","44","45","46","50","51","52","53","54","61","62","63","64","65","71","81","82","91"],o=["7","9","10","5","8","4","2","1","6","3","7","9","10","5","8","4","2"],s=["1","0","X","9","8","7","6","5","4","3","2"];return!!/^\d{15}|(\d{17}(\d|x|X))$/.test(e=t)&&(15===e.length?function(t){var e=/^[1-9]\d{7}((0[1-9])|(1[0-2]))((0[1-9])|([1-2][0-9])|(3[0-1]))\d{3}$/.test(t);if(!e)return!1;var r=t.substring(0,2);if(!(e=n(r)))return!1;t="19".concat(t.substring(6,12));return!!(e=i(t))}:function(t){var e=/^[1-9]\d{5}[1-9]\d{3}((0[1-9])|(1[0-2]))((0[1-9])|([1-2][0-9])|(3[0-1]))\d{3}(\d|x|X)$/.test(t);if(!e)return!1;var r=t.substring(0,2);if(!(e=n(r)))return!1;r=t.substring(6,14);return!!(e=i(r))&&a(t)})(e)},"zh-TW":function(t){var n={A:10,B:11,C:12,D:13,E:14,F:15,G:16,H:17,I:34,J:18,K:19,L:20,M:21,N:22,O:35,P:23,Q:24,R:25,S:26,T:27,U:28,V:29,W:32,X:30,Y:31,Z:33},t=t.trim().toUpperCase();return!!/^[A-Z][0-9]{9}$/.test(t)&&Array.from(t).reduce(function(t,e,r){if(0!==r)return 9===r?(10-t%10-Number(e))%10==0:t+Number(e)*(9-r);e=n[e];return e%10*9+Math.floor(e/10)},0)}};var Jt=8,jt=14,Qt=/^(\d{8}|\d{13}|\d{14})$/;function qt(r){var t=10-r.slice(0,-1).split("").map(function(t,e){return Number(t)*(t=r.length,e=e,t===Jt||t===jt?e%2==0?3:1:e%2==0?1:3)}).reduce(function(t,e){return t+e},0)%10;return t<10?t:0}var te=/^[A-Z]{2}[0-9A-Z]{9}[0-9]$/;var ee=/^(?:[0-9]{9}X|[0-9]{10})$/,re=/^(?:[0-9]{13})$/,ne=[1,3];function ie(t){for(var e=10,r=0;ra?"".concat(e-1):"".concat(e)).concat(r);else if(r="".concat(e-1).concat(r),a-parseInt(r,10)<100)return!1}if(60?,.\/ ]$/,qe={minLength:8,minLowercase:1,minUppercase:1,minNumbers:1,minSymbols:1,returnScore:!1,pointsPerUnique:1,pointsPerRepeat:.5,pointsForContainingLower:10,pointsForContainingUpper:10,pointsForContainingNumber:10,pointsForContainingSymbol:10};function tr(t){var e,r,n=(e=t,r={},Array.from(e).forEach(function(t){r[t]?r[t]+=1:r[t]=1}),r),i={length:t.length,uniqueChars:Object.keys(n).length,uppercaseCount:0,lowercaseCount:0,numberCount:0,symbolCount:0};return Object.keys(n).forEach(function(t){Xe.test(t)?i.uppercaseCount+=n[t]:Je.test(t)?i.lowercaseCount+=n[t]:je.test(t)?i.numberCount+=n[t]:Qe.test(t)&&(i.symbolCount+=n[t])}),i}var er={GB:/^GB((\d{3} \d{4} ([0-8][0-9]|9[0-6]))|(\d{9} 
\d{3})|(((GD[0-4])|(HA[5-9]))[0-9]{2}))$/,IT:/^(IT)?[0-9]{11}$/,NL:/^(NL)?[0-9]{9}B[0-9]{2}$/};return{version:"13.7.0",toDate:a,toFloat:Z,toInt:function(t,e){return l(t),parseInt(t,e||10)},toBoolean:function(t,e){return l(t),e?"1"===t||/^true$/i.test(t):"0"!==t&&!/^false$/i.test(t)&&""!==t},equals:function(t,e){return l(t),t===e},contains:function(t,e,r){return l(t),(r=B(r,N)).ignoreCase?t.toLowerCase().split(M(e).toLowerCase()).length>r.minOccurrences:t.split(M(e)).length>r.minOccurrences},matches:function(t,e,r){return l(t),(e="[object RegExp]"!==Object.prototype.toString.call(e)?new RegExp(e,r):e).test(t)},isEmail:function(t,e){if(l(t),(e=B(e,b)).require_display_name||e.allow_display_name){var r=t.match(U);if(r){var n=r[1];if(t=t.replace(n,"").replace(/(^<|>$)/g,""),!function(t){var e=t.replace(/^"(.+)"$/,"$1");if(e.trim()){if(/[\.";<>]/.test(e)){if(e===t)return;if(!(e.split('"').length===e.split('\\"').length))return}return 1}}(n=n.endsWith(" ")?n.substr(0,n.length-1):n))return!1}else if(e.require_display_name)return!1}if(!e.ignore_max_length&&254]/.test(t))return!1;if(0===t.indexOf("mailto:"))return!1;if((e=B(e,x)).validate_length&&2083<=t.length)return!1;if(!e.allow_fragments&&t.includes("#"))return!1;if(!e.allow_query_components&&(t.includes("?")||t.includes("&")))return!1;var r,n,i=t.split("#");if(1<(i=(t=(i=(t=i.shift()).split("?")).shift()).split("://")).length){if(r=i.shift().toLowerCase(),e.require_valid_protocol&&-1===e.protocols.indexOf(r))return!1}else{if(e.require_protocol)return!1;if("//"===t.substr(0,2)){if(!e.allow_protocol_relative_urls)return!1;i[0]=t.substr(2)}}if(""===(t=i.join("://")))return!1;if(""===(t=(i=t.split("/")).shift())&&!e.require_host)return!0;if(1<(i=t.split("@")).length){if(e.disallow_auth)return!1;if(""===i[0])return!1;if(0<=(c=i.shift()).indexOf(":")&&2/g,">").replace(/\//g,"/").replace(/\\/g,"\").replace(/`/g,"`")},unescape:function(t){return l(t),t.replace(/"/g,'"').replace(/'/g,"'").replace(/</g,"<").replace(/>/g,">").replace(///g,"/").replace(/\/g,"\\").replace(/`/g,"`").replace(/&/g,"&")},stripLow:function(t,e){return l(t),Ke(t,e?"\\x00-\\x09\\x0B\\x0C\\x0E-\\x1F\\x7F":"\\x00-\\x1F\\x7F")},whitelist:function(t,e){return l(t),t.replace(new RegExp("[^".concat(e,"]+"),"g"),"")},blacklist:Ke,isWhitelisted:function(t,e){l(t);for(var r=t.length-1;0<=r;r--)if(-1===e.indexOf(t[r]))return!1;return!0},normalizeEmail:function(t,e){e=B(e,we);var r=t.split("@"),t=r.pop();if((r=[r.join("@"),t])[1]=r[1].toLowerCase(),"gmail.com"===r[1]||"googlemail.com"===r[1]){if(e.gmail_remove_subaddress&&(r[0]=r[0].split("+")[0]),e.gmail_remove_dots&&(r[0]=r[0].replace(/\.+/g,Ve)),!r[0].length)return!1;(e.all_lowercase||e.gmail_lowercase)&&(r[0]=r[0].toLowerCase()),r[1]=e.gmail_convert_googlemaildotcom?"gmail.com":r[1]}else if(0<=ye.indexOf(r[1])){if(e.icloud_remove_subaddress&&(r[0]=r[0].split("+")[0]),!r[0].length)return!1;(e.all_lowercase||e.icloud_lowercase)&&(r[0]=r[0].toLowerCase())}else if(0<=We.indexOf(r[1])){if(e.outlookdotcom_remove_subaddress&&(r[0]=r[0].split("+")[0]),!r[0].length)return!1;(e.all_lowercase||e.outlookdotcom_lowercase)&&(r[0]=r[0].toLowerCase())}else if(0<=Ye.indexOf(r[1])){if(e.yahoo_remove_subaddress&&(t=r[0].split("-"),r[0]=1=e.minLength&&i.lowercaseCount>=e.minLowercase&&i.uppercaseCount>=e.minUppercase&&i.numberCount>=e.minNumbers&&i.symbolCount>=e.minSymbols},isTaxID:function(t){var e=1 (http://blog.izs.me/)", + "license": "ISC", + "bugs": { + "url": "https://github.com/npm/wrappy/issues" + }, + "homepage": "https://github.com/npm/wrappy" +} 
diff --git a/node_modules/wrappy/wrappy.js b/node_modules/wrappy/wrappy.js new file mode 100644 index 0000000..bb7e7d6 --- /dev/null +++ b/node_modules/wrappy/wrappy.js @@ -0,0 +1,33 @@ +// Returns a wrapper function that returns a wrapped callback +// The wrapper function should do some stuff, and return a +// presumably different callback function. +// This makes sure that own properties are retained, so that +// decorations and such are not lost along the way. +module.exports = wrappy +function wrappy (fn, cb) { + if (fn && cb) return wrappy(fn)(cb) + + if (typeof fn !== 'function') + throw new TypeError('need wrapper function') + + Object.keys(fn).forEach(function (k) { + wrapper[k] = fn[k] + }) + + return wrapper + + function wrapper() { + var args = new Array(arguments.length) + for (var i = 0; i < args.length; i++) { + args[i] = arguments[i] + } + var ret = fn.apply(this, args) + var cb = args[args.length-1] + if (typeof ret === 'function' && ret !== cb) { + Object.keys(cb).forEach(function (k) { + ret[k] = cb[k] + }) + } + return ret + } +} diff --git a/node_modules/yallist/LICENSE b/node_modules/yallist/LICENSE new file mode 100644 index 0000000..19129e3 --- /dev/null +++ b/node_modules/yallist/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/yallist/README.md b/node_modules/yallist/README.md new file mode 100644 index 0000000..f586101 --- /dev/null +++ b/node_modules/yallist/README.md @@ -0,0 +1,204 @@ +# yallist + +Yet Another Linked List + +There are many doubly-linked list implementations like it, but this +one is mine. + +For when an array would be too big, and a Map can't be iterated in +reverse order. + + +[![Build Status](https://travis-ci.org/isaacs/yallist.svg?branch=master)](https://travis-ci.org/isaacs/yallist) [![Coverage Status](https://coveralls.io/repos/isaacs/yallist/badge.svg?service=github)](https://coveralls.io/github/isaacs/yallist) + +## basic usage + +```javascript +var yallist = require('yallist') +var myList = yallist.create([1, 2, 3]) +myList.push('foo') +myList.unshift('bar') +// of course pop() and shift() are there, too +console.log(myList.toArray()) // ['bar', 1, 2, 3, 'foo'] +myList.forEach(function (k) { + // walk the list head to tail +}) +myList.forEachReverse(function (k, index, list) { + // walk the list tail to head +}) +var myDoubledList = myList.map(function (k) { + return k + k +}) +// now myDoubledList contains ['barbar', 2, 4, 6, 'foofoo'] +// mapReverse is also a thing +var myDoubledListReverse = myList.mapReverse(function (k) { + return k + k +}) // ['foofoo', 6, 4, 2, 'barbar'] + +var reduced = myList.reduce(function (set, entry) { + set += entry + return set +}, 'start') +console.log(reduced) // 'startfoo123bar' +``` + +## api + +The whole API is considered "public". 
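A minimal sketch (not part of the upstream README) of the reverse-traversal and node-moving calls documented below, assuming the same `yallist` require as in the basic-usage example above:

```javascript
var yallist = require('yallist')

var list = yallist.create(['a', 'b', 'c'])

// walk tail -> head; the callback receives (value, index, list)
list.forEachReverse(function (value, index) {
  console.log(index, value) // 2 'c', then 1 'b', then 0 'a'
})

// unshiftNode moves an existing Node to the head without copying its value
list.unshiftNode(list.tail)
console.log(list.toArray()) // [ 'c', 'a', 'b' ]

// removeNode unlinks a Node while keeping head, tail and length consistent
list.removeNode(list.head)
console.log(list.toArray()) // [ 'a', 'b' ]
```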
+ +Functions with the same name as an Array method work more or less the +same way. + +There's reverse versions of most things because that's the point. + +### Yallist + +Default export, the class that holds and manages a list. + +Call it with either a forEach-able (like an array) or a set of +arguments, to initialize the list. + +The Array-ish methods all act like you'd expect. No magic length, +though, so if you change that it won't automatically prune or add +empty spots. + +### Yallist.create(..) + +Alias for Yallist function. Some people like factories. + +#### yallist.head + +The first node in the list + +#### yallist.tail + +The last node in the list + +#### yallist.length + +The number of nodes in the list. (Change this at your peril. It is +not magic like Array length.) + +#### yallist.toArray() + +Convert the list to an array. + +#### yallist.forEach(fn, [thisp]) + +Call a function on each item in the list. + +#### yallist.forEachReverse(fn, [thisp]) + +Call a function on each item in the list, in reverse order. + +#### yallist.get(n) + +Get the data at position `n` in the list. If you use this a lot, +probably better off just using an Array. + +#### yallist.getReverse(n) + +Get the data at position `n`, counting from the tail. + +#### yallist.map(fn, thisp) + +Create a new Yallist with the result of calling the function on each +item. + +#### yallist.mapReverse(fn, thisp) + +Same as `map`, but in reverse. + +#### yallist.pop() + +Get the data from the list tail, and remove the tail from the list. + +#### yallist.push(item, ...) + +Insert one or more items to the tail of the list. + +#### yallist.reduce(fn, initialValue) + +Like Array.reduce. + +#### yallist.reduceReverse + +Like Array.reduce, but in reverse. + +#### yallist.reverse + +Reverse the list in place. + +#### yallist.shift() + +Get the data from the list head, and remove the head from the list. + +#### yallist.slice([from], [to]) + +Just like Array.slice, but returns a new Yallist. + +#### yallist.sliceReverse([from], [to]) + +Just like yallist.slice, but the result is returned in reverse. + +#### yallist.toArray() + +Create an array representation of the list. + +#### yallist.toArrayReverse() + +Create a reversed array representation of the list. + +#### yallist.unshift(item, ...) + +Insert one or more items to the head of the list. + +#### yallist.unshiftNode(node) + +Move a Node object to the front of the list. (That is, pull it out of +wherever it lives, and make it the new head.) + +If the node belongs to a different list, then that list will remove it +first. + +#### yallist.pushNode(node) + +Move a Node object to the end of the list. (That is, pull it out of +wherever it lives, and make it the new tail.) + +If the node belongs to a list already, then that list will remove it +first. + +#### yallist.removeNode(node) + +Remove a node from the list, preserving referential integrity of head +and tail and other nodes. + +Will throw an error if you try to have a list remove a node that +doesn't belong to it. + +### Yallist.Node + +The class that holds the data and is actually the list. + +Call with `var n = new Node(value, previousNode, nextNode)` + +Note that if you do direct operations on Nodes themselves, it's very +easy to get into weird states where the list is broken. Be careful :) + +#### node.next + +The next node in the list. + +#### node.prev + +The previous node in the list. + +#### node.value + +The data the node contains. + +#### node.list + +The list to which this node belongs. 
(Null if it does not belong to +any list.) diff --git a/node_modules/yallist/iterator.js b/node_modules/yallist/iterator.js new file mode 100644 index 0000000..d41c97a --- /dev/null +++ b/node_modules/yallist/iterator.js @@ -0,0 +1,8 @@ +'use strict' +module.exports = function (Yallist) { + Yallist.prototype[Symbol.iterator] = function* () { + for (let walker = this.head; walker; walker = walker.next) { + yield walker.value + } + } +} diff --git a/node_modules/yallist/package.json b/node_modules/yallist/package.json new file mode 100644 index 0000000..8a08386 --- /dev/null +++ b/node_modules/yallist/package.json @@ -0,0 +1,29 @@ +{ + "name": "yallist", + "version": "4.0.0", + "description": "Yet Another Linked List", + "main": "yallist.js", + "directories": { + "test": "test" + }, + "files": [ + "yallist.js", + "iterator.js" + ], + "dependencies": {}, + "devDependencies": { + "tap": "^12.1.0" + }, + "scripts": { + "test": "tap test/*.js --100", + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --all; git push origin --tags" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/isaacs/yallist.git" + }, + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "license": "ISC" +} diff --git a/node_modules/yallist/yallist.js b/node_modules/yallist/yallist.js new file mode 100644 index 0000000..4e83ab1 --- /dev/null +++ b/node_modules/yallist/yallist.js @@ -0,0 +1,426 @@ +'use strict' +module.exports = Yallist + +Yallist.Node = Node +Yallist.create = Yallist + +function Yallist (list) { + var self = this + if (!(self instanceof Yallist)) { + self = new Yallist() + } + + self.tail = null + self.head = null + self.length = 0 + + if (list && typeof list.forEach === 'function') { + list.forEach(function (item) { + self.push(item) + }) + } else if (arguments.length > 0) { + for (var i = 0, l = arguments.length; i < l; i++) { + self.push(arguments[i]) + } + } + + return self +} + +Yallist.prototype.removeNode = function (node) { + if (node.list !== this) { + throw new Error('removing node which does not belong to this list') + } + + var next = node.next + var prev = node.prev + + if (next) { + next.prev = prev + } + + if (prev) { + prev.next = next + } + + if (node === this.head) { + this.head = next + } + if (node === this.tail) { + this.tail = prev + } + + node.list.length-- + node.next = null + node.prev = null + node.list = null + + return next +} + +Yallist.prototype.unshiftNode = function (node) { + if (node === this.head) { + return + } + + if (node.list) { + node.list.removeNode(node) + } + + var head = this.head + node.list = this + node.next = head + if (head) { + head.prev = node + } + + this.head = node + if (!this.tail) { + this.tail = node + } + this.length++ +} + +Yallist.prototype.pushNode = function (node) { + if (node === this.tail) { + return + } + + if (node.list) { + node.list.removeNode(node) + } + + var tail = this.tail + node.list = this + node.prev = tail + if (tail) { + tail.next = node + } + + this.tail = node + if (!this.head) { + this.head = node + } + this.length++ +} + +Yallist.prototype.push = function () { + for (var i = 0, l = arguments.length; i < l; i++) { + push(this, arguments[i]) + } + return this.length +} + +Yallist.prototype.unshift = function () { + for (var i = 0, l = arguments.length; i < l; i++) { + unshift(this, arguments[i]) + } + return this.length +} + +Yallist.prototype.pop = function () { + if (!this.tail) { + return undefined + } + + var res = this.tail.value + this.tail = 
this.tail.prev + if (this.tail) { + this.tail.next = null + } else { + this.head = null + } + this.length-- + return res +} + +Yallist.prototype.shift = function () { + if (!this.head) { + return undefined + } + + var res = this.head.value + this.head = this.head.next + if (this.head) { + this.head.prev = null + } else { + this.tail = null + } + this.length-- + return res +} + +Yallist.prototype.forEach = function (fn, thisp) { + thisp = thisp || this + for (var walker = this.head, i = 0; walker !== null; i++) { + fn.call(thisp, walker.value, i, this) + walker = walker.next + } +} + +Yallist.prototype.forEachReverse = function (fn, thisp) { + thisp = thisp || this + for (var walker = this.tail, i = this.length - 1; walker !== null; i--) { + fn.call(thisp, walker.value, i, this) + walker = walker.prev + } +} + +Yallist.prototype.get = function (n) { + for (var i = 0, walker = this.head; walker !== null && i < n; i++) { + // abort out of the list early if we hit a cycle + walker = walker.next + } + if (i === n && walker !== null) { + return walker.value + } +} + +Yallist.prototype.getReverse = function (n) { + for (var i = 0, walker = this.tail; walker !== null && i < n; i++) { + // abort out of the list early if we hit a cycle + walker = walker.prev + } + if (i === n && walker !== null) { + return walker.value + } +} + +Yallist.prototype.map = function (fn, thisp) { + thisp = thisp || this + var res = new Yallist() + for (var walker = this.head; walker !== null;) { + res.push(fn.call(thisp, walker.value, this)) + walker = walker.next + } + return res +} + +Yallist.prototype.mapReverse = function (fn, thisp) { + thisp = thisp || this + var res = new Yallist() + for (var walker = this.tail; walker !== null;) { + res.push(fn.call(thisp, walker.value, this)) + walker = walker.prev + } + return res +} + +Yallist.prototype.reduce = function (fn, initial) { + var acc + var walker = this.head + if (arguments.length > 1) { + acc = initial + } else if (this.head) { + walker = this.head.next + acc = this.head.value + } else { + throw new TypeError('Reduce of empty list with no initial value') + } + + for (var i = 0; walker !== null; i++) { + acc = fn(acc, walker.value, i) + walker = walker.next + } + + return acc +} + +Yallist.prototype.reduceReverse = function (fn, initial) { + var acc + var walker = this.tail + if (arguments.length > 1) { + acc = initial + } else if (this.tail) { + walker = this.tail.prev + acc = this.tail.value + } else { + throw new TypeError('Reduce of empty list with no initial value') + } + + for (var i = this.length - 1; walker !== null; i--) { + acc = fn(acc, walker.value, i) + walker = walker.prev + } + + return acc +} + +Yallist.prototype.toArray = function () { + var arr = new Array(this.length) + for (var i = 0, walker = this.head; walker !== null; i++) { + arr[i] = walker.value + walker = walker.next + } + return arr +} + +Yallist.prototype.toArrayReverse = function () { + var arr = new Array(this.length) + for (var i = 0, walker = this.tail; walker !== null; i++) { + arr[i] = walker.value + walker = walker.prev + } + return arr +} + +Yallist.prototype.slice = function (from, to) { + to = to || this.length + if (to < 0) { + to += this.length + } + from = from || 0 + if (from < 0) { + from += this.length + } + var ret = new Yallist() + if (to < from || to < 0) { + return ret + } + if (from < 0) { + from = 0 + } + if (to > this.length) { + to = this.length + } + for (var i = 0, walker = this.head; walker !== null && i < from; i++) { + walker = walker.next + } + for (; 
walker !== null && i < to; i++, walker = walker.next) { + ret.push(walker.value) + } + return ret +} + +Yallist.prototype.sliceReverse = function (from, to) { + to = to || this.length + if (to < 0) { + to += this.length + } + from = from || 0 + if (from < 0) { + from += this.length + } + var ret = new Yallist() + if (to < from || to < 0) { + return ret + } + if (from < 0) { + from = 0 + } + if (to > this.length) { + to = this.length + } + for (var i = this.length, walker = this.tail; walker !== null && i > to; i--) { + walker = walker.prev + } + for (; walker !== null && i > from; i--, walker = walker.prev) { + ret.push(walker.value) + } + return ret +} + +Yallist.prototype.splice = function (start, deleteCount, ...nodes) { + if (start > this.length) { + start = this.length - 1 + } + if (start < 0) { + start = this.length + start; + } + + for (var i = 0, walker = this.head; walker !== null && i < start; i++) { + walker = walker.next + } + + var ret = [] + for (var i = 0; walker && i < deleteCount; i++) { + ret.push(walker.value) + walker = this.removeNode(walker) + } + if (walker === null) { + walker = this.tail + } + + if (walker !== this.head && walker !== this.tail) { + walker = walker.prev + } + + for (var i = 0; i < nodes.length; i++) { + walker = insert(this, walker, nodes[i]) + } + return ret; +} + +Yallist.prototype.reverse = function () { + var head = this.head + var tail = this.tail + for (var walker = head; walker !== null; walker = walker.prev) { + var p = walker.prev + walker.prev = walker.next + walker.next = p + } + this.head = tail + this.tail = head + return this +} + +function insert (self, node, value) { + var inserted = node === self.head ? + new Node(value, null, node, self) : + new Node(value, node, node.next, self) + + if (inserted.next === null) { + self.tail = inserted + } + if (inserted.prev === null) { + self.head = inserted + } + + self.length++ + + return inserted +} + +function push (self, item) { + self.tail = new Node(item, self.tail, null, self) + if (!self.head) { + self.head = self.tail + } + self.length++ +} + +function unshift (self, item) { + self.head = new Node(item, null, self.head, self) + if (!self.tail) { + self.tail = self.head + } + self.length++ +} + +function Node (value, prev, next, list) { + if (!(this instanceof Node)) { + return new Node(value, prev, next, list) + } + + this.list = list + this.value = value + + if (prev) { + prev.next = this + this.prev = prev + } else { + this.prev = null + } + + if (next) { + next.prev = this + this.next = next + } else { + this.next = null + } +} + +try { + // add if support for Symbol.iterator is present + require('./iterator.js')(Yallist) +} catch (er) {} diff --git a/package-lock.json b/package-lock.json new file mode 100644 index 0000000..6c2d830 --- /dev/null +++ b/package-lock.json @@ -0,0 +1,1055 @@ +{ + "name": "flutterflow-socket-example", + "lockfileVersion": 2, + "requires": true, + "packages": { + "": { + "dependencies": { + "github-auto-sync": "file:tools/github", + "prefix": "^1.0.0" + } + }, + "node_modules/@octokit/openapi-types": { + "version": "14.0.0", + "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-14.0.0.tgz", + "integrity": "sha512-HNWisMYlR8VCnNurDU6os2ikx0s0VyEjDYHNS/h4cgb8DeOxQ0n72HyinUtdDVxJhFy3FWLGl0DJhfEWk3P5Iw==" + }, + "node_modules/@octokit/request-error": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-3.0.2.tgz", + "integrity": 
"sha512-WMNOFYrSaX8zXWoJg9u/pKgWPo94JXilMLb2VManNOby9EZxrQaBe/QSC4a1TzpAlpxofg2X/jMnCyZgL6y7eg==", + "dependencies": { + "@octokit/types": "^8.0.0", + "deprecation": "^2.0.0", + "once": "^1.4.0" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/@octokit/types": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-8.0.0.tgz", + "integrity": "sha512-65/TPpOJP1i3K4lBJMnWqPUJ6zuOtzhtagDvydAWbEXpbFYA0oMKKyLb95NFZZP0lSh/4b6K+DQlzvYQJQQePg==", + "dependencies": { + "@octokit/openapi-types": "^14.0.0" + } + }, + "node_modules/@octokit/webhooks": { + "version": "10.3.0", + "resolved": "https://registry.npmjs.org/@octokit/webhooks/-/webhooks-10.3.0.tgz", + "integrity": "sha512-6k4dNsTjNhoJNJ56RKkWCMXEj32Cgvy4hIsV3fhK60hKbEfI1/rkx7tF87Gqtf1Xg5UVQfFmYygaBPGQCr0TmQ==", + "dependencies": { + "@octokit/request-error": "^3.0.0", + "@octokit/webhooks-methods": "^3.0.0", + "@octokit/webhooks-types": "6.5.0", + "aggregate-error": "^3.1.0" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/@octokit/webhooks-methods": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@octokit/webhooks-methods/-/webhooks-methods-3.0.1.tgz", + "integrity": "sha512-XftYVcBxtzC2G05kdBNn9IYLtQ+Cz6ufKkjZd0DU/qGaZEFTPzM2OabXAWG5tvL0q/I+Exio1JnRiPfetiMSEw==", + "engines": { + "node": ">= 14" + } + }, + "node_modules/@octokit/webhooks-types": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/@octokit/webhooks-types/-/webhooks-types-6.5.0.tgz", + "integrity": "sha512-SNx1YKBvi9IQvXo/SQ0p+9OKO2HMdzpCWcKsYxpGW1tlkE9TojYiGnFfxcXddyrsK4mC1UiyXY8+NYjWjtkVmA==" + }, + "node_modules/@types/node": { + "version": "18.11.3", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.11.3.tgz", + "integrity": "sha512-fNjDQzzOsZeKZu5NATgXUPsaFaTxeRgFXoosrHivTl8RGeV733OLawXsGfEk9a8/tySyZUyiZ6E8LcjPFZ2y1A==", + "dev": true + }, + "node_modules/aggregate-error": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz", + "integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==", + "dependencies": { + "clean-stack": "^2.0.0", + "indent-string": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/asap": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz", + "integrity": "sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA==" + }, + "node_modules/asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" + }, + "node_modules/basic-auth": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/basic-auth/-/basic-auth-2.0.1.tgz", + "integrity": "sha512-NF+epuEdnUYVlGuhaxbbq+dvJttwLnGY+YixlXlME5KpQ5W3CnXA5cVTneY3SPbPDRkcjMbifrwmFYcClgOZeg==", + "dependencies": { + "safe-buffer": "5.1.2" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/call-bind": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz", + "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==", + "dependencies": { + "function-bind": "^1.1.1", + "get-intrinsic": "^1.0.2" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/clean-stack": { + "version": "2.2.0", + "resolved": 
"https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz", + "integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==", + "engines": { + "node": ">=6" + } + }, + "node_modules/combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "dependencies": { + "delayed-stream": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/commander": { + "version": "2.20.3", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", + "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==" + }, + "node_modules/component-emitter": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.3.0.tgz", + "integrity": "sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg==" + }, + "node_modules/cookiejar": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/cookiejar/-/cookiejar-2.1.3.tgz", + "integrity": "sha512-JxbCBUdrfr6AQjOXrxoTvAMJO4HBTUIlBzslcJPAz+/KT8yk53fXun51u+RenNYvad/+Vc2DIz5o9UxlCDymFQ==" + }, + "node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/depd": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", + "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/deprecation": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz", + "integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==" + }, + "node_modules/dezalgo": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/dezalgo/-/dezalgo-1.0.3.tgz", + "integrity": "sha512-K7i4zNfT2kgQz3GylDw40ot9GAE47sFZ9EXHFSPP6zONLgH6kWXE0KWJchkbQJLBkRazq4APwZ4OwiFFlT95OQ==", + "dependencies": { + "asap": "^2.0.0", + "wrappy": "1" + } + }, + "node_modules/ee-first": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", + "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==" + }, + "node_modules/eventsource": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/eventsource/-/eventsource-2.0.2.tgz", + "integrity": "sha512-IzUmBGPR3+oUG9dUeXynyNmf91/3zUSJg1lCktzKw47OXuhco54U3r9B7O4XX+Rb1Itm9OZ2b0RkTs10bICOxA==", + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/fast-safe-stringify": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz", + "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==" + }, + 
"node_modules/form-data": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", + "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/formidable": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/formidable/-/formidable-2.0.1.tgz", + "integrity": "sha512-rjTMNbp2BpfQShhFbR3Ruk3qk2y9jKpvMW78nJgx8QKtxjDVrwbZG+wvDOmVbifHyOUOQJXxqEy6r0faRrPzTQ==", + "dependencies": { + "dezalgo": "1.0.3", + "hexoid": "1.0.0", + "once": "1.4.0", + "qs": "6.9.3" + }, + "funding": { + "url": "https://ko-fi.com/tunnckoCore/commissions" + } + }, + "node_modules/formidable/node_modules/qs": { + "version": "6.9.3", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.9.3.tgz", + "integrity": "sha512-EbZYNarm6138UKKq46tdx08Yo/q9ZhFoAXAI1meAFd2GtbRDhbZY2WQSICskT0c5q99aFzLG1D4nvTk9tqfXIw==", + "engines": { + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/function-bind": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", + "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==" + }, + "node_modules/get-intrinsic": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.3.tgz", + "integrity": "sha512-QJVz1Tj7MS099PevUG5jvnt9tSkXN8K14dxQlikJuPt4uD9hHAHjLyLBiLR5zELelBdD9QNRAXZzsJx0WaDL9A==", + "dependencies": { + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.3" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/github-auto-sync": { + "resolved": "tools/github", + "link": true + }, + "node_modules/has": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", + "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "dependencies": { + "function-bind": "^1.1.1" + }, + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/has-symbols": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", + "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/hexoid": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/hexoid/-/hexoid-1.0.0.tgz", + "integrity": "sha512-QFLV0taWQOZtvIRIAdBChesmogZrtuXvVWsFHZTk2SU+anspqZ2vMnoLg7IE1+Uk16N19APic1BuF8bC8c2m5g==", + "engines": { + "node": ">=8" + } + }, + "node_modules/indent-string": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", + "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==", + "engines": { + "node": ">=8" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + }, + "node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + 
"integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/methods": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", + "integrity": "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime": { + "version": "2.6.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-2.6.0.tgz", + "integrity": "sha512-USPkMeET31rOMiarsBNIHZKLGgvKc/LrjofAnBlOttf5ajRvqiRA8QsenbcooctK6d6Ts6aqZXBA+XbkKthiQg==", + "bin": { + "mime": "cli.js" + }, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/morgan": { + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/morgan/-/morgan-1.10.0.tgz", + "integrity": "sha512-AbegBVI4sh6El+1gNwvD5YIck7nSA36weD7xvIxG4in80j/UoK8AEGaWnnz8v1GxonMCltmlNs5ZKbGvl9b1XQ==", + "dependencies": { + "basic-auth": "~2.0.1", + "debug": "2.6.9", + "depd": "~2.0.0", + "on-finished": "~2.3.0", + "on-headers": "~1.0.2" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" + }, + "node_modules/object-inspect": { + "version": "1.12.2", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.2.tgz", + "integrity": "sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ==", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/on-finished": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz", + "integrity": "sha512-ikqdkGAAyf/X/gPhXGvfgAytDZtDbr+bkNUJ0N9h5MI/dmdgCs3l6hoHrcUv41sRKew3jIwrp4qQDXiK99Utww==", + "dependencies": { + "ee-first": "1.1.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/on-headers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/on-headers/-/on-headers-1.0.2.tgz", + "integrity": "sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/prefix": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/prefix/-/prefix-1.0.0.tgz", + "integrity": "sha512-KtGt8TV4nQkYgO6wJ5phkBGAECZW5cs5vQhqnz1LlxGGd8y4LwpW3Bq5pofULjdiMXACDCxisDUhwtFXb7vHfQ==" + }, + "node_modules/qs": { + "version": 
"6.11.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz", + "integrity": "sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==", + "dependencies": { + "side-channel": "^1.0.4" + }, + "engines": { + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/readable-stream": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" + }, + "node_modules/semver": { + "version": "7.3.8", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz", + "integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==", + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/side-channel": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", + "integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==", + "dependencies": { + "call-bind": "^1.0.0", + "get-intrinsic": "^1.0.2", + "object-inspect": "^1.9.0" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/smee-client": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/smee-client/-/smee-client-1.2.3.tgz", + "integrity": "sha512-uDrU8u9/Ln7aRXyzGHgVaNUS8onHZZeSwQjCdkMoSL7U85xI+l+Y2NgjibkMJAyXkW7IAbb8rw9RMHIjS6lAwA==", + "dependencies": { + "commander": "^2.19.0", + "eventsource": "^1.1.0", + "morgan": "^1.9.1", + "superagent": "^7.1.3", + "validator": "^13.7.0" + }, + "bin": { + "smee": "bin/smee.js" + } + }, + "node_modules/smee-client/node_modules/eventsource": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/eventsource/-/eventsource-1.1.2.tgz", + "integrity": "sha512-xAH3zWhgO2/3KIniEKYPr8plNSzlGINOUqYj0m0u7AB81iRw8b/3E73W6AuU+6klLbaSFmZnaETQ2lXPfAydrA==", + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/string_decoder": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", + "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "dependencies": { + "safe-buffer": "~5.2.0" + } + }, + "node_modules/string_decoder/node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/superagent": { + "version": "7.1.5", + "resolved": "https://registry.npmjs.org/superagent/-/superagent-7.1.5.tgz", + "integrity": 
"sha512-HQYyGuDRFGmZ6GNC4hq2f37KnsY9Lr0/R1marNZTgMweVDQLTLJJ6DGQ9Tj/xVVs5HEnop9EMmTbywb5P30aqw==", + "dependencies": { + "component-emitter": "^1.3.0", + "cookiejar": "^2.1.3", + "debug": "^4.3.4", + "fast-safe-stringify": "^2.1.1", + "form-data": "^4.0.0", + "formidable": "^2.0.1", + "methods": "^1.1.2", + "mime": "^2.5.0", + "qs": "^6.10.3", + "readable-stream": "^3.6.0", + "semver": "^7.3.7" + }, + "engines": { + "node": ">=6.4.0 <13 || >=14" + } + }, + "node_modules/superagent/node_modules/debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/superagent/node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" + }, + "node_modules/validator": { + "version": "13.7.0", + "resolved": "https://registry.npmjs.org/validator/-/validator-13.7.0.tgz", + "integrity": "sha512-nYXQLCBkpJ8X6ltALua9dRrZDHVYxjJ1wgskNt1lH9fzGjs3tgojGSCBjmEPwkWS1y29+DrizMTW19Pr9uB2nw==", + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" + }, + "node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + }, + "tools/github": { + "version": "1.0.0", + "license": "MIT", + "dependencies": { + "@octokit/webhooks": "^10.3.0", + "eventsource": "^2.0.2", + "smee-client": "^1.2.3" + }, + "devDependencies": { + "@types/node": "^18.11.0" + } + } + }, + "dependencies": { + "@octokit/openapi-types": { + "version": "14.0.0", + "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-14.0.0.tgz", + "integrity": "sha512-HNWisMYlR8VCnNurDU6os2ikx0s0VyEjDYHNS/h4cgb8DeOxQ0n72HyinUtdDVxJhFy3FWLGl0DJhfEWk3P5Iw==" + }, + "@octokit/request-error": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-3.0.2.tgz", + "integrity": "sha512-WMNOFYrSaX8zXWoJg9u/pKgWPo94JXilMLb2VManNOby9EZxrQaBe/QSC4a1TzpAlpxofg2X/jMnCyZgL6y7eg==", + "requires": { + "@octokit/types": "^8.0.0", + "deprecation": "^2.0.0", + "once": "^1.4.0" + } + }, + "@octokit/types": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-8.0.0.tgz", + "integrity": "sha512-65/TPpOJP1i3K4lBJMnWqPUJ6zuOtzhtagDvydAWbEXpbFYA0oMKKyLb95NFZZP0lSh/4b6K+DQlzvYQJQQePg==", + "requires": { + "@octokit/openapi-types": "^14.0.0" + } + }, + "@octokit/webhooks": { + "version": "10.3.0", + "resolved": "https://registry.npmjs.org/@octokit/webhooks/-/webhooks-10.3.0.tgz", + "integrity": 
"sha512-6k4dNsTjNhoJNJ56RKkWCMXEj32Cgvy4hIsV3fhK60hKbEfI1/rkx7tF87Gqtf1Xg5UVQfFmYygaBPGQCr0TmQ==", + "requires": { + "@octokit/request-error": "^3.0.0", + "@octokit/webhooks-methods": "^3.0.0", + "@octokit/webhooks-types": "6.5.0", + "aggregate-error": "^3.1.0" + } + }, + "@octokit/webhooks-methods": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@octokit/webhooks-methods/-/webhooks-methods-3.0.1.tgz", + "integrity": "sha512-XftYVcBxtzC2G05kdBNn9IYLtQ+Cz6ufKkjZd0DU/qGaZEFTPzM2OabXAWG5tvL0q/I+Exio1JnRiPfetiMSEw==" + }, + "@octokit/webhooks-types": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/@octokit/webhooks-types/-/webhooks-types-6.5.0.tgz", + "integrity": "sha512-SNx1YKBvi9IQvXo/SQ0p+9OKO2HMdzpCWcKsYxpGW1tlkE9TojYiGnFfxcXddyrsK4mC1UiyXY8+NYjWjtkVmA==" + }, + "@types/node": { + "version": "18.11.3", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.11.3.tgz", + "integrity": "sha512-fNjDQzzOsZeKZu5NATgXUPsaFaTxeRgFXoosrHivTl8RGeV733OLawXsGfEk9a8/tySyZUyiZ6E8LcjPFZ2y1A==", + "dev": true + }, + "aggregate-error": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz", + "integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==", + "requires": { + "clean-stack": "^2.0.0", + "indent-string": "^4.0.0" + } + }, + "asap": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz", + "integrity": "sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA==" + }, + "asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" + }, + "basic-auth": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/basic-auth/-/basic-auth-2.0.1.tgz", + "integrity": "sha512-NF+epuEdnUYVlGuhaxbbq+dvJttwLnGY+YixlXlME5KpQ5W3CnXA5cVTneY3SPbPDRkcjMbifrwmFYcClgOZeg==", + "requires": { + "safe-buffer": "5.1.2" + } + }, + "call-bind": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz", + "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==", + "requires": { + "function-bind": "^1.1.1", + "get-intrinsic": "^1.0.2" + } + }, + "clean-stack": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz", + "integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==" + }, + "combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "requires": { + "delayed-stream": "~1.0.0" + } + }, + "commander": { + "version": "2.20.3", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", + "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==" + }, + "component-emitter": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.3.0.tgz", + "integrity": "sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg==" + }, + "cookiejar": { + "version": "2.1.3", + "resolved": 
"https://registry.npmjs.org/cookiejar/-/cookiejar-2.1.3.tgz", + "integrity": "sha512-JxbCBUdrfr6AQjOXrxoTvAMJO4HBTUIlBzslcJPAz+/KT8yk53fXun51u+RenNYvad/+Vc2DIz5o9UxlCDymFQ==" + }, + "debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "requires": { + "ms": "2.0.0" + } + }, + "delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==" + }, + "depd": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", + "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==" + }, + "deprecation": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz", + "integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==" + }, + "dezalgo": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/dezalgo/-/dezalgo-1.0.3.tgz", + "integrity": "sha512-K7i4zNfT2kgQz3GylDw40ot9GAE47sFZ9EXHFSPP6zONLgH6kWXE0KWJchkbQJLBkRazq4APwZ4OwiFFlT95OQ==", + "requires": { + "asap": "^2.0.0", + "wrappy": "1" + } + }, + "ee-first": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", + "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==" + }, + "eventsource": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/eventsource/-/eventsource-2.0.2.tgz", + "integrity": "sha512-IzUmBGPR3+oUG9dUeXynyNmf91/3zUSJg1lCktzKw47OXuhco54U3r9B7O4XX+Rb1Itm9OZ2b0RkTs10bICOxA==" + }, + "fast-safe-stringify": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz", + "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==" + }, + "form-data": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", + "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", + "requires": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + } + }, + "formidable": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/formidable/-/formidable-2.0.1.tgz", + "integrity": "sha512-rjTMNbp2BpfQShhFbR3Ruk3qk2y9jKpvMW78nJgx8QKtxjDVrwbZG+wvDOmVbifHyOUOQJXxqEy6r0faRrPzTQ==", + "requires": { + "dezalgo": "1.0.3", + "hexoid": "1.0.0", + "once": "1.4.0", + "qs": "6.9.3" + }, + "dependencies": { + "qs": { + "version": "6.9.3", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.9.3.tgz", + "integrity": "sha512-EbZYNarm6138UKKq46tdx08Yo/q9ZhFoAXAI1meAFd2GtbRDhbZY2WQSICskT0c5q99aFzLG1D4nvTk9tqfXIw==" + } + } + }, + "function-bind": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", + "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==" + }, + "get-intrinsic": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.3.tgz", + "integrity": "sha512-QJVz1Tj7MS099PevUG5jvnt9tSkXN8K14dxQlikJuPt4uD9hHAHjLyLBiLR5zELelBdD9QNRAXZzsJx0WaDL9A==", + 
"requires": { + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.3" + } + }, + "github-auto-sync": { + "version": "file:tools/github", + "requires": { + "@octokit/webhooks": "^10.3.0", + "@types/node": "^18.11.0", + "eventsource": "^2.0.2", + "smee-client": "^1.2.3" + } + }, + "has": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", + "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "requires": { + "function-bind": "^1.1.1" + } + }, + "has-symbols": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", + "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==" + }, + "hexoid": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/hexoid/-/hexoid-1.0.0.tgz", + "integrity": "sha512-QFLV0taWQOZtvIRIAdBChesmogZrtuXvVWsFHZTk2SU+anspqZ2vMnoLg7IE1+Uk16N19APic1BuF8bC8c2m5g==" + }, + "indent-string": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", + "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==" + }, + "inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + }, + "lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "requires": { + "yallist": "^4.0.0" + } + }, + "methods": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", + "integrity": "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==" + }, + "mime": { + "version": "2.6.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-2.6.0.tgz", + "integrity": "sha512-USPkMeET31rOMiarsBNIHZKLGgvKc/LrjofAnBlOttf5ajRvqiRA8QsenbcooctK6d6Ts6aqZXBA+XbkKthiQg==" + }, + "mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==" + }, + "mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "requires": { + "mime-db": "1.52.0" + } + }, + "morgan": { + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/morgan/-/morgan-1.10.0.tgz", + "integrity": "sha512-AbegBVI4sh6El+1gNwvD5YIck7nSA36weD7xvIxG4in80j/UoK8AEGaWnnz8v1GxonMCltmlNs5ZKbGvl9b1XQ==", + "requires": { + "basic-auth": "~2.0.1", + "debug": "2.6.9", + "depd": "~2.0.0", + "on-finished": "~2.3.0", + "on-headers": "~1.0.2" + } + }, + "ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" + }, + "object-inspect": { + "version": "1.12.2", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.2.tgz", + "integrity": "sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ==" + 
}, + "on-finished": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz", + "integrity": "sha512-ikqdkGAAyf/X/gPhXGvfgAytDZtDbr+bkNUJ0N9h5MI/dmdgCs3l6hoHrcUv41sRKew3jIwrp4qQDXiK99Utww==", + "requires": { + "ee-first": "1.1.1" + } + }, + "on-headers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/on-headers/-/on-headers-1.0.2.tgz", + "integrity": "sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA==" + }, + "once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "requires": { + "wrappy": "1" + } + }, + "prefix": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/prefix/-/prefix-1.0.0.tgz", + "integrity": "sha512-KtGt8TV4nQkYgO6wJ5phkBGAECZW5cs5vQhqnz1LlxGGd8y4LwpW3Bq5pofULjdiMXACDCxisDUhwtFXb7vHfQ==" + }, + "qs": { + "version": "6.11.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz", + "integrity": "sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==", + "requires": { + "side-channel": "^1.0.4" + } + }, + "readable-stream": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "requires": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + } + }, + "safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" + }, + "semver": { + "version": "7.3.8", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz", + "integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==", + "requires": { + "lru-cache": "^6.0.0" + } + }, + "side-channel": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", + "integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==", + "requires": { + "call-bind": "^1.0.0", + "get-intrinsic": "^1.0.2", + "object-inspect": "^1.9.0" + } + }, + "smee-client": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/smee-client/-/smee-client-1.2.3.tgz", + "integrity": "sha512-uDrU8u9/Ln7aRXyzGHgVaNUS8onHZZeSwQjCdkMoSL7U85xI+l+Y2NgjibkMJAyXkW7IAbb8rw9RMHIjS6lAwA==", + "requires": { + "commander": "^2.19.0", + "eventsource": "^1.1.0", + "morgan": "^1.9.1", + "superagent": "^7.1.3", + "validator": "^13.7.0" + }, + "dependencies": { + "eventsource": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/eventsource/-/eventsource-1.1.2.tgz", + "integrity": "sha512-xAH3zWhgO2/3KIniEKYPr8plNSzlGINOUqYj0m0u7AB81iRw8b/3E73W6AuU+6klLbaSFmZnaETQ2lXPfAydrA==" + } + } + }, + "string_decoder": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", + "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "requires": { + "safe-buffer": "~5.2.0" + }, + "dependencies": { + "safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + 
"integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==" + } + } + }, + "superagent": { + "version": "7.1.5", + "resolved": "https://registry.npmjs.org/superagent/-/superagent-7.1.5.tgz", + "integrity": "sha512-HQYyGuDRFGmZ6GNC4hq2f37KnsY9Lr0/R1marNZTgMweVDQLTLJJ6DGQ9Tj/xVVs5HEnop9EMmTbywb5P30aqw==", + "requires": { + "component-emitter": "^1.3.0", + "cookiejar": "^2.1.3", + "debug": "^4.3.4", + "fast-safe-stringify": "^2.1.1", + "form-data": "^4.0.0", + "formidable": "^2.0.1", + "methods": "^1.1.2", + "mime": "^2.5.0", + "qs": "^6.10.3", + "readable-stream": "^3.6.0", + "semver": "^7.3.7" + }, + "dependencies": { + "debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "requires": { + "ms": "2.1.2" + } + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + } + } + }, + "util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" + }, + "validator": { + "version": "13.7.0", + "resolved": "https://registry.npmjs.org/validator/-/validator-13.7.0.tgz", + "integrity": "sha512-nYXQLCBkpJ8X6ltALua9dRrZDHVYxjJ1wgskNt1lH9fzGjs3tgojGSCBjmEPwkWS1y29+DrizMTW19Pr9uB2nw==" + }, + "wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" + }, + "yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + } + } +} diff --git a/package.json b/package.json new file mode 100644 index 0000000..3aac56d --- /dev/null +++ b/package.json @@ -0,0 +1,6 @@ +{ + "dependencies": { + "github-auto-sync": "file:tools/github", + "prefix": "^1.0.0" + } +} diff --git a/watch b/watch index 5062481..b584e06 100755 --- a/watch +++ b/watch @@ -1,7 +1,7 @@ #!/bin/bash # ------- CONFIGURATION ---------------- -export GITHUB_WEBHOOK_URL="https://smee.io/" +export GITHUB_WEBHOOK_URL="https://smee.io/aLVrM23iI4IPWC5" export GITHUB_WEBHOOK_SECRET="ff-my-github-webhook-secret" # ------- END CONFIGURATION ------------ From 4abfd95e7d9a4b96ce33a8ca9f8fa3e58baf60b4 Mon Sep 17 00:00:00 2001 From: Matija Stepanic Date: Mon, 7 Nov 2022 14:20:25 +0100 Subject: [PATCH 09/11] Installed flutterflow-socket --- watch | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/watch b/watch index b584e06..5062481 100755 --- a/watch +++ b/watch @@ -1,7 +1,7 @@ #!/bin/bash # ------- CONFIGURATION ---------------- -export GITHUB_WEBHOOK_URL="https://smee.io/aLVrM23iI4IPWC5" +export GITHUB_WEBHOOK_URL="https://smee.io/" export GITHUB_WEBHOOK_SECRET="ff-my-github-webhook-secret" # ------- END CONFIGURATION ------------ From 2691a7847c190c0764a765339946db3bde8525e1 Mon Sep 17 00:00:00 2001 From: Matija Stepanic Date: Mon, 7 Nov 2022 14:24:20 +0100 Subject: [PATCH 10/11] Local changes before sync --- lib/home_page/home_page_widget.dart | 2 +- watch | 2 +- 2 files changed, 2 insertions(+), 
2 deletions(-)

diff --git a/lib/home_page/home_page_widget.dart b/lib/home_page/home_page_widget.dart
index 24e2582..beb3a87 100644
--- a/lib/home_page/home_page_widget.dart
+++ b/lib/home_page/home_page_widget.dart
@@ -43,7 +43,7 @@ class _HomePageWidgetState extends State<HomePageWidget> {
               child: Align(
                 alignment: AlignmentDirectional(0, 0),
                 child: Text(
-                  'Change No. 005',
+                  'Change No. 006 locally',
                   style: FlutterFlowTheme.of(context).title1,
                 ),
               ),
diff --git a/watch b/watch
index 5062481..b584e06 100755
--- a/watch
+++ b/watch
@@ -1,7 +1,7 @@
 #!/bin/bash

 # ------- CONFIGURATION ----------------
-export GITHUB_WEBHOOK_URL="https://smee.io/"
+export GITHUB_WEBHOOK_URL="https://smee.io/aLVrM23iI4IPWC5"
 export GITHUB_WEBHOOK_SECRET="ff-my-github-webhook-secret"
 # ------- END CONFIGURATION ------------

From 796e62292c70e50968adf4515d5e507f2b5ca3b8 Mon Sep 17 00:00:00 2001
From: Matija Stepanic
Date: Mon, 7 Nov 2022 17:25:07 +0100
Subject: [PATCH 11/11] Local changes before sync

---
 run | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/run b/run
index ee3a3b5..47fb3ef 100755
--- a/run
+++ b/run
@@ -10,4 +10,4 @@ then
 fi

 # $1 is the flutter device ID
-flutter run -d "$1" --pid-file /tmp/flutter.pid
\ No newline at end of file
+flutter run -d "$1" --pid-file "/tmp/flutter-$1.pid"
\ No newline at end of file
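
A note on the [PATCH 11/11] change above: writing the PID to a per-device file ("/tmp/flutter-$1.pid" instead of the shared /tmp/flutter.pid) lets several flutter run sessions run side by side without overwriting one another's PID file. Flutter's --pid-file option records the process ID precisely so that an external script can signal the running app: SIGUSR1 triggers a hot reload and SIGUSR2 a hot restart. The following is a minimal, hypothetical reload helper, not part of the patch series; it assumes only the per-device naming introduced in this commit.

#!/bin/bash
# Hypothetical helper (illustration only, not shipped in these patches):
# hot-reload every flutter run session started via ./run <device-id>.
for pid_file in /tmp/flutter-*.pid; do
  [ -e "$pid_file" ] || continue                  # glob matched nothing
  if kill -USR1 "$(cat "$pid_file")" 2>/dev/null; then
    echo "Hot reload sent for $pid_file"
  else
    rm -f "$pid_file"                             # drop stale PID files
  fi
done

For example, with ./run chrome and ./run emulator-5554 both running, one invocation reloads both sessions; sending USR2 instead would trigger a full hot restart.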